in pyspark-sdk/setup.py [0:0]
def run(self):
    """Run the standard install step, then copy the feature-store uber jar
    into ``$SPARK_HOME/jars`` so Spark can load it at runtime.

    If the ``SPARK_HOME`` environment variable is not set, the jar copy is
    skipped and a notice is printed instead.

    Raises:
        RuntimeError: if no jar matching ``UBER_JAR_NAME_PREFIX`` is found
            among the packaged dependency jars (previously this surfaced as
            a bare ``IndexError`` from ``list.pop()`` on an empty list).
    """
    install.run(self)
    spark_home_dir = os.environ.get('SPARK_HOME')  # .get() already defaults to None
    if spark_home_dir:
        uber_jar_target = Path(spark_home_dir) / "jars" / UBER_JAR_NAME
        jars_dir = Path(os.getcwd()) / Path(JARS_TARGET)
        # Locate the bundled uber jar among the packaged dependency jars.
        matches = [jar for jar in os.listdir(jars_dir)
                   if jar.startswith(UBER_JAR_NAME_PREFIX)]
        if not matches:
            # Fail with an actionable message instead of a bare IndexError.
            raise RuntimeError(
                f"No jar starting with '{UBER_JAR_NAME_PREFIX}' found in {jars_dir}"
            )
        # NOTE(review): if several jars match the prefix, one is picked
        # arbitrarily (same as the original .pop() behavior).
        uber_jar_source = jars_dir / matches.pop()
        print(f"Copying feature store uber jar to {uber_jar_target}")
        shutil.copy(uber_jar_source, uber_jar_target)
    else:
        print("Environment variable SPARK_HOME is not set, dependent jars are not installed to SPARK_HOME.")
    print("Installation finished.")