in plugins/spark_upgrade/spark_config/__init__.py [0:0]
def get_rules(self) -> List[Rule]:
    """Return the Piranha rewrite rules for upgrading Spark configuration code.

    Three rules are produced:
      * ``update_spark_conf_init`` — appends the Spark 3.x legacy-compatibility
        settings (``spark.sql.legacy.timeParserPolicy`` and
        ``spark.sql.legacy.allowUntypedScalaUDF``) to bare ``new SparkConf()``
        instantiations.
      * ``update_spark_session_builder_init`` — same settings, applied via
        ``.config(...)`` on bare ``SparkSession.builder()`` chains.
      * ``update_import_array_queue`` — replaces the removed
        ``org.spark_project.jetty.util.ArrayQueue`` import with
        ``java.util.ArrayDeque`` (statement form depends on ``self.language``:
        Java requires a trailing semicolon, Scala does not).

    :return: list of rules to be registered with the Piranha rule graph.
    """
    update_spark_conf_init = Rule(
        name="update_spark_conf_init",
        query="cs new SparkConf()",
        replace_node="*",
        replace='new SparkConf().set("spark.sql.legacy.timeParserPolicy","LEGACY").set("spark.sql.legacy.allowUntypedScalaUDF", "true")',
        filters={
            # Idempotency guard: skip occurrences already wrapped with the
            # legacy settings so re-running the upgrade doesn't stack them.
            Filter(
                not_enclosing_node='cs new SparkConf().set("spark.sql.legacy.timeParserPolicy","LEGACY").set("spark.sql.legacy.allowUntypedScalaUDF", "true")'
            )
        },
    )
    update_spark_session_builder_init = Rule(
        # Fixed: was mistakenly registered under the duplicate name
        # "update_spark_conf_init", which made the two rules
        # indistinguishable in rule-level reporting and graph wiring.
        name="update_spark_session_builder_init",
        query="cs SparkSession.builder()",
        replace_node="*",
        replace='SparkSession.builder().config("spark.sql.legacy.timeParserPolicy","LEGACY").config("spark.sql.legacy.allowUntypedScalaUDF", "true")',
        filters={
            # Same idempotency guard as above, for the builder() form.
            Filter(
                not_enclosing_node='cs SparkSession.builder().config("spark.sql.legacy.timeParserPolicy","LEGACY").config("spark.sql.legacy.allowUntypedScalaUDF", "true")'
            )
        },
    )
    update_import_array_queue = Rule(
        name="update_import_array_queue",
        # Java import statements carry a trailing ';'; Scala's do not.
        query=(
            "cs import org.spark_project.jetty.util.ArrayQueue;"
            if self.language == "java"
            else "cs import org.spark_project.jetty.util.ArrayQueue"
        ),
        replace_node="*",
        replace=(
            "import java.util.ArrayDeque;"
            if self.language == "java"
            else "import java.util.ArrayDeque"
        ),
    )
    return [
        update_spark_conf_init,
        update_spark_session_builder_init,
        update_import_array_queue,
    ]