in plugins/spark_upgrade/scala_session_builder/__init__.py
# Module-level imports this snippet relies on; VAL_DEF_QUERY is a tree-sitter
# query defined elsewhere in this file.
from polyglot_piranha import (
    Edit, Filter, Match, PiranhaArguments,
    PiranhaOutputSummary, Rule, RuleGraph, execute_piranha,
)

def __call__(self) -> dict[str, bool]:
    # This step only applies to Scala sources; everything else is a no-op.
    if self.language != "scala":
        return {}
    piranha_args = self.get_piranha_arguments()
    summaries: list[PiranhaOutputSummary] = execute_piranha(piranha_args)
    assert summaries is not None
    for summary in summaries:
        file_path: str = summary.path
        if len(summary.rewrites) == 0:
            continue
        print(f"rewrites: {len(summary.rewrites)}")
        calls_to_add_str = ""
        # the rewrite's edit will have `calls` with all matches
        edit: Edit = summary.rewrites[0]
        if edit.matched_rule == "delete_calls_query":
            match: Match = edit.p_match
            val_id = match.matches["val_id"]
            calls = match.matches["calls"]
            print(f"calls: {calls}")
            # Turn the deleted `<val_id>.sqlContext.setConf(...)` calls into
            # chainable `.config(...)` builder calls.
            calls_to_add_str = calls.replace(
                f"{val_id}.sqlContext.setConf", ".config"
            )
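            # For example (hypothetical input): if `val_id` is "spark" and
            # `calls` is `spark.sqlContext.setConf("k", "v")`, then
            # `calls_to_add_str` becomes `.config("k", "v")`.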
        match = summary.rewrites[0].p_match
        val_def = match.matches["val_def"]
        assert isinstance(val_def, str)
        assert "getOrCreate()" in val_def
        # Splice the converted `.config(...)` calls in just before `.getOrCreate()`.
        replace_str = calls_to_add_str + "\n.getOrCreate()"
        new_val_def = val_def.replace(".getOrCreate()", replace_str)
        # One-off rule that swaps in the rewritten `val` definition, but only
        # where the builder chain does not already contain a `config` call.
        replace_val_def_rule = Rule(
            name="replace_val_def_rule",
            query=VAL_DEF_QUERY,
            replace_node="val_def",
            replace=new_val_def,
            filters={
                Filter(
                    enclosing_node="(val_definition) @_vl_def",
                    not_contains=[
                        """(
                        (identifier) @conf_id
                        (#eq? @conf_id "config")
                        )""",
                    ],
                )
            },
        )
        rule_graph = RuleGraph(
            rules=[replace_val_def_rule],
            edges=[],
        )
        # Re-run Piranha with just this rule, scoped to the current file.
        execute_piranha(
            PiranhaArguments(
                language=self.language,
                rule_graph=rule_graph,
                paths_to_codebase=[file_path],
            )
        )
    if not summaries:
        return {self.step_name(): False}
    return {self.step_name(): True}
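
For reference, a minimal standalone sketch of the string-level rewrite the method performs; the Scala snippet and config key below are hypothetical examples, not values taken from the plugin:

val_id = "spark"
val_def = 'val spark = SparkSession.builder().master("local").getOrCreate()'
calls = 'spark.sqlContext.setConf("spark.sql.shuffle.partitions", "8")'

calls_to_add = calls.replace(f"{val_id}.sqlContext.setConf", ".config")
new_val_def = val_def.replace(".getOrCreate()", calls_to_add + "\n.getOrCreate()")
print(new_val_def)
# val spark = SparkSession.builder().master("local").config("spark.sql.shuffle.partitions", "8")
# .getOrCreate()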