src/datamigration/dags/datamigration_utils/hive_bq_load_utils_inc.py [326:342]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    client = bigquery.Client()
    # Parse the serialized config string; `conf` avoids shadowing the builtin `dict`.
    conf = read_config_file(ast.literal_eval(config))
    dt = conf["dt"]
    # Load the incremental table-list metadata for this run date from GCS.
    df = read_pd_from_gcs(
        conf["temp_bucket"],
        constants.df_inc_table_list_metadata.format(dt=dt),
    )
    # Deduplicated, sorted list of target BigQuery datasets.
    database_list = sorted(df["bq_dataset"].unique())
    print(database_list)  # surfaces in the Airflow task log
    df_list = []  # accumulator used later in the (truncated) function
    for dbname in database_list:
        # Quoted, comma-separated Hive table names for this dataset,
        # e.g. 'tbl_a','tbl_b', ready to splice into an IN (...) clause.
        hive_tables = (
            "'"
            + "','".join(df.loc[df["bq_dataset"] == dbname]["table"].values.tolist())
            + "'"
        )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



src/datamigration/dags/datamigration_utils/hive_bq_load_utils_inc.py [368:384]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    client = bigquery.Client()
    # Parse the serialized config string; `conf` avoids shadowing the builtin `dict`.
    conf = read_config_file(ast.literal_eval(config))
    dt = conf["dt"]
    # Load the incremental table-list metadata for this run date from GCS.
    df = read_pd_from_gcs(
        conf["temp_bucket"],
        constants.df_inc_table_list_metadata.format(dt=dt),
    )
    # Deduplicated, sorted list of target BigQuery datasets.
    database_list = sorted(df["bq_dataset"].unique())
    print(database_list)  # surfaces in the Airflow task log
    df_list = []  # accumulator used later in the (truncated) function
    for dbname in database_list:
        # Quoted, comma-separated Hive table names for this dataset,
        # e.g. 'tbl_a','tbl_b', ready to splice into an IN (...) clause.
        hive_tables = (
            "'"
            + "','".join(df.loc[df["bq_dataset"] == dbname]["table"].values.tolist())
            + "'"
        )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
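
The two excerpts above are line-for-line identical, so the shared setup is a natural candidate for extraction into one helper that both call sites reuse. Below is a minimal sketch of such a helper; `read_config_file`, `read_pd_from_gcs`, and `constants.df_inc_table_list_metadata` are taken from the excerpts and assumed to be importable from the surrounding module, while the helper name `load_inc_table_metadata`, its import paths, and its return tuple are illustrative assumptions rather than part of the source.

import ast

from google.cloud import bigquery

# Assumed to live in the surrounding datamigration_utils package;
# adjust these import paths to wherever the helpers actually reside.
from datamigration_utils.hive_bq_load_utils_inc import (
    read_config_file,
    read_pd_from_gcs,
)
from datamigration_utils import constants


def load_inc_table_metadata(config):
    """Hypothetical shared helper for the duplicated setup above.

    Parses the serialized config, loads the incremental table-list
    metadata for the run date from GCS, and returns everything both
    call sites need.
    """
    client = bigquery.Client()
    conf = read_config_file(ast.literal_eval(config))
    df = read_pd_from_gcs(
        conf["temp_bucket"],
        constants.df_inc_table_list_metadata.format(dt=conf["dt"]),
    )
    database_list = sorted(df["bq_dataset"].unique())
    return client, conf, df, database_list

Each call site would then keep only its per-dataset loop:

client, conf, df, database_list = load_inc_table_metadata(config)
for dbname in database_list:
    hive_tables = (
        "'"
        + "','".join(df.loc[df["bq_dataset"] == dbname]["table"].values.tolist())
        + "'"
    )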
