def _parse_args()

in src/common/materializer/create_bq_object.py [0:0]

# Module-level imports this function relies on (assumed to sit at the top of the file):
import argparse
import json
import logging
from pathlib import Path

def _parse_args() -> tuple[str, str, str, str, dict, bool, bool, str, bool]:
    """Parses, validates and returns arguments, sets up logging."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--module_name",
        type=str,
        required=True,
        help="Module for which to generate BQ table/view. Required.")
    parser.add_argument(
        "--jinja_data_file",
        type=str,
        required=True,
        help=("Jinja data file containing replacement values for the sql_file "
              "file. settings file, with relative path. Required."))
    parser.add_argument(
        "--target_dataset_type",
        type=str,
        required=False,
        default="Reporting",
        help=("Type of dataset (CDC/Reporting) for which this table or view "
              " is created. Default value is 'Reporting'."))
    parser.add_argument(
        "--target_dataset",
        type=str,
        required=True,
        help=("Full name of BiQuery dataset in which this table or view will "
              "be created. Required."))
    parser.add_argument(
        "--bq_object_setting",
        type=str,
        required=True,
        help=("BQ Object Setting dictionary - containing value corresponding "
              "to the entry in the materializer settings file for the given "
              "table. Required."))
    parser.add_argument(
        "--load_test_data",
        default=False,
        action="store_true",
        help="Flag to indicate if test data should be loaded in the tables.")
    parser.add_argument(
        "--debug",
        default=False,
        action="store_true",
        help="Flag to set log level to DEBUG. Default is WARNING")
    parser.add_argument("--allow_telemetry",
                        default=False,
                        action="store_true",
                        help="Flag to indicate if telemetry is allowed.")
    parser.add_argument(
        "--location",
        type=str,
        required=True,
        help="Location to pass to BigQueryInsertJob operators in DAGs.")
    parser.add_argument(
        "--skip_dag",
        default=False,
        action="store_true",
        help="Flag to indicate if Composer DAG should not be generated.")

    args = parser.parse_args()

    enable_debug = args.debug
    logging.basicConfig(level=logging.DEBUG if enable_debug else logging.INFO)

    module_name = args.module_name
    jinja_data_file = args.jinja_data_file
    target_dataset_type = args.target_dataset_type.lower()
    target_dataset = args.target_dataset
    bq_object_setting_str = args.bq_object_setting
    load_test_data = args.load_test_data
    allow_telemetry = args.allow_telemetry
    location = args.location
    skip_dag = args.skip_dag

    logging.info("Arguments:")
    logging.info("  module_name = %s", module_name)
    logging.info("  jinja_data_file = %s", jinja_data_file)
    logging.info("  target_dataset_type = %s", target_dataset_type)
    logging.info("  target_dataset = %s", target_dataset)
    logging.info("  bq_object_setting_str = %s", bq_object_setting_str)
    logging.info("  load_test_data = %s", load_test_data)
    logging.info("  debug = %s", enable_debug)
    logging.info("  allow_telemetry = %s", allow_telemetry)
    logging.info("  location = %s", location)
    logging.info("  skip_dag = %s", skip_dag)

    if not Path(jinja_data_file).is_file():
        raise ValueError(
            f"🛑 jinja_data_file '{jinja_data_file}' does not exist.")

    try:
        bq_object_setting = json.loads(bq_object_setting_str)
    except json.JSONDecodeError as e:
        raise ValueError(
            f"🛑 Failed to read table settings. Error = {e}.") from e

    return (module_name, jinja_data_file, target_dataset_type, target_dataset,
            bq_object_setting, load_test_data, allow_telemetry, location,
            skip_dag)
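
A minimal usage sketch, assuming _parse_args() is importable from this module; the flag values below are made-up placeholders, the JSON passed via --bq_object_setting is only a shape guess rather than the project's real settings schema, and --jinja_data_file must point to a file that actually exists or the validation above raises ValueError:

import sys

# Hypothetical argv; in a real deployment these flags come from the calling script.
sys.argv = [
    "create_bq_object.py",
    "--module_name", "sap",
    "--jinja_data_file", "config/jinja_values.json",  # must exist on disk
    "--target_dataset", "my-project.my_reporting_ds",
    "--bq_object_setting", '{"sql_file": "example.sql"}',
    "--location", "US",
    "--load_test_data",
]
(module_name, jinja_data_file, target_dataset_type, target_dataset,
 bq_object_setting, load_test_data, allow_telemetry, location,
 skip_dag) = _parse_args()
# target_dataset_type defaults to "reporting" (lower-cased); allow_telemetry,
# debug and skip_dag stay False because their flags were not passed.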