in python/dataproc_templates/jdbc/jdbc_to_bigquery.py [0:0]
def parse_args(args: Optional[Sequence[str]] = None) -> Dict[str, Any]:
    """Parse command-line arguments for the JDBC-to-BigQuery template.

    Args:
        args: Argument list to parse. When ``None``, ``sys.argv[1:]`` is
            used (argparse default).

    Returns:
        A dict mapping each argument's ``dest`` (the ``constants.JDBC_BQ_*``
        name) to its parsed value. Unrecognized arguments are silently
        ignored because ``parse_known_args`` is used — other templates'
        flags may share the command line.
    """
    parser: argparse.ArgumentParser = argparse.ArgumentParser()
    parser.add_argument(
        f'--{constants.JDBC_BQ_OUTPUT_DATASET}',
        dest=constants.JDBC_BQ_OUTPUT_DATASET,
        required=True,
        help='BigQuery dataset for the output table'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_OUTPUT_TABLE}',
        dest=constants.JDBC_BQ_OUTPUT_TABLE,
        required=True,
        help='BigQuery output table name'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_LD_TEMP_BUCKET_NAME}',
        dest=constants.JDBC_BQ_LD_TEMP_BUCKET_NAME,
        required=True,
        help='Spark BigQuery connector temporary bucket'
    )
    # Exactly one of the plain URL or the Secret Manager secret name must be
    # supplied; the group (required=True) enforces the one-of constraint, so
    # the individual arguments need no `required` flag of their own.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        f'--{constants.JDBC_BQ_INPUT_URL}',
        dest=constants.JDBC_BQ_INPUT_URL,
        default="",
        help='JDBC input URL'
    )
    group.add_argument(
        f'--{constants.JDBC_BQ_INPUT_URL_SECRET}',
        dest=constants.JDBC_BQ_INPUT_URL_SECRET,
        default="",
        help='JDBC input URL secret name'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_INPUT_DRIVER}',
        dest=constants.JDBC_BQ_INPUT_DRIVER,
        required=True,
        help='JDBC input driver name'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_INPUT_TABLE}',
        dest=constants.JDBC_BQ_INPUT_TABLE,
        required=True,
        help='JDBC input table name'
    )
    # The three partitioning options below are only meaningful together;
    # empty-string defaults mean "not provided" for the downstream reader.
    parser.add_argument(
        f'--{constants.JDBC_BQ_INPUT_PARTITIONCOLUMN}',
        dest=constants.JDBC_BQ_INPUT_PARTITIONCOLUMN,
        default="",
        help='JDBC input table partition column name'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_INPUT_LOWERBOUND}',
        dest=constants.JDBC_BQ_INPUT_LOWERBOUND,
        default="",
        help='JDBC input table partition column lower bound which is used to decide the partition stride'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_INPUT_UPPERBOUND}',
        dest=constants.JDBC_BQ_INPUT_UPPERBOUND,
        default="",
        help='JDBC input table partition column upper bound which is used to decide the partition stride'
    )
    # NOTE(review): kept as a string ("10", not int) on purpose — callers
    # appear to pass this straight through as a Spark option value; changing
    # the type would change the returned dict. Confirm before normalizing.
    parser.add_argument(
        f'--{constants.JDBC_BQ_NUMPARTITIONS}',
        dest=constants.JDBC_BQ_NUMPARTITIONS,
        default="10",
        help='The maximum number of partitions that can be used for parallelism in table reading and writing. Default set to 10'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_INPUT_FETCHSIZE}',
        dest=constants.JDBC_BQ_INPUT_FETCHSIZE,
        default=0,
        type=int,
        help='Determines how many rows to fetch per round trip'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_SESSIONINITSTATEMENT}',
        dest=constants.JDBC_BQ_SESSIONINITSTATEMENT,
        default="",
        help='Custom SQL statement to execute in each reader database session'
    )
    parser.add_argument(
        f'--{constants.JDBC_BQ_OUTPUT_MODE}',
        dest=constants.JDBC_BQ_OUTPUT_MODE,
        default=constants.OUTPUT_MODE_APPEND,
        help=(
            'Output write mode '
            '(one of: append,overwrite,ignore,errorifexists) '
            '(Defaults to append)'
        ),
        choices=[
            constants.OUTPUT_MODE_OVERWRITE,
            constants.OUTPUT_MODE_APPEND,
            constants.OUTPUT_MODE_IGNORE,
            constants.OUTPUT_MODE_ERRORIFEXISTS
        ]
    )
    known_args: argparse.Namespace
    known_args, _ = parser.parse_known_args(args)
    return vars(known_args)