in client/migrationx/migrationx-transformer/src/main/java/com/aliyun/dataworks/migrationx/transformer/core/sqoop/ImportTool.java [77:148]
protected RelatedOptions getImportOptions() {
    // Imports
    RelatedOptions importOpts = new RelatedOptions("Import control arguments");
    importOpts.addOption(OptionBuilder.withDescription("Use direct import fast path")
        .withLongOpt(DIRECT_ARG).create());
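    // The options below apply only to single-table imports; they are omitted
    // when importing all tables.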
    if (!allTables) {
        importOpts.addOption(OptionBuilder.withArgName("table-name").hasArg()
            .withDescription("Table to read").withLongOpt(TABLE_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("col,col,col...").hasArg()
            .withDescription("Columns to import from table").withLongOpt(COLUMNS_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("column-name").hasArg()
            .withDescription("Column of the table used to split work units")
            .withLongOpt(SPLIT_BY_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("size").hasArg().withDescription(
            "Upper Limit of rows per split for split columns of Date/Time/Timestamp and integer types. For "
                + "date or timestamp fields it is "
                + "calculated in seconds. split-limit should be greater than 0")
            .withLongOpt(SPLIT_LIMIT_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("where clause").hasArg()
            .withDescription("WHERE clause to use during import").withLongOpt(WHERE_ARG).create());
        importOpts.addOption(OptionBuilder.withDescription("Imports data in append mode")
            .withLongOpt(APPEND_ARG).create());
        importOpts.addOption(OptionBuilder.withDescription("Imports data in delete mode")
            .withLongOpt(DELETE_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("HDFS plain table destination").withLongOpt(TARGET_DIR_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("statement").hasArg()
            .withDescription("Import results of SQL 'statement'")
            .withLongOpt(SQL_QUERY_ARG).create(SQL_QUERY_SHORT_ARG));
        importOpts.addOption(OptionBuilder.withArgName("statement").hasArg()
            .withDescription("Set boundary query for retrieving max and min"
                + " value of the primary key")
            .withLongOpt(SQL_QUERY_BOUNDARY).create());
        importOpts.addOption(OptionBuilder.withArgName("column").hasArg()
            .withDescription("Key column to use to join results").withLongOpt(MERGE_KEY_ARG).create());
        addValidationOpts(importOpts);
    }
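    // The remaining options are shared by single-table and all-tables imports.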
importOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
.withDescription("HDFS parent for table destination").withLongOpt(WAREHOUSE_DIR_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data to SequenceFiles")
        .withLongOpt(FMT_SEQUENCEFILE_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data as plain text (default)")
        .withLongOpt(FMT_TEXTFILE_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data to Avro data files")
        .withLongOpt(FMT_AVRODATAFILE_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data to Parquet files")
        .withLongOpt(FMT_PARQUETFILE_ARG).create());
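    // Parallelism and MapReduce job settings.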
    importOpts.addOption(OptionBuilder.withArgName("n").hasArg()
        .withDescription("Use 'n' map tasks to import in parallel")
        .withLongOpt(NUM_MAPPERS_ARG).create(NUM_MAPPERS_SHORT_ARG));
    importOpts.addOption(OptionBuilder.withArgName("name").hasArg()
        .withDescription("Set name for generated mapreduce job").withLongOpt(MAPREDUCE_JOB_NAME).create());
importOpts.addOption(OptionBuilder.withDescription("Enable compression").withLongOpt(COMPRESS_ARG)
.create(COMPRESS_SHORT_ARG));
importOpts.addOption(OptionBuilder.withArgName("codec").hasArg()
.withDescription("Compression codec to use for import").withLongOpt(COMPRESSION_CODEC_ARG).create());
importOpts.addOption(OptionBuilder.withArgName("n").hasArg()
.withDescription("Split the input stream every 'n' bytes " + "when importing in direct mode")
.withLongOpt(DIRECT_SPLIT_SIZE_ARG).create());
importOpts.addOption(OptionBuilder.withArgName("n").hasArg()
.withDescription("Set the maximum size for an inline LOB").withLongOpt(INLINE_LOB_LIMIT_ARG).create());
importOpts.addOption(OptionBuilder.withArgName("n").hasArg()
.withDescription("Set number 'n' of rows to fetch from the " + "database when more rows are needed")
.withLongOpt(FETCH_SIZE_ARG).create());
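    // Flag option: fall back to a single mapper when no split key is available.
    // Note: the withArgName() call below has no visible effect because hasArg()
    // is never set on this option.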
importOpts.addOption(OptionBuilder.withArgName("reset-mappers")
.withDescription("Reset the number of mappers to one mapper if no split key available")
.withLongOpt(AUTORESET_TO_ONE_MAPPER).create());
return importOpts;
}
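
A minimal sketch of how the returned option set could be consumed, assuming RelatedOptions extends Commons CLI's Options (as in upstream Sqoop); the parseImportArgs helper and the GnuParser choice are illustrative assumptions, not this file's actual call site:

// Sketch (assumed usage; not taken from this file). Parses command-line
// arguments against the options built above and reads one value back out.
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.ParseException;

protected void parseImportArgs(String[] args) throws ParseException {  // hypothetical helper
    RelatedOptions importOpts = getImportOptions();
    // GnuParser is deprecated but still shipped in Commons CLI 1.x, which is
    // the era of API this OptionBuilder-based code targets.
    CommandLine cmd = new GnuParser().parse(importOpts, args);
    if (cmd.hasOption(TABLE_ARG)) {
        String table = cmd.getOptionValue(TABLE_ARG);  // e.g. --table employees
        // ... hand the value off to the tool's configuration object
    }
}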