in odps-sqoop/src/java/org/apache/sqoop/tool/BaseSqoopTool.java [1595:1681]
protected void validateHiveOptions(SqoopOptions options)
throws InvalidOptionsException {
if (options.getHiveDelimsReplacement() != null
&& options.doHiveDropDelims()) {
throw new InvalidOptionsException("The " + HIVE_DROP_DELIMS_ARG
+ " option conflicts with the " + HIVE_DELIMS_REPLACEMENT_ARG
+ " option." + HELP_STR);
}
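    // For example, a command line that combines --hive-drop-import-delims
    // with --hive-delims-replacement is rejected here: dropping and
    // replacing the Hive delimiter characters are mutually exclusive.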
    // Make sure that HCatalog and Hive imports are not requested together.
String hCatTable = options.getHCatTableName();
if (hCatTable != null && options.doHiveImport()) {
throw new InvalidOptionsException("The " + HCATALOG_TABLE_ARG
+ " option conflicts with the " + HIVE_IMPORT_ARG
+ " option." + HELP_STR);
}
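    // For example, passing both --hcatalog-table and --hive-import is
    // rejected, since HCatalog and Hive imports are separate code paths.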
if (options.doHiveImport()
&& options.getFileLayout() == SqoopOptions.FileLayout.AvroDataFile) {
throw new InvalidOptionsException("Hive import is not compatible with "
+ "importing into AVRO format.");
}
if (options.doHiveImport()
&& options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
throw new InvalidOptionsException("Hive import is not compatible with "
+ "importing into SequenceFile format.");
}
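    // The Hive import path loads delimited text files into the generated
    // Hive table, so the binary Avro and SequenceFile layouts checked
    // above cannot be combined with --hive-import.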
if (options.doHiveImport()
&& options.isAppendMode()
&& !options.getIncrementalMode().equals(IncrementalMode.AppendRows)) {
throw new InvalidOptionsException("Append mode for hive imports is not "
+ " yet supported. Please remove the parameter --append-mode");
}
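    // Note the carve-out above: an append implied by an incremental job
    // (--incremental append) remains allowed with --hive-import; only an
    // explicit, non-incremental --append is rejected.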
    // Many users report issues when importing data directly into the Hive
    // warehouse directory. Warn them whenever an output directory points
    // at the default warehouse location.
String defaultHiveWarehouse = "/user/hive/warehouse";
if (options.doHiveImport()
&& ((
options.getWarehouseDir() != null
&& options.getWarehouseDir().startsWith(defaultHiveWarehouse)
) || (
options.getTargetDir() != null
&& options.getTargetDir().startsWith(defaultHiveWarehouse)
))) {
LOG.warn("It seems that you're doing hive import directly into default");
LOG.warn("hive warehouse directory which is not supported. Sqoop is");
LOG.warn("firstly importing data into separate directory and then");
LOG.warn("inserting data into hive. Please consider removing");
LOG.warn("--target-dir or --warehouse-dir into /user/hive/warehouse in");
LOG.warn("case that you will detect any issues.");
}
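    // For example, "--hive-import --target-dir /user/hive/warehouse/foo"
    // triggers this warning, while a neutral staging directory such as
    // "--target-dir /tmp/foo-staging" does not.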
    // Warn about using Hive-specific arguments without --hive-import itself.
    // Note that HCatalog jobs reuse some of the Hive options.
if (!options.doHiveImport()
&& (((options.getHiveHome() != null
&& !options.getHiveHome().
equals(SqoopOptions.getHiveHomeDefault())
&& hCatTable == null))
|| options.doOverwriteHiveTable()
|| options.doFailIfHiveTableExists()
|| (options.getHiveTableName() != null
&& !options.getHiveTableName().equals(options.getTableName()))
|| (options.getHivePartitionKey() != null && hCatTable == null)
|| (options.getHivePartitionValue() != null && hCatTable == null)
|| (options.getMapColumnHive().size() > 0 && hCatTable == null))) {
LOG.warn("It seems that you've specified at least one of following:");
LOG.warn("\t--hive-home");
LOG.warn("\t--hive-overwrite");
LOG.warn("\t--create-hive-table");
LOG.warn("\t--hive-table");
LOG.warn("\t--hive-partition-key");
LOG.warn("\t--hive-partition-value");
LOG.warn("\t--map-column-hive");
LOG.warn("Without specifying parameter --hive-import. Please note that");
LOG.warn("those arguments will not be used in this session. Either");
LOG.warn("specify --hive-import to apply them correctly or remove them");
LOG.warn("from command line to remove this warning.");
LOG.info("Please note that --hive-home, --hive-partition-key, ");
LOG.info("\t hive-partition-value and --map-column-hive options are ");
LOG.info("\t are also valid for HCatalog imports and exports");
}
}
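
  // Illustrative sketch only (not part of the original source): how a
  // caller might exercise validateHiveOptions(). Subclasses such as
  // ImportTool invoke it from validateOptions(); the option values below
  // are assumptions chosen to trigger the Avro incompatibility check.
  private void exampleValidateHiveOptionsUsage() {
    SqoopOptions options = new SqoopOptions();
    options.setHiveImport(true);
    options.setFileLayout(SqoopOptions.FileLayout.AvroDataFile);
    try {
      validateHiveOptions(options);
    } catch (InvalidOptionsException ioe) {
      // Expected: "Hive import is not compatible with importing into
      // AVRO format."
      System.err.println(ioe.getMessage());
    }
  }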