protected void validateHCatalogOptions()

in client/migrationx/migrationx-transformer/src/main/java/com/aliyun/dataworks/migrationx/transformer/core/sqoop/BaseSqoopTool.java [1429:1553]


    protected void validateHCatalogOptions(SqoopOptions options)
        throws InvalidOptionsException {
        // If no HCatalog table is specified, this is not an HCatalog job;
        // warn about HCatalog-specific options that will be ignored.
        String hCatTable = options.getHCatTableName();
        if (hCatTable == null) {
            if (options.getHCatHome() != null && !options.getHCatHome().equals(SqoopOptions.getHCatHomeDefault())) {
                LOG.warn("--hcatalog-home option will be ignored in "
                    + "non-HCatalog jobs");
            }
            if (options.getHCatDatabaseName() != null) {
                LOG.warn("--hcatalog-database option will be ignored  "
                    + "without --hcatalog-table");
            }

            if (options.getHCatStorageStanza() != null) {
                LOG.warn("--hcatalog-storage-stanza option will be ignored "
                    + "without --hatalog-table");
            }
            return;
        }

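        // Delimiter options only matter for text-based storage formats, so
        // warn when they are supplied for an HCatalog job.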
        if (options.explicitInputDelims()) {
            LOG.warn("Input field/record delimiter options are not "
                + "used in HCatalog jobs unless the format is text.   It is better "
                + "to use --hive-import in those cases.  For text formats");
        }
        if (options.explicitOutputDelims()
            || options.getHiveDelimsReplacement() != null
            || options.doHiveDropDelims()) {
            LOG.warn("Output field/record delimiter options are not useful"
                + " in HCatalog jobs for most of the output types except text based "
                + " formats is text. It is better "
                + "to use --hive-import in those cases.  For non text formats, ");
        }
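        // --hcatalog-table conflicts with --hive-import and with options that
        // point at explicit HDFS directories; HCatalog resolves the table
        // location through the metastore.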
        if (options.doHiveImport()) {
            throw new InvalidOptionsException("The " + HCATALOG_TABLE_ARG
                + " option conflicts with the " + HIVE_IMPORT_ARG
                + " option." + HELP_STR);
        }
        if (options.getTargetDir() != null) {
            throw new InvalidOptionsException("The " + TARGET_DIR_ARG
                + " option conflicts with the " + HCATALOG_TABLE_ARG
                + " option." + HELP_STR);
        }
        if (options.getWarehouseDir() != null) {
            throw new InvalidOptionsException("The " + WAREHOUSE_DIR_ARG
                + " option conflicts with the " + HCATALOG_TABLE_ARG
                + " option." + HELP_STR);
        }

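        // Incremental append and explicit export paths likewise rely on
        // direct HDFS paths and are rejected for HCatalog jobs.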
        if (options.isAppendMode()) {
            throw new InvalidOptionsException("Append mode for imports is not "
                + " compatible with HCatalog. Please remove the parameter"
                + "--append-mode");
        }
        if (options.getExportDir() != null) {
            throw new InvalidOptionsException("The " + EXPORT_PATH_ARG
                + " option conflicts with the " + HCATALOG_TABLE_ARG
                + " option." + HELP_STR);
        }

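        // Avro and SequenceFile layout flags are rejected; an HCatalog
        // table's storage format is defined by its storage stanza instead.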
        if (options.getFileLayout() == SqoopOptions.FileLayout.AvroDataFile) {
            throw new InvalidOptionsException("HCatalog job is not compatible with "
                + " AVRO format option " + FMT_AVRODATAFILE_ARG
                + " option." + HELP_STR);

        }

        if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
            throw new InvalidOptionsException("HCatalog job  is not compatible with "
                + "SequenceFile format option " + FMT_SEQUENCEFILE_ARG
                + " option." + HELP_STR);
        }

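        // Static partition keys and values must be supplied together and
        // pair up one-to-one with no blank entries.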
        if (options.getHCatalogPartitionKeys() != null
            && options.getHCatalogPartitionValues() == null) {
            throw new InvalidOptionsException("Either both --hcatalog-partition-keys"
                + " and --hcatalog-partition-values should be provided or both of these"
                + " options should be omitted.");
        }

        if (options.getHCatalogPartitionKeys() != null) {
            if (options.getHivePartitionKey() != null) {
                LOG.warn("Both --hcatalog-partition-keys and --hive-partition-key"
                    + "options are provided.  --hive-partition-key option will be"
                    + "ignored");
            }

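            // Both lists are comma-separated; the counts must match and every
            // trimmed entry must be non-empty.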
            String[] keys = options.getHCatalogPartitionKeys().split(",");
            String[] vals = options.getHCatalogPartitionValues().split(",");

            if (keys.length != vals.length) {
                throw new InvalidOptionsException("Number of static partition keys "
                    + "provided dpes match the number of partition values");
            }

            for (int i = 0; i < keys.length; ++i) {
                String k = keys[i].trim();
                if (k.isEmpty()) {
                    throw new InvalidOptionsException(
                        "Invalid HCatalog static partition key at position " + i);
                }
            }
            for (int i = 0; i < vals.length; ++i) {
                String v = vals[i].trim();
                if (v.isEmpty()) {
                    throw new InvalidOptionsException(
                        "Invalid HCatalog static partition key at position " + v);
                }
            }
        } else {
            if (options.getHivePartitionKey() != null
                && options.getHivePartitionValue() == null) {
                throw new InvalidOptionsException("Either both --hive-partition-key and"
                    + " --hive-partition-value options should be provided or both of "
                    + "these options should be omitted");
            }
        }
        if (options.doCreateHCatalogTable() &&
            options.doDropAndCreateHCatalogTable()) {
            throw new InvalidOptionsException("Options --create-hcatalog-table" +
                " and --drop-and-create-hcatalog-table are mutually exclusive." +
                " Use any one of them");
        }
    }
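
A minimal usage sketch (not part of the source): exercising the validator with
a deliberately inconsistent configuration, assuming this package's SqoopOptions
copy exposes the same setters as upstream Sqoop (setHCatTableName,
setHCatalogPartitionKeys, setHCatalogPartitionValues) and that the call is made
from within a BaseSqoopTool subclass, since the method is protected:

    SqoopOptions options = new SqoopOptions();
    options.setHCatTableName("web_logs");             // hypothetical table name
    options.setHCatalogPartitionKeys("ds,region");    // two static partition keys
    options.setHCatalogPartitionValues("2024-01-01"); // only one value

    try {
        validateHCatalogOptions(options);
    } catch (InvalidOptionsException e) {
        // Expected: key and value counts differ, so validation fails with
        // "Number of static partition keys provided does not match ...".
        System.err.println(e.getMessage());
    }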