private static ConfigDef newConfigDef()

in kafka-connect/kafka-connect/src/main/java/org/apache/iceberg/connect/IcebergSinkConfig.java [112:229]


  // Defines the connector's configuration keys: table routing, auto-create and
  // schema-evolution behavior, catalog selection, and the control-topic/commit
  // settings used by the commit coordinator. Each define() registers a key with
  // its type, default value, importance, and doc string.
  private static ConfigDef newConfigDef() {
    ConfigDef configDef = new ConfigDef();
    configDef.define(
        TABLES_PROP,
        ConfigDef.Type.LIST,
        null,
        Importance.HIGH,
        "Comma-delimited list of destination tables");
    configDef.define(
        TABLES_DYNAMIC_PROP,
        ConfigDef.Type.BOOLEAN,
        false,
        Importance.MEDIUM,
        "Enable dynamic routing to tables based on a record value");
    configDef.define(
        TABLES_ROUTE_FIELD_PROP,
        ConfigDef.Type.STRING,
        null,
        Importance.MEDIUM,
        "Source record field for routing records to tables");
    configDef.define(
        TABLES_DEFAULT_COMMIT_BRANCH,
        ConfigDef.Type.STRING,
        null,
        Importance.MEDIUM,
        "Default branch for commits");
    configDef.define(
        TABLES_DEFAULT_ID_COLUMNS,
        ConfigDef.Type.STRING,
        null,
        Importance.MEDIUM,
        "Default ID columns for tables, comma-separated");
    configDef.define(
        TABLES_DEFAULT_PARTITION_BY,
        ConfigDef.Type.STRING,
        null,
        Importance.MEDIUM,
        "Default partition spec to use when creating tables, comma-separated");
    configDef.define(
        TABLES_AUTO_CREATE_ENABLED_PROP,
        ConfigDef.Type.BOOLEAN,
        false,
        Importance.MEDIUM,
        "Set to true to automatically create destination tables, false otherwise");
    configDef.define(
        TABLES_SCHEMA_FORCE_OPTIONAL_PROP,
        ConfigDef.Type.BOOLEAN,
        false,
        Importance.MEDIUM,
        "Set to true to set columns as optional during table create and evolution, false to respect schema");
    configDef.define(
        TABLES_SCHEMA_CASE_INSENSITIVE_PROP,
        ConfigDef.Type.BOOLEAN,
        false,
        Importance.MEDIUM,
        "Set to true to look up table columns by case-insensitive name, false for case-sensitive");
    configDef.define(
        TABLES_EVOLVE_SCHEMA_ENABLED_PROP,
        ConfigDef.Type.BOOLEAN,
        false,
        Importance.MEDIUM,
        "Set to true to add any missing record fields to the table schema, false otherwise");
    configDef.define(
        CATALOG_NAME_PROP,
        ConfigDef.Type.STRING,
        DEFAULT_CATALOG_NAME,
        Importance.MEDIUM,
        "Iceberg catalog name");
    configDef.define(
        CONTROL_TOPIC_PROP,
        ConfigDef.Type.STRING,
        DEFAULT_CONTROL_TOPIC,
        Importance.MEDIUM,
        "Name of the control topic");
    configDef.define(
        CONTROL_GROUP_ID_PREFIX_PROP,
        ConfigDef.Type.STRING,
        DEFAULT_CONTROL_GROUP_PREFIX,
        Importance.LOW,
        "Prefix of the control consumer group");
    configDef.define(
        CONNECT_GROUP_ID_PROP,
        ConfigDef.Type.STRING,
        null,
        Importance.LOW,
        "Name of the Connect consumer group, should not be set under normal conditions");
    configDef.define(
        COMMIT_INTERVAL_MS_PROP,
        ConfigDef.Type.INT,
        COMMIT_INTERVAL_MS_DEFAULT,
        Importance.MEDIUM,
        "Coordinator interval for performing Iceberg table commits, in millis");
    configDef.define(
        COMMIT_TIMEOUT_MS_PROP,
        ConfigDef.Type.INT,
        COMMIT_TIMEOUT_MS_DEFAULT,
        Importance.MEDIUM,
        "Coordinator time to wait for worker responses before committing, in millis");
    configDef.define(
        COMMIT_THREADS_PROP,
        ConfigDef.Type.INT,
        Runtime.getRuntime().availableProcessors() * 2,
        Importance.MEDIUM,
        "Coordinator threads to use for table commits, default is (cores * 2)");
    configDef.define(
        TRANSACTIONAL_PREFIX_PROP,
        ConfigDef.Type.STRING,
        null,
        Importance.LOW,
        "Optional prefix of the transactional id for the coordinator");
    configDef.define(
        HADOOP_CONF_DIR_PROP,
        ConfigDef.Type.STRING,
        null,
        Importance.MEDIUM,
        "If specified, Hadoop config files in this directory will be loaded");
    return configDef;
  }
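
The method follows Kafka Connect's standard ConfigDef pattern, so unset keys fall back to their declared defaults and typed values (LIST, BOOLEAN, INT) are parsed from strings. The sketch below is not part of IcebergSinkConfig; it only illustrates that behavior with Kafka's public ConfigDef API. The key strings "iceberg.tables" and "iceberg.control.commit.interval-ms" and the 300_000 default are illustrative placeholders standing in for the TABLES_PROP, COMMIT_INTERVAL_MS_PROP, and COMMIT_INTERVAL_MS_DEFAULT constants, whose actual values are declared elsewhere in the file and not shown in this excerpt.

import java.util.Map;
import org.apache.kafka.common.config.ConfigDef;

public class ConfigDefSketch {

  public static void main(String[] args) {
    // Mirror the structure of newConfigDef() with two entries. The key strings
    // and the 300_000 default are placeholders, not the connector's constants.
    ConfigDef configDef = new ConfigDef();
    configDef.define(
        "iceberg.tables",
        ConfigDef.Type.LIST,
        null,
        ConfigDef.Importance.HIGH,
        "Comma-delimited list of destination tables");
    configDef.define(
        "iceberg.control.commit.interval-ms",
        ConfigDef.Type.INT,
        300_000,
        ConfigDef.Importance.MEDIUM,
        "Coordinator interval for performing Iceberg table commits, in millis");

    // parse() converts raw string values to the declared types and fills in the
    // declared defaults for any keys missing from the input map.
    Map<String, Object> parsed =
        configDef.parse(Map.of("iceberg.tables", "db.events,db.clicks"));

    System.out.println(parsed.get("iceberg.tables"));                     // [db.events, db.clicks]
    System.out.println(parsed.get("iceberg.control.commit.interval-ms")); // 300000
  }
}

In the connector itself, a ConfigDef like this is typically consumed through Kafka's AbstractConfig, which performs the same parsing at construction time and exposes typed getters such as getList, getInt, and getBoolean.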