private CreateTableOperation convertCreateTable()

in flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/parse/HiveParserDDLSemanticAnalyzer.java [1111:1201]
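
Converts a parsed Hive CREATE TABLE statement into a Flink CreateTableOperation. Hive-specific metadata that the Flink schema cannot carry directly (the EXTERNAL flag, constraint traits, row format, storage format, and location) is encoded into the table properties so that HiveCatalog can reconstruct the original Hive table later.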


    private CreateTableOperation convertCreateTable(
            String compoundName,
            boolean isExternal,
            boolean ifNotExists,
            boolean isTemporary,
            List<FieldSchema> cols,
            List<FieldSchema> partCols,
            String comment,
            String location,
            Map<String, String> tblProps,
            HiveParserRowFormatParams rowFormatParams,
            HiveParserStorageFormat storageFormat,
            List<PrimaryKey> primaryKeys,
            List<NotNullConstraint> notNullConstraints) {
        Map<String, String> props = new HashMap<>();
        if (tblProps != null) {
            props.putAll(tblProps);
        }
        markHiveConnector(props);
        // record the EXTERNAL flag in the table properties
        if (isExternal) {
            props.put(TABLE_IS_EXTERNAL, "true");
        }
        // primary key: pack its ENABLE/VALIDATE/RELY traits into a single byte
        UniqueConstraint uniqueConstraint = null;
        if (primaryKeys != null && !primaryKeys.isEmpty()) {
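            // Hive represents a composite primary key as one PrimaryKey entry per
            // column; the entries share the constraint name and traits, so the
            // traits of the first entry cover the whole key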
            PrimaryKey primaryKey = primaryKeys.get(0);
            byte trait = 0;
            if (primaryKey.isEnable()) {
                trait = HiveDDLUtils.enableConstraint(trait);
            }
            if (primaryKey.isValidate()) {
                trait = HiveDDLUtils.validateConstraint(trait);
            }
            if (primaryKey.isRely()) {
                trait = HiveDDLUtils.relyConstraint(trait);
            }
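            // stash the encoded trait byte in the table properties so the
            // catalog can restore the constraint's traits later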
            props.put(PK_CONSTRAINT_TRAIT, String.valueOf(trait));
            List<String> pkCols =
                    primaryKeys.stream().map(PrimaryKey::getPk).collect(Collectors.toList());
            String constraintName = primaryKey.getConstraintName();
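            // no explicit constraint name in the DDL: generate one of the form
            // PK_col1_col2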
            if (constraintName == null) {
                constraintName = pkCols.stream().collect(Collectors.joining("_", "PK_", ""));
            }
            uniqueConstraint = UniqueConstraint.primaryKey(constraintName, pkCols);
        }
        // NOT NULL constraints
        List<String> notNullCols = new ArrayList<>();
        if (notNullConstraints != null && !notNullConstraints.isEmpty()) {
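            // encode one trait byte per NOT NULL column; the two delimited
            // lists written below use the same order and stay index-aligned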
            List<String> traits = new ArrayList<>();
            for (NotNullConstraint notNull : notNullConstraints) {
                byte trait = 0;
                if (notNull.isEnable()) {
                    trait = HiveDDLUtils.enableConstraint(trait);
                }
                if (notNull.isValidate()) {
                    trait = HiveDDLUtils.validateConstraint(trait);
                }
                if (notNull.isRely()) {
                    trait = HiveDDLUtils.relyConstraint(trait);
                }
                traits.add(String.valueOf(trait));
                notNullCols.add(notNull.getColName());
            }
            props.put(NOT_NULL_CONSTRAINT_TRAITS, String.join(COL_DELIMITER, traits));
            props.put(NOT_NULL_COLS, String.join(COL_DELIMITER, notNullCols));
        }
        // row format
        if (rowFormatParams != null) {
            encodeRowFormat(rowFormatParams, props);
        }
        // storage format
        if (storageFormat != null) {
            encodeStorageFormat(storageFormat, props);
        }
        // location
        if (location != null) {
            props.put(TABLE_LOCATION_URI, location);
        }
        ObjectIdentifier identifier = parseObjectIdentifier(compoundName);
        Set<String> notNullColSet = new HashSet<>(notNullCols);
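        // primary key columns are implicitly NOT NULL, so include them as well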
        if (uniqueConstraint != null) {
            notNullColSet.addAll(uniqueConstraint.getColumns());
        }
        Schema schema = HiveTableUtil.createSchema(cols, partCols, notNullColSet, uniqueConstraint);
        return new CreateTableOperation(
                identifier,
                CatalogTable.of(schema, comment, HiveCatalog.getFieldNames(partCols), props),
                ifNotExists,
                isTemporary);
    }
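
The trait byte written to PK_CONSTRAINT_TRAIT and NOT_NULL_CONSTRAINT_TRAITS is produced by HiveDDLUtils. The snippet below is a hypothetical reconstruction of that encoding, not the actual Flink implementation: it assumes ENABLE, VALIDATE, and RELY each occupy one distinct low bit of the byte, and the class and helper names are illustrative only; the real bit positions are defined in HiveDDLUtils.

    // Hypothetical sketch of the trait-byte encoding (assumed bit layout).
    final class ConstraintTraitSketch {
        private static final byte ENABLE = 1 << 2;   // assumed bit position
        private static final byte VALIDATE = 1 << 1; // assumed bit position
        private static final byte RELY = 1;          // assumed bit position

        static byte enableConstraint(byte trait) {
            return (byte) (trait | ENABLE);
        }

        static byte validateConstraint(byte trait) {
            return (byte) (trait | VALIDATE);
        }

        static byte relyConstraint(byte trait) {
            return (byte) (trait | RELY);
        }

        public static void main(String[] args) {
            // PRIMARY KEY ... ENABLE RELY would yield 0b101 = 5 under this
            // layout, stored as the string "5" in PK_CONSTRAINT_TRAIT
            byte trait = 0;
            trait = enableConstraint(trait);
            trait = relyConstraint(trait);
            System.out.println(trait); // prints 5
        }
    }

A catalog reading the table back would parse the property string into a byte and test the individual bits to recover the ENABLE/VALIDATE/RELY settings for each constraint.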