public static ColumnParser create()

in spark-load/spark-load-dpp/src/main/java/org/apache/doris/load/loadv2/dpp/ColumnParser.java [45:85]


    /**
     * Static factory that selects the {@link ColumnParser} implementation matching the
     * ETL column's declared type name (compared case-insensitively).
     *
     * @param etlColumn column metadata; only its {@code columnType} string is inspected,
     *                  though string/decimal parsers also receive the column for their own config
     * @return a parser instance appropriate for the column type
     * @throws SparkDppException if the type name matches none of the supported types
     */
    public static ColumnParser create(EtlJobConfig.EtlColumn etlColumn) throws SparkDppException {
        String columnType = etlColumn.columnType;
        // Locale.ROOT keeps the uppercasing locale-independent, mirroring equalsIgnoreCase
        // semantics for these ASCII-only type keywords.
        switch (columnType.toUpperCase(java.util.Locale.ROOT)) {
            case "TINYINT":
                return new TinyIntParser();
            case "SMALLINT":
                return new SmallIntParser();
            case "INT":
                return new IntParser();
            case "BIGINT":
                return new BigIntParser();
            case "FLOAT":
                return new FloatParser();
            case "DOUBLE":
                return new DoubleParser();
            case "BOOLEAN":
                return new BooleanParser();
            case "DATE":
            case "DATEV2":
                return new DateParser();
            case "DATETIME":
            case "DATETIMEV2":
                return new DatetimeParser();
            case "STRING":
            case "TEXT":
                return new StringTypeParser(etlColumn);
            case "VARCHAR":
            case "CHAR":
            case "BITMAP":
            case "HLL":
                return new StringParser(etlColumn);
            case "DECIMALV2":
            case "DECIMAL32":
            case "DECIMAL64":
            case "DECIMAL128":
                return new DecimalParser(etlColumn);
            case "LARGEINT":
                return new LargeIntParser();
            default:
                throw new SparkDppException("unsupported type:" + columnType);
        }
    }