in src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala [434:458]
def kylinRawTableSQLTypeToSparkType(dataTp: DataType): org.apache.spark.sql.types.DataType = {
  // Maps a Kylin raw-table SQL type name onto the corresponding Spark SQL type.
  dataTp.getName match {
    // Exact numerics keep their declared precision and scale.
    case "decimal" | "numeric" => DecimalType(dataTp.getPrecision, dataTp.getScale)
    // Only "timestamp" retains time-of-day; "time" and "datetime" collapse to DateType.
    case "date" | "time" | "datetime" => DateType
    case "timestamp" => TimestampType
    // Integral types.
    case "tinyint" => ByteType
    case "smallint" => ShortType
    case "integer" | "int4" => IntegerType
    case "bigint" | "long8" => LongType
    // Floating-point types; SQL "real" is widened to DoubleType.
    case "float" => FloatType
    case "double" | "real" => DoubleType
    // Character types, with or without a length qualifier, e.g. char(10), varchar(256).
    case tp if tp.startsWith("char") || tp.startsWith("varchar") => StringType
    // Kylin-specific column types bitmap and dim_dc are carried as longs.
    case "bitmap" | "dim_dc" => LongType
    case "boolean" => BooleanType
    case "array<string>" => ArrayType(StringType)
    case noSupport => throw new IllegalArgumentException(s"Unsupported data type: $noSupport")
  }
}
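
For context, the sketch below shows how such a mapping is typically consumed: the converted Spark types are assembled into a StructType schema. SimpleKylinType, TypeMappingSketch, and toSparkType are hypothetical stand-ins for Kylin's DataType and the method above (covering only an illustrative subset of its cases); only the org.apache.spark.sql.types API is real.

import org.apache.spark.sql.types._

// Hypothetical stand-in for Kylin's DataType: a type name plus precision/scale.
final case class SimpleKylinType(name: String, precision: Int = 0, scale: Int = 0)

object TypeMappingSketch {

  // Simplified version of the name-based dispatch above (illustrative subset).
  def toSparkType(tp: SimpleKylinType): DataType = tp.name match {
    case "decimal" | "numeric"        => DecimalType(tp.precision, tp.scale)
    case "date" | "time" | "datetime" => DateType
    case "timestamp"                  => TimestampType
    case "integer" | "int4"           => IntegerType
    case "bigint" | "long8"           => LongType
    case n if n.startsWith("char") || n.startsWith("varchar") => StringType
    case other => throw new IllegalArgumentException(s"Unsupported data type: $other")
  }

  def main(args: Array[String]): Unit = {
    // Mapped types usually end up in a Spark schema, one StructField per column.
    val columns = Seq(
      "trans_id"  -> SimpleKylinType("bigint"),
      "item_name" -> SimpleKylinType("varchar(256)"),
      "price"     -> SimpleKylinType("decimal", precision = 19, scale = 4)
    )
    val schema = StructType(columns.map { case (colName, tp) =>
      StructField(colName, toSparkType(tp), nullable = true)
    })
    schema.printTreeString() // e.g. trans_id: long, item_name: string, price: decimal(19,4)
  }
}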