in src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala [151:178]
def convertSqlTypeToSparkType(dt: RelDataType): org.apache.spark.sql.types.DataType = {
  dt.getSqlTypeName match {
    case SqlTypeName.DECIMAL => DecimalType(dt.getPrecision, dt.getScale)
    case SqlTypeName.CHAR => StringType
    case SqlTypeName.VARCHAR => StringType
    case SqlTypeName.INTEGER => IntegerType
    case SqlTypeName.TINYINT => ByteType
    case SqlTypeName.SMALLINT => ShortType
    case SqlTypeName.BIGINT => LongType
    case SqlTypeName.FLOAT => FloatType
    case SqlTypeName.DOUBLE => DoubleType
    case SqlTypeName.DATE => DateType
    case SqlTypeName.TIMESTAMP => TimestampType
    case SqlTypeName.BOOLEAN => BooleanType
    case SqlTypeName.ARRAY if dt.getComponentType.getSqlTypeName == SqlTypeName.VARCHAR => ArrayType(StringType)
    case SqlTypeName.OTHER if dt.getComponentType == null => ArrayType(StringType) // OTHER with no component type: fall back to ArrayType(StringType)
    case SqlTypeName.ANY => StringType
    // see https://olapio.atlassian.net/browse/KE-42045
    // Functions: explode, subtract_bitmap_value, subtract_bitmap_uuid
    case SqlTypeName.VARBINARY | SqlTypeName.BINARY => BinaryType
    case SqlTypeName.ARRAY =>
      val elementDataType = convertSqlTypeToSparkType(dt.getComponentType)
      ArrayType(elementDataType)
    case SqlTypeName.NULL => NullType // to support ifnull(null, null)
    case _ =>
      throw new IllegalArgumentException(s"unsupported SqlTypeName $dt")
  }
}
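
A minimal usage sketch, not part of SparderTypeUtil.scala: a hypothetical toSparkSchema helper that builds a Spark StructType from a Calcite row type by converting each field through convertSqlTypeToSparkType, carrying over the Calcite field names and nullability. The helper name and placement are assumptions for illustration only.

import scala.collection.JavaConverters._
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.spark.sql.types.{StructField, StructType}

// Hypothetical helper (not in the source file): convert a Calcite row type
// into a Spark schema, one field at a time.
def toSparkSchema(rowType: RelDataType): StructType = {
  val fields = rowType.getFieldList.asScala.map { field =>
    StructField(
      name = field.getName,
      dataType = convertSqlTypeToSparkType(field.getType),
      nullable = field.getType.isNullable)
  }
  StructType(fields.toSeq)
}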