in integration/spark/src/main/scala/org/apache/carbondata/spark/util/DataTypeConverterUtil.scala [58:91]
/**
 * Converts a Spark 2.x data-type name (e.g. "StringType", "BigIntType",
 * "decimaltype(10,2)") into the corresponding CarbonData [[DataType]].
 *
 * Matching is case-insensitive. Complex types ("arraytype...", "structtype...",
 * "maptype...") are mapped to their default Carbon complex types; "char"/"varchar"
 * variants collapse to STRING; "float" is widened to DOUBLE (Spark-2 legacy
 * behavior preserved here).
 *
 * @param dataType Spark data-type name; must be non-null (a null input fails
 *                 with NPE on the lowercase call, same as the original code).
 * @return the matching Carbon [[DataType]]
 * @throws org.apache.spark.sql.AnalysisException (via CarbonException) when the
 *         name is not a supported type
 */
def convertToCarbonTypeForSpark2(dataType: String): DataType = {
  // Locale.ROOT makes the lowercasing locale-independent: with a Turkish
  // default locale, "INTTYPE".toLowerCase would produce a dotless 'ı' and
  // every case below would fail to match.
  dataType.toLowerCase(java.util.Locale.ROOT) match {
    case "booleantype" => DataTypes.BOOLEAN
    case "stringtype" => DataTypes.STRING
    case "inttype" => DataTypes.INT
    case "integertype" => DataTypes.INT
    case "tinyinttype" => DataTypes.SHORT
    case "shorttype" => DataTypes.SHORT
    case "longtype" => DataTypes.LONG
    case "biginttype" => DataTypes.LONG
    case "numerictype" => DataTypes.DOUBLE
    case "doubletype" => DataTypes.DOUBLE
    case "floattype" => DataTypes.DOUBLE
    case "decimaltype" => DataTypes.createDefaultDecimalType
    // Parameterized decimal, e.g. "decimaltype(10,2)"; precision/scale are
    // intentionally ignored and the default decimal type is used.
    case FIXED_DECIMALTYPE(_, _) => DataTypes.createDefaultDecimalType
    case "timestamptype" => DataTypes.TIMESTAMP
    case "datetype" => DataTypes.DATE
    case "binarytype" => DataTypes.BINARY
    // Pattern guards replace the original if/else-if ladder. The previous
    // `others != null` checks were dead code: `others` is the result of
    // `dataType.toLowerCase(...)` above, which already throws NPE on null.
    case others if others.startsWith("arraytype") => DataTypes.createDefaultArrayType()
    case others if others.startsWith("structtype") => DataTypes.createDefaultStructType()
    case others if others.startsWith("maptype") => DataTypes.createDefaultMapType
    case others if others.startsWith("char") => DataTypes.STRING
    case others if others.startsWith("varchar") => DataTypes.STRING
    case _ => CarbonException.analysisException(s"Unsupported data type: $dataType")
  }
}