in spark/src/main/scala/org/apache/comet/expressions/CometCast.scala [54:109]
/**
 * Determines whether Comet supports a cast from `fromType` to `toType`.
 *
 * @param fromType the source data type of the cast
 * @param toType the target data type of the cast
 * @param timeZoneId optional session time zone id, forwarded to the string cast checks
 * @param evalMode the Comet evaluation mode for the cast (legacy/ansi/try)
 * @return the [[SupportLevel]]: `Compatible`, `Incompatible`, or `Unsupported`
 */
def isSupported(
    fromType: DataType,
    toType: DataType,
    timeZoneId: Option[String],
    evalMode: CometEvalMode.Value): SupportLevel = {
  if (fromType == toType) {
    Compatible()
  } else {
    (fromType, toType) match {
      // Match on the type name rather than the type itself so this compiles
      // against Spark versions where TimestampNTZType is not available.
      case (dt: DataType, _) if dt.typeName == "timestamp_ntz" =>
        // https://github.com/apache/datafusion-comet/issues/378
        toType match {
          case DataTypes.TimestampType | DataTypes.DateType | DataTypes.StringType =>
            Incompatible()
          case _ =>
            Unsupported
        }
      case (_: DecimalType, _: DecimalType) =>
        Compatible()
      case (DataTypes.StringType, _) =>
        canCastFromString(toType, timeZoneId, evalMode)
      case (_, DataTypes.StringType) =>
        canCastToString(fromType, timeZoneId, evalMode)
      case (DataTypes.TimestampType, _) =>
        canCastFromTimestamp(toType)
      case (_: DecimalType, _) =>
        canCastFromDecimal(toType)
      case (DataTypes.BooleanType, _) =>
        canCastFromBoolean(toType)
      case (DataTypes.ByteType, _) =>
        canCastFromByte(toType)
      case (DataTypes.ShortType, _) =>
        canCastFromShort(toType)
      case (DataTypes.IntegerType, _) =>
        canCastFromInt(toType)
      case (DataTypes.LongType, _) =>
        canCastFromLong(toType)
      case (DataTypes.FloatType, _) =>
        canCastFromFloat(toType)
      case (DataTypes.DoubleType, _) =>
        canCastFromDouble(toType)
      case (from_struct: StructType, to_struct: StructType) =>
        // `zip` truncates to the shorter array, so a struct cast with a
        // mismatched field count must not be declared Compatible based on the
        // common prefix alone — report it as Unsupported instead.
        if (from_struct.fields.length != to_struct.fields.length) {
          Unsupported
        } else {
          // Compatible only if every field-wise cast is Compatible; otherwise
          // surface the first non-Compatible result. An iterator pipeline
          // avoids the nonlocal `return` from inside a lambda (which is
          // implemented by throwing NonLocalReturnControl and is deprecated).
          from_struct.fields
            .zip(to_struct.fields)
            .iterator
            .map { case (a, b) => isSupported(a.dataType, b.dataType, timeZoneId, evalMode) }
            .collectFirst { case other if !other.isInstanceOf[Compatible] => other }
            .getOrElse(Compatible())
        }
      case _ => Unsupported
    }
  }
}