private def canCastToString()

in spark/src/main/scala/org/apache/comet/expressions/CometCast.scala [149:188]


  /**
   * Determines Comet's level of support for casting `fromType` to a string.
   *
   * @param fromType
   *   the source data type being cast to `StringType`
   * @param timeZoneId
   *   optional session time zone, forwarded when recursively checking struct fields
   * @param evalMode
   *   Comet evaluation mode, forwarded when recursively checking struct fields
   * @return
   *   [[Compatible]], [[Incompatible]] (with an explanatory note), or [[Unsupported]]
   */
  private def canCastToString(
      fromType: DataType,
      timeZoneId: Option[String],
      evalMode: CometEvalMode.Value): SupportLevel = {
    fromType match {
      case DataTypes.BooleanType => Compatible()
      case DataTypes.ByteType | DataTypes.ShortType | DataTypes.IntegerType |
          DataTypes.LongType =>
        Compatible()
      case DataTypes.DateType => Compatible()
      case DataTypes.TimestampType => Compatible()
      case DataTypes.FloatType | DataTypes.DoubleType =>
        Compatible(
          Some(
            "There can be differences in precision. " +
              "For example, the input \"1.4E-45\" will produce 1.0E-45 " +
              "instead of 1.4E-45"))
      case _: DecimalType =>
        // https://github.com/apache/datafusion-comet/issues/1068
        Compatible(
          Some(
            "There can be formatting differences in some case due to Spark using " +
              "scientific notation where Comet does not"))
      case DataTypes.BinaryType =>
        // https://github.com/apache/datafusion-comet/issues/377
        Incompatible(Some("Only works for binary data representing valid UTF-8 strings"))
      case StructType(fields) =>
        // A struct can be cast to string only if every one of its fields can.
        // Surface the first Incompatible/Unsupported field's support level;
        // otherwise the struct is Compatible. Using collectFirst avoids the
        // previous nonlocal `return` from inside the loop's lambda, which Scala
        // implements by throwing NonLocalReturnControl (deprecated in Scala 3).
        fields.iterator
          .map(field => isSupported(field.dataType, DataTypes.StringType, timeZoneId, evalMode))
          .collectFirst[SupportLevel] {
            case s: Incompatible => s
            case Unsupported => Unsupported
          }
          .getOrElse(Compatible())
      case _ => Unsupported
    }
  }