in src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala [460:506]
/**
 * Best-effort conversion of a raw value into the runtime representation
 * matching the given Calcite row type.
 *
 * @param s         the raw value to convert (often, but not always, a String;
 *                  for TIMESTAMP/TIME it is assumed to be a java.sql.Timestamp)
 * @param rowType   the target Calcite type descriptor
 * @param toCalcite when true, temporal values use Calcite's internal form
 *                  (epoch days for DATE, epoch millis for TIMESTAMP/TIME);
 *                  when false they are returned as epoch seconds
 * @return the converted value; unhandled type names fall back to `s.toString`
 * @throws RuntimeException wrapping any non-fatal conversion failure, with the
 *                          offending value and its class in the message
 */
private def safetyConvertStringToValue(s: Any, rowType: RelDataType, toCalcite: Boolean): Any = {
  try {
    rowType.getSqlTypeName match {
      case SqlTypeName.DECIMAL => transferDecimal(s, rowType)
      case SqlTypeName.CHAR | SqlTypeName.VARCHAR => s.toString
      // integral targets parse via Double first so inputs like "1.0" still convert
      case SqlTypeName.INTEGER => s.toString.toDouble.toInt
      case SqlTypeName.TINYINT => s.toString.toDouble.toByte
      case SqlTypeName.SMALLINT => s.toString.toDouble.toShort
      case SqlTypeName.BIGINT => s.toString.toDouble.toLong
      case SqlTypeName.FLOAT => java.lang.Float.parseFloat(s.toString)
      case SqlTypeName.DOUBLE => java.lang.Double.parseDouble(s.toString)
      case SqlTypeName.DATE =>
        // time over here is with timezone.
        val string = s.toString
        if (string.contains("-")) {
          val time = DateFormat.stringToDate(string).getTime
          if (toCalcite) {
            // Calcite represents DATE as days since epoch
            (time / (3600 * 24 * 1000)).toInt
          } else {
            // ms to s
            time / 1000
          }
        } else {
          // should not come to here?
          if (toCalcite) {
            (toCalciteTimestamp(DateFormat.stringToMillis(string)) / (3600 * 24 * 1000)).toInt
          } else {
            DateFormat.stringToMillis(string)
          }
        }
      case SqlTypeName.TIMESTAMP | SqlTypeName.TIME =>
        // NOTE(review): despite the method name, this arm assumes `s` is already
        // a java.sql.Timestamp (not a String) — confirm against callers.
        val ts = s.asInstanceOf[Timestamp].getTime
        if (toCalcite) {
          ts
        } else {
          // ms to s
          ts / 1000
        }
      case SqlTypeName.BOOLEAN => s
      case _ => s.toString
    }
  } catch {
    // Wrap only non-fatal failures; let OutOfMemoryError, InterruptedException,
    // etc. propagate instead of being swallowed into a RuntimeException.
    case scala.util.control.NonFatal(th) =>
      // Guard against s == null so building the message cannot itself NPE
      // and mask the original cause (string interpolation renders null fine,
      // but s.getClass would throw).
      val clazz = if (s == null) "null" else s.getClass.toString
      throw new RuntimeException(s"Error for convert value : $s , class: $clazz", th)
  }
}