in spark-load/spark-load-dpp/src/main/java/org/apache/doris/load/loadv2/dpp/SparkDpp.java [791:824]
private Object convertPartitionKey(Object srcValue, Class<?> dstClass, boolean isV2Type) throws SparkDppException {
    // Float/double columns cannot be used as partition keys, so there is nothing to convert.
    if (dstClass.equals(Float.class) || dstClass.equals(Double.class)) {
        return null;
    }
    if (srcValue instanceof Double) {
        if (dstClass.equals(Short.class)) {
            return ((Double) srcValue).shortValue();
        } else if (dstClass.equals(Integer.class)) {
            return ((Double) srcValue).intValue();
        } else if (dstClass.equals(Long.class)) {
            return ((Double) srcValue).longValue();
        } else if (dstClass.equals(BigInteger.class)) {
            // TODO(wb): gson casts the original value to double by default.
            // When the partition column is a largeint, this can corrupt the data;
            // it needs a thorough fix.
            return new BigInteger(srcValue.toString());
        } else if (dstClass.equals(java.sql.Date.class) || dstClass.equals(java.util.Date.class)) {
            double srcValueDouble = (double) srcValue;
            return convertToJavaDate((int) srcValueDouble);
        } else if (dstClass.equals(java.sql.Timestamp.class)) {
            double srcValueDouble = (double) srcValue;
            if (isV2Type) {
                return convertV2ToJavaDatetime((long) srcValueDouble);
            }
            return convertToJavaDatetime((long) srcValueDouble);
        } else {
            // The destination type is string.
            return srcValue.toString();
        }
    } else {
        LOG.warn("unsupported partition key: " + srcValue);
        throw new SparkDppException("unsupported partition key: " + srcValue);
    }
}
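
The TODO in the BigInteger branch points at a real precision hazard: gson's default Object deserialization turns every JSON number into a Double, so a largeint partition key loses precision before convertPartitionKey ever sees it, and the Double's toString() produces scientific notation that new BigInteger(String) rejects. The following is a minimal standalone sketch (not part of SparkDpp.java; the class name and sample value are hypothetical) that reproduces the issue under that assumption:

import com.google.gson.Gson;

import java.math.BigInteger;

public class LargeIntPartitionKeyDemo {
    public static void main(String[] args) {
        // gson's default Object adapter parses any JSON number into a Double.
        Object parsed = new Gson().fromJson("12345678901234567890", Object.class);
        System.out.println(parsed.getClass()); // class java.lang.Double
        System.out.println(parsed);            // 1.2345678901234567E19 (precision already lost)

        // Mirrors the BigInteger branch above: toString() of the Double yields
        // scientific notation, which new BigInteger(String) cannot parse.
        try {
            new BigInteger(parsed.toString());
        } catch (NumberFormatException e) {
            System.out.println("largeint partition key corrupted: " + e.getMessage());
        }
    }
}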