in flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java [306:519]
public static RexNode convertConstant(ExprNodeConstantDesc literal, RelOptCluster cluster)
throws SemanticException {
RexBuilder rexBuilder = cluster.getRexBuilder();
RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
RelDataType calciteDataType = HiveParserTypeConverter.convert(hiveType, dtFactory);
PrimitiveObjectInspector.PrimitiveCategory hiveTypeCategory =
hiveType.getPrimitiveCategory();
ConstantObjectInspector coi = literal.getWritableObjectInspector();
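// Unwrap the writable constant into a standard Java object so the switch below can
// dispatch on its runtime type.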
Object value =
ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
RexNode calciteLiteral;
HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
// If value is null, return a null literal directly
if (value == null) {
return rexBuilder.makeNullLiteral(calciteDataType);
}
// TODO: Verify if we need to use ConstantObjectInspector to unwrap data
switch (hiveTypeCategory) {
case BOOLEAN:
calciteLiteral = rexBuilder.makeLiteral((Boolean) value);
break;
case BYTE:
calciteLiteral =
rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
break;
case SHORT:
calciteLiteral =
rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
break;
case INT:
calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
break;
case LONG:
calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
break;
// TODO: is Decimal an exact numeric or approximate numeric?
case DECIMAL:
if (value instanceof HiveDecimal) {
value = ((HiveDecimal) value).bigDecimalValue();
} else if (value instanceof Decimal128) {
value = ((Decimal128) value).toBigDecimal();
}
if (value == null) {
// We have found an invalid decimal value while enforcing precision and scale.
// Ideally we would replace it with null here, which is what Hive does. However,
// that would have to be plumbed through, because otherwise having a different
// expression type in the AST causes plan generation to fail after CBO, probably
// due to some residual state in SA/QB.
// For now, we will not run CBO in the presence of invalid decimal literals.
throw new SemanticException(
"Expression " + literal.getExprString() + " is not a valid decimal");
// TODO: return createNullLiteral(literal);
}
BigDecimal bd = (BigDecimal) value;
BigInteger unscaled = bd.unscaledValue();
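// makeExactLiteral without an explicit type only handles decimals whose unscaled
// value fits in a long; wider values get an explicit DECIMAL type below.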
if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
calciteLiteral = rexBuilder.makeExactLiteral(bd);
} else {
// CBO doesn't support unlimited precision decimals. In practice, this will work...
// An alternative would be to throw CboSemanticException and fall back to no CBO.
RelDataType relType =
cluster.getTypeFactory()
.createSqlType(
SqlTypeName.DECIMAL,
unscaled.toString().length(),
bd.scale());
calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
}
break;
case FLOAT:
calciteLiteral =
rexBuilder.makeApproxLiteral(
new BigDecimal(Float.toString((Float) value)), calciteDataType);
break;
case DOUBLE:
// TODO: The best solution is to support NaN in expression reduction.
if (Double.isNaN((Double) value)) {
throw new SemanticException("NaN");
}
calciteLiteral =
rexBuilder.makeApproxLiteral(
new BigDecimal(Double.toString((Double) value)), calciteDataType);
break;
case CHAR:
if (value instanceof HiveChar) {
value = ((HiveChar) value).getValue();
}
calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
break;
case VARCHAR:
if (value instanceof HiveVarchar) {
value = ((HiveVarchar) value).getValue();
}
calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
break;
case STRING:
Object constantDescVal = literal.getValue();
constantDescVal =
constantDescVal instanceof NlsString
? constantDescVal
: asUnicodeString((String) value);
// Calcite treats string literals as CHAR; we treat them as STRING, just like Hive
RelDataType type = HiveParserTypeConverter.convert(hiveType, dtFactory);
// if we get here, the value is not null
type = dtFactory.createTypeWithNullability(type, false);
calciteLiteral = rexBuilder.makeLiteral(constantDescVal, type, true);
break;
case DATE:
LocalDate localDate = hiveShim.toFlinkDate(value);
DateString dateString =
new DateString(
localDate.getYear(),
localDate.getMonthValue(),
localDate.getDayOfMonth());
calciteLiteral = rexBuilder.makeDateLiteral(dateString);
break;
case TIMESTAMP:
TimestampString timestampString;
if (value instanceof Calendar) {
timestampString = TimestampString.fromCalendarFields((Calendar) value);
} else {
LocalDateTime localDateTime = hiveShim.toFlinkTimestamp(value);
timestampString =
new TimestampString(
localDateTime.getYear(),
localDateTime.getMonthValue(),
localDateTime.getDayOfMonth(),
localDateTime.getHour(),
localDateTime.getMinute(),
localDateTime.getSecond());
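// The TimestampString constructor only carries second precision; attach the
// fractional part explicitly to keep nanosecond accuracy.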
timestampString = timestampString.withNanos(localDateTime.getNano());
}
// Hive always treats timestamps as having precision 9
calciteLiteral = rexBuilder.makeTimestampLiteral(timestampString, 9);
break;
case VOID:
calciteLiteral =
rexBuilder.makeLiteral(null, dtFactory.createSqlType(SqlTypeName.NULL), true);
break;
case BINARY:
case UNKNOWN:
default:
if (hiveShim.isIntervalYearMonthType(hiveTypeCategory)) {
// Calcite year-month literal value is months as BigDecimal
BigDecimal totalMonths;
if (value instanceof HiveParserIntervalYearMonth) {
totalMonths =
BigDecimal.valueOf(
((HiveParserIntervalYearMonth) value).getTotalMonths());
} else if (value instanceof HiveIntervalYearMonth) {
totalMonths =
BigDecimal.valueOf(
((HiveIntervalYearMonth) value).getTotalMonths());
} else {
throw new SemanticException(
String.format(
"Unexpected class %s for Hive's interval year month type",
value.getClass().getName()));
}
calciteLiteral =
rexBuilder.makeIntervalLiteral(
totalMonths,
new SqlIntervalQualifier(
TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
} else if (hiveShim.isIntervalDayTimeType(hiveTypeCategory)) {
// Calcite day-time interval is millis value as BigDecimal
// Seconds converted to millis
BigDecimal secsValueBd;
// Nanos converted to millis
BigDecimal nanosValueBd;
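// BigDecimal.valueOf(nanos, 6) interprets the nano count with scale 6, i.e.
// nanos / 10^6, which is the millisecond fraction of the interval.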
if (value instanceof HiveParserIntervalDayTime) {
secsValueBd =
BigDecimal.valueOf(
((HiveParserIntervalDayTime) value).getTotalSeconds()
* 1000);
nanosValueBd =
BigDecimal.valueOf(
((HiveParserIntervalDayTime) value).getNanos(), 6);
} else if (value instanceof HiveIntervalDayTime) {
secsValueBd =
BigDecimal.valueOf(
((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
nanosValueBd =
BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
} else {
throw new SemanticException(
String.format(
"Unexpected class %s for Hive's interval day time type.",
value.getClass().getName()));
}
// Total millis = seconds converted to millis + nanos converted to millis
calciteLiteral =
rexBuilder.makeIntervalLiteral(
secsValueBd.add(nanosValueBd),
new SqlIntervalQualifier(
TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
} else {
throw new RuntimeException("UnSupported Literal type " + hiveTypeCategory);
}
}
return calciteLiteral;
}
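
// A minimal usage sketch (hypothetical call site; `cluster` and the surrounding
// planner context are assumed to exist, as elsewhere in the HiveParser code):
//
//     ExprNodeConstantDesc desc =
//             new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 42);
//     RexNode rex = HiveParserRexNodeConverter.convertConstant(desc, cluster);
//     // rex is an exact INT literal usable when building Calcite RelNodes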