in flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java [521:629]
private RexNode convertGenericFunc(ExprNodeGenericFuncDesc func) throws SemanticException {
    ExprNodeDesc tmpExprNode;
    RexNode tmpRN;
    List<RexNode> childRexNodeLst = new ArrayList<>();
    List<RelDataType> argTypes = new ArrayList<>();
    // TODO: 1) Expand to other functions as needed 2) What about types other than primitive.
    TypeInfo tgtDT = null;
    GenericUDF tgtUdf = func.getGenericUDF();
    if (tgtUdf instanceof GenericUDFIn) {
        return convertIN(func);
    }
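    // Classify the UDF so the special cases below (numeric binary ops, comparisons,
    // CASE/WHEN, unix_timestamp with arguments) each get the handling they need.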
    boolean isNumeric = isNumericBinary(func);
    boolean isCompare = !isNumeric && tgtUdf instanceof GenericUDFBaseCompare;
    boolean isWhenCase = tgtUdf instanceof GenericUDFWhen || tgtUdf instanceof GenericUDFCase;
    boolean isTransformableTimeStamp =
            func.getGenericUDF() instanceof GenericUDFUnixTimeStamp
                    && func.getChildren().size() != 0;
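    // Determine the target type the child expressions may have to be cast to: the function's
    // result type for numeric binary ops, the common comparison type for binary comparisons.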
    if (isNumeric) {
        tgtDT = func.getTypeInfo();
        assert func.getChildren().size() == 2;
        // TODO: checking 2 children is useless, compare already does that.
    } else if (isCompare && (func.getChildren().size() == 2)) {
        tgtDT =
                FunctionRegistry.getCommonClassForComparison(
                        func.getChildren().get(0).getTypeInfo(),
                        func.getChildren().get(1).getTypeInfo());
    } else if (isWhenCase) {
        // If it is a CASE or WHEN, we need to check that children do not contain stateful
        // functions as they are not allowed
        if (checkForStatefulFunctions(func.getChildren())) {
            throw new SemanticException("Stateful expressions cannot be used inside of CASE");
        }
    } else if (isTransformableTimeStamp) {
        func =
                ExprNodeGenericFuncDesc.newInstance(
                        new GenericUDFToUnixTimeStamp(), func.getChildren());
    }
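    // Convert each child to a RexNode, inserting a conversion cast wherever the child's
    // type differs from the target type in a way that requires an explicit conversion.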
    for (ExprNodeDesc childExpr : func.getChildren()) {
        tmpExprNode = childExpr;
        if (tgtDT != null
                && TypeInfoUtils.isConversionRequiredForComparison(
                        tgtDT, childExpr.getTypeInfo())) {
            if (isCompare) {
                // For compare, we will convert requisite children
                tmpExprNode =
                        HiveASTParseUtils.createConversionCast(
                                childExpr, (PrimitiveTypeInfo) tgtDT);
            } else if (isNumeric) {
                // For numeric operations, do the minimum necessary cast - casting to the
                // type of the whole expression can produce incorrect results.
                PrimitiveTypeInfo minArgType =
                        HiveParserExprNodeDescUtils.deriveMinArgumentCast(childExpr, tgtDT);
                tmpExprNode = HiveASTParseUtils.createConversionCast(childExpr, minArgType);
            } else {
                throw new AssertionError(
                        "Unexpected " + tgtDT + " - not a numeric op or compare");
            }
        }
        argTypes.add(
                HiveParserTypeConverter.convert(
                        tmpExprNode.getTypeInfo(), cluster.getTypeFactory()));
        tmpRN = convert(tmpExprNode);
        childRexNodeLst.add(tmpRN);
    }
    // process the function
    RelDataType retType =
            HiveParserTypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory());
    SqlOperator calciteOp =
            HiveParserSqlFunctionConverter.getCalciteOperator(
                    func.getFuncText(), func.getGenericUDF(), argTypes, retType, funcConverter);
    if (calciteOp.getKind() == SqlKind.CASE) {
        // If it is a case operator, we need to rewrite it
        childRexNodeLst = rewriteCaseChildren(func, childRexNodeLst);
    }
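    // Build the Calcite call from the resolved operator and the converted child expressions.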
    RexNode expr = cluster.getRexBuilder().makeCall(calciteOp, childRexNodeLst);
    // check whether we need a Calcite cast
    RexNode cast = handleExplicitCast(func, childRexNodeLst, ((RexCall) expr).getOperator());
    if (cast != null) {
        expr = cast;
        retType = cast.getType();
    }
    // TODO: the Cast function in Calcite has a bug where inferring the type of a cast
    // throws an exception
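    // Flatten nested calls of the same operator (e.g. AND/OR chains); CAST calls are skipped
    // because of the Calcite issue noted above.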
    if (flattenExpr
            && expr instanceof RexCall
            && !(((RexCall) expr).getOperator() instanceof SqlCastFunction)) {
        RexCall call = (RexCall) expr;
        expr =
                cluster.getRexBuilder()
                        .makeCall(
                                retType,
                                call.getOperator(),
                                RexUtil.flatten(call.getOperands(), call.getOperator()));
    }
    return expr;
}