private RelNode genLogicalPlan(HiveParserQB, boolean, Map<String, Integer>, HiveParserRowResolver)

in flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserCalcitePlanner.java [2710:2874]
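
Translates a single HiveParserQB query block into a Calcite RelNode tree, stage by stage: FROM sources, WHERE, GROUP BY, HAVING, SELECT, ORDER/SORT/CLUSTER/DISTRIBUTE BY, and LIMIT.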


    private RelNode genLogicalPlan(
            HiveParserQB qb,
            boolean outerMostQB,
            Map<String, Integer> outerNameToPosMap,
            HiveParserRowResolver outerRR)
            throws SemanticException {
        RelNode res;
        // First generate a RelNode for each element in the FROM clause
        Map<String, RelNode> aliasToRel = new HashMap<>();
        // 0. Check whether CBO can handle this query block;
        // canHandleQbForCbo returns null if the query can be handled, otherwise a reason.
        String reason = HiveParserUtils.canHandleQbForCbo(semanticAnalyzer.getQueryProperties());
        if (reason != null) {
            String msg = "CBO can not handle Sub Query because it: " + reason;
            throw new SemanticException(msg);
        }
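
        // From here on the query block is known to be CBO-compatible, so the steps
        // below can assume each clause maps onto a Calcite RelNode.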

        // 1. Build Rel For Src (SubQuery, TS, Join)
        // 1.1. Recurse over the subqueries to fill the subquery part of the plan
        for (String subqAlias : qb.getSubqAliases()) {
            HiveParserQBExpr qbexpr = qb.getSubqForAlias(subqAlias);
            RelNode relNode = genLogicalPlan(qbexpr);
            aliasToRel.put(subqAlias, relNode);
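            // A view alias is expected to expand into a Project on top of its definition,
            // presumably so the view's declared schema can be matched column-by-column.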
            if (qb.getViewToTabSchema().containsKey(subqAlias)
                    && !(relNode instanceof Project)) {
                throw new SemanticException(
                        "View "
                                + subqAlias
                                + " corresponds to "
                                + relNode
                                + ", rather than a Project.");
            }
        }

        // 1.2 Recurse over all the source tables
        for (String tableAlias : qb.getTabAliases()) {
            RelNode op = genTableLogicalPlan(tableAlias, qb);
            aliasToRel.put(tableAlias, op);
        }
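
        // After 1.1 and 1.2, aliasToRel holds one RelNode per FROM-clause source.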

        if (aliasToRel.isEmpty()) {
            RelNode dummySrc = LogicalValues.createOneRow(cluster);
            aliasToRel.put(HiveParserSemanticAnalyzer.DUMMY_TABLE, dummySrc);
            HiveParserRowResolver dummyRR = new HiveParserRowResolver();
            dummyRR.put(
                    HiveParserSemanticAnalyzer.DUMMY_TABLE,
                    "dummy_col",
                    new ColumnInfo(
                            getColumnInternalName(0),
                            TypeInfoFactory.intTypeInfo,
                            HiveParserSemanticAnalyzer.DUMMY_TABLE,
                            false));
            relToRowResolver.put(dummySrc, dummyRR);
            relToHiveColNameCalcitePosMap.put(dummySrc, buildHiveToCalciteColumnMap(dummyRR));
        }
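
        // A query with no FROM source at all (e.g. "SELECT 1 + 1") still needs an input,
        // so a one-row LogicalValues with a single dummy INT column stands in, registered
        // with a row resolver just like a real source.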

        if (!qb.getParseInfo().getAliasToLateralViews().isEmpty()) {
            // process lateral views
            res = genLateralViewPlan(qb, aliasToRel);
        } else if (qb.getParseInfo().getJoinExpr() != null) {
            // 1.3 process join
            res = genJoinLogicalPlan(qb.getParseInfo().getJoinExpr(), aliasToRel);
        } else {
            // Without a join or lateral view there must be exactly one source: a single TS or SubQuery
            res = aliasToRel.values().iterator().next();
        }
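
        // res now holds the combined FROM-clause plan: the lateral-view plan, the join
        // tree, or the single scan/subquery.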

        // 2. Build Rel for where Clause
        RelNode filterRel = genFilterLogicalPlan(qb, res, outerNameToPosMap, outerRR);
        res = (filterRel == null) ? res : filterRel;
        RelNode starSrcRel = res;
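        // Keep the plan as it stands before aggregation: genSelectLogicalPlan receives it
        // separately (step 5), presumably so "select *" expansion can still resolve
        // against the original source columns rather than the GROUP BY output.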

        // 3. Build Rel for GB Clause
        RelNode gbRel = genGBLogicalPlan(qb, res);
        res = gbRel == null ? res : gbRel;

        // 4. Build Rel for GB Having Clause
        RelNode gbHavingRel = genGBHavingLogicalPlan(qb, res);
        res = gbHavingRel == null ? res : gbHavingRel;

        // 5. Build Rel for Select Clause
        RelNode selectRel = genSelectLogicalPlan(qb, res, starSrcRel, outerNameToPosMap, outerRR);
        res = selectRel == null ? res : selectRel;

        // 6. Build Rel for OB Clause
        Pair<Sort, RelNode> obAndTopProj = genOBLogicalPlan(qb, res, outerMostQB);
        Sort orderRel = obAndTopProj.getKey();
        RelNode topConstrainingProjRel = obAndTopProj.getValue();
        res = orderRel == null ? res : orderRel;
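        // The pair carries the Sort plus an optional "top constraining" projection whose
        // schema is re-imposed over the final plan in step 8.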

        // Build Rel for SortBy/ClusterBy/DistributeBy. These can only be present when there is no OrderBy.
        if (orderRel == null) {
            Pair<RelNode, RelNode> distAndTopProj = genDistSortBy(qb, res, outerMostQB);
            RelNode distRel = distAndTopProj.getKey();
            topConstrainingProjRel = distAndTopProj.getValue();
            res = distRel == null ? res : distRel;
        }
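
        // HiveQL does not allow combining ORDER BY with SORT BY / CLUSTER BY /
        // DISTRIBUTE BY, which is presumably why this branch only runs when orderRel is null.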

        // 7. Build Rel for Limit Clause
        Sort limitRel = genLimitLogicalPlan(qb, res);
        if (limitRel != null) {
            if (orderRel != null) {
                // merge limit into the order-by node
                HiveParserRowResolver orderRR = relToRowResolver.remove(orderRel);
                Map<String, Integer> orderColNameToPos =
                        relToHiveColNameCalcitePosMap.remove(orderRel);
                res =
                        LogicalSort.create(
                                orderRel.getInput(),
                                orderRel.collation,
                                limitRel.offset,
                                limitRel.fetch);
                relToRowResolver.put(res, orderRR);
                relToHiveColNameCalcitePosMap.put(res, orderColNameToPos);

                relToRowResolver.remove(limitRel);
                relToHiveColNameCalcitePosMap.remove(limitRel);
            } else {
                res = limitRel;
            }
        }
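
        // Calcite's LogicalSort models ORDER BY ... LIMIT as one node carrying the
        // collation plus offset/fetch, so the standalone limit node is folded into the
        // sort and its row-resolver bookkeeping is transferred to the merged node.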

        // 8. Introduce top constraining select if needed.
        if (topConstrainingProjRel != null) {
            List<RexNode> originalInputRefs =
                    topConstrainingProjRel.getRowType().getFieldList().stream()
                            .map(input -> new RexInputRef(input.getIndex(), input.getType()))
                            .collect(Collectors.toList());
            HiveParserRowResolver topConstrainingProjRR = new HiveParserRowResolver();
            if (!HiveParserRowResolver.add(
                    topConstrainingProjRR, relToRowResolver.get(topConstrainingProjRel))) {
                LOG.warn("Duplicates detected when adding columns to RR: see previous message");
            }
            res = genSelectRelNode(originalInputRefs, topConstrainingProjRR, res);
        }
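
        // The identity Project re-emits exactly the fields of topConstrainingProjRel,
        // so columns introduced only for the ordering or distribution expressions do
        // not leak into the final schema.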

        // 9. In case this HiveParserQB corresponds to subquery then modify its RR to point to
        // subquery alias.
        if (qb.getParseInfo().getAlias() != null) {
            HiveParserRowResolver rr = relToRowResolver.get(res);
            HiveParserRowResolver newRR = new HiveParserRowResolver();
            String alias = qb.getParseInfo().getAlias();
            for (ColumnInfo colInfo : rr.getColumnInfos()) {
                String name = colInfo.getInternalName();
                String[] tmp = rr.reverseLookup(name);
                if ("".equals(tmp[0]) || tmp[1] == null) {
                    // the AST expression is not a valid column name for the table
                    tmp[1] = colInfo.getInternalName();
                }
                ColumnInfo newColInfo = new ColumnInfo(colInfo);
                newColInfo.setTabAlias(alias);
                newRR.putWithCheck(alias, tmp[1], colInfo.getInternalName(), newColInfo);
            }
            relToRowResolver.put(res, newRR);
            relToHiveColNameCalcitePosMap.put(res, buildHiveToCalciteColumnMap(newRR));
        }
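
        // For a derived table such as "(SELECT ...) t", every output column is now
        // registered under the alias t, so the enclosing query block can resolve
        // references like t.col.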

        LOG.debug("Created Plan for Query Block {}", qb.getId());

        semanticAnalyzer.setQB(qb);
        return res;
    }