private def collectFallbackNodes()

in gluten-substrait/src/main/scala/org/apache/spark/sql/execution/GlutenExplainUtils.scala [77:118]


  /**
   * Traverses a physical plan and tallies how many operators run through Gluten
   * versus how many fell back to vanilla Spark, recording a reason per fallback node.
   *
   * @param plan the query plan to inspect
   * @return a pair of (number of Gluten nodes, fallback node -> reason map)
   */
  private def collectFallbackNodes(plan: QueryPlan[_]): FallbackInfo = {
    // Running count of operators executed by Gluten (columnar path).
    var glutenNodeCount = 0
    // Accumulates a human-readable fallback reason per vanilla Spark node.
    val reasonByNode = new mutable.HashMap[String, String]

    // Bottom-up walk over the plan tree. NOTE: match-arm order is significant —
    // specific wrapper/transition operators must be matched before the generic
    // GlutenPlan and SparkPlan cases below.
    def visit(node: QueryPlan[_]): Unit = {
      node.foreachUp {
        // Command, codegen-wrapper, adapter, and transition operators are neutral:
        // they are neither counted as Gluten nodes nor reported as fallbacks.
        case _: ExecutedCommandExec =>
        case _: CommandResultExec =>
        case _: V2CommandExec =>
        case _: DataWritingCommandExec =>
        case _: WholeStageCodegenExec =>
        case _: WholeStageTransformer =>
        case _: InputAdapter =>
        case _: ColumnarInputAdapter =>
        case _: InputIteratorTransformer =>
        case _: ColumnarToRowTransition =>
        case _: RowToColumnarTransition =>
        case _: ReusedExchangeExec =>
        case _: NoopLeaf =>
        case write: WriteFilesExec if write.child.isInstanceOf[NoopLeaf] =>
        // An AQE subquery is transparent: descend into its executed plan.
        case sub: AdaptiveSparkPlanExec if sub.isSubquery => visit(sub.executedPlan)
        case _: AdaptiveSparkPlanExec =>
        // Query stages wrap a materialized sub-plan; recurse into it.
        case stage: QueryStageExec => visit(stage.plan)
        case gluten: GlutenPlan =>
          glutenNodeCount += 1
          // innerChildren may hold nested plans (e.g. subqueries) worth counting too.
          gluten.innerChildren.foreach(visit)
        case scan: InMemoryTableScanExec =>
          // A table-cache scan counts as Gluten only when backed by a Gluten cache.
          if (PlanUtil.isGlutenTableCache(scan)) {
            glutenNodeCount += 1
          } else {
            addFallbackNodeWithReason(
              scan,
              "Columnar table cache is disabled",
              reasonByNode)
          }
        case _: AQEShuffleReadExec => // Ignore
        // Anything else that is a physical operator fell back to vanilla Spark.
        case vanilla: SparkPlan =>
          handleVanillaSparkPlan(vanilla, reasonByNode)
          vanilla.innerChildren.foreach(visit)
        case _ =>
      }
    }

    visit(plan)
    (glutenNodeCount, reasonByNode.toMap)
  }