in pinot-query-planner/src/main/java/org/apache/pinot/query/planner/explain/PlanNodeMerger.java [448:562]
/**
 * Attempts to merge {@code node} with {@code context}, returning the merged node or {@code null}
 * when the two nodes are not mergeable.
 *
 * Two explained nodes merge only when they are both {@link ExplainedNode}s with the same title,
 * their children merge, and their attributes can be combined according to each attribute's
 * {@code MergeType}:
 * <ul>
 *   <li>DEFAULT: long values are summed (counter semantics); any other value behaves as
 *       IDEMPOTENT.</li>
 *   <li>IDEMPOTENT: values must be equal on both sides, otherwise the merge fails.</li>
 *   <li>IGNORABLE: unequal values are dropped from the result — unless {@code _verbose} is set,
 *       in which case the merge fails so the difference stays visible.</li>
 *   <li>UNRECOGNIZED (or any future type): the merge fails.</li>
 * </ul>
 *
 * @param node the node being visited
 * @param context the candidate node to merge {@code node} with
 * @return the merged node, or {@code null} if the nodes cannot be merged
 */
public PlanNode visitExplained(ExplainedNode node, PlanNode context) {
  // Exact class check (not instanceof): only two ExplainedNodes can merge.
  if (context.getClass() != ExplainedNode.class) {
    return null;
  }
  ExplainedNode otherNode = (ExplainedNode) context;
  if (!node.getTitle().equals(otherNode.getTitle())) {
    return null;
  }
  Map<String, Plan.ExplainNode.AttributeValue> selfAttributes = node.getAttributes();
  Map<String, Plan.ExplainNode.AttributeValue> otherAttributes = otherNode.getAttributes();
  List<PlanNode> children;
  // Combine nodes get special child-merging treatment; all others merge children pairwise.
  if (node.getTitle().contains(COMBINE)) {
    children = mergeCombineChildren(node, otherNode);
  } else {
    children = mergeChildren(node, context);
  }
  if (children == null) {
    return null;
  }
  // If either side carries no attributes, reuse the other node's attribute map as-is and only
  // swap in the merged children — no per-attribute work needed.
  if (selfAttributes.isEmpty()) {
    return otherNode.withInputs(children);
  }
  if (otherAttributes.isEmpty()) {
    return node.withInputs(children);
  }
  // Fast path precondition: every attribute on both sides merges by equality. DEFAULT values
  // without a long payload behave as IDEMPOTENT (see the switch below), so they qualify too;
  // DEFAULT longs do not, because they must be summed.
  boolean allIdempotent = Streams.concat(selfAttributes.values().stream(), otherAttributes.values().stream())
      .allMatch(val -> {
        if (val.getMergeType() == Plan.ExplainNode.AttributeValue.MergeType.IDEMPOTENT) {
          return true;
        }
        return !val.hasLong() && val.getMergeType() == Plan.ExplainNode.AttributeValue.MergeType.DEFAULT;
      });
  if (allIdempotent && selfAttributes.keySet().equals(otherAttributes.keySet())) {
    // either same map can be returned or nodes are not mergeable. Anyway, no need to create a new hash map
    for (Map.Entry<String, Plan.ExplainNode.AttributeValue> selfEntry : selfAttributes.entrySet()) {
      Plan.ExplainNode.AttributeValue otherValue = otherAttributes.get(selfEntry.getKey());
      if (!Objects.equals(otherValue, selfEntry.getValue())) {
        return null;
      }
    }
    return node.withInputs(children);
  } else {
    // Slow path: build a fresh attribute map, merging key by key.
    ExplainAttributeBuilder attributeBuilder = new ExplainAttributeBuilder();
    // First pass: keys present in selfAttributes (merged with the other side's value when present).
    for (Map.Entry<String, Plan.ExplainNode.AttributeValue> selfEntry : selfAttributes.entrySet()) {
      Plan.ExplainNode.AttributeValue selfValue = selfEntry.getValue();
      Plan.ExplainNode.AttributeValue otherValue = otherAttributes.get(selfEntry.getKey());
      // NOTE(review): self-only attributes are silently dropped here regardless of merge type,
      // while the second pass below fails the merge for other-only IDEMPOTENT attributes — the
      // asymmetry looks intentional (self has already been through prior merges) but confirm.
      if (otherValue == null) {
        continue;
      }
      if (selfValue.getMergeType() != otherValue.getMergeType()) {
        return null;
      }
      switch (selfValue.getMergeType()) {
        case DEFAULT: {
          if (selfValue.hasLong() && otherValue.hasLong()) { // If both are long, add them
            attributeBuilder.putLong(selfEntry.getKey(), selfValue.getLong() + otherValue.getLong());
          } else { // Otherwise behave as if they are idempotent
            if (!Objects.equals(otherValue, selfValue)) {
              return null;
            }
            attributeBuilder.putAttribute(selfEntry.getKey(), selfValue);
          }
          break;
        }
        case IDEMPOTENT: {
          if (!Objects.equals(otherValue, selfValue)) {
            return null;
          }
          attributeBuilder.putAttribute(selfEntry.getKey(), selfValue);
          break;
        }
        case IGNORABLE: {
          if (Objects.equals(otherValue, selfValue)) {
            attributeBuilder.putAttribute(selfEntry.getKey(), selfValue);
          } else if (_verbose) {
            // If mode is verbose, we will not merge the nodes when an ignorable attribute is different
            return null;
          }
          // Otherwise, we will ignore the attribute
          break;
        }
        // In case the merge type is unrecognized, we will not merge the nodes
        case UNRECOGNIZED:
        default:
          return null;
      }
    }
    // Second pass: keys present only in otherAttributes (keys in both were handled above).
    for (Map.Entry<String, Plan.ExplainNode.AttributeValue> otherEntry : otherAttributes.entrySet()) {
      Plan.ExplainNode.AttributeValue selfValue = selfAttributes.get(otherEntry.getKey());
      if (selfValue != null) { // it has already been merged
        continue;
      }
      switch (otherEntry.getValue().getMergeType()) {
        case DEFAULT:
          // DEFAULT attributes present on one side only are carried over unchanged.
          attributeBuilder.putAttribute(otherEntry.getKey(), otherEntry.getValue());
          break;
        case IGNORABLE:
          // Dropped silently, unless verbose mode demands the difference be preserved.
          if (_verbose) {
            return null;
          }
          break;
        case IDEMPOTENT:
        case UNRECOGNIZED:
        default:
          // An IDEMPOTENT attribute missing on one side means the nodes disagree: do not merge.
          return null;
      }
    }
    // Rebuild the node with merged children and the freshly merged attribute map.
    return new ExplainedNode(node.getStageId(), node.getDataSchema(), node.getNodeHint(), children, node.getTitle(),
        attributeBuilder.build());
  }
}
}