wayang-platforms/wayang-spark/src/main/java/org/apache/wayang/spark/operators/ml/SparkDecisionTreeClassificationOperator.java [74:100]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        super(that);
    }

    @Override
    public List<ChannelDescriptor> getSupportedInputChannels(int index) {
        return Arrays.asList(RddChannel.UNCACHED_DESCRIPTOR, RddChannel.CACHED_DESCRIPTOR);
    }

    @Override
    public List<ChannelDescriptor> getSupportedOutputChannels(int index) {
        return Collections.singletonList(CollectionChannel.DESCRIPTOR);
    }

    @Override
    public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
            ChannelInstance[] inputs,
            ChannelInstance[] outputs,
            SparkExecutor sparkExecutor,
            OptimizationContext.OperatorContext operatorContext) {
        assert inputs.length == this.getNumInputs();
        assert outputs.length == this.getNumOutputs();

        final RddChannel.Instance x = (RddChannel.Instance) inputs[0];
        final RddChannel.Instance y = (RddChannel.Instance) inputs[1];
        final CollectionChannel.Instance output = (CollectionChannel.Instance) outputs[0];

        final JavaRDD<double[]> xRdd = x.provideRdd();
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



wayang-platforms/wayang-spark/src/main/java/org/apache/wayang/spark/operators/ml/SparkLinearRegressionOperator.java [74:100]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        super(that);
    }

    @Override
    public List<ChannelDescriptor> getSupportedInputChannels(int index) {
        return Arrays.asList(RddChannel.UNCACHED_DESCRIPTOR, RddChannel.CACHED_DESCRIPTOR);
    }

    @Override
    public List<ChannelDescriptor> getSupportedOutputChannels(int index) {
        return Collections.singletonList(CollectionChannel.DESCRIPTOR);
    }

    @Override
    public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
            ChannelInstance[] inputs,
            ChannelInstance[] outputs,
            SparkExecutor sparkExecutor,
            OptimizationContext.OperatorContext operatorContext) {
        assert inputs.length == this.getNumInputs();
        assert outputs.length == this.getNumOutputs();

        final RddChannel.Instance x = (RddChannel.Instance) inputs[0];
        final RddChannel.Instance y = (RddChannel.Instance) inputs[1];
        final CollectionChannel.Instance output = (CollectionChannel.Instance) outputs[0];

        final JavaRDD<double[]> xRdd = x.provideRdd();
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
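The two excerpts above are identical: both operators declare the same channel support (RDD features and labels in, a collection out) and open evaluate() with the same preamble that casts the input channels and materializes the feature RDD. Below is a minimal sketch of how this shared boilerplate could be hoisted into a common abstract base class; the class name SparkMlTrainingOperatorBase and the train(...) hook are hypothetical and are not part of the Wayang codebase.

// Hypothetical refactoring sketch -- not part of Apache Wayang.
// Imports would mirror those of the two operators above (RddChannel, CollectionChannel,
// ChannelDescriptor, ChannelInstance, ExecutionLineageNode, OptimizationContext,
// SparkExecutor, Tuple, JavaRDD, Arrays, Collections, List, Collection).
public abstract class SparkMlTrainingOperatorBase {

    public List<ChannelDescriptor> getSupportedInputChannels(int index) {
        // Features and labels may arrive as cached or uncached RDDs.
        return Arrays.asList(RddChannel.UNCACHED_DESCRIPTOR, RddChannel.CACHED_DESCRIPTOR);
    }

    public List<ChannelDescriptor> getSupportedOutputChannels(int index) {
        // The trained model is emitted through a collection channel.
        return Collections.singletonList(CollectionChannel.DESCRIPTOR);
    }

    public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
            ChannelInstance[] inputs,
            ChannelInstance[] outputs,
            SparkExecutor sparkExecutor,
            OptimizationContext.OperatorContext operatorContext) {
        // Shared preamble that both concrete operators currently duplicate:
        // cast the feature/label channels, expose the output channel, and
        // materialize the feature RDD.
        final RddChannel.Instance x = (RddChannel.Instance) inputs[0];
        final RddChannel.Instance y = (RddChannel.Instance) inputs[1];
        final CollectionChannel.Instance output = (CollectionChannel.Instance) outputs[0];
        final JavaRDD<double[]> xRdd = x.provideRdd();

        // Delegate the model-specific training to the concrete subclass.
        return this.train(xRdd, y, output, operatorContext);
    }

    /** Hypothetical hook implemented by each concrete ML training operator. */
    protected abstract Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> train(
            JavaRDD<double[]> features,
            RddChannel.Instance labels,
            CollectionChannel.Instance output,
            OptimizationContext.OperatorContext operatorContext);
}

With such a base class, each concrete operator (decision tree classification, linear regression) would only implement train(...) with its model-specific training call, removing the duplicated lines flagged above.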
