utilities/Utils.py [562:578]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # The exported .flow file is always staged as the first processing input.
    processing_inputs = []
    flow_processing_input = _create_flow_notebook_processing_input(processing_dir, flow_uri)
    processing_inputs.append(flow_processing_input)

    # Add one input per dataset the flow reads; only S3 sources are supported.
    for node in flow["nodes"]:
        if "dataset_definition" in node["parameters"]:
            data_def = node["parameters"]["dataset_definition"]
            name = data_def["name"]
            source_type = data_def["datasetSourceType"]

            if source_type == "S3":
                s3_processing_input = _create_s3_processing_input(
                    processing_dir, name, data_def)
                processing_inputs.append(s3_processing_input)
            else:
                raise ValueError(f"{source_type} is not supported for Data Wrangler Processing.")
    return processing_inputs
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
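
Neither file shows the two _create_* helpers the loop relies on. Below is a minimal sketch of what they might look like, assuming they wrap sagemaker.processing.ProcessingInput and that S3-sourced nodes in the exported .flow file carry their URI under "s3ExecutionContext" (both are assumptions; neither is confirmed by these excerpts):

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
from sagemaker.processing import ProcessingInput


def _create_flow_notebook_processing_input(processing_dir, flow_uri):
    # Stages the exported .flow file so the job container can read it locally.
    return ProcessingInput(
        source=flow_uri,
        destination=f"{processing_dir}/flow",
        input_name="flow",
        s3_data_type="S3Prefix",
        s3_input_mode="File",
    )


def _create_s3_processing_input(processing_dir, name, data_def):
    # Stages one S3-backed dataset under its own local path inside the job.
    return ProcessingInput(
        source=data_def["s3ExecutionContext"]["s3Uri"],  # assumed flow-file schema
        destination=f"{processing_dir}/{name}",
        input_name=name,
        s3_data_type="S3Prefix",
        s3_input_mode="File",
    )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -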



utilities/feature_store_helper.py [1758:1774]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # The exported .flow file is always staged as the first processing input.
        processing_inputs = []
        flow_processing_input = _create_flow_notebook_processing_input(processing_dir, flow_uri)
        processing_inputs.append(flow_processing_input)

        # Add one input per dataset the flow reads; only S3 sources are supported.
        for node in flow["nodes"]:
            if "dataset_definition" in node["parameters"]:
                data_def = node["parameters"]["dataset_definition"]
                name = data_def["name"]
                source_type = data_def["datasetSourceType"]

                if source_type == "S3":
                    s3_processing_input = _create_s3_processing_input(
                        processing_dir, name, data_def)
                    processing_inputs.append(s3_processing_input)
                else:
                    raise ValueError(f"{source_type} is not supported for Data Wrangler Processing.")
        return processing_inputs
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
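
In either copy, the returned list plugs directly into a SageMaker Processing job. A usage sketch follows; the role ARN, image URI, and instance settings are illustrative placeholders, not values taken from these files:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
from sagemaker.processing import Processor

# Placeholder configuration; substitute a real execution role and the
# Data Wrangler container image URI for your region.
processor = Processor(
    role="arn:aws:iam::123456789012:role/example-processing-role",
    image_uri="<data-wrangler-container-image-uri>",
    instance_count=1,
    instance_type="ml.m5.4xlarge",
)

processor.run(
    inputs=processing_inputs,  # the list built by the duplicated code above
    outputs=[],                # a real job would also declare ProcessingOutputs
    wait=True,
)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -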



