// easy_rec/python/protos/pipeline.proto

syntax = "proto2";

package protos;

import "easy_rec/python/protos/data_source.proto";
import "easy_rec/python/protos/dataset.proto";
import "easy_rec/python/protos/easy_rec_model.proto";
import "easy_rec/python/protos/eval.proto";
import "easy_rec/python/protos/export.proto";
import "easy_rec/python/protos/feature_config.proto";
import "easy_rec/python/protos/hive_config.proto";
import "easy_rec/python/protos/train.proto";

// EasyRecConfig: the top-level pipeline config, aggregating every
// sub config (train/eval input sources, model dir, training,
// evaluation, dataset, feature, model and export settings).
message EasyRecConfig {
  // Training data source; exactly one of these may be set.
  oneof train_path {
    string train_input_path = 1;
    KafkaServer kafka_train_input = 2;
    DatahubServer datahub_train_input = 12;
    HiveConfig hive_train_input = 101;
    BinaryDataInput binary_train_input = 102;
    string parquet_train_input = 103;
  }

  // Evaluation data source; exactly one of these may be set.
  oneof eval_path {
    string eval_input_path = 3;
    KafkaServer kafka_eval_input = 4;
    DatahubServer datahub_eval_input = 13;
    HiveConfig hive_eval_input = 201;
    BinaryDataInput binary_eval_input = 202;
    string parquet_eval_input = 203;
  }

  // Directory where checkpoints and summaries are written.
  required string model_dir = 5;

  // Train config, including optimizer, weight decay, num_steps and so on.
  optional TrainConfig train_config = 6;

  optional EvalConfig eval_config = 7;

  optional DatasetConfig data_config = 8;

  // For compatibility: legacy flat list of per-feature configs.
  repeated FeatureConfig feature_configs = 9;

  // Preferred grouped feature config (v2).
  optional FeatureConfigV2 feature_config = 10;

  // Recommendation model config.
  required EasyRecModel model_config = 14;

  optional ExportConfig export_config = 15;

  // Json file[RTP FG] to define input data and features:
  // * In easy_rec.python.utils.fg_util.load_fg_json_to_config:
  //     data_config and feature_config will be generated
  //     based on fg_json.
  // * After generation, a prefix '!' is added:
  //     fg_json_path = '!' + fg_json_path
  //   indicates config update is already done, and should not
  //   be updated anymore. In this way, we make load_fg_json_to_config
  //   function reentrant.
  // This step is done before edit_config_json to take effect.
  optional string fg_json_path = 16;
}