chatlearn/runtime/environment.py [235:251]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            assert len(should_stop) == 1
            if should_stop[0]:
                break

    def execute(self, is_eval):
        data_queues, out_queue = self.setup_queues()
        data_producer_iter = cycle(iter(self.models[0].replicas))
        args = []
        for mb in range(self.batch_per_episode()):
            current_data_producer = next(data_producer_iter)
            query = current_data_producer.master.next_batch.remote(is_eval=is_eval)
            encoded_data = encode_data(mb, query)
            replica_data_list = []
            model_to_replica = {}
            for model_group in self.model_flow.flow_topology:
                for model_node in model_group:
                    model = model_node.model
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
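
The excerpt above shows the core dispatch pattern of execute(): replicas of the first model are treated as data producers, cycled round-robin with itertools.cycle, and each micro-batch is fetched via a Ray remote call and tagged with its micro-batch index before being routed through the model flow. Below is a minimal, self-contained sketch of that round-robin tagging pattern; the Replica class, its next_batch signature, and the encode_data helper here are simplified stand-ins for illustration (the real code issues current_data_producer.master.next_batch.remote(...) on Ray actors), not the chatlearn implementation.

    # Minimal sketch of the round-robin batch dispatch used in execute() above.
    # Replica, next_batch, and encode_data are hypothetical stand-ins; the real
    # code calls next_batch as a Ray remote method on a replica's master actor.
    from itertools import cycle

    class Replica:
        def __init__(self, name):
            self.name = name

        def next_batch(self, is_eval=False):
            return f"batch from {self.name} (eval={is_eval})"

    def encode_data(mb_index, data):
        # Tag the payload with its micro-batch index so downstream consumers
        # can associate results with the micro-batch that produced them.
        return {"iter": mb_index, "data": data}

    replicas = [Replica("replica-0"), Replica("replica-1")]
    producer_iter = cycle(replicas)      # round-robin over data producers
    batch_per_episode = 4

    for mb in range(batch_per_episode):
        producer = next(producer_iter)   # alternate producers per micro-batch
        encoded = encode_data(mb, producer.next_batch(is_eval=False))
        print(encoded)

Cycling over replicas spreads the next_batch calls evenly across data-producer replicas, and tagging each batch with mb keeps the micro-batch ordering recoverable even when remote calls complete out of order.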



chatlearn/runtime/environment.py [301:317]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            assert len(should_stop) == 1
            if should_stop[0]:
                break

    def execute(self, is_eval):
        data_queues, out_queue = self.setup_queues()
        data_producer_iter = cycle(iter(self.models[0].replicas))
        args = []
        for mb in range(self.batch_per_episode()):
            current_data_producer = next(data_producer_iter)
            query = current_data_producer.master.next_batch.remote(is_eval=is_eval)
            encoded_data = encode_data(mb, query)
            replica_data_list = []
            model_to_replica = {}
            for model_group in self.model_flow.flow_topology:
                for model_node in model_group:
                    model = model_node.model
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



