in elf/utils_elf.py [0:0]
def _init_collectors(self, GC, co, descriptions, use_gpu=True, use_numpy=False):
    """Create batch collectors for each description and wire up bookkeeping maps.

    For every entry in ``descriptions`` this registers ``num_recv_thread``
    collectors with ``GC`` and builds an input (and optional reply) ``Batch``
    per collector group.

    Args:
        GC: Game-context object exposing ``CreateGroupStat``, ``AddCollectors``
            and ``GetCollectorInfos`` (project type — contract assumed from usage).
        co: Context options; ``co.num_games`` and ``co.num_collectors`` are read.
        descriptions: dict mapping name -> spec dict with keys ``"input"``,
            ``"reply"`` (may be None), ``"batchsize"`` and optional
            ``"name"`` / ``"timeout_usec"``.
        use_gpu: forwarded to ``Batch.load``.
        use_numpy: forwarded to ``Batch.load``.

    Side effects:
        Sets ``self.GC``, ``self.inputs``, ``self.replies``, ``self.idx2name``,
        ``self.name2idx``, ``self.gid2gpu``, ``self.gpu2gid``.
    """
    num_games = co.num_games
    # Total requested batch size across all descriptions; used to derive the
    # collector-thread count when co.num_collectors is not set explicitly.
    total_batchsize = sum(v["batchsize"] for v in descriptions.values())
    if co.num_collectors > 0:
        num_recv_thread = co.num_collectors
    else:
        # NOTE: raises ZeroDivisionError if descriptions is empty / all
        # batch sizes are 0 — same as the original behavior.
        num_recv_thread = math.floor(num_games / total_batchsize)
        num_recv_thread = max(num_recv_thread, 1)
    print("#recv_thread = %d" % num_recv_thread)

    inputs = []
    replies = []
    idx2name = {}                  # collector group id -> description key
    name2idx = defaultdict(list)   # description key -> list of group ids
    gid2gpu = {}                   # group id -> index into gpu2gid
    gpu2gid = []                   # one list of group ids per description
    for key, v in descriptions.items():
        input_desc = v["input"]    # renamed from `input` to avoid shadowing the builtin
        reply = v["reply"]
        batchsize = v["batchsize"]
        # History length is the longer of the input/reply time horizons.
        T = input_desc["T"]
        if reply is not None and reply["T"] > T:
            T = reply["T"]
        gstat = GC.CreateGroupStat()
        gstat.hist_len = T
        gstat.name = v.get("name", "")
        timeout_usec = v.get("timeout_usec", 0)

        gpu2gid.append([])
        for _ in range(num_recv_thread):
            group_id = GC.AddCollectors(batchsize, len(gpu2gid) - 1, timeout_usec, gstat)

            input_batch = Batch.load(GC, "input", input_desc, group_id, use_gpu=use_gpu, use_numpy=use_numpy)
            input_batch.batchsize = batchsize
            inputs.append(input_batch)

            if reply is not None:
                reply_batch = Batch.load(GC, "reply", reply, group_id, use_gpu=use_gpu, use_numpy=use_numpy)
                reply_batch.batchsize = batchsize
                replies.append(reply_batch)
            else:
                replies.append(None)

            idx2name[group_id] = key
            name2idx[key].append(group_id)
            gpu2gid[-1].append(group_id)
            gid2gpu[group_id] = len(gpu2gid) - 1
    print(GC.GetCollectorInfos())

    # Zero out all replies.
    for reply in replies:
        if reply is not None:
            reply.setzero()

    self.GC = GC
    self.inputs = inputs
    self.replies = replies
    self.idx2name = idx2name
    self.name2idx = name2idx
    self.gid2gpu = gid2gpu
    self.gpu2gid = gpu2gid