in ppo_ewma/torch_util.py [0:0]
def flatten_tensors(xs, dtype=None, buf=None):
    """Copy a sequence of tensors into a single flat 1-D buffer.

    Args:
        xs: sequence of tensors to flatten and concatenate, in order.
        dtype: optional dtype for a newly allocated buffer. Ignored when
            *buf* is supplied (``copy_`` casts element-wise into the
            buffer's own dtype).
        buf: optional preallocated 1-D tensor with at least
            ``sum(x.numel() for x in xs)`` elements; pass it to reuse
            storage across calls instead of reallocating.

    Returns:
        The filled buffer — *buf* itself when given, otherwise a freshly
        allocated 1-D tensor on the same device as ``xs[0]``.
    """
    if buf is None:
        total = sum(x.numel() for x in xs)
        if xs:
            # Inherit device (and dtype, unless overridden) from the first tensor.
            buf = xs[0].new_empty(total, dtype=dtype)
        else:
            # No reference tensor to inherit device/dtype from; return an
            # empty tensor instead of crashing on xs[0].
            import torch  # local import: file's top-level imports not visible here
            buf = torch.empty(0, dtype=dtype)
    i = 0
    for x in xs:
        n = x.numel()
        # reshape(-1), not view(-1): view raises on non-contiguous tensors
        # (e.g. transposes/slices); reshape copies only when it must.
        buf[i : i + n].copy_(x.reshape(-1))
        i += n
    return buf