in workload_generator/AIOB_simAI_workload_generator.py
def dump_file(self, filename):
    """Write the generated workload to <filename>.txt in SimAI format."""
    filename = filename + ".txt"
    with open(filename, "w") as f:
        if not self.args.multi_all_reduce_enable:
            # Single-all-reduce mode: a MICRO header, the item count,
            # then one tab-separated line per workload item.
            f.write("MICRO" + "\n")
            f.write(str(len(self.workload)) + "\n")
            for item in self.workload:
                f.write(
                    "\t".join([str(getattr(item, k)) for k in item.__dict__.keys()])
                    + "\n"
                )
        else:
            # Multi-all-reduce mode: a HYBRID_TRANSFORMER_FWD_IN_BCKWD header
            # describing the parallelism layout (TP/EP/PP groups, gradient
            # accumulation, total GPUs), then the same count and item lines.
            f.write(
                f"HYBRID_TRANSFORMER_FWD_IN_BCKWD model_parallel_NPU_group: {self.args.tensor_model_parallel_size} \
expert_parallel_npu_group: {self.args.expert_model_parallel_size} pp: {self.args.pipeline_model_parallel} \
ga: {self.ga_num} all_gpus: {self.args.world_size} checkpoints: 0 checkpoint_initiates: 0"
                + "\n"
            )
            f.write(str(len(self.workload)) + "\n")
            for item in self.workload:
                f.write(
                    "\t".join([str(getattr(item, k)) for k in item.__dict__.keys()])
                    + "\n"
                )
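
# A minimal, runnable sketch of the MICRO-branch file layout that dump_file
# produces: a header line, an item-count line, then one tab-separated line per
# workload item built from the item's __dict__. The WorkItem fields, the field
# values, and the output filename below are illustrative assumptions, not the
# generator's real schema. In the multi_all_reduce_enable branch only the
# header line changes (to the HYBRID_TRANSFORMER_FWD_IN_BCKWD form carrying
# the parallelism parameters); the per-item serialization is identical.
from dataclasses import dataclass


@dataclass
class WorkItem:
    comm_type: str   # hypothetical field, e.g. "all_reduce"
    comm_group: str  # hypothetical field, e.g. "dp"
    msg_size: int    # hypothetical field: message size in bytes


workload = [
    WorkItem("all_reduce", "dp", 1048576),
    WorkItem("all_gather", "tp", 262144),
]

with open("demo_workload.txt", "w") as f:
    f.write("MICRO" + "\n")
    f.write(str(len(workload)) + "\n")
    for item in workload:
        # Same serialization as dump_file: join every attribute value with tabs.
        f.write("\t".join(str(getattr(item, k)) for k in item.__dict__.keys()) + "\n")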