in workload_generator/AIOB_simAI_workload_generator.py
def dump_file(self, filename):
    """Dump the generated workload to <filename>.txt."""
    filename = filename + ".txt"
    # Pipeline-parallel point-to-point message size, derived from one micro-batch
    # of activations; split across TP ranks when sequence parallelism is enabled.
    pp_comm_value = 2 * self.args.micro_batch * self.args.seq_length * self.args.hidden_size
    if self.args.enable_sequence_parallel:
        pp_comm_value /= self.args.tensor_model_parallel_size
    pp_comm = (
        f"pp_comm: {pp_comm_value}"
        if self.args.pipeline_model_parallel != 1
        else "pp_comm: 0"
    )
    with open(filename, "w") as f:
        # Header line: parallelism configuration followed by the pp_comm field.
        f.write((
            f"HYBRID_TRANSFORMER_FWD_IN_BCKWD model_parallel_NPU_group: {self.args.tensor_model_parallel_size} "
            f"ep: {self.args.expert_model_parallel_size} "
            f"pp: {self.args.pipeline_model_parallel} "
            f"vpp: {self.args.num_layers} "
            f"ga: {self.ga_num} all_gpus: {self.args.world_size} "
            f"checkpoints: 0 checkpoint_initiates: 0 "
        ) + pp_comm + "\n")
        # Second line: number of workload entries, then one tab-separated row per entry.
        f.write(str(len(self.workload)) + "\n")
        for item in self.workload:
            f.write(
                "\t".join([str(getattr(item, k)) for k in item.__dict__.keys()])
                + "\n"
            )
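For context, below is a minimal, self-contained sketch of the file layout dump_file produces. The FakeItem class, the example_workload.txt path, and all numeric values are hypothetical placeholders chosen for illustration; only the three-part layout (header line, entry count, tab-separated rows) is taken from the method above.

from dataclasses import dataclass

@dataclass
class FakeItem:
    # Hypothetical stand-in for a workload entry; real attribute names come
    # from the generator's own work-item objects.
    comm_type: str = "all_reduce"
    comm_group: str = "tp_group"
    msg_size: int = 1048576

items = [FakeItem(), FakeItem(msg_size=2097152)]

with open("example_workload.txt", "w") as f:
    # Line 1: parallelism settings plus the pp_comm field, as written by dump_file.
    f.write(
        "HYBRID_TRANSFORMER_FWD_IN_BCKWD model_parallel_NPU_group: 8 "
        "ep: 1 pp: 2 vpp: 32 ga: 4 all_gpus: 64 "
        "checkpoints: 0 checkpoint_initiates: 0 pp_comm: 8388608\n"
    )
    # Line 2: number of workload rows that follow.
    f.write(str(len(items)) + "\n")
    # One tab-separated row per entry, values in attribute order.
    for item in items:
        f.write("\t".join(str(v) for v in vars(item).values()) + "\n")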