in smdebug/tensorflow/base_hook.py [0:0]
def _initialize_writers(self, only_initialize_if_missing=False) -> None:
    """Create the FileWriter(s) used to persist tensors for the current step.

    In keras we are sometimes not sure whether a writer has already been
    initialized (e.g. for metrics at the end of an epoch), so callers may pass
    only_initialize_if_missing=True to keep an existing writer instead of
    replacing it.

    :param only_initialize_if_missing: when True, leave any already-created
        writer(s) in place; when False (default), always (re)create them.
    :raises NotImplementedError: for an unrecognized distribution strategy.
    """
    if self.dry_run:
        # Dry runs never write files, so no writer is needed.
        return

    def _create_single_writer():
        # (Re)create the single per-worker writer unless the caller asked to
        # keep an existing one. Shared by the non-MIRRORED code paths below.
        if self.writer is None or not only_initialize_if_missing:
            self.writer = FileWriter(
                trial_dir=self.out_dir, step=self.step, worker=self.worker
            )

    if self.distribution_strategy in [
        TFDistributionStrategy.PARAMETER_SERVER,
        TFDistributionStrategy.HOROVOD,
        TFDistributionStrategy.SMDATAPARALLEL,
    ]:
        # Only the chief worker writes unless save_all_workers was requested.
        if self.save_all_workers is True or self.worker == self.chief_worker:
            _create_single_writer()
    elif self.distribution_strategy == TFDistributionStrategy.MIRRORED:
        if len(self.device_map):
            # One writer per device so each replica's tensors land in its own
            # event file, keyed by the device string.
            for device, device_string in self.device_map.items():
                if device_string in self.writer_map and only_initialize_if_missing:
                    continue
                if self.save_all_workers is True or device == self.chief_worker:
                    self.writer_map[device_string] = FileWriter(
                        trial_dir=self.out_dir, step=self.step, worker=device_string
                    )
        else:
            # Training on CPU when all device strings have cpu: fall back to a
            # single writer for this worker.
            _create_single_writer()
    elif self.distribution_strategy == TFDistributionStrategy.NONE:
        _create_single_writer()
    else:
        raise NotImplementedError