def export_collections()

in smdebug/tensorflow/base_hook.py


    def export_collections(self):
        # When TF 2.x GradientTape is used, prepare_layers() is skipped:
        # the tensors provided by GradientTape are eager tensors and hence
        # do not require preparing layers.
        if not self.tape:
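            # Graph-mode tensors must already be prepared for the current mode.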
            assert self._prepared_tensors[self.mode]

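        # With save_all_workers disabled only one worker writes its data, so
        # the collection files should record a single worker.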
        num_workers = self._get_num_workers() if self.save_all_workers else 1
        self.collection_manager.set_num_workers(num_workers)

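        # For parameter server, Horovod, and SMDataParallel, only the chief
        # worker exports unless save_all_workers is set; every other worker
        # returns without writing a file.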
        if self.distribution_strategy in [
            TFDistributionStrategy.PARAMETER_SERVER,
            TFDistributionStrategy.HOROVOD,
            TFDistributionStrategy.SMDATAPARALLEL,
        ]:
            if not self.save_all_workers and self.worker != self.chief_worker:
                return
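        # With MirroredStrategy each device gets its own collections file,
        # named after the serialized device string.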
        elif self.distribution_strategy == TFDistributionStrategy.MIRRORED:
            if self.device_map:
                for device, serialized_device in self.device_map.items():
                    if self.save_all_workers or device == self.chief_worker:
                        collection_file_name = f"{serialized_device}_collections.json"
                        self.collection_manager.export(self.out_dir, collection_file_name)
                return

        # Reaching here means one of the following:
        # - MIRRORED with an empty device_map (CPU training)
        # - Horovod/parameter server/SMDataParallel where this worker is the
        #   chief worker, or save_all_workers is True
        collection_file_name = f"{self.worker}_collections.json"
        self.collection_manager.export(self.out_dir, collection_file_name)
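
The exported file is a JSON document (note the "_collections.json" names
above), so it can be inspected directly. A minimal sketch, assuming a
hypothetical out_dir and worker name (neither comes from the source):

    import json
    import os

    # Hypothetical values: export_collections() derives the real file name
    # from the hook's out_dir and the worker (or serialized device) string.
    out_dir = "/tmp/smdebug_run"
    worker = "worker_0"

    path = os.path.join(out_dir, f"{worker}_collections.json")
    with open(path) as f:
        print(json.load(f))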