def _ProcessEvent()

in tensorboard/backend/event_processing/plugin_event_accumulator.py [0:0]


    def _ProcessEvent(self, event):
        """Called whenever an event is loaded.

        Dispatches on the event's populated oneof field and updates the
        accumulator's state: file version, graph/metagraph bytes, tagged
        run metadata, and per-tag summary metadata/tensor values.

        Args:
          event: An `Event` proto read from the event file being loaded.
        """
        if self._first_event_timestamp is None:
            self._first_event_timestamp = event.wall_time

        if event.HasField("file_version"):
            new_file_version = _ParseFileVersion(event.file_version)
            if self.file_version and self.file_version != new_file_version:
                ## This should not happen.
                logger.warning(
                    "Found new file_version for event.proto. This will "
                    "affect purging logic for TensorFlow restarts. "
                    "Old: %s New: %s",
                    self.file_version,
                    new_file_version,
                )
            self.file_version = new_file_version

        self._MaybePurgeOrphanedData(event)

        ## Process the event.
        # GraphDef and MetaGraphDef are handled in a special way:
        # If no graph_def Event is available, but a meta_graph_def is, and it
        # contains a graph_def, then use the meta_graph_def.graph_def as our graph.
        # If a graph_def Event is available, always prefer it to the graph_def
        # inside the meta_graph_def.
        if event.HasField("graph_def"):
            if self._graph is not None:
                logger.warning(
                    "Found more than one graph event per run, or there was "
                    "a metagraph containing a graph_def, as well as one or "
                    "more graph events.  Overwriting the graph with the "
                    "newest event."
                )
            self._graph = event.graph_def
            self._graph_from_metagraph = False
        elif event.HasField("meta_graph_def"):
            if self._meta_graph is not None:
                logger.warning(
                    "Found more than one metagraph event per run. "
                    "Overwriting the metagraph with the newest event."
                )
            self._meta_graph = event.meta_graph_def
            if self._graph is None or self._graph_from_metagraph:
                # We may have a graph_def in the metagraph.  If so, and no
                # graph_def is directly available, use this one instead.
                meta_graph = meta_graph_pb2.MetaGraphDef()
                meta_graph.ParseFromString(self._meta_graph)
                if meta_graph.graph_def:
                    if self._graph is not None:
                        logger.warning(
                            "Found multiple metagraphs containing graph_defs, "
                            "but did not find any graph events.  Overwriting the "
                            "graph with the newest metagraph version."
                        )
                    self._graph_from_metagraph = True
                    self._graph = meta_graph.graph_def.SerializeToString()
        elif event.HasField("tagged_run_metadata"):
            tag = event.tagged_run_metadata.tag
            if tag in self._tagged_metadata:
                logger.warning(
                    'Found more than one "run metadata" event with tag '
                    "%s. Overwriting it with the newest event.",
                    tag,
                )
            self._tagged_metadata[tag] = event.tagged_run_metadata.run_metadata
        elif event.HasField("summary"):
            for value in event.summary.value:
                if value.HasField("metadata"):
                    tag = value.tag
                    # We only store the first instance of the metadata. This check
                    # is important: the `FileWriter` does strip metadata from all
                    # values except the first one per each tag, but a new
                    # `FileWriter` is created every time a training job stops and
                    # restarts. Hence, we must also ignore non-initial metadata in
                    # this logic.
                    if tag not in self.summary_metadata:
                        self.summary_metadata[tag] = value.metadata
                        plugin_data = value.metadata.plugin_data
                        if plugin_data.plugin_name:
                            # Lock guards the plugin->tag->content mapping, which
                            # may be read concurrently by plugin queries.
                            with self._plugin_tag_lock:
                                self._plugin_to_tag_to_content[
                                    plugin_data.plugin_name
                                ][tag] = plugin_data.content
                        else:
                            logger.warning(
                                (
                                    "This summary with tag %r is oddly not associated with a "
                                    "plugin."
                                ),
                                tag,
                            )

                if value.HasField("tensor"):
                    datum = value.tensor
                    tag = value.tag
                    if not tag:
                        # This tensor summary was created using the old method that used
                        # plugin assets. We must still continue to support it.
                        tag = value.node_name
                    self._ProcessTensor(tag, event.wall_time, event.step, datum)