fn test()

in tensorboard/data/server/run.rs [431:615]


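    /// End-to-end test for `RunLoader`: writes scalar, tensor, graph, and tagged
    /// run metadata events to two event files (the second simulating a job that
    /// restarted after preemption), reloads them into a `Commit`, and checks the
    /// committed time series. The `write_*` calls are event-file helpers defined
    /// elsewhere in this crate.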
    fn test() -> Result<(), Box<dyn std::error::Error>> {
        let logdir = tempfile::tempdir()?;
        let f1_name = logdir.path().join("tfevents.123");
        let f2_name = logdir.path().join("tfevents.456");
        let mut f1 = BufWriter::new(File::create(&f1_name)?);
        let mut f2 = BufWriter::new(File::create(&f2_name)?);

        // Write file version events: each marks its file as "brain.Event:2" format
        // and records that file's initial wall time.
        for (f, wall_time) in &mut [(&mut f1, 1234.0), (&mut f2, 2345.0)] {
            let file_version = pb::Event {
                wall_time: *wall_time,
                what: Some(pb::event::What::FileVersion("brain.Event:2".to_string())),
                ..Default::default()
            };
            f.write_event(&file_version)?;
        }

        // Write some data points across both files.
        let run = Run("train".to_string());
        let tag = Tag("accuracy".to_string());
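        // Write a run-level graph and tagged run metadata to f1; both should be
        // committed as blob sequences (checked below).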
        f1.write_graph(
            Step(0),
            WallTime::new(1235.0).unwrap(),
            Bytes::from_static(b"<sample model graph>"),
        )?;
        f1.write_tagged_run_metadata(
            &Tag("step0000".to_string()),
            Step(0),
            WallTime::new(1235.0).unwrap(),
            Bytes::from_static(b"<sample run metadata>"),
        )?;
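        // Write a string tensor under the histograms plugin; it should be committed
        // as a tensor time series.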
        f1.write_tensor(
            &Tag("weights".to_string()),
            Step(0),
            WallTime::new(1235.0).unwrap(),
            pb::TensorProto {
                dtype: pb::DataType::DtString.into(),
                string_val: vec![Bytes::from_static(b"foo")],
                ..Default::default()
            },
            pb::SummaryMetadata {
                plugin_data: Some(pb::summary_metadata::PluginData {
                    plugin_name: plugin_names::HISTOGRAMS.to_string(),
                    ..Default::default()
                }),
                ..Default::default()
            },
        )?;
        f1.write_scalar(&tag, Step(0), WallTime::new(1235.0).unwrap(), 0.25)?;
        f1.write_scalar(&tag, Step(1), WallTime::new(1236.0).unwrap(), 0.50)?;
        f1.write_scalar(&tag, Step(2), WallTime::new(1237.0).unwrap(), 0.75)?;
        f1.write_scalar(&tag, Step(3), WallTime::new(1238.0).unwrap(), 1.00)?;
        // Preempt! f2 restarts at step 2, so its values for steps 2 and 3 should
        // supersede f1's.
        f2.write_scalar(&tag, Step(2), WallTime::new(2346.0).unwrap(), 0.70)?;
        f2.write_scalar(&tag, Step(3), WallTime::new(2347.0).unwrap(), 0.85)?;
        f2.write_scalar(&tag, Step(4), WallTime::new(2348.0).unwrap(), 0.90)?;
        // flush, so that the data's there when we read it
        f1.into_inner()?.flush()?;
        f2.into_inner()?.flush()?;

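        // Reload both event files into a fresh commit entry for this run.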
        let mut loader = RunLoader::new(run.clone(), Arc::new(PluginSamplingHint::default()));
        let logdir = DiskLogdir::new(logdir.path().to_path_buf());
        let commit = Commit::new();
        commit
            .runs
            .write()
            .expect("write-locking runs map")
            .insert(run.clone(), Default::default());
        loader.reload(
            &logdir,
            vec![EventFileBuf(f1_name), EventFileBuf(f2_name)],
            &commit.runs.read().unwrap()[&run],
        );

        // Start time should be that of the file version event, even though that didn't correspond
        // to any time series.
        assert_eq!(loader.data.start_time, Some(WallTime::new(1234.0).unwrap()));

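        // Read the committed data back out for this run.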
        let runs = commit.runs.read().expect("read-locking runs map");
        let run_data: &commit::RunData = &*runs
            .get(&run)
            .expect("looking up data for run")
            .read()
            .expect("read-locking run data map");

        assert_eq!(run_data.scalars.keys().collect::<Vec<_>>(), vec![&tag]);
        let scalar_ts = run_data.scalars.get(&tag).unwrap();
        assert_eq!(
            *scalar_ts.metadata,
            pb::SummaryMetadata {
                plugin_data: Some(pb::summary_metadata::PluginData {
                    plugin_name: plugin_names::SCALARS.to_string(),
                    ..Default::default()
                }),
                data_class: pb::DataClass::Scalar.into(),
                ..Default::default()
            }
        );
        // Points should be as expected (no downsampling at these sizes); steps 2 and 3
        // carry f2's values, since the preempting file supersedes f1 from step 2 on.
        let scalar = commit::ScalarValue;
        assert_eq!(
            scalar_ts.valid_values().collect::<Vec<_>>(),
            vec![
                (Step(0), WallTime::new(1235.0).unwrap(), &scalar(0.25)),
                (Step(1), WallTime::new(1236.0).unwrap(), &scalar(0.50)),
                (Step(2), WallTime::new(2346.0).unwrap(), &scalar(0.70)),
                (Step(3), WallTime::new(2347.0).unwrap(), &scalar(0.85)),
                (Step(4), WallTime::new(2348.0).unwrap(), &scalar(0.90)),
            ]
        );

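        // The tensor series keeps the histograms plugin name, with the data class
        // filled in as Tensor.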
        assert_eq!(run_data.tensors.len(), 1);
        let tensor_ts = run_data.tensors.get(&Tag("weights".to_string())).unwrap();
        assert_eq!(
            *tensor_ts.metadata,
            pb::SummaryMetadata {
                plugin_data: Some(pb::summary_metadata::PluginData {
                    plugin_name: plugin_names::HISTOGRAMS.to_string(),
                    ..Default::default()
                }),
                data_class: pb::DataClass::Tensor.into(),
                ..Default::default()
            }
        );
        assert_eq!(
            tensor_ts.valid_values().collect::<Vec<_>>(),
            vec![(
                Step(0),
                WallTime::new(1235.0).unwrap(),
                &pb::TensorProto {
                    dtype: pb::DataType::DtString.into(),
                    string_val: vec![Bytes::from_static(b"foo")],
                    ..Default::default()
                }
            )]
        );

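        // Two blob-sequence series: the run-level graph and the tagged run metadata.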
        assert_eq!(run_data.blob_sequences.len(), 2);

        let run_graph_tag = Tag(GraphDefValue::TAG_NAME.to_string());
        let graph_ts = run_data.blob_sequences.get(&run_graph_tag).unwrap();
        assert_eq!(
            *graph_ts.metadata,
            pb::SummaryMetadata {
                plugin_data: Some(pb::summary_metadata::PluginData {
                    plugin_name: plugin_names::GRAPHS.to_string(),
                    ..Default::default()
                }),
                data_class: pb::DataClass::BlobSequence.into(),
                ..Default::default()
            }
        );
        assert_eq!(
            graph_ts.valid_values().collect::<Vec<_>>(),
            vec![(
                Step(0),
                WallTime::new(1235.0).unwrap(),
                &commit::BlobSequenceValue(vec![Bytes::from_static(b"<sample model graph>")])
            )]
        );

        let run_metadata_tag = Tag("step0000".to_string());
        let run_metadata_ts = run_data.blob_sequences.get(&run_metadata_tag).unwrap();
        assert_eq!(
            *run_metadata_ts.metadata,
            pb::SummaryMetadata {
                plugin_data: Some(pb::summary_metadata::PluginData {
                    plugin_name: plugin_names::GRAPH_TAGGED_RUN_METADATA.to_string(),
                    ..Default::default()
                }),
                data_class: pb::DataClass::BlobSequence.into(),
                ..Default::default()
            }
        );
        assert_eq!(
            run_metadata_ts.valid_values().collect::<Vec<_>>(),
            vec![(
                Step(0),
                WallTime::new(1235.0).unwrap(),
                &commit::BlobSequenceValue(vec![Bytes::from_static(b"<sample run metadata>")])
            )]
        );

        Ok(())
    }