optimum_benchmark/trackers/latency.py [270:286]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if self.is_pytorch_cuda:
            # CUDA event timings can only be read after a device synchronization;
            # elapsed_time returns milliseconds, so divide by 1e3 to get seconds.
            torch.cuda.synchronize()
            latencies = [
                start_event.elapsed_time(end_event) / 1e3
                for start_event, end_event in zip(self.start_events, self.end_events)
            ]
        else:
            # On other backends the events are host-side timestamps (in seconds),
            # so the latency is simply their difference.
            latencies = [
                (end_event - start_event) for start_event, end_event in zip(self.start_events, self.end_events)
            ]

        assert all(latency >= 0 for latency in latencies), (
            "Found some negative latencies while performing subtraction. "
            "Please increase the dimensions of your benchmark or the number of warmup runs."
        )

        return Latency.from_values(latencies, unit=LATENCY_UNIT)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
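
For context, the two branches above correspond to two timing strategies: pairs of CUDA events that are resolved on the device after a synchronize, and host-side timestamp subtraction. The standalone sketch below reproduces that pattern outside the tracker; the function name measure_latencies and the dummy workload are illustrative only and not part of optimum-benchmark's API.

import time
import torch

def measure_latencies(fn, iterations=10, use_cuda=None):
    # Assumption: fn is a zero-argument callable representing one benchmarked step.
    if use_cuda is None:
        use_cuda = torch.cuda.is_available()
    if use_cuda:
        # Record a pair of CUDA events around each call; readings are only valid
        # after torch.cuda.synchronize(), and elapsed_time is in milliseconds.
        starts = [torch.cuda.Event(enable_timing=True) for _ in range(iterations)]
        ends = [torch.cuda.Event(enable_timing=True) for _ in range(iterations)]
        for start, end in zip(starts, ends):
            start.record()
            fn()
            end.record()
        torch.cuda.synchronize()
        return [start.elapsed_time(end) / 1e3 for start, end in zip(starts, ends)]
    # Host-side fallback: perf_counter timestamps, already in seconds.
    latencies = []
    for _ in range(iterations):
        start = time.perf_counter()
        fn()
        latencies.append(time.perf_counter() - start)
    return latencies

# Example usage: time a small matrix multiplication on whatever device is available.
x = torch.randn(256, 256, device="cuda" if torch.cuda.is_available() else "cpu")
print(measure_latencies(lambda: x @ x, iterations=5))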



