in dataflux_core/performance_tests/list_and_download.py [0:0]
def test_list_and_download_segmented(self):
    """List the dataset, then download it in roughly 50 GB segments.

    Segmenting the download avoids OOM errors when the dataset size
    exceeds the memory of the VM.

    Raises:
        AssertionError: if ``expected_total_size`` is configured (non-zero)
            and the total bytes downloaded across all segments differ from it.
    """
    config = self.get_config()
    list_result = self.run_list(config)
    # Round up so each segment stays at or under FIFTY_GB, and clamp to at
    # least 1 so a small or zero expected_total_size (zero is a valid config
    # value per the truthiness check below) cannot cause a ZeroDivisionError
    # in the segment_size computation.
    num_segments = max(1, ceil(config["expected_total_size"] / FIFTY_GB))
    # Clamp to at least 1 so range() below never receives a step of 0 when
    # expected_file_count is 0.
    segment_size = max(1, ceil(config["expected_file_count"] / num_segments))
    segments = [
        list_result[i:i + segment_size]
        for i in range(0, len(list_result), segment_size)
    ]
    total_size = 0
    for seg in segments:
        total_size += self.run_download(config, seg)
    if (config["expected_total_size"]
            and total_size != config["expected_total_size"]):
        raise AssertionError(
            f"Expected {config['expected_total_size']} bytes but got {total_size} bytes"
        )