in perfmetrics/scripts/fio/fio_metrics.py [0:0]
def _extract_metrics(self, fio_out) -> List[Dict[str, Any]]:
  """Extracts and returns required metrics from fio output dict.

  The extracted metrics are stored in a list. Each entry in the list is a
  dictionary. Each dictionary stores the following fio metrics related
  to a particular job:
      filesize, number of threads, IOPS, Bandwidth and latency (min,
      max and mean)

  Args:
    fio_out: JSON object representing the fio output

  Returns:
    List of dicts, contains list of jobs and required parameters and metrics
    for each job
    Example return value:
      [{'params': {'filesize': 50000, 'num_threads': 40, 'rw': 'read'},
      'start_time': 1653027084, 'end_time': 1653027155, 'metrics':
      {'iops': 95.26093, 'bw_bytes': 99888324, 'io_bytes': 6040846336,
      'lat_s_mean': 0.41775487677469203, 'lat_s_min': 0.35337776000000004,
      'lat_s_max': 1.6975198690000002, 'lat_s_perc_20': 0.37958451200000004,
      'lat_s_perc_50': 0.38797312, 'lat_s_perc_90': 0.49283072000000006,
      'lat_s_perc_95': 0.526385152}}]

  Raises:
    NoValuesError: Data not present in json object or key in LEVELS is not
      present in FIO output
  """
  if not fio_out:
    raise NoValuesError('No data in json object')

  job_params = self._get_job_params(fio_out)
  start_end_times = self._get_start_end_times(fio_out, job_params)
  all_jobs = []
  # Get the required metrics for every job
  for i, job in enumerate(fio_out[consts.JOBS]):
    rw = job_params[i][consts.RW]
    job_rw = job[_get_rw(rw)]
    job_metrics = {}
    for metric in REQ_JOB_METRICS:
      val = job_rw
      # Walk one level of the nested fio output dict per iteration.
      # For metric.levels = ['lat_ns', 'percentile', '20.000000']:
      #   after the 1st iteration, val = job_rw['lat_ns']
      #   after the 2nd, val = job_rw['lat_ns']['percentile']
      #   after the 3rd, val = job_rw['lat_ns']['percentile']['20.000000'],
      # which is the required metric value.
      for sub in metric.levels:
        if sub not in val:
          # Abort the whole extraction: a missing key means the fio output
          # does not have the expected structure.
          raise NoValuesError(
              f'Required metric {sub} not present in json output')
        val = val[sub]
      # Apply the unit conversion factor (e.g. ns -> s) for this metric.
      job_metrics[metric.name] = val * metric.conversion

    start_time_s, end_time_s = start_end_times[i]
    # start_time >= end_time OR all the metrics are zero:
    # log skip warning and continue to next job
    if (start_time_s >= end_time_s or
        all(not value for value in job_metrics.values())):
      # TODO(ahanadatta): Print statement will be replaced by logging.
      print(f'No job metrics in json, skipping job index {i}')
      continue

    all_jobs.append({
        consts.PARAMS: job_params[i],
        consts.START_TIME: start_time_s,
        consts.END_TIME: end_time_s,
        consts.METRICS: job_metrics
    })

  if not all_jobs:
    raise NoValuesError('No data could be extracted from file')
  return all_jobs