in benchmark/benchmark/logs.py
def _parse_primaries(self, log):
    ''' Parse a single primary's log: extract proposal and commit
    timestamps, the node's configuration, and its IP address. '''
    # Fail fast if the node crashed or logged an error.
    if search(r'(?:panicked|Error)', log) is not None:
        raise ParseError('Primary(s) panicked')

    # 'Created B...' entries: proposal timestamp of each block, keyed by digest.
    tmp = findall(r'\[(.*Z) .* Created B\d+\([^ ]+\) -> ([^ ]+=)', log)
    tmp = [(d, self._to_posix(t)) for t, d in tmp]
    proposals = self._merge_results([tmp])

    # 'Committed B...' entries: commit timestamp of each block, keyed by digest.
    tmp = findall(r'\[(.*Z) .* Committed B\d+\([^ ]+\) -> ([^ ]+=)', log)
    tmp = [(d, self._to_posix(t)) for t, d in tmp]
    commits = self._merge_results([tmp])

    # Configuration parameters echoed by the node at start-up.
    configs = {
        'header_size': int(
            search(r'Header size .* (\d+)', log).group(1)
        ),
        'max_header_delay': int(
            search(r'Max header delay .* (\d+)', log).group(1)
        ),
        'gc_depth': int(
            search(r'Garbage collection depth .* (\d+)', log).group(1)
        ),
        'sync_retry_delay': int(
            search(r'Sync retry delay .* (\d+)', log).group(1)
        ),
        'sync_retry_nodes': int(
            search(r'Sync retry nodes .* (\d+)', log).group(1)
        ),
        'batch_size': int(
            search(r'Batch size .* (\d+)', log).group(1)
        ),
        'max_batch_delay': int(
            search(r'Max batch delay .* (\d+)', log).group(1)
        ),
    }
    # IP address the node reported at boot.
    ip = search(r'booted on (\d+\.\d+\.\d+\.\d+)', log).group(1)

    return proposals, commits, configs, ip
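
# The method above relies on module-level imports from `re`, on a ParseError
# exception, and on two helpers of the same parser class, `_to_posix` and
# `_merge_results`, all defined elsewhere in logs.py. Below is a minimal
# sketch of their assumed behaviour; the class name, signatures, and exact
# logic are assumptions for illustration, not the repository's verbatim code.

from datetime import datetime
from re import findall, search  # used by _parse_primaries above


class ParseError(Exception):
    ''' Raised when a node log cannot be parsed. '''


class LogParser:
    # ... _parse_primaries(self, log) as above ...

    def _to_posix(self, string):
        # Assumed: `string` is an ISO-8601 timestamp ending in 'Z';
        # convert it to a POSIX timestamp (float) for easy arithmetic.
        x = datetime.fromisoformat(string.replace('Z', '+00:00'))
        return datetime.timestamp(x)

    def _merge_results(self, input):
        # Assumed: `input` is a list of lists of (key, timestamp) pairs;
        # keep only the earliest timestamp observed for each key.
        merged = {}
        for entries in input:
            for k, v in entries:
                if k not in merged or merged[k] > v:
                    merged[k] = v
        return merged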