in src/modules/log_parser.py [0:0]
import json
from datetime import datetime

# Excerpt context: this is a method of the log-parser class in this module;
# TestStatus is an enum (with at least a SUCCESS member) defined elsewhere
# in the package, so its import is not repeated here.


def merge_logs(self) -> None:
    """
    Merge the log entries collected in self.logs into a single,
    chronologically sorted list and store it as a JSON string under
    self.result["filtered_logs"].
    """
    try:
        all_logs = []
        parsed_logs = []
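        # Nothing to merge: record an empty result and report success.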
        if not self.logs:
            self.result.update(
                {
                    "filtered_logs": json.dumps([]),
                    "status": TestStatus.SUCCESS.value,
                    "message": "No logs provided to merge",
                }
            )
            return
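        # Each entry in self.logs may be a JSON-encoded list of log lines or
        # an already-parsed iterable of lines; normalize both into parsed_logs.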
        for logs in self.logs:
            if isinstance(logs, str):
                try:
                    parsed = json.loads(logs)
                    # json.loads can return a scalar (e.g. a number or a bare
                    # string); extending with one would splice in characters
                    # or raise, so only extend with actual lists.
                    if isinstance(parsed, list):
                        parsed_logs.extend(parsed)
                    else:
                        parsed_logs.append(logs)
                except json.JSONDecodeError:
                    parsed_logs.append(logs)
            else:
                parsed_logs.extend(logs)
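        # Pair every line with a parsed timestamp so the merged output can be
        # sorted chronologically; lines without a usable timestamp sort first
        # via datetime.min.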
        for log in parsed_logs:
            try:
                if self.ansible_os_family == "REDHAT":
                    # Traditional syslog prefix, e.g. "Jan 01 12:00:00 host ...".
                    # The format carries no year, so assume the current one.
                    timestamp_str = " ".join(log.split()[:3])
                    log_time = datetime.strptime(timestamp_str, "%b %d %H:%M:%S")
                    log_time = log_time.replace(year=datetime.now().year)
                    all_logs.append((log_time, log))
                elif self.ansible_os_family == "SUSE":
                    # ISO 8601 prefix, e.g. "2024-01-01T12:00:00.123456+00:00 ...";
                    # split on "." to drop fractional seconds and the timezone.
                    timestamp_str = log.split(".")[0]
                    log_time = datetime.strptime(timestamp_str, "%Y-%m-%dT%H:%M:%S")
                    all_logs.append((log_time, log))
                else:
                    all_logs.append((datetime.min, log))
            except (ValueError, IndexError):
                all_logs.append((datetime.min, log))
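        # Sort on the timestamp only, then discard it so the result holds the
        # original log lines in chronological order.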
        sorted_logs = [log for _, log in sorted(all_logs, key=lambda x: x[0])]
        self.result.update(
            {
                "filtered_logs": json.dumps(sorted_logs),
                "status": TestStatus.SUCCESS.value,
            }
        )
    except Exception as ex:
        self.handle_error(ex)
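

# Hedged usage sketch, not part of the module: it assumes a parser class that
# exposes `logs`, `ansible_os_family`, and `result` and binds this method; the
# class name and constructor signature below are hypothetical.
#
#     parser = LogParser(
#         ansible_os_family="SUSE",
#         logs=['["2024-05-01T10:00:00.000000+00:00 host systemd[1]: Started."]'],
#     )
#     parser.merge_logs()
#     merged = json.loads(parser.result["filtered_logs"])
#     # merged is a list of log lines sorted by their leading timestamp.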