in elkserver/docker/redelk-base/redelkinstalldata/scripts/daemon.py [0:0]
def process_alarms(connector_dict, alarm_dict):
    """Process the alarm results and send notifications via connector modules.

    For every enabled alarm that ran successfully: enrich each hit with its
    mutation data, tag the documents with the alarm name, group the hits as
    requested by the alarm, and hand the grouped result to every enabled
    connector module for notification.

    Args:
        connector_dict: mapping of connector name -> {'m': module, ...};
            the module exposes a Module class with a send_alarm(result) method.
        alarm_dict: mapping of alarm name -> {'status': str, 'result': dict,
            'info': {'submodule': str, ...}}; 'result' is an Elasticsearch-style
            response with 'hits', 'mutations' and 'groupby' keys.

    Returns:
        None. Side effects: documents are tagged via set_tags() and
        notifications are sent via the connector modules.
    """
    logger.info("Processing alarms")
    # Loop over the alarm results and process each enabled alarm.
    for alarm, alarm_data in alarm_dict.items():
        if alarm not in alarms or not alarms[alarm]["enabled"]:
            continue
        alarm_status = alarm_data["status"]
        # If the alarm failed to run, skip notification and tagging as we
        # are not sure of the results.
        if alarm_status == "error":
            logger.warning(
                "Alarm %s did not run correctly, skipping processing (status: %s)",
                alarm,
                alarm_status,
            )
            continue
        if alarm_status == "did_not_run":
            logger.debug(
                "Alarm %s did not run (this was expected), skipping processing (status: %s)",
                alarm,
                alarm_status,
            )
            continue
        if alarm_status == "unknown":
            logger.warning(
                "Alarm %s returned an unknown status (this should never happen), skipping processing (status: %s)",
                alarm,
                alarm_status,
            )
            continue
        logger.debug("Alarm %s enabled, processing hits", alarm)
        result = alarm_data["result"]
        alarm_name = alarm_data["info"]["submodule"]
        for result_hits in result["hits"]["hits"]:
            logger.debug(result_hits)
            # Fetch mutation data for this doc, if any (single lookup via .get).
            mutations = result["mutations"].get(result_hits["_id"], {})
            # Add mutation data to the doc and update the hit.
            # NOTE(review): the return value rebinds only the loop variable;
            # this relies on add_alarm_data mutating the hit in place — confirm.
            result_hits = add_alarm_data(result_hits, mutations, alarm_name)
        # Tag the docs with the alarm name.
        set_tags(alarm_name, result["hits"]["hits"])
        logger.debug(
            "calling settags %s (%d hits)", alarm_name, result["hits"]["total"]
        )
        # Deep copy needed as group_hits will change result['hits']['hits']
        # and different alarms might do different grouping.
        result = copy.deepcopy(alarm_data["result"])
        if result["hits"]["total"] > 0:
            # Group the hits before sending to the connectors, based on the
            # 'groupby' list returned by the alarm.
            group_by = list(result["groupby"])
            result["hits"]["hits"] = group_hits(result["hits"]["hits"], group_by)
            for connector in connector_dict:
                # The connector processes ['hits']['hits'], a list of
                # ES-style docs, and reports the fields listed in
                # ['hits']['fields'] for each doc.
                if connector in notifications and notifications[connector]["enabled"]:
                    connector_mod = connector_dict[connector]["m"].Module()
                    logger.info(
                        "connector %s enabled, sending alarm (%d hits)",
                        connector,
                        result["hits"]["total"],
                    )
                    connector_mod.send_alarm(result)