in source/msam/chalicelib/connections.py [0:0]
def mediaconnect_flow_medialive_input_ddb_items():
    """
    Identify and format MediaConnect Flow to MediaLive Input connections for cache storage.

    Returns:
        list: DynamoDB items (built via connection_to_ddb_item), one per
        discovered Flow -> Input connection.

    A connection is found either via an explicit MediaLiveInputArn on the
    flow output, or by matching the output's Destination IP against the
    destination IPs of cached MediaLive inputs. Best-effort: failures on
    individual records are printed and skipped so one malformed record
    does not abort the whole scan.
    """
    items = []
    connection_type = "mediaconnect-flow-medialive-input"
    # fetched lazily, at most once, only if some flow output needs IP matching
    medialive_inputs = None
    try:
        # process each cached MediaConnect flow
        for flow in cache.cached_by_service("mediaconnect-flow"):
            flow_data = json.loads(flow["data"])
            # for each flow, process each output
            for flow_output in flow_data["Outputs"]:
                try:
                    # prefer the explicit MediaLive input ARN when present
                    medialive_input_arn = flow_output.get("MediaLiveInputArn")
                    if medialive_input_arn:
                        config = {
                            "from": flow_data["FlowArn"],
                            "to": medialive_input_arn,
                            "scheme": "MEDIACONNECT"
                        }
                        print(config)
                        items.append(
                            connection_to_ddb_item(flow_data["FlowArn"],
                                                   medialive_input_arn,
                                                   connection_type, config))
                        continue
                    # no ARN (or an empty one): fall back to matching the
                    # output's Destination IP to a MediaLive input IP
                    destination_ip = flow_output.get("Destination")
                    if not destination_ip:
                        continue
                    if medialive_inputs is None:
                        # hoisted out of the per-output work: fetch once
                        medialive_inputs = cache.cached_by_service(
                            "medialive-input")
                    for ml_input in medialive_inputs:
                        ml_input_data = json.loads(ml_input["data"])
                        # there are typically 2 ip addresses in ml_input;
                        # stop at the first input that matches
                        if any(destination.get("Ip") == destination_ip
                               for destination in
                               ml_input_data["Destinations"]):
                            config = {
                                "from": flow["arn"],
                                "to": ml_input["arn"],
                                "scheme": ml_input_data["Type"]
                            }
                            print(config)
                            items.append(
                                connection_to_ddb_item(flow["arn"],
                                                       ml_input["arn"],
                                                       connection_type,
                                                       config))
                            break
                except Exception as error:
                    # best-effort: skip this output on malformed data
                    print(error)
    except ClientError as error:
        print(error)
    return items