in src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/resolver.py [0:0]
def fetch_stack_output(self, value, key, optional=False): # pylint: disable=too-many-statements
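    """
    Resolve an import:account_id:region:stack_name:output_key string into
    the matching CloudFormation stack output and write the value into the
    stage parameters under ``key``.

    A trailing '?' on the output key marks the import as optional: a
    missing stack or output then resolves to an empty string instead of
    raising. Returns True once the value has been processed.
    """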
partition = get_partition(DEFAULT_REGION)
try:
[_, account_id, region, stack_name, output_key] = str(value).split(':')
except ValueError as error:
raise ValueError(
f"{value} is not a valid import string. Syntax should be "
"import:account_id:region:stack_name:output_key"
) from error
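    # A trailing '?' on the output key marks this import as optional.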
if Resolver._is_optional(output_key):
LOGGER.info("Parameter %s is considered optional", output_key)
        optional = True
        output_key = output_key[:-1]
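    # Read the output from the target account through the ADF read-only
    # automation role; resolved values are cached per import string.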
try:
role = self.sts.assume_cross_account_role(
f'arn:{partition}:iam::{account_id}:role/adf-readonly-automation-role',
'importer'
)
cloudformation = CloudFormation(
region=region,
deployment_account_region=os.environ["AWS_REGION"],
role=role,
stack_name=stack_name,
account_id=account_id
)
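        # Reuse a previously resolved value for this import string if one
        # is cached; otherwise look the output up on the stack.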
stack_output = self.cache.check(value) or cloudformation.get_stack_output(output_key)
if stack_output:
LOGGER.info("Stack output value is %s", stack_output)
self.cache.add(value, stack_output)
except ClientError:
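        # Missing stacks or outputs are only tolerated for optional imports.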
if not optional:
raise
stack_output = ""
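    # Work out where in the parameter file structure this key lives so the
    # resolved value is written back in the right place.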
try:
        parent_key = list(
            Resolver.determine_parent_key(self.comparison_parameters, key)
        )[0]
        if not optional and not stack_output:
            raise Exception(
                f"No Stack Output found on {account_id} in {region} "
                f"with stack name {stack_name} and "
                f"output key {output_key}"
            )
        self.stage_parameters[parent_key][key] = stack_output
except IndexError as error:
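        # No parent key was found, so the parameter sits at the top level
        # of the parameter file.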
if stack_output:
if self.stage_parameters.get(key):
self.stage_parameters[key] = stack_output
else:
raise Exception(
"Could not determine the structure of the file in order "
"to import from CloudFormation",
) from error
return True
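

# --- Illustrative usage (a sketch, not part of the module) ------------------
# The helper below is hypothetical; it only demonstrates the import string
# format fetch_stack_output expects and how the optional '?' marker is read.
# The account id, region, stack name and output key are made-up examples,
# and nothing here calls AWS.
def _example_parse_import_string():
    # import:<account_id>:<region>:<stack_name>:<output_key>[?]
    value = "import:111111111111:eu-west-1:sample-vpc-stack:VpcId?"
    _, account_id, region, stack_name, output_key = value.split(':')
    optional = output_key.endswith('?')
    output_key = output_key[:-1] if optional else output_key
    return account_id, region, stack_name, output_key, optional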