in src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py [0:0]
def main():
    """Build pipeline inputs for every pipeline defined in the deployment map.

    Resolves shared resources (parameter store, S3 bucket, deployment map),
    assumes a read-only role in the management account to query Organizations,
    ensures the cross-account event bus and per-source-account event rules
    exist, then fans out one worker thread per pipeline definition.
    """
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)
    _create_inputs_folder()

    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, SHARED_MODULES_BUCKET)
    deployment_map = DeploymentMap(parameter_store, s3, ADF_PIPELINE_PREFIX)

    # Assume the read-only flavor of the cross-account role in the
    # management account so we can query AWS Organizations.
    sts = STS()
    partition = get_partition(DEPLOYMENT_ACCOUNT_REGION)
    cross_account_access_role = parameter_store.fetch_parameter(
        'cross_account_access_role'
    )
    readonly_role_arn = (
        f'arn:{partition}:iam::{MASTER_ACCOUNT_ID}:role/'
        f'{cross_account_access_role}-readonly'
    )
    role = sts.assume_cross_account_role(readonly_role_arn, 'pipeline')
    organizations = Organizations(role)

    ensure_event_bus_status(ORGANIZATION_ID)

    # Repository auto-creation defaults to enabled when the parameter
    # has never been set.
    try:
        auto_create_repositories = parameter_store.fetch_parameter(
            'auto_create_repositories'
        )
    except ParameterNotFoundError:
        auto_create_repositories = 'enabled'

    worker_threads = []
    # Tracks source accounts whose event rule was already created/updated,
    # so each account is only processed once per run.
    rule_cache = Cache()
    for pipeline_def in deployment_map.map_contents.get('pipelines', []):
        source_account_id = (
            pipeline_def
            .get('default_providers', {})
            .get('source', {})
            .get('properties', {})
            .get('account_id', {})
        )
        # Only cross-account sources need an event rule, and only once
        # per source account.
        needs_rule = (
            source_account_id
            and int(source_account_id) != int(DEPLOYMENT_ACCOUNT_ID)
            and not rule_cache.check(source_account_id)
        )
        if needs_rule:
            Rule(source_account_id).create_update()
            rule_cache.add(source_account_id, True)

        worker = PropagatingThread(
            target=worker_thread,
            args=(
                pipeline_def,
                organizations,
                auto_create_repositories,
                deployment_map,
                parameter_store,
            ),
        )
        worker.start()
        worker_threads.append(worker)

    # PropagatingThread re-raises worker exceptions on join.
    for worker in worker_threads:
        worker.join()