def worker_thread()

in src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py


def worker_thread(p, organizations, auto_create_repositories, deployment_map, parameter_store):
    """Generate and store the pipeline input for a single deployment map entry."""
    LOGGER.debug("Worker Thread started for %s", p.get('name'))
    pipeline = Pipeline(p)
    if auto_create_repositories == 'enabled':
        source_props = p.get('default_providers', {}).get('source', {}).get('properties', {})
        code_account_id = source_props.get('account_id')
        has_custom_repo = source_props.get('repository')
        # Only create or update a repository when the source account id is a
        # valid account number and no custom repository was configured.
        if code_account_id and str(code_account_id).isdigit() and not has_custom_repo:
            repo = Repo(code_account_id, p.get('name'), p.get('description'))
            repo.create_update()

    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            # A step may define its own regions; otherwise fall back to the
            # pipeline-level regions or the deployment account region.
            regions = step.get(
                'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
            paths_tags = []
            for path in step.get('path', []):
                paths_tags.append(path)
            if step.get('tags') is not None:
                paths_tags.append(step.get('tags', {}))
            for path_or_tag in paths_tags:
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(
                    path_or_tag, target_structure, organizations, step, regions)
                pipeline_target.fetch_accounts_for_target()

            pipeline.template_dictionary["targets"].append(
                target_structure.generate_waves())

    # Make sure the deployment account region is always part of the stage regions.
    if DEPLOYMENT_ACCOUNT_REGION not in regions:
        pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
    pipeline.generate_input()
    ssm_params = fetch_required_ssm_params(
        pipeline.input["regions"] or [DEPLOYMENT_ACCOUNT_REGION]
    )
    deployment_map.update_deployment_parameters(pipeline)
    store_regional_parameter_config(pipeline, parameter_store)
    # Persist the generated input and the fetched SSM parameters so they can
    # be read back from the cdk_inputs/ directory later.
    with open(f'cdk_inputs/{pipeline.input["name"]}.json', mode='w', encoding='utf-8') as outfile:
        data = {}
        data['input'] = pipeline.input
        data['input']['default_scm_branch'] = ssm_params.get('default_scm_branch')
        data['ssm_params'] = ssm_params
        json.dump(data, outfile)
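
For context, this function is meant to process one deployment map entry per thread. The following is a minimal caller sketch using the standard library threading module; the deployment_map.map_contents structure and the surrounding organizations, parameter_store and auto_create_repositories objects are assumptions here, not taken from this file.

import threading

# Hypothetical fan-out: one worker thread per deployment map entry. The
# deployment_map.map_contents structure and the other arguments are assumed
# to be prepared by the surrounding module.
threads = []
for entry in deployment_map.map_contents.get('pipelines', []):
    thread = threading.Thread(
        target=worker_thread,
        args=(entry, organizations, auto_create_repositories,
              deployment_map, parameter_store),
    )
    thread.start()
    threads.append(thread)

# Wait for every thread to finish writing its cdk_inputs/<name>.json file.
for thread in threads:
    thread.join()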