in dev/breeze/src/airflow_breeze/params/shell_params.py [0:0]
def env_variables_for_docker_commands(self) -> dict[str, str]:
    """
    Build the environment variables needed by docker / docker-compose commands.

    The values are derived from the shell parameters currently set on this object.
    Deliberately NOT cached as a property: the same object can be used several times
    with modified shell params - for example first running "pull" on images before
    tests and then running tests - each separately with different test types.

    This is the single place to add environment variables if you want to pass them
    to docker or docker-compose.

    :return: dictionary of env variables to use for docker-compose and docker command
    """
    env: dict[str, str] = {}
    _set_var(env, "AIRFLOW_CI_IMAGE", self.airflow_image_name)
    _set_var(env, "AIRFLOW_CONSTRAINTS_LOCATION", self.airflow_constraints_location)
    _set_var(env, "AIRFLOW_CONSTRAINTS_MODE", self.airflow_constraints_mode)
    _set_var(env, "AIRFLOW_CONSTRAINTS_REFERENCE", self.airflow_constraints_reference)
    _set_var(env, "AIRFLOW_ENV", "development")
    _set_var(env, "AIRFLOW_EXTRAS", self.airflow_extras)
    _set_var(env, "AIRFLOW_SKIP_CONSTRAINTS", self.airflow_skip_constraints)
    _set_var(env, "AIRFLOW_IMAGE_KUBERNETES", self.airflow_image_kubernetes)
    _set_var(env, "AIRFLOW_VERSION", self.airflow_version)
    # Fresh random secrets are generated on every invocation.
    _set_var(env, "AIRFLOW__API_AUTH__JWT_SECRET", b64encode(os.urandom(16)).decode("utf-8"))
    _set_var(env, "AIRFLOW__CELERY__BROKER_URL", self.airflow_celery_broker_url)
    _set_var(env, "AIRFLOW__CORE__AUTH_MANAGER", self.auth_manager_path)
    _set_var(env, "AIRFLOW__CORE__EXECUTOR", self.executor)
    if self.auth_manager == SIMPLE_AUTH_MANAGER:
        _set_var(env, "AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_USERS", "admin:admin,viewer:viewer")
        _set_var(
            env,
            "AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_PASSWORDS_FILE",
            "/opt/airflow/dev/breeze/src/airflow_breeze/files/simple_auth_manager_passwords.json",
        )
    _set_var(env, "AIRFLOW__WEBSERVER__SECRET_KEY", b64encode(os.urandom(16)).decode("utf-8"))
    if self.executor == EDGE_EXECUTOR:
        # Overrides AIRFLOW__CORE__EXECUTOR set above - must stay after that call.
        _set_var(
            env, "AIRFLOW__CORE__EXECUTOR", "airflow.providers.edge3.executors.edge_executor.EdgeExecutor"
        )
        _set_var(env, "AIRFLOW__EDGE__API_ENABLED", "true")
        _set_var(
            env, "AIRFLOW__CORE__INTERNAL_API_SECRET_KEY", b64encode(os.urandom(16)).decode("utf-8")
        )
        # For testing Edge Worker on Windows... Default Run ID is having a colon (":") from the time
        # which is made into the log path template, which then fails to be used in Windows. So we
        # replace it with a dash.
        _set_var(
            env,
            "AIRFLOW__LOGGING__LOG_FILENAME_TEMPLATE",
            "dag_id={{ ti.dag_id }}/run_id={{ ti.run_id|replace(':', '-') }}/task_id={{ ti.task_id }}/"
            "{% if ti.map_index >= 0 %}map_index={{ ti.map_index }}/{% endif %}"
            "attempt={{ try_number|default(ti.try_number) }}.log",
        )
        # API server listens on the default webserver port (8080).
        _set_var(env, "AIRFLOW__EDGE__API_URL", f"http://localhost:{8080}/edge_worker/v1/rpcapi")
    _set_var(env, "ANSWER", get_forced_answer() or "")
    _set_var(env, "BACKEND", self.backend)
    _set_var(env, "BASE_BRANCH", self.base_branch, "main")
    _set_var(env, "BREEZE", "true")
    _set_var(env, "BREEZE_INIT_COMMAND", None, "")
    _set_var(env, "CELERY_BROKER_URLS_MAP", CELERY_BROKER_URLS_MAP)
    _set_var(env, "CELERY_FLOWER", self.celery_flower)
    _set_var(env, "CLEAN_AIRFLOW_INSTALLATION", self.clean_airflow_installation)
    _set_var(env, "CI", None, "false")
    _set_var(env, "CI_BUILD_ID", None, "0")
    _set_var(env, "CI_EVENT_TYPE", None, GithubEvents.PULL_REQUEST.value)
    _set_var(env, "CI_JOB_ID", None, "0")
    _set_var(env, "CI_TARGET_BRANCH", self.airflow_branch)
    _set_var(env, "CI_TARGET_REPO", self.github_repository)
    _set_var(env, "COLLECT_ONLY", self.collect_only)
    _set_var(env, "COMMIT_SHA", None, commit_sha())
    _set_var(env, "COMPOSE_FILE", self.compose_file)
    _set_var(env, "DB_RESET", self.db_reset)
    _set_var(env, "DEFAULT_BRANCH", self.airflow_branch)
    _set_var(env, "DEFAULT_CONSTRAINTS_BRANCH", self.default_constraints_branch)
    _set_var(env, "DEV_MODE", self.dev_mode)
    _set_var(env, "DOCKER_IS_ROOTLESS", self.rootless_docker)
    _set_var(env, "DOWNGRADE_SQLALCHEMY", self.downgrade_sqlalchemy)
    _set_var(env, "DOWNGRADE_PENDULUM", self.downgrade_pendulum)
    _set_var(env, "DRILL_HOST_PORT", None, DRILL_HOST_PORT)
    _set_var(env, "ENABLE_COVERAGE", self.enable_coverage)
    _set_var(env, "FLOWER_HOST_PORT", None, FLOWER_HOST_PORT)
    _set_var(env, "GREMLIN_HOST_PORT", None, GREMLIN_HOST_PORT)
    _set_var(env, "EXCLUDED_PROVIDERS", self.excluded_providers)
    _set_var(env, "FORCE_LOWEST_DEPENDENCIES", self.force_lowest_dependencies)
    _set_var(env, "SQLALCHEMY_WARN_20", self.force_sa_warnings)
    _set_var(env, "GITHUB_ACTIONS", self.github_actions)
    _set_var(env, "HOST_GROUP_ID", self.host_group_id)
    _set_var(env, "HOST_OS", self.host_os)
    _set_var(env, "HOST_USER_ID", self.host_user_id)
    _set_var(env, "INIT_SCRIPT_FILE", None, "init.sh")
    _set_var(env, "INSTALL_AIRFLOW_WITH_CONSTRAINTS", self.install_airflow_with_constraints)
    _set_var(env, "INSTALL_AIRFLOW_PYTHON_CLIENT", self.install_airflow_python_client)
    _set_var(env, "INSTALL_AIRFLOW_VERSION", self.install_airflow_version)
    _set_var(env, "INSTALL_SELECTED_PROVIDERS", self.install_selected_providers)
    _set_var(env, "ISSUE_ID", self.issue_id)
    _set_var(env, "LOAD_DEFAULT_CONNECTIONS", self.load_default_connections)
    _set_var(env, "LOAD_EXAMPLES", self.load_example_dags)
    _set_var(env, "MSSQL_HOST_PORT", None, MSSQL_HOST_PORT)
    _set_var(env, "MYSQL_HOST_PORT", None, MYSQL_HOST_PORT)
    _set_var(env, "MYSQL_VERSION", self.mysql_version)
    _set_var(env, "MOUNT_SOURCES", self.mount_sources)
    _set_var(env, "NUM_RUNS", self.num_runs)
    _set_var(env, "ONLY_MIN_VERSION_UPDATE", self.only_min_version_update)
    _set_var(env, "DISTRIBUTION_FORMAT", self.distribution_format)
    _set_var(env, "POSTGRES_HOST_PORT", None, POSTGRES_HOST_PORT)
    _set_var(env, "POSTGRES_VERSION", self.postgres_version)
    _set_var(env, "PROVIDERS_CONSTRAINTS_LOCATION", self.providers_constraints_location)
    _set_var(env, "PROVIDERS_CONSTRAINTS_MODE", self.providers_constraints_mode)
    _set_var(env, "PROVIDERS_CONSTRAINTS_REFERENCE", self.providers_constraints_reference)
    _set_var(env, "PROVIDERS_SKIP_CONSTRAINTS", self.providers_skip_constraints)
    _set_var(env, "PYTHONDONTWRITEBYTECODE", "true")
    _set_var(env, "PYTHONWARNINGS", None, None)
    _set_var(env, "PYTHON_MAJOR_MINOR_VERSION", self.python)
    _set_var(env, "QUIET", self.quiet)
    _set_var(env, "REDIS_HOST_PORT", None, REDIS_HOST_PORT)
    _set_var(env, "REGENERATE_MISSING_DOCS", self.regenerate_missing_docs)
    _set_var(env, "RUN_TESTS", self.run_tests)
    _set_var(env, "SKIP_ENVIRONMENT_INITIALIZATION", self.skip_environment_initialization)
    _set_var(env, "SKIP_SSH_SETUP", self.skip_ssh_setup)
    _set_var(env, "SQLITE_URL", self.sqlite_url)
    _set_var(env, "SSH_PORT", None, SSH_PORT)
    _set_var(env, "STANDALONE_DAG_PROCESSOR", self.standalone_dag_processor)
    _set_var(env, "START_AIRFLOW", self.start_airflow)
    _set_var(env, "SUSPENDED_PROVIDERS_FOLDERS", self.suspended_providers_folders)
    _set_var(env, "START_API_SERVER_WITH_EXAMPLES", self.start_api_server_with_examples)
    _set_var(env, "SYSTEM_TESTS_ENV_ID", None, "")
    _set_var(env, "TEST_TYPE", self.test_type, "")
    _set_var(env, "TEST_GROUP", str(self.test_group.value) if self.test_group else "")
    _set_var(env, "UPGRADE_BOTO", self.upgrade_boto)
    _set_var(env, "USE_AIRFLOW_VERSION", self.use_airflow_version, "")
    _set_var(env, "USE_DISTRIBUTIONS_FROM_DIST", self.use_distributions_from_dist)
    _set_var(env, "USE_UV", self.use_uv)
    _set_var(env, "USE_XDIST", self.use_xdist)
    _set_var(env, "VERBOSE", get_verbose())
    _set_var(env, "VERBOSE_COMMANDS", self.verbose_commands)
    _set_var(env, "VERSION_SUFFIX", self.version_suffix)
    _set_var(env, "WEB_HOST_PORT", None, WEB_HOST_PORT)
    _set_var(env, "_AIRFLOW_RUN_DB_TESTS_ONLY", self.run_db_tests_only)
    _set_var(env, "_AIRFLOW_SKIP_DB_TESTS", self.skip_db_tests)
    self._generate_env_for_docker_compose_file_if_needed(env)
    # Start from the current process environment; computed variables take precedence.
    merged: dict[str, str] = os.environ.copy()
    merged.update(env)
    return merged