Excerpt: the `load()` method of the `Context` class, from src/buildstream/_context.py.


    def load(self, config: Optional[str] = None) -> None:
        """Load user configuration and populate this context's settings.

        Loads the built-in default configuration, overlays the user's
        configuration file on top of it, validates the result, expands and
        checks all configured directories, performs one-time on-disk cache
        migrations, and finally parses every configuration section
        (cache/quota, artifacts, source caches, remote execution, logging,
        scheduler, build, fetch, track, and per-project overrides) into
        attributes on ``self``.

        Args:
            config: Path to a user configuration file. If ``None``, a
                default file is searched for under ``$XDG_CONFIG_HOME``.

        Raises:
            LoadError: If the configuration contains obsolete keys, invalid
                or relative paths, or unparseable size/percentage values.
        """
        # If a specific config file is not specified, default to trying
        # a $XDG_CONFIG_HOME/buildstream.conf file
        #
        if not config:
            #
            # Support parallel installations of BuildStream by first
            # trying a (major point) version specific configuration file
            # and then falling back to buildstream.conf.
            #
            for config_filename in ("buildstream2.conf", "buildstream.conf"):
                # NOTE(review): assumes XDG_CONFIG_HOME is always present in
                # the environment (KeyError otherwise) — presumably guaranteed
                # by earlier platform setup; verify.
                default_config = os.path.join(os.environ["XDG_CONFIG_HOME"], config_filename)
                if os.path.exists(default_config):
                    config = default_config
                    break

        # Load default config
        #
        defaults = _yaml.load(_site.default_user_config, shortname="userconfig.yaml")

        if config:
            self.config_origin = os.path.abspath(config)

            # Here we use the fullpath as the shortname as well, as it is useful to have
            # a fullpath displayed in errors for the user configuration
            user_config = _yaml.load(config, shortname=config)
            # Overlay the user configuration on top of the defaults; the
            # merged result lives in `defaults`, which everything below
            # validates and reads from.
            user_config._composite(defaults)

        # Give obsoletion warnings
        if "builddir" in defaults:
            raise LoadError("builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA)

        if "artifactdir" in defaults:
            raise LoadError("artifactdir is obsolete", LoadErrorReason.INVALID_DATA)

        # Reject any unrecognized top-level configuration keys.
        defaults.validate_keys(
            [
                "cachedir",
                "sourcedir",
                "builddir",
                "logdir",
                "scheduler",
                "build",
                "fetch",
                "track",
                "artifacts",
                "source-caches",
                "logging",
                "projects",
                "cache",
                "workspacedir",
                "remote-execution",
            ]
        )

        for directory in ["cachedir", "sourcedir", "logdir", "workspacedir"]:
            # Allow the ~ tilde expansion and any environment variables in
            # path specification in the config files.
            #
            path = defaults.get_str(directory)
            path = os.path.expanduser(path)
            path = os.path.expandvars(path)
            path = os.path.normpath(path)
            setattr(self, directory, path)

            # Relative paths don't make sense in user configuration. The exception is
            # workspacedir where `.` is useful as it will be combined with the name
            # specified on the command line.
            if not os.path.isabs(path) and not (directory == "workspacedir" and path == "."):
                raise LoadError("{} must be an absolute path".format(directory), LoadErrorReason.INVALID_DATA)

        # add directories not set by users
        #
        # These are always derived from cachedir and are not user-configurable.
        assert self.cachedir
        self.tmpdir = os.path.join(self.cachedir, "tmp")
        self.casdir = os.path.join(self.cachedir, "cas")
        self.builddir = os.path.join(self.cachedir, "build")
        self.artifactdir = os.path.join(self.cachedir, "artifacts", "refs")

        # Move old artifact cas to cas if it exists and create symlink
        #
        # One-time migration from the legacy layout: only performed when the
        # old directory is a real directory (not already a migration symlink)
        # and the new location does not exist yet. The symlink keeps the old
        # path working afterwards.
        old_casdir = os.path.join(self.cachedir, "artifacts", "cas")
        if os.path.exists(old_casdir) and not os.path.islink(old_casdir) and not os.path.exists(self.casdir):
            os.rename(old_casdir, self.casdir)
            os.symlink(self.casdir, old_casdir)

        # Cleanup old extract directories
        #
        # Best-effort removal of an obsolete directory; errors are ignored.
        old_extractdir = os.path.join(self.cachedir, "extract")
        if os.path.isdir(old_extractdir):
            shutil.rmtree(old_extractdir, ignore_errors=True)

        # Load quota configuration
        # We need to find the first existing directory in the path of our
        # casdir - the casdir may not have been created yet.
        cache = defaults.get_mapping("cache")
        cache.validate_keys(
            ["quota", "reserved-disk-space", "low-watermark", "storage-service", "pull-buildtrees", "cache-buildtrees"]
        )

        # Walk up from casdir until we hit an existing ancestor; that
        # directory's volume is used to resolve percentage-based sizes.
        cas_volume = self.casdir
        while not os.path.exists(cas_volume):
            cas_volume = os.path.dirname(cas_volume)

        self.config_cache_quota_string = cache.get_str("quota")
        try:
            self.config_cache_quota = utils._parse_size(self.config_cache_quota_string, cas_volume)
        except utils.UtilError as e:
            raise LoadError(
                "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
                "\nValid values are, for example: 800M 10G 1T 50%\n".format(str(e)),
                LoadErrorReason.INVALID_DATA,
            ) from e

        cache_reserved_string = cache.get_str("reserved-disk-space")
        try:
            self.config_cache_reserved = utils._parse_size(cache_reserved_string, cas_volume)
            # _parse_size returning None means the value was not a valid
            # size; report it with the provenance of the offending scalar.
            if self.config_cache_reserved is None:
                provenance = cache.get_scalar("reserved-disk-space").get_provenance()
                raise LoadError(
                    "{}: Please specify the value in bytes or as a % of full disk space.\n"
                    "\nValid values are, for example: 2G 5%\n".format(provenance),
                    LoadErrorReason.INVALID_DATA,
                )
        except utils.UtilError as e:
            raise LoadError(
                "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
                "\nValid values are, for example: 2G 5%\n".format(str(e)),
                LoadErrorReason.INVALID_DATA,
            ) from e

        low_watermark_string = cache.get_str("low-watermark")
        try:
            self.config_cache_low_watermark = utils._parse_percentage(low_watermark_string)
        except utils.UtilError as e:
            raise LoadError(
                "{}\nPlease specify the value as a % of the cache quota.".format(str(e)),
                LoadErrorReason.INVALID_DATA,
            ) from e

        # Optional remote storage service for the cache.
        remote_cache = cache.get_mapping("storage-service", default=None)
        if remote_cache:
            self.remote_cache_spec = RemoteSpec.new_from_node(remote_cache)

        # Load global artifact cache configuration
        cache_config = defaults.get_mapping("artifacts", default={})
        self._global_artifact_cache_config = _CacheConfig.new_from_node(cache_config)

        # Load global source cache configuration
        cache_config = defaults.get_mapping("source-caches", default={})
        self._global_source_cache_config = _CacheConfig.new_from_node(cache_config)

        # Load the global remote execution config
        remote_execution = defaults.get_mapping("remote-execution", default=None)
        if remote_execution:
            self.remote_execution_specs = self._load_remote_execution(remote_execution)

        # Load pull build trees configuration
        self.pull_buildtrees = cache.get_bool("pull-buildtrees")

        # Load cache build trees configuration
        self.cache_buildtrees = cache.get_enum("cache-buildtrees", _CacheBuildTrees)

        # Load logging config
        logging = defaults.get_mapping("logging")
        logging.validate_keys(
            [
                "key-length",
                "verbose",
                "error-lines",
                "message-lines",
                "debug",
                "element-format",
                "message-format",
                "throttle-ui-updates",
            ]
        )
        self.log_key_length = logging.get_int("key-length")
        self.log_debug = logging.get_bool("debug")
        self.log_verbose = logging.get_bool("verbose")
        self.log_error_lines = logging.get_int("error-lines")
        self.log_message_lines = logging.get_int("message-lines")
        self.log_element_format = logging.get_str("element-format")
        self.log_message_format = logging.get_str("message-format")
        self.log_throttle_updates = logging.get_bool("throttle-ui-updates")

        # Load scheduler config
        scheduler = defaults.get_mapping("scheduler")
        scheduler.validate_keys(["on-error", "fetchers", "builders", "pushers", "network-retries"])
        self.sched_error_action = scheduler.get_enum("on-error", _SchedulerErrorAction)
        self.sched_fetchers = scheduler.get_int("fetchers")
        self.sched_builders = scheduler.get_int("builders")
        self.sched_pushers = scheduler.get_int("pushers")
        self.sched_network_retries = scheduler.get_int("network-retries")

        # Load build config
        build = defaults.get_mapping("build")
        build.validate_keys(["max-jobs", "retry-failed", "dependencies"])
        self.build_max_jobs = build.get_int("max-jobs")
        self.build_retry_failed = build.get_bool("retry-failed")

        # 'dependencies' is validated by hand (not get_enum) so the error
        # message can carry the scalar's provenance.
        dependencies = build.get_str("dependencies")
        if dependencies not in ["none", "all"]:
            provenance = build.get_scalar("dependencies").get_provenance()
            raise LoadError(
                "{}: Invalid value for 'dependencies'. Choose 'none' or 'all'.".format(provenance),
                LoadErrorReason.INVALID_DATA,
            )
        self.build_dependencies = _PipelineSelection(dependencies)

        # Load fetch config
        fetch = defaults.get_mapping("fetch")
        fetch.validate_keys(["source"])
        self.fetch_source = fetch.get_enum("source", _SourceUriPolicy)

        # Load track config
        track = defaults.get_mapping("track")
        track.validate_keys(["source"])
        self.track_source = track.get_enum("source", _SourceUriPolicy)

        # Load per-projects overrides
        self._project_overrides = defaults.get_mapping("projects", default={})

        # Shallow validation of overrides, parts of buildstream which rely
        # on the overrides are expected to validate elsewhere.
        for overrides_project in self._project_overrides.keys():
            overrides = self._project_overrides.get_mapping(overrides_project)
            overrides.validate_keys(
                ["artifacts", "source-caches", "options", "strict", "default-mirror", "remote-execution", "mirrors"]
            )