FIFOScheduler.__init__

in syne_tune/optimizer/schedulers/fifo.py [0:0]


    def __init__(self, config_space: Dict, **kwargs):
        """
        Validates and imputes scheduler options, resolves metric/mode and the
        maximum resource level, creates the master random-seed generator and
        the searcher, and optionally resumes from a checkpoint.

        :param config_space: Configuration space for the trials
        :param kwargs: Scheduler options. Valid keys are listed in
            ``_ARGUMENT_KEYS``; defaults come from ``_DEFAULT_OPTIONS``.
            ``metric`` is mandatory.
        :raises AssertionError: If ``metric`` is missing, or if
            ``resume=True`` without ``checkpoint`` being set
        :raises FileNotFoundError: If ``resume=True`` and the checkpoint file
            does not exist
        :raises NotImplementedError: If ``resume=True`` and the checkpoint
            file exists (loading not implemented yet)
        """
        super().__init__(config_space)
        # Check values and impute default values
        assert_no_invalid_options(
            kwargs, _ARGUMENT_KEYS, name='FIFOScheduler')
        kwargs = check_and_merge_defaults(
            kwargs, set(), _DEFAULT_OPTIONS, _CONSTRAINTS,
            dict_name='scheduler_options')
        metric = kwargs.get('metric')
        if metric is None:
            # Explicit raise (not a bare `assert`) so this mandatory-argument
            # check survives `python -O`; exception type is unchanged
            raise AssertionError(
                "Argument 'metric' is mandatory. Pass the name of the metric "
                "reported by your training script, which you'd like to "
                "optimize, and use 'mode' to specify whether it should "
                "be minimized or maximized")
        self.metric = metric
        self.mode = kwargs['mode']
        self.max_resource_attr = kwargs.get('max_resource_attr')
        # Setting max_t (if not provided as argument -> self.max_t)
        # This value can often be obtained from config_space. We check these
        # attributes (in order): epochs, max_t, max_epochs.
        # In any case, the max_t argument takes precedence. If it is None, we
        # use the one inferred from config_space.
        self.max_t = self._infer_max_resource_level(
            kwargs.get('max_t'), self.max_resource_attr)
        # Generator for random seeds
        random_seed = kwargs.get('random_seed')
        if random_seed is None:
            random_seed = np.random.randint(0, 2 ** 32)
        logger.info(f"Master random_seed = {random_seed}")
        self.random_seed_generator = RandomSeedGenerator(random_seed)
        # Generate searcher
        searcher = kwargs['searcher']
        if isinstance(searcher, str):
            # Searcher is created via factory; assemble its options from the
            # user-provided `search_options` plus scheduler attributes.
            # Copy first so the caller's dict is not mutated.
            search_options = kwargs.get('search_options')
            if search_options is None:
                search_options = dict()
            else:
                search_options = search_options.copy()
            search_options.update({
                'configspace': self.config_space.copy(),
                'metric': self.metric,
                'points_to_evaluate': kwargs.get('points_to_evaluate'),
                'scheduler_mode': kwargs['mode'],
                'random_seed_generator': self.random_seed_generator})
            if self.max_t is not None:
                search_options['max_epochs'] = self.max_t
            # Subclasses may extend `search_options`
            search_options = self._extend_search_options(search_options)
            # Adjoin scheduler info to search_options, if not already done by
            # subclass (via `_extend_search_options`)
            if 'scheduler' not in search_options:
                search_options['scheduler'] = 'fifo'
            self.searcher: BaseSearcher = searcher_factory(
                searcher, **search_options)
        else:
            assert isinstance(searcher, BaseSearcher)
            self.searcher: BaseSearcher = searcher

        checkpoint = kwargs.get('checkpoint')
        self._checkpoint = checkpoint
        self._start_time = None  # Will be set at first `suggest`
        self._searcher_initialized = False
        # Resume experiment from checkpoint?
        if kwargs['resume']:
            if checkpoint is None:
                # Survives `python -O`; same exception type as the old assert
                raise AssertionError(
                    "Need checkpoint to be set if resume = True")
            if os.path.isfile(checkpoint):
                raise NotImplementedError()
                # TODO: Need load
                # self.load_state_dict(load(checkpoint))
            else:
                msg = f'checkpoint path {checkpoint} is not available for resume.'
                # Fix: `logger.exception` is only meaningful inside an
                # `except` block (it logs the active traceback); use
                # `logger.error` here. Also, the file is *missing*, so
                # `FileNotFoundError` (not `FileExistsError`) is the
                # semantically correct exception.
                logger.error(msg)
                raise FileNotFoundError(msg)
        # Time keeper
        time_keeper = kwargs.get('time_keeper')
        if time_keeper is not None:
            self.set_time_keeper(time_keeper)
        else:
            self.time_keeper = None