def make_searcher_and_scheduler()

in benchmarking/cli/launch_utils.py

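Builds the search_options and scheduler_options dictionaries which are passed
to the searcher and scheduler constructors. Flat command-line parameters are
mapped to option names (searcher options carry a 'searcher_' prefix in
params), and only parameters which are actually set (not None) are entered.

The excerpt below relies on a module-level logger and on the helper
_enter_not_none, neither of which is shown here. To make the listing
self-contained, the imports and a minimal sketch of the helper are added;
the sketch is inferred from its call sites, not taken from the source, and
the real implementation in launch_utils.py may differ (in particular in how
boolean flags are cast):

import logging
from typing import Tuple

logger = logging.getLogger(__name__)


def _enter_not_none(dct: dict, key: str, val, tp=None):
    # Inferred sketch: enter `val` under `key` only if it is set,
    # casting it to `tp` when a type is given.
    if val is not None:
        dct[key] = val if tp is None else tp(val)
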

def make_searcher_and_scheduler(params: dict) -> Tuple[dict, dict]:
    scheduler = params['scheduler']
    searcher = params['searcher']
    # Options for searcher
    search_options = dict()
    _enter_not_none(
        search_options, 'debug_log', params.get('debug_log'), tp=bool)
    _enter_not_none(
        search_options, 'normalize_targets', params.get('normalize_targets'),
        tp=bool)
    model = params.get('searcher_model')
    _enter_not_none(search_options, 'model', model)

    if searcher.startswith('bayesopt'):
        # Options for bayesopt searcher, as (name, type, warn) tuples
        searcher_args = (
            ('num_init_random', int, False),
            ('num_init_candidates', int, False),
            ('num_fantasy_samples', int, False),
            ('resource_acq', str, True),
            ('resource_acq_bohb_threshold', int, True),
            ('gp_resource_kernel', str, True),
            ('opt_skip_period', int, False),
            ('opt_skip_init_length', int, False),
            ('opt_skip_num_max_resource', bool, False),
            ('opt_nstarts', int, False),
            ('opt_maxiter', int, False),
            ('initial_scoring', str, False),
            ('issm_gamma_one', bool, False),
            ('exponent_cost', float, False),
            ('expdecay_normalize_inputs', bool, False),
            ('use_new_code', bool, False),
            ('num_init_candidates_for_batch', int, False),
            ('no_fantasizing', bool, False),
        )
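        # Models with additive resource structure. The options flagged with
        # warn=True above configure the default multi-task GP model and are
        # not used by these models, hence the warning below.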
        gp_add_models = {'gp_issm', 'gp_expdecay'}
        for name, tp, warn in searcher_args:
            _enter_not_none(
                search_options, name, params.get('searcher_' + name), tp=tp)
            if warn and name in search_options and model in gp_add_models:
                logger.warning(
                    f"searcher_{name} not used with searcher_model = {model}")
        if 'issm_gamma_one' in search_options and model != 'gp_issm':
            logger.warning(
                f"searcher_issm_gamma_one not used with searcher_model = {model}")
        if 'expdecay_normalize_inputs' in search_options and model != 'gp_expdecay':
            logger.warning(
                "searcher_expdecay_normalize_inputs not used with searcher_model "
                f"= {model}")
    elif searcher == 'kde':
        # Options for kde searcher
        searcher_args = (
            ('num_min_data_points', int),
            ('top_n_percent', int),
            ('min_bandwidth', float),
            ('num_candidates', int),
            ('bandwidth_factor', int),
            ('random_fraction', float),
        )
        for name, tp in searcher_args:
            _enter_not_none(
                search_options, name, params.get('searcher_' + name), tp=tp)

    # Options for scheduler. The random seed is derived deterministically
    # from run_id plus an optional offset, so different run_ids get
    # different seeds.
    random_seed_offset = params.get('random_seed_offset')
    if random_seed_offset is None:
        random_seed_offset = 0
    random_seed = (random_seed_offset + params['run_id']) % (2 ** 32)
    scheduler_options = {'random_seed': random_seed}
    name = ('max_resource_level' if scheduler == 'hyperband_synchronous'
            else 'max_t')
    _enter_not_none(
        scheduler_options, name, params.get('max_resource_level'), tp=int)
    scheduler_args = ()
    if scheduler != 'fifo':
        # Only process these arguments for HyperbandScheduler
        prefix = 'hyperband_'
        assert scheduler.startswith(prefix)
        scheduler_args = scheduler_args + (
            ('reduction_factor', int),
            ('grace_period', int),
            ('brackets', int))
        if scheduler != 'hyperband_synchronous':
            sch_type = scheduler[len(prefix):]
            _enter_not_none(scheduler_options, 'type', sch_type)
            rung_levels = params.get('rung_levels')
            if rung_levels is not None:
                scheduler_options['rung_levels'] = sorted(
                    [int(x) for x in rung_levels.split()])
            scheduler_args = scheduler_args + (
                ('searcher_data', str),
                ('register_pending_myopic', bool),
                ('rung_system_per_bracket', bool))
    for name, tp in scheduler_args:
        _enter_not_none(
            scheduler_options, name, params.get(name), tp=tp)

    # Special constraints: the additive GP models (gp_issm, gp_expdecay)
    # require searcher_data == 'all', so any other setting is overridden
    # here (with a warning)
    if (scheduler != 'fifo' and searcher.startswith('bayesopt')
            and model in gp_add_models):
        searcher_data = scheduler_options.get('searcher_data')
        if searcher_data is not None and searcher_data != 'all':
            logger.warning(
                f"searcher_model = '{model}' requires "
                f"searcher_data = 'all' (and not '{searcher_data}')")
        scheduler_options['searcher_data'] = 'all'

    return search_options, scheduler_options
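
A hypothetical call, for illustration only. The keys mirror the lookups in
the code above; the values (and the resulting dictionaries, which assume the
_enter_not_none sketch from above) are made up:

params = {
    'scheduler': 'hyperband_stopping',
    'searcher': 'bayesopt',
    'searcher_model': 'gp_multitask',
    'run_id': 3,
    'max_resource_level': 81,
    'reduction_factor': 3,
    'grace_period': 1,
    'searcher_data': 'rungs',
    'debug_log': True,
}
search_options, scheduler_options = make_searcher_and_scheduler(params)
# search_options == {'debug_log': True, 'model': 'gp_multitask'}
# scheduler_options == {'random_seed': 3, 'max_t': 81, 'type': 'stopping',
#                       'reduction_factor': 3, 'grace_period': 1,
#                       'searcher_data': 'rungs'}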