in src/python/tensorflow_cloud/tuner/utils.py [0:0]
def _convert_hyperparams_to_vizier_params(
        hyperparams: hp_module.HyperParameters) -> List[Any]:
    """Converts HyperParameters to a list of ParameterSpec dicts.

    The dicts are in the format expected by a Vizier study_config.
    """
    param_specs = []
    for hp in hyperparams.space:
        param = {}
        param["parameter"] = hp.name
        if isinstance(hp, hp_module.Choice):
            values = hp.values
            # KerasTuner requires all values of a `Choice` to share a single
            # type, so inspecting the first value is sufficient. `str` and
            # `bool` values are sent as categorical strings (mirroring the
            # `Boolean` branch below); numeric values become a discrete spec.
            if isinstance(values[0], (str, bool)):
                param["type"] = _CATEGORICAL
                param["categorical_value_spec"] = {
                    "values": [str(v) for v in values]}
            else:
                param["type"] = _DISCRETE
                param["discrete_value_spec"] = {"values": values}
        elif isinstance(hp, hp_module.Int):
            if hp.step is None or hp.step == 1:
                # A contiguous integer range maps directly onto an integer
                # parameter spec.
                param["type"] = _INTEGER
                param["integer_value_spec"] = {
                    "min_value": hp.min_value,
                    "max_value": hp.max_value,
                }
                if hp.sampling is not None:
                    param.update(_get_scale_type(hp.sampling))
            else:
                # A strided range must be enumerated as a discrete spec.
                # Note: hp.max_value is inclusive, while the stop argument of
                # range() is exclusive, hence the +1.
                values = list(range(hp.min_value, hp.max_value + 1, hp.step))
                param["type"] = _DISCRETE
                param["discrete_value_spec"] = {"values": values}
        elif isinstance(hp, hp_module.Float):
            if hp.step is None:
                param["type"] = _DOUBLE
                param["double_value_spec"] = {
                    "min_value": hp.min_value,
                    "max_value": hp.max_value,
                }
                if hp.sampling is not None:
                    param.update(_get_scale_type(hp.sampling))
            else:
                # Match how KerasTuner enumerates the candidate values: the
                # small epsilon makes np.arange's (otherwise exclusive) stop
                # value include hp.max_value.
                values = np.arange(
                    hp.min_value, hp.max_value + 1e-7, step=hp.step).tolist()
                param["type"] = _DISCRETE
                param["discrete_value_spec"] = {"values": values}
        elif isinstance(hp, hp_module.Boolean):
            # Vizier has no native boolean type; represent the two states as
            # categorical string values.
            param["type"] = _CATEGORICAL
            param["categorical_value_spec"] = {"values": ["True", "False"]}
        elif isinstance(hp, hp_module.Fixed):
            # `bool` must be tested alongside `str` (and before the numeric
            # fallback) because bool is a subclass of int.
            if isinstance(hp.value, (str, bool)):
                param["type"] = _CATEGORICAL
                param["categorical_value_spec"] = {"values": [str(hp.value)]}
            else:
                param["type"] = _DISCRETE
                param["discrete_value_spec"] = {"values": [float(hp.value)]}
        else:
            raise ValueError(
                "`HyperParameter` type not recognized: {}".format(hp))
        param_specs.append(param)
    return param_specs
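
# The `_get_scale_type` helper referenced above is defined elsewhere in this
# file. As a hypothetical sketch of its shape (an assumption, not the actual
# implementation): it would map KerasTuner sampling strategies onto Vizier
# scale types, returning a partial dict that `param.update` merges into the
# parameter spec.
def _get_scale_type_sketch(sampling):
    """Hypothetical stand-in for `_get_scale_type` (assumed mapping)."""
    # KerasTuner sampling name -> Vizier ScaleType enum name (assumed).
    return {
        "scale_type": {
            "linear": "UNIT_LINEAR_SCALE",
            "log": "UNIT_LOG_SCALE",
            "reverse_log": "UNIT_REVERSE_LOG_SCALE",
        }[sampling]
    }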
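
# A minimal usage sketch showing how a small KerasTuner search space would be
# converted. The import path is an assumption (it depends on the KerasTuner
# version this package pins as `hp_module`), and the expected output assumes
# `_get_scale_type("log")` yields a UNIT_LOG_SCALE scale_type as sketched
# above.
from kerastuner.engine import hyperparameters as hp_module

hps = hp_module.HyperParameters()
hps.Choice("optimizer", ["adam", "sgd"])
hps.Int("units", min_value=32, max_value=128, step=32)
hps.Float("learning_rate", min_value=1e-4, max_value=1e-1, sampling="log")

# _convert_hyperparams_to_vizier_params(hps) would then return:
# [{"parameter": "optimizer", "type": _CATEGORICAL,
#   "categorical_value_spec": {"values": ["adam", "sgd"]}},
#  {"parameter": "units", "type": _DISCRETE,
#   "discrete_value_spec": {"values": [32, 64, 96, 128]}},
#  {"parameter": "learning_rate", "type": _DOUBLE,
#   "double_value_spec": {"min_value": 0.0001, "max_value": 0.1},
#   "scale_type": "UNIT_LOG_SCALE"}]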