in nevergrad/optimization/base.py [0:0]
def tell(self, candidate: p.Parameter, loss: tp.Loss) -> None:
"""Provides the optimizer with the evaluation of a fitness value for a candidate.
Parameters
----------
candidate: p.Parameter
the candidate (parameter) on which the function was evaluated, usually one returned by :code:`ask()`
loss: float/list/np.ndarray
loss of the function, either a float for single-objective optimization or a list/array of floats for a multiobjective function
Note
----
The candidate should generally be one provided by :code:`ask()`, but it can also be
a non-asked candidate. To create a p.Parameter instance from args and kwargs,
you can use :code:`candidate = optimizer.parametrization.spawn_child(new_value=your_value)`:
- for an :code:`Array(shape=(2,))`: :code:`optimizer.parametrization.spawn_child(new_value=[12, 12])`
- for an :code:`Instrumentation`: :code:`optimizer.parametrization.spawn_child(new_value=(args, kwargs))`
Alternatively, you can provide a suggestion with :code:`optimizer.suggest(*args, **kwargs)`; the next :code:`ask`
will then use this suggestion.
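Example
-------
A minimal ask/tell loop (illustrative sketch; :code:`my_loss` stands for a user-provided objective):
- :code:`candidate = optimizer.ask()`
- :code:`loss = my_loss(*candidate.args, **candidate.kwargs)`
- :code:`optimizer.tell(candidate, loss)`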
"""
# Check loss type
if isinstance(loss, (Real, float)):
# using "float" along "Real" because mypy does not understand "Real" for now Issue #3186
loss = float(loss)
# Nonsensical values, including NaNs, should not be accepted.
# We do not use max-float as various later transformations could lead to greater values.
if not loss < 5.0e20: # pylint: disable=unneeded-not
self._warn(
f"Clipping very high value {loss} in tell (rescale the cost function?).",
errors.LossTooLargeWarning,
)
loss = 5.0e20 # sys.float_info.max leads to numerical problems, so we use this bound instead.
elif isinstance(loss, (tuple, list, np.ndarray)):
loss = np.array(loss, copy=False, dtype=float).ravel() if len(loss) != 1 else loss[0]
elif not isinstance(loss, np.ndarray):
raise TypeError(
f'"tell" method only supports float values but the passed loss was: {loss} (type: {type(loss)}.'
)
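# Illustrative examples of accepted losses (a sketch from the caller's side, assuming "candidate" came from ask()):
#   optimizer.tell(candidate, 0.5)         # single-objective: kept as a float (clipped if huge)
#   optimizer.tell(candidate, [0.5, 1.2])  # multiobjective: converted to a 1D float array
#   optimizer.tell(candidate, [0.5])       # length-1 list: unwrapped to the scalar 0.5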
# check Parameter
if not isinstance(candidate, p.Parameter):
raise TypeError(
"'tell' must be provided with the candidate.\n"
"Use optimizer.parametrization.spawn_child(new_value)) if you want to "
"create a candidate that as not been asked for, "
"or optimizer.suggest(*args, **kwargs) to suggest a point that should be used for "
"the next ask"
)
# check loss length
self.num_objectives = 1 if isinstance(loss, float) else loss.size
# checks are done, start processing
candidate.freeze() # make sure it is not modified somewhere
# add reference if provided
if isinstance(candidate, p.MultiobjectiveReference):
if self._hypervolume_pareto is not None:
raise RuntimeError("MultiobjectiveReference can only be provided before the first tell.")
if not isinstance(loss, np.ndarray):
raise RuntimeError("MultiobjectiveReference must only be used for multiobjective losses")
self._hypervolume_pareto = mobj.HypervolumePareto(
upper_bounds=loss, seed=self._rng, no_hypervolume=self._no_hypervolume
)
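# Illustrative usage (a sketch, assuming "import nevergrad as ng" on the caller's side):
#   optimizer.tell(ng.p.MultiobjectiveReference(), [10.0, 10.0])
# registers the hypervolume reference point before any regular tell.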
if candidate.value is None:
return # no value, so stop processing here
candidate = candidate.value
# preprocess multiobjective loss
if isinstance(loss, np.ndarray):
candidate._losses = loss
if not isinstance(loss, float):
loss = self._preprocess_multiobjective(candidate)
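# Related usage (sketch): the resulting non-dominated candidates can later
# be retrieved on the caller's side with optimizer.pareto_front().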
# call callbacks for logging etc...
candidate.loss = loss
assert isinstance(loss, float)
for callback in self._callbacks.get("tell", []):
# the multiobjective reference is not handled here,
# but callbacks can access both the scalar loss and the multiobjective losses (through candidate.losses)
callback(self, candidate, loss)
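# Illustrative registration (a sketch, assuming "import nevergrad as ng" on the caller's side):
#   optimizer.register_callback("tell", ng.callbacks.ParametersLogger("./log.json"))
# would make every told candidate pass through this loop and get logged.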
if not candidate.satisfies_constraints() and self.budget is not None:
penalty = self._constraints_manager.penalty(candidate, self.num_ask, self.budget)
loss = loss + penalty
if isinstance(loss, float) and (
self.num_objectives == 1 or (self.num_objectives > 1 and not self._no_hypervolume)
):
self._update_archive_and_bests(candidate, loss)
if candidate.uid in self._asked:
self._internal_tell_candidate(candidate, loss)
self._asked.remove(candidate.uid)
else:
self._internal_tell_not_asked(candidate, loss)
self._num_tell_not_asked += 1
self._num_tell += 1