in nevergrad/optimization/callbacks.py [0:0]
def __call__(self, optimizer: base.Optimizer, candidate: p.Parameter, loss: tp.FloatLoss) -> None:
    """Append one JSON line describing the told candidate to ``self._filepath``.

    Parameters
    ----------
    optimizer: base.Optimizer
        optimizer providing bookkeeping counters (num_ask, num_tell, ...) and,
        in the multiobjective case, the pareto front
    candidate: p.Parameter
        evaluated candidate whose metadata and flattened parameter values are logged
    loss: tp.FloatLoss
        loss reported for the candidate

    Note
    ----
    Writing is best-effort: any failure only raises a warning so that logging
    issues never interrupt the optimization run.
    """
    data = {
        "#parametrization": optimizer.parametrization.name,
        "#optimizer": optimizer.name,
        "#session": self._session,
        "#num-ask": optimizer.num_ask,
        "#num-tell": optimizer.num_tell,
        "#num-tell-not-asked": optimizer.num_tell_not_asked,
        "#uid": candidate.uid,
        "#lineage": candidate.heritage["lineage"],
        "#generation": candidate.generation,
        "#parents_uids": [],  # filled below for generation > 1
        "#loss": loss,
    }
    if optimizer.num_objectives > 1:  # multiobjective losses
        data.update({f"#losses#{k}": val for k, val in enumerate(candidate.losses)})
        data["#pareto-length"] = len(optimizer.pareto_front())
    if hasattr(optimizer, "_configured_optimizer"):
        configopt = optimizer._configured_optimizer  # type: ignore
        if isinstance(configopt, base.ConfiguredOptimizer):
            data.update({"#optimizer#" + x: str(y) for x, y in configopt.config().items()})
    if isinstance(candidate._meta.get("sigma"), float):
        data["#meta-sigma"] = candidate._meta["sigma"]  # for TBPSA-like algorithms
    if candidate.generation > 1:
        data["#parents_uids"] = candidate.parents_uids
    for name, param in helpers.flatten(candidate, with_containers=False, order=1):
        key = name if name else "0"  # the root parameter flattens to an empty name
        val = param.value
        # Use the abstract numpy scalar types so every precision is converted to a
        # JSON-serializable builtin: np.float_/np.int_ only matched the default
        # 64-bit scalars (a np.float32 value would break json.dumps below), and
        # np.float_ no longer exists in numpy 2.0.
        if isinstance(val, (np.floating, np.integer, np.bool_)):
            val = val.item()
        if inspect.ismethod(val):
            val = repr(val.__self__)  # show mutation class
        data[key] = val.tolist() if isinstance(val, np.ndarray) else val
        if isinstance(param, p.Data):
            sigma_val = param.sigma.value
            data[key + "#sigma"] = (
                sigma_val.tolist() if isinstance(sigma_val, np.ndarray) else sigma_val
            )
    try:  # avoid bugging as much as possible
        with self._filepath.open("a") as f:
            f.write(json.dumps(data) + "\n")
    except Exception as e:  # pylint: disable=broad-except
        warnings.warn(f"Failing to json data: {e}")