in classy_vision/optim/classy_optimizer.py [0:0]
def set_param_groups(self, param_groups, **kwargs):
    """
    Specifies which parameters will be optimized.

    This is the public API where users of ClassyOptimizer can specify what
    parameters will get optimized. Unlike PyTorch optimizers, we don't
    require the list of param_groups in the constructor.

    Args:
        param_groups: either an iterable of Tensors (e.g.
            model.parameters()) or a list of dictionaries. If a dictionary,
            it must contain a key "params" with the same format and
            semantics as PyTorch param groups.
    """
    def cast_param_groups(params):
        """Converts a list/dict to the PyTorch param_groups format."""
        if params is None:
            return []

        if isinstance(params, dict):
            assert "params" in params
            return [params]

        pg = list(params)
        if len(pg) == 0:
            raise ValueError("optimizer got an empty parameter list")
        if not isinstance(pg[0], dict):
            pg = [{"params": pg}]
        return pg
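
    # For illustration, the conversions cast_param_groups performs
    # (a sketch; w and b stand for parameter Tensors):
    #   None                              -> []
    #   {"params": [w]}                   -> [{"params": [w]}]
    #   [w, b] (e.g. model.parameters())  -> [{"params": [w, b]}]
    #   [{"params": [w], "lr": 0.1}]      -> returned unchanged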

    self._param_group_schedulers = cast_param_groups(param_groups)
    # Convert constant values to constant param schedulers, using kwargs
    # values as defaults: a value already present in a param group wins
    # over the corresponding kwargs default.
    for pg in self._param_group_schedulers:
        for k, v in kwargs.items():
            if isinstance(v, (int, float)):
                # setdefault leaves any per-group setting intact.
                pg.setdefault(k, ConstantParamScheduler(v))
            else:
                # Copy non-numeric defaults (e.g. param schedulers) from
                # kwargs into groups that don't set them.
                pg.setdefault(k, v)

        # Wrap any constants the caller put directly in a param group.
        for k, v in pg.items():
            if isinstance(v, (int, float)):
                pg[k] = ConstantParamScheduler(v)

    # Materialize the underlying PyTorch optimizer with the schedulers
    # evaluated at the start of training (where=0).
    self.prepare(self._run_schedulers(0, None))
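
A minimal usage sketch. The names here are illustrative stand-ins: `model` is
any nn.Module with `trunk`/`head` submodules, and `SGD` and
`LinearParamScheduler` are assumed to be the ClassyOptimizer subclass and param
scheduler shipped with Classy Vision. The point is that per-group values win
over the kwargs defaults, which only fill in missing keys.

# Assumed imports (paths as in Classy Vision; treat as illustrative):
from classy_vision.optim import SGD
from classy_vision.optim.param_scheduler import LinearParamScheduler

optimizer = SGD()  # any ClassyOptimizer subclass

# One group sharing the kwargs defaults; the floats are wrapped in
# ConstantParamScheduler instances by set_param_groups.
optimizer.set_param_groups(model.parameters(), lr=0.1, momentum=0.9)

# Two groups: the head keeps its own lr schedule, while the trunk falls
# back to the kwargs default because only missing keys are filled in.
optimizer.set_param_groups(
    [
        {"params": model.trunk.parameters()},
        {"params": model.head.parameters(),
         "lr": LinearParamScheduler(0.01, 0.1)},
    ],
    lr=0.1,
    momentum=0.9,
)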