in classy_vision/hooks/visdom_hook.py [0:0]
def on_phase_end(self, task) -> None:
    """
    Record the phase's mean loss, learning rate, and meter values into
    ``self.metrics``, then plot the accumulated learning curves on visdom
    (only after non-train phases, and only on the primary replica).
    """
    # Local import: the bare `collections.MutableMapping` alias was
    # deprecated in Python 3.3 and removed in 3.10.
    from collections.abc import MutableMapping

    phase_type = task.phase_type
    metrics = self.metrics
    batches = len(task.losses)
    if batches == 0:
        # Nothing ran in this phase; there is nothing to record or plot.
        return
    # Mean loss over all batches of the phase.
    loss = sum(task.losses) / batches
    metrics.setdefault(phase_type + "_loss", []).append(loss)
    # Optimizer LR at the end of the phase.
    metrics.setdefault(phase_type + "_learning_rate", []).append(
        task.optimizer.options_view.lr
    )
    # Meter values; mapping-valued meters are flattened into one metric
    # series per (prefixed) key.
    for meter in task.meters:
        if isinstance(meter.value, MutableMapping):
            flattened_meters_dict = flatten_dict(meter.value, prefix=meter.name)
            for k, v in flattened_meters_dict.items():
                metrics.setdefault(phase_type + "_" + k, []).append(v)
        else:
            metrics.setdefault(phase_type + "_" + meter.name, []).append(meter.value)
    # Update learning curve visualizations. NOTE(review): phase_type is
    # deliberately rebound here so the window title reflects train/test
    # rather than the task's raw phase_type string.
    phase_type = "train" if task.train else "test"
    title = "%s-%s" % (phase_type, task.base_model.__class__.__name__)
    title += self.title_suffix
    if not task.train and is_primary():
        logging.info("Plotting learning curves to visdom")
        plot_learning_curves(
            metrics, visdom_server=self.visdom, env=self.env, win=title, title=title
        )