Helper functions and classes

Recorder customization

Patches Recorder to enable adding train-only metrics. Can be used to log adversarial loss.

@patch
def before_fit(self:Recorder):
    "Prepare state for training"
    # Fresh per-fit accumulators for lrs / iteration counts / losses / metric values.
    self.lrs, self.iters, self.losses, self.values = [], [], [], []
    names = self.metrics.attrgot('name')
    # Names of metrics registered as train-only (see Recorder.add_train_metrics);
    # NOTE(review): stored as `_train_only_metrics` elsewhere — presumably exposed
    # via a `train_only_metrics` property; confirm against the class definition.
    train_only_names = self.train_only_metrics.attrgot('name')
    if self.train_metrics and self.valid_metrics:
        # Both phases tracked: prefix every metric (plus loss) with train_/valid_,
        # slotting the train-only metric names between the two groups.
        names = L('loss') + names
        names = names.map('train_{}') + train_only_names + names.map('valid_{}')
    elif self.valid_metrics:
        # Only validation metrics: train-only names sit between the two losses.
        names = L('train_loss', *train_only_names, 'valid_loss') + names
    else:
        # Training-only run: no valid_loss column at all.
        names = L('train_loss') + train_only_names + names
    if self.add_time: names.append('time')
    # Column headers used by the progress/logging callbacks.
    self.metric_names = 'epoch' + names
    self.smooth_loss.reset()

None[source]

Recorder.add_train_metrics[source]

Recorder.add_train_metrics(*metrics)

def _met_func():
    return 0
# Demo: attach a train-only metric and check it is registered before fitting.
train_metric = ValueMetric(_met_func, 'nothing')
learner = synth_learner(metrics=rmse)
learner.recorder.add_train_metrics(train_metric)
# The recorder stores registered train-only metrics in `_train_only_metrics`.
assert learner.recorder._train_only_metrics[0] is train_metric
learner.fit(2)
epoch train_loss nothing valid_loss _rmse time
0 15.411602 0 11.542326 3.397400 00:00
1 13.509046 0 8.202074 2.863926 00:00

WandbCallback customization

  1. Record the best value per metric as well as the current one - used for post-analysis
  2. Log additional values found in `learn.log_extras` - used to log losses computed by callbacks

class WandbCallback[source]

WandbCallback(log='gradients', log_preds=True, log_model=True, log_dataset=False, dataset_name=None, valid_dl=None, n_preds=36, seed=12345, reorder=True, compare=None) :: Callback

Saves model topology, losses & metrics

Fin