fairseq/optim/fp16_optimizer.py [69:87]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if getattr(args, 'fp16_scale_window', None) is None:
            if len(args.update_freq) > 1:
                raise ValueError(
                    '--fp16-scale-window must be given explicitly when using a '
                    'custom --update-freq schedule'
                )
            # Default heuristic: shrink the window as the effective batch size
            # grows (more workers or more gradient accumulation), so the loss
            # scale is re-evaluated after roughly the same number of samples.
            # Cast to int so the window is a whole number of updates.
            scale_window = int(2**14 / args.distributed_world_size / args.update_freq[0])
        else:
            scale_window = args.fp16_scale_window

        self.scaler = DynamicLossScaler(
            init_scale=args.fp16_init_scale,
            scale_window=scale_window,
            tolerance=args.fp16_scale_tolerance,
            threshold=args.threshold_loss_scale,
        )

    @classmethod
    def build_optimizer(cls, args, params):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
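
For context: with the default heuristic above, --distributed-world-size 8 and --update-freq 1 give scale_window = 2**14 / 8 / 1 = 2048, i.e. the scaler considers raising the loss scale every 2048 overflow-free updates. Below is a minimal sketch of the dynamic loss-scaling behavior that DynamicLossScaler's constructor arguments control; the class and its internals here are illustrative assumptions, not fairseq's actual implementation.

class SimpleDynamicLossScaler:
    """Illustrative dynamic loss scaler (assumption, not fairseq's code):
    halve the scale on overflow, double it after `scale_window`
    consecutive overflow-free updates, never drop below `threshold`."""

    def __init__(self, init_scale=2.0 ** 7, scale_window=2048,
                 scale_factor=2.0, threshold=None):
        self.loss_scale = init_scale
        self.scale_window = scale_window
        self.scale_factor = scale_factor
        self.threshold = threshold
        self._since_overflow = 0

    def update(self, overflow):
        if overflow:
            # fp16 gradients overflowed: back off and restart the window.
            self.loss_scale /= self.scale_factor
            if self.threshold is not None:
                self.loss_scale = max(self.loss_scale, self.threshold)
            self._since_overflow = 0
        else:
            self._since_overflow += 1
            if self._since_overflow % self.scale_window == 0:
                # A full window without overflow: safe to scale up again.
                self.loss_scale *= self.scale_factor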



fairseq/optim/fp16_optimizer.py [240:258]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if getattr(args, 'fp16_scale_window', None) is None:
            if len(args.update_freq) > 1:
                raise ValueError(
                    '--fp16-scale-window must be given explicitly when using a '
                    'custom --update-freq schedule'
                )
            # Default heuristic: shrink the window as the effective batch size
            # grows (more workers or more gradient accumulation), so the loss
            # scale is re-evaluated after roughly the same number of samples.
            # Cast to int so the window is a whole number of updates.
            scale_window = int(2**14 / args.distributed_world_size / args.update_freq[0])
        else:
            scale_window = args.fp16_scale_window

        self.scaler = DynamicLossScaler(
            init_scale=args.fp16_init_scale,
            scale_window=scale_window,
            tolerance=args.fp16_scale_tolerance,
            threshold=args.threshold_loss_scale,
        )

    @classmethod
    def build_optimizer(cls, args, params):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
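
To show where such a scaler sits in an fp16 update (again an illustrative sketch, not fairseq's FP16Optimizer): the loss is multiplied by the current scale before backward() so small gradients don't underflow fp16, the gradients are unscaled before the optimizer step, and any inf/nan is reported back so the scale can adapt. The helper below is hypothetical.

import torch

def fp16_train_step(model, optimizer, scaler, loss):
    # Hypothetical helper, for illustration only.
    # Scale the loss so small fp16 gradients survive the backward pass.
    (loss * scaler.loss_scale).backward()

    # Unscale gradients and detect inf/nan before stepping.
    overflow = False
    for p in model.parameters():
        if p.grad is not None:
            p.grad.data.div_(scaler.loss_scale)
            if not torch.isfinite(p.grad.data).all():
                overflow = True

    if overflow:
        # Drop this step entirely; the scaler will lower the scale.
        optimizer.zero_grad()
    else:
        optimizer.step()
        optimizer.zero_grad()

    scaler.update(overflow)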



