word_language_model/word_language_model.py [203:222]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        else:
            # Validation loss did not improve: anneal the learning rate and
            # reinitialize the SGD optimizer with the reduced value.
            args.lr = args.lr * 0.25
            trainer._init_optimizer('sgd',
                {
                    'learning_rate': args.lr,
                    'momentum': 0,
                    'wd': 0
                }
            )
            # Roll the model back to the best parameters saved so far.
            model.collect_params().load('model.params', ctx)


if __name__ == '__main__':
    if args.mode == 'hybrid':
        # Hybridizing compiles the imperative Gluon model into a symbolic
        # graph for faster execution.
        model.hybridize()

    ###############################################################################
    # Training code
    ###############################################################################
    train(args.epochs, context)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
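The else branch above reaches into the private Trainer._init_optimizer method to lower the
learning rate. A minimal sketch of the same annealing step using only public Gluon APIs
(Trainer.set_learning_rate and Block.load_parameters, available in MXNet 1.3 and later)
could look like this; the helper name anneal_and_restore is illustrative, not part of the
example scripts:

    def anneal_and_restore(trainer, model, args, ctx,
                           factor=0.25, checkpoint='model.params'):
        # Sketch only: cut the learning rate and roll the model back to the
        # best checkpoint with public Gluon calls instead of _init_optimizer.
        args.lr = args.lr * factor
        trainer.set_learning_rate(args.lr)      # keep the existing optimizer state
        model.load_parameters(checkpoint, ctx)  # reload the best saved parameters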



word_language_model/word_language_model_train.py [206:225]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        else:
            # Validation loss did not improve: anneal the learning rate and
            # reinitialize the SGD optimizer with the reduced value.
            args.lr = args.lr * 0.25
            trainer._init_optimizer('sgd',
                {
                    'learning_rate': args.lr,
                    'momentum': 0,
                    'wd': 0
                }
            )
            # Roll the model back to the best parameters saved so far.
            model.collect_params().load('model.params', ctx)


if __name__ == '__main__':
    if args.mode == 'hybrid':
        # Hybridizing compiles the imperative Gluon model into a symbolic
        # graph for faster execution.
        model.hybridize()

    ###############################################################################
    # Training code
    ###############################################################################
    train(args.epochs, context)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
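Since this fallback appears verbatim in both word_language_model.py and
word_language_model_train.py, a helper along the lines of the sketch above could live in a
shared module that both scripts import, so a future change to the annealing factor or the
checkpoint path only has to be made in one place.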



