scripts/launch_sagemaker_unlikelihood_cnndm.py [69:109]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Tail of the `hyperparameters` dict (opened above this visible chunk);
        # every entry is forwarded to train.py as a --key value CLI pair below.
        "disable-validation": "True",
        "no-last-checkpoints": "True",
        "use-hellinger-loss": 0,
        "valid-subset": 'train'
    }

    # Local (non-SageMaker) execution path: run train.py directly via Popen and
    # mirror its stdout to both the screen and a per-job log file.
    # NOTE(review): this span is byte-identical to the same lines in
    # launch_sagemaker_unlikelihood_xsum.py — consider extracting a shared
    # helper so the two launchers cannot drift apart.
    if args.exp_type == "local":
        train_instance_type = 'local'
        train_instance_count = 1

        # Placeholders — fill these in before running locally.
        train_path = "<data-bin location>"
        init_path = "<MLE finetuned BART checkpoint dir>"
        save_path = "<model output location>"

        ngpus = 4 # modify based on the number of GPUs on the local machine.
        cmd = ['python', 'train.py', ]
        cmd += ['--save_dir', save_path]
        cmd += ['--train', train_path]
        cmd += ['--pretrained_path', init_path]
        cmd += ['--ngpus', '{}'.format(ngpus)]
        # Forward each hyperparameter as a CLI flag; underscores are normalized
        # to dashes to match the dash-style option names used in the dict above.
        for key, value in hyperparameters.items():
            key = key.replace('_', '-')
            cmd.append('--{}'.format(key))
            cmd.append(str(value))
        # NOTE(review): neither stdout_fptr nor the stderr file handle opened
        # inline below is closed anywhere in this visible span — confirm cleanup
        # happens later, or wrap them in `with` blocks.
        stdout_fptr = open(save_path + "/Job_0.stdout", 'wt', encoding='utf-8')
        process = Popen(cmd, stdout=PIPE,
                        stderr=open(save_path + "/Job_0.stderr", 'wt', encoding='utf-8'),
                        encoding='utf-8',
                        bufsize=0,  # unbuffered, so log lines appear promptly
                        )
        # Stream the child's stdout line-by-line while it is still running...
        while process.poll() is None:
            line = process.stdout.readline()
            _write_screen_and_file(line, stdout_fptr)
        # ...then drain whatever output remains buffered in the pipe after exit.
        line = process.stdout.read()

        # special log writing for job_idx == 0
        _write_screen_and_file(line, stdout_fptr)

        if process.returncode != 0:
            raise Exception('job 0 terminated with non-zero returncode')
    # SageMaker (non-local) path — its body lies outside this visible chunk.
    else:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



scripts/launch_sagemaker_unlikelihood_xsum.py [69:109]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Tail of the `hyperparameters` dict (opened above this visible chunk);
        # every entry is forwarded to train.py as a --key value CLI pair below.
        "disable-validation": "True",
        "no-last-checkpoints": "True",
        "use-hellinger-loss": 0,
        "valid-subset": 'train'
    }

    # Local (non-SageMaker) execution path: run train.py directly via Popen and
    # mirror its stdout to both the screen and a per-job log file.
    # NOTE(review): this span is byte-identical to the same lines in
    # launch_sagemaker_unlikelihood_cnndm.py — consider extracting a shared
    # helper so the two launchers cannot drift apart.
    if args.exp_type == "local":
        train_instance_type = 'local'
        train_instance_count = 1

        # Placeholders — fill these in before running locally.
        train_path = "<data-bin location>"
        init_path = "<MLE finetuned BART checkpoint dir>"
        save_path = "<model output location>"

        ngpus = 4 # modify based on the number of GPUs on the local machine.
        cmd = ['python', 'train.py', ]
        cmd += ['--save_dir', save_path]
        cmd += ['--train', train_path]
        cmd += ['--pretrained_path', init_path]
        cmd += ['--ngpus', '{}'.format(ngpus)]
        # Forward each hyperparameter as a CLI flag; underscores are normalized
        # to dashes to match the dash-style option names used in the dict above.
        for key, value in hyperparameters.items():
            key = key.replace('_', '-')
            cmd.append('--{}'.format(key))
            cmd.append(str(value))
        # NOTE(review): neither stdout_fptr nor the stderr file handle opened
        # inline below is closed anywhere in this visible span — confirm cleanup
        # happens later, or wrap them in `with` blocks.
        stdout_fptr = open(save_path + "/Job_0.stdout", 'wt', encoding='utf-8')
        process = Popen(cmd, stdout=PIPE,
                        stderr=open(save_path + "/Job_0.stderr", 'wt', encoding='utf-8'),
                        encoding='utf-8',
                        bufsize=0,  # unbuffered, so log lines appear promptly
                        )
        # Stream the child's stdout line-by-line while it is still running...
        while process.poll() is None:
            line = process.stdout.readline()
            _write_screen_and_file(line, stdout_fptr)
        # ...then drain whatever output remains buffered in the pipe after exit.
        line = process.stdout.read()

        # special log writing for job_idx == 0
        _write_screen_and_file(line, stdout_fptr)

        if process.returncode != 0:
            raise Exception('job 0 terminated with non-zero returncode')
    # SageMaker (non-local) path — its body lies outside this visible chunk.
    else:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



