scripts/launcher_distributed_kd.py [347:411]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    print(f"Use Downloaded Model: {args.use_downloaded_model}")
    print(f"Type of use_downloaded_model: {type(args.use_downloaded_model)}")
    print(f"Action: {args.tune_action}")
    check_pytorch_version()


def completion_status():
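    """Print a completion banner and list the model and quantized-model output directories."""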
    print("***** Finished Task *****")

    list_model_dir = f"ls -ltr {args.model_dir}"
    run_command(list_model_dir)

    list_quantized_model_dir = f"ls -ltr {args.model_dir}/quantized"
    run_command(list_quantized_model_dir)


def training_function():
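    """Dispatch each comma-separated entry in args.tune_action to its handler."""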

    print_env_vars()

    # Step 1: Map values to functions
    function_map = {
        "fine-tune": finetune_model,
        "run-eval": run_eval,
        "run-quant": run_quant,
    }

    # Step 2: Iterate through the array and call the corresponding functions
    for value in args.tune_action.split(","):
        if value in function_map:
            print(f"function_key: {value}")
            try:
                function_map[value]()
            except Exception as e:
                print(f"An error occurred in function {value}: {e}")
                raise  # re-raise bare to keep the original traceback intact
        else:
            print(f"No function defined for value {value}")


if __name__ == "__main__":

    report_error = 0
    args, _ = parse_arge()
    print(args)

    # get the current working directory
    current_working_directory = os.getcwd()

    # print output to the console
    print(current_working_directory)

    jinja_env = jinja2.Environment()

    # Dynamically modify fine-tuning yaml file.
    template = jinja_env.from_string(Path(args.tune_finetune_yaml).read_text())
    train_path = os.path.join(args.train_dir, "train.jsonl")
    metric_logger = "DiskLogger"
    if args.wandb_api_key:  # a non-empty key switches logging to Weights & Biases
        metric_logger = "WandBLogger"

    Path(args.tune_finetune_yaml).write_text(  # render and overwrite the config in place
        template.render(
            train_path=train_path,
            log_dir=args.log_dir,
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
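
A minimal sketch of the templating step above, assuming the fine-tune YAML is a
Jinja template that exposes train_path and log_dir as variables (the full list
of render arguments is truncated in this excerpt, and the paths below are
illustrative placeholders):

    import jinja2
    from pathlib import Path

    yaml_path = Path("finetune.yaml")  # hypothetical config path
    template = jinja2.Environment().from_string(yaml_path.read_text())
    rendered = template.render(
        train_path="/opt/ml/input/data/train/train.jsonl",  # example value
        log_dir="/opt/ml/output/logs",  # example value
    )
    yaml_path.write_text(rendered)  # overwrite the config in place, as the launcher does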



scripts/launcher_single.py [276:341]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    print(f"Use Downloaded Model: {args.use_downloaded_model}")
    print(f"Type of use_downloaded_model: {type(args.use_downloaded_model)}")
    print(f"Action: {args.tune_action}")

    check_pytorch_version()


def completion_status():
    print("***** Finished Task *****")

    list_model_dir = f"ls -ltr {args.model_dir}"
    run_command(list_model_dir)

    list_quantized_model_dir = f"ls -ltr {args.model_dir}/quantized"
    run_command(list_quantized_model_dir)


def training_function():

    print_env_vars()

    # Step 1: Map values to functions
    function_map = {
        "fine-tune": finetune_model,
        "run-eval": run_eval,
        "run-quant": run_quant,
    }

    # Step 2: Iterate through the array and call the corresponding functions
    for value in args.tune_action.split(","):
        if value in function_map:
            print(f"function_key: {value}")
            try:
                function_map[value]()
            except Exception as e:
                print(f"An error occurred in function {value}: {e}")
                raise  # re-raise bare to keep the original traceback intact
        else:
            print(f"No function defined for value {value}")


if __name__ == "__main__":

    report_error = 0
    args, _ = parse_arge()
    print(args)

    # get the current working directory
    current_working_directory = os.getcwd()

    # print output to the console
    print(current_working_directory)

    jinja_env = jinja2.Environment()

    # Dynamically modify fine-tuning yaml file.
    template = jinja_env.from_string(Path(args.tune_finetune_yaml).read_text())
    train_path = os.path.join(args.train_dir, "train.jsonl")
    metric_logger = "DiskLogger"
    if args.wandb_api_key:  # a non-empty key switches logging to Weights & Biases
        metric_logger = "WandBLogger"

    Path(args.tune_finetune_yaml).write_text(  # render and overwrite the config in place
        template.render(
            train_path=train_path,
            log_dir=args.log_dir,
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
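
The comma-separated action dispatch shared by both launchers can be exercised
on its own; a minimal sketch with stub handlers standing in for the real
finetune_model, run_eval, and run_quant:

    def finetune_model():
        print("fine-tuning...")

    def run_quant():
        print("quantizing...")

    function_map = {"fine-tune": finetune_model, "run-quant": run_quant}
    for value in "fine-tune,run-quant,bogus".split(","):
        if value in function_map:
            function_map[value]()
        else:
            print(f"No function defined for value {value}")  # "bogus" lands here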



