def install_custom_scripts()

in community/modules/scheduler/schedmd-slurm-gcp-v6-controller/modules/slurm_files/scripts/util.py [0:0]


def install_custom_scripts(check_hash=False):
    """download custom scripts from gcs bucket"""
    role, tokens = lookup().instance_role, []

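    # Pick the script categories ("tokens") that apply to this instance's Slurm role.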
    if role == "controller":
        tokens = ["controller", "prolog", "epilog"]
    elif role == "compute":
        tokens = [
            "prolog", 
            "epilog",
            f"nodeset-{lookup().node_nodeset_name()}"
        ]
    elif role == "login":
        tokens = [f"login-{instance_login_group()}"]

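    # Each token maps to a GCS object-name prefix, e.g. "prolog" -> "slurm-prolog-script".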
    prefixes = [f"slurm-{tok}-script" for tok in tokens]

    # TODO: use single `blob_list`, to reduce ~4x number of GCS requests
    blobs = list(chain.from_iterable(blob_list(prefix=p) for p in prefixes))

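    # Blob basenames follow "slurm-<path>-script-<name>": <path> becomes the install
    # directory (first component suffixed with ".d"); the last "_" in <name> marks the
    # file extension.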
    script_pattern = re.compile(r"slurm-(?P<path>\S+)-script-(?P<name>\S+)")
    for blob in blobs:
        m = script_pattern.match(Path(blob.name).name)
        if not m:
            log.warning(f"found blob that doesn't match expected pattern: {blob.name}")
            continue
        path_parts = m["path"].split("-")
        path_parts[0] += ".d"
        stem, _, ext = m["name"].rpartition("_")
        filename = ".".join((stem, ext))

        path = Path(*path_parts, filename)
        fullpath = (dirs.custom_scripts / path).resolve()
        mkdirp(fullpath.parent)

        for par in path.parents:
            chown_slurm(dirs.custom_scripts / par)
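        # Re-download unless hash checking is enabled and the local file's MD5 already
        # matches the blob's.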
        need_update = True
        if check_hash and fullpath.exists():
            # TODO: MD5 reported by gcloud may differ from the one calculated here (e.g. if blob got gzipped),
            # consider using gCRC32C
            need_update = hash_file(fullpath) != blob.md5_hash
        if need_update:
            log.info(f"installing custom script: {path} from {blob.name}")
            with fullpath.open("wb") as f:
                blob.download_to_file(f)
            chown_slurm(fullpath, mode=0o755)
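
The non-obvious part of the function is the regex-driven renaming of blob basenames into install paths. The sketch below isolates just that mapping; the helper name `mapped_path` and the example basenames are illustrative assumptions, not part of util.py.

import re
from pathlib import Path

script_pattern = re.compile(r"slurm-(?P<path>\S+)-script-(?P<name>\S+)")

def mapped_path(blob_basename):
    """Return the relative install path for a blob basename, or None if it doesn't match."""
    m = script_pattern.match(blob_basename)
    if not m:
        return None
    # "prolog" -> "prolog.d"; "nodeset-gpu" -> "nodeset.d/gpu"
    path_parts = m["path"].split("-")
    path_parts[0] += ".d"
    # The last "_" separates the script stem from its extension: "10_report_sh" -> "10_report.sh"
    stem, _, ext = m["name"].rpartition("_")
    return Path(*path_parts, ".".join((stem, ext)))

# Hypothetical blob basenames, for illustration only:
assert mapped_path("slurm-prolog-script-10_report_sh") == Path("prolog.d/10_report.sh")
assert mapped_path("slurm-nodeset-gpu-script-setup_py") == Path("nodeset.d/gpu/setup.py")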