def main()

in defs/gcs_uploader.py


import atexit
import errno
import os
import shutil
import subprocess
import sys
import tempfile


def main(argv):
    scratch = tempfile.mkdtemp(prefix="bazel-gcs.")
    atexit.register(lambda: shutil.rmtree(scratch))

    workspace_status = _workspace_status_dict(argv.root)
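    # Each manifest line names one artifact as a tab-separated pair: a
    # source path relative to argv.root, and a destination directory. The
    # destination may contain {KEY} placeholders filled from the workspace
    # status values returned by _workspace_status_dict (a helper defined
    # elsewhere in this file). A hypothetical manifest line:
    #   cmd/foo/foo<TAB>releases/{STABLE_BUILD_SCM_REVISION}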
    with open(argv.manifest) as manifest:
        for artifact in manifest:
            artifact = artifact.strip("\n")
            src_file, dest_dir = artifact.split("\t")
            dest_dir = dest_dir.format(**workspace_status)
            scratch_dest_dir = os.path.join(scratch, dest_dir)
            try:
                os.makedirs(scratch_dest_dir)
            except OSError as err:
                # Tolerate "directory already exists" only; re-raise
                # anything else.
                if err.errno != errno.EEXIST:
                    raise

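            # Stage the artifact into the scratch tree as a symlink so a
            # single rsync of the scratch directory uploads everything.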
            src = os.path.join(argv.root, src_file)
            dest = os.path.join(scratch_dest_dir, os.path.basename(src_file))
            os.symlink(src, dest)

    ret = 0
    uploaded_paths = []
    for gcs_path in argv.gcs_paths:
        gcs_path = gcs_path.format(**workspace_status)
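        # Destinations may be local paths (either file:// URLs or bare
        # paths) rather than gs:// URLs; detect those so the directory can
        # be pre-created and parallel mode disabled below.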
        local_path = None
        if gcs_path.startswith("file://"):
            local_path = gcs_path[len("file://"):]
        elif "://" not in gcs_path:
            local_path = gcs_path
        if local_path and not os.path.exists(local_path):
            os.makedirs(local_path)

        cmd = ["gsutil"]
        # When rsyncing to a local directory, parallelization thrashes the disk.
        # It also seems to be buggy, causing frequent "File exists" errors.
        # To mitigate, only use parallel mode when rsyncing to a remote path.
        if not local_path:
            cmd.append("-m")
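        # -C continues past per-file copy errors; -r recurses into scratch.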
        cmd.extend(["rsync", "-C", "-r", scratch, gcs_path])
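        # OR exit codes together so a failure against any destination
        # produces a non-zero exit at the end.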
        ret |= subprocess.call(cmd)

        uploaded_paths.append(gcs_path)

    print("Uploaded to %s" % " ".join(uploaded_paths))
    sys.exit(ret)
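

# The code above expects argv to expose manifest, root, and gcs_paths
# attributes. Below is a minimal sketch of command-line wiring that would
# satisfy that interface, assuming argparse; the real parser in
# defs/gcs_uploader.py may differ in flag names and help text.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Stage Bazel artifacts and rsync them to GCS.")
    parser.add_argument("--manifest", required=True,
                        help="tab-separated manifest of artifacts to upload")
    parser.add_argument("--root", required=True,
                        help="workspace root that manifest paths are relative to")
    parser.add_argument("gcs_paths", nargs="+",
                        help="one or more gs:// (or local) destination paths")
    main(parser.parse_args())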