# _main() — excerpt from odps_scripts/pyodps_pack.py

def _main(parsed_args):
    """Run the ``pyodps-pack`` build workflow for the parsed CLI arguments.

    Collects package specifiers, resolves PyPI configuration, prepares a
    temporary work directory and then builds the package bundle either
    inside Docker, inside the cluster, or locally (``--no-docker``).

    Args:
        parsed_args: argparse Namespace produced by the command-line parser.

    Returns:
        int: the return code of the build subprocess.

    Raises:
        PackCommandException: when argument combinations are invalid or no
            requirement is supplied.
        PackException: when the build executable (bash / docker) is missing.
    """
    if parsed_args.debug:
        logging.basicConfig(level=logging.DEBUG)
        logger.info(
            "System environment variables: %s", json.dumps(dict(os.environ), indent=2)
        )

    if parsed_args.pack_env:
        # --pack-env replaces explicit specifiers; both together is ambiguous.
        if parsed_args.specifiers:
            raise PackCommandException(
                "ERROR: Cannot supply --pack-env with other package specifiers."
            )
        parsed_args.specifiers = _collect_env_packages(
            parsed_args.exclude_editable, parsed_args.exclude, parsed_args.index_url
        )

    _filter_local_package_paths(parsed_args)
    _collect_install_requires(parsed_args)

    if (
        not parsed_args.specifiers
        and not parsed_args.package_path
        and not parsed_args.vcs_urls
    ):
        raise PackCommandException(
            "ERROR: You must give at least one requirement to install."
        )

    file_cfg = _get_default_pypi_config()

    def _first_or_none(list_val):
        # pip config values are lists; we only honor the first entry.
        return list_val[0] if list_val else None

    # Command-line value wins over pip config file; empty string means default.
    index_url = parsed_args.index_url or _first_or_none(file_cfg.get("index-url")) or ""

    if index_url:
        logger.debug("Using PyPI index %s", index_url)
    else:
        logger.debug("Using default PyPI index")

    # Boolean options are passed to the build scripts as "true"/"" env-style
    # strings; use_pep517 is tri-state ("none"/"true"/"false").
    prefer_binary_str = "true" if parsed_args.prefer_binary else ""
    no_deps_str = "true" if parsed_args.no_deps else ""
    debug_str = "true" if parsed_args.debug else ""
    no_merge_str = "true" if parsed_args.no_merge else ""
    use_pep517_str = str(parsed_args.use_pep517).lower()
    check_build_dependencies_str = (
        "true" if parsed_args.check_build_dependencies else ""
    )
    skip_scan_pkg_resources_str = "true" if parsed_args.skip_scan_pkg_resources else ""
    pre_str = "true" if parsed_args.pre else ""
    timeout_str = parsed_args.timeout or _first_or_none(file_cfg.get("timeout")) or ""
    proxy_str = parsed_args.proxy or _first_or_none(file_cfg.get("proxy")) or ""
    retries_str = parsed_args.retries or _first_or_none(file_cfg.get("retries")) or ""
    dynlibs_str = " ".join(parsed_args.dynlib)

    python_abi_version = _get_python_abi_version(
        parsed_args.python_version, parsed_args.mcpy27, parsed_args.dwpy27
    )

    # Merge CLI-supplied and pip-config-supplied lists for these options.
    extra_index_urls = (parsed_args.extra_index_url or []) + (
        file_cfg.get("extra-index-url") or []
    )
    extra_index_urls_str = " ".join(extra_index_urls)

    trusted_hosts = (parsed_args.trusted_host or []) + (
        file_cfg.get("trusted-host") or []
    )
    trusted_hosts_str = " ".join(trusted_hosts)

    with _create_temp_work_dir(
        parsed_args.specifiers,
        parsed_args.vcs_urls,
        parsed_args.install_requires,
        parsed_args.exclude,
        parsed_args.run_before,
        pypi_pre=pre_str,
        pypi_index=index_url,
        pypi_extra_index_urls=extra_index_urls_str,
        pypi_proxy=proxy_str,
        pypi_retries=retries_str,
        pypi_timeout=timeout_str,
        prefer_binary=prefer_binary_str,
        use_pep517=use_pep517_str,
        check_build_dependencies=check_build_dependencies_str,
        skip_scan_pkg_resources=skip_scan_pkg_resources_str,
        no_deps=no_deps_str,
        no_merge=no_merge_str,
        python_abi_version=python_abi_version,
        pypi_trusted_hosts=trusted_hosts_str,
        dynlibs=dynlibs_str,
        debug=debug_str,
    ) as work_dir:
        container_name = "pack-cnt-%d" % int(time.time())

        use_legacy_image = (
            parsed_args.legacy_image or parsed_args.mcpy27 or parsed_args.dwpy27
        )
        default_image = _get_default_image(use_legacy_image, parsed_args.arch)
        docker_image = docker_image_env or default_image

        minikube_mount_proc = None
        if pack_in_cluster or parsed_args.no_docker:
            # Build without a local Docker daemon: run the pack script through
            # bash, either inside the cluster or directly on this machine.
            _, rel_dirs = _copy_package_paths(
                parsed_args.package_path,
                work_dir,
                skip_user_path=False,
                find_vcs_root=parsed_args.find_vcs_root,
            )

            pyversion, pyabi = python_abi_version.split("-", 1)
            pyversion = pyversion[2:]  # strip the "cp" prefix, e.g. cp311 -> 311
            build_cmd = [
                _get_bash_path(),
                os.path.join(work_dir, "scripts", _PACK_SCRIPT_FILE_NAME),
            ]
            build_env = {
                "PACK_ROOT": str(work_dir),
                "PYPLATFORM": default_image.replace("quay.io/pypa/", ""),
                "PYVERSION": pyversion,
                "PYABI": pyabi,
                "TARGET_ARCH": _get_arch(parsed_args.arch),
            }
            if rel_dirs:
                build_env["SRC_PACKAGE_PATHS"] = ":".join(rel_dirs)
            if parsed_args.no_docker:
                build_env["NON_DOCKER_MODE"] = "true"
                build_env["PYEXECUTABLE"] = _get_local_pack_executable(work_dir)
            else:
                # In-cluster mode inherits the caller environment and overlays
                # the pack-specific variables on top.
                temp_env = build_env
                build_env = os.environ.copy()
                build_env.update(temp_env)
                build_env["PACK_IN_CLUSTER"] = "true"
            build_cwd = os.getcwd()
            logger.debug("Command: %r", build_cmd)
            logger.debug("Environment variables: %r", build_env)
        else:
            build_cmd = _build_docker_run_command(
                container_name,
                docker_image,
                work_dir,
                parsed_args.package_path,
                parsed_args.docker_args,
                find_vcs_root=parsed_args.find_vcs_root,
            )
            build_cmd, minikube_mount_proc = _rewrite_minikube_command(build_cmd)
            build_env = None
            build_cwd = None
            logger.debug("Docker command: %r", build_cmd)

        try:
            proc = subprocess.Popen(build_cmd, env=build_env, cwd=build_cwd)
        except OSError as ex:
            # Only translate "executable not found"; re-raise everything else.
            if ex.errno != errno.ENOENT:
                raise

            logger.error(
                "Failed to execute command %r, the error message is %s.", build_cmd, ex
            )
            if pack_in_cluster or parsed_args.no_docker:
                if _is_windows:
                    raise PackException(
                        "Cannot locate git bash. Please install Git for Windows or "
                        "try WSL instead."
                    )
                else:
                    # in MacOS or Linux, this error is not a FAQ, thus just raise it
                    raise
            else:
                raise PackException(
                    "Cannot locate docker. Please install it, reopen your terminal and "
                    "retry. Or you may try `--no-docker` instead. If you've already "
                    "installed Docker, you may specify the path of its executable via "
                    "DOCKER_PATH environment."
                )
        cancelled = False
        try:
            proc.wait()
        except KeyboardInterrupt:
            cancelled = True
            if not parsed_args.no_docker and not pack_in_cluster:
                # Stop the container so the interrupted `docker run` terminates.
                docker_rm_cmd = _build_docker_rm_command(container_name)
                logger.debug("Docker rm command: %r", docker_rm_cmd)
                rm_proc = subprocess.Popen(docker_rm_cmd, stdout=subprocess.PIPE)
                proc.wait()
                # Reap the docker-rm process to avoid leaving a zombie behind.
                rm_proc.wait()
        finally:
            if minikube_mount_proc is not None:
                minikube_mount_proc.terminate()

        if proc.returncode != 0:
            # The build script drops a sentinel file when cancelled from inside.
            cancelled = cancelled or os.path.exists(
                os.path.join(work_dir, "scripts", ".cancelled")
            )
            if cancelled:
                _print_warning("Cancelled by user.")
            else:
                if parsed_args.no_docker:
                    _print_fail(
                        "Errors occurred when creating your package. This is often caused "
                        "by mismatching Python version, platform or architecture when "
                        "encountering binary packages. Please check outputs for details. "
                        "You may try building your packages inside Docker by removing "
                        "--no-docker option, which often resolves the issue."
                    )
                else:
                    _print_fail(
                        "Errors occurred when creating your package. Please check outputs "
                        "for details. You may add a `--debug` option to obtain more "
                        "information. Please provide all outputs with `--debug` specified "
                        "when you are seeking for help from MaxCompute assisting team."
                    )

                if proc.returncode == _SEGFAULT_ERR_CODE and use_legacy_image:
                    _print_fail(
                        "Image manylinux1 might crash silently under some Docker environments. "
                        "You may try under a native Linux environment. Details can be seen at "
                        "https://mail.python.org/pipermail/wheel-builders/2016-December/000239.html."
                    )
                elif _is_linux and "SUDO_USER" not in os.environ:
                    _print_fail(
                        "You need to run pyodps-pack with sudo to make sure docker is "
                        "executed properly."
                    )
        else:
            if parsed_args.no_merge:
                # Move every produced wheel into the current directory.
                src_path = os.path.join(work_dir, "wheelhouse", "*.whl")
                for wheel_name in glob.glob(src_path):
                    shutil.move(wheel_name, os.path.basename(wheel_name))
            else:
                src_path = os.path.join(work_dir, "wheelhouse", _DEFAULT_OUTPUT_FILE)
                shutil.move(src_path, parsed_args.output)

            if _is_linux and "SUDO_UID" in os.environ and "SUDO_GID" in os.environ:
                # Results were created as root under sudo; hand ownership back
                # to the invoking user.
                own_desc = "%s:%s" % (os.environ["SUDO_UID"], os.environ["SUDO_GID"])
                # chown is launched without a shell, so a literal "*.whl"
                # pattern would never be expanded — pass real file names.
                if parsed_args.no_merge:
                    target_paths = glob.glob("*.whl")
                else:
                    target_paths = [parsed_args.output]
                if target_paths:
                    chown_proc = subprocess.Popen(["chown", own_desc] + target_paths)
                    chown_proc.wait()

            if parsed_args.no_merge:
                print("Result wheels stored at current dir")
            else:
                print("Result package stored as %s" % parsed_args.output)
        return proc.returncode