in tools/build-release.py [0:0]
def build_release(email_address):
    """Assemble the Apache YuniKorn source release package in the staging area.

    Reads release-configs.json from the directory containing this script,
    checks out every configured repository, stamps versions and commit SHAs,
    builds the helm chart, computes reproducible-build hashes for the web and
    k8shim binaries (amd64 + arm64), and finally writes the release source
    tarball plus its checksum. When an email address is given, the tarball is
    also GPG-signed with that identity.

    :param email_address: signer identity for helm/GPG; falsy to skip signing
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    # load configs
    config_path = os.path.join(script_dir, "release-configs.json")
    with open(config_path) as config_fp:
        try:
            data = json.load(config_fp)
        except json.JSONDecodeError:
            fail("load config: unexpected json decode failure")

    # validate the pieces of the config we depend on
    if "release" not in data:
        fail("load config: release data not found")
    release_meta = data["release"]
    if "version" not in release_meta:
        fail("load config: version data not found in release")
    version = release_meta["version"]
    release_package_name = f"apache-yunikorn-{version}-src"
    if "repositories" not in data:
        fail("load config: repository list not found")
    repo_list = data["repositories"]

    print("release meta info:")
    print(f" - main version: {version}")
    print(f" - release package name: {release_package_name}")

    # all artifact paths hang off the parent of the tools directory
    project_root = os.path.dirname(script_dir)
    staging_dir = os.path.join(project_root, "staging")
    release_base = os.path.join(staging_dir, release_package_name)
    release_top_path = os.path.join(project_root, "release-top-level-artifacts")
    helm_chart_path = os.path.join(project_root, "helm-charts")

    def make_tarball(path):
        # gzip-compressed tar of the release tree, filtered through exclude_files
        print(f"creating tarball {path}")
        with tarfile.open(path, "w:gz") as archive:
            archive.add(release_base, arcname=release_package_name, filter=exclude_files)

    # setup artifacts in the release base dir
    setup_base_dir(release_top_path, helm_chart_path, release_base, version)

    # download source code from github repo
    sha = {}
    for repo_meta in repo_list:
        if "name" not in repo_meta:
            fail("repository name missing in repo list")
        name = repo_meta["name"]
        if "alias" not in repo_meta:
            fail("repository alias missing in repo list")
        alias = repo_meta["alias"]
        sha[name] = download_sourcecode(release_base, repo_meta)
        update_make_version(name, os.path.join(release_base, alias), version)

    # update required Golang, NodeJS and Angular versions
    update_required_versions(release_base, repo_list)
    # update the sha for all repos in the build scripts
    # must be run after all repos have been checked out
    update_sha(release_base, repo_list, sha)
    # merge licenses for anything that was added not part of Apache
    merge_licenses(release_base, repo_list)
    # build the helm package
    call_helm(staging_dir, release_base, version, email_address)
    # Ensure release dirs are clean (and generate build.date files)
    clean_release(release_base)

    # generate staging source code tarball
    staging_tarball = os.path.join(staging_dir, release_package_name + "-staging.tar.gz")
    make_tarball(staging_tarball)

    # generate yunikorn-web reproducible binaries
    web_hashes_amd64 = build_web_and_generate_hashes(staging_dir, release_package_name, "x86_64")
    web_hashes_arm64 = build_web_and_generate_hashes(staging_dir, release_package_name, "aarch64")
    # generate yunikorn-k8shim reproducible binaries
    shim_hashes_amd64 = build_shim_and_generate_hashes(staging_dir, release_package_name, "x86_64")
    shim_hashes_arm64 = build_shim_and_generate_hashes(staging_dir, release_package_name, "aarch64")
    # merge hashes (shim first, matching the README layout)
    hashes_amd64 = "\n".join([shim_hashes_amd64, web_hashes_amd64])
    hashes_arm64 = "\n".join([shim_hashes_arm64, web_hashes_arm64])

    # remove staging tarball
    os.remove(staging_tarball)

    # update reproducible build information in README
    update_reproducible_build_info(release_base, get_go_version(), hashes_amd64, hashes_arm64)

    # generate final source code tarball
    tarball_name = release_package_name + ".tar.gz"
    final_tarball = os.path.join(staging_dir, tarball_name)
    make_tarball(final_tarball)
    write_checksum(final_tarball, tarball_name)
    if email_address:
        call_gpg(final_tarball, email_address)