in main.py [0:0]
def verify_files(project: str, keychain: gnupg.GPG, is_podling: bool) -> dict:
    """Verifies all download artifacts in a project's dist directory using the supplied keychain. Returns a dict
    mapping filenames to their corresponding error messages for any checksum or signature errors found."""
    errors: typing.Dict[str, str] = dict()
    path = os.path.join(CFG["dist_dir"], project) if not is_podling else os.path.join(CFG["dist_dir"], "incubator", project)
    known_exts = CFG.get("known_extensions")
    strong_checksum_deadline = CFG.get("strong_checksum_deadline", 0)  # If applicable, only require sha1/md5 for older files
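    # Assumed shape of the global CFG used below (illustrative only; the real config is loaded elsewhere in main.py):
    #   "dist_dir":                  base directory of the dist tree
    #   "known_extensions":          artifact extensions to check, matched against the last dot-separated component
    #   "strong_checksums":          strong digest names, e.g. ["sha256", "sha512"]
    #   "weak_checksums":            legacy digest names, e.g. ["md5", "sha1"]
    #   "strong_checksum_deadline":  unix timestamp; files modified on or before it may rely on weak checksums only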
    # Check that we HAVE keys in the key chain
    if not keychain.list_keys():
        dl_files = os.listdir(path)
        if not dl_files or (len(dl_files) == 1 and dl_files[0] == ".htaccess"):  # Attic'ed project, skip it!
            return errors
        push_error(errors, "KEYS", "[CHK03] KEYS file could not be read or did not contain any valid signing keys!")
    # Now check all files...
    for root, _dirs, files in os.walk(path):
        for filename in sorted(files):
            extension = filename.split(".")[-1] if "." in filename else ""
            if extension in known_exts:
                filepath = os.path.join(root, filename)
                if os.path.islink(filepath):  # Skip symlinks
                    continue
                if "--quiet" not in sys.argv:
                    print(f"Verifying {filepath}")
                valid_checksums_found = 0
                valid_weak_checksums_found = 0
                # Verify strong checksums
                for method in CFG.get("strong_checksums"):
                    chkfile = filepath + "." + method
                    chkfile_uc = filepath + "." + method.upper()  # Uppercase extension? :(
                    if os.path.exists(chkfile) or os.path.exists(chkfile_uc):
                        file_errors = verify_checksum(filepath, method)
                        if file_errors:
                            push_error(errors, filepath, file_errors)
                        else:
                            valid_checksums_found += 1
                # Check the older/weak algos as well; these only count toward the requirement if the release is old enough (see the deadline check below)
                for method in CFG.get("weak_checksums"):
                    chkfile = filepath + "." + method
                    chkfile_uc = filepath + "." + method.upper()  # Uppercase extension? :(
                    if os.path.exists(chkfile) or os.path.exists(chkfile_uc):
                        file_errors = verify_checksum(filepath, method)
                        if file_errors:
                            push_error(errors, filepath, file_errors)
                        else:
                            valid_weak_checksums_found += 1
                # Weak checksums satisfy the requirement only for files modified on or before the strong checksum deadline
                if valid_weak_checksums_found and os.stat(filepath).st_mtime <= strong_checksum_deadline:
                    valid_checksums_found += 1
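                # For example: an artifact last modified before the deadline that ships only an .md5 passes the
                # checksum requirement here, while a newer artifact with only an .md5 is flagged by CHK02 below.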
                # Ensure we had at least one valid checksum file of any kind (for old files).
                if valid_checksums_found == 0 and os.stat(filepath).st_mtime <= strong_checksum_deadline:
                    push_error(errors, filepath, f"[CHK02] No valid checksum files (.md5, .sha1, .sha256, .sha512) found for {filename}")
                # Ensure we had at least one (valid) sha256 or sha512 file if strong checksums are enforced.
                elif valid_checksums_found == 0:
                    push_error(errors, filepath, f"[CHK02] No valid checksum files (.sha256, .sha512) found for {filename}")
                    if valid_weak_checksums_found:
                        push_error(errors, filepath, f"[CHK02] Only weak checksum files (.md5, .sha1) found for {filename}. Project MUST use sha256/sha512!")
                # Verify detached signatures
                asc_filepath = filepath + ".asc"
                if os.path.exists(asc_filepath):
                    verified = keychain.verify_file(open(asc_filepath, "rb"), data_filename=filepath)
                    if not verified.valid:
                        # Possible status values:
                        # - 'no public key'   - no further checks possible
                        # - 'signature bad'   - found the key, but the sig does not match
                        # - 'signature valid' - implies a key problem such as an expired key
                        # - None              - e.g. for a non-empty but invalid signature (at present; this may be fixed)
                        if verified.status is None or verified.status.startswith('error '):
                            push_error(errors, filepath, f"[CHK05] The signature file {filename}.asc could not be used to verify the release artifact (corrupt sig?)")
                        elif verified.status == 'no public key':
                            push_error(errors, filepath, f"[CHK01] The signature file {filename}.asc was signed with a key not found in the project's KEYS file: {verified.key_id}")
                        elif verified.status == 'signature bad':
                            # unfortunately the current version of gnupg corrupts the key_id in this case
                            push_error(errors, filepath, f"[CHK05] The signature file {filename}.asc could not be used to verify the release artifact (corrupt sig?)")
                        elif verified.status == 'signature valid':
                            # Assume we can get the key here, else how was the signature verified?
                            key = keychain.list_keys(False, [verified.key_id])[0]
                            fp_owner = key['uids'][0]  # this is always in the main key
                            if verified.key_status == 'signing key has expired':
                                if verified.key_id == key['keyid']:
                                    expires = key['expires']
                                else:  # must be a subkey
                                    expires = key['subkey_info'][verified.key_id]['expires']
                                if int(expires) < int(verified.sig_timestamp):
                                    push_error(errors, filepath, f"[CHK04] Detached signature file {filename}.asc was signed by {fp_owner} ({verified.key_id}) but the key expired before the file was signed!")
                            else:
                                push_error(errors, filepath, f"[CHK04] Detached signature file {filename}.asc was signed by {fp_owner} ({verified.key_id}) but the key has status {verified.key_status}!")
                        else:
                            push_error(errors, filepath, f"[CHK05] Detached signature file {filename}.asc could not be used to verify {filename}: {verified.status}")
                else:
                    push_error(errors, filepath, f"[CHK05] No detached signature file could be found for {filename} - all artifact bundles MUST have an accompanying .asc signature file!")
    return errors
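

# ---------------------------------------------------------------------------
# Illustrative sketches of the two helpers called above. These are assumptions
# added for readability only: the real push_error() and verify_checksum() are
# defined elsewhere in main.py and may differ in detail.
# ---------------------------------------------------------------------------
import hashlib  # only needed by the verify_checksum sketch below


def push_error(errors: dict, key: str, error: str) -> None:
    """Sketch: record an error message for `key` (a file path or "KEYS"),
    appending to any messages already recorded for that key."""
    if key in errors:
        errors[key] += "\n" + error
    else:
        errors[key] = error


def verify_checksum(filepath: str, method: str) -> typing.Optional[str]:
    """Sketch: recompute the digest of `filepath` using `method` (e.g. "sha512") and compare it
    against the value stored in the accompanying checksum file. Returns an error string on a
    mismatch or an unreadable checksum file, or None if the checksum matches."""
    chkfile = filepath + "." + method
    if not os.path.exists(chkfile):
        chkfile = filepath + "." + method.upper()
    try:
        with open(chkfile, "r", encoding="utf-8", errors="ignore") as f:
            stored = f.read().split()[0].strip().lower()
    except (OSError, IndexError):
        return f"Could not read checksum file {chkfile}"
    digest = hashlib.new(method)
    with open(filepath, "rb") as f:
        for block in iter(lambda: f.read(65536), b""):
            digest.update(block)
    if digest.hexdigest().lower() != stored:
        return f"Checksum file {chkfile} does not match the computed {method} digest of the artifact"
    return None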