diff --git a/Makefile b/Makefile
index 193bb525..8163b12f 100644
--- a/Makefile
+++ b/Makefile
@@ -28,7 +28,6 @@ install-deps: ## Install initial Debian packaging dependencies
 
 .PHONY: requirements
 requirements: ## Creates requirements files for the Python projects
-	./scripts/create-requirements
 	./scripts/update-requirements
 
 .PHONY: build-wheels
diff --git a/scripts/build-debianpackage b/scripts/build-debianpackage
index c2543d62..bf60e644 100755
--- a/scripts/build-debianpackage
+++ b/scripts/build-debianpackage
@@ -70,7 +70,7 @@ cp -r "$CUR_DIR/$PKG_NAME/debian" "$TOP_BUILDDIR/$PKG_NAME/"
 cd "$TOP_BUILDDIR/$PKG_NAME/"
 
 # Verify all the hashes from the verified sha256sums.txt
-$CUR_DIR/scripts/verify-hashes $CUR_DIR/sha256sums.txt ./requirements.txt
+$CUR_DIR/scripts/verify-hashes $CUR_DIR/sha256sums.txt ./build-requirements.txt
 
 echo "All hashes verified."
diff --git a/scripts/build-sync-wheels b/scripts/build-sync-wheels
index 6ff04f76..14eff2e5 100755
--- a/scripts/build-sync-wheels
+++ b/scripts/build-sync-wheels
@@ -26,29 +26,13 @@ def main():
         print("Project directory missing {0}.".format(args.p))
         sys.exit(1)
 
-    lockfile_path = os.path.join(args.p, "Pipfile.lock")
+    req_path = os.path.join(args.p, "requirements.txt")
 
-    if not os.path.exists(lockfile_path):
-        print("Pipfile.lock missing at {0}.".format(lockfile_path))
+    if not os.path.exists(req_path):
+        print("requirements.txt missing at {0}.".format(req_path))
         sys.exit(3)
 
-    with open(lockfile_path) as fobj:
-        data = json.load(fobj)
-
-    defaults = data["default"]
-
     with tempfile.TemporaryDirectory() as tmpdir:
-        newreq_path = os.path.join(tmpdir, "requirements.txt")
-        # First let us create the requirements.txt file with hashes
-        # from Pipfile.lock, these hashes are from upstream PyPI.
-
-        with open(newreq_path, "w") as fobj:
-            for name in defaults:
-                if name in ["pyqt5", "sip"]:  # These will come from Debian
-                    continue
-                package_name = "{}{}".format(name, defaults[name]["version"])
-                hashes = " ".join(["--hash={}".format(value) for value in defaults[name]["hashes"]])
-                fobj.write("{} {}\n".format(package_name,hashes))
-
         # The --require-hashes option will be used by default if there are
         # hashes in the requirements.txt file. We specify it anyway to guard
         # against use of a requirements.txt file without hashes.
@@ -61,7 +45,7 @@ def main():
             "-d",
             tmpdir,
             "-r",
-            newreq_path,
+            req_path,
         ]
 
         subprocess.check_call(cmd)
@@ -75,7 +59,7 @@ def main():
             "-w",
             tmpdir,
             "-r",
-            newreq_path,
+            req_path,
         ]
 
         subprocess.check_call(cmd)
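With the Pipfile.lock parsing removed, `build-sync-wheels` now feeds the project's own hash-pinned `requirements.txt` straight to pip: one invocation downloads the pinned sources, a second builds wheels from them. A minimal standalone sketch of that two-step flow, assuming a hash-pinned `requirements.txt` in the current directory (the `pip3` binary name and the exact flags are illustrative, not the script's literal argument list):

```python
#!/usr/bin/env python3
# Sketch: download hash-pinned sources, then build wheels from them.
import subprocess
import tempfile

req_path = "requirements.txt"  # hypothetical input for this example

with tempfile.TemporaryDirectory() as tmpdir:
    # --require-hashes makes pip refuse any requirement that lacks a
    # pinned hash, or whose downloaded artifact does not match one.
    subprocess.check_call([
        "pip3", "download", "--require-hashes",
        "-d", tmpdir, "-r", req_path,
    ])
    # Build wheels for the same requirements; the downloaded,
    # hash-verified sources in tmpdir feed this step.
    subprocess.check_call([
        "pip3", "wheel", "-w", tmpdir, "-r", req_path,
    ])
```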
diff --git a/scripts/create-requirements b/scripts/create-requirements
deleted file mode 100755
index c6271b7b..00000000
--- a/scripts/create-requirements
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-# Wrapper script to create/update the requirements.txt
-# It finds the correct project directory from the `PKG_DIR` envirionment variable.
-#
-
-set -e
-set -u
-set -o pipefail
-
-if [[ -z "${PKG_DIR:-}" ]]; then
-    echo "Set PKG_DIR of the project.";
-    exit 1
-fi
-
-cd $PKG_DIR
-
-# Now create both of the files
-pipenv lock -r > requirements.txt
diff --git a/scripts/update-requirements b/scripts/update-requirements
index 80001221..b7078b97 100755
--- a/scripts/update-requirements
+++ b/scripts/update-requirements
@@ -12,19 +12,27 @@ from pprint import pprint
 
 def main():
     PKG_DIR = os.environ["PKG_DIR"]
-    req_path = os.path.join(PKG_DIR, "requirements.txt")
+    if not PKG_DIR:
+        print("Set PKG_DIR of the project")
+        sys.exit(1)
+
+    input_requirements_path = os.path.join(
+        PKG_DIR, "requirements.txt")
 
-    if not os.path.exists(req_path):
-        print("Missing {0}.".format(req_path))
+    output_requirements_path = os.path.join(
+        PKG_DIR, "build-requirements.txt")
+
+    if not os.path.exists(input_requirements_path):
+        print("Missing {0}.".format(input_requirements_path))
         sys.exit(1)
 
     # First remove index line and any PyQt or sip dependency
-    cleanup(req_path)
+    cleaned_lines = cleanup(input_requirements_path)
 
     verify_sha256sums_file()
 
     # Now let us update the files along with the sha256sums from localwheels
-    add_sha256sums(req_path)
+    add_sha256sums(output_requirements_path, cleaned_lines)
 
 def verify_sha256sums_file():
     "Verifies the sha256sums.txt file with gpg signature"
@@ -42,11 +50,13 @@ def verify_sha256sums_file():
     cmd = ["./scripts/verify-sha256sum-signature"]
     subprocess.check_call(cmd)
 
-def add_sha256sums(path: str) -> None:
+def add_sha256sums(path: str, requirements_lines: list) -> None:
     """Adds all the required sha256sums to the wheels
 
     :param path: path to the requirements file.
    :type path: str
+    :param requirements_lines: cleaned lines from the requirements file.
+    :type requirements_lines: list
     :return: None
     :rtype: None
     """
@@ -62,40 +72,39 @@ def add_sha256sums(path: str) -> None:
             continue
         files.append(line.split())
 
-    with open(path) as fobj:
-        lines = fobj.readlines()
-
     newlines = []
     missing_wheels = []
-    for line in lines:
-        line = line.strip()
-        words = line.split()
-        values = words[0].split("==")
-        values[1] = values[1].strip(';')
-        packagename = "-".join(values)
-        othername = "-".join([values[0].replace("-", "_"), values[1]])
+
+    # For each dependency in the requirements file
+    for line in requirements_lines:
+        package_name_and_version = line.strip().split()[0]
+        package_name = package_name_and_version.split('==')[0]
+        package_version = package_name_and_version.split('==')[1]
+
+        wheel_name_prefix = '{}-{}'.format(package_name, package_version)
+        package_othername = '{}-{}'.format(package_name.replace("-", "_"), package_version)
 
         for name in files:
             lowername = name[1].lower()
            digest = name[0]
-            package_othername = packagename.replace("-", "_")
+
             # Now check if a wheel is already available
-            if lowername.startswith(packagename) or lowername.startswith(
-                othername
+            if lowername.startswith(wheel_name_prefix) or lowername.startswith(
+                package_othername
             ):
-                # Check if the sha256sum is already there or not
-                if digest not in line:
-                    # Now add the hash to the line
-                    line = "{} --hash=sha256:{}\n".format(line, digest)
+                # Now add the hash to the line
+                line = "{} --hash=sha256:{}\n".format(
+                    package_name_and_version, digest)
+                newlines.append(line)
 
         if line.find("--hash") == -1:
             # Missing wheel
-            missing_wheels.append(values)
+            missing_wheels.append(package_name_and_version)
 
     # Do not update the file if missing wheels
     if missing_wheels:
         print("The following dependent wheel(s) are missing:")
-        for value in missing_wheels:
-            print("{0}=={1}".format(value[0], value[1]))
+        for missing_dep in missing_wheels:
+            print("{}".format(missing_dep))
 
         print("\nPlease build the wheel by using the following command:\n")
         print("\tPKG_DIR={0} make build-wheels\n".format(os.environ["PKG_DIR"]))
@@ -111,7 +120,7 @@ def add_sha256sums(path: str) -> None:
             fobj.write(line)
 
 
-def cleanup(path: str) -> None:
+def cleanup(path: str) -> list:
     """Cleans up requirement files
 
     :param path: The file to cleanup
@@ -138,13 +147,19 @@ def cleanup(path: str) -> None:
         # It will come as a debian package dependency.
         elif line.startswith("sip"):
             continue
+        # We want to ignore lines that are comments.
+        elif line.lstrip().startswith("#"):
+            continue
+        # We just want the package names, since we're
+        # using the hashes of the wheels (and we verified
+        # the integrity of those hashes by checking the gpg
+        # signature of a text file containing the list of hashes).
+        elif line.lstrip().startswith("--hash=sha256"):
+            continue
         else:
             finallines.append(line)
 
-    # Now let us write back the file.
-    with open(path, "w") as fobj:
-        for line in finallines:
-            fobj.write(line)
+    return finallines
 
 
 if __name__ == "__main__":
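The heart of the rewritten `add_sha256sums` is matching each `name==version` pin against the wheel filenames listed in the signed `sha256sums.txt`, trying both the hyphenated and the underscored spellings that wheel filenames use. A condensed, self-contained illustration of that matching idea, on made-up data (digests shortened for readability):

```python
# Each sha256sums.txt entry is (digest, filename), as in the script's
# `files` list. All values below are invented for the example.
sha256sums = [
    ("f00dfeed", "atomicwrites-1.2.1-py2.py3-none-any.whl"),
    ("cafebabe", "python_editor-1.0.3-py3-none-any.whl"),
]

requirements = ["atomicwrites==1.2.1", "python-editor==1.0.3"]

for entry in requirements:
    name, version = entry.split("==")
    # Wheel filenames use "_" where the PyPI name has "-", so try both.
    prefixes = ("{}-{}".format(name, version),
                "{}-{}".format(name.replace("-", "_"), version))
    for digest, wheel in sha256sums:
        if wheel.lower().startswith(prefixes):
            print("{} --hash=sha256:{}".format(entry, digest))
```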
print("{0}=={1}".format(value[0], value[1])) + for missing_dep in missing_wheels: + print("{}".format(missing_dep)) print("\nPlease build the wheel by using the following command:\n") print("\tPKG_DIR={0} make build-wheels\n".format(os.environ["PKG_DIR"])) @@ -111,7 +120,7 @@ def add_sha256sums(path: str) -> None: fobj.write(line) -def cleanup(path: str) -> None: +def cleanup(path: str) -> str: """Cleans up requirement files :param path: The file to cleanup @@ -138,13 +147,19 @@ def cleanup(path: str) -> None: # It will come as a debian package dependency. elif line.startswith("sip"): continue + # We want to ignore lines that are comments. + elif line.lstrip().startswith("#"): + continue + # We just want the package names, since we're + # using the hashes of the wheels (and we verified + # the integrity of those hashes by verifying the gpg + # signature of a textfile containing the list of hashes). + elif line.lstrip().startswith("--hash=sha256"): + continue else: finallines.append(line) - # Now let us write back the file. - with open(path, "w") as fobj: - for line in finallines: - fobj.write(line) + return finallines if __name__ == "__main__": diff --git a/scripts/verify-hashes b/scripts/verify-hashes index 62b3c1d2..66c4f133 100755 --- a/scripts/verify-hashes +++ b/scripts/verify-hashes @@ -27,19 +27,63 @@ lines = [] with open(sys.argv[2]) as fobj: lines = fobj.readlines() -# Let us verify each hash is in our data or not -for line in lines: - line = line.strip() - if not line: +# Now we want to verify that for each dependency in the project +# to be packaged, has a matching source tarball on FPF's PyPI. + +# Remove lines with comments. +uncommented_lines = [line for line in lines if not line.startswith('#')] + +# The hashes for a given requirement will be distributed +# across multiple lines, e.g. +# +# atomicwrites==1.2.1 \ +# --hash=sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0 \ +# --hash=sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee +# +# Let's create a list with one list element per dependency. + +dependencies_with_hashes = ''.join(uncommented_lines).replace('\\\n', '').splitlines() + +# Now we'll construct a dict containing each dependency, +# and a list of its hashes, e.g.: +# +# { +# 'alembic': ['04bcb970ca8659c3607ddd8ffd86cc9d6a99661c9bc590955e8813c66bfa582b'] +# } +# +# Note that at this point the hashes can be of upstream wheels. + +dependencies = {} +for dependency_line in dependencies_with_hashes: + if not dependency_line: continue - # Split based on --hash=sha256: - words = line.split("--hash=sha256:") - assert len(words) == 2 # We should have only hash per line - - if words[1] not in sha256sum_data: - print("Missing sha256sum {0}".format(words[1])) - sys.exit(1) -sys.exit(0) + package_name_and_version = dependency_line.split()[0] + # If this fails, we are missing a hash in requirements.txt. + assert len(dependency_line.split()) >= 2 + hashes = [] + for sha_256_hash in dependency_line.split()[1:]: + hashes.append(sha_256_hash.replace("--hash=sha256:", "")) + + dependencies.update({ + package_name_and_version: hashes + }) + +# Now check, for each dependency that there is at least one matching hash +# on FPF's PyPI (this will be the hash of the source tarball). 
diff --git a/securedrop-client/debian/rules b/securedrop-client/debian/rules
index 62302428..b1941039 100755
--- a/securedrop-client/debian/rules
+++ b/securedrop-client/debian/rules
@@ -4,7 +4,7 @@
 	dh $@ --with python-virtualenv
 
 override_dh_virtualenv:
-	dh_virtualenv --python /usr/bin/python3.5 --setuptools -S --index-url https://dev-bin.ops.securedrop.org/simple
+	dh_virtualenv --python /usr/bin/python3.5 --setuptools -S --index-url https://dev-bin.ops.securedrop.org/simple --requirements build-requirements.txt
 
 override_dh_strip_nondeterminism:
 	find ./debian/ -type f -name '*.pyc' -delete
diff --git a/securedrop-client/debian/securedrop-client.install b/securedrop-client/debian/securedrop-client.install
index 1eedf5cc..6a4365e2 100644
--- a/securedrop-client/debian/securedrop-client.install
+++ b/securedrop-client/debian/securedrop-client.install
@@ -2,6 +2,6 @@ files/alembic.ini usr/share/securedrop-client/
 alembic/env.py usr/share/securedrop-client/alembic/
 alembic/README usr/share/securedrop-client/alembic/
 alembic/script.py.mako usr/share/securedrop-client/alembic/
-alembic/versions/15ac9509fc68_init.py usr/share/securedrop-client/alembic/versions/
+alembic/versions/2f363b3d680e_init.py usr/share/securedrop-client/alembic/versions/
 files/securedrop-client usr/bin/
 files/securedrop-client.desktop usr/share/applications/
diff --git a/securedrop-proxy/debian/rules b/securedrop-proxy/debian/rules
index 94c81afb..f8ffaf08 100755
--- a/securedrop-proxy/debian/rules
+++ b/securedrop-proxy/debian/rules
@@ -1,7 +1,7 @@
 #!/usr/bin/make -f
 
 %:
-	dh $@ --with python-virtualenv --python /usr/bin/python3.5 --setuptools --index-url https://dev-bin.ops.securedrop.org/simple
+	dh $@ --with python-virtualenv --python /usr/bin/python3.5 --setuptools --index-url https://dev-bin.ops.securedrop.org/simple --requirements build-requirements.txt
 
 override_dh_strip_nondeterminism:
 	find ./debian/ -type f -name '*.pyc' -delete
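With both packages now pointed at `build-requirements.txt`, dh_virtualenv installs exactly the pinned, hash-annotated set that `update-requirements` emits: one `name==version` pin per line, each carrying the sha256 of the locally built wheel. A hypothetical excerpt of such a file (names and digests are placeholders, not real output):

```
atomicwrites==1.2.1 --hash=sha256:<digest of the locally built wheel>
python-editor==1.0.3 --hash=sha256:<digest of the locally built wheel>
```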