Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Rework packaging scripts to remove pipenv #43

Merged
merged 9 commits into from
May 30, 2019
1 change: 0 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ install-deps: ## Install initial Debian packaging dependencies

.PHONY: requirements
requirements: ## Creates requirements files for the Python projects
./scripts/create-requirements
./scripts/update-requirements

.PHONY: build-wheels
Expand Down
2 changes: 1 addition & 1 deletion scripts/build-debianpackage
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ cp -r "$CUR_DIR/$PKG_NAME/debian" "$TOP_BUILDDIR/$PKG_NAME/"
cd "$TOP_BUILDDIR/$PKG_NAME/"

# Verify all the hashes from the verified sha256sums.txt
$CUR_DIR/scripts/verify-hashes $CUR_DIR/sha256sums.txt ./requirements.txt
$CUR_DIR/scripts/verify-hashes $CUR_DIR/sha256sums.txt ./build-requirements.txt

echo "All hashes verified."

Expand Down
26 changes: 5 additions & 21 deletions scripts/build-sync-wheels
Original file line number Diff line number Diff line change
Expand Up @@ -26,29 +26,13 @@ def main():
print("Project directory missing {0}.".format(args.p))
sys.exit(1)

lockfile_path = os.path.join(args.p, "Pipfile.lock")
req_path = os.path.join(args.p, "requirements.txt")

if not os.path.exists(lockfile_path):
print("Pipfile.lock missing at {0}.".format(lockfile_path))
if not os.path.exists(req_path):
print("requirements.txt missing at {0}.".format(req_path))
sys.exit(3)

with open(lockfile_path) as fobj:
data = json.load(fobj)

defaults = data["default"]
with tempfile.TemporaryDirectory() as tmpdir:
newreq_path = os.path.join(tmpdir, "requirements.txt")
# First let us create the requirements.txt file with hashes
# from Pipfile.lock, these hashes are from upstream PyPI.

with open(newreq_path, "w") as fobj:
for name in defaults:
if name in ["pyqt5", "sip"]: # These will come from Debian
continue
package_name = "{}{}".format(name, defaults[name]["version"])
hashes = " ".join(["--hash={}".format(value) for value in defaults[name]["hashes"]])
fobj.write("{} {}\n".format(package_name,hashes))

# The --require-hashes option will be used by default if there are
# hashes in the requirements.txt file. We specify it anyway to guard
# against use of a requirements.txt file without hashes.
Expand All @@ -61,7 +45,7 @@ def main():
"-d",
tmpdir,
"-r",
newreq_path,
req_path,
]
subprocess.check_call(cmd)

Expand All @@ -75,7 +59,7 @@ def main():
"-w",
tmpdir,
"-r",
newreq_path,
req_path,
]
subprocess.check_call(cmd)

Expand Down
18 changes: 0 additions & 18 deletions scripts/create-requirements

This file was deleted.

77 changes: 46 additions & 31 deletions scripts/update-requirements
Original file line number Diff line number Diff line change
Expand Up @@ -12,19 +12,27 @@ from pprint import pprint

def main():
PKG_DIR = os.environ["PKG_DIR"]
req_path = os.path.join(PKG_DIR, "requirements.txt")
if not PKG_DIR:
print("Set PKG_DIR of the project")
sys.exit(1)

input_requirements_path = os.path.join(
PKG_DIR, "requirements.txt")

if not os.path.exists(req_path):
print("Missing {0}.".format(req_path))
output_requirements_path = os.path.join(
PKG_DIR, "build-requirements.txt")

if not os.path.exists(input_requirements_path):
print("Missing {0}.".format(input_requirements_path))
sys.exit(1)

# First strip comments, hash lines, the index line, and any PyQt or sip dependency
cleanup(req_path)
cleaned_lines = cleanup(input_requirements_path)

verify_sha256sums_file()

# Now write the build requirements file with the sha256sums from the local wheels
add_sha256sums(req_path)
add_sha256sums(output_requirements_path, cleaned_lines)

def verify_sha256sums_file():
"Verifies the sha256sums.txt file with gpg signature"
Expand All @@ -42,11 +50,13 @@ def verify_sha256sums_file():
cmd = ["./scripts/verify-sha256sum-signature"]
subprocess.check_call(cmd)

def add_sha256sums(path: str) -> None:
def add_sha256sums(path: str, requirements_lines: str) -> None:
"""Adds all the required sha256sums to the wheels

:param path: path to the requirements file.
:type path: str
:param requirements_lines: cleaned lines from the requirements file.
:type requirements_lines: list
:return: None
:rtype: None
"""
Expand All @@ -62,40 +72,39 @@ def add_sha256sums(path: str) -> None:
continue
files.append(line.split())

with open(path) as fobj:
lines = fobj.readlines()

newlines = []
missing_wheels = []
for line in lines:
line = line.strip()
words = line.split()
values = words[0].split("==")
values[1] = values[1].strip(';')
packagename = "-".join(values)
othername = "-".join([values[0].replace("-", "_"), values[1]])

# For each dependency in the requirements file
for line in requirements_lines:
package_name_and_version = line.strip().split()[0]
package_name = package_name_and_version.split('==')[0]
package_version = package_name_and_version.split('==')[1]

wheel_name_prefix = '{}-{}'.format(package_name, package_version)

for name in files:
lowername = name[1].lower()
digest = name[0]
package_othername = packagename.replace("-", "_")

package_othername = wheel_name_prefix.replace("-", "_")
# Now check if a wheel is already available
if lowername.startswith(packagename) or lowername.startswith(
othername
if lowername.startswith(wheel_name_prefix) or lowername.startswith(
package_othername
):
# Check if the sha256sum is already there or not
if digest not in line:
# Now add the hash to the line
line = "{} --hash=sha256:{}\n".format(line, digest)
# Now add the hash to the line
line = "{} --hash=sha256:{}\n".format(
package_name_and_version, digest)

newlines.append(line)
if line.find("--hash") == -1: # Missing wheel
missing_wheels.append(values)
missing_wheels.append(package_name_and_version)

# Do not update the file if missing wheels
if missing_wheels:
print("The following dependent wheel(s) are missing:")
for value in missing_wheels:
print("{0}=={1}".format(value[0], value[1]))
for missing_dep in missing_wheels:
print("{}".format(missing_dep))

print("\nPlease build the wheel by using the following command:\n")
print("\tPKG_DIR={0} make build-wheels\n".format(os.environ["PKG_DIR"]))
Expand All @@ -111,7 +120,7 @@ def add_sha256sums(path: str) -> None:
fobj.write(line)


def cleanup(path: str) -> None:
def cleanup(path: str) -> str:
"""Cleans up requirement files

:param path: The file to clean up
Expand All @@ -138,13 +147,19 @@ def cleanup(path: str) -> None:
# It will come as a debian package dependency.
elif line.startswith("sip"):
continue
# We want to ignore lines that are comments.
elif line.lstrip().startswith("#"):
continue
# We just want the package names, since we're
# using the hashes of the wheels (and we verified
# the integrity of those hashes by verifying the gpg
# signature of a textfile containing the list of hashes).
elif line.lstrip().startswith("--hash=sha256"):
continue
else:
finallines.append(line)

# Now let us write back the file.
with open(path, "w") as fobj:
for line in finallines:
fobj.write(line)
return finallines


if __name__ == "__main__":
Expand Down
68 changes: 56 additions & 12 deletions scripts/verify-hashes
Original file line number Diff line number Diff line change
Expand Up @@ -27,19 +27,63 @@ lines = []
with open(sys.argv[2]) as fobj:
lines = fobj.readlines()

# Let us verify each hash is in our data or not
for line in lines:
line = line.strip()
if not line:
# Now we want to verify that each dependency of the project
# to be packaged has a matching source tarball on FPF's PyPI.

# Remove lines with comments.
uncommented_lines = [line for line in lines if not line.startswith('#')]

# The hashes for a given requirement will be distributed
# across multiple lines, e.g.
#
# atomicwrites==1.2.1 \
# --hash=sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0 \
# --hash=sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee
#
# Let's create a list with one list element per dependency.

dependencies_with_hashes = ''.join(uncommented_lines).replace('\\\n', '').splitlines()

# Now we'll construct a dict containing each dependency,
# and a list of its hashes, e.g.:
#
# {
# 'alembic': ['04bcb970ca8659c3607ddd8ffd86cc9d6a99661c9bc590955e8813c66bfa582b']
# }
#
# Note that at this point the hashes can be of upstream wheels.

dependencies = {}
for dependency_line in dependencies_with_hashes:
if not dependency_line:
continue
# Split based on --hash=sha256:
words = line.split("--hash=sha256:")
assert len(words) == 2 # We should have only hash per line

if words[1] not in sha256sum_data:
print("Missing sha256sum {0}".format(words[1]))
sys.exit(1)

sys.exit(0)
package_name_and_version = dependency_line.split()[0]

# If this fails, we are missing a hash in requirements.txt.
assert len(dependency_line.split()) >= 2

hashes = []
for sha_256_hash in dependency_line.split()[1:]:
hashes.append(sha_256_hash.replace("--hash=sha256:", ""))

dependencies.update({
package_name_and_version: hashes
})

# Now check, for each dependency, that there is at least one matching hash
# on FPF's PyPI (this will be the hash of the source tarball).
for dependency in dependencies.keys():

found_a_hash = False
for requirements_sha_256_hash in dependencies[dependency]:
if requirements_sha_256_hash in sha256sum_data:
found_a_hash = True

# If no hash matched, we are missing a corresponding hash in our
# sha256sums data (representing the state of FPF's PyPI).
if not found_a_hash:
print("Missing sha256sum for package: {0}".format(dependency))
sys.exit(1)

sys.exit(0)
2 changes: 1 addition & 1 deletion securedrop-client/debian/securedrop-client.install
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@ files/alembic.ini usr/share/securedrop-client/
alembic/env.py usr/share/securedrop-client/alembic/
alembic/README usr/share/securedrop-client/alembic/
alembic/script.py.mako usr/share/securedrop-client/alembic/
alembic/versions/15ac9509fc68_init.py usr/share/securedrop-client/alembic/versions/
alembic/versions/2f363b3d680e_init.py usr/share/securedrop-client/alembic/versions/
files/securedrop-client usr/bin/
files/securedrop-client.desktop usr/share/applications/