From a384e8e05a7bbda6bb50ae7ee518d6bf77da1d7a Mon Sep 17 00:00:00 2001
From: Erik Moeller
Date: Fri, 5 Jun 2020 17:56:34 -0700
Subject: [PATCH 1/7] Add black formatter check - Re-adds dev-requirements -
Adds make target - Generalizes and cleans up linter autodiscovery - Avoids
use of docker when running targets locally
---
.circleci/config.yml | 17 +++++--
.flake8 | 2 +-
Makefile | 14 ++++--
dev-requirements.in | 2 +
dev-requirements.txt | 100 ++++++++++++++++++++++++++++++++++++++++
launcher/Makefile | 2 +-
launcher/pyproject.toml | 2 +
pyproject.toml | 3 ++
scripts/flake8-linting | 26 -----------
scripts/lint-all | 32 +++++++++++++
10 files changed, 164 insertions(+), 36 deletions(-)
create mode 100644 dev-requirements.in
create mode 100644 dev-requirements.txt
create mode 100644 launcher/pyproject.toml
create mode 100644 pyproject.toml
delete mode 100755 scripts/flake8-linting
create mode 100755 scripts/lint-all
diff --git a/.circleci/config.yml b/.circleci/config.yml
index bb021be0..5286bd1b 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -2,12 +2,19 @@
version: 2
jobs:
lint:
- machine: true
+ docker:
+ - image: circleci/python:3.7-buster
steps:
- checkout
- - run: sudo apt-get update
- - run: sudo apt-get install -qq make
- - run: make flake8
+ - run:
+ name: Install dependencies and run linters
+ command: |
+ virtualenv .venv
+ source .venv/bin/activate
+ pip install --require-hashes -r dev-requirements.txt
+ sudo apt install file
+ make flake8
+ make black
buildrpm:
machine: true
steps:
@@ -27,7 +34,7 @@ jobs:
source .venv/bin/activate
pip install --require-hashes -r test-requirements.txt
sudo apt install lsof
- make test && make bandit && make black
+ make test && make bandit
workflows:
version: 2
diff --git a/.flake8 b/.flake8
index 1de5103e..516ae2de 100644
--- a/.flake8
+++ b/.flake8
@@ -1,3 +1,3 @@
[flake8]
ignore: W605
-max-line-length = 99
+max-line-length = 100
diff --git a/Makefile b/Makefile
index bb9c20ac..62852a10 100644
--- a/Makefile
+++ b/Makefile
@@ -13,6 +13,12 @@ all: ## Builds and provisions all VMs required for testing workstation
$(MAKE) prep-salt
./scripts/provision-all
+.PHONY: black
+black: ## Checks all Python files for formatting issues with black
+# Not requiring dom0 since linting requires extra packages,
+# available only in the developer environment, i.e. Work VM.
+ @./scripts/lint-all "black --check"
+
dev: all ## Builds and provisions all VMs required for testing workstation
prod: ## Configures a PRODUCTION install for pilot use
@@ -138,9 +144,7 @@ validate: assert-dom0 ## Checks for local requirements in dev env
flake8: ## Lints all Python files with flake8
# Not requiring dom0 since linting requires extra packages,
# available only in the developer environment, i.e. Work VM.
- @docker run -v $(PWD):/code -w /code --name sdw_flake8 --rm \
- --entrypoint /code/scripts/flake8-linting \
- python:3.5.7-slim-stretch
+ @./scripts/lint-all "flake8"
prep-dom0: prep-salt # Copies dom0 config files
sudo qubesctl --show-output --targets dom0 state.highstate
@@ -152,6 +156,10 @@ destroy-all: ## Destroys all VMs managed by Workstation salt config
update-pip-requirements: ## Updates all Python requirements files via pip-compile.
pip-compile --generate-hashes --output-file requirements.txt requirements.in
+venv: ## Provision and activate a Python 3 virtualenv for development.
+ python3 -m venv .venv
+ .venv/bin/pip install --require-hashes -r dev-requirements.txt
+
# Explanation of the below shell command should it ever break.
# 1. Set the field separator to ": ##" to parse lines for make targets.
# 2. Check for second field matching, skip otherwise.
diff --git a/dev-requirements.in b/dev-requirements.in
new file mode 100644
index 00000000..4e92b9de
--- /dev/null
+++ b/dev-requirements.in
@@ -0,0 +1,2 @@
+black
+flake8
diff --git a/dev-requirements.txt b/dev-requirements.txt
new file mode 100644
index 00000000..1db65e66
--- /dev/null
+++ b/dev-requirements.txt
@@ -0,0 +1,100 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --generate-hashes --output-file=dev-requirements.txt dev-requirements.in
+#
+appdirs==1.4.4 \
+ --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \
+ --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 \
+ # via black
+attrs==19.3.0 \
+ --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \
+ --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 \
+ # via black
+black==19.10b0 \
+ --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \
+ --hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 \
+ # via -r dev-requirements.in
+click==7.1.2 \
+ --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \
+ --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc \
+ # via black
+flake8==3.8.3 \
+ --hash=sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c \
+ --hash=sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208 \
+ # via -r dev-requirements.in
+importlib-metadata==1.6.1 \
+ --hash=sha256:0505dd08068cfec00f53a74a0ad927676d7757da81b7436a6eefe4c7cf75c545 \
+ --hash=sha256:15ec6c0fd909e893e3a08b3a7c76ecb149122fb14b7efe1199ddd4c7c57ea958 \
+ # via flake8
+mccabe==0.6.1 \
+ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
+ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
+ # via flake8
+pathspec==0.8.0 \
+ --hash=sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0 \
+ --hash=sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061 \
+ # via black
+pycodestyle==2.6.0 \
+ --hash=sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367 \
+ --hash=sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e \
+ # via flake8
+pyflakes==2.2.0 \
+ --hash=sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92 \
+ --hash=sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8 \
+ # via flake8
+regex==2020.5.14 \
+ --hash=sha256:1386e75c9d1574f6aa2e4eb5355374c8e55f9aac97e224a8a5a6abded0f9c927 \
+ --hash=sha256:27ff7325b297fb6e5ebb70d10437592433601c423f5acf86e5bc1ee2919b9561 \
+ --hash=sha256:329ba35d711e3428db6b45a53b1b13a0a8ba07cbbcf10bbed291a7da45f106c3 \
+ --hash=sha256:3a9394197664e35566242686d84dfd264c07b20f93514e2e09d3c2b3ffdf78fe \
+ --hash=sha256:51f17abbe973c7673a61863516bdc9c0ef467407a940f39501e786a07406699c \
+ --hash=sha256:579ea215c81d18da550b62ff97ee187b99f1b135fd894a13451e00986a080cad \
+ --hash=sha256:70c14743320a68c5dac7fc5a0f685be63bc2024b062fe2aaccc4acc3d01b14a1 \
+ --hash=sha256:7e61be8a2900897803c293247ef87366d5df86bf701083b6c43119c7c6c99108 \
+ --hash=sha256:8044d1c085d49673aadb3d7dc20ef5cb5b030c7a4fa253a593dda2eab3059929 \
+ --hash=sha256:89d76ce33d3266173f5be80bd4efcbd5196cafc34100fdab814f9b228dee0fa4 \
+ --hash=sha256:99568f00f7bf820c620f01721485cad230f3fb28f57d8fbf4a7967ec2e446994 \
+ --hash=sha256:a7c37f048ec3920783abab99f8f4036561a174f1314302ccfa4e9ad31cb00eb4 \
+ --hash=sha256:c2062c7d470751b648f1cacc3f54460aebfc261285f14bc6da49c6943bd48bdd \
+ --hash=sha256:c9bce6e006fbe771a02bda468ec40ffccbf954803b470a0345ad39c603402577 \
+ --hash=sha256:ce367d21f33e23a84fb83a641b3834dd7dd8e9318ad8ff677fbfae5915a239f7 \
+ --hash=sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5 \
+ --hash=sha256:ce5cc53aa9fbbf6712e92c7cf268274eaff30f6bd12a0754e8133d85a8fb0f5f \
+ --hash=sha256:d466967ac8e45244b9dfe302bbe5e3337f8dc4dec8d7d10f5e950d83b140d33a \
+ --hash=sha256:d881c2e657c51d89f02ae4c21d9adbef76b8325fe4d5cf0e9ad62f850f3a98fd \
+ --hash=sha256:e565569fc28e3ba3e475ec344d87ed3cd8ba2d575335359749298a0899fe122e \
+ --hash=sha256:ea55b80eb0d1c3f1d8d784264a6764f931e172480a2f1868f2536444c5f01e01 \
+ # via black
+toml==0.10.1 \
+ --hash=sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f \
+ --hash=sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88 \
+ # via black
+typed-ast==1.4.1 \
+ --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \
+ --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \
+ --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \
+ --hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \
+ --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \
+ --hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \
+ --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \
+ --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \
+ --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \
+ --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \
+ --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \
+ --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \
+ --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \
+ --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \
+ --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \
+ --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \
+ --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \
+ --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \
+ --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \
+ --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \
+ --hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \
+ # via black
+zipp==3.1.0 \
+ --hash=sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b \
+ --hash=sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96 \
+ # via importlib-metadata
diff --git a/launcher/Makefile b/launcher/Makefile
index f5530427..a37fc502 100644
--- a/launcher/Makefile
+++ b/launcher/Makefile
@@ -11,7 +11,7 @@ test:
pytest --cov-report term-missing --cov=sdw_notify --cov=sdw_updater_gui/ --cov=sdw_util -v tests/
black: ## Runs the black code formatter on the launcher code
- black --check .
+ black --check --line-length=100 .
# Explanation of the below shell command should it ever break.
# 1. Set the field separator to ": ##" to parse lines for make targets.
diff --git a/launcher/pyproject.toml b/launcher/pyproject.toml
new file mode 100644
index 00000000..037585e8
--- /dev/null
+++ b/launcher/pyproject.toml
@@ -0,0 +1,2 @@
+[tool.black]
+line-length = 100
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..8aaabff8
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[tool.black]
+line-length = 100
+exclude = 'launcher/.*'
diff --git a/scripts/flake8-linting b/scripts/flake8-linting
deleted file mode 100755
index 5fbd3e3c..00000000
--- a/scripts/flake8-linting
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-set -e
-set -u
-set -o pipefail
-
-
-# Install 'file', used for discovering all Python scripts.
-# This is a hack, we should fold in the 'file' package as a dependecy
-# in the underlying container.
-printf "Updating apt lists..."
-apt-get update > /dev/null
-printf " done.\n"
-
-apt-get install -qq file
-
-# Install flake8 inside container
-pip install -q flake8
-
-# First pass with flake8 autodiscovery
-flake8
-
-# Second pass to make sure we've found all Python scripts,
-# since flake8's autodiscovery option can miss some.
-find -type f -exec file -i {} + \
- | perl -F':\s+' -nE '$F[1] =~ m/text\/x-python/ and say $F[0]' \
- | xargs flake8
diff --git a/scripts/lint-all b/scripts/lint-all
new file mode 100755
index 00000000..4f1a300e
--- /dev/null
+++ b/scripts/lint-all
@@ -0,0 +1,32 @@
+#!/bin/bash
+set -e
+set -u
+set -o pipefail
+
+
+if [ $# -eq 0 ]; then
+ echo "Usage: lint-all <linter-command>"
+ echo
+ echo "lint-all: Run a linter against all Python files in the repository"
+ echo
+ echo "Example:"
+ echo " lint-all \"flake8 --max-line-length=100\""
+ exit 1
+fi
+
+# Because this repo contains some files without the .py file extension, we
+# scan all directories except for the ones specified in the first line for
+# files which
+#
+# - the file command recognizes as the MIME type "text/x-python", OR
+# - which are identified as Python by their file extension, and recognized as
+# plain text by the file command.
+#
+# In practice, this gives us full coverage of Python files in this repository
+# without having to manually update a list of special cases.
+find . -type d \( -name .venv -o -name .git -o -name rpm-build \) \
+ -prune -o \
+ -type f -exec file --mime-type {} + | \
+ grep -E '(text/x-python$|^.*\.py:\s*text/plain$)' | \
+ cut -d: -f1 | \
+ xargs $@
From 782a04e59ae10d40085b5d8807e255143e5d05c5 Mon Sep 17 00:00:00 2001
From: Erik Moeller
Date: Thu, 11 Jun 2020 00:42:28 -0700
Subject: [PATCH 2/7] Apply black formatting to all Python files
---
launcher/sdw_notify/Notify.py | 12 +--
launcher/sdw_updater_gui/Updater.py | 40 ++------
launcher/sdw_updater_gui/UpdaterApp.py | 36 ++-----
launcher/sdw_updater_gui/UpdaterAppUi.py | 12 +--
launcher/sdw_updater_gui/strings.py | 6 +-
launcher/sdw_util/Util.py | 4 +-
launcher/tests/test_notify.py | 11 +--
launcher/tests/test_updater.py | 115 ++++++-----------------
launcher/tests/test_util.py | 8 +-
scripts/configure-environment | 4 +-
scripts/destroy-vm | 16 +---
scripts/securedrop-admin.py | 31 +++---
scripts/validate_config.py | 22 +++--
sd-proxy/do-not-open-here | 5 +-
setup.py | 7 +-
tests/base.py | 29 ++----
tests/test_app.py | 16 ++--
tests/test_dom0_config.py | 10 +-
tests/test_dom0_rpm_repo.py | 6 +-
tests/test_gpg.py | 6 +-
tests/test_proxy_vm.py | 11 +--
tests/test_qubes_rpc.py | 17 ++--
tests/test_sd_devices.py | 18 ++--
tests/test_sd_whonix.py | 14 ++-
tests/test_sys_firewall.py | 14 +--
tests/test_sys_usb.py | 1 -
tests/test_viewer.py | 11 ++-
tests/test_vms_exist.py | 30 +++---
tests/test_vms_platform.py | 10 +-
29 files changed, 180 insertions(+), 342 deletions(-)
diff --git a/launcher/sdw_notify/Notify.py b/launcher/sdw_notify/Notify.py
index 3e8904cd..1ac931e6 100644
--- a/launcher/sdw_notify/Notify.py
+++ b/launcher/sdw_notify/Notify.py
@@ -60,9 +60,7 @@ def is_update_check_necessary():
sdlog.error(
"Data in {} not in the expected format. "
"Expecting a timestamp in format '{}'. "
- "Showing security warning.".format(
- LAST_UPDATED_FILE, LAST_UPDATED_FORMAT
- )
+ "Showing security warning.".format(LAST_UPDATED_FILE, LAST_UPDATED_FORMAT)
)
return True
@@ -76,9 +74,7 @@ def is_update_check_necessary():
if not last_updated_file_exists:
sdlog.warning(
"Timestamp file '{}' does not exist. "
- "Updater may never have run. Showing security warning.".format(
- LAST_UPDATED_FILE
- )
+ "Updater may never have run. Showing security warning.".format(LAST_UPDATED_FILE)
)
return True
else:
@@ -113,9 +109,7 @@ def is_update_check_necessary():
sdlog.info(
"Last successful update ({0:.1f} hours ago) "
"is below the warning threshold ({1:.1f} hours). "
- "Exiting without warning.".format(
- updated_hours_ago, warning_threshold_hours
- )
+ "Exiting without warning.".format(updated_hours_ago, warning_threshold_hours)
)
return False
diff --git a/launcher/sdw_updater_gui/Updater.py b/launcher/sdw_updater_gui/Updater.py
index 41f3e98c..62d1c55d 100644
--- a/launcher/sdw_updater_gui/Updater.py
+++ b/launcher/sdw_updater_gui/Updater.py
@@ -54,9 +54,7 @@ def check_all_updates():
for progress_current, vm in enumerate(current_templates.keys()):
# yield the progress percentage for UI updates
- progress_percentage = int(
- ((progress_current + 1) / len(current_templates.keys())) * 100
- )
+ progress_percentage = int(((progress_current + 1) / len(current_templates.keys())) * 100)
update_results = check_updates(vm)
yield vm, progress_percentage, update_results
@@ -129,17 +127,11 @@ def _check_updates_debian(vm):
sdlog.info("Checking for updates {}:{}".format(vm, current_templates[vm]))
subprocess.check_call(["qvm-run", current_templates[vm], "sudo apt update"])
subprocess.check_call(
- [
- "qvm-run",
- current_templates[vm],
- "[[ $(apt list --upgradable | wc -l) -eq 1 ]]",
- ]
+ ["qvm-run", current_templates[vm], "[[ $(apt list --upgradable | wc -l) -eq 1 ]]",]
)
except subprocess.CalledProcessError as e:
sdlog.error(
- "Updates required for {} or cannot check for updates".format(
- current_templates[vm]
- )
+ "Updates required for {} or cannot check for updates".format(current_templates[vm])
)
sdlog.error(str(e))
updates_required = True
@@ -164,9 +156,7 @@ def _apply_updates_dom0():
try:
subprocess.check_call(["sudo", "qubes-dom0-update", "-y"])
except subprocess.CalledProcessError as e:
- sdlog.error(
- "An error has occurred updating dom0. Please contact your administrator."
- )
+ sdlog.error("An error has occurred updating dom0. Please contact your administrator.")
sdlog.error(str(e))
return UpdateStatus.UPDATES_FAILED
sdlog.info("dom0 update successful")
@@ -247,11 +237,7 @@ def _write_updates_status_flag_to_disk(status):
try:
sdlog.info("Setting update flag to {} in sd-app".format(status.value))
subprocess.check_call(
- [
- "qvm-run",
- "sd-app",
- "echo '{}' > {}".format(status.value, flag_file_path_sd_app),
- ]
+ ["qvm-run", "sd-app", "echo '{}' > {}".format(status.value, flag_file_path_sd_app),]
)
except subprocess.CalledProcessError as e:
sdlog.error("Error writing update status flag to sd-app")
@@ -286,9 +272,7 @@ def last_required_reboot_performed():
return True
if int(flag_contents["status"]) == int(UpdateStatus.REBOOT_REQUIRED.value):
- reboot_time = datetime.strptime(
- flag_contents["last_status_update"], DATE_FORMAT
- )
+ reboot_time = datetime.strptime(flag_contents["last_status_update"], DATE_FORMAT)
boot_time = datetime.now() - _get_uptime()
# The session was started *before* the reboot was requested by
@@ -315,9 +299,7 @@ def _get_uptime():
uptime_minutes = (uptime % 3600) // 60
uptime_seconds = uptime % 60
- delta = timedelta(
- hours=uptime_hours, minutes=uptime_minutes, seconds=uptime_seconds
- )
+ delta = timedelta(hours=uptime_hours, minutes=uptime_minutes, seconds=uptime_seconds)
return delta
@@ -471,9 +453,7 @@ def _safely_start_vm(vm):
["qvm-ls", "--running", "--raw-list"], stderr=subprocess.PIPE
)
sdlog.info("VMs running before start of {}: {}".format(vm, running_vms))
- subprocess.check_output(
- ["qvm-start", "--skip-if-running", vm], stderr=subprocess.PIPE
- )
+ subprocess.check_output(["qvm-start", "--skip-if-running", vm], stderr=subprocess.PIPE)
except subprocess.CalledProcessError as e:
sdlog.error("Error while starting {}".format(vm))
sdlog.error(str(e))
@@ -493,9 +473,7 @@ def should_launch_updater(interval):
return False
elif status["status"] == UpdateStatus.REBOOT_REQUIRED.value:
if last_required_reboot_performed():
- sdlog.info(
- "Required reboot performed, updating status and launching client."
- )
+ sdlog.info("Required reboot performed, updating status and launching client.")
_write_updates_status_flag_to_disk(UpdateStatus.UPDATES_OK)
return False
else:
diff --git a/launcher/sdw_updater_gui/UpdaterApp.py b/launcher/sdw_updater_gui/UpdaterApp.py
index 88cabbc0..6d06a478 100644
--- a/launcher/sdw_updater_gui/UpdaterApp.py
+++ b/launcher/sdw_updater_gui/UpdaterApp.py
@@ -46,9 +46,7 @@ def __init__(self, parent=None):
self.show()
- self.proposedActionDescription.setText(
- strings.description_status_checking_updates
- )
+ self.proposedActionDescription.setText(strings.description_status_checking_updates)
self.progress += 1
self.progressBar.setProperty("value", self.progress)
@@ -78,18 +76,14 @@ def update_status(self, result):
self.applyUpdatesButton.show()
self.cancelButton.setEnabled(True)
self.cancelButton.show()
- self.proposedActionDescription.setText(
- strings.description_status_updates_available
- )
+ self.proposedActionDescription.setText(strings.description_status_updates_available)
elif result["recommended_action"] == UpdateStatus.UPDATES_OK:
logger.info("VMs up-to-date, OK to start client")
self.clientOpenButton.setEnabled(True)
self.clientOpenButton.show()
self.cancelButton.setEnabled(True)
self.cancelButton.show()
- self.proposedActionDescription.setText(
- strings.description_status_up_to_date
- )
+ self.proposedActionDescription.setText(strings.description_status_up_to_date)
elif result["recommended_action"] == UpdateStatus.REBOOT_REQUIRED:
logger.info("Reboot will be required")
# We also have further updates to do, let's apply updates and reboot
@@ -101,9 +95,7 @@ def update_status(self, result):
self.applyUpdatesButton.show()
self.cancelButton.setEnabled(True)
self.cancelButton.show()
- self.proposedActionDescription.setText(
- strings.description_status_updates_available
- )
+ self.proposedActionDescription.setText(strings.description_status_updates_available)
# No updates required, show reboot button.
else:
logger.info("Reboot required")
@@ -111,15 +103,11 @@ def update_status(self, result):
self.rebootButton.show()
self.cancelButton.setEnabled(True)
self.cancelButton.show()
- self.proposedActionDescription.setText(
- strings.description_status_reboot_required
- )
+ self.proposedActionDescription.setText(strings.description_status_reboot_required)
else:
logger.error("Error checking for updates")
logger.error(str(result))
- self.proposedActionDescription.setText(
- strings.description_error_check_updates_failed
- )
+ self.proposedActionDescription.setText(strings.description_error_check_updates_failed)
@pyqtSlot(dict)
def upgrade_status(self, result):
@@ -137,18 +125,14 @@ def upgrade_status(self, result):
self.rebootButton.show()
self.cancelButton.setEnabled(True)
self.cancelButton.show()
- self.proposedActionDescription.setText(
- strings.description_status_reboot_required
- )
+ self.proposedActionDescription.setText(strings.description_status_reboot_required)
elif result["recommended_action"] == UpdateStatus.UPDATES_OK:
logger.info("VMs have been succesfully updated, OK to start client")
self.clientOpenButton.setEnabled(True)
self.clientOpenButton.show()
self.cancelButton.setEnabled(True)
self.cancelButton.show()
- self.proposedActionDescription.setText(
- strings.description_status_updates_complete
- )
+ self.proposedActionDescription.setText(strings.description_status_updates_complete)
else:
logger.info("Error upgrading VMs")
self.cancelButton.setEnabled(True)
@@ -194,9 +178,7 @@ def apply_all_updates(self):
logger.info("Starting UpgradeThread")
self.progress = 5
self.progressBar.setProperty("value", self.progress)
- self.proposedActionDescription.setText(
- strings.description_status_applying_updates
- )
+ self.proposedActionDescription.setText(strings.description_status_applying_updates)
self.applyUpdatesButton.setEnabled(False)
self.applyUpdatesButton.hide()
self.cancelButton.setEnabled(False)
diff --git a/launcher/sdw_updater_gui/UpdaterAppUi.py b/launcher/sdw_updater_gui/UpdaterAppUi.py
index 1efe27bf..10bd4cf7 100644
--- a/launcher/sdw_updater_gui/UpdaterAppUi.py
+++ b/launcher/sdw_updater_gui/UpdaterAppUi.py
@@ -78,9 +78,7 @@ def setupUi(self, UpdaterDialog):
QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignTop
)
self.proposedActionDescription.setWordWrap(True)
- self.proposedActionDescription.setObjectName(
- _fromUtf8("proposedActionDescription")
- )
+ self.proposedActionDescription.setObjectName(_fromUtf8("proposedActionDescription"))
self.gridLayout.addWidget(self.proposedActionDescription, 3, 0, 1, 5)
self.label = QtGui.QLabel(self.layoutWidget)
self.label.setMinimumSize(QtCore.QSize(0, 20))
@@ -94,14 +92,10 @@ def setupUi(self, UpdaterDialog):
def retranslateUi(self, UpdaterDialog):
UpdaterDialog.setWindowTitle(
- _translate(
- "UpdaterDialog", "SecureDrop Workstation preflight updater", None
- )
+ _translate("UpdaterDialog", "SecureDrop Workstation preflight updater", None)
)
self.rebootButton.setText(_translate("UpdaterDialog", "Reboot", None))
- self.applyUpdatesButton.setText(
- _translate("UpdaterDialog", "Start Updates", None)
- )
+ self.applyUpdatesButton.setText(_translate("UpdaterDialog", "Start Updates", None))
self.cancelButton.setText(_translate("UpdaterDialog", "Cancel", None))
self.clientOpenButton.setText(_translate("UpdaterDialog", "Continue", None))
self.proposedActionDescription.setText(
diff --git a/launcher/sdw_updater_gui/strings.py b/launcher/sdw_updater_gui/strings.py
index a73e54f7..846f44e7 100644
--- a/launcher/sdw_updater_gui/strings.py
+++ b/launcher/sdw_updater_gui/strings.py
@@ -15,8 +15,7 @@
"to continue.
"
)
description_status_up_to_date = (
- " No updates today!
"
- "Click Continue to launch the SecureDrop app.
"
+ " No updates today!
" "Click Continue to launch the SecureDrop app.
"
)
description_error_check_updates_failed = (
" Cannot check for updates
There was an error retrieving updates. "
@@ -51,6 +50,5 @@
" Error rebooting workstation
" "Please contact your administrator.
"
)
description_error_lauching_client = (
- " Error launching the SecureDrop application
"
- "Please contact your administrator."
+ " Error launching the SecureDrop application
" "Please contact your administrator."
)
diff --git a/launcher/sdw_util/Util.py b/launcher/sdw_util/Util.py
index b036ddd0..07c857b9 100644
--- a/launcher/sdw_util/Util.py
+++ b/launcher/sdw_util/Util.py
@@ -22,9 +22,7 @@
LOCK_ERROR = "Error obtaining lock on '{}'. Process may already be running."
# Format for those logs
-LOG_FORMAT = (
- "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s"
-)
+LOG_FORMAT = "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s"
sdlog = logging.getLogger(__name__)
diff --git a/launcher/tests/test_notify.py b/launcher/tests/test_notify.py
index 04a97290..4833d5e3 100644
--- a/launcher/tests/test_notify.py
+++ b/launcher/tests/test_notify.py
@@ -8,15 +8,11 @@
from tempfile import TemporaryDirectory
relpath_notify = "../sdw_notify/Notify.py"
-path_to_notify = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), relpath_notify
-)
+path_to_notify = os.path.join(os.path.dirname(os.path.abspath(__file__)), relpath_notify)
notify = SourceFileLoader("Notify", path_to_notify).load_module()
relpath_updater = "../sdw_updater_gui/Updater.py"
-path_to_updater = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), relpath_updater
-)
+path_to_updater = os.path.join(os.path.dirname(os.path.abspath(__file__)), relpath_updater)
updater = SourceFileLoader("Updater", path_to_updater).load_module()
@@ -39,8 +35,7 @@
# Regex for info log if we've updated recently enough
NO_WARNING_REGEX = (
- r"Last successful update \(.* hours ago\) is below the warning threshold "
- r"\(.* hours\)."
+ r"Last successful update \(.* hours ago\) is below the warning threshold " r"\(.* hours\)."
)
# Regex for bad contents in `sdw-last-updated` file
diff --git a/launcher/tests/test_updater.py b/launcher/tests/test_updater.py
index 6b6b02a7..bfb624b2 100644
--- a/launcher/tests/test_updater.py
+++ b/launcher/tests/test_updater.py
@@ -9,9 +9,7 @@
from unittest.mock import call
relpath_updater_script = "../sdw_updater_gui/Updater.py"
-path_to_script = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), relpath_updater_script
-)
+path_to_script = os.path.join(os.path.dirname(os.path.abspath(__file__)), relpath_updater_script)
updater = SourceFileLoader("Updater", path_to_script).load_module()
from Updater import UpdateStatus # noqa: E402
from Updater import current_templates # noqa: E402
@@ -80,14 +78,10 @@ def test_check_updates_dom0_up_to_date(mocked_info, mocked_error, mocked_call, c
assert not mocked_error.called
-@mock.patch(
- "subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")
-)
+@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call"))
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
-def test_check_updates_dom0_needs_updates(
- mocked_info, mocked_error, mocked_call, capsys
-):
+def test_check_updates_dom0_needs_updates(mocked_info, mocked_error, mocked_call, capsys):
status = updater._check_updates_dom0()
assert status == UpdateStatus.UPDATES_REQUIRED
error_log = [
@@ -119,8 +113,7 @@ def test_check_debian_updates_up_to_date(
"subprocess.check_output", side_effect=["0", "0"],
)
@mock.patch(
- "subprocess.check_call",
- side_effect=[subprocess.CalledProcessError(1, "check_call"), "0"],
+ "subprocess.check_call", side_effect=[subprocess.CalledProcessError(1, "check_call"), "0"],
)
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
@@ -131,46 +124,33 @@ def test_check_updates_debian_updates_required(
assert status == UpdateStatus.UPDATES_REQUIRED
error_log = [
call(
- "Updates required for {} or cannot check for updates".format(
- "sd-app-buster-template"
- )
+ "Updates required for {} or cannot check for updates".format("sd-app-buster-template")
),
call("Command 'check_call' returned non-zero exit status 1."),
]
- info_log = [
- call("Checking for updates {}:{}".format("sd-app", "sd-app-buster-template"))
- ]
+ info_log = [call("Checking for updates {}:{}".format("sd-app", "sd-app-buster-template"))]
mocked_error.assert_has_calls(error_log)
mocked_info.assert_has_calls(info_log)
@mock.patch(
- "subprocess.check_output",
- side_effect=subprocess.CalledProcessError(1, "check_output",),
-)
-@mock.patch(
- "subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")
+ "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output",),
)
+@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call"))
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
-def test_check_debian_updates_failed(
- mocked_info, mocked_error, mocked_call, mocked_output, capsys
-):
+def test_check_debian_updates_failed(mocked_info, mocked_error, mocked_call, mocked_output, capsys):
status = updater._check_updates_debian("sd-app")
assert status == UpdateStatus.UPDATES_FAILED
error_log = [
call(
- "Updates required for {} or cannot check for updates".format(
- "sd-app-buster-template"
- )
+ "Updates required for {} or cannot check for updates".format("sd-app-buster-template")
),
call("Command 'check_call' returned non-zero exit status 1."),
call("Failed to shut down {}".format("sd-app-buster-template")),
call("Command 'check_output' returned non-zero exit status 1."),
]
- info_log = [
- call("Checking for updates {}:{}".format("sd-app", "sd-app-buster-template"))
- ]
+ info_log = [call("Checking for updates {}:{}".format("sd-app", "sd-app-buster-template"))]
mocked_error.assert_has_calls(error_log)
mocked_info.assert_has_calls(info_log)
@@ -179,25 +159,18 @@ def test_check_debian_updates_failed(
"subprocess.check_output", side_effect="0",
)
@mock.patch(
- "subprocess.check_call",
- side_effect=[subprocess.CalledProcessError(1, "check_call"), "0", "0"],
+ "subprocess.check_call", side_effect=[subprocess.CalledProcessError(1, "check_call"), "0", "0"],
)
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
-def test_check_debian_has_updates(
- mocked_info, mocked_error, mocked_call, mocked_output, capsys
-):
+def test_check_debian_has_updates(mocked_info, mocked_error, mocked_call, mocked_output, capsys):
error_log = [
call(
- "Updates required for {} or cannot check for updates".format(
- "sd-log-buster-template"
- )
+ "Updates required for {} or cannot check for updates".format("sd-log-buster-template")
),
call("Command 'check_call' returned non-zero exit status 1."),
]
- info_log = [
- call("Checking for updates {}:{}".format("sd-log", "sd-log-buster-template"))
- ]
+ info_log = [call("Checking for updates {}:{}".format("sd-log", "sd-log-buster-template"))]
status = updater._check_updates_debian("sd-log")
assert status == UpdateStatus.UPDATES_REQUIRED
@@ -231,11 +204,7 @@ def test_check_updates_calls_correct_commands(
subprocess_call_list = [
call(["qvm-run", current_templates[vm], "sudo apt update"]),
call(
- [
- "qvm-run",
- current_templates[vm],
- "[[ $(apt list --upgradable | wc -l) -eq 1 ]]",
- ]
+ ["qvm-run", current_templates[vm], "[[ $(apt list --upgradable | wc -l) -eq 1 ]]",]
),
]
check_output_call_list = [
@@ -259,9 +228,7 @@ def test_check_updates_calls_correct_commands(
@mock.patch("subprocess.check_call")
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
-def test_check_all_updates(
- mocked_info, mocked_error, mocked_call, mocked_check_updates
-):
+def test_check_all_updates(mocked_info, mocked_error, mocked_call, mocked_check_updates):
update_generator = updater.check_all_updates()
results = {}
@@ -368,9 +335,7 @@ def test_write_updates_status_flag_to_disk(
@pytest.mark.parametrize("status", UpdateStatus)
@mock.patch("os.path.expanduser", return_value=temp_dir)
-@mock.patch(
- "subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")
-)
+@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call"))
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
def test_write_updates_status_flag_to_disk_failure_app(
@@ -404,9 +369,7 @@ def test_write_updates_status_flag_to_disk_failure_dom0(
@mock.patch("subprocess.check_call")
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
-def test_write_last_updated_flags_to_disk(
- mocked_info, mocked_error, mocked_call, mocked_expand
-):
+def test_write_last_updated_flags_to_disk(mocked_info, mocked_error, mocked_call, mocked_expand):
flag_file_sd_app = updater.FLAG_FILE_LAST_UPDATED_SD_APP
flag_file_dom0 = updater.get_dom0_path(updater.FLAG_FILE_LAST_UPDATED_DOM0)
current_time = str(datetime.now().strftime(updater.DATE_FORMAT))
@@ -428,9 +391,7 @@ def test_write_last_updated_flags_to_disk(
@mock.patch("os.path.expanduser", return_value=temp_dir)
-@mock.patch(
- "subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")
-)
+@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call"))
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
def test_write_last_updated_flags_to_disk_fails(
@@ -447,9 +408,7 @@ def test_write_last_updated_flags_to_disk_fails(
@mock.patch("os.path.exists", return_value=False)
@mock.patch("os.path.expanduser", return_value=temp_dir)
-@mock.patch(
- "subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")
-)
+@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call"))
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
def test_write_last_updated_flags_dom0_folder_creation_fail(
@@ -489,9 +448,7 @@ def test_apply_updates_dom0_success(
assert not apply_vm.called
-@mock.patch(
- "subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")
-)
+@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call"))
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
def test_apply_updates_dom0_failure(mocked_info, mocked_error, mocked_call):
@@ -528,9 +485,7 @@ def test_apply_updates_vms(mocked_info, mocked_error, mocked_call, vm):
@pytest.mark.parametrize("vm", current_templates.keys())
-@mock.patch(
- "subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")
-)
+@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call"))
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
def test_apply_updates_vms_fails(mocked_info, mocked_error, mocked_call, vm):
@@ -633,8 +588,7 @@ def test_safely_start(mocked_info, mocked_error, mocked_output, vm):
@pytest.mark.parametrize("vm", current_templates.keys())
@mock.patch(
- "subprocess.check_output",
- side_effect=subprocess.CalledProcessError(1, "check_output"),
+ "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output"),
)
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
@@ -650,8 +604,7 @@ def test_safely_start_fails(mocked_info, mocked_error, mocked_output, vm):
@pytest.mark.parametrize("vm", current_templates.keys())
@mock.patch(
- "subprocess.check_output",
- side_effect=subprocess.CalledProcessError(1, "check_output"),
+ "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output"),
)
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
@@ -706,17 +659,14 @@ def test_shutdown_and_start_vms(
]
updater.shutdown_and_start_vms()
mocked_output.assert_has_calls(sys_vm_kill_calls)
- mocked_shutdown.assert_has_calls(
- template_vm_calls + app_vm_calls + sys_vm_shutdown_calls
- )
+ mocked_shutdown.assert_has_calls(template_vm_calls + app_vm_calls + sys_vm_shutdown_calls)
app_vm_calls_reversed = list(reversed(app_vm_calls))
mocked_start.assert_has_calls(sys_vm_start_calls + app_vm_calls_reversed)
assert not mocked_error.called
@mock.patch(
- "subprocess.check_output",
- side_effect=subprocess.CalledProcessError(1, "check_output"),
+ "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output"),
)
@mock.patch("Updater._safely_start_vm")
@mock.patch("Updater._safely_shutdown_vm")
@@ -826,9 +776,7 @@ def test_read_dom0_update_flag_from_disk_fails(
)
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
-def test_last_required_reboot_performed_successful(
- mocked_info, mocked_error, mocked_read
-):
+def test_last_required_reboot_performed_successful(mocked_info, mocked_error, mocked_read):
result = updater.last_required_reboot_performed()
assert result is True
assert not mocked_error.called
@@ -867,9 +815,7 @@ def test_last_required_reboot_performed_no_file(mocked_info, mocked_error, mocke
)
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
-def test_last_required_reboot_performed_not_required(
- mocked_info, mocked_error, mocked_read
-):
+def test_last_required_reboot_performed_not_required(mocked_info, mocked_error, mocked_read):
result = updater.last_required_reboot_performed()
assert result is True
assert not mocked_error.called
@@ -997,8 +943,7 @@ def test_apply_dom0_state_success(mocked_info, mocked_error, mocked_subprocess):
@mock.patch(
- "subprocess.check_call",
- side_effect=[subprocess.CalledProcessError(1, "check_call"), "0"],
+ "subprocess.check_call", side_effect=[subprocess.CalledProcessError(1, "check_call"), "0"],
)
@mock.patch("Updater.sdlog.error")
@mock.patch("Updater.sdlog.info")
diff --git a/launcher/tests/test_util.py b/launcher/tests/test_util.py
index 8a25165e..722486b5 100644
--- a/launcher/tests/test_util.py
+++ b/launcher/tests/test_util.py
@@ -53,9 +53,7 @@ def test_obtain_lock(mocked_info, mocked_warning, mocked_error):
@mock.patch("Util.sdlog.error")
@mock.patch("Util.sdlog.warning")
@mock.patch("Util.sdlog.info")
-def test_cannot_obtain_exclusive_lock_when_busy(
- mocked_info, mocked_warning, mocked_error
-):
+def test_cannot_obtain_exclusive_lock_when_busy(mocked_info, mocked_warning, mocked_error):
"""
Test whether only a single process can obtan an exclusive lock (basic
lockfile behavior).
@@ -126,9 +124,7 @@ def test_permission_error_is_handled(mocked_info, mocked_warning, mocked_error):
"""
Test whether permission errors obtaining a lock are handled correctly
"""
- with mock.patch(
- "builtins.open", side_effect=PermissionError()
- ) as mocked_open: # noqa: F821
+ with mock.patch("builtins.open", side_effect=PermissionError()) as mocked_open: # noqa: F821
lock = util.obtain_lock("test-open-error.lock")
assert lock is None
mocked_open.assert_called_once()
diff --git a/scripts/configure-environment b/scripts/configure-environment
index 35015209..437b486f 100755
--- a/scripts/configure-environment
+++ b/scripts/configure-environment
@@ -58,7 +58,7 @@ def confirm_staging():
def set_env_in_config(args):
- with open(args.config, 'r') as f:
+ with open(args.config, "r") as f:
old_config = json.load(f)
new_config = dict(old_config)
@@ -68,7 +68,7 @@ def set_env_in_config(args):
msg = "Updated config environment to '{}'...\n".format(args.environment)
sys.stderr.write(msg)
- with open(args.config, 'w') as f:
+ with open(args.config, "w") as f:
json.dump(new_config, f)
diff --git a/scripts/destroy-vm b/scripts/destroy-vm
index a0d65a81..805ad4c7 100755
--- a/scripts/destroy-vm
+++ b/scripts/destroy-vm
@@ -16,16 +16,10 @@ SDW_DEFAULT_TAG = "sd-workstation"
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
- "--all",
- default=False,
- required=False,
- action="store_true",
- help="Destroys all SDW VMs",
+ "--all", default=False, required=False, action="store_true", help="Destroys all SDW VMs",
)
parser.add_argument(
- "targets",
- help="List of SDW VMs to destroy",
- nargs=argparse.REMAINDER,
+ "targets", help="List of SDW VMs to destroy", nargs=argparse.REMAINDER,
)
args = parser.parse_args()
if not args.all and len(args.targets) < 1:
@@ -55,9 +49,9 @@ def destroy_all():
"""
# Remove DispVMs first, then AppVMs, then TemplateVMs last.
sdw_vms = [vm for vm in q.domains if SDW_DEFAULT_TAG in vm.tags]
- sdw_template_vms = [vm for vm in sdw_vms
- if vm.klass == "TemplateVM" and not
- vm.installed_by_rpm]
+ sdw_template_vms = [
+ vm for vm in sdw_vms if vm.klass == "TemplateVM" and not vm.installed_by_rpm
+ ]
sdw_disp_vms = [vm for vm in sdw_vms if vm.klass == "DispVM"]
sdw_app_vms = [vm for vm in sdw_vms if vm.klass == "AppVM"]
diff --git a/scripts/securedrop-admin.py b/scripts/securedrop-admin.py
index d3c20326..02a26c41 100644
--- a/scripts/securedrop-admin.py
+++ b/scripts/securedrop-admin.py
@@ -49,9 +49,7 @@ def copy_config():
Copies config.json and sd-journalist.sec to /srv/salt/sd
"""
try:
- subprocess.check_call(
- ["sudo", "cp", os.path.join(SCRIPTS_PATH, "config.json"), SALT_PATH]
- )
+ subprocess.check_call(["sudo", "cp", os.path.join(SCRIPTS_PATH, "config.json"), SALT_PATH])
subprocess.check_call(
["sudo", "cp", os.path.join(SCRIPTS_PATH, "sd-journalist.sec"), SALT_PATH]
)
@@ -98,26 +96,23 @@ def refresh_salt():
def perform_uninstall():
try:
- subprocess.check_call(
- ["sudo", "qubesctl", "state.sls", "sd-clean-default-dispvm"]
- )
+ subprocess.check_call(["sudo", "qubesctl", "state.sls", "sd-clean-default-dispvm"])
print("Destroying all VMs")
- subprocess.check_call(
- [os.path.join(SCRIPTS_PATH, "scripts/destroy-vm"), "--all"]
- )
+ subprocess.check_call([os.path.join(SCRIPTS_PATH, "scripts/destroy-vm"), "--all"])
subprocess.check_call(
[
- "sudo", "qubesctl", "--skip-dom0", "--targets",
- "whonix-gw-15", "state.sls", "sd-clean-whonix"
+ "sudo",
+ "qubesctl",
+ "--skip-dom0",
+ "--targets",
+ "whonix-gw-15",
+ "state.sls",
+ "sd-clean-whonix",
]
)
print("Reverting dom0 configuration")
- subprocess.check_call(
- ["sudo", "qubesctl", "state.sls", "sd-clean-all"]
- )
- subprocess.check_call(
- [os.path.join(SCRIPTS_PATH, "scripts/clean-salt")]
- )
+ subprocess.check_call(["sudo", "qubesctl", "state.sls", "sd-clean-all"])
+ subprocess.check_call([os.path.join(SCRIPTS_PATH, "scripts/clean-salt")])
print("Uninstalling Template")
subprocess.check_call(
["sudo", "dnf", "-y", "-q", "remove", "qubes-template-securedrop-workstation-buster"]
@@ -153,7 +148,7 @@ def main():
"from other VMs on the system."
)
response = input("Are you sure you want to uninstall (y/N)? ")
- if response.lower() != 'y':
+ if response.lower() != "y":
print("Exiting.")
sys.exit(0)
else:
diff --git a/scripts/validate_config.py b/scripts/validate_config.py
index 39188fda..a01f2fd7 100755
--- a/scripts/validate_config.py
+++ b/scripts/validate_config.py
@@ -10,11 +10,11 @@
from qubesadmin import Qubes
-TOR_V3_HOSTNAME_REGEX = r'^[a-z2-7]{56}\.onion$'
-TOR_V3_AUTH_REGEX = r'^[A-Z2-7]{52}$'
+TOR_V3_HOSTNAME_REGEX = r"^[a-z2-7]{56}\.onion$"
+TOR_V3_AUTH_REGEX = r"^[A-Z2-7]{52}$"
-TOR_V2_HOSTNAME_REGEX = r'^[a-z2-7]{16}\.onion$'
-TOR_V2_AUTH_COOKIE_REGEX = r'^[a-zA-z0-9+/]{22}$'
+TOR_V2_HOSTNAME_REGEX = r"^[a-z2-7]{16}\.onion$"
+TOR_V2_AUTH_COOKIE_REGEX = r"^[a-zA-z0-9+/]{22}$"
# CONFIG_FILEPATH = "/srv/salt/sd/config.json"
CONFIG_FILEPATH = "config.json"
@@ -97,10 +97,10 @@ def confirm_submission_privkey_file(self):
def confirm_submission_privkey_fingerprint(self):
assert "submission_key_fpr" in self.config
- assert re.match('^[a-fA-F0-9]{40}$', self.config["submission_key_fpr"])
+ assert re.match("^[a-fA-F0-9]{40}$", self.config["submission_key_fpr"])
def read_config_file(self):
- with open(self.config_filepath, 'r') as f:
+ with open(self.config_filepath, "r") as f:
j = json.load(f)
return j
@@ -111,10 +111,12 @@ def validate_existing_size(self):
assert "sd_app" in self.config["vmsizes"]
assert "sd_log" in self.config["vmsizes"]
- assert isinstance(self.config["vmsizes"]["sd_app"], int), \
- "Private volume size of sd-app must be an integer value."
- assert isinstance(self.config["vmsizes"]["sd_log"], int), \
- "Private volume size of sd-log must be an integer value."
+ assert isinstance(
+ self.config["vmsizes"]["sd_app"], int
+ ), "Private volume size of sd-app must be an integer value."
+ assert isinstance(
+ self.config["vmsizes"]["sd_log"], int
+ ), "Private volume size of sd-log must be an integer value."
app = Qubes()
if "sd-app" in app.domains:
diff --git a/sd-proxy/do-not-open-here b/sd-proxy/do-not-open-here
index 47801181..982a6ece 100755
--- a/sd-proxy/do-not-open-here
+++ b/sd-proxy/do-not-open-here
@@ -6,8 +6,9 @@ from PyQt5 import Qt
a = Qt.QApplication(sys.argv)
-nope = Qt.QLabel("Please do not use this VM to open any files"
- " aside from those downloaded from SecureDrop.")
+nope = Qt.QLabel(
+ "Please do not use this VM to open any files" " aside from those downloaded from SecureDrop."
+)
nope.show()
a.exec_()
diff --git a/setup.py b/setup.py
index 25808ed6..5bda10bc 100644
--- a/setup.py
+++ b/setup.py
@@ -17,15 +17,12 @@
license="GPLv3+",
python_requires=">=3.5",
url="https://github.com/freedomofpress/securdrop-workstation",
- data_files=[
- ("share/securedrop-workstation-dom0-config", ["config.json.example"]),
- ],
+ data_files=[("share/securedrop-workstation-dom0-config", ["config.json.example"]),],
classifiers=(
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Libraries :: Python Modules",
- "License :: OSI Approved :: "
- "GNU General Public License v3 or later (GPLv3+)",
+ "License :: OSI Approved :: " "GNU General Public License v3 or later (GPLv3+)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
),
diff --git a/tests/base.py b/tests/base.py
index 547e44c0..a0b76484 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -6,22 +6,13 @@
# Reusable constant for DRY import across tests
-WANTED_VMS = [
- "sd-gpg",
- "sd-log",
- "sd-proxy",
- "sd-app",
- "sd-viewer",
- "sd-whonix",
- "sd-devices"
-]
+WANTED_VMS = ["sd-gpg", "sd-log", "sd-proxy", "sd-app", "sd-viewer", "sd-whonix", "sd-devices"]
# base class for per-VM testing
class SD_VM_Local_Test(unittest.TestCase):
-
def setUp(self):
self.app = Qubes()
self.vm = self.app.domains[self.vm_name]
@@ -44,8 +35,7 @@ def _reboot(self):
# * QubesVMNotStartedError (from qubesadmin.base)
for v in list(self.vm.connected_vms.values()):
if v.is_running():
- msg = ("Need to halt connected VM {}"
- " before testing".format(v))
+ msg = "Need to halt connected VM {}" " before testing".format(v)
print(msg)
v.shutdown()
while v.is_running():
@@ -68,8 +58,7 @@ def _run(self, cmd, user=""):
return contents
def _get_file_contents(self, path):
- cmd = ["qvm-run", "-p", self.vm_name,
- "sudo /bin/cat {}".format(path)]
+ cmd = ["qvm-run", "-p", self.vm_name, "sudo /bin/cat {}".format(path)]
contents = subprocess.check_output(cmd).decode("utf-8")
return contents
@@ -79,8 +68,7 @@ def _package_is_installed(self, pkg):
"""
# dpkg --verify will exit non-zero for a non-installed pkg,
# and dom0 will percolate that error code
- subprocess.check_call(["qvm-run", "-a", "-q", self.vm_name,
- "dpkg --verify {}".format(pkg)])
+ subprocess.check_call(["qvm-run", "-a", "-q", self.vm_name, "dpkg --verify {}".format(pkg)])
return True
def assertFilesMatch(self, remote_path, local_path):
@@ -90,6 +78,7 @@ def assertFilesMatch(self, remote_path, local_path):
with open(local_path) as f:
content = f.read()
import difflib
+
print("".join(difflib.unified_diff(remote_content, content)), end="")
self.assertTrue(remote_content == content)
@@ -99,8 +88,7 @@ def assertFileHasLine(self, remote_path, wanted_line):
for line in lines:
if line == wanted_line:
return True
- msg = "File {} does not contain expected line {}".format(remote_path,
- wanted_line)
+ msg = "File {} does not contain expected line {}".format(remote_path, wanted_line)
raise AssertionError(msg)
def _fileExists(self, remote_path):
@@ -109,8 +97,9 @@ def _fileExists(self, remote_path):
# ls will return non-zero and an exception will be thrown if the file
# does not exist, so we return false in that case.
try:
- subprocess.check_call(["qvm-run", "-a", "-q", self.vm_name,
- "ls {}".format(remote_path)])
+ subprocess.check_call(
+ ["qvm-run", "-a", "-q", self.vm_name, "ls {}".format(remote_path)]
+ )
except subprocess.CalledProcessError:
return False
diff --git a/tests/test_app.py b/tests/test_app.py
index 9848d8af..cfeaf602 100644
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -10,19 +10,15 @@ def setUp(self):
super(SD_App_Tests, self).setUp()
def test_decrypt_sd_user_profile(self):
- contents = self._get_file_contents(
- "/etc/profile.d/sd-app-qubes-gpg-domain.sh"
- )
+ contents = self._get_file_contents("/etc/profile.d/sd-app-qubes-gpg-domain.sh")
expected_content = 'export QUBES_GPG_DOMAIN="sd-gpg"\n'
self.assertEqual(contents, expected_content)
def test_open_in_dvm_desktop(self):
- contents = self._get_file_contents(
- "/usr/share/applications/open-in-dvm.desktop"
- )
+ contents = self._get_file_contents("/usr/share/applications/open-in-dvm.desktop")
expected_contents = [
"TryExec=/usr/bin/qvm-open-in-vm",
- "Exec=/usr/bin/qvm-open-in-vm --view-only '@dispvm:sd-viewer' %f"
+ "Exec=/usr/bin/qvm-open-in-vm --view-only '@dispvm:sd-viewer' %f",
]
for line in expected_contents:
self.assertTrue(line in contents)
@@ -37,7 +33,7 @@ def test_mimeapps_functional(self):
cmd = "perl -F= -lane 'print $F[0]' /usr/share/applications/mimeapps.list"
results = self._run(cmd)
for line in results.split("\n"):
- if line != "[Default Applications]" and not line.startswith('#'):
+ if line != "[Default Applications]" and not line.startswith("#"):
actual_app = self._run("xdg-mime query default {}".format(line))
self.assertEqual(actual_app, "open-in-dvm.desktop")
@@ -51,7 +47,7 @@ def test_sd_client_dependencies_installed(self):
def test_sd_client_config(self):
with open("config.json") as c:
config = json.load(c)
- submission_fpr = config['submission_key_fpr']
+ submission_fpr = config["submission_key_fpr"]
line = '{{"journalist_key_fingerprint": "{}"}}'.format(submission_fpr)
self.assertFileHasLine("/home/user/.securedrop_client/config.json", line)
@@ -59,7 +55,7 @@ def test_sd_client_config(self):
def test_sd_client_apparmor(self):
cmd = "sudo aa-status --json"
results = json.loads(self._run(cmd))
- self.assertTrue(results['profiles']['/usr/bin/securedrop-client'] == "enforce")
+ self.assertTrue(results["profiles"]["/usr/bin/securedrop-client"] == "enforce")
def test_logging_configured(self):
self.logging_configured()
diff --git a/tests/test_dom0_config.py b/tests/test_dom0_config.py
index b1b8750c..3f10887f 100644
--- a/tests/test_dom0_config.py
+++ b/tests/test_dom0_config.py
@@ -9,7 +9,7 @@
"securedrop-workstation",
"sd-svs-buster-template",
"sd-export-buster-template",
- "sd-svs-disp-buster-template"
+ "sd-svs-disp-buster-template",
]
VMS_TO_UPDATE = [
@@ -19,12 +19,11 @@
"sd-devices-buster-template",
"whonix-ws-15",
"whonix-gw-15",
- "securedrop-workstation-buster"
+ "securedrop-workstation-buster",
]
class SD_Qubes_Dom0_Templates_Tests(unittest.TestCase):
-
def setUp(self):
pass
@@ -39,10 +38,7 @@ def test_Templates_cleaned_up(self):
self.assertFalse(template == line)
def test_vms_to_update_are_tagged(self):
- cmd = ["qvm-ls",
- "--tags", "sd-workstation-updates",
- "--raw-data",
- "--fields", "NAME"]
+ cmd = ["qvm-ls", "--tags", "sd-workstation-updates", "--raw-data", "--fields", "NAME"]
contents = subprocess.check_output(cmd).decode("utf-8").strip()
for template in VMS_TO_UPDATE:
self.assertTrue(template in contents)
diff --git a/tests/test_dom0_rpm_repo.py b/tests/test_dom0_rpm_repo.py
index 7d149313..6438c2f6 100644
--- a/tests/test_dom0_rpm_repo.py
+++ b/tests/test_dom0_rpm_repo.py
@@ -17,10 +17,10 @@ def setUp(self):
self.maxDiff = None
with open("config.json") as c:
config = json.load(c)
- if 'environment' not in config:
- config['environment'] = 'dev'
+ if "environment" not in config:
+ config["environment"] = "dev"
- if config['environment'] == 'prod':
+ if config["environment"] == "prod":
self.pubkey_wanted = self.pubkey_wanted_prod
self.yum_repo_url = self.yum_repo_url_prod
else:
diff --git a/tests/test_gpg.py b/tests/test_gpg.py
index 10705722..e7329b26 100644
--- a/tests/test_gpg.py
+++ b/tests/test_gpg.py
@@ -13,7 +13,7 @@ def find_fp_from_gpg_output(gpg):
# dom0 uses Fedora25 with gpg 1.4.22, whereas AppVMs
# use Debian9 with gpg 2.1.18, so we'll match fingerprint
# by a loose regex rather than substring match.
- regex = '\s*(Key fingerprint = )?([A-F0-9\s]{50})$'
+ regex = "\s*(Key fingerprint = )?([A-F0-9\s]{50})$"
m = re.match(regex, line)
if m is not None:
fp = m.groups()[1]
@@ -29,8 +29,7 @@ def get_local_fp():
def get_remote_fp():
- cmd = ["qvm-run", "-p", "sd-gpg",
- "/usr/bin/gpg --list-secret-keys --fingerprint"]
+ cmd = ["qvm-run", "-p", "sd-gpg", "/usr/bin/gpg --list-secret-keys --fingerprint"]
p = subprocess.check_output(cmd)
@@ -38,7 +37,6 @@ def get_remote_fp():
class SD_GPG_Tests(SD_VM_Local_Test):
-
def setUp(self):
self.vm_name = "sd-gpg"
super(SD_GPG_Tests, self).setUp()
diff --git a/tests/test_proxy_vm.py b/tests/test_proxy_vm.py
index 9ecdf9fa..93625ee9 100644
--- a/tests/test_proxy_vm.py
+++ b/tests/test_proxy_vm.py
@@ -10,8 +10,7 @@ def setUp(self):
super(SD_Proxy_Tests, self).setUp()
def test_do_not_open_here(self):
- self.assertFilesMatch("/usr/bin/do-not-open-here",
- "sd-proxy/do-not-open-here")
+ self.assertFilesMatch("/usr/bin/do-not-open-here", "sd-proxy/do-not-open-here")
def test_sd_proxy_package_installed(self):
self.assertTrue(self._package_is_installed("securedrop-proxy"))
@@ -19,7 +18,7 @@ def test_sd_proxy_package_installed(self):
def test_sd_proxy_yaml_config(self):
with open("config.json") as c:
config = json.load(c)
- hostname = config['hidserv']['hostname']
+ hostname = config["hidserv"]["hostname"]
wanted_lines = [
"host: {}".format(hostname),
@@ -47,9 +46,9 @@ def test_mime_types(self):
with open("sd-proxy/mimeapps.list", "r") as f:
lines = f.readlines()
for line in lines:
- if line != "[Default Applications]\n" and not line.startswith('#'):
- mime_type = line.split('=')[0]
- expected_app = line.split('=')[1].split(';')[0]
+ if line != "[Default Applications]\n" and not line.startswith("#"):
+ mime_type = line.split("=")[0]
+ expected_app = line.split("=")[1].split(";")[0]
actual_app = self._run("xdg-mime query default {}".format(mime_type))
self.assertEqual(actual_app, expected_app)
diff --git a/tests/test_qubes_rpc.py b/tests/test_qubes_rpc.py
index 59241533..efc65079 100644
--- a/tests/test_qubes_rpc.py
+++ b/tests/test_qubes_rpc.py
@@ -7,7 +7,6 @@
class SD_Qubes_Rpc_Tests(unittest.TestCase):
-
def setUp(self):
self.expected = self._loadVars()
@@ -19,30 +18,26 @@ def test_Policies(self):
# the absence of pytest in dom0.
fail = False
for policy in self.expected:
- if not self._startsWith(policy['policy'],
- policy['starts_with']):
+ if not self._startsWith(policy["policy"], policy["starts_with"]):
fail = True
self.assertFalse(fail)
def _startsWith(self, filename, expectedPolicy):
filePath = os.path.join(QUBES_POLICY_DIR, filename)
- with io.open(filePath, 'r') as f:
+ with io.open(filePath, "r") as f:
actualPolicy = f.read()
if actualPolicy.startswith(expectedPolicy):
return True
else:
print("\n\n#### BEGIN RPC policy error report ####\n\n")
- print("Policy for {} is:\n{}".format(filename,
- actualPolicy))
- print("Policy for {} should be:\n{}".format(filename,
- expectedPolicy))
+ print("Policy for {} is:\n{}".format(filename, actualPolicy))
+ print("Policy for {} should be:\n{}".format(filename, expectedPolicy))
print("\n\n#### END RPC policy error report ####\n\n")
return False
def _loadVars(self):
- filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "vars", "qubes-rpc.yml")
- with io.open(filepath, 'r') as f:
+ filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), "vars", "qubes-rpc.yml")
+ with io.open(filepath, "r") as f:
data = yaml.safe_load(f)
return data
diff --git a/tests/test_sd_devices.py b/tests/test_sd_devices.py
index fa2ea8fc..23f5765f 100644
--- a/tests/test_sd_devices.py
+++ b/tests/test_sd_devices.py
@@ -5,7 +5,6 @@
class SD_Devices_Tests(SD_VM_Local_Test):
-
def setUp(self):
self.vm_name = "sd-devices-dvm"
super(SD_Devices_Tests, self).setUp()
@@ -25,24 +24,23 @@ def test_logging_configured(self):
self.logging_configured()
def test_mime_types(self):
- filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "vars", "sd-devices.mimeapps")
+ filepath = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "vars", "sd-devices.mimeapps"
+ )
with open(filepath, "r") as f:
lines = f.readlines()
for line in lines:
- if line != "[Default Applications]\n" and not line.startswith('#'):
- mime_type = line.split('=')[0]
- expected_app = line.split('=')[1].split(';')[0]
+ if line != "[Default Applications]\n" and not line.startswith("#"):
+ mime_type = line.split("=")[0]
+ expected_app = line.split("=")[1].split(";")[0]
actual_app = self._run("xdg-mime query default {}".format(mime_type))
self.assertEqual(actual_app, expected_app)
def test_open_in_dvm_desktop(self):
- contents = self._get_file_contents(
- "/usr/share/applications/open-in-dvm.desktop"
- )
+ contents = self._get_file_contents("/usr/share/applications/open-in-dvm.desktop")
expected_contents = [
"TryExec=/usr/bin/qvm-open-in-vm",
- "Exec=/usr/bin/qvm-open-in-vm --view-only '@dispvm:sd-viewer' %f"
+ "Exec=/usr/bin/qvm-open-in-vm --view-only '@dispvm:sd-viewer' %f",
]
for line in expected_contents:
self.assertTrue(line in contents)
diff --git a/tests/test_sd_whonix.py b/tests/test_sd_whonix.py
index 3d1b3ac8..a894aa75 100644
--- a/tests/test_sd_whonix.py
+++ b/tests/test_sd_whonix.py
@@ -28,9 +28,7 @@ def test_accept_sd_xfer_extracted_file(self):
with open("config.json") as c:
config = json.load(c)
if len(config["hidserv"]["hostname"]) == 22:
- t = Template(
- "HidServAuth {{ d.hidserv.hostname }}" " {{ d.hidserv.key }}"
- )
+ t = Template("HidServAuth {{ d.hidserv.hostname }}" " {{ d.hidserv.key }}")
line = t.render(d=config)
else:
@@ -46,9 +44,7 @@ def test_v3_auth_private_file(self):
keyvalue = config["hidserv"]["key"]
line = "{0}:descriptor:x25519:{1}".format(hostname, keyvalue)
- self.assertFileHasLine(
- "/var/lib/tor/keys/app-journalist.auth_private", line
- )
+ self.assertFileHasLine("/var/lib/tor/keys/app-journalist.auth_private", line)
def test_sd_whonix_repo_enabled(self):
"""
@@ -73,8 +69,10 @@ def test_whonix_torrc(self):
torrc_contents = self._get_file_contents("/etc/tor/torrc")
duplicate_includes = """%include /etc/torrc.d/
%include /etc/torrc.d/95_whonix.conf"""
- self.assertFalse(duplicate_includes in torrc_contents,
- "Whonix GW torrc contains duplicate %include lines")
+ self.assertFalse(
+ duplicate_includes in torrc_contents,
+ "Whonix GW torrc contains duplicate %include lines",
+ )
def load_tests(loader, tests, pattern):
diff --git a/tests/test_sys_firewall.py b/tests/test_sys_firewall.py
index 49e3d632..0081736b 100644
--- a/tests/test_sys_firewall.py
+++ b/tests/test_sys_firewall.py
@@ -13,21 +13,21 @@ def setUp(self):
super(SD_Sys_Firewall_Tests, self).setUp()
with open("config.json") as c:
config = json.load(c)
- if 'environment' not in config:
- config['environment'] = 'dev'
+ if "environment" not in config:
+ config["environment"] = "dev"
- if config['environment'] == 'prod':
+ if config["environment"] == "prod":
self.pubkey_wanted = SD_Dom0_Rpm_Repo_Tests.pubkey_wanted_prod
else:
self.pubkey_wanted = SD_Dom0_Rpm_Repo_Tests.pubkey_wanted_test
def test_rpm_repo_public_key(self):
- self.assertFilesMatch(SD_Dom0_Rpm_Repo_Tests.pubkey_actual, # noqa
- self.pubkey_wanted)
+ self.assertFilesMatch(SD_Dom0_Rpm_Repo_Tests.pubkey_actual, self.pubkey_wanted) # noqa
def test_rpm_repo_public_key_script(self):
- self.assertFilesMatch("/rw/config/sd-copy-rpm-repo-pubkey.sh",
- "sys-firewall/sd-copy-rpm-repo-pubkey.sh")
+ self.assertFilesMatch(
+ "/rw/config/sd-copy-rpm-repo-pubkey.sh", "sys-firewall/sd-copy-rpm-repo-pubkey.sh"
+ )
def load_tests(loader, tests, pattern):
diff --git a/tests/test_sys_usb.py b/tests/test_sys_usb.py
index 39fb9aa4..2d477bea 100644
--- a/tests/test_sys_usb.py
+++ b/tests/test_sys_usb.py
@@ -4,7 +4,6 @@
class SD_SysUSB_Tests(SD_VM_Local_Test):
-
def setUp(self):
self.vm_name = "sys-usb"
super(SD_SysUSB_Tests, self).setUp()
diff --git a/tests/test_viewer.py b/tests/test_viewer.py
index 5882db44..30f86270 100644
--- a/tests/test_viewer.py
+++ b/tests/test_viewer.py
@@ -24,14 +24,15 @@ def test_logging_configured(self):
self.logging_configured()
def test_mime_types(self):
- filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "vars", "sd-viewer.mimeapps")
+ filepath = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "vars", "sd-viewer.mimeapps"
+ )
with open(filepath, "r") as f:
lines = f.readlines()
for line in lines:
- if line != "[Default Applications]\n" and not line.startswith('#'):
- mime_type = line.split('=')[0]
- expected_app = line.split('=')[1].rstrip()
+ if line != "[Default Applications]\n" and not line.startswith("#"):
+ mime_type = line.split("=")[0]
+ expected_app = line.split("=")[1].rstrip()
actual_app = self._run("xdg-mime query default {}".format(mime_type))
self.assertEqual(actual_app, expected_app)
diff --git a/tests/test_vms_exist.py b/tests/test_vms_exist.py
index aeb9da2d..2b7e2c49 100644
--- a/tests/test_vms_exist.py
+++ b/tests/test_vms_exist.py
@@ -56,7 +56,7 @@ def test_sd_whonix_config(self):
self.assertTrue(vm.provides_network)
self.assertTrue(vm.autostart is True)
self.assertFalse(vm.template_for_dispvms)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
def test_sd_proxy_config(self):
vm = self.app.domains["sd-proxy"]
@@ -66,7 +66,7 @@ def test_sd_proxy_config(self):
self.assertTrue(vm.autostart is True)
self.assertFalse(vm.provides_network)
self.assertFalse(vm.template_for_dispvms)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
def test_sd_app_config(self):
vm = self.app.domains["sd-app"]
@@ -77,8 +77,8 @@ def test_sd_app_config(self):
self.assertFalse(vm.template_for_dispvms)
self._check_kernel(vm)
self._check_service_running(vm, "paxctld")
- self.assertTrue('sd-workstation' in vm.tags)
- self.assertTrue('sd-client' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
+ self.assertTrue("sd-client" in vm.tags)
# Check the size of the private volume
# Should be 10GB
# >>> 1024 * 1024 * 10 * 1024
@@ -95,7 +95,7 @@ def test_sd_viewer_config(self):
self.assertTrue(vm.template_for_dispvms)
self._check_kernel(vm)
self._check_service_running(vm, "paxctld")
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
def test_sd_gpg_config(self):
vm = self.app.domains["sd-gpg"]
@@ -107,7 +107,7 @@ def test_sd_gpg_config(self):
self.assertFalse(vm.provides_network)
self.assertFalse(vm.template_for_dispvms)
self._check_kernel(vm)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
def test_sd_log_config(self):
vm = self.app.domains["sd-log"]
@@ -121,7 +121,7 @@ def test_sd_log_config(self):
self._check_service_running(vm, "paxctld")
self._check_service_running(vm, "securedrop-log")
self.assertFalse(vm.template_for_dispvms)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
# Check the size of the private volume
# Should be same of config.json
# >>> 1024 * 1024 * 5 * 1024
@@ -135,7 +135,7 @@ def test_sd_workstation_template(self):
self.assertTrue(nvm is None)
self.assertTrue(vm.virt_mode == "hvm")
self.assertTrue(vm.kernel == "")
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
self._check_kernel(vm)
self._check_service_running(vm, "paxctld")
@@ -143,34 +143,34 @@ def test_sd_proxy_template(self):
vm = self.app.domains["sd-proxy-buster-template"]
nvm = vm.netvm
self.assertTrue(nvm is None)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
def sd_app_template(self):
vm = self.app.domains["sd-app-buster-template"]
nvm = vm.netvm
self.assertTrue(nvm is None)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
self._check_kernel(vm)
def sd_viewer_template(self):
vm = self.app.domains["sd-viewer-buster-template"]
nvm = vm.netvm
self.assertTrue(nvm is None)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
self.assertTrue(vm.template_for_dispvms)
def sd_export_template(self):
vm = self.app.domains["sd-devices-buster-template"]
nvm = vm.netvm
self.assertTrue(nvm is None)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
self._check_kernel(vm)
def sd_export_dvm(self):
vm = self.app.domains["sd-devices-dvm"]
nvm = vm.netvm
self.assertTrue(nvm is None)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
self.assertTrue(vm.template_for_dispvms)
self._check_kernel(vm)
@@ -180,14 +180,14 @@ def sd_export(self):
self.assertTrue(nvm is None)
vm_type = vm.klass
self.assertTrue(vm_type == "DispVM")
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
self._check_kernel(vm)
def sd_log_template(self):
vm = self.app.domains["sd-log-buster-template"]
nvm = vm.netvm
self.assertTrue(nvm is None)
- self.assertTrue('sd-workstation' in vm.tags)
+ self.assertTrue("sd-workstation" in vm.tags)
self.assertFalse(vm.template_for_dispvms)
self._check_kernel(vm)
diff --git a/tests/test_vms_platform.py b/tests/test_vms_platform.py
index 2cac32da..7b26c204 100644
--- a/tests/test_vms_platform.py
+++ b/tests/test_vms_platform.py
@@ -23,10 +23,10 @@ def setUp(self):
self.app = Qubes()
with open("config.json") as c:
config = json.load(c)
- if 'environment' not in config:
- config['environment'] = 'dev'
+ if "environment" not in config:
+ config["environment"] = "dev"
- if config['environment'] == 'prod':
+ if config["environment"] == "prod":
self.apt_url = FPF_APT_SOURCES_BUSTER
else:
self.apt_url = FPF_APT_SOURCES_BUSTER_DEV
@@ -134,9 +134,9 @@ def _ensure_keyring_package_exists_and_has_correct_key(self, vm):
"pub rsa4096 2016-10-20 [SC] [expires: 2021-06-30]",
" 22245C81E3BAEB4138B36061310F561200F4AD77",
"uid [ unknown] SecureDrop Release Signing Key",
- "uid [ unknown] SecureDrop Release Signing Key " # noqa: E501
+ "uid [ unknown] SecureDrop Release Signing Key ", # noqa: E501
]
- self.assertEqual(fpf_gpg_pub_key_info, results.split('\n'))
+ self.assertEqual(fpf_gpg_pub_key_info, results.split("\n"))
def _ensure_trusted_keyring_securedrop_key_removed(self, vm):
"""
From b2d74bc28b8af9d5da6351e52100265d8e87c430 Mon Sep 17 00:00:00 2001
From: Erik Moeller
Date: Thu, 11 Jun 2020 00:43:50 -0700
Subject: [PATCH 3/7] Add .git-blame-ignore-revs
---
.git-blame-ignore-revs | 1 +
1 file changed, 1 insertion(+)
create mode 100644 .git-blame-ignore-revs
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 00000000..5f4f5966
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1 @@
+782a04e59ae10d40085b5d8807e255143e5d05c5
From b905e61384d4fbd7a145bfef991e25dd173221c5 Mon Sep 17 00:00:00 2001
From: Erik Moeller
Date: Fri, 5 Jun 2020 18:19:23 -0700
Subject: [PATCH 4/7] Work around https://github.com/psf/black/issues/1289
---
launcher/sdw_updater_gui/Updater.py | 4 ++--
launcher/tests/test_updater.py | 2 +-
setup.py | 2 +-
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/launcher/sdw_updater_gui/Updater.py b/launcher/sdw_updater_gui/Updater.py
index 62d1c55d..a5a94257 100644
--- a/launcher/sdw_updater_gui/Updater.py
+++ b/launcher/sdw_updater_gui/Updater.py
@@ -127,7 +127,7 @@ def _check_updates_debian(vm):
sdlog.info("Checking for updates {}:{}".format(vm, current_templates[vm]))
subprocess.check_call(["qvm-run", current_templates[vm], "sudo apt update"])
subprocess.check_call(
- ["qvm-run", current_templates[vm], "[[ $(apt list --upgradable | wc -l) -eq 1 ]]",]
+ ["qvm-run", current_templates[vm], "[[ $(apt list --upgradable | wc -l) -eq 1 ]]"]
)
except subprocess.CalledProcessError as e:
sdlog.error(
@@ -237,7 +237,7 @@ def _write_updates_status_flag_to_disk(status):
try:
sdlog.info("Setting update flag to {} in sd-app".format(status.value))
subprocess.check_call(
- ["qvm-run", "sd-app", "echo '{}' > {}".format(status.value, flag_file_path_sd_app),]
+ ["qvm-run", "sd-app", "echo '{}' > {}".format(status.value, flag_file_path_sd_app)]
)
except subprocess.CalledProcessError as e:
sdlog.error("Error writing update status flag to sd-app")
diff --git a/launcher/tests/test_updater.py b/launcher/tests/test_updater.py
index bfb624b2..57741c60 100644
--- a/launcher/tests/test_updater.py
+++ b/launcher/tests/test_updater.py
@@ -204,7 +204,7 @@ def test_check_updates_calls_correct_commands(
subprocess_call_list = [
call(["qvm-run", current_templates[vm], "sudo apt update"]),
call(
- ["qvm-run", current_templates[vm], "[[ $(apt list --upgradable | wc -l) -eq 1 ]]",]
+ ["qvm-run", current_templates[vm], "[[ $(apt list --upgradable | wc -l) -eq 1 ]]"]
),
]
check_output_call_list = [
diff --git a/setup.py b/setup.py
index 5bda10bc..12216d2a 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@
license="GPLv3+",
python_requires=">=3.5",
url="https://github.com/freedomofpress/securdrop-workstation",
- data_files=[("share/securedrop-workstation-dom0-config", ["config.json.example"]),],
+ data_files=[("share/securedrop-workstation-dom0-config", ["config.json.example"])],
classifiers=(
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3",
From bfc9bf70479e9570af3ba5e82e451deb4f3ff8f3 Mon Sep 17 00:00:00 2001
From: Erik Moeller
Date: Thu, 11 Jun 2020 00:46:13 -0700
Subject: [PATCH 5/7] Fix Makefile comment; mark target as phony
---
Makefile | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index 62852a10..cbc4339b 100644
--- a/Makefile
+++ b/Makefile
@@ -14,7 +14,7 @@ all: ## Builds and provisions all VMs required for testing workstation
./scripts/provision-all
.PHONY: black
-black: ## Lints all Python files with flake8
+black: ## Lints all Python files with black
# Not requiring dom0 since linting requires extra packages,
# available only in the developer environment, i.e. Work VM.
@./scripts/lint-all "black --check"
@@ -156,6 +156,7 @@ destroy-all: ## Destroys all VMs managed by Workstation salt config
update-pip-requirements: ## Updates all Python requirements files via pip-compile.
pip-compile --generate-hashes --output-file requirements.txt requirements.in
+.PHONY: venv
venv: ## Provision and activate a Python 3 virtualenv for development.
python3 -m venv .venv
.venv/bin/pip install --require-hashes -r dev-requirements.txt
From f9abd0e0cbd95a28e62f94dc8fa93bbdb25b4a55 Mon Sep 17 00:00:00 2001
From: Erik Moeller
Date: Thu, 11 Jun 2020 01:04:56 -0700
Subject: [PATCH 6/7] Do not exclude launcher
---
pyproject.toml | 1 -
1 file changed, 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 8aaabff8..aa4949aa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,2 @@
[tool.black]
line-length = 100
-exclude = 'launcher/.*'
From 3f5e9259953e6e6518328925b4942f3f02ac3296 Mon Sep 17 00:00:00 2001
From: Erik Moeller
Date: Mon, 15 Jun 2020 22:28:42 -0700
Subject: [PATCH 7/7] Check for existence of file command
---
scripts/lint-all | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/scripts/lint-all b/scripts/lint-all
index 4f1a300e..2c34353d 100755
--- a/scripts/lint-all
+++ b/scripts/lint-all
@@ -3,6 +3,15 @@ set -e
set -u
set -o pipefail
+if ! [ -x "$(command -v file)" ]; then
+ echo "Error: file is not installed." >&2
+ echo >&2
+ echo "The file command is required to enumerate all Python files in this repo." >&2
+ echo "On Debian/Ubuntu systems, run the following command to install it:" >&2
+ echo >&2
+ echo " sudo apt-get install file" >&2
+ exit 1
+fi
if [ -z "$@" ]; then
echo "Usage: lint-all "