From 29e9877c364b8abaa5fafd56460210d3d0ed4ccd Mon Sep 17 00:00:00 2001 From: Joshua Thayer Date: Wed, 15 Aug 2018 09:58:36 -0700 Subject: [PATCH 001/352] initial commit --- .gitignore | 106 +++++++++++++++++++++++ Pipfile | 12 +++ sd-proxy.py | 67 +++++++++++++++ securedrop_proxy/__init__.py | 0 securedrop_proxy/__main__.py | 126 +++++++++++++++++++++++++++ securedrop_proxy/pipereader.py | 87 +++++++++++++++++++ securedrop_proxy/proxy.py | 150 +++++++++++++++++++++++++++++++++ securedrop_proxy/util.py | 103 ++++++++++++++++++++++ 8 files changed, 651 insertions(+) create mode 100644 .gitignore create mode 100644 Pipfile create mode 100755 sd-proxy.py create mode 100644 securedrop_proxy/__init__.py create mode 100644 securedrop_proxy/__main__.py create mode 100755 securedrop_proxy/pipereader.py create mode 100644 securedrop_proxy/proxy.py create mode 100644 securedrop_proxy/util.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..dea2de221 --- /dev/null +++ b/.gitignore @@ -0,0 +1,106 @@ +*.sqlite + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ diff --git a/Pipfile b/Pipfile new file mode 100644 index 000000000..7b34944ec --- /dev/null +++ b/Pipfile @@ -0,0 +1,12 @@ +[[source]] +url = "https://pypi.org/simple" +name = "pypi" +verify_ssl = true + +[requires] +python_version = "3.5" + +[packages] +furl = "*" + +[dev-packages] diff --git a/sd-proxy.py b/sd-proxy.py new file mode 100755 index 000000000..fe31d8b36 --- /dev/null +++ b/sd-proxy.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 + +import sys +import json +import securedrop_proxy.proxy as proxy +import uuid +import subprocess + +# "read conf" +conf = proxy.Conf() +conf.host = 'jsonplaceholder.typicode.com' +conf.scheme = 'https' +conf.port = 443 + +# instantiate response object +p = proxy.Proxy(conf) + +# timeout? 
+ +# read from STDIN +incoming = [] +for line in sys.stdin: + incoming.append(line) + +# deserialize incoming request +client_req = None +try: + client_req = json.loads('\n'.join(incoming)) +except json.decoder.JSONDecodeError: + p.simple_error(400, 'Invalid JSON in request') + print(json.dumps(p.res.__dict__)) + sys.exit(1) + +# build request oject +req = proxy.Req() +try: + req.method = client_req['method'] + req.path_query = client_req['path_query'] +except KeyError: + p.simple_error(400, 'Missing keys in request') + print(json.dumps(p.res.__dict__)) + sys.exit(1) + +if "headers" in client_req: + req.headers = client_req['headers'] + +if "body" in client_req: + req.body = client_req['body'] + +def on_save(fh, res): + fn = str(uuid.uuid4()) + + # this will be `qvm-move...` in production + subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + + res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.headers['Content-Type'] = 'application/json' + res.body = json.dumps({'filename': fn }) + +def on_done(res): + print(json.dumps(res.__dict__)) + +# complete proxy object +p.req = req +p.on_save = on_save +p.on_done = on_done +p.proxy() diff --git a/securedrop_proxy/__init__.py b/securedrop_proxy/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/securedrop_proxy/__main__.py b/securedrop_proxy/__main__.py new file mode 100644 index 000000000..6abb71d76 --- /dev/null +++ b/securedrop_proxy/__main__.py @@ -0,0 +1,126 @@ +import proxy +import json +import subprocess +import uuid + +conf = proxy.Conf() +conf.host = 'jsonplaceholder.typicode.com' +conf.scheme = 'https' +conf.port = 443 + +def on_save(fh, res): + + fn = str(uuid.uuid4()) + + # this will be `qvm-move...` in production + subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + + res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.headers['Content-Type'] = 'application/json' + res.body = json.dumps({'filename': fn }) + +# does it work at all +req = 
proxy.Req() +req.method = 'GET' +req.path_query = '' +req.headers = {'Accept': 'application/json'} + +p = proxy.Proxy(conf, req, on_save) +p.proxy() + +print(p.res.status) +print(p.res.headers) +print(p.res.version) +print(p.res.body) + +# params +req = proxy.Req() +req.method = 'GET' +req.path_query = '/posts?userId=1' +req.headers = {'Accept': 'application/json'} + +p = proxy.Proxy(conf, req, on_save) +p.proxy() + +print(p.res.status) +#print(res.headers) +print(p.res.version) +print(json.loads(p.res.body.decode())) + + +# path +req = proxy.Req() +req.method = 'GET' +req.path_query = '/posts/1' +req.headers = {'Accept': 'application/json'} + +p = proxy.Proxy(conf, req, on_save) +p.proxy() + +print(p.res.status) # 200 +print(p.res.version) +print(json.loads(p.res.body.decode())) + + +# 404 +req = proxy.Req() +req.method = 'GET' +req.path_query = '/notfound' +req.headers = {'Accept': 'application/json'} + +p = proxy.Proxy(conf, req, on_save) +p.proxy() + +print(p.res.status) # 404 +print(p.res.headers) +print(p.res.version) +print(p.res.body) # {} + + +# 400 bad path +req = proxy.Req() +req.method = 'GET' +req.path_query = 'http://badpath.lol/path' +req.headers = {'Accept': 'application/json'} + +p = proxy.Proxy(conf, req, on_save) +p.proxy() + +print(p.res.status) # 400 +print(p.res.headers) +print(p.res.version) +print(p.res.body) # {'error': 'Path provided in request did not look valid'} + +# 400 no handler +req = proxy.Req() +req.method = 'GET' +req.path_query = 'http://badpath.lol/path' +req.headers = {'Accept': 'application/json'} + +p = proxy.Proxy(conf, req, None) +p.proxy() + +print(p.res.status) # 400 +print(p.res.headers) +print(p.res.version) +print(p.res.body) # {'error': 'Request callback is not set.'} + + +# 500 proxy error (in this case, misconfiguration) +conf = proxy.Conf() +conf.host = 'jsonplaceholder.typicode.com' +conf.scheme = 'https://http' # bad +conf.port = 443 + +req = proxy.Req() +req.method = 'GET' +req.path_query = '/posts/1' 
+req.headers = {'Accept': 'application/json'} + +p = proxy.Proxy(conf, req, on_save) +p.proxy() + +print(p.res.status) # 500 +print(p.res.headers) +print(p.res.version) +print(p.res.body) # {'error': 'Proxy error while generating URL to request'} diff --git a/securedrop_proxy/pipereader.py b/securedrop_proxy/pipereader.py new file mode 100755 index 000000000..d7322d0dc --- /dev/null +++ b/securedrop_proxy/pipereader.py @@ -0,0 +1,87 @@ +#!/usr/bin/python + +import os +import select +import errno + +BUFFSIZE = 64 + + +class PipeReader(): + def __init__(self, pipe, cb): + self._quit = False + self.pipe = pipe + self.cb = cb + + try: + os.mkfifo(pipe) + except OSError as oe: + if oe.errno != errno.EEXIST: + raise + + def quit(self): + self._quit = True + + def read(self): + + pipe = self.pipe + cb = self.cb + + fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) + poller = select.epoll() + poller.register(fifo) + + while not self._quit: + events = poller.poll(timeout=1) + for fileno, event in events: + if event & select.EPOLLIN: + + # read at most BUFSIZE bytes from the fifo + data = os.read(fifo, BUFFSIZE) + + # in this application, we never want to read more + # than BUFSIZE bytes. writes from our client + # should be atomic up to PIPE_BUF byes, which is + # greater than our BUF_SIZE (see + # https://unix.stackexchange.com/questions/68146/what-are-guarantees-for-concurrent-writes-into-a-named-pipe). So, # noqa: E501 + # we can immediately close this filehandle + + poller.unregister(fileno) + os.close(fileno) + cb(self, data.rstrip(), None) + fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) + poller.register(fifo) + + elif event & select.EPOLLHUP: + poller.unregister(fileno) + os.close(fileno) + + fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) + poller.register(fifo) + + elif event & select.EPOLLERR: + print "Error while polling." 
+ cb(None, "POLLING_ERROR") + poller.unregister(fileno) + os.close(fileno) + fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) + print("FIFO opened {}".format(fifo)) + poller.register(fifo) + elif event: + print "Totally unhandled event: {}".format(event) + cb(None, "POLLING_ERROR") + poller.unregister(fileno) + os.close(fileno) + fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) + poller.register(fifo) + + +def reporter(poller, msg, err): + print "Got a message: {} (error: {})".format(msg.rstrip(), err) + if msg.rstrip() == "quit": + poller.quit() + + +if __name__ == '__main__': + reader = PipeReader("mypipe", reporter) + reader.read() diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py new file mode 100644 index 000000000..9a14bf913 --- /dev/null +++ b/securedrop_proxy/proxy.py @@ -0,0 +1,150 @@ +import requests +import furl +import securedrop_proxy.util as util +import tempfile +import json + +class Req: + + def __init__(self): + self.method = '' + self.path_query = '' + self.body = None + self.headers = None + +class Response: + + def __init__(self, status): + self.status = status + self.body = None + self.headers = None + self.version = "0.1.1" + +class Conf: + scheme = '' + host = '' + port = 0 + +class Proxy: + + @staticmethod + def _on_done(res): + print(json.dumps(res.__dict__)) + + def __init__(self, conf, req=Req(), on_save=None, on_done=None): + self.conf = conf + self.req = req + self.res = None + self.on_save = on_save + if on_done is not None: + self.on_done = on_done + else: + self.on_done = self._on_done + + self._prepared_request = None + + @staticmethod + def valid_path(path): + u = furl.furl(path) + + if u.host is not None: + return False + return True + + def simple_error(self, status, err): + res = Response(status) + res.body = json.dumps({"error": err}) + res.headers = {"Content-Type": "application/json"} + + self.res = res + + def prep_request(self): + + scheme = self.conf.scheme + host = self.conf.host + port = self.conf.port 
+ + path = self.req.path_query + method = self.req.method + + if not self.valid_path(path): + self.simple_error(400, 'Path provided in request did not look valid') + raise ValueError('Path provided was invalid') + + try: + url = furl.furl("{}://{}:{}/{}".format(scheme, host, port, path)) + except Exception as e: + + self.simple_error(500, 'Proxy error while generating URL to request') + # XXX is this wrong? + raise ValueError('Error generating URL from provided values') + + url.path.normalize() + + preq = requests.Request(method, url.url) + preq.stream = True + preq.headers = self.req.headers + preq.data = self.req.body + prep = preq.prepare() + + self._prepared_request = prep + + def handle_json_response(self): + + res = Response(self._presp.status_code) + + res.headers = self._presp.headers + res.body = self._presp.content.decode() + + self.res = res + + def handle_non_json_response(self): + + res = Response(self._presp.status_code) + + fh = tempfile.NamedTemporaryFile() + + for c in self._presp.iter_content(10): + fh.write(c) + + res.headers = self._presp.headers + + self.on_save(fh, res) + + + self.res = res + + def handle_response(self): + + ctype = util.parse_options_header(self._presp.headers['content-type']) + + if ctype[0] == "application/json": + self.handle_json_response() + else: + self.handle_non_json_response() + + # headers is a Requests class which doesn't JSON serialize. + # coerce it into a normal dict so it will + self.res.headers = self.res.headers.__dict__ + + def proxy(self): + + try: + if self.on_save is None: + self.simple_error(400, 'Request callback is not set.') + + raise ValueError('Request callback is not set.') + self.prep_request() + s = requests.Session() + self._presp = s.send(self._prepared_request) + self.handle_response() + + except ValueError: + + # effectively a 4xx error + # we have set self.response to indicate an error + pass + + # catch server errors here, handle maybe-differently from ValueErrors... 
+ + self.on_done(self.res) diff --git a/securedrop_proxy/util.py b/securedrop_proxy/util.py new file mode 100644 index 000000000..cdb2a9cbf --- /dev/null +++ b/securedrop_proxy/util.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +import re + +# with thanks to https://github.com/pallets/werkzeug/blob/master/werkzeug/http.py +_option_header_piece_re = re.compile(r''' + ;\s* + (?P + "[^"\\]*(?:\\.[^"\\]*)*" # quoted string + | + [^\s;,=*]+ # token + ) + \s* + (?: # optionally followed by =value + (?: # equals sign, possibly with encoding + \*\s*=\s* # * indicates extended notation + (?P[^\s]+?) + '(?P[^\s]*?)' + | + =\s* # basic notation + ) + (?P + "[^"\\]*(?:\\.[^"\\]*)*" # quoted string + | + [^;,]+ # token + )? + )? + \s* +''', flags=re.VERBOSE) + +_option_header_start_mime_type = re.compile(r',\s*([^;,\s]+)([;,]\s*.+)?') + +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + .. versionadded:: 0.5 + :param value: the header value to unquote. + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. 
+ if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + +def parse_options_header(value, multiple=False): + """Parse a ``Content-Type`` like header into a tuple with the content + type and the options: + >>> parse_options_header('text/html; charset=utf8') + ('text/html', {'charset': 'utf8'}) + This should not be used to parse ``Cache-Control`` like headers that use + a slightly different format. For these headers use the + :func:`parse_dict_header` function. + .. versionadded:: 0.5 + :param value: the header to parse. + :param multiple: Whether try to parse and return multiple MIME types + :return: (mimetype, options) or (mimetype, options, mimetype, options, …) + if multiple=True + """ + if not value: + return '', {} + + result = [] + + value = "," + value.replace("\n", ",") + while value: + match = _option_header_start_mime_type.match(value) + if not match: + break + result.append(match.group(1)) # mimetype + options = {} + # Parse options + rest = match.group(2) + while rest: + optmatch = _option_header_piece_re.match(rest) + if not optmatch: + break + option, encoding, _, option_value = optmatch.groups() + option = unquote_header_value(option) + if option_value is not None: + option_value = unquote_header_value( + option_value, + option == 'filename') + if encoding is not None: + option_value = _unquote(option_value).decode(encoding) + options[option] = option_value + rest = rest[optmatch.end():] + result.append(options) + if multiple is False: + return tuple(result) + value = rest + + return tuple(result) if result else ('', {}) From 6ed328079e822a9f838237cd5ed16345257773ba Mon Sep 17 00:00:00 2001 From: Joshua Thayer Date: Wed, 15 Aug 2018 21:04:58 -0700 Subject: [PATCH 002/352] Real config, better error handling --- Pipfile | 1 + config-example.yaml | 5 +++ examples/bad.json | 2 ++ examples/html.json | 2 ++ examples/posts.json | 2 ++ sd-proxy.py | 66 +++++++++++++++++++++++++++++--------- 
securedrop_proxy/config.py | 46 ++++++++++++++++++++++++++ securedrop_proxy/proxy.py | 14 +++----- 8 files changed, 113 insertions(+), 25 deletions(-) create mode 100644 config-example.yaml create mode 100644 examples/bad.json create mode 100644 examples/html.json create mode 100644 examples/posts.json create mode 100644 securedrop_proxy/config.py diff --git a/Pipfile b/Pipfile index 7b34944ec..8adc1e626 100644 --- a/Pipfile +++ b/Pipfile @@ -8,5 +8,6 @@ python_version = "3.5" [packages] furl = "*" +pyyaml = "*" [dev-packages] diff --git a/config-example.yaml b/config-example.yaml new file mode 100644 index 000000000..282052a86 --- /dev/null +++ b/config-example.yaml @@ -0,0 +1,5 @@ +host: jsonplaceholder.typicode.com +scheme: https +port: 443 +target_vm: compost +dev: False diff --git a/examples/bad.json b/examples/bad.json new file mode 100644 index 000000000..141fce1e4 --- /dev/null +++ b/examples/bad.json @@ -0,0 +1,2 @@ +"foo": "bar", + "baz": "bliff" } diff --git a/examples/html.json b/examples/html.json new file mode 100644 index 000000000..fa04743d3 --- /dev/null +++ b/examples/html.json @@ -0,0 +1,2 @@ +{ "method": "GET", + "path_query": "" } diff --git a/examples/posts.json b/examples/posts.json new file mode 100644 index 000000000..37c736c59 --- /dev/null +++ b/examples/posts.json @@ -0,0 +1,2 @@ +{ "method": "GET", + "path_query": "/posts?userId=1" } diff --git a/sd-proxy.py b/sd-proxy.py index fe31d8b36..523945cfb 100755 --- a/sd-proxy.py +++ b/sd-proxy.py @@ -1,23 +1,41 @@ #!/usr/bin/env python3 +# The sd-proxy RPC script triggered by qubes RPC. + +# This script is executed by `/etc/qubes-rpc/sd-proxy`. It must be +# called with exactly one argument: the path to its config file. See +# the README for configuration options. 
+ import sys import json import securedrop_proxy.proxy as proxy import uuid import subprocess +import securedrop_proxy.config as config + +# a fresh, new proxy object +p = proxy.Proxy() -# "read conf" -conf = proxy.Conf() -conf.host = 'jsonplaceholder.typicode.com' -conf.scheme = 'https' -conf.port = 443 +# set up an error handler early, so we can use it during +# configuration, etc +def err_on_done(res): + print(json.dumps(res.__dict__)) + sys.exit(1) -# instantiate response object -p = proxy.Proxy(conf) +p.on_done = err_on_done -# timeout? +# path to config file must be at argv[1] +if len(sys.argv) != 2: + p.simple_error(500, 'sd-proxy script not called with path to configuration file') + p.on_done(p.res) + print(json.dumps(p.res.__dict__)) -# read from STDIN +# read config. `read_conf` will call `p.on_done` if there is a config +# problem, and will return a Conf object on success. +conf_path = sys.argv[1] +p.conf = config.read_conf(conf_path, p) + +# read user request from STDIN incoming = [] for line in sys.stdin: incoming.append(line) @@ -28,8 +46,7 @@ client_req = json.loads('\n'.join(incoming)) except json.decoder.JSONDecodeError: p.simple_error(400, 'Invalid JSON in request') - print(json.dumps(p.res.__dict__)) - sys.exit(1) + p.on_done(p.res) # build request oject req = proxy.Req() @@ -38,8 +55,7 @@ req.path_query = client_req['path_query'] except KeyError: p.simple_error(400, 'Missing keys in request') - print(json.dumps(p.res.__dict__)) - sys.exit(1) + p.on_done(p.res) if "headers" in client_req: req.headers = client_req['headers'] @@ -47,16 +63,36 @@ if "body" in client_req: req.body = client_req['body'] +# callback for handling non-JSON content. in production-like +# environments, we want to call `qvm-move-to-vm` (and expressly not +# `qvm-move`, since we want to include the destination VM name) to +# move the content to the target VM. for development and testing, we +# keep the file on the local VM. 
+# +# In any case, this callback mutates the given result object (in +# `res`) to include the name of the new file, or to indicate errors. def on_save(fh, res): fn = str(uuid.uuid4()) - # this will be `qvm-move...` in production - subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + try: + if p.conf.dev is True: + subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + else: + subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + subprocess.run(['qvm-move-to-vm', p.conf.target_vm, "/tmp/{}".format(fn)]) + except Exception: + res.status = 500 + res.headers['Content-Type'] = 'application/json' + res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.body = json.dumps({"error": "Unhandled error while handling non-JSON content, sorry"}) + return res.headers['X-Origin-Content-Type'] = res.headers['content-type'] res.headers['Content-Type'] = 'application/json' res.body = json.dumps({'filename': fn }) +# new on_done handler (which, in practice, is largely like the early +# one) def on_done(res): print(json.dumps(res.__dict__)) diff --git a/securedrop_proxy/config.py b/securedrop_proxy/config.py new file mode 100644 index 000000000..e5c133250 --- /dev/null +++ b/securedrop_proxy/config.py @@ -0,0 +1,46 @@ +import os +import yaml + +class Conf: + scheme = '' + host = '' + port = 0 + dev = False + +def read_conf(conf_path, p): + + if not os.path.isfile(conf_path): + p.simple_error(500, 'Configuration file does not exist at {}'.format(conf_path)) + p.on_done(p.res) + + try: + fh = open(conf_path, 'r') + conf_in = yaml.load(fh) + except yaml.YAMLError: + p.simple_error(500, 'YAML syntax error while reading configuration file {}'.format(conf_path)) + p.on_done(p.res) + except Exception: + p.simple_error(500, 'Error while opening or reading configuration file {}'.format(conf_path)) + p.on_done(p.res) + + req_conf_keys = set(('host','scheme','port')) + missing_keys = req_conf_keys - set(conf_in.keys()) + if len(missing_keys) > 0: + p.simple_error(500, 
'Configuration file missing required keys: {}'.format(missing_keys)) + p.on_done(p.res) + + c = Conf() + c.host = conf_in['host'] + c.scheme = conf_in['scheme'] + c.port = conf_in['port'] + + if 'dev' in conf_in and conf_in['dev'] is True: + c.dev = True + else: + if 'target_vm' not in conf_in: + p.simple_error(500, 'Configuration file missing `target_vm` key, which is required when not in development mode') + p.on_done(p.res) + + c.target_vm = conf_in['target_vm'] + + return c diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 9a14bf913..9ffc264d9 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -20,18 +20,13 @@ def __init__(self, status): self.headers = None self.version = "0.1.1" -class Conf: - scheme = '' - host = '' - port = 0 - class Proxy: @staticmethod def _on_done(res): print(json.dumps(res.__dict__)) - def __init__(self, conf, req=Req(), on_save=None, on_done=None): + def __init__(self, conf=None, req=Req(), on_save=None, on_done=None): self.conf = conf self.req = req self.res = None @@ -76,7 +71,6 @@ def prep_request(self): except Exception as e: self.simple_error(500, 'Proxy error while generating URL to request') - # XXX is this wrong? raise ValueError('Error generating URL from provided values') url.path.normalize() @@ -111,7 +105,6 @@ def handle_non_json_response(self): self.on_save(fh, res) - self.res = res def handle_response(self): @@ -132,8 +125,8 @@ def proxy(self): try: if self.on_save is None: self.simple_error(400, 'Request callback is not set.') - raise ValueError('Request callback is not set.') + self.prep_request() s = requests.Session() self._presp = s.send(self._prepared_request) @@ -145,6 +138,7 @@ def proxy(self): # we have set self.response to indicate an error pass - # catch server errors here, handle maybe-differently from ValueErrors... + # catch server errors here, handle maybe-differently from + # ValueErrors... 
self.on_done(self.res) From df6490d28c67f56584968fedf0a506f333785eed Mon Sep 17 00:00:00 2001 From: Joshua Thayer Date: Wed, 15 Aug 2018 21:29:19 -0700 Subject: [PATCH 003/352] Adds README --- README.md | 71 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 000000000..89f46fd44 --- /dev/null +++ b/README.md @@ -0,0 +1,71 @@ +## securedrop workstation proxy + +This implements a Qubes RPC <-> HTTP proxy, used to forward requests +from the [securedrop workstation +client](https://github.com/freedomofpress/securedrop-client) to the +[securedrop server](https://github.com/freedomofpress/securedrop). + +### try it out + +The proxy works by reading a JSON object from STDIN, generating an +HTTP request from the JSON it's read, executing that request, then +writing to STDOUT a JSON object which represents the remote server's +response. For discussion about the shape of the request and response +objects, see +https://github.com/freedomofpress/securedrop-workstation/issues/107. + +#### install requirements + +This is still development code, and not ready for integration with the +rest of the securedrop-workstation project. That said, it is ready to +be poked at tested. + +To try the proxy script, first use `pipenv` to create an environment +and install requirements. In the root of the project directoy, run + + pipenv install + +#### configuration + +The proxy script must be run with the path to its configuration file +as its first argument. This repo includes an example configuration +file, at `config-example.yaml`. Configuration consists of the +following values: + + * host: the hostname of the remote server. must be set. + * port: the port the request should be sent to. must be set. + * scheme: http or https. must be set. 
+ * dev: a boolean- True indicates we're running in development mode, any other value (or not set) indicates we're running in production. See below for what that means. + * target_vm: the name of the VM we should `qvm-move` non-JSON responses to. must be set if dev is not True + + +#### dev vs prod + +Configuration includes a "dev" attribute. At this point, the only +difference between dev and production modes is how non-JSON responses +are handled. In prod mode, the content is saved to a local file, then +moved (via `qvm-move`) to the VM indicated in `target_vm`. In dev +mode, the file is not moved off the VM, but is saved as a temporary +file in `/tmp`. In both cases, the response written to STDOUT includes +the name of the new file. + +#### running + +The following commands can be used to demonstrate the proxy. + +This demonstrates proxying a request which has an `application/json` response: + + $ cat examples/posts.json | ./sd-proxy.py ./config-example.yaml + +This demonstrates proxying a request which has a `text/html` response +and thus is saved to a temp file. The name of the temp file is +included in the result printed to STDOUT- in dev mode, the file can be +read at that name under `/tmp`. + + $ cat examples/html.json | ./sd-proxy.py ./config-example.yaml + +Finally, this demonstrates some error handling. The request contains +invalid JSON. The proxy detects that, and prints an error message +(still a valid proxy response). 
+ + $ cat examples/bad.json | ./sd-proxy.py ./config-example.yaml From c84ca14dcab42aad3119ab04f6af587e048e1810 Mon Sep 17 00:00:00 2001 From: Joshua Thayer Date: Wed, 15 Aug 2018 22:44:32 -0700 Subject: [PATCH 004/352] readme --- README.md | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index 89f46fd44..84e657abc 100644 --- a/README.md +++ b/README.md @@ -8,20 +8,20 @@ client](https://github.com/freedomofpress/securedrop-client) to the ### try it out The proxy works by reading a JSON object from STDIN, generating an -HTTP request from the JSON it's read, executing that request, then -writing to STDOUT a JSON object which represents the remote server's -response. For discussion about the shape of the request and response -objects, see +HTTP request from that JSON, making that request against the remote +server, then writing a JSON object which represents the remote +server's response to STDOUT. For discussion about the shape of the +request and response objects, see https://github.com/freedomofpress/securedrop-workstation/issues/107. -#### install requirements +This is still development code, not quite ready for integration with the +rest of the securedrop-workstation project. However, it is ready to +be poked at and demonstrated. -This is still development code, and not ready for integration with the -rest of the securedrop-workstation project. That said, it is ready to -be poked at tested. +#### install requirements To try the proxy script, first use `pipenv` to create an environment -and install requirements. In the root of the project directoy, run +and install requirements. In the root of the project directory, run pipenv install @@ -32,11 +32,11 @@ as its first argument. This repo includes an example configuration file, at `config-example.yaml`. Configuration consists of the following values: - * host: the hostname of the remote server. must be set. 
- * port: the port the request should be sent to. must be set. - * scheme: http or https. must be set. - * dev: a boolean- True indicates we're running in development mode, any other value (or not set) indicates we're running in production. See below for what that means. - * target_vm: the name of the VM we should `qvm-move` non-JSON responses to. must be set if dev is not True +- `host` - The hostname of the remote server. Must be set. +- `port` - The port the request should be sent to. Must be set. +- `scheme` - `http` or `https`. Must be set. +- `dev` - A boolean, where `True` indicates we're running in development mode, any other value (or not set) indicates we're running in production. See below for what that means. +- `target_vm` - The name of the VM we should `qvm-move` non-JSON responses to. Must be set if dev is not True. #### dev vs prod @@ -44,7 +44,7 @@ following values: Configuration includes a "dev" attribute. At this point, the only difference between dev and production modes is how non-JSON responses are handled. In prod mode, the content is saved to a local file, then -moved (via `qvm-move`) to the VM indicated in `target_vm`. In dev +moved (via `qvm-move`) to the VM indicated by `target_vm`. In dev mode, the file is not moved off the VM, but is saved as a temporary file in `/tmp`. In both cases, the response written to STDOUT includes the name of the new file. From 423b60d20499dce6ff0c356224268d0c22bebc74 Mon Sep 17 00:00:00 2001 From: Joshua Thayer Date: Wed, 15 Aug 2018 22:57:40 -0700 Subject: [PATCH 005/352] minor cleanup --- sd-proxy.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/sd-proxy.py b/sd-proxy.py index 523945cfb..77eedde29 100755 --- a/sd-proxy.py +++ b/sd-proxy.py @@ -28,7 +28,6 @@ def err_on_done(res): if len(sys.argv) != 2: p.simple_error(500, 'sd-proxy script not called with path to configuration file') p.on_done(p.res) - print(json.dumps(p.res.__dict__)) # read config. 
`read_conf` will call `p.on_done` if there is a config # problem, and will return a Conf object on success. @@ -75,10 +74,8 @@ def on_save(fh, res): fn = str(uuid.uuid4()) try: - if p.conf.dev is True: - subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) - else: - subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + if p.conf.dev is not True: subprocess.run(['qvm-move-to-vm', p.conf.target_vm, "/tmp/{}".format(fn)]) except Exception: res.status = 500 @@ -91,8 +88,7 @@ def on_save(fh, res): res.headers['Content-Type'] = 'application/json' res.body = json.dumps({'filename': fn }) -# new on_done handler (which, in practice, is largely like the early -# one) +# new on_done handler def on_done(res): print(json.dumps(res.__dict__)) From 0aca067f92da6d5dd9d34213d40d3d7bc43b60f7 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 12:07:46 -0700 Subject: [PATCH 006/352] Unit tests: Add initial unit tests --- tests/__init__.py | 0 tests/test_config.py | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 tests/__init__.py create mode 100644 tests/test_config.py diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 000000000..636e13510 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,34 @@ +from io import StringIO +import json +import sys +import unittest + +from securedrop_proxy import proxy +from securedrop_proxy import config + + +class TestConfig(unittest.TestCase): + def setUp(self): + self.p = proxy.Proxy() + + def err_on_done(res): + print(json.dumps(res.__dict__)) + sys.exit(1) + + self.p.on_done = err_on_done + + def test_config_file_does_not_exist(self): + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + with self.assertRaises(SystemExit): + config.read_conf('not/a/real/path', self.p) + output = 
out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + response = json.loads(output) + assert response['status'] == 500 + assert "Configuration file does not exist" in response['body'] + assert response['headers']['Content-Type'] == 'application/json' From 61232f0258eb6350af7c15a46f618521025e761f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 12:09:27 -0700 Subject: [PATCH 007/352] CI: Install requirements and run unit tests --- .circleci/config.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000..3f0153f37 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,13 @@ +version: 2 +jobs: + build: + docker: + - image: circleci/python:3.5 + steps: + - checkout + + - run: + name: Install requirements and run tests + command: | + pipenv install --dev + pipenv run python -m unittest From 73b2c32601dcfc29a1c127d084427f29694b9fef Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 13:07:36 -0700 Subject: [PATCH 008/352] CI: Run tests in verbose mode --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3f0153f37..5fdb0ed55 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,4 +10,4 @@ jobs: name: Install requirements and run tests command: | pipenv install --dev - pipenv run python -m unittest + pipenv run python -m unittest -v From 2e1af3965072526df36c4e690ebb1ae88e167a43 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 13:39:25 -0700 Subject: [PATCH 009/352] Requirements: Add requests to Pipfile --- Pipfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Pipfile b/Pipfile index 8adc1e626..b65758a89 100644 --- a/Pipfile +++ b/Pipfile @@ -9,5 +9,6 @@ python_version = "3.5" [packages] furl = "*" pyyaml = "*" +requests = "*" [dev-packages] From 
ca46c3bc9bc1a98de20a7e83be7e91582ad89f05 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 13:43:03 -0700 Subject: [PATCH 010/352] README: Add Circle CI badge --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 84e657abc..c27419cc6 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ ## securedrop workstation proxy +[![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-proxy.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-proxy) + This implements a Qubes RPC <-> HTTP proxy, used to forward requests from the [securedrop workstation client](https://github.com/freedomofpress/securedrop-client) to the From 56d01540800f47a7aa5b2a7178224ff8fa581c42 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 14:12:57 -0700 Subject: [PATCH 011/352] Unit tests: No callback should generate 400, basic functionality --- tests/test_proxy.py | 55 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 tests/test_proxy.py diff --git a/tests/test_proxy.py b/tests/test_proxy.py new file mode 100644 index 000000000..e4f85a1f2 --- /dev/null +++ b/tests/test_proxy.py @@ -0,0 +1,55 @@ +import json +import subprocess +import unittest +import uuid + +from securedrop_proxy import proxy +from securedrop_proxy import config + + +class TestProxy(unittest.TestCase): + def setUp(self): + self.conf = config.Conf() + self.conf.host = 'jsonplaceholder.typicode.com' + self.conf.scheme = 'https' + self.conf.port = 443 + + def on_save(self, fh, res): + + self.fn = str(uuid.uuid4()) + + # this will be `qvm-move...` in production + subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) + + res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.headers['Content-Type'] = 'application/json' + res.body = json.dumps({'filename': self.fn }) + + def test_400_if_callback_not_set(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = '' + 
req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy() + p.proxy() + + assert p.res.status == 400 + + def test_proxy_basic_functionality(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = '' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy(self.conf, req, self.on_save) + p.proxy() + + assert p.res.status == 200 + assert p.res.body == json.dumps({'filename': self.fn }) + + # This is the way to verify the 'content-type' header + assert p.res.headers['_store']['content-type'][1] == 'application/json' + + # But I want to do this + # assert p.res.headers['Content-Type'] == 'application/json' From 02992d7b69ad6a82f4191c417e512d53eb21502b Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 14:18:54 -0700 Subject: [PATCH 012/352] Unit tests: Add VCR to mock requests/responses --- Pipfile | 1 + fixtures/basic_proxy_functionality.yaml | 124 ++++++++++++++++++++++++ tests/test_proxy.py | 2 + 3 files changed, 127 insertions(+) create mode 100644 fixtures/basic_proxy_functionality.yaml diff --git a/Pipfile b/Pipfile index b65758a89..5d761889d 100644 --- a/Pipfile +++ b/Pipfile @@ -12,3 +12,4 @@ pyyaml = "*" requests = "*" [dev-packages] +vcrpy = "*" diff --git a/fixtures/basic_proxy_functionality.yaml b/fixtures/basic_proxy_functionality.yaml new file mode 100644 index 000000000..41b466bc9 --- /dev/null +++ b/fixtures/basic_proxy_functionality.yaml @@ -0,0 +1,124 @@ +interactions: +- request: + body: null + headers: + Accept: [application/json] + method: GET + uri: https://jsonplaceholder.typicode.com/ + response: + body: {string: "\n\n\n\n\n\n\n\n\nJSONPlaceholder + - Fake online REST API for developers\n\n\n\n\n
\n\n
\n

\n{\nJSON\n:\nPlaceholder\n}\n

\n

\nFake + Online REST API for Testing and Prototyping\n
Serving ~200 millions API + requests per month\n

\n

\nPowered by\nJSON + Server\nand\nLowDB\n

\n
\n\n
\n

Sponsors \U0001F64F

\n

\n\n\n\n

\n

\n[Become a sponsor]\n

\n
\n\n
\n
\n\n

Intro

\n

\nJSONPlaceholder is a + free online REST service that you can use whenever you need some fake data.\n
It's + great for tutorials, testing new libraries, sharing code examples, ...\n

\n\n

Example

\n

\nRun + this code in a console or from anywhere. Both\n

\n
fetch('https://jsonplaceholder.typicode.com/todos/1')\n
+        \ .then(response => response.json())\n  .then(json => console.log(json))\n
\n

\n\nAwesome + \U0001F942\n

\n
\n

\nTip + you can use\n\nhttp\n or\n\nhttps\n when making requests to + JSONPlaceholder.\n

\n\n

Resources

\n

\nJSONPlaceholder comes with + a set of common resources.\n

\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n/posts\n100 posts
\n/comments\n500 comments
\n/albums\n100 albums
\n/photos\n5000 photos
\n/todos\n200 todos
\n/users\n10 users
\n

\nNote + resources have relations. For example:\nposts have many\ncomments,\nalbums + have many\nphotos, ...\n

\n\n

Routes

\n

\nAll HTTP methods + are supported.\n

\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
GET\n/posts\n
GET\n/posts/1\n
GET\n/posts/1/comments\n
GET\n/comments?postId=1\n
GET\n/posts?userId=1\n
POST/posts
PUT/posts/1
PATCH/posts/1
DELETE/posts/1
\n

\nNote + you can view detailed examples\n\nhere\n\n

\n\n

Use + your own data

\n

\n\nJSON + Server\n powers this website. You can use it to create the same + fake API in less than\n30 seconds with your own data.\n

\n
npm install json-server
\n

You can also try\nMy JSON Server free service.

\n
\n
\n\n
\n

\nCoded + and built with \u2764\uFE0F by\ntypicode\n

\n

Source + code available on\nGitHub\n

\n

\n\n\n

\n
\n\n\n\n\n\n"} + headers: + Access-Control-Allow-Credentials: ['true'] + CF-Cache-Status: [HIT] + CF-RAY: [44b6eed6eeb06c7c-SJC] + Cache-Control: ['public, max-age=14400'] + Connection: [keep-alive] + Content-Type: [text/html; charset=UTF-8] + Date: ['Thu, 16 Aug 2018 21:18:21 GMT'] + Expect-CT: ['max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"'] + Expires: ['Fri, 17 Aug 2018 01:18:21 GMT'] + Last-Modified: ['Sun, 29 Jul 2018 21:56:24 GMT'] + Server: [cloudflare] + Set-Cookie: ['__cfduid=d2298f2ad2272eaf0907e88029489f22c1534454301; expires=Fri, + 16-Aug-19 21:18:21 GMT; path=/; domain=.typicode.com; HttpOnly'] + Vary: ['Origin, Accept-Encoding'] + Via: [1.1 vegur] + X-Powered-By: [Express] + status: {code: 200, message: OK} +version: 1 diff --git a/tests/test_proxy.py b/tests/test_proxy.py index e4f85a1f2..19688ea43 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -1,5 +1,6 @@ import json import subprocess +import vcr import unittest import uuid @@ -36,6 +37,7 @@ def test_400_if_callback_not_set(self): assert p.res.status == 400 + @vcr.use_cassette('fixtures/basic_proxy_functionality.yaml') def test_proxy_basic_functionality(self): req = proxy.Req() req.method = 'GET' From a6d0dee399c6a59514f773def18f932ed4358631 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 14:20:29 -0700 Subject: [PATCH 013/352] CI: Check Python dependencies for CVEs --- .circleci/config.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5fdb0ed55..f752adb5b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,3 +11,8 @@ jobs: command: | pipenv install --dev pipenv run python -m unittest -v + + - run: + name: Check Python dependencies for CVEs + command: | + pipenv check From 6947ed68365f38c17a024c3d2d25115ac7494967 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 15:38:58 -0700 Subject: [PATCH 014/352] Remove now unnecessary 
pipereader.py --- securedrop_proxy/pipereader.py | 87 ---------------------------------- 1 file changed, 87 deletions(-) delete mode 100755 securedrop_proxy/pipereader.py diff --git a/securedrop_proxy/pipereader.py b/securedrop_proxy/pipereader.py deleted file mode 100755 index d7322d0dc..000000000 --- a/securedrop_proxy/pipereader.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/python - -import os -import select -import errno - -BUFFSIZE = 64 - - -class PipeReader(): - def __init__(self, pipe, cb): - self._quit = False - self.pipe = pipe - self.cb = cb - - try: - os.mkfifo(pipe) - except OSError as oe: - if oe.errno != errno.EEXIST: - raise - - def quit(self): - self._quit = True - - def read(self): - - pipe = self.pipe - cb = self.cb - - fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) - poller = select.epoll() - poller.register(fifo) - - while not self._quit: - events = poller.poll(timeout=1) - for fileno, event in events: - if event & select.EPOLLIN: - - # read at most BUFSIZE bytes from the fifo - data = os.read(fifo, BUFFSIZE) - - # in this application, we never want to read more - # than BUFSIZE bytes. writes from our client - # should be atomic up to PIPE_BUF byes, which is - # greater than our BUF_SIZE (see - # https://unix.stackexchange.com/questions/68146/what-are-guarantees-for-concurrent-writes-into-a-named-pipe). So, # noqa: E501 - # we can immediately close this filehandle - - poller.unregister(fileno) - os.close(fileno) - cb(self, data.rstrip(), None) - fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) - poller.register(fifo) - - elif event & select.EPOLLHUP: - poller.unregister(fileno) - os.close(fileno) - - fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) - poller.register(fifo) - - elif event & select.EPOLLERR: - print "Error while polling." 
- cb(None, "POLLING_ERROR") - poller.unregister(fileno) - os.close(fileno) - fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) - print("FIFO opened {}".format(fifo)) - poller.register(fifo) - elif event: - print "Totally unhandled event: {}".format(event) - cb(None, "POLLING_ERROR") - poller.unregister(fileno) - os.close(fileno) - fifo = os.open(pipe, os.O_RDONLY | os.O_NONBLOCK) - poller.register(fifo) - - -def reporter(poller, msg, err): - print "Got a message: {} (error: {})".format(msg.rstrip(), err) - if msg.rstrip() == "quit": - poller.quit() - - -if __name__ == '__main__': - reader = PipeReader("mypipe", reporter) - reader.read() From 730673f574e00db59857f9cd08974f7665ca75be Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 16:01:08 -0700 Subject: [PATCH 015/352] Move callbacks and main function into securedrop_proxy package This is done for testability: the code in sd-proxy.py now just uses the main function and callbacks which are in the package so they can be unit tested. 
--- sd-proxy.py | 68 +++-------------------------------- securedrop_proxy/callbacks.py | 37 +++++++++++++++++++ securedrop_proxy/main.py | 35 ++++++++++++++++++ 3 files changed, 77 insertions(+), 63 deletions(-) create mode 100644 securedrop_proxy/callbacks.py create mode 100644 securedrop_proxy/main.py diff --git a/sd-proxy.py b/sd-proxy.py index 77eedde29..5d01b098d 100755 --- a/sd-proxy.py +++ b/sd-proxy.py @@ -12,17 +12,15 @@ import uuid import subprocess import securedrop_proxy.config as config +import securedrop_proxy.callbacks as callbacks +import securedrop_proxy.main as main # a fresh, new proxy object p = proxy.Proxy() # set up an error handler early, so we can use it during # configuration, etc -def err_on_done(res): - print(json.dumps(res.__dict__)) - sys.exit(1) - -p.on_done = err_on_done +p.on_done = callbacks.err_on_done # path to config file must be at argv[1] if len(sys.argv) != 2: @@ -38,62 +36,6 @@ def err_on_done(res): incoming = [] for line in sys.stdin: incoming.append(line) +incoming = '\n'.join(incoming) -# deserialize incoming request -client_req = None -try: - client_req = json.loads('\n'.join(incoming)) -except json.decoder.JSONDecodeError: - p.simple_error(400, 'Invalid JSON in request') - p.on_done(p.res) - -# build request oject -req = proxy.Req() -try: - req.method = client_req['method'] - req.path_query = client_req['path_query'] -except KeyError: - p.simple_error(400, 'Missing keys in request') - p.on_done(p.res) - -if "headers" in client_req: - req.headers = client_req['headers'] - -if "body" in client_req: - req.body = client_req['body'] - -# callback for handling non-JSON content. in production-like -# environments, we want to call `qvm-move-to-vm` (and expressly not -# `qvm-move`, since we want to include the destination VM name) to -# move the content to the target VM. for development and testing, we -# keep the file on the local VM. 
-# -# In any case, this callback mutates the given result object (in -# `res`) to include the name of the new file, or to indicate errors. -def on_save(fh, res): - fn = str(uuid.uuid4()) - - try: - subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) - if p.conf.dev is not True: - subprocess.run(['qvm-move-to-vm', p.conf.target_vm, "/tmp/{}".format(fn)]) - except Exception: - res.status = 500 - res.headers['Content-Type'] = 'application/json' - res.headers['X-Origin-Content-Type'] = res.headers['content-type'] - res.body = json.dumps({"error": "Unhandled error while handling non-JSON content, sorry"}) - return - - res.headers['X-Origin-Content-Type'] = res.headers['content-type'] - res.headers['Content-Type'] = 'application/json' - res.body = json.dumps({'filename': fn }) - -# new on_done handler -def on_done(res): - print(json.dumps(res.__dict__)) - -# complete proxy object -p.req = req -p.on_save = on_save -p.on_done = on_done -p.proxy() +main.__main__(incoming, p) diff --git a/securedrop_proxy/callbacks.py b/securedrop_proxy/callbacks.py new file mode 100644 index 000000000..2bed25cc2 --- /dev/null +++ b/securedrop_proxy/callbacks.py @@ -0,0 +1,37 @@ +import sys +import json + + +def err_on_done(res): + print(json.dumps(res.__dict__)) + sys.exit(1) + +# callback for handling non-JSON content. in production-like +# environments, we want to call `qvm-move-to-vm` (and expressly not +# `qvm-move`, since we want to include the destination VM name) to +# move the content to the target VM. for development and testing, we +# keep the file on the local VM. +# +# In any case, this callback mutates the given result object (in +# `res`) to include the name of the new file, or to indicate errors. 
+def on_save(fh, res): + fn = str(uuid.uuid4()) + + try: + subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) + if p.conf.dev is not True: + subprocess.run(['qvm-move-to-vm', p.conf.target_vm, "/tmp/{}".format(fn)]) + except Exception: + res.status = 500 + res.headers['Content-Type'] = 'application/json' + res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.body = json.dumps({"error": "Unhandled error while handling non-JSON content, sorry"}) + return + + res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.headers['Content-Type'] = 'application/json' + res.body = json.dumps({'filename': fn }) + +# new on_done handler +def on_done(res): + print(json.dumps(res.__dict__)) diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py new file mode 100644 index 000000000..2333bb1be --- /dev/null +++ b/securedrop_proxy/main.py @@ -0,0 +1,35 @@ +import json + +from securedrop_proxy import callbacks +from securedrop_proxy import proxy + + +def __main__(incoming, p): + # deserialize incoming request + client_req = None + try: + client_req = json.loads(incoming) + except json.decoder.JSONDecodeError: + p.simple_error(400, 'Invalid JSON in request') + p.on_done(p.res) + + # build request oject + req = proxy.Req() + try: + req.method = client_req['method'] + req.path_query = client_req['path_query'] + except KeyError: + p.simple_error(400, 'Missing keys in request') + p.on_done(p.res) + + if "headers" in client_req: + req.headers = client_req['headers'] + + if "body" in client_req: + req.body = client_req['body'] + + # complete proxy object + p.req = req + p.on_save = callbacks.on_save + p.on_done = callbacks.on_done + p.proxy() From 42160b1251abb14931067c585580c4f2ac843eda Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 16:03:30 -0700 Subject: [PATCH 016/352] Ensure we can access keys on request.headers Previously, we converted requests' CaseInsensitiveDict [0] to a dict which preserved the data structure of 
CaseInsensitiveDict. This created a top-level _store key, with an interior OrderedDict that had the headers we want to access. Instead, we should use the dict() attribute which gives us just the headers and not the outer _store and OrderedDict. [0] http://docs.python-requests.org/en/master/_modules/requests/structures/ --- securedrop_proxy/proxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 9ffc264d9..541b490bc 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -118,7 +118,7 @@ def handle_response(self): # headers is a Requests class which doesn't JSON serialize. # coerce it into a normal dict so it will - self.res.headers = self.res.headers.__dict__ + self.res.headers = dict(self.res.headers) def proxy(self): From fd789f1e8abf835cfcdf0e621623c410b1fe2be2 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 15:06:59 -0700 Subject: [PATCH 017/352] Unit tests: Add more unit tests for config Note that we are using a custom callback here in order to simplify testing --- tests/files/invalid_yaml.yaml | 1 + tests/files/valid-config.yaml | 5 +++ tests/test_config.py | 64 ++++++++++++++++++++++++++--------- 3 files changed, 54 insertions(+), 16 deletions(-) create mode 100644 tests/files/invalid_yaml.yaml create mode 100644 tests/files/valid-config.yaml diff --git a/tests/files/invalid_yaml.yaml b/tests/files/invalid_yaml.yaml new file mode 100644 index 000000000..fad3d369d --- /dev/null +++ b/tests/files/invalid_yaml.yaml @@ -0,0 +1 @@ +[:thisisbad diff --git a/tests/files/valid-config.yaml b/tests/files/valid-config.yaml new file mode 100644 index 000000000..282052a86 --- /dev/null +++ b/tests/files/valid-config.yaml @@ -0,0 +1,5 @@ +host: jsonplaceholder.typicode.com +scheme: https +port: 443 +target_vm: compost +dev: False diff --git a/tests/test_config.py b/tests/test_config.py index 636e13510..bc478856a 100644 --- a/tests/test_config.py +++ 
b/tests/test_config.py @@ -2,6 +2,7 @@ import json import sys import unittest +from unittest.mock import patch from securedrop_proxy import proxy from securedrop_proxy import config @@ -11,24 +12,55 @@ class TestConfig(unittest.TestCase): def setUp(self): self.p = proxy.Proxy() + def test_config_file_does_not_exist(self): def err_on_done(res): - print(json.dumps(res.__dict__)) + res = res.__dict__ + self.assertEquals(res['status'], 500) + self.assertIn("Configuration file does not exist", + res['body']) + self.assertEquals(res['headers']['Content-Type'], + 'application/json') sys.exit(1) self.p.on_done = err_on_done + with self.assertRaises(SystemExit): + config.read_conf('not/a/real/path', self.p) - def test_config_file_does_not_exist(self): - saved_stdout = sys.stdout - try: - out = StringIO() - sys.stdout = out - with self.assertRaises(SystemExit): - config.read_conf('not/a/real/path', self.p) - output = out.getvalue().strip() - finally: - sys.stdout = saved_stdout - - response = json.loads(output) - assert response['status'] == 500 - assert "Configuration file does not exist" in response['body'] - assert response['headers']['Content-Type'] == 'application/json' + def test_config_file_when_yaml_is_invalid(self): + def err_on_done(res): + res = res.__dict__ + self.assertEquals(res['status'], 500) + self.assertIn("YAML syntax error", res['body']) + self.assertEquals(res['headers']['Content-Type'], + 'application/json') + sys.exit(1) + + self.p.on_done = err_on_done + with self.assertRaises(SystemExit): + config.read_conf('tests/files/invalid_yaml.yaml', self.p) + + def test_config_file_open_generic_exception(self): + def err_on_done(res): + res = res.__dict__ + self.assertEquals(res['status'], 500) + self.assertEquals(res['headers']['Content-Type'], + 'application/json') + sys.exit(1) + + self.p.on_done = err_on_done + + with self.assertRaises(SystemExit): + # Patching open so that we can simulate a non-YAML error + # (e.g. 
permissions) + with patch("builtins.open", side_effect=IOError): + config.read_conf('tests/files/valid-config.yaml', self.p) + + def test_config_has_valid_keys(self): + c = config.read_conf('tests/files/valid-config.yaml', self.p) + + # Verify we have a valid Conf object + self.assertEquals(c.host, 'jsonplaceholder.typicode.com') + self.assertEquals(c.port, 443) + self.assertFalse(c.dev) + self.assertEquals(c.scheme, 'https') + self.assertEquals(c.target_vm, 'compost') From 39dd7f0261a369d424b454232521033ad98c8186 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 16:19:07 -0700 Subject: [PATCH 018/352] Unit tests: Basic tests for proxy logic --- fixtures/proxy_404.yaml | 31 +++++++++++++++++++++++++++++++ tests/test_proxy.py | 30 ++++++++++++++++++++++-------- 2 files changed, 53 insertions(+), 8 deletions(-) create mode 100644 fixtures/proxy_404.yaml diff --git a/fixtures/proxy_404.yaml b/fixtures/proxy_404.yaml new file mode 100644 index 000000000..491699a10 --- /dev/null +++ b/fixtures/proxy_404.yaml @@ -0,0 +1,31 @@ +interactions: +- request: + body: null + headers: + Accept: [application/json] + method: GET + uri: https://jsonplaceholder.typicode.com/notfound + response: + body: {string: '{}'} + headers: + Access-Control-Allow-Credentials: ['true'] + CF-Cache-Status: [HIT] + CF-RAY: [44b75f42af366c82-SJC] + Cache-Control: ['public, max-age=14400'] + Connection: [keep-alive] + Content-Length: ['2'] + Content-Type: [application/json; charset=utf-8] + Date: ['Thu, 16 Aug 2018 22:35:06 GMT'] + Etag: [W/"2-vyGp6PvFo4RvsFtPoIWeCReyIC8"] + Expect-CT: ['max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"'] + Expires: ['Fri, 17 Aug 2018 02:35:06 GMT'] + Pragma: [no-cache] + Server: [cloudflare] + Set-Cookie: ['__cfduid=d1ba26b900bf8e93056964a66047938871534458906; expires=Fri, + 16-Aug-19 22:35:06 GMT; path=/; domain=.typicode.com; HttpOnly'] + Vary: ['Origin, Accept-Encoding'] + Via: [1.1 vegur] + 
X-Content-Type-Options: [nosniff] + X-Powered-By: [Express] + status: {code: 404, message: Not Found} +version: 1 diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 19688ea43..b5cc4bc6a 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -22,10 +22,14 @@ def on_save(self, fh, res): # this will be `qvm-move...` in production subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) - res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.headers['Content-Type'] = 'application/json' res.body = json.dumps({'filename': self.fn }) + def on_done(self, res): + res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] + res.headers['Content-Type'] = 'application/json' + def test_400_if_callback_not_set(self): req = proxy.Req() req.method = 'GET' @@ -35,7 +39,7 @@ def test_400_if_callback_not_set(self): p = proxy.Proxy() p.proxy() - assert p.res.status == 400 + self.assertEquals(p.res.status, 400) @vcr.use_cassette('fixtures/basic_proxy_functionality.yaml') def test_proxy_basic_functionality(self): @@ -47,11 +51,21 @@ def test_proxy_basic_functionality(self): p = proxy.Proxy(self.conf, req, self.on_save) p.proxy() - assert p.res.status == 200 - assert p.res.body == json.dumps({'filename': self.fn }) + self.assertEquals(p.res.status, 200) + self.assertEquals(p.res.body, json.dumps({'filename': self.fn })) + self.assertEquals(p.res.headers['Content-Type'], 'application/json') - # This is the way to verify the 'content-type' header - assert p.res.headers['_store']['content-type'][1] == 'application/json' + @vcr.use_cassette('fixtures/proxy_404.yaml') + def test_proxy_produces_404(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = '/notfound' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy(self.conf, req) + p.on_save = self.on_save + p.on_done = self.on_done + p.proxy() - # But I want to do this - # assert 
p.res.headers['Content-Type'] == 'application/json' + self.assertEquals(p.res.status, 404) + self.assertEquals(p.res.headers['Content-Type'], 'application/json') From 2e2b9bd32d777cbfd24110d18b0d36d129096282 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 16:24:58 -0700 Subject: [PATCH 019/352] Unit tests: Complete basic tests for config parsing --- tests/files/missing-key.yaml | 4 +++ tests/files/missing-target-vm.yaml | 4 +++ tests/test_config.py | 56 ++++++++++++++++++++++-------- 3 files changed, 49 insertions(+), 15 deletions(-) create mode 100644 tests/files/missing-key.yaml create mode 100644 tests/files/missing-target-vm.yaml diff --git a/tests/files/missing-key.yaml b/tests/files/missing-key.yaml new file mode 100644 index 000000000..8e429958d --- /dev/null +++ b/tests/files/missing-key.yaml @@ -0,0 +1,4 @@ +host: jsonplaceholder.typicode.com +scheme: https +target_vm: compost +dev: False diff --git a/tests/files/missing-target-vm.yaml b/tests/files/missing-target-vm.yaml new file mode 100644 index 000000000..2ea9cc53a --- /dev/null +++ b/tests/files/missing-target-vm.yaml @@ -0,0 +1,4 @@ +host: jsonplaceholder.typicode.com +scheme: https +port: 443 +dev: False diff --git a/tests/test_config.py b/tests/test_config.py index bc478856a..916ad679d 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,5 +1,3 @@ -from io import StringIO -import json import sys import unittest from unittest.mock import patch @@ -15,11 +13,11 @@ def setUp(self): def test_config_file_does_not_exist(self): def err_on_done(res): res = res.__dict__ - self.assertEquals(res['status'], 500) + self.assertEqual(res['status'], 500) self.assertIn("Configuration file does not exist", res['body']) - self.assertEquals(res['headers']['Content-Type'], - 'application/json') + self.assertEqual(res['headers']['Content-Type'], + 'application/json') sys.exit(1) self.p.on_done = err_on_done @@ -29,10 +27,10 @@ def err_on_done(res): def 
test_config_file_when_yaml_is_invalid(self): def err_on_done(res): res = res.__dict__ - self.assertEquals(res['status'], 500) + self.assertEqual(res['status'], 500) self.assertIn("YAML syntax error", res['body']) - self.assertEquals(res['headers']['Content-Type'], - 'application/json') + self.assertEqual(res['headers']['Content-Type'], + 'application/json') sys.exit(1) self.p.on_done = err_on_done @@ -42,9 +40,9 @@ def err_on_done(res): def test_config_file_open_generic_exception(self): def err_on_done(res): res = res.__dict__ - self.assertEquals(res['status'], 500) - self.assertEquals(res['headers']['Content-Type'], - 'application/json') + self.assertEqual(res['status'], 500) + self.assertEqual(res['headers']['Content-Type'], + 'application/json') sys.exit(1) self.p.on_done = err_on_done @@ -59,8 +57,36 @@ def test_config_has_valid_keys(self): c = config.read_conf('tests/files/valid-config.yaml', self.p) # Verify we have a valid Conf object - self.assertEquals(c.host, 'jsonplaceholder.typicode.com') - self.assertEquals(c.port, 443) + self.assertEqual(c.host, 'jsonplaceholder.typicode.com') + self.assertEqual(c.port, 443) self.assertFalse(c.dev) - self.assertEquals(c.scheme, 'https') - self.assertEquals(c.target_vm, 'compost') + self.assertEqual(c.scheme, 'https') + self.assertEqual(c.target_vm, 'compost') + + def test_config_500_when_missing_a_required_key(self): + def err_on_done(res): + res = res.__dict__ + self.assertEqual(res['status'], 500) + self.assertIn("missing required keys", res['body']) + self.assertEqual(res['headers']['Content-Type'], + 'application/json') + sys.exit(1) + + self.p.on_done = err_on_done + + with self.assertRaises(SystemExit): + config.read_conf('tests/files/missing-key.yaml', self.p) + + def test_config_500_when_missing_target_vm(self): + def err_on_done(res): + res = res.__dict__ + self.assertEqual(res['status'], 500) + self.assertIn("missing `target_vm` key", res['body']) + self.assertEqual(res['headers']['Content-Type'], + 
'application/json') + sys.exit(1) + + self.p.on_done = err_on_done + + with self.assertRaises(SystemExit): + config.read_conf('tests/files/missing-target-vm.yaml', self.p) From 6cbfe4756a6337741333713408ecd19b7d91551a Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 16:28:09 -0700 Subject: [PATCH 020/352] Replace utils module with werkzeug To simplify maintenance, we can use Werkzeug directly --- Pipfile | 1 + securedrop_proxy/proxy.py | 4 +- securedrop_proxy/util.py | 103 -------------------------------------- 3 files changed, 3 insertions(+), 105 deletions(-) delete mode 100644 securedrop_proxy/util.py diff --git a/Pipfile b/Pipfile index 5d761889d..3b58e620f 100644 --- a/Pipfile +++ b/Pipfile @@ -10,6 +10,7 @@ python_version = "3.5" furl = "*" pyyaml = "*" requests = "*" +werkzeug = "*" [dev-packages] vcrpy = "*" diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 541b490bc..155caa752 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -1,8 +1,8 @@ import requests import furl -import securedrop_proxy.util as util import tempfile import json +import werkzeug class Req: @@ -109,7 +109,7 @@ def handle_non_json_response(self): def handle_response(self): - ctype = util.parse_options_header(self._presp.headers['content-type']) + ctype = werkzeug.http.parse_options_header(self._presp.headers['content-type']) if ctype[0] == "application/json": self.handle_json_response() diff --git a/securedrop_proxy/util.py b/securedrop_proxy/util.py deleted file mode 100644 index cdb2a9cbf..000000000 --- a/securedrop_proxy/util.py +++ /dev/null @@ -1,103 +0,0 @@ -# coding: utf-8 - -import re - -# with thanks to https://github.com/pallets/werkzeug/blob/master/werkzeug/http.py -_option_header_piece_re = re.compile(r''' - ;\s* - (?P - "[^"\\]*(?:\\.[^"\\]*)*" # quoted string - | - [^\s;,=*]+ # token - ) - \s* - (?: # optionally followed by =value - (?: # equals sign, possibly with encoding - \*\s*=\s* # * indicates extended 
notation - (?P[^\s]+?) - '(?P[^\s]*?)' - | - =\s* # basic notation - ) - (?P - "[^"\\]*(?:\\.[^"\\]*)*" # quoted string - | - [^;,]+ # token - )? - )? - \s* -''', flags=re.VERBOSE) - -_option_header_start_mime_type = re.compile(r',\s*([^;,\s]+)([;,]\s*.+)?') - -def unquote_header_value(value, is_filename=False): - r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). - This does not use the real unquoting but what browsers are actually - using for quoting. - .. versionadded:: 0.5 - :param value: the header value to unquote. - """ - if value and value[0] == value[-1] == '"': - # this is not the real unquoting, but fixing this so that the - # RFC is met will result in bugs with internet explorer and - # probably some other browsers as well. IE for example is - # uploading files with "C:\foo\bar.txt" as filename - value = value[1:-1] - - # if this is a filename and the starting characters look like - # a UNC path, then just return the value without quotes. Using the - # replace sequence below on a UNC path has the effect of turning - # the leading double slash into a single slash and then - # _fix_ie_filename() doesn't work correctly. See #458. - if not is_filename or value[:2] != '\\\\': - return value.replace('\\\\', '\\').replace('\\"', '"') - return value - -def parse_options_header(value, multiple=False): - """Parse a ``Content-Type`` like header into a tuple with the content - type and the options: - >>> parse_options_header('text/html; charset=utf8') - ('text/html', {'charset': 'utf8'}) - This should not be used to parse ``Cache-Control`` like headers that use - a slightly different format. For these headers use the - :func:`parse_dict_header` function. - .. versionadded:: 0.5 - :param value: the header to parse. 
- :param multiple: Whether try to parse and return multiple MIME types - :return: (mimetype, options) or (mimetype, options, mimetype, options, …) - if multiple=True - """ - if not value: - return '', {} - - result = [] - - value = "," + value.replace("\n", ",") - while value: - match = _option_header_start_mime_type.match(value) - if not match: - break - result.append(match.group(1)) # mimetype - options = {} - # Parse options - rest = match.group(2) - while rest: - optmatch = _option_header_piece_re.match(rest) - if not optmatch: - break - option, encoding, _, option_value = optmatch.groups() - option = unquote_header_value(option) - if option_value is not None: - option_value = unquote_header_value( - option_value, - option == 'filename') - if encoding is not None: - option_value = _unquote(option_value).decode(encoding) - options[option] = option_value - rest = rest[optmatch.end():] - result.append(options) - if multiple is False: - return tuple(result) - value = rest - - return tuple(result) if result else ('', {}) From 1f65dac4e08f5b280ca57eaf020167c76e25ee57 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 16 Aug 2018 16:52:33 -0700 Subject: [PATCH 021/352] Unit tests: Make __main__.py into file of proxy unit tests --- fixtures/proxy_200_valid_path.yaml | 34 ++++++++ fixtures/proxy_parameters.yaml | 67 +++++++++++++++ securedrop_proxy/__main__.py | 126 ----------------------------- tests/test_proxy.py | 112 +++++++++++++++++++++++-- 4 files changed, 205 insertions(+), 134 deletions(-) create mode 100644 fixtures/proxy_200_valid_path.yaml create mode 100644 fixtures/proxy_parameters.yaml delete mode 100644 securedrop_proxy/__main__.py diff --git a/fixtures/proxy_200_valid_path.yaml b/fixtures/proxy_200_valid_path.yaml new file mode 100644 index 000000000..ef116fd88 --- /dev/null +++ b/fixtures/proxy_200_valid_path.yaml @@ -0,0 +1,34 @@ +interactions: +- request: + body: null + headers: + Accept: [application/json] + method: GET + uri: 
https://jsonplaceholder.typicode.com/posts/1 + response: + body: {string: "{\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"sunt aut facere + repellat provident occaecati excepturi optio reprehenderit\",\n \"body\": + \"quia et suscipit\\nsuscipit recusandae consequuntur expedita et cum\\nreprehenderit + molestiae ut ut quas totam\\nnostrum rerum est autem sunt rem eveniet architecto\"\n}"} + headers: + Access-Control-Allow-Credentials: ['true'] + CF-Cache-Status: [HIT] + CF-RAY: [44b7cfc9cd806c58-SJC] + Cache-Control: ['public, max-age=14400'] + Connection: [keep-alive] + Content-Length: ['292'] + Content-Type: [application/json; charset=utf-8] + Date: ['Thu, 16 Aug 2018 23:51:55 GMT'] + Etag: [W/"124-yiKdLzqO5gfBrJFrcdJ8Yq0LGnU"] + Expect-CT: ['max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"'] + Expires: ['Fri, 17 Aug 2018 03:51:55 GMT'] + Pragma: [no-cache] + Server: [cloudflare] + Set-Cookie: ['__cfduid=d5d93decbd21c24b97c90d1f49461fd8d1534463515; expires=Fri, + 16-Aug-19 23:51:55 GMT; path=/; domain=.typicode.com; HttpOnly'] + Vary: ['Origin, Accept-Encoding'] + Via: [1.1 vegur] + X-Content-Type-Options: [nosniff] + X-Powered-By: [Express] + status: {code: 200, message: OK} +version: 1 diff --git a/fixtures/proxy_parameters.yaml b/fixtures/proxy_parameters.yaml new file mode 100644 index 000000000..af1b06ffe --- /dev/null +++ b/fixtures/proxy_parameters.yaml @@ -0,0 +1,67 @@ +interactions: +- request: + body: null + headers: + Accept: [application/json] + method: GET + uri: https://jsonplaceholder.typicode.com/posts?userId=1 + response: + body: {string: "[\n {\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"sunt + aut facere repellat provident occaecati excepturi optio reprehenderit\",\n + \ \"body\": \"quia et suscipit\\nsuscipit recusandae consequuntur expedita + et cum\\nreprehenderit molestiae ut ut quas totam\\nnostrum rerum est autem + sunt rem eveniet architecto\"\n },\n {\n \"userId\": 1,\n \"id\": + 2,\n \"title\": 
\"qui est esse\",\n \"body\": \"est rerum tempore vitae\\nsequi + sint nihil reprehenderit dolor beatae ea dolores neque\\nfugiat blanditiis + voluptate porro vel nihil molestiae ut reiciendis\\nqui aperiam non debitis + possimus qui neque nisi nulla\"\n },\n {\n \"userId\": 1,\n \"id\": + 3,\n \"title\": \"ea molestias quasi exercitationem repellat qui ipsa sit + aut\",\n \"body\": \"et iusto sed quo iure\\nvoluptatem occaecati omnis + eligendi aut ad\\nvoluptatem doloribus vel accusantium quis pariatur\\nmolestiae + porro eius odio et labore et velit aut\"\n },\n {\n \"userId\": 1,\n + \ \"id\": 4,\n \"title\": \"eum et est occaecati\",\n \"body\": \"ullam + et saepe reiciendis voluptatem adipisci\\nsit amet autem assumenda provident + rerum culpa\\nquis hic commodi nesciunt rem tenetur doloremque ipsam iure\\nquis + sunt voluptatem rerum illo velit\"\n },\n {\n \"userId\": 1,\n \"id\": + 5,\n \"title\": \"nesciunt quas odio\",\n \"body\": \"repudiandae veniam + quaerat sunt sed\\nalias aut fugiat sit autem sed est\\nvoluptatem omnis possimus + esse voluptatibus quis\\nest aut tenetur dolor neque\"\n },\n {\n \"userId\": + 1,\n \"id\": 6,\n \"title\": \"dolorem eum magni eos aperiam quia\",\n + \ \"body\": \"ut aspernatur corporis harum nihil quis provident sequi\\nmollitia + nobis aliquid molestiae\\nperspiciatis et ea nemo ab reprehenderit accusantium + quas\\nvoluptate dolores velit et doloremque molestiae\"\n },\n {\n \"userId\": + 1,\n \"id\": 7,\n \"title\": \"magnam facilis autem\",\n \"body\": + \"dolore placeat quibusdam ea quo vitae\\nmagni quis enim qui quis quo nemo + aut saepe\\nquidem repellat excepturi ut quia\\nsunt ut sequi eos ea sed quas\"\n + \ },\n {\n \"userId\": 1,\n \"id\": 8,\n \"title\": \"dolorem dolore + est ipsam\",\n \"body\": \"dignissimos aperiam dolorem qui eum\\nfacilis + quibusdam animi sint suscipit qui sint possimus cum\\nquaerat magni maiores + excepturi\\nipsam ut commodi dolor voluptatum modi aut vitae\"\n },\n {\n + \ 
\"userId\": 1,\n \"id\": 9,\n \"title\": \"nesciunt iure omnis dolorem + tempora et accusantium\",\n \"body\": \"consectetur animi nesciunt iure + dolore\\nenim quia ad\\nveniam autem ut quam aut nobis\\net est aut quod aut + provident voluptas autem voluptas\"\n },\n {\n \"userId\": 1,\n \"id\": + 10,\n \"title\": \"optio molestias id quia eum\",\n \"body\": \"quo + et expedita modi cum officia vel magni\\ndoloribus qui repudiandae\\nvero + nisi sit\\nquos veniam quod sed accusamus veritatis error\"\n }\n]"} + headers: + Access-Control-Allow-Credentials: ['true'] + CF-Cache-Status: [HIT] + CF-RAY: [44b7bdac4e0a6bf8-SJC] + Cache-Control: ['public, max-age=14400'] + Connection: [keep-alive] + Content-Type: [application/json; charset=utf-8] + Date: ['Thu, 16 Aug 2018 23:39:33 GMT'] + Etag: [W/"aa6-j2NSH739l9uq40OywFMn7Y0C/iY"] + Expect-CT: ['max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"'] + Expires: ['Fri, 17 Aug 2018 03:39:33 GMT'] + Pragma: [no-cache] + Server: [cloudflare] + Set-Cookie: ['__cfduid=dca3d39adfb7f9a82e73277f19cf335921534462773; expires=Fri, + 16-Aug-19 23:39:33 GMT; path=/; domain=.typicode.com; HttpOnly'] + Vary: ['Origin, Accept-Encoding'] + Via: [1.1 vegur] + X-Content-Type-Options: [nosniff] + X-Powered-By: [Express] + status: {code: 200, message: OK} +version: 1 diff --git a/securedrop_proxy/__main__.py b/securedrop_proxy/__main__.py deleted file mode 100644 index 6abb71d76..000000000 --- a/securedrop_proxy/__main__.py +++ /dev/null @@ -1,126 +0,0 @@ -import proxy -import json -import subprocess -import uuid - -conf = proxy.Conf() -conf.host = 'jsonplaceholder.typicode.com' -conf.scheme = 'https' -conf.port = 443 - -def on_save(fh, res): - - fn = str(uuid.uuid4()) - - # this will be `qvm-move...` in production - subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) - - res.headers['X-Origin-Content-Type'] = res.headers['content-type'] - res.headers['Content-Type'] = 'application/json' - res.body = 
json.dumps({'filename': fn }) - -# does it work at all -req = proxy.Req() -req.method = 'GET' -req.path_query = '' -req.headers = {'Accept': 'application/json'} - -p = proxy.Proxy(conf, req, on_save) -p.proxy() - -print(p.res.status) -print(p.res.headers) -print(p.res.version) -print(p.res.body) - -# params -req = proxy.Req() -req.method = 'GET' -req.path_query = '/posts?userId=1' -req.headers = {'Accept': 'application/json'} - -p = proxy.Proxy(conf, req, on_save) -p.proxy() - -print(p.res.status) -#print(res.headers) -print(p.res.version) -print(json.loads(p.res.body.decode())) - - -# path -req = proxy.Req() -req.method = 'GET' -req.path_query = '/posts/1' -req.headers = {'Accept': 'application/json'} - -p = proxy.Proxy(conf, req, on_save) -p.proxy() - -print(p.res.status) # 200 -print(p.res.version) -print(json.loads(p.res.body.decode())) - - -# 404 -req = proxy.Req() -req.method = 'GET' -req.path_query = '/notfound' -req.headers = {'Accept': 'application/json'} - -p = proxy.Proxy(conf, req, on_save) -p.proxy() - -print(p.res.status) # 404 -print(p.res.headers) -print(p.res.version) -print(p.res.body) # {} - - -# 400 bad path -req = proxy.Req() -req.method = 'GET' -req.path_query = 'http://badpath.lol/path' -req.headers = {'Accept': 'application/json'} - -p = proxy.Proxy(conf, req, on_save) -p.proxy() - -print(p.res.status) # 400 -print(p.res.headers) -print(p.res.version) -print(p.res.body) # {'error': 'Path provided in request did not look valid'} - -# 400 no handler -req = proxy.Req() -req.method = 'GET' -req.path_query = 'http://badpath.lol/path' -req.headers = {'Accept': 'application/json'} - -p = proxy.Proxy(conf, req, None) -p.proxy() - -print(p.res.status) # 400 -print(p.res.headers) -print(p.res.version) -print(p.res.body) # {'error': 'Request callback is not set.'} - - -# 500 proxy error (in this case, misconfiguration) -conf = proxy.Conf() -conf.host = 'jsonplaceholder.typicode.com' -conf.scheme = 'https://http' # bad -conf.port = 443 - -req = 
proxy.Req() -req.method = 'GET' -req.path_query = '/posts/1' -req.headers = {'Accept': 'application/json'} - -p = proxy.Proxy(conf, req, on_save) -p.proxy() - -print(p.res.status) # 500 -print(p.res.headers) -print(p.res.version) -print(p.res.body) # {'error': 'Proxy error while generating URL to request'} diff --git a/tests/test_proxy.py b/tests/test_proxy.py index b5cc4bc6a..7eec40522 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -8,7 +8,7 @@ from securedrop_proxy import config -class TestProxy(unittest.TestCase): +class TestProxyValidConfig(unittest.TestCase): def setUp(self): self.conf = config.Conf() self.conf.host = 'jsonplaceholder.typicode.com' @@ -24,7 +24,7 @@ def on_save(self, fh, res): res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.headers['Content-Type'] = 'application/json' - res.body = json.dumps({'filename': self.fn }) + res.body = json.dumps({'filename': self.fn}) def on_done(self, res): res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] @@ -39,7 +39,7 @@ def test_400_if_callback_not_set(self): p = proxy.Proxy() p.proxy() - self.assertEquals(p.res.status, 400) + self.assertEqual(p.res.status, 400) @vcr.use_cassette('fixtures/basic_proxy_functionality.yaml') def test_proxy_basic_functionality(self): @@ -51,9 +51,9 @@ def test_proxy_basic_functionality(self): p = proxy.Proxy(self.conf, req, self.on_save) p.proxy() - self.assertEquals(p.res.status, 200) - self.assertEquals(p.res.body, json.dumps({'filename': self.fn })) - self.assertEquals(p.res.headers['Content-Type'], 'application/json') + self.assertEqual(p.res.status, 200) + self.assertEqual(p.res.body, json.dumps({'filename': self.fn})) + self.assertEqual(p.res.headers['Content-Type'], 'application/json') @vcr.use_cassette('fixtures/proxy_404.yaml') def test_proxy_produces_404(self): @@ -67,5 +67,101 @@ def test_proxy_produces_404(self): p.on_done = self.on_done p.proxy() - self.assertEquals(p.res.status, 404) - 
self.assertEquals(p.res.headers['Content-Type'], 'application/json') + self.assertEqual(p.res.status, 404) + self.assertEqual(p.res.headers['Content-Type'], 'application/json') + + @vcr.use_cassette('fixtures/proxy_parameters.yaml') + def test_proxy_handles_query_params_gracefully(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = '/posts?userId=1' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy(self.conf, req, self.on_save) + p.proxy() + + self.assertEqual(p.res.status, 200) + self.assertIn('application/json', p.res.headers['Content-Type']) + body = json.loads(p.res.body) + for item in body: + self.assertEqual(item['userId'], 1) + + # No cassette needed as no network request should be sent + def test_proxy_400_bad_path(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = 'http://badpath.lol/path' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy(self.conf, req) + p.on_save = self.on_save + p.on_done = self.on_done + p.proxy() + + self.assertEqual(p.res.status, 400) + self.assertEqual(p.res.headers['Content-Type'], 'application/json') + self.assertIn('Path provided in request did not look valid', + p.res.body) + + @vcr.use_cassette('fixtures/proxy_200_valid_path.yaml') + def test_proxy_200_valid_path(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = '/posts/1' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy(self.conf, req, self.on_save) + p.proxy() + + self.assertEqual(p.res.status, 200) + self.assertIn('application/json', p.res.headers['Content-Type']) + body = json.loads(p.res.body) + self.assertEqual(body['userId'], 1) + + # No cassette needed as no network request should be sent + def test_proxy_400_no_handler(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = 'http://badpath.lol/path' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy(self.conf, req) + p.proxy() + + self.assertEqual(p.res.status, 400) + 
self.assertEqual(p.res.headers['Content-Type'], 'application/json') + self.assertIn('Request callback is not set', + p.res.body) + + +class TestProxyInvalidConfig(unittest.TestCase): + def setUp(self): + self.conf = config.Conf() + self.conf.host = 'jsonplaceholder.typicode.com' + self.conf.scheme = 'https://http' # bad + self.conf.port = 443 + + def on_save(self, fh, res): + self.fn = str(uuid.uuid4()) + + # this will be `qvm-move...` in production + subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) + + res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] + res.headers['Content-Type'] = 'application/json' + res.body = json.dumps({'filename': self.fn}) + + # No cassette needed as no network request should be sent + def test_proxy_500_misconfiguration(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = '/posts/1' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy(self.conf, req, self.on_save) + p.proxy() + + self.assertEqual(p.res.status, 500) + self.assertEqual(p.res.headers['Content-Type'], 'application/json') + self.assertIn('Proxy error while generating URL to request', + p.res.body) From 527f7c24695d3fc7621780adcd421094b2c6f354 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 20 Aug 2018 09:23:26 -0700 Subject: [PATCH 022/352] Add conf as argument to on_save callback The on_save callback uses the config object in order to determine if qvm-move-to-vm should be used. We should pass this as an arg. 
--- securedrop_proxy/callbacks.py | 12 +++++++----- securedrop_proxy/proxy.py | 2 +- tests/test_proxy.py | 4 ++-- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/securedrop_proxy/callbacks.py b/securedrop_proxy/callbacks.py index 2bed25cc2..fa42fb9c9 100644 --- a/securedrop_proxy/callbacks.py +++ b/securedrop_proxy/callbacks.py @@ -1,5 +1,7 @@ +import subprocess import sys import json +import uuid def err_on_done(res): @@ -14,22 +16,22 @@ def err_on_done(res): # # In any case, this callback mutates the given result object (in # `res`) to include the name of the new file, or to indicate errors. -def on_save(fh, res): +def on_save(fh, res, conf): fn = str(uuid.uuid4()) try: subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) - if p.conf.dev is not True: - subprocess.run(['qvm-move-to-vm', p.conf.target_vm, "/tmp/{}".format(fn)]) + if conf.dev is not True: + subprocess.run(['qvm-move-to-vm', conf.target_vm, "/tmp/{}".format(fn)]) except Exception: res.status = 500 res.headers['Content-Type'] = 'application/json' - res.headers['X-Origin-Content-Type'] = res.headers['content-type'] + res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.body = json.dumps({"error": "Unhandled error while handling non-JSON content, sorry"}) return - res.headers['X-Origin-Content-Type'] = res.headers['content-type'] res.headers['Content-Type'] = 'application/json' + res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.body = json.dumps({'filename': fn }) # new on_done handler diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 155caa752..1e9554d90 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -103,7 +103,7 @@ def handle_non_json_response(self): res.headers = self._presp.headers - self.on_save(fh, res) + self.on_save(fh, res, self.conf) self.res = res diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 7eec40522..12c307dd7 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py 
@@ -15,7 +15,7 @@ def setUp(self): self.conf.scheme = 'https' self.conf.port = 443 - def on_save(self, fh, res): + def on_save(self, fh, res, conf): self.fn = str(uuid.uuid4()) @@ -141,7 +141,7 @@ def setUp(self): self.conf.scheme = 'https://http' # bad self.conf.port = 443 - def on_save(self, fh, res): + def on_save(self, fh, res, conf): self.fn = str(uuid.uuid4()) # this will be `qvm-move...` in production From f910bc2e29293641a66fcce459103a9e37eefc4e Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 20 Aug 2018 09:27:21 -0700 Subject: [PATCH 023/352] Bugfix: Default value of headers should be empty dict We return informative JSON errors to users, and in cases where the headers attribute was initialized to None, when we try to set `Content-Type` later on in the proxy logic, we will get an error: TypeError: 'NoneType' object does not support item assignment We can resolve this by setting the default value of headers to an empty dict. --- securedrop_proxy/proxy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 1e9554d90..09c306770 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -10,14 +10,14 @@ def __init__(self): self.method = '' self.path_query = '' self.body = None - self.headers = None + self.headers = {} class Response: def __init__(self, status): self.status = status self.body = None - self.headers = None + self.headers = {} self.version = "0.1.1" class Proxy: From 3f84a0da5ead1ad5149e15ef78bacc9845720bc4 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 20 Aug 2018 09:52:26 -0700 Subject: [PATCH 024/352] Unit tests: Callback functions --- tests/test_callbacks.py | 78 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 tests/test_callbacks.py diff --git a/tests/test_callbacks.py b/tests/test_callbacks.py new file mode 100644 index 000000000..d0993e473 --- /dev/null +++ b/tests/test_callbacks.py @@ 
-0,0 +1,78 @@ +from io import StringIO +import json +import sys +import tempfile +import unittest +from unittest.mock import patch + +from securedrop_proxy import callbacks +from securedrop_proxy import config +from securedrop_proxy import proxy + + +class TestCallbacks(unittest.TestCase): + def setUp(self): + self.res = proxy.Response(status=200) + self.res.body = "babbys request" + + self.conf = config.Conf() + self.conf.host = 'jsonplaceholder.typicode.com' + self.conf.scheme = 'https' + self.conf.port = 443 + self.conf.dev = True + + def test_err_on_done(self): + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + with self.assertRaises(SystemExit): + callbacks.err_on_done(self.res) + output = out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + response = json.loads(output) + self.assertEqual(response['status'], 200) + self.assertEqual(response['body'], 'babbys request') + + def test_on_done(self): + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + callbacks.on_done(self.res) + output = out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + response = json.loads(output) + self.assertEqual(response['status'], 200) + self.assertEqual(response['body'], 'babbys request') + + def test_on_save_500_unhandled_error(self): + fh = tempfile.NamedTemporaryFile() + + # Let's generate an error and ensure that an appropriate response + # is sent back to the user + with patch("subprocess.run", side_effect=IOError): + callbacks.on_save(fh, self.res, self.conf) + + self.assertEqual(self.res.status, 500) + self.assertEqual(self.res.headers['Content-Type'], + 'application/json') + self.assertEqual(self.res.headers['X-Origin-Content-Type'], + 'application/json') + self.assertIn('Unhandled error', self.res.body) + + def test_on_save_200_success(self): + fh = tempfile.NamedTemporaryFile() + + callbacks.on_save(fh, self.res, self.conf) + + self.assertEqual(self.res.headers['Content-Type'], + 'application/json') + 
self.assertEqual(self.res.headers['X-Origin-Content-Type'], + 'application/json') + self.assertEqual(self.res.status, 200) + self.assertIn('filename', self.res.body) From 4a1874c55322166f90017a0d2ea06556eadd8299 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 20 Aug 2018 12:24:31 -0700 Subject: [PATCH 025/352] Unit tests: Add main function tests Note: each test here corresponds to one in the README --- tests/test_main.py | 102 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 tests/test_main.py diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 000000000..6bfa8b866 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,102 @@ +from io import StringIO +import json +import subprocess +import sys +import unittest +import uuid + +from securedrop_proxy import config +from securedrop_proxy import main +from securedrop_proxy import proxy + + +class TestMain(unittest.TestCase): + def setUp(self): + self.conf = config.Conf() + self.conf.host = 'jsonplaceholder.typicode.com' + self.conf.scheme = 'https' + self.conf.port = 443 + self.conf.dev = True + + def test_json_response(self): + test_input_json = """{ "method": "GET", + "path_query": "/posts?userId=1" }""" + + req = proxy.Req() + req.method = 'GET' + req.path_query = '' + req.headers = {'Accept': 'application/json'} + + # Use custom callbacks + def on_save(res, fh, conf): + pass + + def on_done(res): + res = res.__dict__ + self.assertEqual(res['status'], 200) + + self.p = proxy.Proxy(self.conf, req, on_save) + self.p.on_done = on_done + self.p.proxy() + + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + main.__main__(test_input_json, self.p) + output = out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + response = json.loads(output) + for item in json.loads(response['body']): + self.assertEqual(item['userId'], 1) + + def test_non_json_response(self): + test_input_json = """{ "method": "GET", + 
"path_query": "" }""" + + def on_save(fh, res, conf): + self.fn = str(uuid.uuid4()) + + subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) + + res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] + res.headers['Content-Type'] = 'application/json' + res.body = json.dumps({'filename': self.fn}) + + self.p = proxy.Proxy(self.conf, proxy.Req(), on_save) + self.p.proxy() + + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + main.__main__(test_input_json, self.p) + output = out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + response = json.loads(output) + self.assertEqual(response['status'], 200) + + # The proxy should have created a filename in the response body + self.assertIn('filename', response['body']) + + def test_error_response(self): + test_input_json = """"foo": "bar", "baz": "bliff" }""" + + def on_save(fh, res, conf): + pass + + def on_done(res): + res = res.__dict__ + self.assertEqual(res['status'], 400) + sys.exit(1) + + self.p = proxy.Proxy(self.conf, proxy.Req(), on_save) + self.p.on_done = on_done + + with self.assertRaises(SystemExit): + self.p.proxy() + main.__main__(test_input_json, self.p) From bd5d8c89ffd7da53e589bbaa602b8e119148e7b7 Mon Sep 17 00:00:00 2001 From: Joshua Thayer Date: Wed, 22 Aug 2018 00:06:30 -0700 Subject: [PATCH 026/352] Adds qubes-specific things --- Makefile | 6 ++++ README.md | 71 ++++++++++++++++++++++++++++++++++++++++++ entrypoint.sh | 4 +++ qubes/securedrop.Proxy | 1 + 4 files changed, 82 insertions(+) create mode 100644 Makefile create mode 100755 entrypoint.sh create mode 100644 qubes/securedrop.Proxy diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..1b3475073 --- /dev/null +++ b/Makefile @@ -0,0 +1,6 @@ +install: + pipenv install + cp qubes/securedrop.Proxy /etc/qubes-rpc/securedrop.Proxy + +test: + pipenv run python -m unittest -v diff --git a/README.md b/README.md index c27419cc6..612df2631 100644 --- a/README.md +++ b/README.md @@ -51,6 
+51,10 @@ mode, the file is not moved off the VM, but is saved as a temporary file in `/tmp`. In both cases, the response written to STDOUT includes the name of the new file. +#### tests + +Unit tests can be run with `make tests` + #### running The following commands can be used to demonstrate the proxy. @@ -71,3 +75,70 @@ invalid JSON. The proxy detects that, and prints an error message (still a valid proxy response). $ cat examples/bad.json | ./sd-proxy.py ./config-example.yaml + +#### Qubes integration + +Until we determine how we wish to package and install this script, +demonstrating the proxy in a Qubes environment is a somewhat manual +process. + +First, determine which of your VMs will be acting as the proxy VM +(where this code will be running), and which will be acting as the +client VM (where the client code will be running). For the purposes of +this documentation, we assume the client is running in +`securedrop-client`, and the proxy is running in `seuredrop-proxy`. + +Edit `qubes/securedrop.Proxy` to reflect the path to `entrypoint.sh` +in this repo. Run `make install`, which will move `securedrop.Proxy` +(the qubes-rpc "server path definition" file) into place in +`/etc/qubes-rpc/`. + +On `dom0`, create the file `/etc/qubes-rpc/policy/securedrop.Proxy` +with the contents: + + securedrop-client securedrop-proxy allow + $anyvm $anyvm deny + +(replacing the VM names with the correct source and destination names +for your environment) + +Also in `dom0`, edit `/etc/qubes-rpc/policy/qubes.Filecopy`, to add +near the top: + + securedrop-proxy securedrop-client allow + +(again replacing the VM names with the correct source and destination +names for your environment). This allows non-JSON responses to be +moved to the client VM using Qubes' native inter-VM file copy service. + +Copy `config-example.yaml` to `config.yaml`, and edit it to reflect +your situation- check that `target_vm` is set to the correct client VM +name, and assure that `dev` is `False`. 
This documentation assumes +you've left `host` set to `jsonplaceholder.typicode.com`. + +Now on the client VM you should be able to do: + + $ echo '{"method":"GET","path_query":"/posts?userId=1"}' | /usr/lib/qubes/qrexec-client-vm securedrop-client securedrop.Proxy + +You should see a successful JSON response as returned by the remote server. + +Try now + + $ echo '{"method":"GET","path_query":""}' | /usr/lib/qubes/qrexec-client-vm securedrop-client securedrop.Proxy + +If you have configured everything correctly, you should see a JSON +response which include a `body` which looks like: + + { ... + "body": "{\"filename\": \"7463c589-92d2-46ba-845f-3ace2587916d\"}" + } + +If you look in `~/QubesIncoming/securedrop-proxy`, you should see a +new file with that name. The content of that file will be the content +returned by the remote server. + +Finally, try invoking an error. Provide an invalid JSON request, and +notice you receive a `400` response from the proxy: + + $ echo '[INVALID' | /usr/lib/qubes/qrexec-client-vm securedrop-client securedrop.Proxy + {"body": "{\"error\": \"Invalid JSON in request\"}", "version": "0.1.1", "status": 400, "headers": {"Content-Type": "application/json"}} diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100755 index 000000000..35815f05b --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +cd /home/user/projects/securedrop-proxy +pipenv run ./sd-proxy.py ./config.yaml diff --git a/qubes/securedrop.Proxy b/qubes/securedrop.Proxy new file mode 100644 index 000000000..133c09008 --- /dev/null +++ b/qubes/securedrop.Proxy @@ -0,0 +1 @@ +/home/user/projects/securedrop-proxy/entrypoint.sh From cb9fbabac6cf32fae28ef2d4fb0c024b3ce2408d Mon Sep 17 00:00:00 2001 From: Joshua Thayer Date: Wed, 22 Aug 2018 00:19:21 -0700 Subject: [PATCH 027/352] typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 612df2631..fe89c5b71 100644 --- a/README.md +++ b/README.md @@ -86,7 +86,7 
@@ First, determine which of your VMs will be acting as the proxy VM (where this code will be running), and which will be acting as the client VM (where the client code will be running). For the purposes of this documentation, we assume the client is running in -`securedrop-client`, and the proxy is running in `seuredrop-proxy`. +`securedrop-client`, and the proxy is running in `securedrop-proxy`. Edit `qubes/securedrop.Proxy` to reflect the path to `entrypoint.sh` in this repo. Run `make install`, which will move `securedrop.Proxy` From 6b162a2b6559c870111aa2da4c298c738e07c358 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 22 Aug 2018 09:57:19 -0700 Subject: [PATCH 028/352] Remove unnecessary comment and subprocess.run in callbacks --- securedrop_proxy/callbacks.py | 2 +- tests/test_proxy.py | 9 --------- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/securedrop_proxy/callbacks.py b/securedrop_proxy/callbacks.py index fa42fb9c9..d67bcebdb 100644 --- a/securedrop_proxy/callbacks.py +++ b/securedrop_proxy/callbacks.py @@ -34,6 +34,6 @@ def on_save(fh, res, conf): res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.body = json.dumps({'filename': fn }) -# new on_done handler + def on_done(res): print(json.dumps(res.__dict__)) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 12c307dd7..22aac3def 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -16,12 +16,7 @@ def setUp(self): self.conf.port = 443 def on_save(self, fh, res, conf): - self.fn = str(uuid.uuid4()) - - # this will be `qvm-move...` in production - subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.headers['Content-Type'] = 'application/json' res.body = json.dumps({'filename': self.fn}) @@ -143,10 +138,6 @@ def setUp(self): def on_save(self, fh, res, conf): self.fn = str(uuid.uuid4()) - - # this will be `qvm-move...` in production - subprocess.run(["cp", fh.name, 
"/tmp/{}".format(self.fn)]) - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.headers['Content-Type'] = 'application/json' res.body = json.dumps({'filename': self.fn}) From 3ca06fa59e5a48d651da9ac0842521a8f29b2e43 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 24 Aug 2018 15:08:25 -0700 Subject: [PATCH 029/352] Minor docs/makefile fixes --- Makefile | 2 +- README.md | 13 +++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index 1b3475073..cdffb28c4 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ install: pipenv install - cp qubes/securedrop.Proxy /etc/qubes-rpc/securedrop.Proxy + sudo cp qubes/securedrop.Proxy /etc/qubes-rpc/securedrop.Proxy test: pipenv run python -m unittest -v diff --git a/README.md b/README.md index fe89c5b71..f742855f0 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ the name of the new file. #### tests -Unit tests can be run with `make tests` +Unit tests can be run with `make test` #### running @@ -89,7 +89,8 @@ this documentation, we assume the client is running in `securedrop-client`, and the proxy is running in `securedrop-proxy`. Edit `qubes/securedrop.Proxy` to reflect the path to `entrypoint.sh` -in this repo. Run `make install`, which will move `securedrop.Proxy` +in this repo. Also edit the directory to this repo code in `entrypoint.sh`. +Next, run `make install`, which will move `securedrop.Proxy` (the qubes-rpc "server path definition" file) into place in `/etc/qubes-rpc/`. @@ -116,15 +117,15 @@ your situation- check that `target_vm` is set to the correct client VM name, and assure that `dev` is `False`. This documentation assumes you've left `host` set to `jsonplaceholder.typicode.com`. 
-Now on the client VM you should be able to do: +Now on the client VM you should be able to do (again replacing `securedrop-proxy` with the name of your proxy AppVM): - $ echo '{"method":"GET","path_query":"/posts?userId=1"}' | /usr/lib/qubes/qrexec-client-vm securedrop-client securedrop.Proxy + $ echo '{"method":"GET","path_query":"/posts?userId=1"}' | /usr/lib/qubes/qrexec-client-vm securedrop-proxy securedrop.Proxy You should see a successful JSON response as returned by the remote server. Try now - $ echo '{"method":"GET","path_query":""}' | /usr/lib/qubes/qrexec-client-vm securedrop-client securedrop.Proxy + $ echo '{"method":"GET","path_query":""}' | /usr/lib/qubes/qrexec-client-vm securedrop-proxy securedrop.Proxy If you have configured everything correctly, you should see a JSON response which include a `body` which looks like: @@ -140,5 +141,5 @@ returned by the remote server. Finally, try invoking an error. Provide an invalid JSON request, and notice you receive a `400` response from the proxy: - $ echo '[INVALID' | /usr/lib/qubes/qrexec-client-vm securedrop-client securedrop.Proxy + $ echo '[INVALID' | /usr/lib/qubes/qrexec-client-vm securedrop-proxy securedrop.Proxy {"body": "{\"error\": \"Invalid JSON in request\"}", "version": "0.1.1", "status": 400, "headers": {"Content-Type": "application/json"}} From 010a72f54711675a806f5787d8629e44123ae20e Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 27 Sep 2018 16:24:08 -0400 Subject: [PATCH 030/352] Moves sd-proxy inside of the module --- sd-proxy.py => securedrop_proxy/entrypoint.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename sd-proxy.py => securedrop_proxy/entrypoint.py (100%) diff --git a/sd-proxy.py b/securedrop_proxy/entrypoint.py similarity index 100% rename from sd-proxy.py rename to securedrop_proxy/entrypoint.py From d431cd449dddc138f1ea84a4903680b704f2ba04 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 27 Sep 2018 16:25:37 -0400 Subject: [PATCH 031/352] Updates entrypoint 
into a function --- securedrop_proxy/entrypoint.py | 63 ++++++++++++++++++---------------- 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index 5d01b098d..2bf7211db 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -8,34 +8,39 @@ import sys import json -import securedrop_proxy.proxy as proxy import uuid import subprocess -import securedrop_proxy.config as config -import securedrop_proxy.callbacks as callbacks -import securedrop_proxy.main as main - -# a fresh, new proxy object -p = proxy.Proxy() - -# set up an error handler early, so we can use it during -# configuration, etc -p.on_done = callbacks.err_on_done - -# path to config file must be at argv[1] -if len(sys.argv) != 2: - p.simple_error(500, 'sd-proxy script not called with path to configuration file') - p.on_done(p.res) - -# read config. `read_conf` will call `p.on_done` if there is a config -# problem, and will return a Conf object on success. -conf_path = sys.argv[1] -p.conf = config.read_conf(conf_path, p) - -# read user request from STDIN -incoming = [] -for line in sys.stdin: - incoming.append(line) -incoming = '\n'.join(incoming) - -main.__main__(incoming, p) + +from . import proxy +from . import config +from . import callbacks +from . import main + + +def start(): + # a fresh, new proxy object + p = proxy.Proxy() + + # set up an error handler early, so we can use it during + # configuration, etc + p.on_done = callbacks.err_on_done + + # path to config file must be at argv[1] + if len(sys.argv) != 2: + p.simple_error( + 500, "sd-proxy script not called with path to configuration file" + ) + p.on_done(p.res) + + # read config. `read_conf` will call `p.on_done` if there is a config + # problem, and will return a Conf object on success. 
+ conf_path = sys.argv[1] + p.conf = config.read_conf(conf_path, p) + + # read user request from STDIN + incoming = [] + for line in sys.stdin: + incoming.append(line) + incoming = "\n".join(incoming) + + main.__main__(incoming, p) From 3d3882c41a0075a7a43b98a4eb7f179fd86521f0 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 27 Sep 2018 16:27:35 -0400 Subject: [PATCH 032/352] Adds setup.py and related changes for packaging Adds a console entry point to use the `sd-proxy` command. It also adds the LICENSE file for the project --- LICENSE | 674 +++++++++++++++++++++++++++++++++++++++++ MANIFEST.in | 9 + qubes/securedrop.Proxy | 2 +- setup.py | 32 ++ 4 files changed, 716 insertions(+), 1 deletion(-) create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 setup.py diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..f288702d2 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..315d5e4ef --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,9 @@ +include securedrop_proxy/*.py +include requirements.txt +include README.md +include LICENSE +include setup.py +include Pipfile +include Pipfile.lock +include qubes/securedrop.Proxy +include config-example.yaml diff --git a/qubes/securedrop.Proxy b/qubes/securedrop.Proxy index 133c09008..cea1922e4 100644 --- a/qubes/securedrop.Proxy +++ b/qubes/securedrop.Proxy @@ -1 +1 @@ -/home/user/projects/securedrop-proxy/entrypoint.sh +/usr/bin/sd-proxy /etc/sd-proxy.yaml diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..dff5d4a80 --- /dev/null +++ b/setup.py @@ -0,0 +1,32 @@ +import setuptools + +with open("README.md", "r") as fh: + long_description = fh.read() + +setuptools.setup( + name="securedrop-proxy", + version="0.1.0", + author="Freedom of the Press Foundation", + author_email="securedrop@freedom.press", + description="SecureDrop Qubes proxy service", + long_description=long_description, + long_description_content_type="text/markdown", + license="GPLv3+", + install_requires=["requests","furl", "pyyaml", "werkzeug"], + python_requires=">=3.5", + url="https://github.com/freedomofpress/securedrop-proxy", + packages=setuptools.find_packages(exclude=["docs", "tests"]), + classifiers=( + "Development Status :: 3 - Alpha", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + ), + entry_points={ + 'console_scripts': [ + 'sd-proxy = securedrop_proxy.entrypoint:start', + ], + }, +) From 280e82300c498f3694e1ee7b1e21612111086e87 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 12 Oct 2018 00:16:56 -0400 Subject: [PATCH 033/352] Fixes #11 Saves the downloaded submissions and replies Now we are explicitly closing 
NamedTemporaryFile and also marking it not to delete on save. --- securedrop_proxy/proxy.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 09c306770..2108d1a48 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -96,11 +96,15 @@ def handle_non_json_response(self): res = Response(self._presp.status_code) - fh = tempfile.NamedTemporaryFile() + # Create a NamedTemporaryFile, we don't want + # to delete it after closing. + fh = tempfile.NamedTemporaryFile(delete=False) for c in self._presp.iter_content(10): fh.write(c) + fh.close() + res.headers = self._presp.headers self.on_save(fh, res, self.conf) From 0c0be3ec714875572d2fca2e548a4389b5c1232f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 12 Oct 2018 16:21:38 -0700 Subject: [PATCH 034/352] Regression test: Assert that file is not empty --- tests/test_main.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_main.py b/tests/test_main.py index 6bfa8b866..8a4a28e15 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -83,6 +83,13 @@ def on_save(fh, res, conf): # The proxy should have created a filename in the response body self.assertIn('filename', response['body']) + # The file should not be empty + with open("/tmp/{}".format(self.fn)) as f: + saved_file = f.read() + + # We expect HTML content in the file from the test data + self.assertIn("", saved_file) + def test_error_response(self): test_input_json = """"foo": "bar", "baz": "bliff" }""" From 66bcf95bb04b1f521c3076123b8f3c190c722d39 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 2 Nov 2018 01:52:54 +0530 Subject: [PATCH 035/352] Adds lock file and requirements files as required for packaging --- Pipfile.lock | 200 +++++++++++++++++++++++++++++++++++++++++ qubes/securedrop.Proxy | 0 requirements-build.txt | 10 +++ requirements.txt | 10 +++ 4 files changed, 220 insertions(+) create mode 100644 Pipfile.lock mode change 100644 
=> 100755 qubes/securedrop.Proxy create mode 100644 requirements-build.txt create mode 100644 requirements.txt diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 000000000..fe9c5e31a --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,200 @@ +{ + "_meta": { + "hash": { + "sha256": "58ba4d066d275717a5b571cfe5195a63f95cbfec1b2a12d1ba40ecfcb6d00c6d" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.5" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "certifi": { + "hashes": [ + "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638", + "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a" + ], + "version": "==2018.8.24" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "furl": { + "hashes": [ + "sha256:17654103b8d0cbe42798592db099c728165ac12057d49fe2e69de967d87bf29b" + ], + "index": "pypi", + "version": "==1.2.1" + }, + "idna": { + "hashes": [ + "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", + "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" + ], + "version": "==2.7" + }, + "orderedmultidict": { + "hashes": [ + "sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b", + "sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5" + ], + "version": "==1.0" + }, + "pyyaml": { + "hashes": [ + "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b", + "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf", + "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a", + "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3", + "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1", + 
"sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1", + "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613", + "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04", + "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f", + "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537", + "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531" + ], + "index": "pypi", + "version": "==3.13" + }, + "requests": { + "hashes": [ + "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", + "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a" + ], + "index": "pypi", + "version": "==2.19.1" + }, + "six": { + "hashes": [ + "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", + "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + ], + "version": "==1.11.0" + }, + "urllib3": { + "hashes": [ + "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", + "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" + ], + "markers": "python_version != '3.1.*' and python_version < '4' and python_version != '3.2.*' and python_version != '3.0.*' and python_version != '3.3.*' and python_version >= '2.6'", + "version": "==1.23" + }, + "werkzeug": { + "hashes": [ + "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c", + "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b" + ], + "index": "pypi", + "version": "==0.14.1" + } + }, + "develop": { + "idna": { + "hashes": [ + "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", + "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" + ], + "version": "==2.7" + }, + "multidict": { + "hashes": [ + "sha256:05eeab69bf2b0664644c62bd92fabb045163e5b8d4376a31dfb52ce0210ced7b", + 
"sha256:0c85880efa7cadb18e3b5eef0aa075dc9c0a3064cbbaef2e20be264b9cf47a64", + "sha256:136f5a4a6a4adeacc4dc820b8b22f0a378fb74f326e259c54d1817639d1d40a0", + "sha256:14906ad3347c7d03e9101749b16611cf2028547716d0840838d3c5e2b3b0f2d3", + "sha256:1ade4a3b71b1bf9e90c5f3d034a87fe4949c087ef1f6cd727fdd766fe8bbd121", + "sha256:22939a00a511a59f9ecc0158b8db728afef57975ce3782b3a265a319d05b9b12", + "sha256:2b86b02d872bc5ba5b3a4530f6a7ba0b541458ab4f7c1429a12ac326231203f7", + "sha256:3c11e92c3dfc321014e22fb442bc9eb70e01af30d6ce442026b0c35723448c66", + "sha256:4ba3bd26f282b201fdbce351f1c5d17ceb224cbedb73d6e96e6ce391b354aacc", + "sha256:4c6e78d042e93751f60672989efbd6a6bc54213ed7ff695fff82784bbb9ea035", + "sha256:4d80d1901b89cc935a6cf5b9fd89df66565272722fe2e5473168927a9937e0ca", + "sha256:4fcf71d33178a00cc34a57b29f5dab1734b9ce0f1c97fb34666deefac6f92037", + "sha256:52f7670b41d4b4d97866ebc38121de8bcb9813128b7c4942b07794d08193c0ab", + "sha256:5368e2b7649a26b7253c6c9e53241248aab9da49099442f5be238fde436f18c9", + "sha256:5bb65fbb48999044938f0c0508e929b14a9b8bf4939d8263e9ea6691f7b54663", + "sha256:60672bb5577472800fcca1ac9dae232d1461db9f20f055184be8ce54b0052572", + "sha256:669e9be6d148fc0283f53e17dd140cde4dc7c87edac8319147edd5aa2a830771", + "sha256:6a0b7a804e8d1716aa2c72e73210b48be83d25ba9ec5cf52cf91122285707bb1", + "sha256:79034ea3da3cf2a815e3e52afdc1f6c1894468c98bdce5d2546fa2342585497f", + "sha256:79247feeef6abcc11137ad17922e865052f23447152059402fc320f99ff544bb", + "sha256:81671c2049e6bf42c7fd11a060f8bc58f58b7b3d6f3f951fc0b15e376a6a5a98", + "sha256:82ac4a5cb56cc9280d4ae52c2d2ebcd6e0668dd0f9ef17f0a9d7c82bd61e24fa", + "sha256:9436267dbbaa49dad18fbbb54f85386b0f5818d055e7b8e01d219661b6745279", + "sha256:94e4140bb1343115a1afd6d84ebf8fca5fb7bfb50e1c2cbd6f2fb5d3117ef102", + "sha256:a2cab366eae8a0ffe0813fd8e335cf0d6b9bb6c5227315f53bb457519b811537", + "sha256:a596019c3eafb1b0ae07db9f55a08578b43c79adb1fe1ab1fd818430ae59ee6f", + "sha256:e8848ae3cd6a784c29fae5055028bee9bffcc704d8bcad09bd46b42b44a833e2", 
+ "sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306", + "sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd" + ], + "markers": "python_version >= '3.4.1'", + "version": "==4.4.2" + }, + "pyyaml": { + "hashes": [ + "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b", + "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf", + "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a", + "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3", + "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1", + "sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1", + "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613", + "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04", + "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f", + "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537", + "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531" + ], + "index": "pypi", + "version": "==3.13" + }, + "six": { + "hashes": [ + "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", + "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + ], + "version": "==1.11.0" + }, + "vcrpy": { + "hashes": [ + "sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3", + "sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f" + ], + "index": "pypi", + "version": "==2.0.1" + }, + "wrapt": { + "hashes": [ + "sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6" + ], + "version": "==1.10.11" + }, + "yarl": { + "hashes": [ + "sha256:2556b779125621b311844a072e0ed367e8409a18fa12cbd68eb1258d187820f9", + "sha256:4aec0769f1799a9d4496827292c02a7b1f75c0bab56ab2b60dd94ebb57cbd5ee", + "sha256:55369d95afaacf2fa6b49c84d18b51f1704a6560c432a0f9a1aeb23f7b971308", + 
"sha256:6c098b85442c8fe3303e708bbb775afd0f6b29f77612e8892627bcab4b939357", + "sha256:9182cd6f93412d32e009020a44d6d170d2093646464a88aeec2aef50592f8c78", + "sha256:c8cbc21bbfa1dd7d5386d48cc814fe3d35b80f60299cdde9279046f399c3b0d8", + "sha256:db6f70a4b09cde813a4807843abaaa60f3b15fb4a2a06f9ae9c311472662daa1", + "sha256:f17495e6fe3d377e3faac68121caef6f974fcb9e046bc075bcff40d8e5cc69a4", + "sha256:f85900b9cca0c67767bb61b2b9bd53208aaa7373dae633dbe25d179b4bf38aa7" + ], + "markers": "python_version >= '3.4'", + "version": "==1.2.6" + } + } +} diff --git a/qubes/securedrop.Proxy b/qubes/securedrop.Proxy old mode 100644 new mode 100755 diff --git a/requirements-build.txt b/requirements-build.txt new file mode 100644 index 000000000..594b6dc85 --- /dev/null +++ b/requirements-build.txt @@ -0,0 +1,10 @@ +orderedmultidict==1.0 --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 +idna==2.7 --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 +certifi==2018.8.24 --hash=sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638 --hash=sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a +requests==2.19.1 --hash=sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1 --hash=sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a +urllib3==1.23 --hash=sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf --hash=sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5 +werkzeug==0.14.1 --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b +pyyaml==3.13 --hash=sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b 
--hash=sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf --hash=sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a --hash=sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3 --hash=sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1 --hash=sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1 --hash=sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613 --hash=sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04 --hash=sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f --hash=sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537 --hash=sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531 +chardet==3.0.4 --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 +six==1.11.0 --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb +furl==1.2.1 --hash=sha256:17654103b8d0cbe42798592db099c728165ac12057d49fe2e69de967d87bf29b diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..be31b955b --- /dev/null +++ b/requirements.txt @@ -0,0 +1,10 @@ +./localwheels/Werkzeug-0.14.1-py2.py3-none-any.whl --hash=sha256:e224b7be15904cc4fbfdcd0d13dff5c7500d026e634af48c85766d5362fa7b36 +./localwheels/PyYAML-3.13-cp35-cp35m-linux_x86_64.whl --hash=sha256:da7175daaeff8a86a56cc398fb6628d3c3a6965f282789c2c9898e274e4691cd +./localwheels/chardet-3.0.4-py2.py3-none-any.whl --hash=sha256:e22445af14d9591037e995ee6c59a68b8d7df85b9a47b49a4b28d8601c405ff3 +./localwheels/requests-2.19.1-py2.py3-none-any.whl --hash=sha256:ed6aedaa095be7005b77b157936338883144d9b9c41cbe7665523a313343cca8 +./localwheels/idna-2.7-py2.py3-none-any.whl 
--hash=sha256:6e8569c478f0343de79c76418c80b7cd13dc93631ede79225dd1314bc8eaf2ed +./localwheels/urllib3-1.23-py2.py3-none-any.whl --hash=sha256:fc12686dea2604ce4967c45f6aabaf102016320020fd68175f4b0914e017aca5 +./localwheels/orderedmultidict-1.0-py3-none-any.whl --hash=sha256:0519055d46a45f8b44a950f97a8fa7c36f8c5adeb44f3f1a0910a674c95ffbc9 +./localwheels/furl-1.2.1-py3-none-any.whl --hash=sha256:af82dc925eb94ec60fd7ab06ab7ba11209d1dc40a7ee3fa8c9e9c08c6889c94f +./localwheels/six-1.11.0-py2.py3-none-any.whl --hash=sha256:c2dac6747d031463c7d5873d1ea5d43d4c20ce888f4762d457908f567997c670 +./localwheels/certifi-2018.8.24-py2.py3-none-any.whl --hash=sha256:d232833167262107f3aa658b00e1051d31a6ec095669710eeb8f8f8c888033f9 From 43ed2e33e65308a09faa7efb013efd5a3864b910 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Sat, 3 Nov 2018 02:42:24 +0530 Subject: [PATCH 036/352] Fixes #15 updates requirements.txt files To be used like below. pip3 install -r requirements.txt --index-url https://dev-bin.ops.securedrop.org/simple --- Pipfile.lock | 25 ++++++++++++------------- requirements-build.txt | 10 ---------- requirements-dev.txt | 7 +++++++ requirements.txt | 20 ++++++++++---------- 4 files changed, 29 insertions(+), 33 deletions(-) delete mode 100644 requirements-build.txt create mode 100644 requirements-dev.txt diff --git a/Pipfile.lock b/Pipfile.lock index fe9c5e31a..f2aee454c 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -18,10 +18,10 @@ "default": { "certifi": { "hashes": [ - "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638", - "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a" + "sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c", + "sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a" ], - "version": "==2018.8.24" + "version": "==2018.10.15" }, "chardet": { "hashes": [ @@ -32,10 +32,11 @@ }, "furl": { "hashes": [ - 
"sha256:17654103b8d0cbe42798592db099c728165ac12057d49fe2e69de967d87bf29b" + "sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018", + "sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec" ], "index": "pypi", - "version": "==1.2.1" + "version": "==2.0.0" }, "idna": { "hashes": [ @@ -70,11 +71,11 @@ }, "requests": { "hashes": [ - "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", - "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a" + "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", + "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" ], "index": "pypi", - "version": "==2.19.1" + "version": "==2.20.0" }, "six": { "hashes": [ @@ -85,11 +86,10 @@ }, "urllib3": { "hashes": [ - "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", - "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" + "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", + "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" ], - "markers": "python_version != '3.1.*' and python_version < '4' and python_version != '3.2.*' and python_version != '3.0.*' and python_version != '3.3.*' and python_version >= '2.6'", - "version": "==1.23" + "version": "==1.24.1" }, "werkzeug": { "hashes": [ @@ -140,7 +140,6 @@ "sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306", "sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd" ], - "markers": "python_version >= '3.4.1'", "version": "==4.4.2" }, "pyyaml": { diff --git a/requirements-build.txt b/requirements-build.txt deleted file mode 100644 index 594b6dc85..000000000 --- a/requirements-build.txt +++ /dev/null @@ -1,10 +0,0 @@ -orderedmultidict==1.0 --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 -idna==2.7 
--hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 -certifi==2018.8.24 --hash=sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638 --hash=sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a -requests==2.19.1 --hash=sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1 --hash=sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a -urllib3==1.23 --hash=sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf --hash=sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5 -werkzeug==0.14.1 --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b -pyyaml==3.13 --hash=sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b --hash=sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf --hash=sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a --hash=sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3 --hash=sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1 --hash=sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1 --hash=sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613 --hash=sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04 --hash=sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f --hash=sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537 --hash=sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531 -chardet==3.0.4 --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 -six==1.11.0 
--hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb -furl==1.2.1 --hash=sha256:17654103b8d0cbe42798592db099c728165ac12057d49fe2e69de967d87bf29b diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 000000000..fa6bd625f --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,7 @@ +idna==2.7 +multidict==4.4.2 ; python_version >= '3.4.1' +pyyaml==3.13 +six==1.11.0 +vcrpy==2.0.1 +wrapt==1.10.11 +yarl==1.2.6 ; python_version >= '3.4' diff --git a/requirements.txt b/requirements.txt index be31b955b..f2fcac3ab 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,10 @@ -./localwheels/Werkzeug-0.14.1-py2.py3-none-any.whl --hash=sha256:e224b7be15904cc4fbfdcd0d13dff5c7500d026e634af48c85766d5362fa7b36 -./localwheels/PyYAML-3.13-cp35-cp35m-linux_x86_64.whl --hash=sha256:da7175daaeff8a86a56cc398fb6628d3c3a6965f282789c2c9898e274e4691cd -./localwheels/chardet-3.0.4-py2.py3-none-any.whl --hash=sha256:e22445af14d9591037e995ee6c59a68b8d7df85b9a47b49a4b28d8601c405ff3 -./localwheels/requests-2.19.1-py2.py3-none-any.whl --hash=sha256:ed6aedaa095be7005b77b157936338883144d9b9c41cbe7665523a313343cca8 -./localwheels/idna-2.7-py2.py3-none-any.whl --hash=sha256:6e8569c478f0343de79c76418c80b7cd13dc93631ede79225dd1314bc8eaf2ed -./localwheels/urllib3-1.23-py2.py3-none-any.whl --hash=sha256:fc12686dea2604ce4967c45f6aabaf102016320020fd68175f4b0914e017aca5 -./localwheels/orderedmultidict-1.0-py3-none-any.whl --hash=sha256:0519055d46a45f8b44a950f97a8fa7c36f8c5adeb44f3f1a0910a674c95ffbc9 -./localwheels/furl-1.2.1-py3-none-any.whl --hash=sha256:af82dc925eb94ec60fd7ab06ab7ba11209d1dc40a7ee3fa8c9e9c08c6889c94f -./localwheels/six-1.11.0-py2.py3-none-any.whl --hash=sha256:c2dac6747d031463c7d5873d1ea5d43d4c20ce888f4762d457908f567997c670 -./localwheels/certifi-2018.8.24-py2.py3-none-any.whl 
--hash=sha256:d232833167262107f3aa658b00e1051d31a6ec095669710eeb8f8f8c888033f9 +certifi==2018.10.15 +chardet==3.0.4 +furl==2.0.0 +idna==2.7 +orderedmultidict==1.0 +pyyaml==3.13 +requests==2.20.0 +six==1.11.0 +urllib3==1.24.1 +werkzeug==0.14.1 From 62e1e8f52929fca424a46f191fedc0e383e13afc Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 7 Nov 2018 16:10:23 -0800 Subject: [PATCH 037/352] securedrop-proxy 0.1.1 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index dff5d4a80..2b462566a 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setuptools.setup( name="securedrop-proxy", - version="0.1.0", + version="0.1.1", author="Freedom of the Press Foundation", author_email="securedrop@freedom.press", description="SecureDrop Qubes proxy service", From 09865221c5d81335d1d21cba68c989869c5fe8ef Mon Sep 17 00:00:00 2001 From: mickael e Date: Tue, 13 Nov 2018 16:13:46 -0500 Subject: [PATCH 038/352] Pin library versions in Pipfile This will ensure pipenv lock command does not update package versions unless explicitly specified in the Pipfile. Since we have automatic security checks for vulnerabilities in python libraries, this will ensure more consistency when building and testing our packages, and avoid drift in our requirements.txt and Pipfile.lock files. 
--- Pipfile | 10 +++++----- Pipfile.lock | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Pipfile b/Pipfile index 3b58e620f..c87b2d468 100644 --- a/Pipfile +++ b/Pipfile @@ -7,10 +7,10 @@ verify_ssl = true python_version = "3.5" [packages] -furl = "*" -pyyaml = "*" -requests = "*" -werkzeug = "*" +furl = "==2.0.0" +pyyaml = "==3.13" +requests = "==2.20.0" +werkzeug = "==0.14.1" [dev-packages] -vcrpy = "*" +vcrpy = "==2.0.1" diff --git a/Pipfile.lock b/Pipfile.lock index f2aee454c..dc88d1339 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "58ba4d066d275717a5b571cfe5195a63f95cbfec1b2a12d1ba40ecfcb6d00c6d" + "sha256": "86be16c12e06dc67fd9e95334532a13dec41da47cee17103e2ee74de62d00ed1" }, "pipfile-spec": 6, "requires": { From 020df5524d07d500b4e5f071f81a9439578c3231 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 14 Nov 2018 15:23:43 -0800 Subject: [PATCH 039/352] Remove now unnecessary requirements-dev.txt requirements*.txt files are only used at build time. Since development requirements are not needed at build time, this file should not clutter up the repository. 
--- requirements-dev.txt | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 requirements-dev.txt diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index fa6bd625f..000000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,7 +0,0 @@ -idna==2.7 -multidict==4.4.2 ; python_version >= '3.4.1' -pyyaml==3.13 -six==1.11.0 -vcrpy==2.0.1 -wrapt==1.10.11 -yarl==1.2.6 ; python_version >= '3.4' From 4aad014be2fcdec341e39f4505b879c786667bee Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 16 Nov 2018 00:59:47 +0530 Subject: [PATCH 040/352] Fresh wheels's sha256sums from the s3 bucket --- requirements.txt | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/requirements.txt b/requirements.txt index f2fcac3ab..923665a2c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,10 @@ -certifi==2018.10.15 -chardet==3.0.4 -furl==2.0.0 -idna==2.7 -orderedmultidict==1.0 -pyyaml==3.13 -requests==2.20.0 -six==1.11.0 -urllib3==1.24.1 -werkzeug==0.14.1 +certifi==2018.10.15 --hash=sha256:a5471c55b011bd45d6155f5c3629310c1d2f1e1a5a899b7e438a223343de583d +chardet==3.0.4 --hash=sha256:9f178988ca4c86e8a319b51aac1185b6fe5192328eb5a163c286f4bf50b7b3d8 +furl==2.0.0 --hash=sha256:cc0eb8998dcc7c5b58bc8625891a9ff563e2765e112024fa3d1e3521481de8b6 +idna==2.7 --hash=sha256:954e65e127d0433a352981f43f291a438423d5b385ebf643c70fd740e0634111 +orderedmultidict==1.0 --hash=sha256:25489716d76d2cc8aa656bfb00cd40b6ca29d5e11ccde0db60c2b46ad52bb40a +pyyaml==3.13 --hash=sha256:59fa6f097310e25248d5aa1d7b7e619ea22dfaada67e9c65262457b3e5b3a5c8 +requests==2.20.0 --hash=sha256:2a539dd6af40a611f3b8eb3f99d3567781352ece1698b2fab42bf4c2218705b5 +six==1.11.0 --hash=sha256:4663c7a1dbed033cfb294f2d534bd6151c0698dc12ecabb4eaa3cb041d758528 +urllib3==1.24.1 --hash=sha256:ac4755b0ae019d670d5fb420c39cb531389ab6ca8b652e12f56259f5cbc0ce21 +werkzeug==0.14.1 --hash=sha256:177ea4248bf0475cbc060edb35a0bdcf6e6daeac9e1296de5ddb3493e5ec15b9 From 
1a674a407fd968b3bfe4037d4cbed2c291730885 Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 15 Nov 2018 16:10:58 -0500 Subject: [PATCH 041/352] Bump version to 0.1.2 --- securedrop_proxy/proxy.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 2108d1a48..770db4d57 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -18,7 +18,7 @@ def __init__(self, status): self.status = status self.body = None self.headers = {} - self.version = "0.1.1" + self.version = "0.1.2" class Proxy: diff --git a/setup.py b/setup.py index 2b462566a..940af036a 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setuptools.setup( name="securedrop-proxy", - version="0.1.1", + version="0.1.2", author="Freedom of the Press Foundation", author_email="securedrop@freedom.press", description="SecureDrop Qubes proxy service", From 5967991f2b9fe542263c67bbc08e8a995365be84 Mon Sep 17 00:00:00 2001 From: John Hensley Date: Mon, 15 Apr 2019 17:06:47 -0400 Subject: [PATCH 042/352] 0.1.3: Update PyYAML to 5.1 Also simplify package versioning with a VERSION file containing the version string, e.g. 0.1.3, which will be read in setup.py and securedrop_proxy/version.py. Now we just have to remember to update the package version in one place. Add a test making sure that the proxy reports the same version used in setup.py. 
--- MANIFEST.in | 11 ++++++----- Pipfile | 2 +- Pipfile.lock | 26 +++++++++++++------------- securedrop_proxy/VERSION | 1 + securedrop_proxy/proxy.py | 28 +++++++++++++++------------- securedrop_proxy/version.py | 3 +++ setup.py | 13 ++++++------- tests/test_proxy.py | 13 ++++++++++++- 8 files changed, 57 insertions(+), 40 deletions(-) create mode 100644 securedrop_proxy/VERSION create mode 100644 securedrop_proxy/version.py diff --git a/MANIFEST.in b/MANIFEST.in index 315d5e4ef..d097841f9 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,9 +1,10 @@ -include securedrop_proxy/*.py -include requirements.txt -include README.md include LICENSE -include setup.py include Pipfile include Pipfile.lock -include qubes/securedrop.Proxy +include README.md +include VERSION include config-example.yaml +include qubes/securedrop.Proxy +include requirements.txt +include securedrop_proxy/*.py +include setup.py diff --git a/Pipfile b/Pipfile index c87b2d468..df9a66bcb 100644 --- a/Pipfile +++ b/Pipfile @@ -8,7 +8,7 @@ python_version = "3.5" [packages] furl = "==2.0.0" -pyyaml = "==3.13" +pyyaml = ">=5.1,<6" requests = "==2.20.0" werkzeug = "==0.14.1" diff --git a/Pipfile.lock b/Pipfile.lock index dc88d1339..64e5dd6ff 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "86be16c12e06dc67fd9e95334532a13dec41da47cee17103e2ee74de62d00ed1" + "sha256": "30eddf2618171a8245e3e40f8c42d389e9d5e2b1e0246dbdd66d5e742cda0391" }, "pipfile-spec": 6, "requires": { @@ -54,20 +54,20 @@ }, "pyyaml": { "hashes": [ - "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b", - "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf", - "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a", - "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3", - "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1", - 
"sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1", - "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613", - "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04", - "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f", - "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537", - "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531" + "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c", + "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95", + "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2", + "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4", + "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad", + "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba", + "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1", + "sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e", + "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673", + "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13", + "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19" ], "index": "pypi", - "version": "==3.13" + "version": "==5.1" }, "requests": { "hashes": [ diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION new file mode 100644 index 000000000..b1e80bb24 --- /dev/null +++ b/securedrop_proxy/VERSION @@ -0,0 +1 @@ +0.1.3 diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 770db4d57..9e0f3a191 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -4,24 +4,26 @@ import json import werkzeug -class Req: +import securedrop_proxy.version as version + +class Req: def __init__(self): - self.method = '' - self.path_query = '' + self.method = "" + self.path_query = "" self.body = None self.headers = {} 
-class Response: +class Response: def __init__(self, status): self.status = status self.body = None self.headers = {} - self.version = "0.1.2" + self.version = version.version -class Proxy: +class Proxy: @staticmethod def _on_done(res): print(json.dumps(res.__dict__)) @@ -63,15 +65,15 @@ def prep_request(self): method = self.req.method if not self.valid_path(path): - self.simple_error(400, 'Path provided in request did not look valid') - raise ValueError('Path provided was invalid') + self.simple_error(400, "Path provided in request did not look valid") + raise ValueError("Path provided was invalid") try: url = furl.furl("{}://{}:{}/{}".format(scheme, host, port, path)) except Exception as e: - self.simple_error(500, 'Proxy error while generating URL to request') - raise ValueError('Error generating URL from provided values') + self.simple_error(500, "Proxy error while generating URL to request") + raise ValueError("Error generating URL from provided values") url.path.normalize() @@ -113,7 +115,7 @@ def handle_non_json_response(self): def handle_response(self): - ctype = werkzeug.http.parse_options_header(self._presp.headers['content-type']) + ctype = werkzeug.http.parse_options_header(self._presp.headers["content-type"]) if ctype[0] == "application/json": self.handle_json_response() @@ -128,8 +130,8 @@ def proxy(self): try: if self.on_save is None: - self.simple_error(400, 'Request callback is not set.') - raise ValueError('Request callback is not set.') + self.simple_error(400, "Request callback is not set.") + raise ValueError("Request callback is not set.") self.prep_request() s = requests.Session() diff --git a/securedrop_proxy/version.py b/securedrop_proxy/version.py new file mode 100644 index 000000000..430dd11dc --- /dev/null +++ b/securedrop_proxy/version.py @@ -0,0 +1,3 @@ +import pkgutil + +version = pkgutil.get_data("securedrop_proxy", "VERSION").decode("utf-8") diff --git a/setup.py b/setup.py index 940af036a..f8e2cc7e2 100644 --- a/setup.py +++ 
b/setup.py @@ -1,18 +1,21 @@ +import pkgutil import setuptools with open("README.md", "r") as fh: long_description = fh.read() +version = pkgutil.get_data("securedrop_proxy", "VERSION").decode("utf-8") + setuptools.setup( name="securedrop-proxy", - version="0.1.2", + version=version, author="Freedom of the Press Foundation", author_email="securedrop@freedom.press", description="SecureDrop Qubes proxy service", long_description=long_description, long_description_content_type="text/markdown", license="GPLv3+", - install_requires=["requests","furl", "pyyaml", "werkzeug"], + install_requires=["requests", "furl", "pyyaml", "werkzeug"], python_requires=">=3.5", url="https://github.com/freedomofpress/securedrop-proxy", packages=setuptools.find_packages(exclude=["docs", "tests"]), @@ -24,9 +27,5 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", ), - entry_points={ - 'console_scripts': [ - 'sd-proxy = securedrop_proxy.entrypoint:start', - ], - }, + entry_points={"console_scripts": ["sd-proxy = securedrop_proxy.entrypoint:start"]}, ) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 22aac3def..6278dce45 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -1,11 +1,11 @@ import json -import subprocess import vcr import unittest import uuid from securedrop_proxy import proxy from securedrop_proxy import config +from securedrop_proxy import version class TestProxyValidConfig(unittest.TestCase): @@ -25,6 +25,17 @@ def on_done(self, res): res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] res.headers['Content-Type'] = 'application/json' + def test_version(self): + req = proxy.Req() + req.method = 'GET' + req.path_query = '' + req.headers = {'Accept': 'application/json'} + + p = proxy.Proxy() + p.proxy() + + self.assertEqual(p.res.version, version.version) + def test_400_if_callback_not_set(self): req = proxy.Req() req.method = 'GET' From 5f092fbd9c3706a7d34800be6c348b0ef01db252 Mon Sep 17 00:00:00 2001 From: 
redshiftzero Date: Tue, 16 Apr 2019 17:34:43 -0700 Subject: [PATCH 043/352] followup from pyyaml update During review I missed that: 1. We had another set of pyyaml hashes in the Pipfile.lock that were not updated and, 2. We did not update the load -> safe_load call in the prod code. --- Pipfile.lock | 24 ++++++++++++------------ securedrop_proxy/config.py | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 64e5dd6ff..eda907903 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -144,20 +144,20 @@ }, "pyyaml": { "hashes": [ - "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b", - "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf", - "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a", - "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3", - "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1", - "sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1", - "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613", - "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04", - "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f", - "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537", - "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531" + "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c", + "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95", + "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2", + "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4", + "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad", + "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba", + "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1", + 
"sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e", + "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673", + "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13", + "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19" ], "index": "pypi", - "version": "==3.13" + "version": "==5.1" }, "six": { "hashes": [ diff --git a/securedrop_proxy/config.py b/securedrop_proxy/config.py index e5c133250..461edd5e8 100644 --- a/securedrop_proxy/config.py +++ b/securedrop_proxy/config.py @@ -15,7 +15,7 @@ def read_conf(conf_path, p): try: fh = open(conf_path, 'r') - conf_in = yaml.load(fh) + conf_in = yaml.safe_load(fh) except yaml.YAMLError: p.simple_error(500, 'YAML syntax error while reading configuration file {}'.format(conf_path)) p.on_done(p.res) From 0d9b4c6f2a2a63889cf6a20e00433703454961fd Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 25 Apr 2019 01:20:07 +0530 Subject: [PATCH 044/352] Adds new sha256sums for PyYAML 5.1 and new release Updates setup.py for new release of 0.1.3 It also adds the VERSION file as the right package data, and fixes the following error due to the newline in that file. 
/home/user/.local/lib/python3.5/site-packages/setuptools/dist.py:398: UserWarning: Normalizing '0.1.3 ' to '0.1.3' normalized_version, --- requirements.txt | 2 +- setup.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 923665a2c..8e2c8bb46 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ chardet==3.0.4 --hash=sha256:9f178988ca4c86e8a319b51aac1185b6fe5192328eb5a163c28 furl==2.0.0 --hash=sha256:cc0eb8998dcc7c5b58bc8625891a9ff563e2765e112024fa3d1e3521481de8b6 idna==2.7 --hash=sha256:954e65e127d0433a352981f43f291a438423d5b385ebf643c70fd740e0634111 orderedmultidict==1.0 --hash=sha256:25489716d76d2cc8aa656bfb00cd40b6ca29d5e11ccde0db60c2b46ad52bb40a -pyyaml==3.13 --hash=sha256:59fa6f097310e25248d5aa1d7b7e619ea22dfaada67e9c65262457b3e5b3a5c8 +pyyaml==5.1 --hash=sha256:b8d80623e9d4e348c59ea726ce3032a2eb15abca6a48d3828362d11c6014a0a7 requests==2.20.0 --hash=sha256:2a539dd6af40a611f3b8eb3f99d3567781352ece1698b2fab42bf4c2218705b5 six==1.11.0 --hash=sha256:4663c7a1dbed033cfb294f2d534bd6151c0698dc12ecabb4eaa3cb041d758528 urllib3==1.24.1 --hash=sha256:ac4755b0ae019d670d5fb420c39cb531389ab6ca8b652e12f56259f5cbc0ce21 diff --git a/setup.py b/setup.py index f8e2cc7e2..1e3f4ca6c 100644 --- a/setup.py +++ b/setup.py @@ -5,6 +5,7 @@ long_description = fh.read() version = pkgutil.get_data("securedrop_proxy", "VERSION").decode("utf-8") +version = version.strip() setuptools.setup( name="securedrop-proxy", @@ -19,6 +20,9 @@ python_requires=">=3.5", url="https://github.com/freedomofpress/securedrop-proxy", packages=setuptools.find_packages(exclude=["docs", "tests"]), + package_data={ + 'securedrop_proxy': ['VERSION'], + }, classifiers=( "Development Status :: 3 - Alpha", "Programming Language :: Python :: 3", From cc64c089f2fbd442f953ae6535c68de807b6b60e Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 14 May 2019 16:52:45 -0700 Subject: [PATCH 045/352] requirements: requirements.txt -> 
build-requirements.txt --- requirements.txt => build-requirements.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename requirements.txt => build-requirements.txt (100%) diff --git a/requirements.txt b/build-requirements.txt similarity index 100% rename from requirements.txt rename to build-requirements.txt From 40635b6dab055e2fa9a4d8f16015e7b47e6daf2c Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 14 May 2019 17:02:04 -0700 Subject: [PATCH 046/352] requirements: Pipenv -> pip-tools --- .circleci/config.yml | 10 ++- Makefile | 43 +++++++++- Pipfile | 16 ---- Pipfile.lock | 199 ------------------------------------------- README.md | 14 ++- dev-requirements.in | 9 ++ dev-requirements.txt | 110 ++++++++++++++++++++++++ entrypoint.sh | 6 +- requirements.in | 9 ++ requirements.txt | 46 ++++++++++ 10 files changed, 237 insertions(+), 225 deletions(-) delete mode 100644 Pipfile delete mode 100644 Pipfile.lock create mode 100644 dev-requirements.in create mode 100644 dev-requirements.txt create mode 100644 requirements.in create mode 100644 requirements.txt diff --git a/.circleci/config.yml b/.circleci/config.yml index f752adb5b..7a7e5b79f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,10 +9,14 @@ jobs: - run: name: Install requirements and run tests command: | - pipenv install --dev - pipenv run python -m unittest -v + virtualenv .venv + source .venv/bin/activate + pip install --require-hashes -r dev-requirements.txt + make test - run: name: Check Python dependencies for CVEs command: | - pipenv check + set -e + source .venv/bin/activate + make safety diff --git a/Makefile b/Makefile index cdffb28c4..9b894c3b6 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,41 @@ -install: - pipenv install - sudo cp qubes/securedrop.Proxy /etc/qubes-rpc/securedrop.Proxy +# Bandit is a static code analysis tool to detect security vulnerabilities in Python applications +# https://wiki.openstack.org/wiki/Security/Projects/Bandit +.PHONY: bandit +bandit: 
## Run bandit with medium level excluding test-related folders + pip install --upgrade pip && \ + pip install --upgrade bandit!=1.6.0 && \ + bandit -ll --recursive . --exclude tests,.venv +.PHONY: safety +safety: ## Runs `safety check` to check python dependencies for vulnerabilities + pip install --upgrade safety && \ + for req_file in `find . -type f -name '*requirements.txt'`; do \ + echo "Checking file $$req_file" \ + && safety check --full-report -r $$req_file \ + && echo -e '\n' \ + || exit 1; \ + done + +.PHONY: update-pip-requirements +update-pip-requirements: ## Updates all Python requirements files via pip-compile. + pip-compile --generate-hashes --output-file dev-requirements.txt dev-requirements.in requirements.in + pip-compile --generate-hashes --output-file requirements.txt requirements.in + +.PHONY: test test: - pipenv run python -m unittest -v + python -m unittest -v + +# Explaination of the below shell command should it ever break. +# 1. Set the field separator to ": ##" and any make targets that might appear between : and ## +# 2. Use sed-like syntax to remove the make targets +# 3. Format the split fields into $$1) the target name (in blue) and $$2) the target descrption +# 4. Pass this file as an arg to awk +# 5. Sort it alphabetically +# 6. Format columns with colon as delimiter. +.PHONY: help +help: ## Print this message and exit. 
+ @printf "Makefile for developing and testing the SecureDrop proxy.\n" + @printf "Subcommands:\n\n" + @awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST) \ + | sort \ + | column -s ':' -t diff --git a/Pipfile b/Pipfile deleted file mode 100644 index df9a66bcb..000000000 --- a/Pipfile +++ /dev/null @@ -1,16 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -name = "pypi" -verify_ssl = true - -[requires] -python_version = "3.5" - -[packages] -furl = "==2.0.0" -pyyaml = ">=5.1,<6" -requests = "==2.20.0" -werkzeug = "==0.14.1" - -[dev-packages] -vcrpy = "==2.0.1" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index eda907903..000000000 --- a/Pipfile.lock +++ /dev/null @@ -1,199 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "30eddf2618171a8245e3e40f8c42d389e9d5e2b1e0246dbdd66d5e742cda0391" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.5" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "certifi": { - "hashes": [ - "sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c", - "sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a" - ], - "version": "==2018.10.15" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "furl": { - "hashes": [ - "sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018", - "sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec" - ], - "index": "pypi", - "version": "==2.0.0" - }, - "idna": { - "hashes": [ - "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", - "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" - ], - "version": "==2.7" - }, - "orderedmultidict": { - "hashes": [ - 
"sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b", - "sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5" - ], - "version": "==1.0" - }, - "pyyaml": { - "hashes": [ - "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c", - "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95", - "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2", - "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4", - "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad", - "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba", - "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1", - "sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e", - "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673", - "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13", - "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19" - ], - "index": "pypi", - "version": "==5.1" - }, - "requests": { - "hashes": [ - "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", - "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" - ], - "index": "pypi", - "version": "==2.20.0" - }, - "six": { - "hashes": [ - "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", - "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" - ], - "version": "==1.11.0" - }, - "urllib3": { - "hashes": [ - "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", - "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" - ], - "version": "==1.24.1" - }, - "werkzeug": { - "hashes": [ - "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c", - "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b" - ], - "index": "pypi", - "version": 
"==0.14.1" - } - }, - "develop": { - "idna": { - "hashes": [ - "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", - "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" - ], - "version": "==2.7" - }, - "multidict": { - "hashes": [ - "sha256:05eeab69bf2b0664644c62bd92fabb045163e5b8d4376a31dfb52ce0210ced7b", - "sha256:0c85880efa7cadb18e3b5eef0aa075dc9c0a3064cbbaef2e20be264b9cf47a64", - "sha256:136f5a4a6a4adeacc4dc820b8b22f0a378fb74f326e259c54d1817639d1d40a0", - "sha256:14906ad3347c7d03e9101749b16611cf2028547716d0840838d3c5e2b3b0f2d3", - "sha256:1ade4a3b71b1bf9e90c5f3d034a87fe4949c087ef1f6cd727fdd766fe8bbd121", - "sha256:22939a00a511a59f9ecc0158b8db728afef57975ce3782b3a265a319d05b9b12", - "sha256:2b86b02d872bc5ba5b3a4530f6a7ba0b541458ab4f7c1429a12ac326231203f7", - "sha256:3c11e92c3dfc321014e22fb442bc9eb70e01af30d6ce442026b0c35723448c66", - "sha256:4ba3bd26f282b201fdbce351f1c5d17ceb224cbedb73d6e96e6ce391b354aacc", - "sha256:4c6e78d042e93751f60672989efbd6a6bc54213ed7ff695fff82784bbb9ea035", - "sha256:4d80d1901b89cc935a6cf5b9fd89df66565272722fe2e5473168927a9937e0ca", - "sha256:4fcf71d33178a00cc34a57b29f5dab1734b9ce0f1c97fb34666deefac6f92037", - "sha256:52f7670b41d4b4d97866ebc38121de8bcb9813128b7c4942b07794d08193c0ab", - "sha256:5368e2b7649a26b7253c6c9e53241248aab9da49099442f5be238fde436f18c9", - "sha256:5bb65fbb48999044938f0c0508e929b14a9b8bf4939d8263e9ea6691f7b54663", - "sha256:60672bb5577472800fcca1ac9dae232d1461db9f20f055184be8ce54b0052572", - "sha256:669e9be6d148fc0283f53e17dd140cde4dc7c87edac8319147edd5aa2a830771", - "sha256:6a0b7a804e8d1716aa2c72e73210b48be83d25ba9ec5cf52cf91122285707bb1", - "sha256:79034ea3da3cf2a815e3e52afdc1f6c1894468c98bdce5d2546fa2342585497f", - "sha256:79247feeef6abcc11137ad17922e865052f23447152059402fc320f99ff544bb", - "sha256:81671c2049e6bf42c7fd11a060f8bc58f58b7b3d6f3f951fc0b15e376a6a5a98", - "sha256:82ac4a5cb56cc9280d4ae52c2d2ebcd6e0668dd0f9ef17f0a9d7c82bd61e24fa", - 
"sha256:9436267dbbaa49dad18fbbb54f85386b0f5818d055e7b8e01d219661b6745279", - "sha256:94e4140bb1343115a1afd6d84ebf8fca5fb7bfb50e1c2cbd6f2fb5d3117ef102", - "sha256:a2cab366eae8a0ffe0813fd8e335cf0d6b9bb6c5227315f53bb457519b811537", - "sha256:a596019c3eafb1b0ae07db9f55a08578b43c79adb1fe1ab1fd818430ae59ee6f", - "sha256:e8848ae3cd6a784c29fae5055028bee9bffcc704d8bcad09bd46b42b44a833e2", - "sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306", - "sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd" - ], - "version": "==4.4.2" - }, - "pyyaml": { - "hashes": [ - "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c", - "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95", - "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2", - "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4", - "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad", - "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba", - "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1", - "sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e", - "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673", - "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13", - "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19" - ], - "index": "pypi", - "version": "==5.1" - }, - "six": { - "hashes": [ - "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", - "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" - ], - "version": "==1.11.0" - }, - "vcrpy": { - "hashes": [ - "sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3", - "sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f" - ], - "index": "pypi", - "version": "==2.0.1" - }, - "wrapt": { - "hashes": [ - 
"sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6" - ], - "version": "==1.10.11" - }, - "yarl": { - "hashes": [ - "sha256:2556b779125621b311844a072e0ed367e8409a18fa12cbd68eb1258d187820f9", - "sha256:4aec0769f1799a9d4496827292c02a7b1f75c0bab56ab2b60dd94ebb57cbd5ee", - "sha256:55369d95afaacf2fa6b49c84d18b51f1704a6560c432a0f9a1aeb23f7b971308", - "sha256:6c098b85442c8fe3303e708bbb775afd0f6b29f77612e8892627bcab4b939357", - "sha256:9182cd6f93412d32e009020a44d6d170d2093646464a88aeec2aef50592f8c78", - "sha256:c8cbc21bbfa1dd7d5386d48cc814fe3d35b80f60299cdde9279046f399c3b0d8", - "sha256:db6f70a4b09cde813a4807843abaaa60f3b15fb4a2a06f9ae9c311472662daa1", - "sha256:f17495e6fe3d377e3faac68121caef6f974fcb9e046bc075bcff40d8e5cc69a4", - "sha256:f85900b9cca0c67767bb61b2b9bd53208aaa7373dae633dbe25d179b4bf38aa7" - ], - "markers": "python_version >= '3.4'", - "version": "==1.2.6" - } - } -} diff --git a/README.md b/README.md index f742855f0..b4e253b78 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,17 @@ be poked at and demonstrated. To try the proxy script, first use `pipenv` to create an environment and install requirements. In the root of the project directory, run - pipenv install +``` +virtualenv .venv +source .venv/bin/activate +pip install --require-hashes -r dev-requirements.txt +``` + +#### Update dependencies + +To add or update a dependency, modify either `dev-requirements.in` and `requirements.in` and then run `make update-pip-dependencies`. This will generate `dev-requirements.txt` and `requirements.txt`. + +**IMPORTANT:** Do not modify `build-requirements.txt` during normal development. We use a pip mirror for our build process and the hashes in that file point to wheels on our mirror. #### configuration @@ -90,7 +100,7 @@ this documentation, we assume the client is running in Edit `qubes/securedrop.Proxy` to reflect the path to `entrypoint.sh` in this repo. Also edit the directory to this repo code in `entrypoint.sh`. 
-Next, run `make install`, which will move `securedrop.Proxy` +Next, run `sudo cp qubes/securedrop.Proxy /etc/qubes-rpc/securedrop.Proxy`, which will move `securedrop.Proxy` (the qubes-rpc "server path definition" file) into place in `/etc/qubes-rpc/`. diff --git a/dev-requirements.in b/dev-requirements.in new file mode 100644 index 000000000..4b9bd528d --- /dev/null +++ b/dev-requirements.in @@ -0,0 +1,9 @@ +flake8==3.5.0 +multidict==4.4.2 +pip-tools==3.1.0 +pycodestyle==2.3.1 +pyflakes==1.6.0 +six==1.11.0 +vcrpy==2.0.1 +wrapt==1.10.11 +yarl==1.2.6 diff --git a/dev-requirements.txt b/dev-requirements.txt new file mode 100644 index 000000000..db5afdafe --- /dev/null +++ b/dev-requirements.txt @@ -0,0 +1,110 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes --output-file dev-requirements.txt dev-requirements.in requirements.in +# +certifi==2018.10.15 \ + --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ + --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a +chardet==3.0.4 \ + --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 +click==7.0 \ + --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ + --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ + # via pip-tools +flake8==3.5.0 \ + --hash=sha256:7253265f7abd8b313e3892944044a365e3f4ac3fcdcfb4298f55ee9ddf188ba0 \ + --hash=sha256:c7841163e2b576d435799169b78703ad6ac1bbb0f199994fc05f700b2a90ea37 +furl==2.0.0 \ + --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ + --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec +idna==2.7 \ + --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ + 
--hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 +mccabe==0.6.1 \ + --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ + # via flake8 +multidict==4.4.2 \ + --hash=sha256:05eeab69bf2b0664644c62bd92fabb045163e5b8d4376a31dfb52ce0210ced7b \ + --hash=sha256:0c85880efa7cadb18e3b5eef0aa075dc9c0a3064cbbaef2e20be264b9cf47a64 \ + --hash=sha256:136f5a4a6a4adeacc4dc820b8b22f0a378fb74f326e259c54d1817639d1d40a0 \ + --hash=sha256:14906ad3347c7d03e9101749b16611cf2028547716d0840838d3c5e2b3b0f2d3 \ + --hash=sha256:1ade4a3b71b1bf9e90c5f3d034a87fe4949c087ef1f6cd727fdd766fe8bbd121 \ + --hash=sha256:22939a00a511a59f9ecc0158b8db728afef57975ce3782b3a265a319d05b9b12 \ + --hash=sha256:2b86b02d872bc5ba5b3a4530f6a7ba0b541458ab4f7c1429a12ac326231203f7 \ + --hash=sha256:3c11e92c3dfc321014e22fb442bc9eb70e01af30d6ce442026b0c35723448c66 \ + --hash=sha256:4ba3bd26f282b201fdbce351f1c5d17ceb224cbedb73d6e96e6ce391b354aacc \ + --hash=sha256:4c6e78d042e93751f60672989efbd6a6bc54213ed7ff695fff82784bbb9ea035 \ + --hash=sha256:4d80d1901b89cc935a6cf5b9fd89df66565272722fe2e5473168927a9937e0ca \ + --hash=sha256:4fcf71d33178a00cc34a57b29f5dab1734b9ce0f1c97fb34666deefac6f92037 \ + --hash=sha256:52f7670b41d4b4d97866ebc38121de8bcb9813128b7c4942b07794d08193c0ab \ + --hash=sha256:5368e2b7649a26b7253c6c9e53241248aab9da49099442f5be238fde436f18c9 \ + --hash=sha256:5bb65fbb48999044938f0c0508e929b14a9b8bf4939d8263e9ea6691f7b54663 \ + --hash=sha256:60672bb5577472800fcca1ac9dae232d1461db9f20f055184be8ce54b0052572 \ + --hash=sha256:669e9be6d148fc0283f53e17dd140cde4dc7c87edac8319147edd5aa2a830771 \ + --hash=sha256:6a0b7a804e8d1716aa2c72e73210b48be83d25ba9ec5cf52cf91122285707bb1 \ + --hash=sha256:79034ea3da3cf2a815e3e52afdc1f6c1894468c98bdce5d2546fa2342585497f \ + --hash=sha256:79247feeef6abcc11137ad17922e865052f23447152059402fc320f99ff544bb \ + 
--hash=sha256:81671c2049e6bf42c7fd11a060f8bc58f58b7b3d6f3f951fc0b15e376a6a5a98 \ + --hash=sha256:82ac4a5cb56cc9280d4ae52c2d2ebcd6e0668dd0f9ef17f0a9d7c82bd61e24fa \ + --hash=sha256:9436267dbbaa49dad18fbbb54f85386b0f5818d055e7b8e01d219661b6745279 \ + --hash=sha256:94e4140bb1343115a1afd6d84ebf8fca5fb7bfb50e1c2cbd6f2fb5d3117ef102 \ + --hash=sha256:a2cab366eae8a0ffe0813fd8e335cf0d6b9bb6c5227315f53bb457519b811537 \ + --hash=sha256:a596019c3eafb1b0ae07db9f55a08578b43c79adb1fe1ab1fd818430ae59ee6f \ + --hash=sha256:e8848ae3cd6a784c29fae5055028bee9bffcc704d8bcad09bd46b42b44a833e2 \ + --hash=sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306 \ + --hash=sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd +orderedmultidict==1.0 \ + --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ + --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 +pip-tools==3.1.0 \ + --hash=sha256:31b43e5f8d605fc84f7506199025460abcb98a29d12cc99db268f73e39cf55e5 \ + --hash=sha256:b1ceca03b4a48346b2f6870565abb09d8d257d5b1524b4c6b222185bf26c3870 +pycodestyle==2.3.1 \ + --hash=sha256:682256a5b318149ca0d2a9185d365d8864a768a28db66a84a2ea946bcc426766 \ + --hash=sha256:6c4245ade1edfad79c3446fadfc96b0de2759662dc29d07d80a6f27ad1ca6ba9 +pyflakes==1.6.0 \ + --hash=sha256:08bd6a50edf8cffa9fa09a463063c425ecaaf10d1eb0335a7e8b1401aef89e6f \ + --hash=sha256:8d616a382f243dbf19b54743f280b80198be0bca3a5396f1d2e1fca6223e8805 +pyyaml==5.1 \ + --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \ + --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \ + --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \ + --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \ + --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \ + --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \ + 
--hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \ + --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \ + --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \ + --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \ + --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 +requests==2.20.0 \ + --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ + --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 +six==1.11.0 \ + --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ + --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb +urllib3==1.24.1 \ + --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ + --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 +vcrpy==2.0.1 \ + --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \ + --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f +werkzeug==0.14.1 \ + --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ + --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b +wrapt==1.10.11 \ + --hash=sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6 +yarl==1.2.6 \ + --hash=sha256:2556b779125621b311844a072e0ed367e8409a18fa12cbd68eb1258d187820f9 \ + --hash=sha256:4aec0769f1799a9d4496827292c02a7b1f75c0bab56ab2b60dd94ebb57cbd5ee \ + --hash=sha256:55369d95afaacf2fa6b49c84d18b51f1704a6560c432a0f9a1aeb23f7b971308 \ + --hash=sha256:6c098b85442c8fe3303e708bbb775afd0f6b29f77612e8892627bcab4b939357 \ + --hash=sha256:9182cd6f93412d32e009020a44d6d170d2093646464a88aeec2aef50592f8c78 \ + --hash=sha256:c8cbc21bbfa1dd7d5386d48cc814fe3d35b80f60299cdde9279046f399c3b0d8 \ + 
--hash=sha256:db6f70a4b09cde813a4807843abaaa60f3b15fb4a2a06f9ae9c311472662daa1 \ + --hash=sha256:f17495e6fe3d377e3faac68121caef6f974fcb9e046bc075bcff40d8e5cc69a4 \ + --hash=sha256:f85900b9cca0c67767bb61b2b9bd53208aaa7373dae633dbe25d179b4bf38aa7 diff --git a/entrypoint.sh b/entrypoint.sh index 35815f05b..7ff5723d5 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -1,4 +1,8 @@ #!/bin/sh cd /home/user/projects/securedrop-proxy -pipenv run ./sd-proxy.py ./config.yaml +virtualenv .venv +source .venv/bin/activate +pip install --require-hashes -r requirements.txt +pip install --require-hashes -r dev-requirements.txt +./sd-proxy.py ./config.yaml diff --git a/requirements.in b/requirements.in new file mode 100644 index 000000000..ed7f5c309 --- /dev/null +++ b/requirements.in @@ -0,0 +1,9 @@ +certifi==2018.10.15 +chardet==3.0.4 +furl==2.0.0 +idna==2.7 +orderedmultidict==1.0 +PyYAML==5.1 +requests==2.20.0 +urllib3==1.24.1 +Werkzeug==0.14.1 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..f9ec42579 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,46 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes --output-file requirements.txt requirements.in +# +certifi==2018.10.15 \ + --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ + --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a +chardet==3.0.4 \ + --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 +furl==2.0.0 \ + --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ + --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec +idna==2.7 \ + --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ + --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 +orderedmultidict==1.0 \ + 
--hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ + --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 +pyyaml==5.1 \ + --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \ + --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \ + --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \ + --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \ + --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \ + --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \ + --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \ + --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \ + --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \ + --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \ + --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 +requests==2.20.0 \ + --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ + --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 +six==1.12.0 \ + --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ + --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ + # via furl, orderedmultidict +urllib3==1.24.1 \ + --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ + --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 +werkzeug==0.14.1 \ + --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ + --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b From f946fe38f4dc0b4b71cff3ea8d318fcca1599c09 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 15 May 2019 12:55:01 -0700 Subject: [PATCH 
047/352] security updates: urllib3==1.24.3 urllib>=1.24.2 due to CVE-2019-11324 --- dev-requirements.txt | 6 +++--- requirements.in | 2 +- requirements.txt | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index db5afdafe..aeb1983f6 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -87,9 +87,9 @@ requests==2.20.0 \ six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb -urllib3==1.24.1 \ - --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ - --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 +urllib3==1.24.3 \ + --hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ + --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb vcrpy==2.0.1 \ --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \ --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f diff --git a/requirements.in b/requirements.in index ed7f5c309..6a3de347c 100644 --- a/requirements.in +++ b/requirements.in @@ -5,5 +5,5 @@ idna==2.7 orderedmultidict==1.0 PyYAML==5.1 requests==2.20.0 -urllib3==1.24.1 +urllib3==1.24.3 Werkzeug==0.14.1 diff --git a/requirements.txt b/requirements.txt index f9ec42579..d98ecc1b0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -38,9 +38,9 @@ six==1.12.0 \ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ # via furl, orderedmultidict -urllib3==1.24.1 \ - --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ - --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 +urllib3==1.24.3 \ + 
--hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ + --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb werkzeug==0.14.1 \ --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b From 5625bb19ef2583ff9f8051e8532db5a71591a030 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 17 May 2019 13:13:32 -0700 Subject: [PATCH 048/352] update MANIFEST.in for packaging --- MANIFEST.in | 2 -- README.md | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index d097841f9..65ba732ea 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,4 @@ include LICENSE -include Pipfile -include Pipfile.lock include README.md include VERSION include config-example.yaml diff --git a/README.md b/README.md index b4e253b78..0207698be 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ be poked at and demonstrated. #### install requirements -To try the proxy script, first use `pipenv` to create an environment +To try the proxy script, first use `venv` to create an environment and install requirements. 
In the root of the project directory, run ``` From d51a259e615af2cb71a800d77df6b446b97d3a32 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 17 May 2019 21:39:47 -0700 Subject: [PATCH 049/352] requirements: keep six consistent between dev/prod --- requirements.in | 1 + requirements.txt | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements.in b/requirements.in index 6a3de347c..15019974a 100644 --- a/requirements.in +++ b/requirements.in @@ -4,6 +4,7 @@ furl==2.0.0 idna==2.7 orderedmultidict==1.0 PyYAML==5.1 +six==1.11.0 requests==2.20.0 urllib3==1.24.3 Werkzeug==0.14.1 diff --git a/requirements.txt b/requirements.txt index d98ecc1b0..0cbecb33e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,10 +34,9 @@ pyyaml==5.1 \ requests==2.20.0 \ --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 -six==1.12.0 \ - --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ - --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ - # via furl, orderedmultidict +six==1.11.0 \ + --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ + --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb urllib3==1.24.3 \ --hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb From 1279fe4068a43f71521d3ea127c82d7fa3eb6b05 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 14 Jun 2019 09:35:16 -0700 Subject: [PATCH 050/352] manifest: fix path to VERSION --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 65ba732ea..d7bfd51bc 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ include LICENSE include README.md -include VERSION +include securedrop_proxy/VERSION include 
config-example.yaml include qubes/securedrop.Proxy include requirements.txt From 6601488186f60ad36edbe679d79d6164f2adbc03 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 14 Jun 2019 09:38:03 -0700 Subject: [PATCH 051/352] build-requirements.txt: update urllib to 1.24.3 --- build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-requirements.txt b/build-requirements.txt index 8e2c8bb46..5d37ab853 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -6,5 +6,5 @@ orderedmultidict==1.0 --hash=sha256:25489716d76d2cc8aa656bfb00cd40b6ca29d5e11ccd pyyaml==5.1 --hash=sha256:b8d80623e9d4e348c59ea726ce3032a2eb15abca6a48d3828362d11c6014a0a7 requests==2.20.0 --hash=sha256:2a539dd6af40a611f3b8eb3f99d3567781352ece1698b2fab42bf4c2218705b5 six==1.11.0 --hash=sha256:4663c7a1dbed033cfb294f2d534bd6151c0698dc12ecabb4eaa3cb041d758528 -urllib3==1.24.1 --hash=sha256:ac4755b0ae019d670d5fb420c39cb531389ab6ca8b652e12f56259f5cbc0ce21 +urllib3==1.24.3 --hash=sha256:028309393606e28e640e2031edd27eb969c94f9364b0871912608aaa8e66c96e werkzeug==0.14.1 --hash=sha256:177ea4248bf0475cbc060edb35a0bdcf6e6daeac9e1296de5ddb3493e5ec15b9 From d66d48af84c14e66762c663b54c026c8e7738762 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 14 Jun 2019 13:18:53 -0700 Subject: [PATCH 052/352] manifest: add build-requirements.txt (must be in source tarball) --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index d7bfd51bc..0c0916b5a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,6 +3,7 @@ include README.md include securedrop_proxy/VERSION include config-example.yaml include qubes/securedrop.Proxy +include build-requirements.txt include requirements.txt include securedrop_proxy/*.py include setup.py From 46443b7d03810403eefb2481a0446b9b8517c6da Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 14 Jun 2019 09:38:42 -0700 Subject: [PATCH 053/352] securedrop-proxy 0.1.4 --- securedrop_proxy/VERSION | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index b1e80bb24..845639eef 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.1.3 +0.1.4 From 11e772e927a8e73415a15a6468b24cbc3b711571 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 17 Jun 2019 11:24:17 -0700 Subject: [PATCH 054/352] README: update dependency update policies and instructions --- README.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 0207698be..a6d5ddb73 100644 --- a/README.md +++ b/README.md @@ -33,9 +33,16 @@ pip install --require-hashes -r dev-requirements.txt #### Update dependencies -To add or update a dependency, modify either `dev-requirements.in` and `requirements.in` and then run `make update-pip-dependencies`. This will generate `dev-requirements.txt` and `requirements.txt`. +If you're adding or updating a dependency, you need to: -**IMPORTANT:** Do not modify `build-requirements.txt` during normal development. We use a pip mirror for our build process and the hashes in that file point to wheels on our mirror. +1. Modify either `dev-requirements.in` and `requirements.in` (depending on whether it is prod or dev only) and then run `make update-pip-dependencies`. This will generate `dev-requirements.txt` and `requirements.txt`. + +2. For building a debian package from this project, we use the requirements in +`build-requirements.txt` which uses our pip mirror, i.e. the hashes in that file point to +wheels on our pip mirror. A maintainer will need to add +the updated dependency to our pip mirror (you can request this in the PR). + +3. Once the pip mirror is updated, you should checkout the [securedrop-debian-packaging repo](https://github.com/freedomofpress/securedrop-debian-packaging) and run `make requirements`. Commit the `build-requirements.txt` that results and add it to your PR. 
#### configuration From a1116954a95ff44b75851cd5b39719bb28293d9f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 17 Jun 2019 11:26:21 -0700 Subject: [PATCH 055/352] ci: make sure we can build debian package on diff in PR --- .circleci/config.yml | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7a7e5b79f..57cbbe098 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,35 @@ version: 2 jobs: build: + docker: + - image: circleci/python:3.5-stretch + steps: + - checkout + + - run: + name: Install Debian packaging dependencies and download wheels + command: | + mkdir ~/packaging && cd ~/packaging + git clone https://github.com/freedomofpress/securedrop-debian-packaging.git + cd securedrop-debian-packaging + make install-deps && make fetch-wheels + + - run: + name: Tag and make source tarball + command: | + cd ~/project + ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here + python3 setup.py sdist + + - run: + name: Build debian package + command: | + cd ~/packaging/securedrop-debian-packaging + export PKG_VERSION=1000.0 + export PKG_PATH=~/project/dist/securedrop-proxy-$PKG_VERSION.tar.gz + make securedrop-proxy + + test: docker: - image: circleci/python:3.5 steps: @@ -20,3 +49,10 @@ jobs: set -e source .venv/bin/activate make safety + +workflows: + version: 2 + securedrop_proxy_ci: + jobs: + - test + - build From 082e174fcb179d965b2af06d8c46597e94e6e5bc Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 18 Jun 2019 10:54:21 -0700 Subject: [PATCH 056/352] update_version.sh: add update version script this intentionally does not commit and tag as I want to run non-interactively --- update_version.sh | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100755 update_version.sh diff --git a/update_version.sh b/update_version.sh new file mode 100755 index 000000000..7e027d202 --- /dev/null +++ 
b/update_version.sh @@ -0,0 +1,27 @@ +#!/bin/bash +## Usage: ./update_version.sh + +set -e + +readonly NEW_VERSION=$1 + +if [ -z "$NEW_VERSION" ]; then + echo "You must specify the new version!" + exit 1 +fi + +# Get the old version from securedrop_proxy/VERSION +OLD_VERSION=$(cat securedrop_proxy/VERSION) + +if [ -z "$OLD_VERSION" ]; then + echo "Couldn't find the old version: does this script need to be updated?" + exit 1 +fi + +# Update the version in securedrop_proxy/VERSION +if [[ "$OSTYPE" == "darwin"* ]]; then + # The empty '' after sed -i is required on macOS to indicate no backup file should be saved. + sed -i '' "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_proxy/VERSION +else + sed -i "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_proxy/VERSION +fi From b121caa4a5567f28f902eea331116cdf222637d1 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 18 Jun 2019 11:31:53 -0700 Subject: [PATCH 057/352] README: add alpha release management guide --- README.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.md b/README.md index a6d5ddb73..6130263aa 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,15 @@ the updated dependency to our pip mirror (you can request this in the PR). 3. Once the pip mirror is updated, you should checkout the [securedrop-debian-packaging repo](https://github.com/freedomofpress/securedrop-debian-packaging) and run `make requirements`. Commit the `build-requirements.txt` that results and add it to your PR. +## Making a Release + +**Note:** These are the release guidelines for pre-production alpha releases. Production release tags must be signed with the SecureDrop release key. + + 1. Update versions: `./update_version.sh $new_version_number`. +2. Commit the changes with commit message `securedrop-proxy $new_version_number` and make a PR. +3. 
You should confirm via a manual debian package build and manual testing in Qubes that there are no regressions (this is limited pre-release QA). +4. Once your PR is approved, you can add a tag and push: `git tag $new_version_number`. + #### configuration The proxy script must be run with the path to its configuration file From c1f7666604ea2284f04e678ddfdecfc360ac8286 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 26 Jun 2019 11:57:43 -0700 Subject: [PATCH 058/352] ci: fail build if build requirements need update --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 57cbbe098..0ec6d2a5f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -13,6 +13,7 @@ jobs: git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd securedrop-debian-packaging make install-deps && make fetch-wheels + PKG_DIR=~/project make requirements - run: name: Tag and make source tarball From 7af5102ba1f513183c81c1530d597662768e8b56 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 5 Jul 2019 10:25:59 -0700 Subject: [PATCH 059/352] add changelog file --- changelog.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.md diff --git a/changelog.md b/changelog.md new file mode 100644 index 000000000..825c32f0d --- /dev/null +++ b/changelog.md @@ -0,0 +1 @@ +# Changelog From 10ef74f965be41a53da754d3b5b410e9d46f76c6 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 5 Jul 2019 10:38:56 -0700 Subject: [PATCH 060/352] update release instructions to include changelog --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 6130263aa..ee927197e 100644 --- a/README.md +++ b/README.md @@ -48,11 +48,12 @@ the updated dependency to our pip mirror (you can request this in the PR). **Note:** These are the release guidelines for pre-production alpha releases. 
Production release tags must be signed with the SecureDrop release key. - 1. Update versions: `./update_version.sh $new_version_number`. +1. Update versions: `./update_version.sh $new_version_number` and add a new entry in the changelog. 2. Commit the changes with commit message `securedrop-proxy $new_version_number` and make a PR. 3. You should confirm via a manual debian package build and manual testing in Qubes that there are no regressions (this is limited pre-release QA). 4. Once your PR is approved, you can add a tag and push: `git tag $new_version_number`. + #### configuration The proxy script must be run with the path to its configuration file From 8945f4a88085103c039c60766e768c7bd2e5aa09 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 5 Jul 2019 10:54:34 -0700 Subject: [PATCH 061/352] backfill changelog entries --- MANIFEST.in | 1 + changelog.md | 27 +++++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 0c0916b5a..b024c622c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,7 @@ include LICENSE include README.md include securedrop_proxy/VERSION +include changelog.md include config-example.yaml include qubes/securedrop.Proxy include build-requirements.txt diff --git a/changelog.md b/changelog.md index 825c32f0d..a97c31911 100644 --- a/changelog.md +++ b/changelog.md @@ -1 +1,28 @@ # Changelog + +## 0.1.4 + + * Update urllib3 to version 1.24.3 or later due to CVE-2019-11324 (#35) + * Remove pipenv in favor of pip-tools (#33) + +## 0.1.3 + + * Updated PyYAML to 5.1 and safe loading of YAML files + #24 and #25 + +## 0.1.2 + + * Update requirements: Remove dev requirements (#20), update wheel hashes + (#21) + +## 0.1.1-1 + + * Resolves venv paths in generated scripts (via dh-virtualenv) + +## 0.1.1 + + * Update requests to 2.20.0 + +## 0.1.0 + + * Initial release. 
(Closes: #XXX) From 70a99e7d8f2f0d7f17adc3f1196e9d38708b4fca Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 19 Jul 2019 13:49:01 -0700 Subject: [PATCH 062/352] Initial commit --- .gitignore | 104 +++++++++ LICENSE | 674 +++++++++++++++++++++++++++++++++++++++++++++++++++++ README.md | 2 + 3 files changed, 780 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..894a44cc0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,104 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..f288702d2 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
diff --git a/README.md b/README.md new file mode 100644 index 000000000..09ef02c90 --- /dev/null +++ b/README.md @@ -0,0 +1,2 @@ +# securedrop-export +code for exporting from the securedrop qubes workstation From 0ed49544bfa356470a0dfb418c43b04351b1f77e Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Sun, 21 Jul 2019 12:53:02 -0700 Subject: [PATCH 063/352] Refactored send-to-usb script as a python module --- MANIFEST.in | 12 ++ build-requirements.txt | 0 changelog.md | 6 + files/application-x-sd-export.xml | 7 + files/sd-logo.png | Bin 0 -> 8606 bytes files/send-to-usb.desktop | 5 + requirements.txt | 0 securedrop_export/VERSION | 1 + securedrop_export/__init__.py | 0 securedrop_export/entrypoint.py | 23 +++ securedrop_export/export.py | 327 ++++++++++++++++++++++++++++++ securedrop_export/main.py | 37 ++++ setup.py | 35 ++++ test-requirements.txt | 1 + tests/__init__.py | 0 tests/test_export.py | 171 ++++++++++++++++ 16 files changed, 625 insertions(+) create mode 100644 MANIFEST.in create mode 100644 build-requirements.txt create mode 100644 changelog.md create mode 100644 files/application-x-sd-export.xml create mode 100644 files/sd-logo.png create mode 100644 files/send-to-usb.desktop create mode 100644 requirements.txt create mode 100644 securedrop_export/VERSION create mode 100644 securedrop_export/__init__.py create mode 100755 securedrop_export/entrypoint.py create mode 100755 securedrop_export/export.py create mode 100755 securedrop_export/main.py create mode 100644 setup.py create mode 100644 test-requirements.txt create mode 100644 tests/__init__.py create mode 100644 tests/test_export.py diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..8104f05fc --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,12 @@ +include LICENSE +include README.md +include securedrop_export/VERSION +include changelog.md +include build-requirements.txt +include requirements.txt +include securedrop_export/*.py +include setup.py +include 
files/send-to-usb.desktop +include files/application-x-sd-export.xml +include files/sd-logo.png + diff --git a/build-requirements.txt b/build-requirements.txt new file mode 100644 index 000000000..e69de29bb diff --git a/changelog.md b/changelog.md new file mode 100644 index 000000000..02c74ef5a --- /dev/null +++ b/changelog.md @@ -0,0 +1,6 @@ +securedrop-export (0.1.1-1) unstable; urgency=medium + + [ Freedom Of The Press Foundation ] + * Initial release + + -- SecureDrop Team Thu, 18 Jul 2019 10:47:38 -0700 diff --git a/files/application-x-sd-export.xml b/files/application-x-sd-export.xml new file mode 100644 index 000000000..9e36ef08b --- /dev/null +++ b/files/application-x-sd-export.xml @@ -0,0 +1,7 @@ + + + + Archive for transfering files from the SecureDrop workstation to an external USB device. + + + diff --git a/files/sd-logo.png b/files/sd-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..531cbf26c3426493616b7060338cc68d714bad1d GIT binary patch literal 8606 zcmZ`^dJohFV>TA-|aL|B2AbM>rHDh2*0lsJ|O5n=K zmG~MM$emU6R6wAHG}=ph3g9=dqn5EA2oxd+0!7AvK)-;g$nPMKzZeMg!wv+J&jo?l zyz<+OV89=g4mz4@puhh&eV{k66^1Ggjif-4f+s5qDdSki-;yff%&M=R5l6qp!tl_ z^yCr29y=_9s+}XwGQ_fDHczLf)vWexvm+-yQ|4ZKTWDFkMAebglfMe`ZO(V^?VvNe zuJ!{vkCj%kf^LJZPG~}{%#aN3O}dpOOvBxD1uOjF&p*$ z*SH5B9bY&10M#1VE{G4tZN0@}ykudi(PfO;QbdgGQazWM66}BkC(Fb=pgal2k@nz^ z1PJoZWZ9o3B7Y~mKjtPfiRF{x2aAJAp|+H@-VXp9kdcgFm?j38iOF<%Z1u1lE((02 z>SV0?=HbF?`HY+F_vJ{@EdN*t5ON~%{CqfCbF?jg?AF}>5+N0pd}I#UJ}ic3OIPA< zs{J8`(FS3uvA*u(vJ|Xu8F8eOAH;N7fD1hfPJ-n14eBRGhNcu28Bq<Z^{EE4Q4y%D~T+neO2b~^3m_>^ILi{VstE%QWO64dTu2g^JX5+QYeRLs%F@QJbTp_EATFzy%CxQ|0d3?uzhfw(28T?$79F2UTz#?*&>r7 zC0@-pLX&(-RZ749H!0qJfm^TJ_5(yCNvXw*`_o%{mTZei)ufa)zeXnd>aYcE|DuO^ z;9H`|(mqSR8T3WCam8Dlq@ptj0tImZbu>+Gm9^@u9ns!}D&6p(t47&&Ov zDMiDv(5xz|79*}#@>~8e1Sd725W>cl@^OZbi{=<*5W&!;B(^_MX3N@xxwIm}Zsd7K zmA?3?pm5{V4V&r*ZZf1ESW>cn!Jv#K^mKF$iXJ 
z-J;l${X?^$@6dInzfFaYjP#Kt3v0YbVFip)TjDMZ-AC%Z&j%aoe^Tgvh(hmr#YX4%9Ri@Kuqrq8826BAy~tF$U1-ZpmW6(}l>?zx_g^($YO(S_Xs>;V zoz*HGTRnVFe(CYaMyAr|8Hqd{{<$({HT{=e>#GE47dceIss5*in@Y{hCRpjp_i8Qv z1zjXmX~hs>q48v?#d?3akZo+mE+Bwm^22K+bK<2U8%Y;9O;bG``F)weAl*GEf-(Jg zk=*Y|_Qq2I?T}fq0~N?#9N>({zE9jb^av-5YczlV=>;)GL9+zYq%z#h>sW z``YvA>az-Oni;15u!`m+57aPG1rdWE3coTpM3Y8B$XWh^&hsIM*0?qJW7)i4(UJFw z^UA!kBHE1*0>_JsKO~!zTR<@KC7Ne=$tDl1-SN>fG@U zV*`&je&MyAF8^vR@N(hvx%eVoYunNNbBfOE?p0R8M8H$^Y~+`1KeaveN%3R`*0l%x zx4jR#pGO28|DNLXhj}oH=767`&3yA|wXkX#`{}ovc4f&I4cP~2~^qAGoz~6*8&tz{EPkgM|*pr ziniY?4pz4PzWe#nnG8tDOTqPMVyz!YxK%%|HiqswSPDkG0I74QeJ|Oe%z766s6ih{ zL9c?{@1*~XU3m?;1Bh2+mgMdc-F$LGszXm!HfnZf{Z0LGLiK)KM64_6_-EXYj=7JnB-)kZdz*%T*P{wkWuxnB6XYJ4Cj5oJ zJZ0A7%ts9x9mV!}X(J}^#Q*MJQZj)Q-snid=_Tgtn~&LmIE<_FzF2$GQ%rmx|5Hu-8hf0!f0uT# zX^ENVLwZ)YLEI*RzSN_%xj)1yOV@*JQi?iWh}fbzaql}qKu5wP(?rdQ-%bTS`5H;l z4DT{}($MD9ZVKa;X{3vruz%+B@?MUpug5m5xM(mspVgYd&eceGX0EPTEZO<-mxV9W zn3Z|Tcle`clTt>ASaWDMZq|0=vQ_6iJOYaIS^SUA0UQG46%Ms69n_PuI~LUKjFQLaUG^&U&a-qV9tuq#F#0HeSx_ifhvFBn3>5L@^ngv zBm(Bi!OagX6=gsY?Rc?M2zCpzae7*HGw)W@%LvBR*KA$*l!ZN?ZwNz33#!Bd9S8lG(AAEcIgezBAVYg0L!btGgjJAjvC!KQhT9do8u9&nNtndNwXg7fzR-hkWu=E@t zZCLw;_?wjc^U5E-%9nZ|T$_Jtb_R^dN4PLh-XlyX$S)|stE(0NvlLlB z15=Z~To3wrqJ|`!@Ys1XO#ir4el(lGT&SZs%x9d7u9G&cwxPMe8M$}+cXK#t6fXdG ziwvHH@QB;?KksJkT-Wi>)9YwIlg`kQ3wCjzC%WYIZsTNrL%94a;G<5<)`LRHg{w{YihM+&{! 
zP}ZqQVeKDn8V|X+0OtR|gucd~t2QfwS^3YfS}BQS znA%5F>SKs#k48$$2nTAHqs*0wCU#&(eNIw&I))Kr#xjN#Pd^6gkS(d^uZlaIRR~{B z^K5llsi;t0s~EDDjr{)(|A~UXipV-ix#4r6J4?kHP>r*77t90do9nOqPtOXMzdomv ziWr~!(MnZy!)*5HpeGUlprPCc zfc39k2E(}VT2?U+F;-1_&0nukzORLQB-?Mh?iM!);3Udi8#@~wM@Z^~ctriXKP-eA z^Ba+D_mIrgy2NKtB#^_dLHfFjTJx8iO?=6Al=s{ihc+MN8r=6;FP+sYXH6TWnvqnN zCWnr05|@1RzbMP4AT45q%BaXyKfv`gyZN=R#UTx$tUy=}df0>01{P)$!-(+grNOp| zsXMGYfPCa%KE4Wk8nSkuk(t7dKandpjlIQ9Z?;>5X{FLa-a4Cj!@6Ovd5lEPU{ zVRpqv=IX6%H`jNb-mNOXgZ^_pc?NKI4M5q4o{5w%M7oiP(33RuqH-1>ri1hK#^f ze{scid}EDif5>gTExJC__C=Y}GxU?OZGxSaH7 zT}T{mPsL4ULWCsQ7?WM08D9wo?g%EG_z3^T_55YbJ|tYII*CFZ)lQ zh&$a`Nq5T1&>dNc?}VWu{T4R!TrN-lsNL&1#a>>4wnVK2&{_?s1v@uM&8j=xnDy&H z-}aBM=Mzjm+8_RYm=90(-7wBWZ5!6z>^}a4SCfq#Sbw+dax1u2voXbvQHqJm`)W;k zEm7)Xk#YfU@wJAPKo_JT5<0(o+-6~w;CiiG z16?E3y$oD+1EyWCyyOIJ;>Cf;W39X^nRC7Vzl^4qrAmJLh7GNMc~(~sFCi|cUSZCg zV_njMQ7xamxijdP$NGvdQ&-n_?{VIRsoInU7hO^fOLYHLWLm}{Z4koAsYF+Q)bQ>; zz{3c-YtOz;k>+YC+^7b7Ff@%jh6dbg$(FWz8Tt}mATREcmm=Tz&W}^B$LI&o+MPQR z0H!icny%9-e3RID^Efm&Zq+4d_x(*&0pZ7nln$-P89uo4M#j!?~r~fD%@B?efY>2n|Zu z?Fy!x`=zp7Z+O|6{(4cyQ&djNpK)&vA_BeJtv_A#183MFM~bapJ|I7t4pTh)eM9j0 zCIC?krvaG3>biPck;18Z#c#iW(DdFrg0bO+s44pz)>Q17 z@w}c#b^`bgu*Kc1kh+hOa?HFODA^hB0vA3gx*Y-7$oLK$3Y{pHW8?5FB%mnZ*=e*` zMg;`~5^;3J==J#Gg|}k=P96Fn{JmzO!r@?4NXh8Y55d3#q;U2r&qbaq&C1AY%Pz$y zLL9EYgs@V20f7(h+&Ifk=|$<>2mJTYP!o!4e2o~o1J{TQ@7?^4r8-DeYpQ5y>l`ik zee?A6F8_&O0lSoFPCKUCiP0>hcPOf6lrFZ1VZ!AlA|%}_=%`g@UB3ij@os6x0>A6C zwNJUwj->mnmaViaJ~2?m(xlOQ&3CX-SF~ND**Jm@om!jt<6!^6#5tF51qEp_(=}&C zZj0>k69zZV(8bQT_P$lh<@|Q(8xukoLaeT))9MnCoT*WLGl{=l7rF0m$NH+h z^Zxjt$$j&|`&l<^6g5rJw0r$WpTdGY*idWmTHo z^rLTG{ywkb5H>w5oQgzm|(nmqvnGSa_}G^npYT@(YfzT7x{B6O3ml` zzAwDlefZQT@U2e;cEtV7&z_6@<;kJFbF@0WKzYUNsYL{l<>LcMNQSFJqUnL$ax|7d z;Dh5oxGyf;erpl#Zt_Ku(AAMuVhSZ3hS-wlInEdsZA%+1)*TVAo_{>DxaND@yXG78 z=^fb?FH!i_BPlfEnPAWInBe>_$F$FO18H$BrXB4N@0{USTU?7~Fq6OW?98J9g@3I( z%90`wx(5d@yzxuVh1&^Ub9BCE07TsLuD{7G$4|M-RmfCJDn+5Vu-=II=yJ==s{)$0 
zt)j3Ah)0^{qGBp}n(WScK&aN}eR0l|rVf-q;X;wk!i#UtUE_zYye?3j!-H%59EJZZ zVY(O!wV-%!_IURx&fQ5y3-EpKwU#Ax6jvC?rY-1*SL+`P zWkiF;{rSe&w>&s-N^Pic#AMNlGqc7|-B1@`H4v9%L3(<=wQ@>%+f<5a9d=g&-S2~wFfz>w;u{4#-m&!jXkPm6#ut`V zAceetqob=oPgHBCKAk5g1S2dRx}Le-9)o+@bnhu%1uM+Z>i}z?$Rr~2=I7bl+u=a$ zo8A?u9w3N^F0T$%+yr>);?-<-?>$rxK-mY#k4)oSMe}tcTuk^6!=wN1vdnF;Vnn7l z1m$G#KmyoLjV!m6vnpbNlf`!20wgz?Y_bmG8$7bx6W3327ZwzHY1R2=ZLJm3U37+8 zxNr*rTOZK*(F?hxFcDi8xi^hz)%d&v0*(Nuy*I-d#0)GyZ96^MQF9!UyyYDtGWjS% zw7|e;`_U^e^WEczJL&8IB%c2Gu<%P6nV5MtY0hUI{(;N|J$gy`dwSTB$R4LsKG|)g zNvH4E)+RDZYBN@-wtOci`LJ&rAD}wB`%i#0O804cjh!WXU z()B@6=Fp)Tr_`Fue!Co5jYeIdw-HhgS&I(rzIIXEHwv$U)_E}Wg~IBotITS=b~ev| z-9{qE2J!3}WiUJO{1VuPw4mr+ks$*NG$Zk5t&O&1;VXtx+H9!Khh_$u72Aygj9Ew; z3x{d+medl$-&sMkp&sWBdGLY%BS`UuU8MElVLBA|>FDT-SDtvlWb9sngi^?h*prlk zViGb{-IzY+g;ZIGc8n^7LkMLFfr-mq4;p*93m@WLVPx%FQw-!m0M*-n%P#(DhYSbu z@tq+V5R|?qf8m0iXW&~csER~0aV_jreAot=jg#JgICA6_oN3uX?FHQ`Pjwv<8JubU z=u+R(qz=hOh)NrXNga}+Z&x_WRm=nv#Rj4`VAFrr4=y7<`R93%p#$300~}+Q3iKQf zJy(d^KkBU{+(^2llCtH@>08y&d)@L0@YGIo%lN=f_wKM$&;gi%Hhf z{}?*#VPjg=9MsY_1Z?V$OEWR8XfL7atIvNJ=az>vwaOBOOwRu}mq_*+AwGX``CwtO zw6-5j7rS;f$ibO{;OyphX)QX0f$hC8A2Jd_7mxoP(%JSRwtZPB9btg8C9n zuqNEYKCwfXwMHKsBPYxtIn4bvCUcn@KW8Ne5wwe)Ik&f}vt8xASk5_!*1U}7KMIsC zJV7%+Q^*2*ML|4>!jsvOimrl!?r!k?o|d?{r1V`nHWr-?;>7P=go10y?9J=ux`f+A zg{2i-WH39>ORJgmTNSVE{4;ioPzYKfjktNPfQ9kL4M^$Hv@g6#3VnDVKjPl&z16aZ zn_sH(+Oh$9rXn0EA5H8UB5(9M|Mn-4*um(tmvhl4OLd)p7!tCkaa2Ky;E{*;P_Bi+ zcDx2q&v}Q!5He=q;Xo+v+iSMHJ$8=S9a>#HbJz)Xs zNfsD67T@i#(^?0#SU7`jH0b_?zGgpxVpYznRam~+tB`?>Ca!+JFQ*#3&8lEaw!9^0 zx1F;6Ua5mN4SY^TiZ&pdFv1UuN$I+9{imm9v9Us;MEYSXwr>XeJ#h7ql!xp}lt^tC zFeNFKpnp?J0sBJZ`erBNhcud`n&MuZs4Pg4u#R0JTP7*fkTN3Mexqyx>I7eM68@6Z zsMt41PrY^LfsqjE_y`xA%I7hH&xARAT^?g!fIDd}d5WmTpmu~@GsSQsc~l`~M8|LE z?{j6(5;#m;DzSe-5iIA9MGT(-SsKcY!-+9bh3abP!bfU+1J6c^Do9==BtZYt&c(0) zs|~A3eiKmS5e&@1R}y?+pZtFEJb7XlbNU7&Q~IX4eS`nI*l5KdC#I@uzy=!Com~9! 
zs*~hb_xTzx75U=jn9bc&{`)!wHXuKz=r#VspU{RM+%R;p#1`H*Q(YlDB_IG@zFb4{ zz`WIv7f+*R*^85yxWFltmlW^DjtV26v(p1jCY#wfZrHWgjMtRAf_Hz?qB6#r>o^Q)9R6uY^|$%Zr&!t>SGEhKch13PmHVds5=D91RCNoCx)? zQ|TC?Zl%&BPa#=U-<9tISFekiyUShUfLjM4Vtk3TRJ1=RQs>o!dd71TSZ=R?IupHc zDE%1V7+Bme4aN(^{P{^s#kSgj^?fv+3vicV#=@|x^>ZAwF7E!TMv?at;4|&mp85gr zk#X|`7T$x~9_nX7l$iWXDKnt4)KkJY<~q+hHC5m5s|(!l->E_Z8=+>nQ-gmPaM_g10oM|Y%7mg zBz#O;{sut4si-q?F7F)TP_@=7MeY(HC#L-8rm7cCPP_b{0a&Ln%mK+=)VfMLXN({{ z=7z-cC%t8B(?W2!{dwa7@3)r!9cXV=8{8z;LxsahO@h@p~|_} zUuI$*lYdTwu1}$Qxx|;BJFk<79;S_iKgH`)74iLM zJh0mYXC_QdS6t*a^FuNze{zT|#g&kdOQg@sF*-nL0!le|`NggCdyy+wJz(WY<=p13 zS7M$ytGif-oe`it$cT0&Y%y2WbdjHHxG&O zwCVgAcK{&VG8=g%x5QwWQ2G-EL^|q!#n7IV>hd}{(o_uJINDi})H~)(c(sp;@0\n".format(test_msg) + assert captured.out == "" + + +def test_empty_config(capsys): + submission = export.SDExport("testfile") + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write("{}") + config = export.Metadata(temp_folder) + assert not config.is_valid() + + +def test_valid_printer_test_config(capsys): + submission = export.SDExport("testfile") + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write('{"device": "printer-test"}') + config = export.Metadata(temp_folder) + assert config.is_valid() + assert config.encryption_key is None + assert config.encryption_method is None + + +def test_valid_printer_config(capsys): + submission = export.SDExport("") + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write('{"device": "printer"}') + config = export.Metadata(temp_folder) + assert config.is_valid() + assert config.encryption_key is None + assert config.encryption_method is None + + +def test_invalid_encryption_config(capsys): + submission = export.SDExport("testfile") + + temp_folder = 
tempfile.mkdtemp() + metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write( + '{"device": "disk", "encryption_method": "base64", "encryption_key": "hunter1"}' + ) + config = export.Metadata(temp_folder) + assert config.encryption_key == "hunter1" + assert config.encryption_method == "base64" + assert not config.is_valid() + + +def test_valid_encryption_config(capsys): + submission = export.SDExport("testfile") + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write( + '{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}' + ) + config = export.Metadata(temp_folder) + assert config.encryption_key == "hunter1" + assert config.encryption_method == "luks" + assert config.is_valid() + + +@mock.patch("subprocess.check_call") +def test_popup_message(mocked_call): + submission = export.SDExport("testfile") + submission.popup_message("hello!") + mocked_call.assert_called_once_with([ + "notify-send", + "--expire-time", "3000", + "--icon", "/usr/share/securedrop/icons/sd-logo.png", + "SecureDrop: hello!" 
+ ]) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BOTHER_PRINTER) +def test_get_good_printer_uri(mocked_call): + submission = export.SDExport("testfile") + result = submission.get_printer_uri() + assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) +def test_get_bad_printer_uri(mocked_call, capsys): + submission = export.SDExport("testfile") + expected_message = "USB Printer not found" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + + with pytest.raises(SystemExit) as sysexit: + result = submission.get_printer_uri() + assert result == "" + mocked_exit.assert_called_once_with(expected_message) + + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + assert captured.out == "" + + +@pytest.mark.parametrize('open_office_paths', [ + "/tmp/whatver/thisisadoc.doc" + "/home/user/Downloads/thisisadoc.xlsx" + "/home/user/Downloads/file.odt" + "/tmp/tmpJf83j9/secret.pptx" +]) +def test_is_open_office_file(capsys, open_office_paths): + submission = export.SDExport("") + assert submission.is_open_office_file(open_office_paths) + + +@pytest.mark.parametrize('open_office_paths', [ + "/tmp/whatver/thisisadoc.doccc" + "/home/user/Downloads/thisisa.xlsx.zip" + "/home/user/Downloads/file.odz" + "/tmp/tmpJf83j9/secret.gpg" +]) +def test_is_not_open_office_file(capsys, open_office_paths): + submission = export.SDExport("") + assert not submission.is_open_office_file(open_office_paths) From 8806ea1968a5f2d75f448f3652089c2d3bdc81ba Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Tue, 23 Jul 2019 14:52:35 -0700 Subject: [PATCH 064/352] fixed disk export location --- securedrop_export/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index c40ee2f1d..f5ce56a95 100755 --- 
a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -168,7 +168,7 @@ def copy_submission(self): target_path = os.path.join(self.mountpoint, self.target_dirname) subprocess.check_call(["mkdir", target_path]) export_data = os.path.join( - self.tmpdir, self.submission_dirname, "export_data/" + self.tmpdir, "export_data/" ) subprocess.check_call(["cp", "-r", export_data, target_path]) self.popup_message("Files exported successfully to disk.") From 0cba8e2d71a29df217dce1dbc381007c0187b96b Mon Sep 17 00:00:00 2001 From: mickael e Date: Mon, 22 Jul 2019 10:39:58 -0400 Subject: [PATCH 065/352] Add makefile and ci for tests --- .circleci/config.yml | 35 ++++++++++++++++++ Makefile | 32 +++++++++++++++++ README.md | 3 ++ test-requirements.in | 1 + test-requirements.txt | 84 ++++++++++++++++++++++++++++++++++++++++++- 5 files changed, 154 insertions(+), 1 deletion(-) create mode 100644 .circleci/config.yml create mode 100644 Makefile create mode 100644 test-requirements.in diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000..baae95224 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,35 @@ +version: 2 +jobs: + build: + docker: + - image: circleci/python:3.5-stretch + steps: + - checkout + + test: + docker: + - image: circleci/python:3.5 + steps: + - checkout + - run: + name: Install packages + command: | + sudo apt install libnotify-bin + - run: + name: Install test requirements and run tests + command: | + virtualenv .venv + source .venv/bin/activate + pip install --require-hashes -r test-requirements.txt + make test + - run: + name: Check Python dependencies for CVEs + command: | + set -e + source .venv/bin/activate + make safety +workflows: + version: 2 + securedrop_export_ci: + jobs: + - test diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..2ee2e801d --- /dev/null +++ b/Makefile @@ -0,0 +1,32 @@ +.PHONY: safety +safety: ## Runs `safety check` to check python dependencies for vulnerabilities + 
pip install --upgrade safety && \ + for req_file in `find . -type f -name '*requirements.txt'`; do \ + echo "Checking file $$req_file" \ + && safety check --full-report -r $$req_file \ + && echo -e '\n' \ + || exit 1; \ + done + +.PHONY: update-pip-requirements +update-pip-requirements: ## Updates all Python requirements files via pip-compile. + pip-compile --generate-hashes --output-file test-requirements.txt test-requirements.in + +.PHONY: test +test: + pytest -v tests/ + +# Explaination of the below shell command should it ever break. +# 1. Set the field separator to ": ##" and any make targets that might appear between : and ## +# 2. Use sed-like syntax to remove the make targets +# 3. Format the split fields into $$1) the target name (in blue) and $$2) the target descrption +# 4. Pass this file as an arg to awk +# 5. Sort it alphabetically +# 6. Format columns with colon as delimiter. +.PHONY: help +help: ## Print this message and exit. + @printf "Makefile for developing and testing the SecureDrop proxy.\n" + @printf "Subcommands:\n\n" + @awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST) \ + | sort \ +| column -s ':' -t diff --git a/README.md b/README.md index 09ef02c90..f8a0bd6df 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,5 @@ +[![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-export.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-export) + # securedrop-export + code for exporting from the securedrop qubes workstation diff --git a/test-requirements.in b/test-requirements.in new file mode 100644 index 000000000..e079f8a60 --- /dev/null +++ b/test-requirements.in @@ -0,0 +1 @@ +pytest diff --git a/test-requirements.txt b/test-requirements.txt index e079f8a60..073600a7a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1 +1,83 @@ -pytest +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes 
--output-file test-requirements.txt test-requirements.in +# +atomicwrites==1.3.0 \ + --hash=sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4 \ + --hash=sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6 \ + # via pytest +attrs==19.1.0 \ + --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \ + --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399 \ + # via pytest +configparser==3.7.4 \ + --hash=sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32 \ + --hash=sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75 \ + # via importlib-metadata +contextlib2==0.5.5 \ + --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \ + --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \ + # via importlib-metadata +funcsigs==1.0.2 \ + --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \ + --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \ + # via pytest +importlib-metadata==0.18 \ + --hash=sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7 \ + --hash=sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db \ + # via pluggy, pytest +more-itertools==5.0.0 \ + --hash=sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4 \ + --hash=sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc \ + --hash=sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9 \ + # via pytest +packaging==19.0 \ + --hash=sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af \ + --hash=sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3 \ + # via pytest +pathlib2==2.3.4 \ + --hash=sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e \ + --hash=sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8 \ + # via 
importlib-metadata, pytest +pluggy==0.12.0 \ + --hash=sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc \ + --hash=sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c \ + # via pytest +py==1.8.0 \ + --hash=sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa \ + --hash=sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53 \ + # via pytest +pyparsing==2.4.1 \ + --hash=sha256:530d8bf8cc93a34019d08142593cf4d78a05c890da8cf87ffa3120af53772238 \ + --hash=sha256:f78e99616b6f1a4745c0580e170251ef1bbafc0d0513e270c4bd281bf29d2800 \ + # via packaging +pytest==4.6.4 \ + --hash=sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae \ + --hash=sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6 +scandir==1.10.0 \ + --hash=sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e \ + --hash=sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022 \ + --hash=sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f \ + --hash=sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f \ + --hash=sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae \ + --hash=sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173 \ + --hash=sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4 \ + --hash=sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32 \ + --hash=sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188 \ + --hash=sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d \ + --hash=sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac \ + # via pathlib2 +six==1.12.0 \ + --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ + --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ + # via more-itertools, packaging, pathlib2, pytest 
+wcwidth==0.1.7 \ + --hash=sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e \ + --hash=sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c \ + # via pytest +zipp==0.5.2 \ + --hash=sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a \ + --hash=sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec \ + # via importlib-metadata From 056b8ad570d57de7e8634abb236a9a39538287d9 Mon Sep 17 00:00:00 2001 From: mickael e Date: Tue, 23 Jul 2019 18:41:39 -0400 Subject: [PATCH 066/352] Add update-version.sh script --- update-version.sh | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100755 update-version.sh diff --git a/update-version.sh b/update-version.sh new file mode 100755 index 000000000..983fd4e8d --- /dev/null +++ b/update-version.sh @@ -0,0 +1,26 @@ +#!/bin/bash +## Usage: ./update_version.sh + +set -e + +readonly NEW_VERSION=$1 + +if [ -z "$NEW_VERSION" ]; then + echo "You must specify the new version!" + exit 1 +fi + +# Get the old version from securedrop_export/VERSION +OLD_VERSION=$(cat securedrop_export/VERSION) +if [ -z "$OLD_VERSION" ]; then + echo "Couldn't find the old version: does this script need to be updated?" + exit 1 +fi + +# Update the version in securedrop_export/VERSION (setup.py is done automatically) +if [[ "$OSTYPE" == "darwin"* ]]; then + # The empty '' after sed -i is required on macOS to indicate no backup file should be saved. + sed -i '' "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_export/VERSION +else + sed -i "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_export/VERSION +fi From eba6ec1f311a7ca3335869bd6ab31b921d1b0eb6 Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 25 Jul 2019 09:24:26 -0400 Subject: [PATCH 067/352] Rename update-version.sh for consistency. 
Other repos use update_version.sh: https://github.com/freedomofpress/securedrop-client/ https://github.com/freedomofpress/securedrop-proxy/ --- update-version.sh => update_version.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename update-version.sh => update_version.sh (100%) diff --git a/update-version.sh b/update_version.sh similarity index 100% rename from update-version.sh rename to update_version.sh From 73c3d77bcd980cfa5878405d27ba31b20fa072c7 Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 25 Jul 2019 09:31:11 -0400 Subject: [PATCH 068/352] Update pyparsing to 2.4.1.1 2.4.1 was no longer available on pypi: https://circleci.com/gh/freedomofpress/securedrop-export/27 --- test-requirements.txt | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 073600a7a..c1a39ee1b 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -49,10 +49,9 @@ py==1.8.0 \ --hash=sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa \ --hash=sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53 \ # via pytest -pyparsing==2.4.1 \ - --hash=sha256:530d8bf8cc93a34019d08142593cf4d78a05c890da8cf87ffa3120af53772238 \ - --hash=sha256:f78e99616b6f1a4745c0580e170251ef1bbafc0d0513e270c4bd281bf29d2800 \ - # via packaging +pyparsing==2.4.1.1 \ + --hash=sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580 \ + --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 pytest==4.6.4 \ --hash=sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae \ --hash=sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6 From 9763192a3bb269775fcd1f07e6aadca85d5ea9c6 Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 24 Jul 2019 09:15:59 -0400 Subject: [PATCH 069/352] Provides usb preflight checks Also check if a USB device is connected via `usb-test` action: * returns `USB_CONNECTED` if a device is connected to 
sd-export Qube * returns `USB_NOT_CONNECTED` if a device is not connnected to sd-export Qube Also checks if a Drive is LUKS-encrypted via `disk-test` action: * returns `USB_ENCRYPTED` if the usb device is LUKS-encrypted * returns `USB_NO_SUPPORTED_ENCRYPTION` if the usb device is not LUKS-encrypted --- securedrop_export/export.py | 45 +++++++++++++++++++- securedrop_export/main.py | 6 ++- tests/test_export.py | 82 +++++++++++++++++++++++++++++++++---- 3 files changed, 122 insertions(+), 11 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index f5ce56a95..e1343c5cf 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -18,7 +18,7 @@ ENCRYPTED_DEVICE = "encrypted_volume" BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" - +PCI_BUS_ID = "002:" class Metadata(object): """ @@ -26,7 +26,13 @@ class Metadata(object): """ METADATA_FILE = "metadata.json" - SUPPORTED_EXPORT_METHODS = ["disk", "printer", "printer-test"] + SUPPORTED_EXPORT_METHODS = [ + "usb-test", # general preflight check + "disk", + "disk-test", # disk preflight test + "printer", + "printer-test" # print test page + ] SUPPORTED_ENCRYPTION_METHODS = ["luks"] def __init__(self, archive_path): @@ -119,6 +125,41 @@ def extract_tarball(self): self.exit_gracefully(msg, e=e) + def check_usb_connected(self): + # Rely on the output of lsusb on the bus assigned to. We might need to make this variable configurable + # In the future and extracted from config.json + p = subprocess.check_output(["lsusb", "-s", PCI_BUS_ID]) + # Empty string means a likely wrong PCI_BUS_ID + if p == "": + msg = "ERROR_USB_CHECK" + self.exit_gracefully(msg) + n_usb = len(p.rstrip().split("\n")) + # If there is one device, it is the root hub. 
+ if n_usb == 1: + msg = "USB_NOT_CONNECTED" + self.exit_gracefully(msg) + # If there are two devices, it's the root hub and another device (presumably for export) + elif n_usb == 2: + msg = "USB_CONNECTED" + self.exit_gracefully(msg) + # Else the result is unexpected + else: + msg = "ERROR_USB_CHECK" + self.exit_gracefully(msg) + + + def check_luks_volume(self): + try: + # cryptsetup isLuks returns 0 if the device is a luks volume + # subprocess with throw if the device is not luks (rc !=0) + p = subprocess.check_call(["sudo", "cryptsetup", "isLuks", DEVICE]) + msg = "USB_ENCRYPTED" + self.exit_gracefully(msg) + except subprocess.CalledProcessError as e: + msg = "USB_NO_SUPPORTED_ENCRYPTION" + self.exit_gracefully(msg) + + def unlock_luks_volume(self, encryption_key): # the luks device is not already unlocked if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 79b5d721d..58dca3e02 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -14,11 +14,15 @@ def __main__(submission): submission.exit_gracefully(msg, e=e) if submission.archive_metadata.is_valid(): - if submission.archive_metadata.export_method == "disk": + if submission.archive_metadata.export_method == "usb-test": + submission.check_usb_connected() + elif submission.archive_metadata.export_method == "disk": # exports all documents in the archive to luks-encrypted volume submission.unlock_luks_volume(submission.archive_metadata.encryption_key) submission.mount_volume() submission.copy_submission() + elif submission.archive_metadata.export_method == "disk-test": + submission.check_luks_volume() elif submission.archive_metadata.export_method == "printer": # prints all documents in the archive printer_uri = submission.get_printer_uri() diff --git a/tests/test_export.py b/tests/test_export.py index 85dbcd258..577667ee5 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -2,6 +2,7 @@ 
import os import pytest +import subprocess import tempfile from securedrop_export import export @@ -9,14 +10,10 @@ SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BOTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa - -# This below stanza is only necessary because the export code is not -# structured as a module. If a Python module were created called -# `export`, we could simply do `import export` -# path_to_script = os.path.join( -# os.path.dirname(os.path.abspath(__file__)), "send-to-usb" -# ) -# securedropexport = imp.load_source("send-to-usb", path_to_script) +SAMPLE_OUTPUT_NO_USB="Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa +SAMPLE_OUTPUT_USB="Bus 001 Device 002: ID 0781:5575 SanDisk Corp.\nBus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa +SAMPLE_OUTPUT_USB_ERROR="" +SAMPLE_OUTPUT_USB_ERROR2="h\ne\nl\nl\no" def test_exit_gracefully_no_exception(capsys): @@ -169,3 +166,72 @@ def test_is_open_office_file(capsys, open_office_paths): def test_is_not_open_office_file(capsys, open_office_paths): submission = export.SDExport("") assert not submission.is_open_office_file(open_office_paths) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_USB) +def test_usb_precheck_connected(mocked_call, capsys): + submission = export.SDExport("testfile") + expected_message = "USB_NOT_CONNECTED" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + with pytest.raises(SystemExit) as sysexit: + result = submission.check_usb_connected() + mocked_exit.assert_called_once_with(expected_message) + + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + + 
+@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB) +def test_usb_precheck_disconnected(mocked_call, capsys): + submission = export.SDExport("testfile") + expected_message = "USB_CONNECTED" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + with pytest.raises(SystemExit) as sysexit: + result = submission.check_usb_connected() + mocked_exit.assert_called_once_with(expected_message) + + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB_ERROR) +def test_usb_precheck_error(mocked_call, capsys): + submission = export.SDExport("testfile") + expected_message = "ERROR_USB_CHECK" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + with pytest.raises(SystemExit) as sysexit: + result = submission.check_usb_connected() + mocked_exit.assert_called_once_with(expected_message) + + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB_ERROR2) +def test_usb_precheck_error_2(mocked_call, capsys): + submission = export.SDExport("testfile") + expected_message = "ERROR_USB_CHECK" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + with pytest.raises(SystemExit) as sysexit: + result = submission.check_usb_connected() + mocked_exit.assert_called_once_with(expected_message) + + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + + +@mock.patch("subprocess.check_call") +def test_luks_precheck_encrypted(mocked_call, capsys): + submission = export.SDExport("testfile") + expected_message = "USB_ENCRYPTED" + with pytest.raises(SystemExit) as sysexit: + result = submission.check_luks_volume() + mocked_exit.assert_called_once_with(expected_message) + assert 
sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + From f511756937092823415ee0710cc6a581e4ecd55f Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 25 Jul 2019 12:58:34 -0400 Subject: [PATCH 070/352] Standardize error messages for sd-export All error return codes are now prefixed with `ERROR_` --- securedrop_export/export.py | 23 +++++++++-------------- securedrop_export/main.py | 4 ++-- tests/test_export.py | 2 +- 3 files changed, 12 insertions(+), 17 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index e1343c5cf..46fba8ab2 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -98,7 +98,6 @@ def exit_gracefully(self, msg, e=False): sys.stderr.write("\n") # exit with 0 return code otherwise the os will attempt to open # the file with another application - self.popup_message("Export error: {}".format(msg)) sys.exit(0) @@ -172,7 +171,7 @@ def unlock_luks_volume(self, encryption_key): p.communicate(input=str.encode(encryption_key, "utf-8")) rc = p.returncode if rc != 0: - msg = "Bad passphrase or luks error." 
+ msg = "USB_BAD_PASSPHRASE" self.exit_gracefully(msg) @@ -199,7 +198,7 @@ def mount_volume(self): except subprocess.CalledProcessError as e: # clean up subprocess.check_call(["sudo", "cryptsetup", "luksClose", self.encrypted_device]) - msg = "An error occurred while mounting disk: " + msg = "ERROR_USB_MOUNT" self.exit_gracefully(msg, e=e) @@ -214,7 +213,7 @@ def copy_submission(self): subprocess.check_call(["cp", "-r", export_data, target_path]) self.popup_message("Files exported successfully to disk.") except (subprocess.CalledProcessError, OSError) as e: - msg = "Error writing to disk:" + msg = "ERROR_USB_WRITE" self.exit_gracefully(msg, e=e) finally: # Finally, we sync the filesystem, unmount the drive and lock the @@ -254,7 +253,7 @@ def get_printer_uri(self): try: output = subprocess.check_output(["sudo", "lpinfo", "-v"]) except subprocess.CalledProcessError as e: - msg = "Error retrieving printer uri." + msg = "ERROR_PRINTER_URI" self.exit_gracefully(msg, e=e) # fetch the usb printer uri @@ -265,12 +264,12 @@ def get_printer_uri(self): # verify that the printer is supported, else exit if printer_uri == "": # No usb printer is connected - self.exit_gracefully("USB Printer not found") + self.exit_gracefully("ERROR_PRINTER_NOT_FOUND") elif "Brother" in printer_uri: return printer_uri else: # printer url is a make that is unsupported - self.exit_gracefully("USB Printer not supported") + self.exit_gracefully("ERROR_PRINTER_NOT_SUPPORTED") def install_printer_ppd(self, uri): @@ -281,7 +280,7 @@ def install_printer_ppd(self, uri): ["sudo", "ppdc", self.brlaser_driver, "-d", "/usr/share/cups/model/"] ) except subprocess.CalledProcessError as e: - msg = "Error installing ppd file for printer {}.".format(uri) + msg = "ERROR_PRINTER_DRIVER_INSTALL" self.exit_gracefully(msg, e=e) return self.brlaser_ppd # Here, we could support ppd drivers for other makes or models in the future @@ -309,9 +308,7 @@ def setup_printer(self, printer_uri, printer_ppd): ["sudo", "lpadmin", 
"-p", self.printer_name, "-u", "allow:user"] ) except subprocess.CalledProcessError as e: - msg = "Error setting up printer {} at {} using {}.".format( - self.printer_name, printer_uri, printer_ppd - ) + msg = "ERROR_PRINTER_INSTALL" self.exit_gracefully(msg, e=e) @@ -354,9 +351,7 @@ def print_file(self, file_to_print): subprocess.check_call(["xpp", "-P", self.printer_name, file_to_print]) except subprocess.CalledProcessError as e: - msg = "Error printing file {} with printer {}.".format( - file_to_print, self.printer_name - ) + msg = "ERROR_PRINT" self.exit_gracefully(msg, e=e) ## class ends here diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 58dca3e02..a0b830c7a 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -10,7 +10,7 @@ def __main__(submission): try: submission.archive_metadata = export.Metadata(submission.tmpdir) except Exception as e: - msg = "Error parsing metadata: " + msg = "ERROR_METADATA_PARSING" submission.exit_gracefully(msg, e=e) if submission.archive_metadata.is_valid(): @@ -36,6 +36,6 @@ def __main__(submission): submission.setup_printer(printer_uri, printer_ppd) submission.print_test_page() else: - submission.exit_gracefully("Archive metadata is invalid") + submission.exit_gracefully("ERROR_ARCHIVE_METADATA") diff --git a/tests/test_export.py b/tests/test_export.py index 577667ee5..00996c9a3 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -132,7 +132,7 @@ def test_get_good_printer_uri(mocked_call): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) def test_get_bad_printer_uri(mocked_call, capsys): submission = export.SDExport("testfile") - expected_message = "USB Printer not found" + expected_message = "ERROR_PRINTER_NOT_FOUND" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: From 8cfa3bdc68b2541ca24ef24e31dc11fe97b93f34 Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 1 Aug 2019 16:20:54 
-0400 Subject: [PATCH 071/352] Add more return codes for errors --- securedrop_export/entrypoint.py | 6 ++--- securedrop_export/export.py | 43 ++++++++++++++++----------------- 2 files changed, 24 insertions(+), 25 deletions(-) diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index 9958d5d05..fbdec17d1 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -11,7 +11,7 @@ def start(): try: # Halt immediately if target file is absent if not os.path.exists(my_sub.archive): - msg = "File does not exist" + msg = "ERROR_FILE_NOT_FOUND" my_sub.exit_gracefully(msg) main.__main__(my_sub) # Delete extracted achive from tempfile @@ -19,5 +19,5 @@ def start(): except Exception as e: # exit with 0 return code otherwise the os will attempt to open # the file with another application - msg = "Unhandled exception:" - my_sub.exit_gracefully(msg, e=e) + msg = "ERROR_GENERIC" + my_sub.exit_gracefully(msg) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 46fba8ab2..12f918fbf 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -118,10 +118,9 @@ def extract_tarball(self): try: with tarfile.open(self.archive) as tar: tar.extractall(self.tmpdir) - except Exception as e: - print (e) - msg = "Error opening export bundle: " - self.exit_gracefully(msg, e=e) + except Exception: + msg = "ERROR_EXTRACTION" + self.exit_gracefully(msg) def check_usb_connected(self): @@ -154,7 +153,7 @@ def check_luks_volume(self): p = subprocess.check_call(["sudo", "cryptsetup", "isLuks", DEVICE]) msg = "USB_ENCRYPTED" self.exit_gracefully(msg) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: msg = "USB_NO_SUPPORTED_ENCRYPTION" self.exit_gracefully(msg) @@ -195,11 +194,11 @@ def mount_volume(self): "-R", "user:user", self.mountpoint, ] ) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: # clean up subprocess.check_call(["sudo", 
"cryptsetup", "luksClose", self.encrypted_device]) msg = "ERROR_USB_MOUNT" - self.exit_gracefully(msg, e=e) + self.exit_gracefully(msg) def copy_submission(self): @@ -212,9 +211,9 @@ def copy_submission(self): ) subprocess.check_call(["cp", "-r", export_data, target_path]) self.popup_message("Files exported successfully to disk.") - except (subprocess.CalledProcessError, OSError) as e: + except (subprocess.CalledProcessError, OSError): msg = "ERROR_USB_WRITE" - self.exit_gracefully(msg, e=e) + self.exit_gracefully(msg) finally: # Finally, we sync the filesystem, unmount the drive and lock the # luks volume, and exit 0 @@ -238,12 +237,12 @@ def wait_for_print(self): return True else: time.sleep(5) - except subprocess.CalledProcessError as e: - msg = "Error while retrieving print status" - self.exit_gracefully(msg, e=e) - except TimeoutException as e: - msg = "Timeout when getting printer information" - self.exit_gracefully(msg, e=e) + except subprocess.CalledProcessError: + msg = "ERROR_PRINT" + self.exit_gracefully(msg) + except TimeoutException: + msg = "ERROR_PRINT" + self.exit_gracefully(msg) return True @@ -254,7 +253,7 @@ def get_printer_uri(self): output = subprocess.check_output(["sudo", "lpinfo", "-v"]) except subprocess.CalledProcessError as e: msg = "ERROR_PRINTER_URI" - self.exit_gracefully(msg, e=e) + self.exit_gracefully(msg) # fetch the usb printer uri for line in output.split(): @@ -279,9 +278,9 @@ def install_printer_ppd(self, uri): subprocess.check_call( ["sudo", "ppdc", self.brlaser_driver, "-d", "/usr/share/cups/model/"] ) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: msg = "ERROR_PRINTER_DRIVER_INSTALL" - self.exit_gracefully(msg, e=e) + self.exit_gracefully(msg) return self.brlaser_ppd # Here, we could support ppd drivers for other makes or models in the future @@ -307,9 +306,9 @@ def setup_printer(self, printer_uri, printer_ppd): subprocess.check_call( ["sudo", "lpadmin", "-p", self.printer_name, "-u", 
"allow:user"] ) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: msg = "ERROR_PRINTER_INSTALL" - self.exit_gracefully(msg, e=e) + self.exit_gracefully(msg) def print_test_page(self): @@ -350,9 +349,9 @@ def print_file(self, file_to_print): file_to_print = converted_path subprocess.check_call(["xpp", "-P", self.printer_name, file_to_print]) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: msg = "ERROR_PRINT" - self.exit_gracefully(msg, e=e) + self.exit_gracefully(msg) ## class ends here class TimeoutException(Exception): From bee4790b4d8d5aaf7a69f3b4674057e4e598e7cf Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 1 Aug 2019 16:50:52 -0400 Subject: [PATCH 072/352] Address review comments Configurable PCI_DEVICE_ID will be populated at provision time by salt logic --- securedrop_export/entrypoint.py | 3 +- securedrop_export/export.py | 26 ++++++----- securedrop_export/main.py | 13 ++---- tests/sd-export-config-bad-2.json | 3 ++ tests/sd-export-config-bad.json | 3 ++ tests/sd-export-config.json | 3 ++ tests/test_export.py | 72 +++++++++++++++++++++++-------- 7 files changed, 83 insertions(+), 40 deletions(-) create mode 100644 tests/sd-export-config-bad-2.json create mode 100644 tests/sd-export-config-bad.json create mode 100644 tests/sd-export-config.json diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index fbdec17d1..c5b22494e 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -5,9 +5,10 @@ from securedrop_export import export from securedrop_export import main +CONFIG_PATH = "/etc/sd-export-config.json" def start(): - my_sub = export.SDExport(sys.argv[1]) + my_sub = export.SDExport(sys.argv[1], CONFIG_PATH) try: # Halt immediately if target file is absent if not os.path.exists(my_sub.archive): diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 12f918fbf..7fe7fa513 100755 --- 
a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -18,7 +18,6 @@ ENCRYPTED_DEVICE = "encrypted_volume" BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" -PCI_BUS_ID = "002:" class Metadata(object): """ @@ -37,12 +36,14 @@ class Metadata(object): def __init__(self, archive_path): self.metadata_path = os.path.join(archive_path, self.METADATA_FILE) + try: with open(self.metadata_path) as f: json_config = json.loads(f.read()) self.export_method = json_config.get("device", None) self.encryption_method = json_config.get("encryption_method", None) self.encryption_key = json_config.get("encryption_key", None) + except Exception as e: raise @@ -58,7 +59,7 @@ def is_valid(self): class SDExport(object): - def __init__(self, archive): + def __init__(self, archive, config_path): self.device = DEVICE self.mountpoint = MOUNTPOINT self.encrypted_device = ENCRYPTED_DEVICE @@ -68,14 +69,21 @@ def __init__(self, archive): self.brlaser_driver = BRLASER_DRIVER self.brlaser_ppd = BRLASER_PPD - + self.archive = archive - self.submission_dirname = os.path.basename(self.archive).split(".")[0] + self.submission_dirname = os.path.basename(self.archive).split(".")[0] self.target_dirname = "sd-export-{}".format( datetime.datetime.now().strftime("%Y%m%d-%H%M%S") ) self.tmpdir = tempfile.mkdtemp() + try: + with open(config_path) as f: + json_config = json.loads(f.read()) + self.pci_bus_id = int(json_config.get("pci_bus_id", 2)) + except Exception as e: + self.exit_gracefully("ERROR_CONFIG") + def exit_gracefully(self, msg, e=False): """ @@ -124,10 +132,8 @@ def extract_tarball(self): def check_usb_connected(self): - # Rely on the output of lsusb on the bus assigned to. 
We might need to make this variable configurable - # In the future and extracted from config.json - p = subprocess.check_output(["lsusb", "-s", PCI_BUS_ID]) - # Empty string means a likely wrong PCI_BUS_ID + p = subprocess.check_output(["lsusb", "-s", self.pci_bus_id]) + # Empty string means a likely wrong pci_bus_id if p == "": msg = "ERROR_USB_CHECK" self.exit_gracefully(msg) @@ -189,8 +195,8 @@ def mount_volume(self): ) subprocess.check_call( [ - "sudo", - "chown", + "sudo", + "chown", "-R", "user:user", self.mountpoint, ] ) diff --git a/securedrop_export/main.py b/securedrop_export/main.py index a0b830c7a..b51770556 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -1,17 +1,12 @@ -import os -import shutil -import sys - -from securedrop_export import export +from securedrop_export import export def __main__(submission): submission.extract_tarball() - try: + try: submission.archive_metadata = export.Metadata(submission.tmpdir) except Exception as e: - msg = "ERROR_METADATA_PARSING" - submission.exit_gracefully(msg, e=e) + submission.exit_gracefully("ERROR_METADATA_PARSING") if submission.archive_metadata.is_valid(): if submission.archive_metadata.export_method == "usb-test": @@ -37,5 +32,3 @@ def __main__(submission): submission.print_test_page() else: submission.exit_gracefully("ERROR_ARCHIVE_METADATA") - - diff --git a/tests/sd-export-config-bad-2.json b/tests/sd-export-config-bad-2.json new file mode 100644 index 000000000..879fb8397 --- /dev/null +++ b/tests/sd-export-config-bad-2.json @@ -0,0 +1,3 @@ +{ + "pci_bus_id": "two" +} diff --git a/tests/sd-export-config-bad.json b/tests/sd-export-config-bad.json new file mode 100644 index 000000000..f7cbf8d7a --- /dev/null +++ b/tests/sd-export-config-bad.json @@ -0,0 +1,3 @@ +{ + "pciishf. 
i3u 2 +} diff --git a/tests/sd-export-config.json b/tests/sd-export-config.json new file mode 100644 index 000000000..d1167cf5a --- /dev/null +++ b/tests/sd-export-config.json @@ -0,0 +1,3 @@ +{ + "pci_bus_id": "2" +} diff --git a/tests/test_export.py b/tests/test_export.py index 00996c9a3..82cc824f7 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -5,7 +5,7 @@ import subprocess import tempfile -from securedrop_export import export +from securedrop_export import export SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BOTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa @@ -14,10 +14,45 @@ SAMPLE_OUTPUT_USB="Bus 001 Device 002: ID 0781:5575 SanDisk Corp.\nBus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa SAMPLE_OUTPUT_USB_ERROR="" SAMPLE_OUTPUT_USB_ERROR2="h\ne\nl\nl\no" +TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") +BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") +ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") + + +def test_bad_sd_export_config_invalid_json(capsys): + + expected_message = "ERROR_CONFIG" + with pytest.raises(SystemExit) as sysexit: + submission = export.SDExport("", BAD_TEST_CONFIG) + # A graceful exit means a return code of 0 + assert sysexit.value.code == 0 + + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + assert captured.out == "" + + +def test_bad_sd_export_config_invalid_value(capsys): + + expected_message = "ERROR_CONFIG" + with pytest.raises(SystemExit) as sysexit: + submission = export.SDExport("", ANOTHER_BAD_TEST_CONFIG) + # A graceful exit means a return code of 0 + assert sysexit.value.code == 0 
+ + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + assert captured.out == "" + + +def test_good_sd_export_config(capsys): + submission = export.SDExport("", TEST_CONFIG) + assert submission.pci_bus_id == 2 def test_exit_gracefully_no_exception(capsys): - submission = export.SDExport("testfile") + + submission = export.SDExport("testfile", TEST_CONFIG) test_msg = 'test' with pytest.raises(SystemExit) as sysexit: @@ -32,7 +67,7 @@ def test_exit_gracefully_no_exception(capsys): def test_exit_gracefully_exception(capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) test_msg = 'test' with pytest.raises(SystemExit) as sysexit: @@ -48,7 +83,7 @@ def test_exit_gracefully_exception(capsys): def test_empty_config(capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -58,7 +93,7 @@ def test_empty_config(capsys): def test_valid_printer_test_config(capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -70,7 +105,7 @@ def test_valid_printer_test_config(capsys): def test_valid_printer_config(capsys): - submission = export.SDExport("") + submission = export.SDExport("", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -82,7 +117,7 @@ def test_valid_printer_config(capsys): def test_invalid_encryption_config(capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) @@ 
-97,7 +132,7 @@ def test_invalid_encryption_config(capsys): def test_valid_encryption_config(capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -112,7 +147,7 @@ def test_valid_encryption_config(capsys): @mock.patch("subprocess.check_call") def test_popup_message(mocked_call): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) submission.popup_message("hello!") mocked_call.assert_called_once_with([ "notify-send", @@ -124,14 +159,14 @@ def test_popup_message(mocked_call): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BOTHER_PRINTER) def test_get_good_printer_uri(mocked_call): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) result = submission.get_printer_uri() assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) def test_get_bad_printer_uri(mocked_call, capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "ERROR_PRINTER_NOT_FOUND" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) @@ -153,7 +188,7 @@ def test_get_bad_printer_uri(mocked_call, capsys): "/tmp/tmpJf83j9/secret.pptx" ]) def test_is_open_office_file(capsys, open_office_paths): - submission = export.SDExport("") + submission = export.SDExport("", TEST_CONFIG) assert submission.is_open_office_file(open_office_paths) @@ -164,13 +199,13 @@ def test_is_open_office_file(capsys, open_office_paths): "/tmp/tmpJf83j9/secret.gpg" ]) def test_is_not_open_office_file(capsys, open_office_paths): - submission = export.SDExport("") + submission = export.SDExport("", TEST_CONFIG) assert not 
submission.is_open_office_file(open_office_paths) @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_USB) def test_usb_precheck_connected(mocked_call, capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_NOT_CONNECTED" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -184,7 +219,7 @@ def test_usb_precheck_connected(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB) def test_usb_precheck_disconnected(mocked_call, capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_CONNECTED" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -198,7 +233,7 @@ def test_usb_precheck_disconnected(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB_ERROR) def test_usb_precheck_error(mocked_call, capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "ERROR_USB_CHECK" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -212,7 +247,7 @@ def test_usb_precheck_error(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB_ERROR2) def test_usb_precheck_error_2(mocked_call, capsys): - submission = export.SDExport("testfile") + submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "ERROR_USB_CHECK" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -226,7 +261,7 @@ def test_usb_precheck_error_2(mocked_call, capsys): @mock.patch("subprocess.check_call") def test_luks_precheck_encrypted(mocked_call, capsys): - submission = export.SDExport("testfile") + submission = 
export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_ENCRYPTED" with pytest.raises(SystemExit) as sysexit: result = submission.check_luks_volume() @@ -234,4 +269,3 @@ def test_luks_precheck_encrypted(mocked_call, capsys): assert sysexit.value.code == 0 captured = capsys.readouterr() assert captured.err == "{}\n".format(expected_message) - From 0a98222a2713e08461ed2def5029bbfa8e42313a Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 2 Aug 2019 11:00:10 -0400 Subject: [PATCH 073/352] Code formatting --- securedrop_export/entrypoint.py | 3 +- securedrop_export/export.py | 98 ++++++++++++++++----------------- securedrop_export/main.py | 5 +- tests/test_export.py | 11 ++-- 4 files changed, 60 insertions(+), 57 deletions(-) diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index c5b22494e..a868a1594 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -7,6 +7,7 @@ CONFIG_PATH = "/etc/sd-export-config.json" + def start(): my_sub = export.SDExport(sys.argv[1], CONFIG_PATH) try: @@ -17,7 +18,7 @@ def start(): main.__main__(my_sub) # Delete extracted achive from tempfile shutil.rmtree(my_sub.tmpdir) - except Exception as e: + except Exception: # exit with 0 return code otherwise the os will attempt to open # the file with another application msg = "ERROR_GENERIC" diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 7fe7fa513..7e207287d 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -19,6 +19,7 @@ BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" + class Metadata(object): """ Object to parse, validate and store json metadata from the sd-export archive. 
@@ -26,11 +27,11 @@ class Metadata(object): METADATA_FILE = "metadata.json" SUPPORTED_EXPORT_METHODS = [ - "usb-test", # general preflight check - "disk", - "disk-test", # disk preflight test - "printer", - "printer-test" # print test page + "usb-test", # general preflight check + "disk", + "disk-test", # disk preflight test + "printer", + "printer-test", # print test page ] SUPPORTED_ENCRYPTION_METHODS = ["luks"] @@ -44,7 +45,7 @@ def __init__(self, archive_path): self.encryption_method = json_config.get("encryption_method", None) self.encryption_key = json_config.get("encryption_key", None) - except Exception as e: + except Exception: raise def is_valid(self): @@ -58,7 +59,6 @@ def is_valid(self): class SDExport(object): - def __init__(self, archive, config_path): self.device = DEVICE self.mountpoint = MOUNTPOINT @@ -81,10 +81,9 @@ def __init__(self, archive, config_path): with open(config_path) as f: json_config = json.loads(f.read()) self.pci_bus_id = int(json_config.get("pci_bus_id", 2)) - except Exception as e: + except Exception: self.exit_gracefully("ERROR_CONFIG") - def exit_gracefully(self, msg, e=False): """ Utility to print error messages, mostly used during debugging, @@ -108,20 +107,22 @@ def exit_gracefully(self, msg, e=False): # the file with another application sys.exit(0) - def popup_message(self, msg): try: - subprocess.check_call([ - "notify-send", - "--expire-time", "3000", - "--icon", "/usr/share/securedrop/icons/sd-logo.png", - "SecureDrop: {}".format(msg) - ]) + subprocess.check_call( + [ + "notify-send", + "--expire-time", + "3000", + "--icon", + "/usr/share/securedrop/icons/sd-logo.png", + "SecureDrop: {}".format(msg), + ] + ) except subprocess.CalledProcessError as e: msg = "Error sending notification:" self.exit_gracefully(msg, e=e) - def extract_tarball(self): try: with tarfile.open(self.archive) as tar: @@ -130,7 +131,6 @@ def extract_tarball(self): msg = "ERROR_EXTRACTION" self.exit_gracefully(msg) - def check_usb_connected(self): p = 
subprocess.check_output(["lsusb", "-s", self.pci_bus_id]) # Empty string means a likely wrong pci_bus_id @@ -151,7 +151,6 @@ def check_usb_connected(self): msg = "ERROR_USB_CHECK" self.exit_gracefully(msg) - def check_luks_volume(self): try: # cryptsetup isLuks returns 0 if the device is a luks volume @@ -163,7 +162,6 @@ def check_luks_volume(self): msg = "USB_NO_SUPPORTED_ENCRYPTION" self.exit_gracefully(msg) - def unlock_luks_volume(self, encryption_key): # the luks device is not already unlocked if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): @@ -179,7 +177,6 @@ def unlock_luks_volume(self, encryption_key): msg = "USB_BAD_PASSPHRASE" self.exit_gracefully(msg) - def mount_volume(self): # mount target not created if not os.path.exists(self.mountpoint): @@ -193,28 +190,21 @@ def mount_volume(self): self.mountpoint, ] ) - subprocess.check_call( - [ - "sudo", - "chown", - "-R", "user:user", self.mountpoint, - ] - ) + subprocess.check_call(["sudo", "chown", "-R", "user:user", self.mountpoint]) except subprocess.CalledProcessError: # clean up - subprocess.check_call(["sudo", "cryptsetup", "luksClose", self.encrypted_device]) + subprocess.check_call( + ["sudo", "cryptsetup", "luksClose", self.encrypted_device] + ) msg = "ERROR_USB_MOUNT" self.exit_gracefully(msg) - def copy_submission(self): # move files to drive (overwrites files with same filename) and unmount drive try: target_path = os.path.join(self.mountpoint, self.target_dirname) subprocess.check_call(["mkdir", target_path]) - export_data = os.path.join( - self.tmpdir, "export_data/" - ) + export_data = os.path.join(self.tmpdir, "export_data/") subprocess.check_call(["cp", "-r", export_data, target_path]) self.popup_message("Files exported successfully to disk.") except (subprocess.CalledProcessError, OSError): @@ -225,21 +215,22 @@ def copy_submission(self): # luks volume, and exit 0 subprocess.check_call(["sync"]) subprocess.check_call(["sudo", "umount", self.mountpoint]) - 
subprocess.check_call(["sudo", "cryptsetup", "luksClose", self.encrypted_device]) + subprocess.check_call( + ["sudo", "cryptsetup", "luksClose", self.encrypted_device] + ) subprocess.check_call(["rm", "-rf", self.tmpdir]) sys.exit(0) - def wait_for_print(self): # use lpstat to ensure the job was fully transfered to the printer # returns True if print was successful, otherwise will throw exceptions signal.signal(signal.SIGALRM, handler) signal.alarm(self.printer_wait_timeout) printer_idle_string = "printer {} is idle".format(self.printer_name) - while(True): + while True: try: output = subprocess.check_output(["lpstat", "-p", self.printer_name]) - if(printer_idle_string in output.decode("utf-8")): + if printer_idle_string in output.decode("utf-8"): return True else: time.sleep(5) @@ -251,13 +242,12 @@ def wait_for_print(self): self.exit_gracefully(msg) return True - def get_printer_uri(self): # Get the URI via lpinfo and only accept URIs of supported printers printer_uri = "" try: output = subprocess.check_output(["sudo", "lpinfo", "-v"]) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: msg = "ERROR_PRINTER_URI" self.exit_gracefully(msg) @@ -276,13 +266,18 @@ def get_printer_uri(self): # printer url is a make that is unsupported self.exit_gracefully("ERROR_PRINTER_NOT_SUPPORTED") - def install_printer_ppd(self, uri): - # Some drivers don't come with ppd files pre-compiled, we must compile them + # Some drivers don't come with ppd files pre-compiled, we must compile them if "Brother" in uri: try: subprocess.check_call( - ["sudo", "ppdc", self.brlaser_driver, "-d", "/usr/share/cups/model/"] + [ + "sudo", + "ppdc", + self.brlaser_driver, + "-d", + "/usr/share/cups/model/", + ] ) except subprocess.CalledProcessError: msg = "ERROR_PRINTER_DRIVER_INSTALL" @@ -290,7 +285,6 @@ def install_printer_ppd(self, uri): return self.brlaser_ppd # Here, we could support ppd drivers for other makes or models in the future - def setup_printer(self, 
printer_uri, printer_ppd): try: # Add the printer using lpadmin @@ -316,12 +310,10 @@ def setup_printer(self, printer_uri, printer_ppd): msg = "ERROR_PRINTER_INSTALL" self.exit_gracefully(msg) - def print_test_page(self): self.print_file("/usr/share/cups/data/testprint") self.popup_message("Printing test page") - def print_all_files(self): files_path = os.path.join(self.tmpdir, "export_data/") files = os.listdir(files_path) @@ -333,16 +325,23 @@ def print_all_files(self): msg = "Printing document {} of {}".format(print_count, len(files)) self.popup_message(msg) - def is_open_office_file(self, filename): - OPEN_OFFICE_FORMATS = [".doc", ".docx", ".xls", ".xlsx", - ".ppt", ".pptx", ".odt", ".ods", ".odp"] + OPEN_OFFICE_FORMATS = [ + ".doc", + ".docx", + ".xls", + ".xlsx", + ".ppt", + ".pptx", + ".odt", + ".ods", + ".odp", + ] for extension in OPEN_OFFICE_FORMATS: if os.path.basename(filename).endswith(extension): return True return False - def print_file(self, file_to_print): try: # if the file to print is an (open)office document, we need to call unoconf to convert @@ -359,6 +358,7 @@ def print_file(self, file_to_print): msg = "ERROR_PRINT" self.exit_gracefully(msg) + ## class ends here class TimeoutException(Exception): pass diff --git a/securedrop_export/main.py b/securedrop_export/main.py index b51770556..e9ae86da4 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -1,12 +1,13 @@ from securedrop_export import export + def __main__(submission): submission.extract_tarball() try: submission.archive_metadata = export.Metadata(submission.tmpdir) - except Exception as e: - submission.exit_gracefully("ERROR_METADATA_PARSING") + except Exception: + submission.exit_gracefully("ERROR_METADATA_PARSING") if submission.archive_metadata.is_valid(): if submission.archive_metadata.export_method == "usb-test": diff --git a/tests/test_export.py b/tests/test_export.py index 82cc824f7..19be56140 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ 
-2,7 +2,6 @@ import os import pytest -import subprocess import tempfile from securedrop_export import export @@ -10,10 +9,10 @@ SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BOTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa -SAMPLE_OUTPUT_NO_USB="Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa -SAMPLE_OUTPUT_USB="Bus 001 Device 002: ID 0781:5575 SanDisk Corp.\nBus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa -SAMPLE_OUTPUT_USB_ERROR="" -SAMPLE_OUTPUT_USB_ERROR2="h\ne\nl\nl\no" +SAMPLE_OUTPUT_NO_USB = "Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa +SAMPLE_OUTPUT_USB = "Bus 001 Device 002: ID 0781:5575 SanDisk Corp.\nBus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa +SAMPLE_OUTPUT_USB_ERROR = "" +SAMPLE_OUTPUT_USB_ERROR2 = "h\ne\nl\nl\no" TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") @@ -263,6 +262,8 @@ def test_usb_precheck_error_2(mocked_call, capsys): def test_luks_precheck_encrypted(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_ENCRYPTED" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + with pytest.raises(SystemExit) as sysexit: result = submission.check_luks_volume() mocked_exit.assert_called_once_with(expected_message) From 36be369293208aecef2892524a383e7ed143c374 Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 2 Aug 2019 14:43:25 -0400 Subject: [PATCH 074/352] Fix parsing and test strings now binary to reflect type of check_output 
--- securedrop_export/export.py | 17 +++++++++++------ tests/sd-export-config-bad-2.json | 2 +- tests/test_export.py | 13 +++++++------ 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 7e207287d..8dcff76bf 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -80,7 +80,9 @@ def __init__(self, archive, config_path): try: with open(config_path) as f: json_config = json.loads(f.read()) - self.pci_bus_id = int(json_config.get("pci_bus_id", 2)) + self.pci_bus_id = json_config.get("pci_bus_id", None) + if self.pci_bus_id is None: + raise except Exception: self.exit_gracefully("ERROR_CONFIG") @@ -132,12 +134,15 @@ def extract_tarball(self): self.exit_gracefully(msg) def check_usb_connected(self): - p = subprocess.check_output(["lsusb", "-s", self.pci_bus_id]) - # Empty string means a likely wrong pci_bus_id - if p == "": - msg = "ERROR_USB_CHECK" + + # If the USB is not attached via qvm-usb attach, lsusb will return empty string and a + # return code of 1 + try: + p = subprocess.check_output(["lsusb", "-s", "{}:".format(self.pci_bus_id)]) + except subprocess.CalledProcessError: + msg = "ERROR_USB_CONFIGURATION" self.exit_gracefully(msg) - n_usb = len(p.rstrip().split("\n")) + n_usb = len(p.decode("utf-8").rstrip().split("\n")) # If there is one device, it is the root hub. 
if n_usb == 1: msg = "USB_NOT_CONNECTED" diff --git a/tests/sd-export-config-bad-2.json b/tests/sd-export-config-bad-2.json index 879fb8397..f69e25b7a 100644 --- a/tests/sd-export-config-bad-2.json +++ b/tests/sd-export-config-bad-2.json @@ -1,3 +1,3 @@ { - "pci_bus_id": "two" + "no_pci_bus_id": "nope" } diff --git a/tests/test_export.py b/tests/test_export.py index 19be56140..d0a294659 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -2,6 +2,7 @@ import os import pytest +import subprocess import tempfile from securedrop_export import export @@ -9,10 +10,10 @@ SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BOTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa -SAMPLE_OUTPUT_NO_USB = "Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa -SAMPLE_OUTPUT_USB = "Bus 001 Device 002: ID 0781:5575 SanDisk Corp.\nBus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa -SAMPLE_OUTPUT_USB_ERROR = "" -SAMPLE_OUTPUT_USB_ERROR2 = "h\ne\nl\nl\no" +SAMPLE_OUTPUT_NO_USB = b"Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa +SAMPLE_OUTPUT_USB = b"Bus 001 Device 002: ID 0781:5575 SanDisk Corp.\nBus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa +SAMPLE_OUTPUT_USB_ERROR = b"" +SAMPLE_OUTPUT_USB_ERROR2 = b"h\ne\nl\nl\no" TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") @@ -46,7 +47,7 @@ def test_bad_sd_export_config_invalid_value(capsys): def test_good_sd_export_config(capsys): submission = export.SDExport("", TEST_CONFIG) - assert 
submission.pci_bus_id == 2 + assert submission.pci_bus_id == "2" def test_exit_gracefully_no_exception(capsys): @@ -230,7 +231,7 @@ def test_usb_precheck_disconnected(mocked_call, capsys): assert captured.err == "{}\n".format(expected_message) -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB_ERROR) +@mock.patch("subprocess.check_output", return_code=1) def test_usb_precheck_error(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "ERROR_USB_CHECK" From 2a70172e67a008efeeecf88c4fe8f8edbdb6e320 Mon Sep 17 00:00:00 2001 From: mickael e Date: Tue, 6 Aug 2019 17:03:10 -0400 Subject: [PATCH 075/352] Update return code when printer drivers are not available --- securedrop_export/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 8dcff76bf..6c3ac34f6 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -285,7 +285,7 @@ def install_printer_ppd(self, uri): ] ) except subprocess.CalledProcessError: - msg = "ERROR_PRINTER_DRIVER_INSTALL" + msg = "ERROR_PRINTER_DRIVER_UNAVAILBLE" self.exit_gracefully(msg) return self.brlaser_ppd # Here, we could support ppd drivers for other makes or models in the future From 0dea37119cb06d390a4146e8e5708c109fea0ce3 Mon Sep 17 00:00:00 2001 From: mickael e Date: Tue, 6 Aug 2019 17:20:12 -0400 Subject: [PATCH 076/352] Update readme with metadata format details See https://github.com/freedomofpress/securedrop-workstation/issues/280 --- README.md | 113 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 112 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f8a0bd6df..1346cd7f8 100644 --- a/README.md +++ b/README.md @@ -2,4 +2,115 @@ # securedrop-export -code for exporting from the securedrop qubes workstation +Code for exporting and printing files from the SecureDrop Qubes Workstation. 
+ + +## Export Archive Format + +Export archive format is defined as a gzipped tar archive whose extension ends with .sd-export. + +### Archive Contents + +Once extracted, the archive will contain two elements: + +`metadata.json` : file containing export metadata, a file containing information about the archive and the export operation + +`export_data`: folder containing the raw files to export + +Example archive structure: + +``` +. +├── metadata.json +└── export_data + ├── file-to-export-1.txt + ├── file-to-export-2.pdf + ├── file-to-export-3.doc + [...] +``` + +### Archive Metadata + +Metadata contains three possible keys which may contain several possible values: +`device` +`device` specifies the method used for export, and can be either a device or a preflight check. See the Devices section below for possible values. It is a required key. + +`encryption_method`: +`encryption_method` is used exclusively when exporting to USB storage. It is an optional key. Possible values are: +luks + +`encryption_passphrase` +`encryption_passphrase` is used exclusively when exporting to USB storage. It is an optional key. It contains an arbitrary string that contains the disk encryption passphrase of the device. + + +Example archive metadata (`metadata.json`): +``` +{ + "device": "disk" + "encryption-method": "luks" + "encryption-key": "Your encryption passphrase goes here" +} +``` + +### Devices + +For all 5 devices described below, there are three generic errors that apply: + +`ERROR_FILE_NOT_FOUND`: No file has been specified or the path is incorrect +`ERROR_EXTRACTION`: Error while extracting the archive +`ERROR_METADATA_PARSING`: The metadata.json file cannot be correctly parsed +`ERROR_ARCHIVE_METADATA`: The metadata failed the check +`ERROR_GENERIC`: An uncaught (unexpected) error somewhere in the script. These should not happen unless the code improperly handles errors + +The list of devices are as follows: + +1. 
`usb-test` : Preflight check that probes for USB connected devices, that returns:
+`USB_CONNECTED` if a USB device is attached to the dedicated slot
+`USB_NOT_CONNECTED` if no USB is attached
+`USB_CHECK_ERROR` if an error occurred during pre-flight
+
+2. `disk-test`: Preflight check that checks for LUKS-encrypted volume that returns:
+`USB_ENCRYPTED` if a LUKS volume is attached to sd-export
+`USB_ENCRYPTION_NOT_SUPPORTED` if a LUKS volume is not attached or there was any other error
+`USB_DISK_ERROR`
+
+3. `printer-test`: prints a test page that returns:
+`ERROR_PRINTER_NOT_FOUND` if no printer is connected
+`ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script
+`ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available
+`ERROR_PRINTER_INSTALL` If there is an error installing the printer
+`ERROR_PRINT` if there is an error printing
+
+4. `printer`: sends files to printer that returns:
+`ERROR_PRINTER_NOT_FOUND` if no printer is connected
+`ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script
+`ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available
+`ERROR_PRINTER_INSTALL` If there is an error installing the printer
+`ERROR_PRINT` if there is an error printing
+
+5. `disk`: sends files to disk:
+All files in `export_data` will be sent to disk
+`encryption_method` and `encryption_passphrase` specify the device encryption settings
+
+### Export Folder Structure
+
+When exporting to a USB drive (using the disk device in metadata.json), the files will be placed on the drive as follows: The root of the USB drive will contain one folder per source, reflecting their source codename in the client. Documents or messages exported will be copied to that directory, preserving the filename from the server. In case the same file is exported twice, a confirmation window will prompt the user to replace/rename/abort.
+
+Example folder structure of USB export drive:
+
+```
+.
+ +├── cytotoxic payer +│ ├── 1-cytotoxic-payer-msg +│ │ └── file-to-export-1.txt +│ ├── 2-cytotoxic-payer-msg +│ │ └── file-to-export-2.txt +│ └── 3-cytotoxic-payer-doc +│ │ └── file-to-export-3.doc +├── grandiloquent pasteboard +│ └── 1-grandiloquent-pasteboard-doc +│ │ └── file-to-export-1.doc +└── snug seek +``` + From c22ec4e666691a1bc573cf3b07da819739bcfc6d Mon Sep 17 00:00:00 2001 From: pierwill <19642016+pierwill@users.noreply.github.com> Date: Thu, 8 Aug 2019 11:38:22 -0500 Subject: [PATCH 077/352] Edit README.md - add more context to the repo introduction - light revisions and rewording throughout for readability and clarity - rename "running" to "Example Commands" - capitalize section titles - make markdown lineation more consistent --- README.md | 79 +++++++++++++++++++++++++++++++------------------------ 1 file changed, 44 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index 0207698be..e2d4ca771 100644 --- a/README.md +++ b/README.md @@ -2,12 +2,21 @@ [![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-proxy.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-proxy) -This implements a Qubes RPC <-> HTTP proxy, used to forward requests -from the [securedrop workstation -client](https://github.com/freedomofpress/securedrop-client) to the -[securedrop server](https://github.com/freedomofpress/securedrop). +`securedrop-proxy` is part of the [SecureDrop +Workstation](https://github.com/freedomofpress/securedrop-workstation) project. -### try it out +The code in this repository implements a proxy across two APIs: the [Qubes RPC +protocol](https://www.qubes-os.org/doc/qrexec3/) and the [SecureDrop +API](https://docs.securedrop.org/en/latest/development/journalist_api.html). +This proxy is used to forward requests from the securedrop workstation client to +the securedrop server. + +This code is still in development, and not quite ready for integration with the +rest of the Securedrop Workstation project. 
However, it is ready to be poked at
+and demonstrated. Feel free to explore and contribute! You'll need a machine
+running [Qubes OS](https://qubes-os.org).
+
+### How It Works
The proxy works by reading a JSON object from STDIN, generating an
HTTP request from that JSON, making that request against the remote
@@ -16,14 +25,10 @@ server's response to STDOUT. For discussion about the shape of the
request and response objects, see
https://github.com/freedomofpress/securedrop-workstation/issues/107.
-This is still development code, not quite ready for integration with the
-rest of the securedrop-workstation project. However, it is ready to
-be poked at and demonstrated.
+#### Installation Requirements
-#### install requirements
-
-To try the proxy script, first use `venv` to create an environment
-and install requirements. In the root of the project directory, run
+To try the proxy script, create a virtual environment and install the
+requirements. In the root of the project directory, run
```
virtualenv .venv
@@ -31,13 +36,17 @@ source .venv/bin/activate
pip install --require-hashes -r dev-requirements.txt
```
-#### Update dependencies
+#### Update Dependencies
-To add or update a dependency, modify either `dev-requirements.in` and `requirements.in` and then run `make update-pip-dependencies`. This will generate `dev-requirements.txt` and `requirements.txt`.
+To add or update a dependency, modify either `dev-requirements.in` or
+`requirements.in` and then run `make update-pip-dependencies`. This will
+generate `dev-requirements.txt` and `requirements.txt`.
-**IMPORTANT:** Do not modify `build-requirements.txt` during normal development. We use a pip mirror for our build process and the hashes in that file point to wheels on our mirror.
+⚠ **IMPORTANT:** Do not modify `build-requirements.txt` during normal
+development. We use a pip mirror for our build process and the hashes in that
+file point to wheels on our mirror.
-#### configuration +#### Configuration The proxy script must be run with the path to its configuration file as its first argument. This repo includes an example configuration @@ -50,7 +59,6 @@ following values: - `dev` - A boolean, where `True` indicates we're running in development mode, any other value (or not set) indicates we're running in production. See below for what that means. - `target_vm` - The name of the VM we should `qvm-move` non-JSON responses to. Must be set if dev is not True. - #### dev vs prod Configuration includes a "dev" attribute. At this point, the only @@ -61,11 +69,11 @@ mode, the file is not moved off the VM, but is saved as a temporary file in `/tmp`. In both cases, the response written to STDOUT includes the name of the new file. -#### tests +#### Tests -Unit tests can be run with `make test` +Unit tests can be run with `make test`. -#### running +#### Example Commands The following commands can be used to demonstrate the proxy. @@ -80,13 +88,13 @@ read at that name under `/tmp`. $ cat examples/html.json | ./sd-proxy.py ./config-example.yaml -Finally, this demonstrates some error handling. The request contains -invalid JSON. The proxy detects that, and prints an error message -(still a valid proxy response). +Finally, this demonstrates some error handling. The request contains invalid +JSON. The proxy detects the malformed request, and prints an error message. +(The error message itself is considered a valid proxy response). $ cat examples/bad.json | ./sd-proxy.py ./config-example.yaml -#### Qubes integration +#### Qubes Integration Until we determine how we wish to package and install this script, demonstrating the proxy in a Qubes environment is a somewhat manual @@ -100,12 +108,12 @@ this documentation, we assume the client is running in Edit `qubes/securedrop.Proxy` to reflect the path to `entrypoint.sh` in this repo. Also edit the directory to this repo code in `entrypoint.sh`. 
-Next, run `sudo cp qubes/securedrop.Proxy /etc/qubes-rpc/securedrop.Proxy`, which will move `securedrop.Proxy` -(the qubes-rpc "server path definition" file) into place in -`/etc/qubes-rpc/`. +Next, run `sudo cp qubes/securedrop.Proxy /etc/qubes-rpc/securedrop.Proxy`. +This will move `securedrop.Proxy` (the qubes-rpc "server path definition" file) +into place in `/etc/qubes-rpc/`. -On `dom0`, create the file `/etc/qubes-rpc/policy/securedrop.Proxy` -with the contents: +In `dom0`, create the file `/etc/qubes-rpc/policy/securedrop.Proxy` +with the contents securedrop-client securedrop-proxy allow $anyvm $anyvm deny @@ -123,14 +131,15 @@ names for your environment). This allows non-JSON responses to be moved to the client VM using Qubes' native inter-VM file copy service. Copy `config-example.yaml` to `config.yaml`, and edit it to reflect -your situation- check that `target_vm` is set to the correct client VM -name, and assure that `dev` is `False`. This documentation assumes +your situation. Ensure that `target_vm` is set to the correct client VM +name, and that `dev` is `False`. This documentation assumes you've left `host` set to `jsonplaceholder.typicode.com`. -Now on the client VM you should be able to do (again replacing `securedrop-proxy` with the name of your proxy AppVM): +At this point, in the client VM you should be able to do $ echo '{"method":"GET","path_query":"/posts?userId=1"}' | /usr/lib/qubes/qrexec-client-vm securedrop-proxy securedrop.Proxy +(again replacing `securedrop-proxy` with the name of your proxy AppVM) You should see a successful JSON response as returned by the remote server. Try now @@ -145,11 +154,11 @@ response which include a `body` which looks like: } If you look in `~/QubesIncoming/securedrop-proxy`, you should see a -new file with that name. The content of that file will be the content +new file with that name. The content of that file will reflect the content returned by the remote server. -Finally, try invoking an error. 
Provide an invalid JSON request, and -notice you receive a `400` response from the proxy: +Finally, try invoking an error by providing an invalid JSON request. +Notice that you receive a `400` response from the proxy: $ echo '[INVALID' | /usr/lib/qubes/qrexec-client-vm securedrop-proxy securedrop.Proxy {"body": "{\"error\": \"Invalid JSON in request\"}", "version": "0.1.1", "status": 400, "headers": {"Content-Type": "application/json"}} From 55a2e41dd5125ab829facef2916cb56f472922ad Mon Sep 17 00:00:00 2001 From: pierwill <19642016+pierwill@users.noreply.github.com> Date: Mon, 19 Aug 2019 11:38:39 -0500 Subject: [PATCH 078/352] Update qrexec policy keyword characters --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0207698be..6574fcdd4 100644 --- a/README.md +++ b/README.md @@ -108,7 +108,7 @@ On `dom0`, create the file `/etc/qubes-rpc/policy/securedrop.Proxy` with the contents: securedrop-client securedrop-proxy allow - $anyvm $anyvm deny + @anyvm @anyvm deny (replacing the VM names with the correct source and destination names for your environment) From c486954bafbdef5ce0b0e45d19978bbd9f5c7004 Mon Sep 17 00:00:00 2001 From: pierwill <19642016+pierwill@users.noreply.github.com> Date: Mon, 19 Aug 2019 11:40:20 -0500 Subject: [PATCH 079/352] Clarify instructions for qrexec policy file contents --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 6574fcdd4..8ae97a1ec 100644 --- a/README.md +++ b/README.md @@ -110,8 +110,8 @@ with the contents: securedrop-client securedrop-proxy allow @anyvm @anyvm deny -(replacing the VM names with the correct source and destination names -for your environment) +Replace the VM names in the first line above with the correct source and +destination names for your environment. The second line should appear as is. 
Also in `dom0`, edit `/etc/qubes-rpc/policy/qubes.Filecopy`, to add near the top:
From b1a12d25d1bd7cf41b5fc26c20c58f987777a1d1 Mon Sep 17 00:00:00 2001
From: mickael e
Date: Wed, 4 Sep 2019 12:12:23 -0400
Subject: [PATCH 080/352] Address review comments
- Fix formatting
- Reword device section
- Add more error codes for `disk` export method
---
 README.md | 60 ++++++++++++++++++++++++++++++-------------------
 1 file changed, 33 insertions(+), 27 deletions(-)
diff --git a/README.md b/README.md
index 1346cd7f8..d014aa237 100644
--- a/README.md
+++ b/README.md
@@ -54,43 +54,50 @@ Example archive metadata (`metadata.json`):
### Devices
-For all 5 devices described below, there are three generic errors that apply:
+For all device types (described in detail below), the following standard error types can be returned:
-`ERROR_FILE_NOT_FOUND`: No file has been specified or the path is incorrect
-`ERROR_EXTRACTION`: Error while extracting the archive
-`ERROR_METADATA_PARSING`: The metadata.json file cannot be correctly parsed
-`ERROR_ARCHIVE_METADATA`: The metadata failed the check
-`ERROR_GENERIC`: An uncaught (unexpected) error somewhere in the script. These should not happen unless the code improperly handles errors
+- `ERROR_FILE_NOT_FOUND`: No file has been specified or the path is incorrect
+- `ERROR_EXTRACTION`: Error while extracting the archive
+- `ERROR_METADATA_PARSING`: The metadata.json file cannot be correctly parsed
+- `ERROR_ARCHIVE_METADATA`: The metadata failed the check
+- `ERROR_USB_CONFIGURATION`: There is no USB controller attached to the VM, the dom0 configuration (in `config.json`) or USB device identifier is misconfigured
+- `ERROR_GENERIC`: An uncaught (unexpected) error somewhere in the script. These should not happen unless the code improperly handles errors
-The list of devices are as follows:
+The supported device types for export are as follows, including the possible errors specific to that device type:
1. 
`usb-test` : Preflight check that probes for USB connected devices, that returns: -`USB_CONNECTED` if a USB device is attached to the dedicated slot -`USB_NOT_CONNECTED` if no USB is attached -`USB_CHECK_ERROR` if an error occurred during pre-flight + - `USB_CONNECTED` if a USB device is attached to the dedicated slot + - `USB_NOT_CONNECTED` if no USB is attached + - `USB_CHECK_ERROR` if an error occurred during pre-flight + 2. `disk-test`: Preflight check that checks for LUKS-encrypted volume that returns: -`USB_ENCRYPTED` if a LUKS volume is attached to sd-export -`USB_ENCRYPTION_NOT_SUPPORTED` if a LUKS volume is not attached or there was any other error -`USB_DISK_ERROR` + - `USB_ENCRYPTED` if a LUKS volume is attached to sd-export + - `USB_ENCRYPTION_NOT_SUPPORTED` if a LUKS volume is not attached or there was any other error + - `USB_DISK_ERROR` + 3. `printer-test`: prints a test page that returns: -`ERROR_PRINTER_NOT_FOUND` if no printer is connected -`ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script -`ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available -`ERROR_PRINTER_INSTALL` If there is an error installing the printer -`ERROR_PRINT` if there is an error printing + - `ERROR_PRINTER_NOT_FOUND` if no printer is connected + - `ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script + - `ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available + - `ERROR_PRINTER_INSTALL` If there is an error installing the printer + - `ERROR_PRINT` if there is an error printing + 4. 
`printer`: sends files to printer that returns: -`ERROR_PRINTER_NOT_FOUND` if no printer is connected -`ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script -`ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available -`ERROR_PRINTER_INSTALL` If there is an error installing the printer -`ERROR_PRINT` if there is an error printing + - `ERROR_PRINTER_NOT_FOUND` if no printer is connected + - `ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script + - `ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available + - `ERROR_PRINTER_INSTALL` If there is an error installing the printer + - `ERROR_PRINT` if there is an error printing + + +5. `disk`: sends files to disk that returns: + - `USB_BAD_PASSPHRASE` if the luks decryption failed (likely due to bad passphrase) + - `ERROR_USB_MOUNT` if there was an error mounting the volume (after unlocking the luks volume) + - `ERROR_USB_WRITE` if there was an error writing to disk (e.g., no space left on device) -5. `disk`: sends files to disk: -All files in `export_data` will be sent to disk -`encryption_method` and `encryption_passphrase` specify the device encryption settings ### Export Folder Structure @@ -113,4 +120,3 @@ Example folder structure of USB export drive: │ │ └── file-to-export-1.doc └── snug seek ``` - From e2a489095786af1fa25f71b03ef42211c81a2ea4 Mon Sep 17 00:00:00 2001 From: Conor Schaefer Date: Wed, 4 Sep 2019 09:27:07 -0700 Subject: [PATCH 081/352] Minor formatting cleanup Tiny readability improvements, since we'll be referring back to this document frequently. 
Specifically: * use in-line for documenting file extension * greater indentation in sublists * use "definition list" formatting for metadata defns --- README.md | 60 +++++++++++++++++++++++++++---------------------------- 1 file changed, 29 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index d014aa237..803c3a0a1 100644 --- a/README.md +++ b/README.md @@ -7,15 +7,17 @@ Code for exporting and printing files from the SecureDrop Qubes Workstation. ## Export Archive Format -Export archive format is defined as a gzipped tar archive whose extension ends with .sd-export. +Export archive format is defined as a gzipped tar archive whose extension ends with `.sd-export`. ### Archive Contents Once extracted, the archive will contain two elements: -`metadata.json` : file containing export metadata, a file containing information about the archive and the export operation +`metadata.json` +: file containing export metadata, a file containing information about the archive and the export operation -`export_data`: folder containing the raw files to export +`export_data` +: folder containing the raw files to export Example archive structure: @@ -32,15 +34,16 @@ Example archive structure: ### Archive Metadata Metadata contains three possible keys which may contain several possible values: + `device` -`device` specifies the method used for export, and can be either a device or a preflight check. See the Devices section below for possible values. It is a required key. +: specifies the method used for export, and can be either a device or a preflight check. See the Devices section below for possible values. It is a required key. -`encryption_method`: -`encryption_method` is used exclusively when exporting to USB storage. It is an optional key. Possible values are: +`encryption_method` +: used exclusively when exporting to USB storage. It is an optional key. 
Possible values are: luks `encryption_passphrase` -`encryption_passphrase` is used exclusively when exporting to USB storage. It is an optional key. It contains an arbitrary string that contains the disk encryption passphrase of the device. +: used exclusively when exporting to USB storage. It is an optional key. It contains an arbitrary string that contains the disk encryption passphrase of the device. Example archive metadata (`metadata.json`): @@ -66,38 +69,33 @@ For all device types (described in detail below), the following standard error t The supported device types for export are as follows, including the possible errors specific to that device type: 1. `usb-test` : Preflight check that probes for USB connected devices, that returns: - - `USB_CONNECTED` if a USB device is attached to the dedicated slot - - `USB_NOT_CONNECTED` if no USB is attached - - `USB_CHECK_ERROR` if an error occurred during pre-flight - + - `USB_CONNECTED` if a USB device is attached to the dedicated slot + - `USB_NOT_CONNECTED` if no USB is attached + - `USB_CHECK_ERROR` if an error occurred during pre-flight 2. `disk-test`: Preflight check that checks for LUKS-encrypted volume that returns: - - `USB_ENCRYPTED` if a LUKS volume is attached to sd-export - - `USB_ENCRYPTION_NOT_SUPPORTED` if a LUKS volume is not attached or there was any other error - - `USB_DISK_ERROR` - + - `USB_ENCRYPTED` if a LUKS volume is attached to sd-export + - `USB_ENCRYPTION_NOT_SUPPORTED` if a LUKS volume is not attached or there was any other error + - `USB_DISK_ERROR` 3. 
`printer-test`: prints a test page that returns: - - `ERROR_PRINTER_NOT_FOUND` if no printer is connected - - `ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script - - `ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available - - `ERROR_PRINTER_INSTALL` If there is an error installing the printer - - `ERROR_PRINT` if there is an error printing - + - `ERROR_PRINTER_NOT_FOUND` if no printer is connected + - `ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script + - `ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available + - `ERROR_PRINTER_INSTALL` If there is an error installing the printer + - `ERROR_PRINT` if there is an error printing 4. `printer`: sends files to printer that returns: - - `ERROR_PRINTER_NOT_FOUND` if no printer is connected - - `ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script - - `ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available - - `ERROR_PRINTER_INSTALL` If there is an error installing the printer - - `ERROR_PRINT` if there is an error printing - + - `ERROR_PRINTER_NOT_FOUND` if no printer is connected + - `ERROR_PRINTER_NOT_SUPPORTED` if the printer is not currently supported by the export script + - `ERROR_PRINTER_DRIVER_UNAVAILABLE` if the printer driver is not available + - `ERROR_PRINTER_INSTALL` If there is an error installing the printer + - `ERROR_PRINT` if there is an error printing 5. 
`disk`: sends files to disk that returns: - - `USB_BAD_PASSPHRASE` if the luks decryption failed (likely due to bad passphrase) - - `ERROR_USB_MOUNT` if there was an error mounting the volume (after unlocking the luks volume) - - `ERROR_USB_WRITE` if there was an error writing to disk (e.g., no space left on device) - + - `USB_BAD_PASSPHRASE` if the luks decryption failed (likely due to bad passphrase) + - `ERROR_USB_MOUNT` if there was an error mounting the volume (after unlocking the luks volume) + - `ERROR_USB_WRITE` if there was an error writing to disk (e.g., no space left on device) ### Export Folder Structure From ca16c57e1a8a8e03ddd178c5ac3a7a5be07b2d66 Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 26 Sep 2019 10:17:48 -0400 Subject: [PATCH 082/352] Add logging to sd-export --- securedrop_export/__init__.py | 1 + securedrop_export/entrypoint.py | 36 ++++++++++++++++++++++++++ securedrop_export/export.py | 45 +++++++++++++++++++++++++++++++-- securedrop_export/main.py | 14 ++++++++++ update_version.sh | 2 ++ 5 files changed, 96 insertions(+), 2 deletions(-) diff --git a/securedrop_export/__init__.py b/securedrop_export/__init__.py index e69de29bb..df9144c54 100644 --- a/securedrop_export/__init__.py +++ b/securedrop_export/__init__.py @@ -0,0 +1 @@ +__version__ = '0.1.1' diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index a868a1594..00e3e35e8 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -1,18 +1,54 @@ +import logging import os import shutil import sys +from logging.handlers import TimedRotatingFileHandler +from securedrop_export import __version__ from securedrop_export import export from securedrop_export import main CONFIG_PATH = "/etc/sd-export-config.json" +DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_export") +def configure_logging(): + """ + All logging related settings are set up by this function. 
+ """ + log_folder = os.path.join(DEFAULT_HOME, 'logs') + if not os.path.exists(log_folder): + os.makedirs(log_folder) + + log_file = os.path.join(DEFAULT_HOME, 'logs', 'export.log') + + # set logging format + log_fmt = ('%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) ' + '%(levelname)s: %(message)s') + formatter = logging.Formatter(log_fmt) + + handler = TimedRotatingFileHandler(log_file) + handler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + + # set up primary log + log = logging.getLogger() + log.setLevel(logging.DEBUG) + log.addHandler(handler) def start(): + try: + configure_logging() + except Exception: + msg = "ERROR_LOGGING" + my_sub.exit_gracefully(msg) + + logging.info('Starting SecureDrop Export {}'.format(__version__)) my_sub = export.SDExport(sys.argv[1], CONFIG_PATH) + try: # Halt immediately if target file is absent if not os.path.exists(my_sub.archive): + logging.info('Archive is not found {}.'.format(my_sub.archive)) msg = "ERROR_FILE_NOT_FOUND" my_sub.exit_gracefully(msg) main.__main__(my_sub) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 6c3ac34f6..4bb3a20b1 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -2,6 +2,7 @@ import datetime import json +import logging import os import shutil import signal @@ -19,6 +20,7 @@ BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" +logger = logging.getLogger(__name__) class Metadata(object): """ @@ -40,20 +42,26 @@ def __init__(self, archive_path): try: with open(self.metadata_path) as f: + logging.info('Parsing archive metadata') json_config = json.loads(f.read()) self.export_method = json_config.get("device", None) self.encryption_method = json_config.get("encryption_method", None) self.encryption_key = json_config.get("encryption_key", None) + logging.info('Exporting to device {} with encryption_method {}'.format(self.export_method, self.encryption_method)) except Exception: + 
logging.error('Metadata parsing failure') raise def is_valid(self): + logging.info('Validating metadata contents') if self.export_method not in self.SUPPORTED_EXPORT_METHODS: + logging.error('Archive metadata: Export method {} is not supported'.format(self.export_method)) return False if self.export_method == "disk": if self.encryption_method not in self.SUPPORTED_ENCRYPTION_METHODS: + logging.error('Archive metadata: Encryption method {} is not supported'.format(self.encryption_method)) return False return True @@ -79,11 +87,15 @@ def __init__(self, archive, config_path): try: with open(config_path) as f: + logging.info('Retrieving VM configuration') json_config = json.loads(f.read()) self.pci_bus_id = json_config.get("pci_bus_id", None) + logging.info('pci_bus_id is {}'.format(self.pci_bus_id)) if self.pci_bus_id is None: - raise + logging.error('pci_bus_id is not set in VM configuration') + raise except Exception: + logger.error("error parsing VM configuration.") self.exit_gracefully("ERROR_CONFIG") def exit_gracefully(self, msg, e=False): @@ -95,12 +107,14 @@ def exit_gracefully(self, msg, e=False): """ sys.stderr.write(msg) sys.stderr.write("\n") + logger.info('Exiting with message: {}'.format(msg)) if e: try: # If the file archive was extracted, delete before returning if os.path.isdir(self.tmpdir): shutil.rmtree(self.tmpdir) e_output = e.output + logger.error(e_output) except Exception: e_output = "" sys.stderr.write(e_output) @@ -127,6 +141,7 @@ def popup_message(self, msg): def extract_tarball(self): try: + logging.info('Extracting tarball {} into {}'.format(self.archive, self.tmpdir)) with tarfile.open(self.archive) as tar: tar.extractall(self.tmpdir) except Exception: @@ -137,18 +152,22 @@ def check_usb_connected(self): # If the USB is not attached via qvm-usb attach, lsusb will return empty string and a # return code of 1 + logging.info('Performing usb preflight') try: - p = subprocess.check_output(["lsusb", "-s", "{}:".format(self.pci_bus_id)]) + p = 
subprocess.check_output(["lsusb", "-s", "{}:".format(self.pci_bus_id)]) + logging.info("lsusb -s {} : {}".format(self.pci_bus_id, p.decode("utf-8"))) except subprocess.CalledProcessError: msg = "ERROR_USB_CONFIGURATION" self.exit_gracefully(msg) n_usb = len(p.decode("utf-8").rstrip().split("\n")) # If there is one device, it is the root hub. if n_usb == 1: + logging.info('usb preflight - no external devices connected') msg = "USB_NOT_CONNECTED" self.exit_gracefully(msg) # If there are two devices, it's the root hub and another device (presumably for export) elif n_usb == 2: + logging.info('usb preflight - external device connected') msg = "USB_CONNECTED" self.exit_gracefully(msg) # Else the result is unexpected @@ -157,6 +176,7 @@ def check_usb_connected(self): self.exit_gracefully(msg) def check_luks_volume(self): + logging.info('Checking if volume is luks-encrypted') try: # cryptsetup isLuks returns 0 if the device is a luks volume # subprocess with throw if the device is not luks (rc !=0) @@ -169,6 +189,7 @@ def check_luks_volume(self): def unlock_luks_volume(self, encryption_key): # the luks device is not already unlocked + logging.info('Unlocking luks volume {}'.format(self.encrypted_device)) if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): p = subprocess.Popen( ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], @@ -176,9 +197,11 @@ def unlock_luks_volume(self, encryption_key): stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) + logging.info('Passing key') p.communicate(input=str.encode(encryption_key, "utf-8")) rc = p.returncode if rc != 0: + logging.error('Bad phassphrase for {}',format(self.encrypted_device)) msg = "USB_BAD_PASSPHRASE" self.exit_gracefully(msg) @@ -187,6 +210,7 @@ def mount_volume(self): if not os.path.exists(self.mountpoint): subprocess.check_call(["sudo", "mkdir", self.mountpoint]) try: + logging.info('Mounting {} to {}'.format(self.encrypted_device, self.mountpoint)) subprocess.check_call( 
[ "sudo", @@ -198,6 +222,8 @@ def mount_volume(self): subprocess.check_call(["sudo", "chown", "-R", "user:user", self.mountpoint]) except subprocess.CalledProcessError: # clean up + logging.error('Error mounting {} to {}'.format(self.encrypted_device, self.mountpoint)) + logging.info('Locking luks volume {}'.format(self.encrypted_device)) subprocess.check_call( ["sudo", "cryptsetup", "luksClose", self.encrypted_device] ) @@ -210,7 +236,9 @@ def copy_submission(self): target_path = os.path.join(self.mountpoint, self.target_dirname) subprocess.check_call(["mkdir", target_path]) export_data = os.path.join(self.tmpdir, "export_data/") + logging.info('Copying file to {}'.format(self.target_dirname)) subprocess.check_call(["cp", "-r", export_data, target_path]) + logging.info('File copied successfully to {}'.format(self.target_dirname)) self.popup_message("Files exported successfully to disk.") except (subprocess.CalledProcessError, OSError): msg = "ERROR_USB_WRITE" @@ -218,11 +246,15 @@ def copy_submission(self): finally: # Finally, we sync the filesystem, unmount the drive and lock the # luks volume, and exit 0 + logging.info('Syncing filesystems') subprocess.check_call(["sync"]) + logging.info('Unmounting drive from {}'.format(self.mountpoint)) subprocess.check_call(["sudo", "umount", self.mountpoint]) + logging.info('Locking luks volume {}'.format(self.encrypted_device)) subprocess.check_call( ["sudo", "cryptsetup", "luksClose", self.encrypted_device] ) + logging.info('Deleting temporary directory {}'.format(self.tmpdir)) subprocess.check_call(["rm", "-rf", self.tmpdir]) sys.exit(0) @@ -234,8 +266,10 @@ def wait_for_print(self): printer_idle_string = "printer {} is idle".format(self.printer_name) while True: try: + logging.info('Running lpstat waiting for printer {}'.format(self.printer_name)) output = subprocess.check_output(["lpstat", "-p", self.printer_name]) if printer_idle_string in output.decode("utf-8"): + logging.info('Print completed') return True else: 
time.sleep(5) @@ -243,6 +277,7 @@ def wait_for_print(self): msg = "ERROR_PRINT" self.exit_gracefully(msg) except TimeoutException: + logging.error('Timeout waiting for printer {}'.format(self.printer_name)) msg = "ERROR_PRINT" self.exit_gracefully(msg) return True @@ -260,15 +295,19 @@ def get_printer_uri(self): for line in output.split(): if "usb://" in line.decode("utf-8"): printer_uri = line.decode("utf-8") + logging.info('lpinfo usb printer: {}'.format(printer_uri)) # verify that the printer is supported, else exit if printer_uri == "": # No usb printer is connected + logging.info('No usb printers connected') self.exit_gracefully("ERROR_PRINTER_NOT_FOUND") elif "Brother" in printer_uri: + logging.info('Printer {} is supported'.format(printer_uri)) return printer_uri else: # printer url is a make that is unsupported + logging.info('Printer {} is unsupported'.format(printer_uri)) self.exit_gracefully("ERROR_PRINTER_NOT_SUPPORTED") def install_printer_ppd(self, uri): @@ -352,12 +391,14 @@ def print_file(self, file_to_print): # if the file to print is an (open)office document, we need to call unoconf to convert # the file to pdf as printer drivers do not immediately support this format out of the box if self.is_open_office_file(file_to_print): + logging.info('Converting Office document to pdf for printing'.format(self.printer_name)) folder = os.path.dirname(file_to_print) converted_filename = file_to_print + ".pdf" converted_path = os.path.join(folder, converted_filename) subprocess.check_call(["unoconv", "-o", converted_path, file_to_print]) file_to_print = converted_path + logging.info('Sending file to printer {}:{}'.format(self.printer_name)) subprocess.check_call(["xpp", "-P", self.printer_name, file_to_print]) except subprocess.CalledProcessError: msg = "ERROR_PRINT" diff --git a/securedrop_export/main.py b/securedrop_export/main.py index e9ae86da4..f45f9305f 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -1,5 +1,8 @@ +import logging 
+ from securedrop_export import export +logger = logging.getLogger(__name__) def __main__(submission): submission.extract_tarball() @@ -11,19 +14,30 @@ def __main__(submission): if submission.archive_metadata.is_valid(): if submission.archive_metadata.export_method == "usb-test": + logging.info('Export archive is usb-test') submission.check_usb_connected() elif submission.archive_metadata.export_method == "disk": + logging.info('Export archive is disk') + logging.info('Unlocking volume') # exports all documents in the archive to luks-encrypted volume submission.unlock_luks_volume(submission.archive_metadata.encryption_key) + logging.info('Mounting volume') submission.mount_volume() + logging.info('Copying submission to drive') submission.copy_submission() elif submission.archive_metadata.export_method == "disk-test": + logging.info('Export archive is disk-test') submission.check_luks_volume() elif submission.archive_metadata.export_method == "printer": + logging.info('Export archive is printer') # prints all documents in the archive + logging.info('Searching for printer') printer_uri = submission.get_printer_uri() + logging.info('Installing printer drivers') printer_ppd = submission.install_printer_ppd(printer_uri) + logging.info('Setting up printer') submission.setup_printer(printer_uri, printer_ppd) + logging.info('Printing files') submission.print_all_files() elif submission.archive_metadata.export_method == "printer-test": # Prints a test page to ensure the printer is functional diff --git a/update_version.sh b/update_version.sh index 983fd4e8d..f66ee435d 100755 --- a/update_version.sh +++ b/update_version.sh @@ -21,6 +21,8 @@ fi if [[ "$OSTYPE" == "darwin"* ]]; then # The empty '' after sed -i is required on macOS to indicate no backup file should be saved. 
sed -i '' "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_export/VERSION + sed -i '' "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_export/__init__.py else sed -i "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_export/VERSION + sed -i "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_export/__init__.py fi From ffab87e55586b89a1c219d412116e9d49e895af3 Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 27 Sep 2019 09:41:34 -0400 Subject: [PATCH 083/352] Add flake8 to ci and fix flake8 findings --- .circleci/config.yml | 25 ++++++++++------ .flake8 | 2 ++ Makefile | 4 +++ securedrop_export/entrypoint.py | 4 ++- securedrop_export/export.py | 37 +++++++++++++++++------- securedrop_export/main.py | 1 + test-requirements.in | 1 + test-requirements.txt | 51 +++++++++++++++------------------ tests/test_export.py | 31 ++++++++++---------- 9 files changed, 93 insertions(+), 63 deletions(-) create mode 100644 .flake8 diff --git a/.circleci/config.yml b/.circleci/config.yml index baae95224..d53dabaf2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,10 +1,23 @@ version: 2 jobs: - build: + lint: docker: - - image: circleci/python:3.5-stretch + - image: circleci/python:3.5 steps: - checkout + - run: + name: Install test requirements and run lint + command: | + virtualenv .venv + source .venv/bin/activate + pip install --require-hashes -r test-requirements.txt + make lint + - run: + name: Check Python dependencies for CVEs + command: | + set -e + source .venv/bin/activate + make safety test: docker: @@ -22,14 +35,10 @@ jobs: source .venv/bin/activate pip install --require-hashes -r test-requirements.txt make test - - run: - name: Check Python dependencies for CVEs - command: | - set -e - source .venv/bin/activate - make safety workflows: version: 2 securedrop_export_ci: jobs: + - lint - test + diff --git a/.flake8 b/.flake8 new file mode 100644 index 
000000000..61d908155 --- /dev/null +++ b/.flake8 @@ -0,0 +1,2 @@ +[flake8] +max-line-length = 99 diff --git a/Makefile b/Makefile index 2ee2e801d..079b9e379 100644 --- a/Makefile +++ b/Makefile @@ -16,6 +16,10 @@ update-pip-requirements: ## Updates all Python requirements files via pip-compil test: pytest -v tests/ +.PHONY: lint +lint: + flake8 securedrop_export/ tests/ + # Explaination of the below shell command should it ever break. # 1. Set the field separator to ": ##" and any make targets that might appear between : and ## # 2. Use sed-like syntax to remove the make targets diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index 00e3e35e8..f2d837202 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -11,6 +11,7 @@ CONFIG_PATH = "/etc/sd-export-config.json" DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_export") + def configure_logging(): """ All logging related settings are set up by this function. @@ -35,12 +36,13 @@ def configure_logging(): log.setLevel(logging.DEBUG) log.addHandler(handler) + def start(): try: configure_logging() except Exception: msg = "ERROR_LOGGING" - my_sub.exit_gracefully(msg) + export.SDExport.exit_gracefully(msg) logging.info('Starting SecureDrop Export {}'.format(__version__)) my_sub = export.SDExport(sys.argv[1], CONFIG_PATH) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 4bb3a20b1..febd54efa 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -22,6 +22,7 @@ logger = logging.getLogger(__name__) + class Metadata(object): """ Object to parse, validate and store json metadata from the sd-export archive. 
@@ -46,8 +47,14 @@ def __init__(self, archive_path): json_config = json.loads(f.read()) self.export_method = json_config.get("device", None) self.encryption_method = json_config.get("encryption_method", None) - self.encryption_key = json_config.get("encryption_key", None) - logging.info('Exporting to device {} with encryption_method {}'.format(self.export_method, self.encryption_method)) + self.encryption_key = json_config.get( + "encryption_key", None + ) + logging.info( + 'Exporting to device {} with encryption_method {}'.format( + self.export_method, self.encryption_method + ) + ) except Exception: logging.error('Metadata parsing failure') @@ -56,12 +63,20 @@ def __init__(self, archive_path): def is_valid(self): logging.info('Validating metadata contents') if self.export_method not in self.SUPPORTED_EXPORT_METHODS: - logging.error('Archive metadata: Export method {} is not supported'.format(self.export_method)) + logging.error( + 'Archive metadata: Export method {} is not supported'.format( + self.export_method + ) + ) return False if self.export_method == "disk": if self.encryption_method not in self.SUPPORTED_ENCRYPTION_METHODS: - logging.error('Archive metadata: Encryption method {} is not supported'.format(self.encryption_method)) + logging.error( + 'Archive metadata: Encryption method {} is not supported'.format( + self.encryption_method + ) + ) return False return True @@ -180,7 +195,7 @@ def check_luks_volume(self): try: # cryptsetup isLuks returns 0 if the device is a luks volume # subprocess with throw if the device is not luks (rc !=0) - p = subprocess.check_call(["sudo", "cryptsetup", "isLuks", DEVICE]) + subprocess.check_call(["sudo", "cryptsetup", "isLuks", DEVICE]) msg = "USB_ENCRYPTED" self.exit_gracefully(msg) except subprocess.CalledProcessError: @@ -195,13 +210,13 @@ def unlock_luks_volume(self, encryption_key): ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], stdin=subprocess.PIPE, stdout=subprocess.PIPE, - 
stderr=subprocess.PIPE, + stderr=subprocess.PIPE ) logging.info('Passing key') p.communicate(input=str.encode(encryption_key, "utf-8")) rc = p.returncode if rc != 0: - logging.error('Bad phassphrase for {}',format(self.encrypted_device)) + logging.error('Bad phassphrase for {}'.format(self.encrypted_device)) msg = "USB_BAD_PASSPHRASE" self.exit_gracefully(msg) @@ -388,10 +403,10 @@ def is_open_office_file(self, filename): def print_file(self, file_to_print): try: - # if the file to print is an (open)office document, we need to call unoconf to convert - # the file to pdf as printer drivers do not immediately support this format out of the box + # If the file to print is an (open)office document, we need to call unoconf to + # convert the file to pdf as printer drivers do not support this format if self.is_open_office_file(file_to_print): - logging.info('Converting Office document to pdf for printing'.format(self.printer_name)) + logging.info('Converting Office document to pdf'.format(self.printer_name)) folder = os.path.dirname(file_to_print) converted_filename = file_to_print + ".pdf" converted_path = os.path.join(folder, converted_filename) @@ -405,7 +420,7 @@ def print_file(self, file_to_print): self.exit_gracefully(msg) -## class ends here +# class ends here class TimeoutException(Exception): pass diff --git a/securedrop_export/main.py b/securedrop_export/main.py index f45f9305f..00ca144b1 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -4,6 +4,7 @@ logger = logging.getLogger(__name__) + def __main__(submission): submission.extract_tarball() diff --git a/test-requirements.in b/test-requirements.in index e079f8a60..28ecacab6 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1 +1,2 @@ +flake8 pytest diff --git a/test-requirements.txt b/test-requirements.txt index c1a39ee1b..8eb119b9c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # 
-# pip-compile --generate-hashes --output-file test-requirements.txt test-requirements.in +# pip-compile --generate-hashes --output-file=test-requirements.txt test-requirements.in # atomicwrites==1.3.0 \ --hash=sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4 \ @@ -12,22 +12,21 @@ attrs==19.1.0 \ --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \ --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399 \ # via pytest -configparser==3.7.4 \ - --hash=sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32 \ - --hash=sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75 \ - # via importlib-metadata -contextlib2==0.5.5 \ - --hash=sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48 \ - --hash=sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00 \ - # via importlib-metadata -funcsigs==1.0.2 \ - --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \ - --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \ - # via pytest +entrypoints==0.3 \ + --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ + --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ + # via flake8 +flake8==3.7.8 \ + --hash=sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548 \ + --hash=sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696 importlib-metadata==0.18 \ --hash=sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7 \ --hash=sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db \ # via pluggy, pytest +mccabe==0.6.1 \ + --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ + # via flake8 more-itertools==5.0.0 \ 
--hash=sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4 \ --hash=sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc \ @@ -40,7 +39,7 @@ packaging==19.0 \ pathlib2==2.3.4 \ --hash=sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e \ --hash=sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8 \ - # via importlib-metadata, pytest + # via pytest pluggy==0.12.0 \ --hash=sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc \ --hash=sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c \ @@ -49,25 +48,21 @@ py==1.8.0 \ --hash=sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa \ --hash=sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53 \ # via pytest +pycodestyle==2.5.0 \ + --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ + --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c \ + # via flake8 +pyflakes==2.1.1 \ + --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \ + --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 \ + # via flake8 pyparsing==2.4.1.1 \ --hash=sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580 \ - --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 + --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 \ + # via packaging pytest==4.6.4 \ --hash=sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae \ --hash=sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6 -scandir==1.10.0 \ - --hash=sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e \ - --hash=sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022 \ - --hash=sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f \ - 
--hash=sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f \ - --hash=sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae \ - --hash=sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173 \ - --hash=sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4 \ - --hash=sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32 \ - --hash=sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188 \ - --hash=sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d \ - --hash=sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac \ - # via pathlib2 six==1.12.0 \ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ diff --git a/tests/test_export.py b/tests/test_export.py index d0a294659..7c6e45244 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -2,7 +2,7 @@ import os import pytest -import subprocess +import subprocess # noqa: F401 import tempfile from securedrop_export import export @@ -23,7 +23,7 @@ def test_bad_sd_export_config_invalid_json(capsys): expected_message = "ERROR_CONFIG" with pytest.raises(SystemExit) as sysexit: - submission = export.SDExport("", BAD_TEST_CONFIG) + export.SDExport("", BAD_TEST_CONFIG) # A graceful exit means a return code of 0 assert sysexit.value.code == 0 @@ -36,7 +36,7 @@ def test_bad_sd_export_config_invalid_value(capsys): expected_message = "ERROR_CONFIG" with pytest.raises(SystemExit) as sysexit: - submission = export.SDExport("", ANOTHER_BAD_TEST_CONFIG) + export.SDExport("", ANOTHER_BAD_TEST_CONFIG) # A graceful exit means a return code of 0 assert sysexit.value.code == 0 @@ -71,8 +71,9 @@ def test_exit_gracefully_exception(capsys): test_msg = 'test' with pytest.raises(SystemExit) as sysexit: - submission.exit_gracefully(test_msg, - e=Exception('BANG!')) + 
submission.exit_gracefully( + test_msg, e=Exception('BANG!') + ) # A graceful exit means a return code of 0 assert sysexit.value.code == 0 @@ -83,7 +84,7 @@ def test_exit_gracefully_exception(capsys): def test_empty_config(capsys): - submission = export.SDExport("testfile", TEST_CONFIG) + export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -93,7 +94,7 @@ def test_empty_config(capsys): def test_valid_printer_test_config(capsys): - submission = export.SDExport("testfile", TEST_CONFIG) + export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -105,7 +106,7 @@ def test_valid_printer_test_config(capsys): def test_valid_printer_config(capsys): - submission = export.SDExport("", TEST_CONFIG) + export.SDExport("", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -117,7 +118,7 @@ def test_valid_printer_config(capsys): def test_invalid_encryption_config(capsys): - submission = export.SDExport("testfile", TEST_CONFIG) + export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) @@ -132,7 +133,7 @@ def test_invalid_encryption_config(capsys): def test_valid_encryption_config(capsys): - submission = export.SDExport("testfile", TEST_CONFIG) + export.SDExport("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -209,7 +210,7 @@ def test_usb_precheck_connected(mocked_call, capsys): expected_message = "USB_NOT_CONNECTED" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: - result = 
submission.check_usb_connected() + submission.check_usb_connected() mocked_exit.assert_called_once_with(expected_message) assert sysexit.value.code == 0 @@ -223,7 +224,7 @@ def test_usb_precheck_disconnected(mocked_call, capsys): expected_message = "USB_CONNECTED" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: - result = submission.check_usb_connected() + submission.check_usb_connected() mocked_exit.assert_called_once_with(expected_message) assert sysexit.value.code == 0 @@ -237,7 +238,7 @@ def test_usb_precheck_error(mocked_call, capsys): expected_message = "ERROR_USB_CHECK" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: - result = submission.check_usb_connected() + submission.check_usb_connected() mocked_exit.assert_called_once_with(expected_message) assert sysexit.value.code == 0 @@ -251,7 +252,7 @@ def test_usb_precheck_error_2(mocked_call, capsys): expected_message = "ERROR_USB_CHECK" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: - result = submission.check_usb_connected() + submission.check_usb_connected() mocked_exit.assert_called_once_with(expected_message) assert sysexit.value.code == 0 @@ -266,7 +267,7 @@ def test_luks_precheck_encrypted(mocked_call, capsys): mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: - result = submission.check_luks_volume() + submission.check_luks_volume() mocked_exit.assert_called_once_with(expected_message) assert sysexit.value.code == 0 captured = capsys.readouterr() From de1e4a2b0f8c7ece8472d1fb0de470ecae9eedba Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 26 Sep 2019 12:19:56 -0400 Subject: [PATCH 084/352] Bump securedrop-export to 0.1.2 --- securedrop_export/VERSION | 2 +- securedrop_export/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/securedrop_export/VERSION b/securedrop_export/VERSION index 17e51c385..d917d3e26 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.1.1 +0.1.2 diff --git a/securedrop_export/__init__.py b/securedrop_export/__init__.py index df9144c54..10939f01b 100644 --- a/securedrop_export/__init__.py +++ b/securedrop_export/__init__.py @@ -1 +1 @@ -__version__ = '0.1.1' +__version__ = '0.1.2' From 021dab1d75f228fae5a651046756338b6f2491f6 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 17 Oct 2019 07:55:37 -0400 Subject: [PATCH 085/352] ci: parallel stretch/buster jobs --- .circleci/config.yml | 53 ++++++++++++++++++++++++++++---------------- 1 file changed, 34 insertions(+), 19 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0ec6d2a5f..d5a8ee3c2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,25 @@ +--- +common-steps: + - &run_tests + run: + name: Install requirements and run tests + command: | + virtualenv .venv + source .venv/bin/activate + pip install --require-hashes -r dev-requirements.txt + make test + + - &check_python_dependencies_for_vulns + run: + name: Check Python dependencies for CVEs + command: | + set -e + source .venv/bin/activate + make safety + version: 2 jobs: - build: + build-stretch: docker: - image: circleci/python:3.5-stretch steps: @@ -30,30 +49,26 @@ jobs: export PKG_PATH=~/project/dist/securedrop-proxy-$PKG_VERSION.tar.gz make securedrop-proxy - test: + test-stretch: docker: - - image: circleci/python:3.5 + - image: circleci/python:3.5-stretch steps: - checkout + - *run_tests + - *check_python_dependencies_for_vulns - - run: - name: Install requirements and run tests - command: | - virtualenv .venv - source .venv/bin/activate - pip install --require-hashes -r dev-requirements.txt - make test - - - run: - name: Check Python dependencies for CVEs - command: | - set -e - source .venv/bin/activate - make safety + test-buster: + docker: + - image: 
circleci/python:3.7-buster + steps: + - checkout + - *run_tests + - *check_python_dependencies_for_vulns workflows: version: 2 securedrop_proxy_ci: jobs: - - test - - build + - test-stretch + - test-buster + - build-stretch From 0dffa8fce1ecf47c4d658373c81d0d6a45fedec8 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 22 Oct 2019 14:56:48 -0700 Subject: [PATCH 086/352] add logger and logging to proxy --- securedrop_proxy/entrypoint.py | 61 ++++++++++++++++++++++++++++++---- securedrop_proxy/main.py | 18 +++++++--- securedrop_proxy/proxy.py | 17 +++++++--- 3 files changed, 80 insertions(+), 16 deletions(-) diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index 2bf7211db..250b4652b 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -6,18 +6,28 @@ # called with exactly one argument: the path to its config file. See # the README for configuration options. -import sys import json -import uuid +import logging +import os import subprocess +import sys +import uuid + +from logging.handlers import TimedRotatingFileHandler -from . import proxy -from . import config -from . import callbacks -from . import main +from securedrop_proxy import callbacks +from securedrop_proxy import config +from securedrop_proxy import main +from securedrop_proxy import proxy +from securedrop_proxy.version import version + + +DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_proxy") def start(): + logging.info('Starting SecureDrop Proxy {}'.format(version)) + # a fresh, new proxy object p = proxy.Proxy() @@ -44,3 +54,42 @@ def start(): incoming = "\n".join(incoming) main.__main__(incoming, p) + + +def excepthook(*exc_args): + ''' + This function is called in the event of a catastrophic failure. + Log exception and exit cleanly. 
+ ''' + logging.error('Unrecoverable error', exc_info=(exc_args)) + sys.__excepthook__(*exc_args) + print('') # force terminal prompt on to a new line + sys.exit(1) + + +def configure_logging(sdc_home: str) -> None: + ''' + All logging related settings are set up by this function. + ''' + log_folder = os.path.join(DEFAULT_HOME, 'logs') + if not os.path.exists(log_folder): + os.makedirs(log_folder) + + log_file = os.path.join(DEFAULT_HOME, 'logs', 'proxy.log') + + # set logging format + log_fmt = ('%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) %(levelname)s: %(message)s') + formatter = logging.Formatter(log_fmt) + + # define log handlers such as for rotating log files + handler = TimedRotatingFileHandler(log_file) + handler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + + # set up primary log + log = logging.getLogger() + log.setLevel(logging.DEBUG) + log.addHandler(handler) + + # override excepthook to capture a log of catastrophic failures. + sys.excepthook = excepthook diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py index 2333bb1be..6fadafaf5 100644 --- a/securedrop_proxy/main.py +++ b/securedrop_proxy/main.py @@ -1,24 +1,33 @@ import json +import logging from securedrop_proxy import callbacks from securedrop_proxy import proxy +logger = logging.getLogger(__name__) + + def __main__(incoming, p): - # deserialize incoming request + ''' + Deserialize incoming request in order to build and send a proxy request. 
+ ''' + logging.debug('Creating request to be sent by proxy') + client_req = None try: client_req = json.loads(incoming) - except json.decoder.JSONDecodeError: + except json.decoder.JSONDecodeError as e: + logging.error(e) p.simple_error(400, 'Invalid JSON in request') p.on_done(p.res) - # build request oject req = proxy.Req() try: req.method = client_req['method'] req.path_query = client_req['path_query'] - except KeyError: + except KeyError as e: + logging.error(e) p.simple_error(400, 'Missing keys in request') p.on_done(p.res) @@ -28,7 +37,6 @@ def __main__(incoming, p): if "body" in client_req: req.body = client_req['body'] - # complete proxy object p.req = req p.on_save = callbacks.on_save p.on_done = callbacks.on_done diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 9e0f3a191..6b5061931 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -1,12 +1,16 @@ -import requests import furl -import tempfile import json +import logging +import requests +import tempfile import werkzeug import securedrop_proxy.version as version +logger = logging.getLogger(__name__) + + class Req: def __init__(self): self.method = "" @@ -71,7 +75,7 @@ def prep_request(self): try: url = furl.furl("{}://{}:{}/{}".format(scheme, host, port, path)) except Exception as e: - + logging.error(e) self.simple_error(500, "Proxy error while generating URL to request") raise ValueError("Error generating URL from provided values") @@ -99,7 +103,7 @@ def handle_non_json_response(self): res = Response(self._presp.status_code) # Create a NamedTemporaryFile, we don't want - # to delete it after closing. + # to delete it after closign. 
fh = tempfile.NamedTemporaryFile(delete=False) for c in self._presp.iter_content(10): @@ -114,6 +118,7 @@ def handle_non_json_response(self): self.res = res def handle_response(self): + logging.debug('Handling response') ctype = werkzeug.http.parse_options_header(self._presp.headers["content-type"]) @@ -134,11 +139,13 @@ def proxy(self): raise ValueError("Request callback is not set.") self.prep_request() + logging.debug('Sending request') s = requests.Session() self._presp = s.send(self._prepared_request) self.handle_response() - except ValueError: + except ValueError as e: + logging.error(e) # effectively a 4xx error # we have set self.response to indicate an error From ea7e87ac00c918862a61c11a3a3ebb4dd2325877 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 22 Oct 2019 17:03:11 -0700 Subject: [PATCH 087/352] init logger --- securedrop_proxy/entrypoint.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index 250b4652b..d5bb4838e 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -26,6 +26,16 @@ def start(): + ''' + Set up a new proxy object with an error handler, configuration that we read from argv[1], and + the original user request from STDIN. + ''' + try: + configure_logging() + except Exception as e: + print(e) + return + logging.info('Starting SecureDrop Proxy {}'.format(version)) # a fresh, new proxy object @@ -67,7 +77,7 @@ def excepthook(*exc_args): sys.exit(1) -def configure_logging(sdc_home: str) -> None: +def configure_logging() -> None: ''' All logging related settings are set up by this function. 
''' From 4a70ffb87a195bb6c0adb2888c36f24518e26945 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 23 Oct 2019 15:15:25 -0400 Subject: [PATCH 088/352] ci: ensure that requirements files are in sync this guards against a scenario discovered in: https://github.com/freedomofpress/securedrop-proxy/pull/46#issuecomment-545589326 wherein a contributor would update build-requirements.txt but not requirements.txt --- .circleci/config.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index d5a8ee3c2..a501982b6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -34,6 +34,14 @@ jobs: make install-deps && make fetch-wheels PKG_DIR=~/project make requirements + - run: + name: Ensure that build-requirements.txt and requirements.txt are in sync. + command: | + cd ~/project + # Return 1 if unstaged changes exist (after `make requirements` in the + # previous run step), else return 0. + git diff --quiet + - run: name: Tag and make source tarball command: | From 5527347cde49cbc27b105e34e8dce07dd2e05152 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 30 Oct 2019 17:33:13 -0700 Subject: [PATCH 089/352] add envvar for logging default to info --- securedrop_proxy/entrypoint.py | 3 ++- securedrop_proxy/proxy.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index d5bb4838e..f568983ba 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -23,6 +23,7 @@ DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_proxy") +LOGLEVEL = os.environ.get('LOGLEVEL', 'info').upper() def start(): @@ -98,7 +99,7 @@ def configure_logging() -> None: # set up primary log log = logging.getLogger() - log.setLevel(logging.DEBUG) + log.setLevel(LOGLEVEL) log.addHandler(handler) # override excepthook to capture a log of catastrophic failures. 
diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 6b5061931..cc30b3214 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -103,7 +103,7 @@ def handle_non_json_response(self): res = Response(self._presp.status_code) # Create a NamedTemporaryFile, we don't want - # to delete it after closign. + # to delete it after closing. fh = tempfile.NamedTemporaryFile(delete=False) for c in self._presp.iter_content(10): From 9ff16b6b58d861833e70d3019f19817c04fda0d4 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 31 Oct 2019 19:57:13 +0530 Subject: [PATCH 090/352] Initial code commit --- README.md | 54 +++++++++++++++++++++++++++ oqubes-logging | 95 ++++++++++++++++++++++++++++++++++++++++++++++++ oqubeslogging.py | 40 ++++++++++++++++++++ 3 files changed, 189 insertions(+) create mode 100644 README.md create mode 100644 oqubes-logging create mode 100644 oqubeslogging.py diff --git a/README.md b/README.md new file mode 100644 index 000000000..06095d3b7 --- /dev/null +++ b/README.md @@ -0,0 +1,54 @@ +# OQubes Logging + +This is a PoC logging service based on [Qubes +buildlog](https://github.com/QubesOS/qubes-builder/blob/master/rpc-services/qubesbuilder.BuildLog). + +## How to use/try this? + +In our example, we will use a vm named *logging* for storing logs, and we will use +*workvm* to send in logs to the *logging* vm. + +### In dom0 + +- Create a file `/etc/qubes-rpc/policy/oqubes.Logging` in `dom0` with the following content. + +``` +workvm logging allow +@anyvm @anyvm deny +``` + +### In logging vm + +Add the following content to `/etc/qubes-rpc/oqubes.Logging` + +``` +/usr/sbin/oqubes-logging +``` + +and then place `oqubes-logging` script to `/usr/sbin/` directory and make sure that +it is executable. 
+ +### To use from any Python code in workvm + +Here is an example code using Python logging + +```Python +import logging +from oqubeslogging import OQubesLog + +def main(): + handler = OQubesLog("workvm", "proxy-debian") + logging.basicConfig(level=logging.DEBUG, handlers=[handler]) + logger = logging.getLogger("example") + + logger.info("kushal says it works") + + +if __name__ == "__main__": + main() + +``` + +## The journalctl example + +TODO: add an example of streaming journalctl logs via python code. \ No newline at end of file diff --git a/oqubes-logging b/oqubes-logging new file mode 100644 index 000000000..036a19ee7 --- /dev/null +++ b/oqubes-logging @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 + +from __future__ import print_function + +import tempfile +import io +import sys +import os +import errno +import shutil +import subprocess +from datetime import datetime + + +def sanitize_line(untrusted_line): + line = bytearray(untrusted_line) + for i, c in enumerate(line): + if c >= 0x20 and c <= 0x7e: + pass + else: + line[i] = 0x2e + return bytearray(line).decode('ascii') + +try: + stdin = sys.stdin.buffer # python3 +except AttributeError: + stdin = io.open(0, 'rb') # python2 + +start = datetime.utcnow() + +tmp_log = tempfile.NamedTemporaryFile(prefix="qubes-log_", delete=False) + +qrexec_remote = "qubes:" + + +def log(msg, remote=True, now=None): + if now is None: + now = datetime.utcnow() + if remote: + remote_str = '{}:'.format(qrexec_remote) + else: + remote_str = '>' + + line = '{:%F %T.%f} +0000 {} {}\n'.format(now, remote_str, msg) + + tmp_log.write(line.encode('utf-8')) + +log('starting log', now=start, remote=False) + +# the first line is always the remote vm name +untrusted_line = stdin.readline() +qrexec_remote = untrusted_line.rstrip(b'\n').decode('utf-8') +while True: + untrusted_line = stdin.readline() + if untrusted_line == b'': + break + + log(sanitize_line(untrusted_line.rstrip(b'\n'))) + + +log('closing log', remote=False) +tmp_log.close() + 
+file_name_base = os.path.join( + os.getenv('HOME', '/'), + 'QubesIncomingLogs', + '{remote}', + 'log_{time:%Y-%m-%d_%H-%M-%S}').format( + remote=qrexec_remote, + time=start) + +try: + os.makedirs(os.path.dirname(file_name_base)) +except OSError as err: + if err.errno != errno.EEXIST: + raise + +try_no = 0 +file_name = file_name_base +while True: + if try_no > 0: + file_name = '{}.{}'.format(file_name_base, try_no) + + try: + fd = os.open(file_name, os.O_CREAT | os.O_EXCL, 0o664) + except OSError as err: + if err.errno == errno.EEXIST: + try_no += 1 + continue + raise + + os.close(fd) + break + +shutil.move(tmp_log.name, file_name) diff --git a/oqubeslogging.py b/oqubeslogging.py new file mode 100644 index 000000000..6392b1be7 --- /dev/null +++ b/oqubeslogging.py @@ -0,0 +1,40 @@ +from logging import StreamHandler +from subprocess import Popen, PIPE + + +class Singleton(type): + _ins = {} + + def __call__(cls, *args, **kwargs): + if cls not in cls._ins: + cls._ins[cls] = super(Singleton, cls).__call__(*args, **kwargs) + + return cls._ins[cls] + + +class InternalLog(metaclass=Singleton): + def __init__(self, name, logvmname): + self.process = Popen( + ["/usr/lib/qubes/qrexec-client-vm", logvmname, "oqubes.Logging"], + stdin=PIPE, + stdout=PIPE, + stderr=PIPE, + ) + self.write(name) + + def write(self, text): + data = text + "\n" + data = data.encode("utf-8") + self.process.stdin.write(data) + self.process.stdin.flush() + + +class OQubesLog(StreamHandler): + def __init__(self, name, logvmname): + StreamHandler.__init__(self) + self.qubes_log = InternalLog(name, logvmname) + + def emit(self, record): + msg = self.format(record) + self.qubes_log.write(msg) + return True From be54349afc94e03874b0704cc65201e31143c33b Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 31 Oct 2019 19:58:09 +0530 Subject: [PATCH 091/352] Some examples --- ex1.py | 7 +++++++ ex2.py | 10 ++++++++++ example.py | 22 ++++++++++++++++++++++ 3 files changed, 39 insertions(+) create mode 100644 
ex1.py create mode 100644 ex2.py create mode 100644 example.py diff --git a/ex1.py b/ex1.py new file mode 100644 index 000000000..9955c4196 --- /dev/null +++ b/ex1.py @@ -0,0 +1,7 @@ +import logging + + +def fire(msg): + logger = logging.getLogger(__name__) + + logger.debug("bye bye in debug") diff --git a/ex2.py b/ex2.py new file mode 100644 index 000000000..9bb488e31 --- /dev/null +++ b/ex2.py @@ -0,0 +1,10 @@ +import logging + + +class Hello: + def __init__(self, *args, **kwargs): + self.logger = logging.getLogger(__name__) + + + def talk(self, msg): + self.logger.debug(msg) diff --git a/example.py b/example.py new file mode 100644 index 000000000..83662f416 --- /dev/null +++ b/example.py @@ -0,0 +1,22 @@ +import logging +from oqubeslogging import OQubesLog + +import ex2 +import ex1 + + +def main(): + handler = OQubesLog("workvm", "logging") + logging.basicConfig(level=logging.DEBUG, handlers=[handler]) + logger = logging.getLogger("example") + + + d = ex2.Hello() + d.talk("This should be line 1") + ex1.fire("Where are you in middle?") + d.talk("Oh again") + logger.info("kushal says it works.") + + +if __name__ == "__main__": + main() From 6530b0c63c5e40cfa9d4649b3d186ef3fabb7880 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 1 Nov 2019 17:57:46 +0530 Subject: [PATCH 092/352] Adds a journalctl example The log file is actually not storing all messages. 
--- journal-example.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 journal-example.py diff --git a/journal-example.py b/journal-example.py new file mode 100644 index 000000000..fa468cb10 --- /dev/null +++ b/journal-example.py @@ -0,0 +1,26 @@ +import logging +from oqubeslogging import OQubesLog +from systemd import journal +import select + + +def main(): + handler = OQubesLog("workvm", "logging") + logging.basicConfig(level=logging.DEBUG, handlers=[handler]) + logger = logging.getLogger("example") + j = journal.Reader() + j.seek_tail() + + p = select.poll() + p.register(j, j.get_events()) + while True: + p.poll() + if j.process() == journal.APPEND: + for m in j: + msg = "MSG: {}".format(m["MESSAGE"]) + print(msg) + # TODO: Figure out why the log file in the logging VM is closing + logger.info(m["MESSAGE"]) + +if __name__ == "__main__": + main() \ No newline at end of file From ac3e533dd4f3d08ffa98743f4b183f48094be229 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 1 Nov 2019 20:17:42 +0530 Subject: [PATCH 093/352] Updates readme --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 06095d3b7..9b19ddcb2 100644 --- a/README.md +++ b/README.md @@ -51,4 +51,6 @@ if __name__ == "__main__": ## The journalctl example -TODO: add an example of streaming journalctl logs via python code. \ No newline at end of file +You will need `python3-systemd` package for the same. + +The code is in `journal-example.py` file. 
\ No newline at end of file From 587ef0300a1697dfa7013ac5908e950d93e9652c Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Mon, 4 Nov 2019 13:10:18 -0800 Subject: [PATCH 094/352] change log info->debug --- securedrop_proxy/entrypoint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index f568983ba..33c84d56c 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -37,7 +37,7 @@ def start(): print(e) return - logging.info('Starting SecureDrop Proxy {}'.format(version)) + logging.debug('Starting SecureDrop Proxy {}'.format(version)) # a fresh, new proxy object p = proxy.Proxy() From 3f9659098e99e2701f05a6be43a88c1a90453a14 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Mon, 23 Sep 2019 16:16:29 -0700 Subject: [PATCH 095/352] workaround for bus id issue --- securedrop_export/export.py | 23 +++-------------------- 1 file changed, 3 insertions(+), 20 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index febd54efa..2741bdc51 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -164,31 +164,14 @@ def extract_tarball(self): self.exit_gracefully(msg) def check_usb_connected(self): - # If the USB is not attached via qvm-usb attach, lsusb will return empty string and a # return code of 1 logging.info('Performing usb preflight') try: - p = subprocess.check_output(["lsusb", "-s", "{}:".format(self.pci_bus_id)]) - logging.info("lsusb -s {} : {}".format(self.pci_bus_id, p.decode("utf-8"))) + subprocess.check_output(["lsblk", "-p", "-o", "KNAME", DEVICE]) + self.exit_gracefully("USB_CONNECTED") except subprocess.CalledProcessError: - msg = "ERROR_USB_CONFIGURATION" - self.exit_gracefully(msg) - n_usb = len(p.decode("utf-8").rstrip().split("\n")) - # If there is one device, it is the root hub. 
- if n_usb == 1: - logging.info('usb preflight - no external devices connected') - msg = "USB_NOT_CONNECTED" - self.exit_gracefully(msg) - # If there are two devices, it's the root hub and another device (presumably for export) - elif n_usb == 2: - logging.info('usb preflight - external device connected') - msg = "USB_CONNECTED" - self.exit_gracefully(msg) - # Else the result is unexpected - else: - msg = "ERROR_USB_CHECK" - self.exit_gracefully(msg) + self.exit_gracefully("USB_NOT_CONNECTED") def check_luks_volume(self): logging.info('Checking if volume is luks-encrypted') From 4ed572aacd662f41719560bf678e6d10759f50c7 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Mon, 23 Sep 2019 17:53:15 -0700 Subject: [PATCH 096/352] pipe output for lsblk --- securedrop_export/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 2741bdc51..a220c686d 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -168,7 +168,7 @@ def check_usb_connected(self): # return code of 1 logging.info('Performing usb preflight') try: - subprocess.check_output(["lsblk", "-p", "-o", "KNAME", DEVICE]) + subprocess.check_output(["lsblk", "-p", "-o", "KNAME", DEVICE], stderr=subprocess.PIPE) self.exit_gracefully("USB_CONNECTED") except subprocess.CalledProcessError: self.exit_gracefully("USB_NOT_CONNECTED") From 823e830e711511efc16ef73bb9cd05d19bce4e53 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 1 Nov 2019 12:45:26 -0700 Subject: [PATCH 097/352] change device to sda and fix check --- securedrop_export/export.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index a220c686d..6a65fa3e0 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -14,7 +14,7 @@ PRINTER_NAME = "sdw-printer" PRINTER_WAIT_TIMEOUT = 60 -DEVICE = "/dev/sda1" +DEVICE = "/dev/sda" MOUNTPOINT = "/media/usb" 
ENCRYPTED_DEVICE = "encrypted_volume" BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" @@ -168,7 +168,8 @@ def check_usb_connected(self): # return code of 1 logging.info('Performing usb preflight') try: - subprocess.check_output(["lsblk", "-p", "-o", "KNAME", DEVICE], stderr=subprocess.PIPE) + subprocess.check_output( + ["lsblk", "-p", "-o", "KNAME", DEVICE, "--noheadings"], stderr=subprocess.PIPE) self.exit_gracefully("USB_CONNECTED") except subprocess.CalledProcessError: self.exit_gracefully("USB_NOT_CONNECTED") From 289110e7ab4bc362287d8ea403e24f902215dd76 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Mon, 4 Nov 2019 13:49:15 -0800 Subject: [PATCH 098/352] update tests for lsblk change --- securedrop_export/export.py | 3 ++- tests/test_export.py | 42 +++++++------------------------------ 2 files changed, 9 insertions(+), 36 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 6a65fa3e0..105dd198a 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -169,7 +169,8 @@ def check_usb_connected(self): logging.info('Performing usb preflight') try: subprocess.check_output( - ["lsblk", "-p", "-o", "KNAME", DEVICE, "--noheadings"], stderr=subprocess.PIPE) + ["lsblk", "-p", "-o", "KNAME", DEVICE, "--noheadings", "--inverse"], + stderr=subprocess.PIPE) self.exit_gracefully("USB_CONNECTED") except subprocess.CalledProcessError: self.exit_gracefully("USB_NOT_CONNECTED") diff --git a/tests/test_export.py b/tests/test_export.py index 7c6e45244..17967b464 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -4,16 +4,14 @@ import pytest import subprocess # noqa: F401 import tempfile +from subprocess import CalledProcessError from securedrop_export import export SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BOTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork 
http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa -SAMPLE_OUTPUT_NO_USB = b"Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa -SAMPLE_OUTPUT_USB = b"Bus 001 Device 002: ID 0781:5575 SanDisk Corp.\nBus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub" # noqa -SAMPLE_OUTPUT_USB_ERROR = b"" -SAMPLE_OUTPUT_USB_ERROR2 = b"h\ne\nl\nl\no" +SAMPLE_OUTPUT_USB = b"/dev/sda" # noqa TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") @@ -204,39 +202,13 @@ def test_is_not_open_office_file(capsys, open_office_paths): assert not submission.is_open_office_file(open_office_paths) -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_USB) -def test_usb_precheck_connected(mocked_call, capsys): +def test_usb_precheck_disconnected(capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_NOT_CONNECTED" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) - with pytest.raises(SystemExit) as sysexit: - submission.check_usb_connected() - mocked_exit.assert_called_once_with(expected_message) - - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB) -def test_usb_precheck_disconnected(mocked_call, capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "USB_CONNECTED" - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) - with pytest.raises(SystemExit) as sysexit: - submission.check_usb_connected() - mocked_exit.assert_called_once_with(expected_message) - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == 
"{}\n".format(expected_message) + mock.patch("subprocess.check_output", return_value=CalledProcessError(1, 'check_output')) - -@mock.patch("subprocess.check_output", return_code=1) -def test_usb_precheck_error(mocked_call, capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "ERROR_USB_CHECK" - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: submission.check_usb_connected() mocked_exit.assert_called_once_with(expected_message) @@ -246,10 +218,10 @@ def test_usb_precheck_error(mocked_call, capsys): assert captured.err == "{}\n".format(expected_message) -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB_ERROR2) -def test_usb_precheck_error_2(mocked_call, capsys): +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB) +def test_usb_precheck_connected(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "ERROR_USB_CHECK" + expected_message = "USB_CONNECTED" mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: submission.check_usb_connected() From c8e82ad1b00132917a6544e89960f089b4c5c904 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 5 Nov 2019 19:40:37 -0800 Subject: [PATCH 099/352] update docs and partition check --- README.md | 9 +++++++ securedrop_export/export.py | 21 +++++++++++----- tests/test_export.py | 48 ++++++++++++++++++++++++++++++++++++- 3 files changed, 71 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 803c3a0a1..e6a7b9ae0 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,15 @@ Code for exporting and printing files from the SecureDrop Qubes Workstation. +## Supported Printers + +TBD + +## Supported Export Devices + +We support luks-encrypted drives that are either MBR/DOS partitioned or GPT partitioned. 
If you use `Disks` in Linux to partition your drive, you can [follow these instructions](https://docs.securedrop.org/en/stable/set_up_transfer_and_export_device.html#create-usb-transfer-device). For full-disk encryption, you can use [cryptsetup](https://linux.die.net/man/8/cryptsetup), e.g. `sudo cryptsetup luksFormat --hash=sha512 --key-size=512 /dev/sda` if `/dev/sda` is your device. + +We do not yet support drives that use full-disk encryption with VeraCrypt. ## Export Archive Format diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 105dd198a..cff77be12 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -169,7 +169,7 @@ def check_usb_connected(self): logging.info('Performing usb preflight') try: subprocess.check_output( - ["lsblk", "-p", "-o", "KNAME", DEVICE, "--noheadings", "--inverse"], + ["lsblk", "-p", "-o", "KNAME", "--noheadings", "--inverse", DEVICE], stderr=subprocess.PIPE) self.exit_gracefully("USB_CONNECTED") except subprocess.CalledProcessError: @@ -178,11 +178,20 @@ def check_usb_connected(self): def check_luks_volume(self): logging.info('Checking if volume is luks-encrypted') try: - # cryptsetup isLuks returns 0 if the device is a luks volume - # subprocess with throw if the device is not luks (rc !=0) - subprocess.check_call(["sudo", "cryptsetup", "isLuks", DEVICE]) - msg = "USB_ENCRYPTED" - self.exit_gracefully(msg) + device_and_partitions = subprocess.check_output( + ["lsblk", "-o", "TYPE", "--noheadings", DEVICE], stderr=subprocess.PIPE) + + # we don't support multiple partitions + partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') + if partition_count > 1: + logging.debug("multiple partitions not supported") + self.exit_gracefully("USB_NO_SUPPORTED_ENCRYPTION") + + # we support full-disk luks encryption where there are 0 partitions + # or 1 partition which will be /dev/sda1 instead of /dev/sda + dev = DEVICE if partition_count == 0 else DEVICE + '1' + 
subprocess.check_call(["sudo", "cryptsetup", "isLuks", dev]) + self.exit_gracefully("USB_ENCRYPTED") except subprocess.CalledProcessError: msg = "USB_NO_SUPPORTED_ENCRYPTION" self.exit_gracefully(msg) diff --git a/tests/test_export.py b/tests/test_export.py index 17967b464..94b6dd63a 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -11,6 +11,9 @@ SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BOTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa +SAMPLE_OUTPUT_NO_PART = b"disk\ncrypt" # noqa +SAMPLE_OUTPUT_ONE_PART = b"disk\npart\ncrypt" # noqa +SAMPLE_OUTPUT_MULTI_PART = b"disk\npart\npart\npart\ncrypt" # noqa SAMPLE_OUTPUT_USB = b"/dev/sda" # noqa TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") @@ -232,7 +235,7 @@ def test_usb_precheck_connected(mocked_call, capsys): assert captured.err == "{}\n".format(expected_message) -@mock.patch("subprocess.check_call") +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) def test_luks_precheck_encrypted(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_ENCRYPTED" @@ -244,3 +247,46 @@ def test_luks_precheck_encrypted(mocked_call, capsys): assert sysexit.value.code == 0 captured = capsys.readouterr() assert captured.err == "{}\n".format(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) +def test_luks_precheck_encrypted(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + expected_message = "USB_ENCRYPTED" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + + with 
pytest.raises(SystemExit) as sysexit: + submission.check_luks_volume() + mocked_exit.assert_called_once_with(expected_message) + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) +def test_luks_precheck_encrypted(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + expected_message = "USB_NO_SUPPORTED_ENCRYPTION" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + + with pytest.raises(SystemExit) as sysexit: + submission.check_luks_volume() + mocked_exit.assert_called_once_with(expected_message) + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) +def test_luks_precheck_encrypted(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + expected_message = "USB_NO_SUPPORTED_ENCRYPTION" + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + + mock.patch("subprocess.check_call", return_value=CalledProcessError(1, 'check_call')) + + with pytest.raises(SystemExit) as sysexit: + submission.check_luks_volume() + mocked_exit.assert_called_once_with(expected_message) + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) From c6a7c9004062790b6a830f8aab5af00593ef7187 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 6 Nov 2019 14:03:15 -0800 Subject: [PATCH 100/352] support mounting different encrypted devices --- README.md | 10 ++++- securedrop_export/export.py | 79 +++++++++++++++++++++---------------- 2 files changed, 54 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index e6a7b9ae0..1c5d518ef 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,15 @@ TBD ## Supported Export Devices -We support 
luks-encrypted drives that are either MBR/DOS partitioned or GPT partitioned. If you use `Disks` in Linux to partition your drive, you can [follow these instructions](https://docs.securedrop.org/en/stable/set_up_transfer_and_export_device.html#create-usb-transfer-device). For full-disk encryption, you can use [cryptsetup](https://linux.die.net/man/8/cryptsetup), e.g. `sudo cryptsetup luksFormat --hash=sha512 --key-size=512 /dev/sda` if `/dev/sda` is your device. +We support luks-encrypted drives that are either MBR/DOS partitioned or GPT partitioned. If you use `Disks` in Linux to partition your drive, you can [follow these instructions](https://docs.securedrop.org/en/stable/set_up_transfer_and_export_device.html#create-usb-transfer-device) to create a new export device. You can also use [cryptsetup](https://linux.die.net/man/8/cryptsetup) to create a luks-encrypted device with full-disk encryption, for example: + +1. `sudo cryptsetup luksFormat --hash=sha512 --key-size=512 DEVICE` where `DEIVCE` is the name of your removable drive, which you can find via `lsblk -p`. + + Make sure `DEVICE` is correct because you will be overwriting its data irrevocably. + +2. `sudo cryptsetup luksOpen /dev/sdb encrypted_device` + +3. `sudo mkfs.ext4 /dev/mapper/encrypted_device` We do not yet support drives that use full-disk encryption with VeraCrypt. 
diff --git a/securedrop_export/export.py b/securedrop_export/export.py index cff77be12..7a2265f20 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -175,8 +175,7 @@ def check_usb_connected(self): except subprocess.CalledProcessError: self.exit_gracefully("USB_NOT_CONNECTED") - def check_luks_volume(self): - logging.info('Checking if volume is luks-encrypted') + def set_extracted_device_name(self): try: device_and_partitions = subprocess.check_output( ["lsblk", "-o", "TYPE", "--noheadings", DEVICE], stderr=subprocess.PIPE) @@ -187,47 +186,59 @@ def check_luks_volume(self): logging.debug("multiple partitions not supported") self.exit_gracefully("USB_NO_SUPPORTED_ENCRYPTION") - # we support full-disk luks encryption where there are 0 partitions - # or 1 partition which will be /dev/sda1 instead of /dev/sda - dev = DEVICE if partition_count == 0 else DEVICE + '1' - subprocess.check_call(["sudo", "cryptsetup", "isLuks", dev]) + # set device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted + self.device = DEVICE if partition_count == 0 else DEVICE + '1' + except subprocess.CalledProcessError: + msg = "USB_NO_SUPPORTED_ENCRYPTION" + self.exit_gracefully(msg) + + def check_luks_volume(self): + logging.info('Checking if volume is luks-encrypted') + try: + self.set_extracted_device_name() + logging.debug("checking if {} is luks encrypted".format(self.device)) + subprocess.check_call(["sudo", "cryptsetup", "isLuks", self.device]) self.exit_gracefully("USB_ENCRYPTED") except subprocess.CalledProcessError: msg = "USB_NO_SUPPORTED_ENCRYPTION" self.exit_gracefully(msg) def unlock_luks_volume(self, encryption_key): - # the luks device is not already unlocked - logging.info('Unlocking luks volume {}'.format(self.encrypted_device)) - if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): - p = subprocess.Popen( - ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], - stdin=subprocess.PIPE, - 
stdout=subprocess.PIPE, - stderr=subprocess.PIPE - ) - logging.info('Passing key') - p.communicate(input=str.encode(encryption_key, "utf-8")) - rc = p.returncode - if rc != 0: - logging.error('Bad phassphrase for {}'.format(self.encrypted_device)) - msg = "USB_BAD_PASSPHRASE" - self.exit_gracefully(msg) + try: + # set encrypted device (depends on how device was encrypted) + self.set_extracted_device_name() + device_uuid = subprocess.check_output( + ["sudo", "lsblk", "-o", "UUID", "--noheadings", self.device]) + self.encrypted_device = 'luks-' + device_uuid + + # the luks device is not already unlocked + if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): + logging.debug('Unlocking luks volume {}'.format(self.encrypted_device)) + p = subprocess.Popen( + ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + logging.debug('Passing key') + p.communicate(input=str.encode(encryption_key, "utf-8")) + rc = p.returncode + if rc != 0: + logging.error('Bad phassphrase for {}'.format(self.encrypted_device)) + msg = "USB_BAD_PASSPHRASE" + self.exit_gracefully(msg) + except subprocess.CalledProcessError: + self.exit_gracefully("USB_NO_SUPPORTED_ENCRYPTION") def mount_volume(self): - # mount target not created - if not os.path.exists(self.mountpoint): - subprocess.check_call(["sudo", "mkdir", self.mountpoint]) try: - logging.info('Mounting {} to {}'.format(self.encrypted_device, self.mountpoint)) - subprocess.check_call( - [ - "sudo", - "mount", - os.path.join("/dev/mapper/", self.encrypted_device), - self.mountpoint, - ] - ) + # mount target not created + if not os.path.exists(self.mountpoint): + subprocess.check_call(["sudo", "mkdir", self.mountpoint]) + + mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) + logging.info('Mounting {}'.format(mapped_device_path)) + subprocess.check_call(["sudo", "mount", mapped_device_path, 
self.mountpoint]) subprocess.check_call(["sudo", "chown", "-R", "user:user", self.mountpoint]) except subprocess.CalledProcessError: # clean up From eac1d5414bf93f7339147f7390b0c91c197917cc Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 7 Nov 2019 13:05:42 +0530 Subject: [PATCH 101/352] Adds wheel hashes for both Stretch and Buster --- build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-requirements.txt b/build-requirements.txt index 5d37ab853..4b17af9f9 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -3,7 +3,7 @@ chardet==3.0.4 --hash=sha256:9f178988ca4c86e8a319b51aac1185b6fe5192328eb5a163c28 furl==2.0.0 --hash=sha256:cc0eb8998dcc7c5b58bc8625891a9ff563e2765e112024fa3d1e3521481de8b6 idna==2.7 --hash=sha256:954e65e127d0433a352981f43f291a438423d5b385ebf643c70fd740e0634111 orderedmultidict==1.0 --hash=sha256:25489716d76d2cc8aa656bfb00cd40b6ca29d5e11ccde0db60c2b46ad52bb40a -pyyaml==5.1 --hash=sha256:b8d80623e9d4e348c59ea726ce3032a2eb15abca6a48d3828362d11c6014a0a7 +pyyaml==5.1 --hash=sha256:b8d80623e9d4e348c59ea726ce3032a2eb15abca6a48d3828362d11c6014a0a7 --hash=sha256:c6dec5d6ffa44a50d83b9c4e8df0443a0a87e4213ecf2e24fcae6ea991f3b0c0 requests==2.20.0 --hash=sha256:2a539dd6af40a611f3b8eb3f99d3567781352ece1698b2fab42bf4c2218705b5 six==1.11.0 --hash=sha256:4663c7a1dbed033cfb294f2d534bd6151c0698dc12ecabb4eaa3cb041d758528 urllib3==1.24.3 --hash=sha256:028309393606e28e640e2031edd27eb969c94f9364b0871912608aaa8e66c96e From 9720fc93a37fbda108135057bacda0623d06437f Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Thu, 7 Nov 2019 00:06:09 -0800 Subject: [PATCH 102/352] get uuid using cryptsetup --- securedrop_export/export.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 7a2265f20..c0b93d447 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -205,11 +205,14 @@ def check_luks_volume(self): def 
unlock_luks_volume(self, encryption_key): try: - # set encrypted device (depends on how device was encrypted) + # get the encrypted device name self.set_extracted_device_name() - device_uuid = subprocess.check_output( - ["sudo", "lsblk", "-o", "UUID", "--noheadings", self.device]) - self.encrypted_device = 'luks-' + device_uuid + luks_header = subprocess.check_output(["sudo", "cryptsetup", "luksDump", self.device]) + luks_header_list = luks_header.decode('utf-8').split('\n') + for line in luks_header_list: + items = line.split('\t') + if 'UUID' in items[0]: + self.encrypted_device = 'luks-' + items[1] # the luks device is not already unlocked if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): From bb5f9a79aec86860729a866b78ff0dafaf31752b Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Thu, 7 Nov 2019 10:13:36 -0800 Subject: [PATCH 103/352] Update docs --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 1c5d518ef..bbfebdb14 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ TBD We support luks-encrypted drives that are either MBR/DOS partitioned or GPT partitioned. If you use `Disks` in Linux to partition your drive, you can [follow these instructions](https://docs.securedrop.org/en/stable/set_up_transfer_and_export_device.html#create-usb-transfer-device) to create a new export device. You can also use [cryptsetup](https://linux.die.net/man/8/cryptsetup) to create a luks-encrypted device with full-disk encryption, for example: -1. `sudo cryptsetup luksFormat --hash=sha512 --key-size=512 DEVICE` where `DEIVCE` is the name of your removable drive, which you can find via `lsblk -p`. +1. `sudo cryptsetup luksFormat --hash=sha512 --key-size=512 DEVICE` where `DEVICE` is the name of your removable drive, which you can find via `lsblk -p`. Make sure `DEVICE` is correct because you will be overwriting its data irrevocably. 
@@ -20,6 +20,8 @@ We support luks-encrypted drives that are either MBR/DOS partitioned or GPT part 3. `sudo mkfs.ext4 /dev/mapper/encrypted_device` +4. `sudo cryptsetup luksClose /dev/mapper/encrypted_device` + We do not yet support drives that use full-disk encryption with VeraCrypt. ## Export Archive Format From 68442b0e680d14fa418d417f0696560c15290256 Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 17 Oct 2019 10:08:07 -0400 Subject: [PATCH 104/352] Use enum to specify possible return codes --- securedrop_export/export.py | 77 +++++++++++++++++++++++++++---------- securedrop_export/main.py | 5 ++- tests/test_export.py | 21 +++++++--- 3 files changed, 75 insertions(+), 28 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index c0b93d447..4aea94572 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -12,6 +12,8 @@ import tempfile import time +from enum import Enum + PRINTER_NAME = "sdw-printer" PRINTER_WAIT_TIMEOUT = 60 DEVICE = "/dev/sda" @@ -23,6 +25,41 @@ logger = logging.getLogger(__name__) +class ExportStatus(Enum): + + # General errors + ERROR_FILE_NOT_FOUND = 'ERROR_FILE_NOT_FOUND' + ERROR_EXTRACTION = 'ERROR_EXTRACTION' + ERROR_METADATA_PARSING = 'ERROR_METADATA_PARSING' + ERROR_ARCHIVE_METADATA = 'ERROR_ARCHIVE_METADATA' + ERROR_USB_CONFIGURATION = 'ERROR_USB_CONFIGURATION' + ERROR_GENERIC = 'ERROR_GENERIC' + + # USB preflight related errors + USB_CONNECTED = 'USB_CONNECTED' + USB_NOT_CONNECTED = 'USB_NOT_CONNECTED' + ERROR_USB_CHECK = 'ERROR_USB_CHECK' + + # USB Disk preflight related errors + USB_ENCRYPTED = 'USB_ENCRYPTED' + USB_ENCRYPTION_NOT_SUPPORTED = 'USB_ENCRYPTION_NOT_SUPPORTED' + USB_DISK_ERROR = 'USB_DISK_ERROR' + + # Printer preflight related errors + ERROR_PRINTER_NOT_FOUND = 'ERROR_PRINTER_NOT_FOUND' + ERROR_PRINTER_NOT_SUPPORTED = 'ERROR_PRINTER_NOT_SUPPORTED' + ERROR_PRINTER_DRIVER_UNAVAILABLE = 'ERROR_PRINTER_DRIVER_UNAVAILABLE' + ERROR_PRINTER_INSTALL = 
'ERROR_PRINTER_INSTALL' + + # Disk export errors + USB_BAD_PASSPHRASE = 'USB_BAD_PASSPHRASE' + ERROR_USB_MOUNT = 'ERROR_USB_MOUNT' + ERROR_USB_WRITE = 'ERROR_USB_WRITE' + + # Printer export errors + ERROR_PRINT = 'ERROR_PRINT' + + class Metadata(object): """ Object to parse, validate and store json metadata from the sd-export archive. @@ -111,7 +148,7 @@ def __init__(self, archive, config_path): raise except Exception: logger.error("error parsing VM configuration.") - self.exit_gracefully("ERROR_CONFIG") + self.exit_gracefully(ExportStatus.ERROR_USB_CONFIGURATION.value) def exit_gracefully(self, msg, e=False): """ @@ -160,7 +197,7 @@ def extract_tarball(self): with tarfile.open(self.archive) as tar: tar.extractall(self.tmpdir) except Exception: - msg = "ERROR_EXTRACTION" + msg = ExportStatus.ERROR_EXTRACTION.value self.exit_gracefully(msg) def check_usb_connected(self): @@ -173,7 +210,7 @@ def check_usb_connected(self): stderr=subprocess.PIPE) self.exit_gracefully("USB_CONNECTED") except subprocess.CalledProcessError: - self.exit_gracefully("USB_NOT_CONNECTED") + self.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) def set_extracted_device_name(self): try: @@ -184,13 +221,12 @@ def set_extracted_device_name(self): partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') if partition_count > 1: logging.debug("multiple partitions not supported") - self.exit_gracefully("USB_NO_SUPPORTED_ENCRYPTION") + self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) # set device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted self.device = DEVICE if partition_count == 0 else DEVICE + '1' except subprocess.CalledProcessError: - msg = "USB_NO_SUPPORTED_ENCRYPTION" - self.exit_gracefully(msg) + self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) def check_luks_volume(self): logging.info('Checking if volume is luks-encrypted') @@ -198,9 +234,9 @@ def check_luks_volume(self): self.set_extracted_device_name() 
logging.debug("checking if {} is luks encrypted".format(self.device)) subprocess.check_call(["sudo", "cryptsetup", "isLuks", self.device]) - self.exit_gracefully("USB_ENCRYPTED") + self.exit_gracefully(ExportStatus.USB_ENCRYPTED.value) except subprocess.CalledProcessError: - msg = "USB_NO_SUPPORTED_ENCRYPTION" + msg = ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value self.exit_gracefully(msg) def unlock_luks_volume(self, encryption_key): @@ -228,10 +264,10 @@ def unlock_luks_volume(self, encryption_key): rc = p.returncode if rc != 0: logging.error('Bad phassphrase for {}'.format(self.encrypted_device)) - msg = "USB_BAD_PASSPHRASE" + msg = ExportStatus.USB_BAD_PASSPHRASE.value self.exit_gracefully(msg) except subprocess.CalledProcessError: - self.exit_gracefully("USB_NO_SUPPORTED_ENCRYPTION") + self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) def mount_volume(self): try: @@ -250,7 +286,7 @@ def mount_volume(self): subprocess.check_call( ["sudo", "cryptsetup", "luksClose", self.encrypted_device] ) - msg = "ERROR_USB_MOUNT" + msg = ExportStatus.ERROR_USB_MOUNT.value self.exit_gracefully(msg) def copy_submission(self): @@ -264,7 +300,7 @@ def copy_submission(self): logging.info('File copied successfully to {}'.format(self.target_dirname)) self.popup_message("Files exported successfully to disk.") except (subprocess.CalledProcessError, OSError): - msg = "ERROR_USB_WRITE" + msg = ExportStatus.ERROR_USB_WRITE.value self.exit_gracefully(msg) finally: # Finally, we sync the filesystem, unmount the drive and lock the @@ -297,11 +333,11 @@ def wait_for_print(self): else: time.sleep(5) except subprocess.CalledProcessError: - msg = "ERROR_PRINT" + msg = ExportStatus.ERROR_PRINT.value self.exit_gracefully(msg) except TimeoutException: logging.error('Timeout waiting for printer {}'.format(self.printer_name)) - msg = "ERROR_PRINT" + msg = ExportStatus.ERROR_PRINT.value self.exit_gracefully(msg) return True @@ -311,7 +347,7 @@ def get_printer_uri(self): try: output = 
subprocess.check_output(["sudo", "lpinfo", "-v"]) except subprocess.CalledProcessError: - msg = "ERROR_PRINTER_URI" + msg = ExportStatus.ERROR_PRINTER_URI.value self.exit_gracefully(msg) # fetch the usb printer uri @@ -324,14 +360,14 @@ def get_printer_uri(self): if printer_uri == "": # No usb printer is connected logging.info('No usb printers connected') - self.exit_gracefully("ERROR_PRINTER_NOT_FOUND") + self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) elif "Brother" in printer_uri: logging.info('Printer {} is supported'.format(printer_uri)) return printer_uri else: # printer url is a make that is unsupported logging.info('Printer {} is unsupported'.format(printer_uri)) - self.exit_gracefully("ERROR_PRINTER_NOT_SUPPORTED") + self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) def install_printer_ppd(self, uri): # Some drivers don't come with ppd files pre-compiled, we must compile them @@ -347,12 +383,13 @@ def install_printer_ppd(self, uri): ] ) except subprocess.CalledProcessError: - msg = "ERROR_PRINTER_DRIVER_UNAVAILBLE" + msg = ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILBLE.value self.exit_gracefully(msg) return self.brlaser_ppd # Here, we could support ppd drivers for other makes or models in the future def setup_printer(self, printer_uri, printer_ppd): + # Add the printer using lpadmin try: # Add the printer using lpadmin subprocess.check_call( @@ -374,7 +411,7 @@ def setup_printer(self, printer_uri, printer_ppd): ["sudo", "lpadmin", "-p", self.printer_name, "-u", "allow:user"] ) except subprocess.CalledProcessError: - msg = "ERROR_PRINTER_INSTALL" + msg = ExportStatus.ERROR_PRINTER_INSTALL.value self.exit_gracefully(msg) def print_test_page(self): @@ -424,7 +461,7 @@ def print_file(self, file_to_print): logging.info('Sending file to printer {}:{}'.format(self.printer_name)) subprocess.check_call(["xpp", "-P", self.printer_name, file_to_print]) except subprocess.CalledProcessError: - msg = "ERROR_PRINT" + msg = 
ExportStatus.ERROR_PRINT.value self.exit_gracefully(msg) diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 00ca144b1..34b8a9f66 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -1,6 +1,7 @@ import logging from securedrop_export import export +from securedrop_export.export import ExportStatus logger = logging.getLogger(__name__) @@ -11,7 +12,7 @@ def __main__(submission): try: submission.archive_metadata = export.Metadata(submission.tmpdir) except Exception: - submission.exit_gracefully("ERROR_METADATA_PARSING") + submission.exit_gracefully(ExportStatus.ERROR_METADATA_PARSING.value) if submission.archive_metadata.is_valid(): if submission.archive_metadata.export_method == "usb-test": @@ -47,4 +48,4 @@ def __main__(submission): submission.setup_printer(printer_uri, printer_ppd) submission.print_test_page() else: - submission.exit_gracefully("ERROR_ARCHIVE_METADATA") + submission.exit_gracefully(ExportStatus.ERROR_ARCHIVE_METADATA.value) diff --git a/tests/test_export.py b/tests/test_export.py index 94b6dd63a..ffef44596 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -22,7 +22,9 @@ def test_bad_sd_export_config_invalid_json(capsys): - expected_message = "ERROR_CONFIG" + expected_message = "ERROR_USB_CONFIGURATION" + assert export.ExportStatus.ERROR_USB_CONFIGURATION.value == expected_message + with pytest.raises(SystemExit) as sysexit: export.SDExport("", BAD_TEST_CONFIG) # A graceful exit means a return code of 0 @@ -35,7 +37,9 @@ def test_bad_sd_export_config_invalid_json(capsys): def test_bad_sd_export_config_invalid_value(capsys): - expected_message = "ERROR_CONFIG" + expected_message = "ERROR_USB_CONFIGURATION" + assert export.ExportStatus.ERROR_USB_CONFIGURATION.value == expected_message + with pytest.raises(SystemExit) as sysexit: export.SDExport("", ANOTHER_BAD_TEST_CONFIG) # A graceful exit means a return code of 0 @@ -170,6 +174,7 @@ def test_get_good_printer_uri(mocked_call): def 
test_get_bad_printer_uri(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "ERROR_PRINTER_NOT_FOUND" + assert export.ExportStatus.ERROR_PRINTER_NOT_FOUND.value == expected_message mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -208,6 +213,7 @@ def test_is_not_open_office_file(capsys, open_office_paths): def test_usb_precheck_disconnected(capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_NOT_CONNECTED" + assert export.ExportStatus.USB_NOT_CONNECTED.value == expected_message mocked_exit = mock.patch("export.exit_gracefully", return_value=0) mock.patch("subprocess.check_output", return_value=CalledProcessError(1, 'check_output')) @@ -225,6 +231,7 @@ def test_usb_precheck_disconnected(capsys): def test_usb_precheck_connected(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_CONNECTED" + assert export.ExportStatus.USB_CONNECTED.value == expected_message mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: submission.check_usb_connected() @@ -238,7 +245,7 @@ def test_usb_precheck_connected(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) def test_luks_precheck_encrypted(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "USB_ENCRYPTED" + expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -252,7 +259,7 @@ def test_luks_precheck_encrypted(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) def test_luks_precheck_encrypted(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "USB_ENCRYPTED" + expected_message = 
export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -266,7 +273,7 @@ def test_luks_precheck_encrypted(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) def test_luks_precheck_encrypted(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "USB_NO_SUPPORTED_ENCRYPTION" + expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value mocked_exit = mock.patch("export.exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: @@ -276,10 +283,12 @@ def test_luks_precheck_encrypted(mocked_call, capsys): captured = capsys.readouterr() assert captured.err == "{}\n".format(expected_message) + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) def test_luks_precheck_encrypted(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "USB_NO_SUPPORTED_ENCRYPTION" + expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" + assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value mocked_exit = mock.patch("export.exit_gracefully", return_value=0) mock.patch("subprocess.check_call", return_value=CalledProcessError(1, 'check_call')) From b21b0ef6e3b1807fee77bd5231a833260a7af86f Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 6 Nov 2019 15:02:20 -0500 Subject: [PATCH 105/352] wrap subprocess.check_call into safe_check_call This ensures we properly catch CalledProcessErrors and that we propagate the proper message to stdout --- securedrop_export/export.py | 192 +++++++++++++++++++----------------- tests/test_export.py | 13 +++ 2 files changed, 113 insertions(+), 92 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 4aea94572..c5475e96f 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -150,6 +150,16 @@ def 
__init__(self, archive, config_path): logger.error("error parsing VM configuration.") self.exit_gracefully(ExportStatus.ERROR_USB_CONFIGURATION.value) + def safe_check_call(self, command, error_message): + """ + Safely wrap subprocess.check_output to ensure we always return 0 and + log the error messages + """ + try: + subprocess.check_call(command) + except subprocess.CalledProcessError as ex: + self.exit_gracefully(msg=error_message, e=ex.output) + def exit_gracefully(self, msg, e=False): """ Utility to print error messages, mostly used during debugging, @@ -169,27 +179,24 @@ def exit_gracefully(self, msg, e=False): logger.error(e_output) except Exception: e_output = "" - sys.stderr.write(e_output) + sys.stderr.write(str(e_output)) sys.stderr.write("\n") # exit with 0 return code otherwise the os will attempt to open # the file with another application sys.exit(0) def popup_message(self, msg): - try: - subprocess.check_call( - [ - "notify-send", - "--expire-time", - "3000", - "--icon", - "/usr/share/securedrop/icons/sd-logo.png", - "SecureDrop: {}".format(msg), - ] - ) - except subprocess.CalledProcessError as e: - msg = "Error sending notification:" - self.exit_gracefully(msg, e=e) + self.safe_check_call( + command=[ + "notify-send", + "--expire-time", + "3000", + "--icon", + "/usr/share/securedrop/icons/sd-logo.png", + "SecureDrop: {}".format(msg), + ], + error_message="Error sending notification:" + ) def extract_tarball(self): try: @@ -229,15 +236,16 @@ def set_extracted_device_name(self): self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) def check_luks_volume(self): + # cryptsetup isLuks returns 0 if the device is a luks volume + # subprocess with throw if the device is not luks (rc !=0) logging.info('Checking if volume is luks-encrypted') - try: - self.set_extracted_device_name() - logging.debug("checking if {} is luks encrypted".format(self.device)) - subprocess.check_call(["sudo", "cryptsetup", "isLuks", self.device]) - 
self.exit_gracefully(ExportStatus.USB_ENCRYPTED.value) - except subprocess.CalledProcessError: - msg = ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - self.exit_gracefully(msg) + self.set_extracted_device_name() + logging.debug("checking if {} is luks encrypted".format(self.device)) + self.safe_check_call( + command=["sudo", "cryptsetup", "isLuks", self.device], + error_message=ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + ) + self.exit_gracefully(ExportStatus.USB_ENCRYPTED.value) def unlock_luks_volume(self, encryption_key): try: @@ -270,27 +278,28 @@ def unlock_luks_volume(self, encryption_key): self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) def mount_volume(self): - try: - # mount target not created - if not os.path.exists(self.mountpoint): - subprocess.check_call(["sudo", "mkdir", self.mountpoint]) - - mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) - logging.info('Mounting {}'.format(mapped_device_path)) - subprocess.check_call(["sudo", "mount", mapped_device_path, self.mountpoint]) - subprocess.check_call(["sudo", "chown", "-R", "user:user", self.mountpoint]) - except subprocess.CalledProcessError: - # clean up - logging.error('Error mounting {} to {}'.format(self.encrypted_device, self.mountpoint)) - logging.info('Locking luks volume {}'.format(self.encrypted_device)) - subprocess.check_call( - ["sudo", "cryptsetup", "luksClose", self.encrypted_device] + # mount target not created, create folder + if not os.path.exists(self.mountpoint): + self.safe_check_call( + command=["sudo", "mkdir", self.mountpoint], + error_message=ExportStatus.ERROR_USB_MOUNT ) - msg = ExportStatus.ERROR_USB_MOUNT.value - self.exit_gracefully(msg) + + mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) + logging.info('Mounting {}'.format(mapped_device_path)) + self.safe_check_call( + command=["sudo", "mount", mapped_device_path, self.mountpoint], + error_message=ExportStatus.ERROR_USB_MOUNT.value + ) + 
self.safe_check_call( + command=["sudo", "chown", "-R", "user:user", self.mountpoint], + error_message=ExportStatus.ERROR_USB_MOUNT.value + ) def copy_submission(self): # move files to drive (overwrites files with same filename) and unmount drive + # we don't use safe_check_call here because we must lock and + # unmount the drive as part of the finally block try: target_path = os.path.join(self.mountpoint, self.target_dirname) subprocess.check_call(["mkdir", target_path]) @@ -372,47 +381,44 @@ def get_printer_uri(self): def install_printer_ppd(self, uri): # Some drivers don't come with ppd files pre-compiled, we must compile them if "Brother" in uri: - try: - subprocess.check_call( - [ - "sudo", - "ppdc", - self.brlaser_driver, - "-d", - "/usr/share/cups/model/", - ] - ) - except subprocess.CalledProcessError: - msg = ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILBLE.value - self.exit_gracefully(msg) + self.safe_check_call( + command=[ + "sudo", + "ppdc", + self.brlaser_driver, + "-d", + "/usr/share/cups/model/", + ], + error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value + ) return self.brlaser_ppd # Here, we could support ppd drivers for other makes or models in the future def setup_printer(self, printer_uri, printer_ppd): # Add the printer using lpadmin - try: - # Add the printer using lpadmin - subprocess.check_call( - [ - "sudo", - "lpadmin", - "-p", - self.printer_name, - "-v", - printer_uri, - "-P", - printer_ppd, - ] - ) - # Activate the printer so that it can receive jobs - subprocess.check_call(["sudo", "lpadmin", "-p", self.printer_name, "-E"]) - # Allow user to print (without using sudo) - subprocess.check_call( - ["sudo", "lpadmin", "-p", self.printer_name, "-u", "allow:user"] - ) - except subprocess.CalledProcessError: - msg = ExportStatus.ERROR_PRINTER_INSTALL.value - self.exit_gracefully(msg) + self.safe_check_call( + command=[ + "sudo", + "lpadmin", + "-p", + self.printer_name, + "-v", + printer_uri, + "-P", + printer_ppd, + ], + 
error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) + # Activate the printer so that it can receive jobs + self.safe_check_call( + command=["sudo", "lpadmin", "-p", self.printer_name, "-E"], + error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) + # Allow user to print (without using sudo) + self.safe_check_call( + command=["sudo", "lpadmin", "-p", self.printer_name, "-u", "allow:user"], + error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) def print_test_page(self): self.print_file("/usr/share/cups/data/testprint") @@ -447,22 +453,24 @@ def is_open_office_file(self, filename): return False def print_file(self, file_to_print): - try: - # If the file to print is an (open)office document, we need to call unoconf to - # convert the file to pdf as printer drivers do not support this format - if self.is_open_office_file(file_to_print): - logging.info('Converting Office document to pdf'.format(self.printer_name)) - folder = os.path.dirname(file_to_print) - converted_filename = file_to_print + ".pdf" - converted_path = os.path.join(folder, converted_filename) - subprocess.check_call(["unoconv", "-o", converted_path, file_to_print]) - file_to_print = converted_path - - logging.info('Sending file to printer {}:{}'.format(self.printer_name)) - subprocess.check_call(["xpp", "-P", self.printer_name, file_to_print]) - except subprocess.CalledProcessError: - msg = ExportStatus.ERROR_PRINT.value - self.exit_gracefully(msg) + # If the file to print is an (open)office document, we need to call unoconf to + # convert the file to pdf as printer drivers do not support this format + if self.is_open_office_file(file_to_print): + logging.info('Converting Office document to pdf'.format(self.printer_name)) + folder = os.path.dirname(file_to_print) + converted_filename = file_to_print + ".pdf" + converted_path = os.path.join(folder, converted_filename) + self.safe_check_call( + command=["unoconv", "-o", converted_path, file_to_print], + 
error_message=ExportStatus.ERROR_PRINT.value + ) + file_to_print = converted_path + + logging.info('Sending file to printer {}:{}'.format(self.printer_name)) + self.safe_check_call( + command=["xpp", "-P", self.printer_name, file_to_print], + error_message=ExportStatus.ERROR_PRINT.value + ) # class ends here diff --git a/tests/test_export.py b/tests/test_export.py index ffef44596..440fae616 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -299,3 +299,16 @@ def test_luks_precheck_encrypted(mocked_call, capsys): assert sysexit.value.code == 0 captured = capsys.readouterr() assert captured.err == "{}\n".format(expected_message) + + +def test_safe_check_call(capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + submission.safe_check_call(['ls'], "this will work") + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + expected_message = "uh oh!!!!" + with pytest.raises(SystemExit) as sysexit: + submission.safe_check_call(['ls', 'kjdsfhkdjfh'], expected_message) + mocked_exit.assert_called_once_with(expected_message) + assert sysexit.value.code == 0 + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) From 1e0ed17245e1d27bca5f7e5813471d6b95401130 Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 6 Nov 2019 16:51:53 -0500 Subject: [PATCH 106/352] Exit_gracefully should correctly log exceptions It should also not log stuff to stderr --- securedrop_export/export.py | 17 ++++++++--------- tests/test_export.py | 2 +- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index c5475e96f..09b97627d 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -167,20 +167,19 @@ def exit_gracefully(self, msg, e=False): since non-zero exit values will cause system to try alternative solutions for mimetype handling, which we want to avoid. 
""" - sys.stderr.write(msg) - sys.stderr.write("\n") logger.info('Exiting with message: {}'.format(msg)) - if e: + if not e: + sys.stderr.write(msg) + sys.stderr.write("\n") + else: try: # If the file archive was extracted, delete before returning if os.path.isdir(self.tmpdir): shutil.rmtree(self.tmpdir) - e_output = e.output - logger.error(e_output) - except Exception: - e_output = "" - sys.stderr.write(str(e_output)) - sys.stderr.write("\n") + logger.error("{}:{}".format(msg, e.output)) + except Exception as ex: + logger.error("Unhandled exception: {}".format(ex)) + sys.stderr.write(ExportStatus.ERROR_GENERIC.value) # exit with 0 return code otherwise the os will attempt to open # the file with another application sys.exit(0) diff --git a/tests/test_export.py b/tests/test_export.py index 440fae616..66c420339 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -84,7 +84,7 @@ def test_exit_gracefully_exception(capsys): assert sysexit.value.code == 0 captured = capsys.readouterr() - assert captured.err == "{}\n\n".format(test_msg) + assert captured.err == export.ExportStatus.ERROR_GENERIC.value assert captured.out == "" From c19c0d7212c048c2a3dce0e1bbd0319ae72d18e9 Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 6 Nov 2019 16:55:38 -0500 Subject: [PATCH 107/352] Cleanup: directly pass enum values instead of assigning variable --- securedrop_export/export.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 09b97627d..f1454bb23 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -203,8 +203,7 @@ def extract_tarball(self): with tarfile.open(self.archive) as tar: tar.extractall(self.tmpdir) except Exception: - msg = ExportStatus.ERROR_EXTRACTION.value - self.exit_gracefully(msg) + self.exit_gracefully(ExportStatus.ERROR_EXTRACTION.value) def check_usb_connected(self): # If the USB is not attached via qvm-usb attach, lsusb will 
return empty string and a @@ -271,8 +270,7 @@ def unlock_luks_volume(self, encryption_key): rc = p.returncode if rc != 0: logging.error('Bad phassphrase for {}'.format(self.encrypted_device)) - msg = ExportStatus.USB_BAD_PASSPHRASE.value - self.exit_gracefully(msg) + self.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) except subprocess.CalledProcessError: self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) @@ -308,8 +306,7 @@ def copy_submission(self): logging.info('File copied successfully to {}'.format(self.target_dirname)) self.popup_message("Files exported successfully to disk.") except (subprocess.CalledProcessError, OSError): - msg = ExportStatus.ERROR_USB_WRITE.value - self.exit_gracefully(msg) + self.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) finally: # Finally, we sync the filesystem, unmount the drive and lock the # luks volume, and exit 0 @@ -341,12 +338,10 @@ def wait_for_print(self): else: time.sleep(5) except subprocess.CalledProcessError: - msg = ExportStatus.ERROR_PRINT.value - self.exit_gracefully(msg) + self.exit_gracefully(ExportStatus.ERROR_PRINT.value) except TimeoutException: logging.error('Timeout waiting for printer {}'.format(self.printer_name)) - msg = ExportStatus.ERROR_PRINT.value - self.exit_gracefully(msg) + self.exit_gracefully(ExportStatus.ERROR_PRINT.value) return True def get_printer_uri(self): @@ -355,8 +350,7 @@ def get_printer_uri(self): try: output = subprocess.check_output(["sudo", "lpinfo", "-v"]) except subprocess.CalledProcessError: - msg = ExportStatus.ERROR_PRINTER_URI.value - self.exit_gracefully(msg) + self.exit_gracefully(ExportStatus.ERROR_PRINTER_URI.value) # fetch the usb printer uri for line in output.split(): From e378abf0a50c99a26b65838c4ab32159700b1b2d Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 8 Nov 2019 09:35:49 -0500 Subject: [PATCH 108/352] Fix linting/tests Linting was failing but was not caught, but there were multiple tests with the same name (lint was failing, but 
app tests were passing). This was not caught in the review (only test was run) --- securedrop_export/export.py | 4 ++-- tests/test_export.py | 42 ++++++++++++++++++++++++++++++------- 2 files changed, 36 insertions(+), 10 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index f1454bb23..c8ec5dc73 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -226,12 +226,12 @@ def set_extracted_device_name(self): partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') if partition_count > 1: logging.debug("multiple partitions not supported") - self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) + self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) # set device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted self.device = DEVICE if partition_count == 0 else DEVICE + '1' except subprocess.CalledProcessError: - self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) + self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) def check_luks_volume(self): # cryptsetup isLuks returns 0 if the device is a luks volume diff --git a/tests/test_export.py b/tests/test_export.py index 66c420339..87f031d88 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -243,21 +243,49 @@ def test_usb_precheck_connected(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) -def test_luks_precheck_encrypted(mocked_call, capsys): +def test_extract_device_name_no_part(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + submission.set_extracted_device_name() + assert submission.device == "/dev/sda" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) +def test_extract_device_name_single_part(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + submission.set_extracted_device_name() + assert submission.device == 
"/dev/sda1" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) +def test_extract_device_name_multiple_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value with pytest.raises(SystemExit) as sysexit: - submission.check_luks_volume() + submission.set_extracted_device_name() mocked_exit.assert_called_once_with(expected_message) assert sysexit.value.code == 0 captured = capsys.readouterr() assert captured.err == "{}\n".format(expected_message) +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) +@mock.patch("subprocess.check_call", return_value=0) +def test_luks_precheck_encrypted_fde(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + expected_message = export.ExportStatus.USB_ENCRYPTED.value + mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + + with pytest.raises(SystemExit) as sysexit: + submission.check_luks_volume() + mocked_exit.assert_called_once_with(expected_message) + assert sysexit.value.code == 0 + + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -def test_luks_precheck_encrypted(mocked_call, capsys): +@mock.patch("subprocess.check_call", return_value=0) +def test_luks_precheck_encrypted_single_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mock.patch("export.exit_gracefully", return_value=0) @@ -266,12 +294,10 @@ def test_luks_precheck_encrypted(mocked_call, capsys): submission.check_luks_volume() mocked_exit.assert_called_once_with(expected_message) assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) 
@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) -def test_luks_precheck_encrypted(mocked_call, capsys): +def test_luks_precheck_encrypted_multi_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value mocked_exit = mock.patch("export.exit_gracefully", return_value=0) @@ -285,7 +311,7 @@ def test_luks_precheck_encrypted(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -def test_luks_precheck_encrypted(mocked_call, capsys): +def test_luks_precheck_encrypted_luks_error(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value From a770fad1bbbb325616b06499dacfa7e130573201 Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 8 Nov 2019 12:02:31 -0500 Subject: [PATCH 109/352] Cleanup: Remove config in sd-export-usb This is no longer needed as we are no longer using the pci bus id for preflight checks. 
--- securedrop_export/export.py | 13 ------------- tests/test_export.py | 35 ----------------------------------- 2 files changed, 48 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index c8ec5dc73..e9706ac84 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -137,19 +137,6 @@ def __init__(self, archive, config_path): ) self.tmpdir = tempfile.mkdtemp() - try: - with open(config_path) as f: - logging.info('Retrieving VM configuration') - json_config = json.loads(f.read()) - self.pci_bus_id = json_config.get("pci_bus_id", None) - logging.info('pci_bus_id is {}'.format(self.pci_bus_id)) - if self.pci_bus_id is None: - logging.error('pci_bus_id is not set in VM configuration') - raise - except Exception: - logger.error("error parsing VM configuration.") - self.exit_gracefully(ExportStatus.ERROR_USB_CONFIGURATION.value) - def safe_check_call(self, command, error_message): """ Safely wrap subprocess.check_output to ensure we always return 0 and diff --git a/tests/test_export.py b/tests/test_export.py index 87f031d88..02beddecc 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -20,41 +20,6 @@ ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") -def test_bad_sd_export_config_invalid_json(capsys): - - expected_message = "ERROR_USB_CONFIGURATION" - assert export.ExportStatus.ERROR_USB_CONFIGURATION.value == expected_message - - with pytest.raises(SystemExit) as sysexit: - export.SDExport("", BAD_TEST_CONFIG) - # A graceful exit means a return code of 0 - assert sysexit.value.code == 0 - - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) - assert captured.out == "" - - -def test_bad_sd_export_config_invalid_value(capsys): - - expected_message = "ERROR_USB_CONFIGURATION" - assert export.ExportStatus.ERROR_USB_CONFIGURATION.value == expected_message - - with pytest.raises(SystemExit) as sysexit: - export.SDExport("", 
ANOTHER_BAD_TEST_CONFIG) - # A graceful exit means a return code of 0 - assert sysexit.value.code == 0 - - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) - assert captured.out == "" - - -def test_good_sd_export_config(capsys): - submission = export.SDExport("", TEST_CONFIG) - assert submission.pci_bus_id == "2" - - def test_exit_gracefully_no_exception(capsys): submission = export.SDExport("testfile", TEST_CONFIG) From 51d6eb84d628c37c6baf1d916359526cb05f8e1f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 19 Nov 2019 10:38:25 -0500 Subject: [PATCH 110/352] ci: run tests also under python 3.7 --- .circleci/config.yml | 46 ++++++++++++++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 14 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d53dabaf2..4d6ecd2f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,3 +1,20 @@ +--- +common-steps: + - &install_packages + run: + name: Install packages + command: | + sudo apt install libnotify-bin + + - &run_tests + run: + name: Install test requirements and run tests + command: | + virtualenv .venv + source .venv/bin/activate + pip install --require-hashes -r test-requirements.txt + make test + version: 2 jobs: lint: @@ -19,26 +36,27 @@ jobs: source .venv/bin/activate make safety - test: + test-stretch: docker: - - image: circleci/python:3.5 + - image: circleci/python:3.5-stretch steps: - checkout - - run: - name: Install packages - command: | - sudo apt install libnotify-bin - - run: - name: Install test requirements and run tests - command: | - virtualenv .venv - source .venv/bin/activate - pip install --require-hashes -r test-requirements.txt - make test + - *install_packages + - *run_tests + + test-buster: + docker: + - image: circleci/python:3.7-buster + steps: + - checkout + - *install_packages + - *run_tests + workflows: version: 2 securedrop_export_ci: jobs: - lint - - test + - test-stretch + - test-buster From 
057014a0b4510311a6543a3d5756ab224fbb0891 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Thu, 21 Nov 2019 10:12:14 -0800 Subject: [PATCH 111/352] securedrop-proxy 0.1.5 --- changelog.md | 4 ++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index a97c31911..176c9a111 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.1.5 + + * Update build-requirements.txt to include wheels for Buster + ## 0.1.4 * Update urllib3 to version 1.24.3 or later due to CVE-2019-11324 (#35) diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 845639eef..9faa1b7a7 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.1.4 +0.1.5 From 9b9688eaf022bb33b4e54c3fa3c0288767028f8d Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 22 Nov 2019 16:19:44 -0500 Subject: [PATCH 112/352] dev: getting started docs, add single target for test/lint --- Makefile | 3 +++ README.md | 16 ++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/Makefile b/Makefile index 079b9e379..687ec36b1 100644 --- a/Makefile +++ b/Makefile @@ -12,6 +12,9 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities update-pip-requirements: ## Updates all Python requirements files via pip-compile. pip-compile --generate-hashes --output-file test-requirements.txt test-requirements.in +.PHONY: check +check: lint test + .PHONY: test test: pytest -v tests/ diff --git a/README.md b/README.md index bbfebdb14..4d097a2c5 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,22 @@ Code for exporting and printing files from the SecureDrop Qubes Workstation. +## Getting Started + +Python 3 support is required. 
To get started: + +``` +virtualenv --python=python3.7 .venv +source .venv/bin/activate +pip install -r test-requirements.txt +``` + +To run the linter and tests: + +``` +make check +``` + ## Supported Printers TBD From 44e046ffdafbfe392fbae82dfac5d557896f1cae Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 22 Nov 2019 16:35:50 -0500 Subject: [PATCH 113/352] dev: enable running single tests via makefile target --- Makefile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 687ec36b1..3ecc0094a 100644 --- a/Makefile +++ b/Makefile @@ -14,10 +14,11 @@ update-pip-requirements: ## Updates all Python requirements files via pip-compil .PHONY: check check: lint test - + +TESTS ?= tests .PHONY: test test: - pytest -v tests/ + pytest -v $$TESTS .PHONY: lint lint: From c40c1910137e5cb564f121aaa1c0f1bd15348f6f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 22 Nov 2019 18:32:11 -0500 Subject: [PATCH 114/352] test: add pytest-mock and pip-tools to test dependencies --- test-requirements.in | 2 ++ test-requirements.txt | 16 +++++++++++----- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/test-requirements.in b/test-requirements.in index 28ecacab6..00ca62224 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1,2 +1,4 @@ flake8 +pip-tools pytest +pytest-mock \ No newline at end of file diff --git a/test-requirements.txt b/test-requirements.txt index 8eb119b9c..4cd163f1f 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -12,6 +12,10 @@ attrs==19.1.0 \ --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \ --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399 \ # via pytest +click==7.0 \ + --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ + --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ + # via pip-tools entrypoints==0.3 \ 
--hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ @@ -36,10 +40,9 @@ packaging==19.0 \ --hash=sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af \ --hash=sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3 \ # via pytest -pathlib2==2.3.4 \ - --hash=sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e \ - --hash=sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8 \ - # via pytest +pip-tools==4.2.0 \ + --hash=sha256:123174aabf7f4a63dd6e0bfc8aeeb5eaddbecb75a41e9f0dd4c447b1f2de14f7 \ + --hash=sha256:5427ea4dcc175649723985fbcace9b2d8f46f9adbcc63bc2d7b247d9bcc74917 pluggy==0.12.0 \ --hash=sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc \ --hash=sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c \ @@ -60,13 +63,16 @@ pyparsing==2.4.1.1 \ --hash=sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580 \ --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 \ # via packaging +pytest-mock==1.12.1 \ + --hash=sha256:96a0cebc66e09930be2a15b03333d90b59584d3fb011924f81c14b50ee0afbba \ + --hash=sha256:e5381be2608e49547f5e47633c5f81241ebf6206d17ce516a7a18d5a917e3859 pytest==4.6.4 \ --hash=sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae \ --hash=sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6 six==1.12.0 \ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ - # via more-itertools, packaging, pathlib2, pytest + # via more-itertools, packaging, pip-tools, pytest wcwidth==0.1.7 \ --hash=sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e \ --hash=sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c \ From 
7e604d8b0c0809fa19e9c4046fd50e99ce526b3f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 22 Nov 2019 18:39:05 -0500 Subject: [PATCH 115/352] test: apply mocker.patch inline --- tests/test_export.py | 43 +++++++++++++++++++++++-------------------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/tests/test_export.py b/tests/test_export.py index 02beddecc..29344bbda 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -136,11 +136,11 @@ def test_get_good_printer_uri(mocked_call): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) -def test_get_bad_printer_uri(mocked_call, capsys): +def test_get_bad_printer_uri(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "ERROR_PRINTER_NOT_FOUND" assert export.ExportStatus.ERROR_PRINTER_NOT_FOUND.value == expected_message - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: result = submission.get_printer_uri() @@ -175,13 +175,14 @@ def test_is_not_open_office_file(capsys, open_office_paths): assert not submission.is_open_office_file(open_office_paths) -def test_usb_precheck_disconnected(capsys): +def test_usb_precheck_disconnected(capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_NOT_CONNECTED" assert export.ExportStatus.USB_NOT_CONNECTED.value == expected_message - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mock.patch("subprocess.check_output", return_value=CalledProcessError(1, 'check_output')) + mocker.patch("subprocess.check_output", + return_value=CalledProcessError(1, 'check_output')) with pytest.raises(SystemExit) as sysexit: submission.check_usb_connected() @@ -193,11 +194,13 @@ def 
test_usb_precheck_disconnected(capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB) -def test_usb_precheck_connected(mocked_call, capsys): +def test_usb_precheck_connected(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_CONNECTED" assert export.ExportStatus.USB_CONNECTED.value == expected_message - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + with pytest.raises(SystemExit) as sysexit: submission.check_usb_connected() mocked_exit.assert_called_once_with(expected_message) @@ -222,9 +225,9 @@ def test_extract_device_name_single_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) -def test_extract_device_name_multiple_part(mocked_call, capsys): +def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value with pytest.raises(SystemExit) as sysexit: @@ -237,10 +240,10 @@ def test_extract_device_name_multiple_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) @mock.patch("subprocess.check_call", return_value=0) -def test_luks_precheck_encrypted_fde(mocked_call, capsys): +def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = export.ExportStatus.USB_ENCRYPTED.value - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: submission.check_luks_volume() @@ -250,10 +253,10 @@ 
def test_luks_precheck_encrypted_fde(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) @mock.patch("subprocess.check_call", return_value=0) -def test_luks_precheck_encrypted_single_part(mocked_call, capsys): +def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = export.ExportStatus.USB_ENCRYPTED.value - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: submission.check_luks_volume() @@ -262,10 +265,10 @@ def test_luks_precheck_encrypted_single_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) -def test_luks_precheck_encrypted_multi_part(mocked_call, capsys): +def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) with pytest.raises(SystemExit) as sysexit: submission.check_luks_volume() @@ -276,13 +279,13 @@ def test_luks_precheck_encrypted_multi_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -def test_luks_precheck_encrypted_luks_error(mocked_call, capsys): +def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - 
mock.patch("subprocess.check_call", return_value=CalledProcessError(1, 'check_call')) + mocker.patch("subprocess.check_call", return_value=CalledProcessError(1, 'check_call')) with pytest.raises(SystemExit) as sysexit: submission.check_luks_volume() @@ -292,10 +295,10 @@ def test_luks_precheck_encrypted_luks_error(mocked_call, capsys): assert captured.err == "{}\n".format(expected_message) -def test_safe_check_call(capsys): +def test_safe_check_call(capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) submission.safe_check_call(['ls'], "this will work") - mocked_exit = mock.patch("export.exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = "uh oh!!!!" with pytest.raises(SystemExit) as sysexit: submission.safe_check_call(['ls', 'kjdsfhkdjfh'], expected_message) From 2fa2ccab64cae4266735800d936a2048694e1bc1 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 22 Nov 2019 19:02:52 -0500 Subject: [PATCH 116/352] dev: display help for all makefile targets --- Makefile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 3ecc0094a..a65ea37a9 100644 --- a/Makefile +++ b/Makefile @@ -13,15 +13,15 @@ update-pip-requirements: ## Updates all Python requirements files via pip-compil pip-compile --generate-hashes --output-file test-requirements.txt test-requirements.in .PHONY: check -check: lint test +check: lint test ## Run linter and tests TESTS ?= tests .PHONY: test -test: +test: ## Run tests pytest -v $$TESTS .PHONY: lint -lint: +lint: ## Run linter flake8 securedrop_export/ tests/ # Explaination of the below shell command should it ever break. @@ -33,7 +33,7 @@ lint: # 6. Format columns with colon as delimiter. .PHONY: help help: ## Print this message and exit. 
- @printf "Makefile for developing and testing the SecureDrop proxy.\n" + @printf "Makefile for developing and testing the SecureDrop export code.\n" @printf "Subcommands:\n\n" @awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST) \ | sort \ From c6146c2122ba373a31640300f78a107b2c870335 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 25 Nov 2019 09:21:57 -0500 Subject: [PATCH 117/352] app: use ExportStatus enum instead of hardcoded string --- securedrop_export/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index e9706ac84..fd767133e 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -200,7 +200,7 @@ def check_usb_connected(self): subprocess.check_output( ["lsblk", "-p", "-o", "KNAME", "--noheadings", "--inverse", DEVICE], stderr=subprocess.PIPE) - self.exit_gracefully("USB_CONNECTED") + self.exit_gracefully(ExportStatus.USB_CONNECTED.value) except subprocess.CalledProcessError: self.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) From c2574dddd4c6146482ae91679186965b5a6b9906 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 25 Nov 2019 09:30:49 -0500 Subject: [PATCH 118/352] test: add coverage reporting --- Makefile | 2 +- test-requirements.in | 3 ++- test-requirements.txt | 37 +++++++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index a65ea37a9..762c42ec9 100644 --- a/Makefile +++ b/Makefile @@ -18,7 +18,7 @@ check: lint test ## Run linter and tests TESTS ?= tests .PHONY: test test: ## Run tests - pytest -v $$TESTS + pytest -v --cov-report html --cov-report term-missing --cov=securedrop_export $$TESTS .PHONY: lint lint: ## Run linter diff --git a/test-requirements.in b/test-requirements.in index 00ca62224..8a4af7f03 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1,4 +1,5 @@ flake8 
pip-tools pytest -pytest-mock \ No newline at end of file +pytest-cov +pytest-mock diff --git a/test-requirements.txt b/test-requirements.txt index 4cd163f1f..51297db17 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -16,6 +16,40 @@ click==7.0 \ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ # via pip-tools +coverage==4.5.4 \ + --hash=sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6 \ + --hash=sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650 \ + --hash=sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5 \ + --hash=sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d \ + --hash=sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351 \ + --hash=sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755 \ + --hash=sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef \ + --hash=sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca \ + --hash=sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca \ + --hash=sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9 \ + --hash=sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc \ + --hash=sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5 \ + --hash=sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f \ + --hash=sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe \ + --hash=sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888 \ + --hash=sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5 \ + --hash=sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce \ + --hash=sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5 \ + 
--hash=sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e \ + --hash=sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e \ + --hash=sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9 \ + --hash=sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437 \ + --hash=sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1 \ + --hash=sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c \ + --hash=sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24 \ + --hash=sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47 \ + --hash=sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2 \ + --hash=sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28 \ + --hash=sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c \ + --hash=sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7 \ + --hash=sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0 \ + --hash=sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025 \ + # via pytest-cov entrypoints==0.3 \ --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ @@ -63,6 +97,9 @@ pyparsing==2.4.1.1 \ --hash=sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580 \ --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 \ # via packaging +pytest-cov==2.8.1 \ + --hash=sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b \ + --hash=sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626 pytest-mock==1.12.1 \ --hash=sha256:96a0cebc66e09930be2a15b03333d90b59584d3fb011924f81c14b50ee0afbba \ --hash=sha256:e5381be2608e49547f5e47633c5f81241ebf6206d17ce516a7a18d5a917e3859 From 281f9361f0b322993a393cabb4aa305bb99edaba Mon Sep 
17 00:00:00 2001 From: redshiftzero Date: Mon, 25 Nov 2019 11:22:42 -0500 Subject: [PATCH 119/352] test: fix remaining mocks, rework asserts for tests using exit mock MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * to have a mock raise exceptions, we need to use side_effect instead of return_value * if we’re mocking SDExport.exit_gracefully then we don’t expect a SystemExit to occur unless that’s a side_effect of the mocked exit_gracefully (generally it isn't) * we also have unit tests of exit_gracefully method so we shouldn’t be including asserts for the functionality inside the exit_gracefully method in unrelated tests * otherwise I've broken up some of the longer tests using whitespace to delineate the build / operate / check sections --- tests/test_export.py | 123 +++++++++++++++++++++++-------------------- 1 file changed, 66 insertions(+), 57 deletions(-) diff --git a/tests/test_export.py b/tests/test_export.py index 29344bbda..3c70c3fb6 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -3,6 +3,7 @@ import os import pytest import subprocess # noqa: F401 +import sys import tempfile from subprocess import CalledProcessError @@ -59,7 +60,9 @@ def test_empty_config(capsys): metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write("{}") + config = export.Metadata(temp_folder) + assert not config.is_valid() @@ -69,7 +72,9 @@ def test_valid_printer_test_config(capsys): metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write('{"device": "printer-test"}') + config = export.Metadata(temp_folder) + assert config.is_valid() assert config.encryption_key is None assert config.encryption_method is None @@ -81,7 +86,9 @@ def test_valid_printer_config(capsys): metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write('{"device": "printer"}') + config = 
export.Metadata(temp_folder) + assert config.is_valid() assert config.encryption_key is None assert config.encryption_method is None @@ -96,7 +103,9 @@ def test_invalid_encryption_config(capsys): f.write( '{"device": "disk", "encryption_method": "base64", "encryption_key": "hunter1"}' ) + config = export.Metadata(temp_folder) + assert config.encryption_key == "hunter1" assert config.encryption_method == "base64" assert not config.is_valid() @@ -110,7 +119,9 @@ def test_valid_encryption_config(capsys): f.write( '{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}' ) + config = export.Metadata(temp_folder) + assert config.encryption_key == "hunter1" assert config.encryption_method == "luks" assert config.is_valid() @@ -131,7 +142,9 @@ def test_popup_message(mocked_call): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BOTHER_PRINTER) def test_get_good_printer_uri(mocked_call): submission = export.SDExport("testfile", TEST_CONFIG) + result = submission.get_printer_uri() + assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" @@ -140,17 +153,13 @@ def test_get_bad_printer_uri(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "ERROR_PRINTER_NOT_FOUND" assert export.ExportStatus.ERROR_PRINTER_NOT_FOUND.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", + side_effect=lambda x: sys.exit(0)) - with pytest.raises(SystemExit) as sysexit: - result = submission.get_printer_uri() - assert result == "" - mocked_exit.assert_called_once_with(expected_message) + with pytest.raises(SystemExit): + submission.get_printer_uri() - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) - assert captured.out == "" + mocked_exit.assert_called_once_with(expected_message) 
@pytest.mark.parametrize('open_office_paths', [ @@ -182,15 +191,10 @@ def test_usb_precheck_disconnected(capsys, mocker): mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) mocker.patch("subprocess.check_output", - return_value=CalledProcessError(1, 'check_output')) - - with pytest.raises(SystemExit) as sysexit: - submission.check_usb_connected() - mocked_exit.assert_called_once_with(expected_message) + side_effect=CalledProcessError(1, 'check_output')) - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) + submission.check_usb_connected() + mocked_exit.assert_called_once_with(expected_message) @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB) @@ -198,29 +202,28 @@ def test_usb_precheck_connected(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_CONNECTED" assert export.ExportStatus.USB_CONNECTED.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - with pytest.raises(SystemExit) as sysexit: - submission.check_usb_connected() - mocked_exit.assert_called_once_with(expected_message) + submission.check_usb_connected() - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) + mocked_exit.assert_called_once_with(expected_message) @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) def test_extract_device_name_no_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) + submission.set_extracted_device_name() + assert submission.device == "/dev/sda" @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) def test_extract_device_name_single_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) + submission.set_extracted_device_name() + assert submission.device == "/dev/sda1" @@ 
-230,12 +233,9 @@ def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - with pytest.raises(SystemExit) as sysexit: - submission.set_extracted_device_name() - mocked_exit.assert_called_once_with(expected_message) - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) + submission.set_extracted_device_name() + + mocked_exit.assert_called_once_with(expected_message) @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) @@ -245,10 +245,9 @@ def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - with pytest.raises(SystemExit) as sysexit: - submission.check_luks_volume() - mocked_exit.assert_called_once_with(expected_message) - assert sysexit.value.code == 0 + submission.check_luks_volume() + + mocked_exit.assert_called_once_with(expected_message) @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) @@ -258,24 +257,32 @@ def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - with pytest.raises(SystemExit) as sysexit: - submission.check_luks_volume() - mocked_exit.assert_called_once_with(expected_message) - assert sysexit.value.code == 0 + submission.check_luks_volume() + + mocked_exit.assert_called_once_with(expected_message) @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = 
export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - with pytest.raises(SystemExit) as sysexit: + # Here we need to mock the exit_gracefully method with a side effect otherwise + # program execution will continue after exit_gracefully and exit_gracefully + # may be called a second time. + mocked_exit = mocker.patch.object(submission, "exit_gracefully", + side_effect=lambda x: sys.exit(0)) + + # Output of `lsblk -o TYPE --noheadings DEVICE_NAME` when a drive has multiple + # partitions + multi_partition_lsblk_output = b"disk\npart\npart\n" + mocker.patch("subprocess.check_call", return_value=0) + mocker.patch("subprocess.check_output", return_value=multi_partition_lsblk_output) + + with pytest.raises(SystemExit): submission.check_luks_volume() - mocked_exit.assert_called_once_with(expected_message) - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) + + mocked_exit.assert_called_once_with(expected_message) @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) @@ -283,16 +290,19 @@ def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_call", return_value=CalledProcessError(1, 'check_call')) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", + side_effect=lambda msg, e: sys.exit(0)) - with pytest.raises(SystemExit) as sysexit: + single_partition_lsblk_output = b"disk\npart\n" + mocker.patch("subprocess.check_output", return_value=single_partition_lsblk_output) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + with 
pytest.raises(SystemExit): submission.check_luks_volume() - mocked_exit.assert_called_once_with(expected_message) - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) + + assert mocked_exit.mock_calls[0][2]['msg'] == expected_message + assert mocked_exit.mock_calls[0][2]['e'] is None def test_safe_check_call(capsys, mocker): @@ -300,9 +310,8 @@ def test_safe_check_call(capsys, mocker): submission.safe_check_call(['ls'], "this will work") mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = "uh oh!!!!" - with pytest.raises(SystemExit) as sysexit: - submission.safe_check_call(['ls', 'kjdsfhkdjfh'], expected_message) - mocked_exit.assert_called_once_with(expected_message) - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) + + submission.safe_check_call(['ls', 'kjdsfhkdjfh'], expected_message) + + assert mocked_exit.mock_calls[0][2]['msg'] == expected_message + assert mocked_exit.mock_calls[0][2]['e'] is None From bbb3d114300e535be85f70a3a24aa31b47cd364f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 25 Nov 2019 11:50:10 -0500 Subject: [PATCH 120/352] test: add pathlib2 as dependency for python < 3.6 https://github.com/pytest-dev/pytest/blob/7e5ad314287a5c86856efea13c04333d7baf3643/setup.py#L11 --- test-requirements.in | 1 + test-requirements.txt | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/test-requirements.in b/test-requirements.in index 8a4af7f03..6a68d166f 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1,4 +1,5 @@ flake8 +pathlib2 # required by pytest for python 3.5 pip-tools pytest pytest-cov diff --git a/test-requirements.txt b/test-requirements.txt index 51297db17..e18ef566f 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -74,6 +74,9 @@ packaging==19.0 \ 
--hash=sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af \ --hash=sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3 \ # via pytest +pathlib2==2.3.5 \ + --hash=sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db \ + --hash=sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868 pip-tools==4.2.0 \ --hash=sha256:123174aabf7f4a63dd6e0bfc8aeeb5eaddbecb75a41e9f0dd4c447b1f2de14f7 \ --hash=sha256:5427ea4dcc175649723985fbcace9b2d8f46f9adbcc63bc2d7b247d9bcc74917 @@ -109,7 +112,7 @@ pytest==4.6.4 \ six==1.12.0 \ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ - # via more-itertools, packaging, pip-tools, pytest + # via more-itertools, packaging, pathlib2, pip-tools, pytest wcwidth==0.1.7 \ --hash=sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e \ --hash=sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c \ From 81994bfc6a586bf735fda0a28df3ce0a52fa2462 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 29 Nov 2019 18:43:00 +0530 Subject: [PATCH 121/352] Adds buster packaging in CI Fixes #50 --- .circleci/config.yml | 81 +++++++++++++++++++++++++++----------------- 1 file changed, 50 insertions(+), 31 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a501982b6..73d3df004 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,6 +17,42 @@ common-steps: source .venv/bin/activate make safety + - &install_packaging_dependencies + run: + name: Install Debian packaging dependencies and download wheels + command: | + mkdir ~/packaging && cd ~/packaging + git clone https://github.com/freedomofpress/securedrop-debian-packaging.git + cd securedrop-debian-packaging + make install-deps && make fetch-wheels + PKG_DIR=~/project make requirements + + - &verify_requirements + run: + name: Ensure that 
build-requirements.txt and requirements.txt are in sync. + command: | + cd ~/project + # Return 1 if unstaged changes exist (after `make requirements` in the + # previous run step), else return 0. + git diff --quiet + + - &make_source_tarball + run: + name: Tag and make source tarball + command: | + cd ~/project + ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here + python3 setup.py sdist + + - &build_debian_package + run: + name: Build debian package + command: | + cd ~/packaging/securedrop-debian-packaging + export PKG_VERSION=1000.0 + export PKG_PATH=/home/circleci/project/dist/securedrop-proxy-$PKG_VERSION.tar.gz + make securedrop-proxy + version: 2 jobs: build-stretch: @@ -24,38 +60,20 @@ jobs: - image: circleci/python:3.5-stretch steps: - checkout + - *install_packaging_dependencies + - *verify_requirements + - *make_source_tarball + - *build_debian_package - - run: - name: Install Debian packaging dependencies and download wheels - command: | - mkdir ~/packaging && cd ~/packaging - git clone https://github.com/freedomofpress/securedrop-debian-packaging.git - cd securedrop-debian-packaging - make install-deps && make fetch-wheels - PKG_DIR=~/project make requirements - - - run: - name: Ensure that build-requirements.txt and requirements.txt are in sync. - command: | - cd ~/project - # Return 1 if unstaged changes exist (after `make requirements` in the - # previous run step), else return 0. 
- git diff --quiet - - - run: - name: Tag and make source tarball - command: | - cd ~/project - ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here - python3 setup.py sdist - - - run: - name: Build debian package - command: | - cd ~/packaging/securedrop-debian-packaging - export PKG_VERSION=1000.0 - export PKG_PATH=~/project/dist/securedrop-proxy-$PKG_VERSION.tar.gz - make securedrop-proxy + build-buster: + docker: + - image: circleci/python:3.7-buster + steps: + - checkout + - *install_packaging_dependencies + - *verify_requirements + - *make_source_tarball + - *build_debian_package test-stretch: docker: @@ -80,3 +98,4 @@ workflows: - test-stretch - test-buster - build-stretch + - build-buster From ebf479f815403300c58c09795623707d2994bed7 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 29 Nov 2019 19:04:44 +0530 Subject: [PATCH 122/352] Adds debian packaging job in CI for Buster Fixes #30 --- .circleci/config.yml | 48 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 47 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4d6ecd2f1..ad0cbded6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,6 +15,42 @@ common-steps: pip install --require-hashes -r test-requirements.txt make test + - &install_packaging_dependencies + run: + name: Install Debian packaging dependencies and download wheels + command: | + mkdir ~/packaging && cd ~/packaging + git clone https://github.com/freedomofpress/securedrop-debian-packaging.git + cd securedrop-debian-packaging + make install-deps && make fetch-wheels + PKG_DIR=~/project make requirements + + - &verify_requirements + run: + name: Ensure that build-requirements.txt and requirements.txt are in sync. + command: | + cd ~/project + # Return 1 if unstaged changes exist (after `make requirements` in the + # previous run step), else return 0. 
+ git diff --quiet + + - &make_source_tarball + run: + name: Tag and make source tarball + command: | + cd ~/project + ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here + python3 setup.py sdist + + - &build_debian_package + run: + name: Build debian package + command: | + cd ~/packaging/securedrop-debian-packaging + export PKG_VERSION=1000.0 + export PKG_PATH=/home/circleci/project/dist/securedrop-export-$PKG_VERSION.tar.gz + make securedrop-export + version: 2 jobs: lint: @@ -52,6 +88,16 @@ jobs: - *install_packages - *run_tests + build-buster: + docker: + - image: circleci/python:3.7-buster + steps: + - checkout + - *install_packaging_dependencies + - *verify_requirements + - *make_source_tarball + - *build_debian_package + workflows: version: 2 securedrop_export_ci: @@ -59,4 +105,4 @@ workflows: - lint - test-stretch - test-buster - + - build-buster From 224dd7c4f8cd7e2b2537fcf4b53437bb065239de Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Mon, 2 Dec 2019 17:34:10 +0530 Subject: [PATCH 123/352] Adds exception handling Also raises exception for using two different vmname or logginvm name within the codebase. 
--- oqubeslogging.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/oqubeslogging.py b/oqubeslogging.py index 6392b1be7..0a8f9b1d4 100644 --- a/oqubeslogging.py +++ b/oqubeslogging.py @@ -7,9 +7,12 @@ class Singleton(type): def __call__(cls, *args, **kwargs): if cls not in cls._ins: - cls._ins[cls] = super(Singleton, cls).__call__(*args, **kwargs) + cls._ins[cls] = (super(Singleton, cls).__call__(*args, **kwargs), args) - return cls._ins[cls] + if len(args) > 1: + if args != cls._ins[cls][1]: + raise Exception("Arguments not matching for logvm name and Qubes VM name") + return cls._ins[cls][0] class InternalLog(metaclass=Singleton): @@ -35,6 +38,10 @@ def __init__(self, name, logvmname): self.qubes_log = InternalLog(name, logvmname) def emit(self, record): - msg = self.format(record) - self.qubes_log.write(msg) - return True + try: + msg = self.format(record) + self.qubes_log.write(msg) + return True + + except Exception: + self.handleError(record) From 5c81debc47c7441a0b9330f67f37537432d14fdc Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Mon, 2 Dec 2019 17:36:18 +0530 Subject: [PATCH 124/352] Removes Python2 statements --- oqubes-logging | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/oqubes-logging b/oqubes-logging index 036a19ee7..5fd3a23e4 100644 --- a/oqubes-logging +++ b/oqubes-logging @@ -21,10 +21,9 @@ def sanitize_line(untrusted_line): line[i] = 0x2e return bytearray(line).decode('ascii') -try: - stdin = sys.stdin.buffer # python3 -except AttributeError: - stdin = io.open(0, 'rb') # python2 + +stdin = sys.stdin.buffer # python3 + start = datetime.utcnow() @@ -44,6 +43,7 @@ def log(msg, remote=True, now=None): line = '{:%F %T.%f} +0000 {} {}\n'.format(now, remote_str, msg) tmp_log.write(line.encode('utf-8')) + tmp_log.flush() log('starting log', now=start, remote=False) From 096a57d081045c8ce1786248fc83e4ff26cbca75 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 2 Dec 2019 09:50:41 
-0500 Subject: [PATCH 125/352] synchronize singleton creation using threading.Lock() in __call__ --- oqubeslogging.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/oqubeslogging.py b/oqubeslogging.py index 0a8f9b1d4..ce191ed71 100644 --- a/oqubeslogging.py +++ b/oqubeslogging.py @@ -1,13 +1,16 @@ from logging import StreamHandler from subprocess import Popen, PIPE +import threading class Singleton(type): _ins = {} + _lock = threading.Lock() def __call__(cls, *args, **kwargs): - if cls not in cls._ins: - cls._ins[cls] = (super(Singleton, cls).__call__(*args, **kwargs), args) + with cls._lock: # First thread that gets here creates the instance + if cls not in cls._ins: + cls._ins[cls] = (super(Singleton, cls).__call__(*args, **kwargs), args) if len(args) > 1: if args != cls._ins[cls][1]: From 802eb83b5d7b25df6753accde23c4f6520aca8dc Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Mon, 25 Nov 2019 19:28:50 -0500 Subject: [PATCH 126/352] app: remove hardcoded device name, get on the fly from lsblk --- securedrop_export/export.py | 58 ++++++++++++++++++++++++++++--------- securedrop_export/main.py | 6 +++- 2 files changed, 50 insertions(+), 14 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index fd767133e..c42611197 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -11,12 +11,12 @@ import tarfile import tempfile import time +from typing import List, Optional # noqa: F401 from enum import Enum PRINTER_NAME = "sdw-printer" PRINTER_WAIT_TIMEOUT = 60 -DEVICE = "/dev/sda" MOUNTPOINT = "/media/usb" ENCRYPTED_DEVICE = "encrypted_volume" BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" @@ -120,7 +120,7 @@ def is_valid(self): class SDExport(object): def __init__(self, archive, config_path): - self.device = DEVICE + self.device = None # Optional[str] self.mountpoint = MOUNTPOINT self.encrypted_device = ENCRYPTED_DEVICE @@ -192,22 +192,54 @@ def extract_tarball(self): except Exception: 
self.exit_gracefully(ExportStatus.ERROR_EXTRACTION.value) - def check_usb_connected(self): - # If the USB is not attached via qvm-usb attach, lsusb will return empty string and a - # return code of 1 + def check_usb_connected(self, exit=False) -> None: + usb_devices = self._get_connected_usbs() + + if len(usb_devices) == 0: + self.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) + elif len(usb_devices) == 1: + self.device = usb_devices[0] + if exit: + self.exit_gracefully(ExportStatus.USB_CONNECTED.value) + elif len(usb_devices) > 1: + # Return generic error until freedomofpress/securedrop-export/issues/25 + self.exit_gracefully(ExportStatus.ERROR_GENERIC.value) + + def _get_connected_usbs(self) -> List[str]: logging.info('Performing usb preflight') + # List all block devices attached to VM that are disks and not partitions. try: - subprocess.check_output( - ["lsblk", "-p", "-o", "KNAME", "--noheadings", "--inverse", DEVICE], - stderr=subprocess.PIPE) - self.exit_gracefully(ExportStatus.USB_CONNECTED.value) + lsblk = subprocess.Popen(["lsblk", "-o", "NAME,TYPE"], stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + grep = subprocess.Popen(["grep", "disk"], stdin=lsblk.stdout, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + command_output = grep.stdout.readlines() + + # The first word in each element of the command_output list is the device name + attached_devices = [x.decode('utf8').split()[0] for x in command_output] except subprocess.CalledProcessError: - self.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) + self.exit_gracefully(ExportStatus.ERROR_GENERIC.value) + + # Determine which are USBs by selecting those block devices that are removable disks. 
+ usb_devices = [] + for device in attached_devices: + try: + removable = subprocess.check_output( + ["cat", "/sys/class/block/{}/removable".format(device)], + stderr=subprocess.PIPE) + is_removable = int(removable.decode('utf8').strip()) + except subprocess.CalledProcessError: + is_removable = False + + if is_removable: + usb_devices.append("/dev/{}".format(device)) + + return usb_devices def set_extracted_device_name(self): try: device_and_partitions = subprocess.check_output( - ["lsblk", "-o", "TYPE", "--noheadings", DEVICE], stderr=subprocess.PIPE) + ["lsblk", "-o", "TYPE", "--noheadings", self.device], stderr=subprocess.PIPE) # we don't support multiple partitions partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') @@ -215,8 +247,8 @@ def set_extracted_device_name(self): logging.debug("multiple partitions not supported") self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) - # set device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted - self.device = DEVICE if partition_count == 0 else DEVICE + '1' + # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted + self.device = self.device if partition_count == 0 else self.device + '1' except subprocess.CalledProcessError: self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 34b8a9f66..1c7f6dbc7 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -17,9 +17,11 @@ def __main__(submission): if submission.archive_metadata.is_valid(): if submission.archive_metadata.export_method == "usb-test": logging.info('Export archive is usb-test') - submission.check_usb_connected() + submission.check_usb_connected(exit=True) elif submission.archive_metadata.export_method == "disk": logging.info('Export archive is disk') + # check_usb_connected looks for the drive, sets the drive to use + submission.check_usb_connected() 
logging.info('Unlocking volume') # exports all documents in the archive to luks-encrypted volume submission.unlock_luks_volume(submission.archive_metadata.encryption_key) @@ -29,6 +31,8 @@ def __main__(submission): submission.copy_submission() elif submission.archive_metadata.export_method == "disk-test": logging.info('Export archive is disk-test') + # check_usb_connected looks for the drive, sets the drive to use + submission.check_usb_connected() submission.check_luks_volume() elif submission.archive_metadata.export_method == "printer": logging.info('Export archive is printer') From 930dd9d77e13da49829eed4eef60a479d669cd96 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 26 Nov 2019 15:41:48 -0500 Subject: [PATCH 127/352] test: check_usb_connected determines the device name dynamically --- tests/test_export.py | 61 +++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 57 insertions(+), 4 deletions(-) diff --git a/tests/test_export.py b/tests/test_export.py index 3c70c3fb6..a09d40d39 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -185,33 +185,81 @@ def test_is_not_open_office_file(capsys, open_office_paths): def test_usb_precheck_disconnected(capsys, mocker): + """Tests the scenario where there are disks connected, but none of them are USB""" submission = export.SDExport("testfile", TEST_CONFIG) expected_message = "USB_NOT_CONNECTED" assert export.ExportStatus.USB_NOT_CONNECTED.value == expected_message + + # Popen call returns lsblk output + command_output = mock.MagicMock() + command_output.stdout = mock.MagicMock() + command_output.stdout.readlines = mock.MagicMock(return_value=[b"sda disk\n", b"sdb disk\n"]) + mocker.patch("subprocess.Popen", return_value=command_output) + + # check_output returns removable status + mocker.patch("subprocess.check_output", return_value=[b'0\n', b'0\n']) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) mocker.patch("subprocess.check_output", 
side_effect=CalledProcessError(1, 'check_output')) - submission.check_usb_connected() + submission.check_usb_connected(exit=True) + mocked_exit.assert_called_once_with(expected_message) + assert submission.device is None -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_USB) -def test_usb_precheck_connected(mocked_call, capsys, mocker): +def test_usb_precheck_connected(capsys, mocker): + """Tests the scenario where there is one USB connected""" submission = export.SDExport("testfile", TEST_CONFIG) + + # Popen call returns lsblk output + command_output = mock.MagicMock() + command_output.stdout = mock.MagicMock() + command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n"]) + mocker.patch("subprocess.Popen", return_value=command_output) + + # check_output returns removable status + mocker.patch("subprocess.check_output", return_value=b"1\n") + expected_message = "USB_CONNECTED" assert export.ExportStatus.USB_CONNECTED.value == expected_message mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - submission.check_usb_connected() + submission.check_usb_connected(exit=True) + + mocked_exit.assert_called_once_with(expected_message) + assert submission.device == "/dev/sdb" + + +def test_usb_precheck_multiple_devices_connected(capsys, mocker): + """Tests the scenario where there are multiple USB drives connected""" + submission = export.SDExport("testfile", TEST_CONFIG) + + # Popen call returns lsblk output + command_output = mock.MagicMock() + command_output.stdout = mock.MagicMock() + command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n", b"sdc disk\n"]) + mocker.patch("subprocess.Popen", return_value=command_output) + + # check_output returns removable status + mocker.patch("subprocess.check_output", return_value=b"1\n") + + expected_message = "ERROR_GENERIC" + assert export.ExportStatus.ERROR_GENERIC.value == expected_message + mocked_exit = mocker.patch.object(submission, 
"exit_gracefully", return_value=0) + + submission.check_usb_connected(exit=True) mocked_exit.assert_called_once_with(expected_message) + assert submission.device is None @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) def test_extract_device_name_no_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) + submission.device = "/dev/sda" submission.set_extracted_device_name() @@ -221,6 +269,7 @@ def test_extract_device_name_no_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) def test_extract_device_name_single_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) + submission.device = "/dev/sda" submission.set_extracted_device_name() @@ -230,6 +279,7 @@ def test_extract_device_name_single_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) + submission.device = "/dev/sda" mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value @@ -254,6 +304,7 @@ def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): @mock.patch("subprocess.check_call", return_value=0) def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) + submission.device = "/dev/sda" expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) @@ -265,6 +316,7 @@ def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) + 
submission.device = "/dev/sda" expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value # Here we need to mock the exit_gracefully method with a side effect otherwise @@ -288,6 +340,7 @@ def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) + submission.device = "/dev/sda" expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value From b62397c64a34ebb54035086b80362de6fddc95ff Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Mon, 25 Nov 2019 15:25:42 -0800 Subject: [PATCH 128/352] support laserjet --- securedrop_export/export.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index fd767133e..6bb329c65 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -350,17 +350,17 @@ def get_printer_uri(self): # No usb printer is connected logging.info('No usb printers connected') self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) - elif "Brother" in printer_uri: - logging.info('Printer {} is supported'.format(printer_uri)) - return printer_uri - else: + elif not any(x in printer_uri for x in ("Brother", "LaserJet")): # printer url is a make that is unsupported logging.info('Printer {} is unsupported'.format(printer_uri)) self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + logging.info('Printer {} is supported'.format(printer_uri)) + return printer_uri + def install_printer_ppd(self, uri): # Some drivers don't come with ppd files pre-compiled, we must compile them - if "Brother" in uri: + if any(x in uri for x in ("Brother", "LaserJet")): self.safe_check_call( command=[ "sudo", From 0a410fbe0beceebc3914602e7fb51525a5dae58f 
Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 27 Nov 2019 13:11:14 -0800 Subject: [PATCH 129/352] fix logging bug --- securedrop_export/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 6bb329c65..9f68c5131 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -446,7 +446,7 @@ def print_file(self, file_to_print): ) file_to_print = converted_path - logging.info('Sending file to printer {}:{}'.format(self.printer_name)) + logging.info('Sending file to printer {}:{}'.format(self.printer_name, file_to_print)) self.safe_check_call( command=["xpp", "-P", self.printer_name, file_to_print], error_message=ExportStatus.ERROR_PRINT.value From 922482872968dc914e6fce2e7d8295fefa463640 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Mon, 2 Dec 2019 13:20:01 -0800 Subject: [PATCH 130/352] add laserjet test --- tests/test_export.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tests/test_export.py b/tests/test_export.py index 3c70c3fb6..bcd6b2114 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -10,7 +10,8 @@ from securedrop_export import export SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa -SAMPLE_OUTPUT_BOTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa +SAMPLE_OUTPUT_BROTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa +SAMPLE_OUTPUT_LASERJET_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000\nnetwork lpd" # noqa 
SAMPLE_OUTPUT_NO_PART = b"disk\ncrypt" # noqa SAMPLE_OUTPUT_ONE_PART = b"disk\npart\ncrypt" # noqa @@ -139,8 +140,8 @@ def test_popup_message(mocked_call): ]) -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BOTHER_PRINTER) -def test_get_good_printer_uri(mocked_call): +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) +def test_get_good_printer_uri_brother(mocked_call): submission = export.SDExport("testfile", TEST_CONFIG) result = submission.get_printer_uri() @@ -148,6 +149,13 @@ def test_get_good_printer_uri(mocked_call): assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) +def test_get_good_printer_uri_laserjet(mocked_call): + submission = export.SDExport("testfile", TEST_CONFIG) + result = submission.get_printer_uri() + assert result == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) def test_get_bad_printer_uri(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) From eed82e99f1d8d901b43529c972a8ae5401dc9b81 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 4 Dec 2019 20:14:14 +0530 Subject: [PATCH 131/352] Moves the file to a module directory --- oqubeslogging.py => securedrop_log/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename oqubeslogging.py => securedrop_log/__init__.py (100%) diff --git a/oqubeslogging.py b/securedrop_log/__init__.py similarity index 100% rename from oqubeslogging.py rename to securedrop_log/__init__.py From 98b55351d3e2a003d7b51a86652f06917cced592 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 4 Dec 2019 20:14:58 +0530 Subject: [PATCH 132/352] Renames the class and qrexec in the code --- securedrop_log/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/securedrop_log/__init__.py
b/securedrop_log/__init__.py index ce191ed71..a67d90f8e 100644 --- a/securedrop_log/__init__.py +++ b/securedrop_log/__init__.py @@ -21,7 +21,7 @@ def __call__(cls, *args, **kwargs): class InternalLog(metaclass=Singleton): def __init__(self, name, logvmname): self.process = Popen( - ["/usr/lib/qubes/qrexec-client-vm", logvmname, "oqubes.Logging"], + ["/usr/lib/qubes/qrexec-client-vm", logvmname, "securedrop.Log"], stdin=PIPE, stdout=PIPE, stderr=PIPE, @@ -35,7 +35,7 @@ def write(self, text): self.process.stdin.flush() -class OQubesLog(StreamHandler): +class SecureDropLog(StreamHandler): def __init__(self, name, logvmname): StreamHandler.__init__(self) self.qubes_log = InternalLog(name, logvmname) From bd78047fa8fc64fad845e55fc4fa41c55e056fbd Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 4 Dec 2019 20:17:35 +0530 Subject: [PATCH 133/352] More renames in README and code --- README.md | 17 ++++++++--------- oqubes-logging => securedrop-log | 0 2 files changed, 8 insertions(+), 9 deletions(-) rename oqubes-logging => securedrop-log (100%) diff --git a/README.md b/README.md index 9b19ddcb2..efaf8e8a9 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,6 @@ -# OQubes Logging +# securedrop-log -This is a PoC logging service based on [Qubes -buildlog](https://github.com/QubesOS/qubes-builder/blob/master/rpc-services/qubesbuilder.BuildLog). +This is a Python module and qrexec service for logging in Qubes for [SecureDrop](https://securedrop.org). ## How to use/try this? @@ -10,7 +9,7 @@ In our example, we will use a vm named *logging* for storing logs, and we will u ### In dom0 -- Create a file `/etc/qubes-rpc/policy/oqubes.Logging` in `dom0` with the following content. +- Create a file `/etc/qubes-rpc/policy/securedrop.Log` in `dom0` with the following content. 
``` workvm logging allow @@ -19,13 +18,13 @@ workvm logging allow ### In logging vm -Add the following content to `/etc/qubes-rpc/oqubes.Logging` +Add the following content to `/etc/qubes-rpc/securedrop.Log` ``` -/usr/sbin/oqubes-logging +/usr/sbin/securedrop-log ``` -and then place `oqubes-logging` script to `/usr/sbin/` directory and make sure that +and then place `securedrop-log` script to `/usr/sbin/` directory and make sure that it is executable. ### To use from any Python code in workvm @@ -34,10 +33,10 @@ Here is an example code using Python logging ```Python import logging -from oqubeslogging import OQubesLog +from securedrop_log import SecureDropLog def main(): - handler = OQubesLog("workvm", "proxy-debian") + handler = SecureDropLog("workvm", "proxy-debian") logging.basicConfig(level=logging.DEBUG, handlers=[handler]) logger = logging.getLogger("example") diff --git a/oqubes-logging b/securedrop-log similarity index 100% rename from oqubes-logging rename to securedrop-log From 0301869fe714ccc0059d50de230838d8a5d534be Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 4 Dec 2019 20:20:59 +0530 Subject: [PATCH 134/352] Adds license --- LICENSE | 674 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 674 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..f288702d2 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. 
+ + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. 
The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. 
This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. 
For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. 
Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. 
+ + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. 
Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
From 38283be4c45680912f8f20b3a9c085fe84930126 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 4 Dec 2019 20:23:54 +0530 Subject: [PATCH 135/352] Initial setup.py and MANIFEST --- MANIFEST.in | 8 ++++++++ setup.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 MANIFEST.in create mode 100644 setup.py diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..78f3d19f7 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,8 @@ +include LICENSE +include README.md +include build-requirements.txt +include requirements.txt +include securedrop_log/*.py +include setup.py +include securedrop-log + diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..60f080fac --- /dev/null +++ b/setup.py @@ -0,0 +1,31 @@ +import pkgutil +import setuptools + +with open("README.md", "r") as fh: + long_description = fh.read() + +version = "0.0.1" + +setuptools.setup( + name="securedrop-log", + version=version, + author="Freedom of the Press Foundation", + author_email="securedrop@freedom.press", + description="SecureDrop Qubes logging scripts", + long_description=long_description, + long_description_content_type="text/markdown", + license="GPLv3+", + install_requires=[], + python_requires=">=3.5", + url="https://github.com/freedomofpress/securedrop-log", + packages="securedrop_log", + classifiers=( + "Development Status :: 3 - Alpha", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Intended Audience :: Developers", + "OOperating System :: POSIX :: Linux", + ), + data_files = [("sbin",["securedrop-log"])] +) From c061b7ead408d3dd3b948e64a4662e65eaaddea8 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 4 Dec 2019 20:25:15 +0530 Subject: [PATCH 136/352] Adds Python specific gitignore file --- .gitignore | 130 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 130 
insertions(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..aa44ee2ad --- /dev/null +++ b/.gitignore @@ -0,0 +1,130 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + From 9b5b9d26008a6aa6ac3d2bd11cec3111e63ebe44 Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 4 Dec 2019 10:11:38 -0500 Subject: [PATCH 137/352] Provide Buster support for printing Some changes in lpinfo resulted in the printer queue not being activated. As a result, the printer could not accept jobs and the lpinfo -E command would return non-zero. --- securedrop_export/export.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 9f68c5131..6329bbbe6 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -376,6 +376,7 @@ def install_printer_ppd(self, uri): def setup_printer(self, printer_uri, printer_ppd): # Add the printer using lpadmin + logger.info('Setting up printer name {}'.format(self.printer_name)) self.safe_check_call( command=[ "sudo", @@ -390,17 +391,34 @@ def setup_printer(self, printer_uri, printer_ppd): error_message=ExportStatus.ERROR_PRINTER_INSTALL.value ) # Activate the printer so that it can receive jobs + logger.info('Activating printer {}'.format(self.printer_name)) self.safe_check_call( - command=["sudo", "lpadmin", "-p", self.printer_name, "-E"], + command=["sudo", "lpadmin", "-p", self.printer_name], error_message=ExportStatus.ERROR_PRINTER_INSTALL.value ) + # worksaround for version of lpadmin/cups in debian buster: + # see https://forums.developer.apple.com/thread/106112 + self.safe_check_call( + command=["sudo", "cupsaccept", self.printer_name], + 
error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) + # A non-zero return code is expected here, but the command is required + # and works as expected. + command = ["sudo", "cupsenable", self.printer_name] + try: + subprocess.check_call(command) + except subprocess.CalledProcessError: + pass + # Allow user to print (without using sudo) + logger.info('Allow user to print {}'.format(self.printer_name)) self.safe_check_call( command=["sudo", "lpadmin", "-p", self.printer_name, "-u", "allow:user"], error_message=ExportStatus.ERROR_PRINTER_INSTALL.value ) def print_test_page(self): + logger.info('Printing test page') self.print_file("/usr/share/cups/data/testprint") self.popup_message("Printing test page") @@ -436,7 +454,7 @@ def print_file(self, file_to_print): # If the file to print is an (open)office document, we need to call unoconf to # convert the file to pdf as printer drivers do not support this format if self.is_open_office_file(file_to_print): - logging.info('Converting Office document to pdf'.format(self.printer_name)) + logger.info('Converting Office document to pdf') folder = os.path.dirname(file_to_print) converted_filename = file_to_print + ".pdf" converted_path = os.path.join(folder, converted_filename) From ed1a2f99e566d5d5b6e1c32dd16fe9ec1c6177e1 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 4 Dec 2019 16:41:29 -0500 Subject: [PATCH 138/352] add initial changelog.md --- MANIFEST.in | 1 + changelog.md | 5 +++++ 2 files changed, 6 insertions(+) create mode 100644 changelog.md diff --git a/MANIFEST.in b/MANIFEST.in index 78f3d19f7..d82c3479f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,6 @@ include LICENSE include README.md +include changelog.md include build-requirements.txt include requirements.txt include securedrop_log/*.py diff --git a/changelog.md b/changelog.md new file mode 100644 index 000000000..9c18f3c15 --- /dev/null +++ b/changelog.md @@ -0,0 +1,5 @@ +# Changelog + +## 0.0.1 + + * Initial release. 
From 2c65482b4e6fd962875a1711b904065015dd6e4b Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 4 Dec 2019 16:54:01 -0500 Subject: [PATCH 139/352] test: couple of tests, never gonna be great b/c of the singleton logger1 from test 2 is logger1 from test 1 --- tests/__init__.py | 0 tests/test_logger.py | 18 ++++++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 tests/__init__.py create mode 100644 tests/test_logger.py diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/test_logger.py b/tests/test_logger.py new file mode 100644 index 000000000..174544f00 --- /dev/null +++ b/tests/test_logger.py @@ -0,0 +1,18 @@ +from unittest import mock, TestCase + +import securedrop_log + + +@mock.patch('securedrop_log.Popen') +class TestLogger(TestCase): + def test_singleton_there_can_be_only_one(self, mock_popen): + logger1 = securedrop_log.SecureDropLog('name', 'logvmname') + logger2 = securedrop_log.SecureDropLog('name', 'logvmname') + + self.assertEqual(logger1.qubes_log, logger2.qubes_log) + + def test_singleton_raises_exception_for_dev(self, mock_popen): + logger1 = securedrop_log.SecureDropLog('name', 'logvmname') + + with self.assertRaises(Exception): + logger2 = securedrop_log.SecureDropLog('name2', 'logvmname2') From 464eccbbe8d696298fc0f3c7955ae0de0bc5178b Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 4 Dec 2019 16:43:56 -0500 Subject: [PATCH 140/352] ci: add circle integration and test job --- .circleci/config.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000..dd6ad3702 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,16 @@ +version: 2 +jobs: + test: + docker: + - image: circleci/python:3.7-buster + steps: + - checkout + - run: + name: Run tests + command: python3 -m unittest + +workflows: + version: 2 + per_pr: + jobs: + - test 
From 18a532b3d315c0b3e39f6bc6d93ce5b849052c1c Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 4 Dec 2019 16:45:25 -0500 Subject: [PATCH 141/352] update examples for new package name --- example.py | 4 ++-- journal-example.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/example.py b/example.py index 83662f416..1a582bbea 100644 --- a/example.py +++ b/example.py @@ -1,12 +1,12 @@ import logging -from oqubeslogging import OQubesLog +from securedrop_log import SecureDropLog import ex2 import ex1 def main(): - handler = OQubesLog("workvm", "logging") + handler = SecureDropLog("workvm", "logging") logging.basicConfig(level=logging.DEBUG, handlers=[handler]) logger = logging.getLogger("example") diff --git a/journal-example.py b/journal-example.py index fa468cb10..903412af9 100644 --- a/journal-example.py +++ b/journal-example.py @@ -1,11 +1,11 @@ import logging -from oqubeslogging import OQubesLog +from securedrop_log import SecureDropLog from systemd import journal import select def main(): - handler = OQubesLog("workvm", "logging") + handler = SecureDropLog("workvm", "logging") logging.basicConfig(level=logging.DEBUG, handlers=[handler]) logger = logging.getLogger("example") j = journal.Reader() From f6cf757193760e254fb653e80a302fd7c030961f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 4 Dec 2019 17:00:36 -0500 Subject: [PATCH 142/352] requirements files (needed for deb build) --- build-requirements.txt | 0 requirements.txt | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 build-requirements.txt create mode 100644 requirements.txt diff --git a/build-requirements.txt b/build-requirements.txt new file mode 100644 index 000000000..e69de29bb diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..e69de29bb From 23c0b5b7349afa3f815c6d5555f8d1f090a815b5 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 4 Dec 2019 14:24:06 -0800 Subject: [PATCH 143/352] support laserjet ppds 
generated via cups compiler --- securedrop_export/export.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 6329bbbe6..421fea7d6 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -21,6 +21,8 @@ ENCRYPTED_DEVICE = "encrypted_volume" BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" +LASERJET_DRIVER = "/usr/share/cups/drv/hpcups.drv" +LASERJET_PPD = "/usr/share/cups/model/hp-laserjet_6l.ppd" logger = logging.getLogger(__name__) @@ -127,8 +129,8 @@ def __init__(self, archive, config_path): self.printer_name = PRINTER_NAME self.printer_wait_timeout = PRINTER_WAIT_TIMEOUT - self.brlaser_driver = BRLASER_DRIVER - self.brlaser_ppd = BRLASER_PPD + self.printer_driver = None + self.printer_ppd = None self.archive = archive self.submission_dirname = os.path.basename(self.archive).split(".")[0] @@ -359,19 +361,26 @@ def get_printer_uri(self): return printer_uri def install_printer_ppd(self, uri): + if "Brother" in uri: + self.printer_driver = BRLASER_DRIVER + self.printer_ppd = BRLASER_PPD + elif "LaserJet" in uri: + self.printer_driver = LASERJET_DRIVER + self.printer_ppd = LASERJET_PPD + # Some drivers don't come with ppd files pre-compiled, we must compile them if any(x in uri for x in ("Brother", "LaserJet")): self.safe_check_call( command=[ "sudo", "ppdc", - self.brlaser_driver, + self.printer_driver, "-d", "/usr/share/cups/model/", ], error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value ) - return self.brlaser_ppd + return self.printer_ppd # Here, we could support ppd drivers for other makes or models in the future def setup_printer(self, printer_uri, printer_ppd): From 0685535505cd1fab243bf53b6e6b662d2e0afa69 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 4 Dec 2019 15:51:17 -0800 Subject: [PATCH 144/352] test installing printer ppd --- securedrop_export/export.py | 40 
+++++++++++++++--------------- tests/test_export.py | 49 +++++++++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+), 20 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 421fea7d6..3bd8c442b 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -129,9 +129,6 @@ def __init__(self, archive, config_path): self.printer_name = PRINTER_NAME self.printer_wait_timeout = PRINTER_WAIT_TIMEOUT - self.printer_driver = None - self.printer_ppd = None - self.archive = archive self.submission_dirname = os.path.basename(self.archive).split(".")[0] self.target_dirname = "sd-export-{}".format( @@ -361,27 +358,30 @@ def get_printer_uri(self): return printer_uri def install_printer_ppd(self, uri): + if not any(x in uri for x in ("Brother", "LaserJet")): + logger.error("Cannot install printer ppd for unsupported printer: {}".format(uri)) + self.exit_gracefully(msg=ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + return + if "Brother" in uri: - self.printer_driver = BRLASER_DRIVER - self.printer_ppd = BRLASER_PPD + printer_driver = BRLASER_DRIVER + printer_ppd = BRLASER_PPD elif "LaserJet" in uri: - self.printer_driver = LASERJET_DRIVER - self.printer_ppd = LASERJET_PPD + printer_driver = LASERJET_DRIVER + printer_ppd = LASERJET_PPD # Some drivers don't come with ppd files pre-compiled, we must compile them - if any(x in uri for x in ("Brother", "LaserJet")): - self.safe_check_call( - command=[ - "sudo", - "ppdc", - self.printer_driver, - "-d", - "/usr/share/cups/model/", - ], - error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value - ) - return self.printer_ppd - # Here, we could support ppd drivers for other makes or models in the future + self.safe_check_call( + command=[ + "sudo", + "ppdc", + printer_driver, + "-d", + "/usr/share/cups/model/", + ], + error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value + ) + return printer_ppd def setup_printer(self, printer_uri, printer_ppd): # Add 
the printer using lpadmin diff --git a/tests/test_export.py b/tests/test_export.py index bcd6b2114..0441198af 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -323,3 +323,52 @@ def test_safe_check_call(capsys, mocker): assert mocked_exit.mock_calls[0][2]['msg'] == expected_message assert mocked_exit.mock_calls[0][2]['e'] is None + + +@mock.patch("subprocess.check_call") +def test_install_printer_ppd_laserjet(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + ppd = submission.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") + assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" + + +@mock.patch("subprocess.check_call") +def test_install_printer_ppd_brother(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + ppd = submission.install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") + assert ppd == "/usr/share/cups/model/br7030.ppd" + + +def test_install_printer_ppd_error_no_driver(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + submission.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") + + assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_DRIVER_UNAVAILABLE" + assert mocked_exit.mock_calls[0][2]['e'] is None + + +def test_install_printer_ppd_error_not_supported(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + submission.install_printer_ppd("usb://Not/Supported?serial=A00000A000000") + + assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_NOT_SUPPORTED" + + +def test_setup_printer_error(mocker): + submission = export.SDExport("testfile", 
TEST_CONFIG) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + submission.setup_printer( + "usb://Brother/HL-L2320D%20series?serial=A00000A000000", + "/usr/share/cups/model/br7030.ppd" + ) + + assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_INSTALL" + assert mocked_exit.mock_calls[0][2]['e'] is None From ed81efcfa2b3249230818a14b81fe238910dedbb Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 4 Dec 2019 10:45:17 -0500 Subject: [PATCH 145/352] Use logger instead of logging Logging works, but does not provide the class/method/line where the logging was invoked. Logger was used inconsistently accross the codebase. --- securedrop_export/entrypoint.py | 6 ++-- securedrop_export/export.py | 61 +++++++++++++++++---------------- securedrop_export/main.py | 22 ++++++------ 3 files changed, 47 insertions(+), 42 deletions(-) diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index f2d837202..ed1d69bb1 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -11,6 +11,8 @@ CONFIG_PATH = "/etc/sd-export-config.json" DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_export") +logger = logging.getLogger(__name__) + def configure_logging(): """ @@ -44,13 +46,13 @@ def start(): msg = "ERROR_LOGGING" export.SDExport.exit_gracefully(msg) - logging.info('Starting SecureDrop Export {}'.format(__version__)) + logger.info('Starting SecureDrop Export {}'.format(__version__)) my_sub = export.SDExport(sys.argv[1], CONFIG_PATH) try: # Halt immediately if target file is absent if not os.path.exists(my_sub.archive): - logging.info('Archive is not found {}.'.format(my_sub.archive)) + logger.info('Archive is not found {}.'.format(my_sub.archive)) msg = "ERROR_FILE_NOT_FOUND" my_sub.exit_gracefully(msg) main.__main__(my_sub) diff --git a/securedrop_export/export.py 
b/securedrop_export/export.py index c2f7b92eb..00c4a83be 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -82,27 +82,27 @@ def __init__(self, archive_path): try: with open(self.metadata_path) as f: - logging.info('Parsing archive metadata') + logger.info('Parsing archive metadata') json_config = json.loads(f.read()) self.export_method = json_config.get("device", None) self.encryption_method = json_config.get("encryption_method", None) self.encryption_key = json_config.get( "encryption_key", None ) - logging.info( + logger.info( 'Exporting to device {} with encryption_method {}'.format( self.export_method, self.encryption_method ) ) except Exception: - logging.error('Metadata parsing failure') + logger.error('Metadata parsing failure') raise def is_valid(self): - logging.info('Validating metadata contents') + logger.info('Validating metadata contents') if self.export_method not in self.SUPPORTED_EXPORT_METHODS: - logging.error( + logger.error( 'Archive metadata: Export method {} is not supported'.format( self.export_method ) @@ -111,7 +111,7 @@ def is_valid(self): if self.export_method == "disk": if self.encryption_method not in self.SUPPORTED_ENCRYPTION_METHODS: - logging.error( + logger.error( 'Archive metadata: Encryption method {} is not supported'.format( self.encryption_method ) @@ -185,7 +185,7 @@ def popup_message(self, msg): def extract_tarball(self): try: - logging.info('Extracting tarball {} into {}'.format(self.archive, self.tmpdir)) + logger.info('Extracting tarball {} into {}'.format(self.archive, self.tmpdir)) with tarfile.open(self.archive) as tar: tar.extractall(self.tmpdir) except Exception: @@ -195,17 +195,20 @@ def check_usb_connected(self, exit=False) -> None: usb_devices = self._get_connected_usbs() if len(usb_devices) == 0: + logger.info('0 USB devices connected') self.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) elif len(usb_devices) == 1: + logger.info('1 USB device connected') self.device = 
usb_devices[0] if exit: self.exit_gracefully(ExportStatus.USB_CONNECTED.value) elif len(usb_devices) > 1: + logger.info('>1 USB devices connected') # Return generic error until freedomofpress/securedrop-export/issues/25 self.exit_gracefully(ExportStatus.ERROR_GENERIC.value) def _get_connected_usbs(self) -> List[str]: - logging.info('Performing usb preflight') + logger.info('Performing usb preflight') # List all block devices attached to VM that are disks and not partitions. try: lsblk = subprocess.Popen(["lsblk", "-o", "NAME,TYPE"], stdout=subprocess.PIPE, @@ -243,7 +246,7 @@ def set_extracted_device_name(self): # we don't support multiple partitions partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') if partition_count > 1: - logging.debug("multiple partitions not supported") + logger.debug("multiple partitions not supported") self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted @@ -254,9 +257,9 @@ def set_extracted_device_name(self): def check_luks_volume(self): # cryptsetup isLuks returns 0 if the device is a luks volume # subprocess with throw if the device is not luks (rc !=0) - logging.info('Checking if volume is luks-encrypted') + logger.info('Checking if volume is luks-encrypted') self.set_extracted_device_name() - logging.debug("checking if {} is luks encrypted".format(self.device)) + logger.debug("checking if {} is luks encrypted".format(self.device)) self.safe_check_call( command=["sudo", "cryptsetup", "isLuks", self.device], error_message=ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value @@ -276,18 +279,18 @@ def unlock_luks_volume(self, encryption_key): # the luks device is not already unlocked if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): - logging.debug('Unlocking luks volume {}'.format(self.encrypted_device)) + logger.debug('Unlocking luks volume {}'.format(self.encrypted_device)) p = 
subprocess.Popen( ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) - logging.debug('Passing key') + logger.debug('Passing key') p.communicate(input=str.encode(encryption_key, "utf-8")) rc = p.returncode if rc != 0: - logging.error('Bad phassphrase for {}'.format(self.encrypted_device)) + logger.error('Bad phassphrase for {}'.format(self.encrypted_device)) self.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) except subprocess.CalledProcessError: self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) @@ -301,7 +304,7 @@ def mount_volume(self): ) mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) - logging.info('Mounting {}'.format(mapped_device_path)) + logger.info('Mounting {}'.format(mapped_device_path)) self.safe_check_call( command=["sudo", "mount", mapped_device_path, self.mountpoint], error_message=ExportStatus.ERROR_USB_MOUNT.value @@ -319,24 +322,24 @@ def copy_submission(self): target_path = os.path.join(self.mountpoint, self.target_dirname) subprocess.check_call(["mkdir", target_path]) export_data = os.path.join(self.tmpdir, "export_data/") - logging.info('Copying file to {}'.format(self.target_dirname)) + logger.info('Copying file to {}'.format(self.target_dirname)) subprocess.check_call(["cp", "-r", export_data, target_path]) - logging.info('File copied successfully to {}'.format(self.target_dirname)) + logger.info('File copied successfully to {}'.format(self.target_dirname)) self.popup_message("Files exported successfully to disk.") except (subprocess.CalledProcessError, OSError): self.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) finally: # Finally, we sync the filesystem, unmount the drive and lock the # luks volume, and exit 0 - logging.info('Syncing filesystems') + logger.info('Syncing filesystems') subprocess.check_call(["sync"]) - logging.info('Unmounting drive from {}'.format(self.mountpoint)) + 
logger.info('Unmounting drive from {}'.format(self.mountpoint)) subprocess.check_call(["sudo", "umount", self.mountpoint]) - logging.info('Locking luks volume {}'.format(self.encrypted_device)) + logger.info('Locking luks volume {}'.format(self.encrypted_device)) subprocess.check_call( ["sudo", "cryptsetup", "luksClose", self.encrypted_device] ) - logging.info('Deleting temporary directory {}'.format(self.tmpdir)) + logger.info('Deleting temporary directory {}'.format(self.tmpdir)) subprocess.check_call(["rm", "-rf", self.tmpdir]) sys.exit(0) @@ -348,17 +351,17 @@ def wait_for_print(self): printer_idle_string = "printer {} is idle".format(self.printer_name) while True: try: - logging.info('Running lpstat waiting for printer {}'.format(self.printer_name)) + logger.info('Running lpstat waiting for printer {}'.format(self.printer_name)) output = subprocess.check_output(["lpstat", "-p", self.printer_name]) if printer_idle_string in output.decode("utf-8"): - logging.info('Print completed') + logger.info('Print completed') return True else: time.sleep(5) except subprocess.CalledProcessError: self.exit_gracefully(ExportStatus.ERROR_PRINT.value) except TimeoutException: - logging.error('Timeout waiting for printer {}'.format(self.printer_name)) + logger.error('Timeout waiting for printer {}'.format(self.printer_name)) self.exit_gracefully(ExportStatus.ERROR_PRINT.value) return True @@ -374,19 +377,19 @@ def get_printer_uri(self): for line in output.split(): if "usb://" in line.decode("utf-8"): printer_uri = line.decode("utf-8") - logging.info('lpinfo usb printer: {}'.format(printer_uri)) + logger.info('lpinfo usb printer: {}'.format(printer_uri)) # verify that the printer is supported, else exit if printer_uri == "": # No usb printer is connected - logging.info('No usb printers connected') + logger.info('No usb printers connected') self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) elif not any(x in printer_uri for x in ("Brother", "LaserJet")): # printer url 
is a make that is unsupported - logging.info('Printer {} is unsupported'.format(printer_uri)) + logger.info('Printer {} is unsupported'.format(printer_uri)) self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) - logging.info('Printer {} is supported'.format(printer_uri)) + logger.info('Printer {} is supported'.format(printer_uri)) return printer_uri def install_printer_ppd(self, uri): @@ -505,7 +508,7 @@ def print_file(self, file_to_print): ) file_to_print = converted_path - logging.info('Sending file to printer {}:{}'.format(self.printer_name, file_to_print)) + logger.info('Sending file to printer {}:{}'.format(self.printer_name, file_to_print)) self.safe_check_call( command=["xpp", "-P", self.printer_name, file_to_print], error_message=ExportStatus.ERROR_PRINT.value diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 1c7f6dbc7..d3b5a0bef 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -16,34 +16,34 @@ def __main__(submission): if submission.archive_metadata.is_valid(): if submission.archive_metadata.export_method == "usb-test": - logging.info('Export archive is usb-test') + logger.info('Export archive is usb-test') submission.check_usb_connected(exit=True) elif submission.archive_metadata.export_method == "disk": - logging.info('Export archive is disk') + logger.info('Export archive is disk') # check_usb_connected looks for the drive, sets the drive to use submission.check_usb_connected() - logging.info('Unlocking volume') + logger.info('Unlocking volume') # exports all documents in the archive to luks-encrypted volume submission.unlock_luks_volume(submission.archive_metadata.encryption_key) - logging.info('Mounting volume') + logger.info('Mounting volume') submission.mount_volume() - logging.info('Copying submission to drive') + logger.info('Copying submission to drive') submission.copy_submission() elif submission.archive_metadata.export_method == "disk-test": - logging.info('Export archive is 
disk-test') + logger.info('Export archive is disk-test') # check_usb_connected looks for the drive, sets the drive to use submission.check_usb_connected() submission.check_luks_volume() elif submission.archive_metadata.export_method == "printer": - logging.info('Export archive is printer') + logger.info('Export archive is printer') # prints all documents in the archive - logging.info('Searching for printer') + logger.info('Searching for printer') printer_uri = submission.get_printer_uri() - logging.info('Installing printer drivers') + logger.info('Installing printer drivers') printer_ppd = submission.install_printer_ppd(printer_uri) - logging.info('Setting up printer') + logger.info('Setting up printer') submission.setup_printer(printer_uri, printer_ppd) - logging.info('Printing files') + logger.info('Printing files') submission.print_all_files() elif submission.archive_metadata.export_method == "printer-test": # Prints a test page to ensure the printer is functional From 51da6e63ccec6677126caf08c5d71e28dd80be6d Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 5 Dec 2019 21:57:50 +0530 Subject: [PATCH 146/352] Adds required scripts and updates for packaging --- MANIFEST.in | 1 + securedrop_log/VERSION | 1 + setup.py | 9 +++++++-- update_version.sh | 26 ++++++++++++++++++++++++++ 4 files changed, 35 insertions(+), 2 deletions(-) create mode 100644 securedrop_log/VERSION create mode 100755 update_version.sh diff --git a/MANIFEST.in b/MANIFEST.in index d82c3479f..d78aea8b9 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,6 +4,7 @@ include changelog.md include build-requirements.txt include requirements.txt include securedrop_log/*.py +include securdrop_log/VERSION include setup.py include securedrop-log diff --git a/securedrop_log/VERSION b/securedrop_log/VERSION new file mode 100644 index 000000000..8acdd82b7 --- /dev/null +++ b/securedrop_log/VERSION @@ -0,0 +1 @@ +0.0.1 diff --git a/setup.py b/setup.py index 60f080fac..78592406f 100644 --- a/setup.py +++ 
b/setup.py @@ -4,7 +4,9 @@ with open("README.md", "r") as fh: long_description = fh.read() -version = "0.0.1" +version = pkgutil.get_data("securedrop_log", "VERSION").decode("utf-8") +version = version.strip() + setuptools.setup( name="securedrop-log", @@ -18,7 +20,10 @@ install_requires=[], python_requires=">=3.5", url="https://github.com/freedomofpress/securedrop-log", - packages="securedrop_log", + packages=["securedrop_log",], + package_data={ + 'securedrop_log': ['VERSION'], + }, classifiers=( "Development Status :: 3 - Alpha", "Programming Language :: Python :: 3", diff --git a/update_version.sh b/update_version.sh new file mode 100755 index 000000000..18c133047 --- /dev/null +++ b/update_version.sh @@ -0,0 +1,26 @@ +#!/bin/bash +## Usage: ./update_version.sh + +set -e + +readonly NEW_VERSION=$1 + +if [ -z "$NEW_VERSION" ]; then + echo "You must specify the new version!" + exit 1 +fi + +# Get the old version from securedrop_log/VERSION +OLD_VERSION=$(cat securedrop_log/VERSION) +if [ -z "$OLD_VERSION" ]; then + echo "Couldn't find the old version: does this script need to be updated?" + exit 1 +fi + +# Update the version in securedrop_log/VERSION (setup.py is done automatically) +if [[ "$OSTYPE" == "darwin"* ]]; then + # The empty '' after sed -i is required on macOS to indicate no backup file should be saved. 
+ sed -i '' "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_log/VERSION +else + sed -i "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_log/VERSION +fi From ca65ec3ecfaf859dd7805d333795f99d932b8b96 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 5 Dec 2019 23:19:10 +0530 Subject: [PATCH 147/352] Adds securedrop.Log file for the qrexec in the logging vm --- MANIFEST.in | 2 +- securedrop.Log | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 securedrop.Log diff --git a/MANIFEST.in b/MANIFEST.in index d78aea8b9..b5ca54051 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,4 +7,4 @@ include securedrop_log/*.py include securdrop_log/VERSION include setup.py include securedrop-log - +include securedrop.Log diff --git a/securedrop.Log b/securedrop.Log new file mode 100644 index 000000000..0ebaf241e --- /dev/null +++ b/securedrop.Log @@ -0,0 +1 @@ +/usr/sbin/securedrop-log From 944f8c3ff2255d5d5f8e8f1ac6ad7245e98f192e Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Thu, 12 Dec 2019 08:02:56 -0800 Subject: [PATCH 148/352] updated logo to match 1.0 version --- files/sd-logo.png | Bin 8606 -> 16040 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/files/sd-logo.png b/files/sd-logo.png index 531cbf26c3426493616b7060338cc68d714bad1d..4b58df918486310e52435baa1b2d4d73bdae4778 100644 GIT binary patch literal 16040 zcmV;ZK3BnsP) zaB^>EX>4U6ba`-PAZ2)IW&i+q+O3;uavZs`rT=3Uy#&k-EC-|6+d(hCzk?)IDoM9q zU9%P?l1e5c0J!@Z90+&*@Bdu)fB4fvt}YjAwb$zTlY8!Q@J;idKmC0UKEKcJPupKV z3x9vz_4DGNM3U+*8hUVggJ_jTsqUw7^6C$W89DEzqa`;&BMeZPMmB(KNM z>mmO#qS~ANJkHS*e z!}wSD-rh&&qxs<%A>V%a$;S#kM51zC$YF;OZaDAj3X37;c%tz&#ud|httE>+t{1ti zk8q#VKI=qY zFs`V)W*CZ`KJzXD67E;r%3I*?kFW6ipYn%PC4%KGb7O+T&(9@h34i5QdT~y?ukiJg zkipshx&TYWy$gd0i3s=_QVBJ9i_t=09UB1}j69|sClLsx#LXZh1yZB4*rM*u?^t;6 zjV0Eom%%0iQK%*(HdGnG%DIp~mK!-VN-DXOQj3-5N-v|Pnro@GS{29*OO~xzHM3^j zMoTTX(rUBTT5qGrp1{D;Yq#Ed?_)$axY6KRgZB?!nQ^9>XPI@{Y_rd?NS~EgS#{ZJ 
ztFN)+P8*omb=z*c?{R`VAjQe2oOL@j(p zExt(UefA@2yz0!pFA)SMMKL2{F$W@UiU0{66*CW63d=qWbvxX~V@f zL~7%I`??uJS}BZnpvKBHLrpEr#I)93)Ag`tJ9W-U-K;Q&)jnH~r6ddKgnViyMs1}z zu1}4Kd3Zm7@oSj(Mx9a1E|n~!&MQebTnX0ZtguQ}QdyRd+3fy?uxF<9zQ8`HKqfDxia z4z_a+a-th_Vh_fznGD)(fv-INrX09=tG7pi2CyWDl}bT%;7T4v@T)oR03ccuA3bN(O40(q8Dy&W2Z|lLq`)>WXUbSq47`o0t?p?~`7{`dnGTpw}YGYCN$ph8d9a_#=^ zotGBT&@tBhp+U#i2NQ_`jR(fer6IY9nVxK6oR(bspGaeO4ihea;|I$ zw#>U-AqRI~kp+Ta%px~l*I0ehl^i7}s9qx84ThOyd-Y%dG`?&bl!7uyWt+-j_7yUW}!5<@<~*J`%(lRh)=hNx7ZYzyW@=%XO3j|)-K>g7kFox$>^p~B7M z<}>0XnL>-BxN=L6N@Rkcf|c0rio9o8W0-NQl35UdK9@+@rAj9G-Ri4GOTIB(>LI)C zt!Q-vB|_a75^oxV2ikXQdTT%&!r_d`^2t5CG0i)$48ZQ1RcKjVIydqwBlK1xcgvG$ zMT$O72)iDDl@cC|$Oel$ZffWH@gUFt^ws_P!Z6U9i_F`>XfiOZ9a*_mpgW>_7)v8f zE!V5}*UB_{$JC^LNQT`GP`cNBSwWS-iNGdPHu5--k03!}aY;LiVbt2X-7#ki+^j3J z_oM_U)ROPqRppu2?;o6*wh4B=(*nz;6Q+x`;i-f|D`MeKAZ4CWu3}c zlZCxN2%(!V3+Hzh@_SwRNBThu% z&}jA6Gy8EMP@Xm46Z2--V0K`4q|@T$QgBwC=TPkzUTw8xvLC2C2aYQfG#?WMKPyaY za*LgNqK>0275e55q zL`V-1W;#fdK~hsL(p7V(iSvP?;5+`!Ar3^Lc~$pt$eS63M3i^Bg>v?y=xf9dYxEhQ z@8>?Vbput~HAZYL8mm$jfhb(W=w2HlUnI&%ENmcK3wep~I~qzUBm#u-v%+k=u2_Bi|IPRSs0S|?Asx&h)Uil6N7fZ1~5 z9#V@qAA}BOggmS(#ynAUbLJ35sQmi&aUyrg76L=4sSo%DkunMiVGe4Qc}miVNI-DN zDgfIW$i0va7|P7|I20DxSA@;6a0@6^-kcN|X;K#`rMeMppLzH8SFX(6rQs+Lt*tR@ z5_J)7{PXRoyoHh(8L@JS+F;DJV62*7=wjYm40IwTfIBkdkaT$8I8Ye)(k-RvL4IUX z+Q>Lm4VY4kJCI)t5HG4`7gRk0gM0!S`|_kDkz*2qZ7D!j!UL6obbySIBTlAnJ1m8))A}isVgJm^8#@ z%vcx@c9FJoP0-Ws3kEa=u*v}pSE^zOA~mw#AE*IdX`&^G!*h@{WCiyD74IQP1mpIN zdE(Xxypd#Orn%pvuD%84nB)d({nipf`-Kc^`2w(Ip~@FZZen!Eo)G|N93*vQr z%B&dZ#*5gM9)Ms7$^l@I+Vfy&J&^xIWK_a$@cwc(phprQz>$}e;gZ#fbipT*qf}vH zr3z|?T7#QF=}5`KOib}c03XpzmJK{g>14~GqM7MHfC7YQis8}R_+zS6)qAEx8tj6y zMEQ^OTL@X2xEOssT9Lpn<>MJ>v%>e71J>dqJ+^m?n;D};b3{@UV>V6p7iSE2;ug}9 zI?WISx2qycNLO$M*>Cbl`L%KYvjISslaR4As4*7wrbUWqsew&v@FMhlA;-QW`O89+ zGwm@TE(&Zw&A1NL9=Ugi2@40tsxk0}ph@{b$SWqNFA5~omo9@8qt!O<)ZK%X6j5~QtogjbUMM?;k zC5xeCuxc+gA_ky~SfbFl_&KAy*gNJBzjq}Lt+op^fukY`icvZaydtyUZP_|F;MO%M 
zzL~%1ZBw=PQ~l~vR~E;=>Zed35KJp26mXz$Gd0Rmf*9={>CCJ8?~J5OOv#)WKf~w` zFb$n4KHmipga`-^$}tW^#VJe!2i$>35DbwCuCE_@q*_QfAW)q6 zyQdX}r;>iE0|E3a1a;M-X z79sgSD!KW{8pMi_W6kPD^fc^p2fiW>a?1rpkKau|ankVcD}=kjUL-T}wF67Ztfy~@ zK=5UNQiog)gtCrP?kzQ@PS+Uo7^%)9I}A)>&jhU z5fS95SObzg13Fih^!!=3=zle4Db$aci7?apJ}}HT!O}f6Mus7rU=J__m`ad~QWBa1 z^buB$f-u+^nhfw0D;TKJu$2#l3st$f%{~Areg(p&5=qoraPEVoWm;Pv87oRzAW9MW z7^4ZQUh+Xp1r#j^k4jrmT1Lck{b8}Ay-QguDA?->d3Y~w zH})JC;>8JPwNu+M@n;fy*s9d&9s$$2gyCpAfTHB=Cwd3Q!~>9R9q@ImWNM@f+=`3K z2K-0-rEh9pvNW%SV|~SMkoXp_SiU1c`Oq9OdON1Dqzeu|53u!rECjy#vd%y*j(zld zw_)Lr{)ZfzXtJbjKdzDy55Bo?l}Bxo-B=jrt>pu)Vb?(7G^*kq8e|IYBws5E$No z9t;tg0t%^g08=7Q%1%4}a*2C;H;uqH3F8~XhrKrPRNIz;q6O`!jG6*huXsMd0UsIo zb?^-g4OHxas3|G(;Hm+Ygp;pB0v58GN!=0mnwJ4X^}3m01cR$(){fd&|M!Ap7;Zku zrCCE62i6xoMU>JE8>d4eCa3-ZX{yG#dTB_*%qk!h)iW0px)rQk;6rLvr%dI>g1U5Xt=(Vvq!D7t!5;Wyl#Ehk+ds(IGJG(AY_BO1VvC zIYD{NbY$O!$l;2DO>Y1xb-&>mk`x4m`(WJG+)^KFY)SbDKO@xV=W>X%X29(5L6k=} zyq+^c>#AG*WKOQw0r2U(sBs(J9lOB1_9Qf$iBfY=u9e^qmKZyX!THG3Gf5O8RlOvd zn5y*{9SKTaNA^;NB%sv5-Bx0$q>XYv03VA;VOxdvIn1QK#YtrLMARo3 zAzTF3J}OYWOT70W1-U8t)KE|h#OlnXFpxyHw9s0}h!V9mhQgjoio((B&ouU z80;T5;9E<5NhvJ&ZK1r|BILO3<=#$b4I-*M|3H>M0!yDk?!Q{DL0SaKzpHw12f#@w zXNU4ZZV30V71(}8BLoFCKH49bI!z)4k+B~}CtNnabEncRQqi!#Wmjc9F$@Q-0d7HG ztQi!TF?=Q+jHB5?DCluxZmDl%yWnX_Y~mzhSI681Oo^#Ueo#c(DVUxz+uF&!m9+w6 z5?tl8O~{%sY%~KI(>e;1Ps|D!CH!ZB8vGzp9%2lJr9L8{##JN)x#OpGusVGGnb*E1l1U=uc2UfdMF>2;0+zoBP z6brEgZX?&=Id$H&63k<%8B2(eiUww9^}145&tib-m!W`MCKR9mSWRFZyE)ZMKs}OU zMLUcYQ4eP04w!!R4x!WWUzAAbB-nq3I5qD{6@eETG^h7AWX0DT!K2T6y?=nX>O(}_ zQPF1TsnoW9M&0MDIbES6QsviIohbHTn91mo=Rd$A`Xh8C{GsA2e;f7DbR2?mB-##= z+(m+sf=xIZ^w7jE0=T;8+pA9g*=PLD(tg@RCpR@H@dny%f#r_+M+BL>PFJO}l-R%wnJBomP1!8{rbZNIbHx<_s80&$mz z4u^zg-b939td9%g%KByj!L_?)R6KY~Lg;2J;0Ae_?ukLYD2##neTWQg6nc=l^bsOF zfO*l0o-SL6gTxRgC~?bn!^dFt$cJ8Ql z=eh6g9-1I}GKi-iHu z%R_0174P#ou%V7)7tbeb6*Gl`ro%fK>!lqEjc7~VkUkF~kE#qZK+EZbNXE{BqZ6n?u*xOO=PcrDPIwX 
z<+BQYzvl7J@BICZdK(3wq2_E%Ex(q;lE->_NKLS#lQ$?)rt_y)Z8zjL$Fmq(+zDC;4}b!9b$^o_;FE?=)TipJvVIa5-{0#+e)>J#@gbW03F;I3}y22 zP91a$+A{$_O^#_&6`o5@JaiRsBot4=%$PQg!GstyqIGyJ#>&$2*3=cN7{DXZo^1rHK1b3G?b5NMFZhi zkV5O7MMI!Yd8_3PvD*cbZLEuDO?0XwZSBV38RzNvrunI=tOGM(z^93yE7`QRY*P)W zTKQir>f5)2;>MRUM7`+9Q2kfRP}$Yo=JPIvVBzLr3$nz~~1Yn{)!p&Qbz+YA%tH zz*@-Bo|Lf}`FE9@EFsItZ}nc6`wWMo3DBEI#_%nXWX+*^EY>-MxT*0tI|w-{(}UkV z(x!Rkw@fRf!_-!Svbyv*zMtG=%_%&Hsi)`gpwR)$Wu2X=u-VedH1x61N_L9X9B4=> z(We7r!SHd(cUBs!ppy^r#9U1*DM>pHoTewf(zeXgXCDyH4Ve;`{Xa zEz>Z~E5N1mi7rntZ-7H%!CDuLLWeFr-qNSk-BJktv<4SVQb_d*<|o%b*q0bvDeVriI{HgtTu`?mvT$e9U6360e%G2AAS zq+%EmqdG3js*nMEJ@S4njuV1RH2Q*y2x{*|ZFQkUw4G&KR;Mg+n&g}GDIs-@(4nZb z$3%gs?}7bHVjl_lv0L7xFLDwXRi^}O!datRbU0~aIrtGsO^GmXfuom=;sThTQK`wb z<)nd5?DS17E|loq zdKB@kPnqike{OjDOA>g&MTT*XHc{Bk`5pNAUYn? zN(58@bF|ym8LhE|^W<@wSz<|NYt}*q9`Fq?uXlJb^t)=TrCb==FuE zK{JCvO9}yVtes_T?o)tQr*qCxKMA*uP+DC?eejo}Sg04hO0K7N4`V``6eoHe z3$WA_mHkyKBfe7&fvJ=R9_b9ojDkV!6)}>;RYL6DMI9%@Wm?pxvRuJ6^Aqn=C<--4 zb&zdo2g@|XuiYSn=Jjo+=FT3=JFbtCX*fA>KrJ;*u316GH6%7)+8qHYL3b^ z6?F+Qfu@Wb5+X->Z#A$IGatEuF)2xktsO&j7p8_BcL?*Ko|x&_DF(F&d$ZIq27i!m zJL;V2Zjv&yQgM)E((#J`K!7bs3Cuo=jhxjf^Jf;hS?4W{6)g~n3{9I+w0SK0#zF$9 zEFH)q(OJ}{0r7K-+FxsA@LB4}!o6OH_P19r{tA7r3>q{DrrN9Rp|)|yIsXDK?l*J+ zxOAHLOe6r=h5*ue4DgR3rS5f}OiOZ@yALhz^ zyDL!gpWM}-z12_F%E?<{IJL*$^$40N!(KaDJrbGknrl0|f(($gPmtOsjbv~Z()Wl1 zBH*8GP4PEr%QbY4xhI_sEjq+Jch`wPPzANGgyXkDi!F#OFL)Cen+{=n!e2M5b5jH< zaKcS31W+OCES^qb>&PR1;YOw3q}akGabQeSBelvT=(GbRQn<&Ra*1T!)q^;_>$FvT zxd+0J7~jU26b0nzhHYfa<8o=8<>->EkEsJ;}^Va zTNZ7~W&y?AFwRDf7h;GsWVwXQ)kJ@2coaw<2M@d;{nVk;B~>o(5@mt)w3y}qe4VpC zI~xa^vM`nT?)S)tlo61QXlN)(`67ULM#g@i2a@+roMc9TSZ*esEt${ubQdi~-!u{;M2u`%!1CVa*nF813NZc$uVDr)sIo z`#JWfKI9nMkPKbN7Q9aC=wYDRqfRlPs5+_^97Aol5XnU&);c~ldy&LN8Di`L9gN=Gp0gj1B9t}HG05cI3@ zi*LqdBgVS94C8-F-J}5KJCO%>DARN+S+XmI$%==z^aq$A(-ACrt0r{PAV)_cq93u~ zP?(jnPd%@>PznaS1YMfq$IA!pj*vb&$_!$LCkC+B>OfFePFv+uPdcbmuEW*>b0P!SC2X)i-~6y$uK=y zhE)`3lYbB=Eo=oOD@l_9Z#Q}?hm6)nB%nDqC}7w!t6Q5FNsl5F9Shtq 
zUw+=MejPbh^QEi%SM9;K^*i+>-qgclIsn#sP$80+O}3@FUaP4*l1|m9^NvI5afWPY zbZ8xQ6Ts>OpIx>0FseFlxNqj3I^hC%r|E;!nYpV*mY%m!&ma$N2dPH%WKJZ6Tj-4Y zWaQXB^b{ALiAvUC*aWp}pV3W8G*+vl?`EO=pbWp#iEq9X(Q}lW}OK9p=JT#b~k@b4!6Hs29qU1}1LNT+sO@lr%vL;v{~bifnpj znVPNvrvzv|=FDKOdVqozZN~8m<_T+(0c7_cxgspErOwMh(EVDTQ(OJyB0zMJQ$spZjz4sdx%k;EdFAVGwJ z8p^1^MvPXS6botEPx$zUT)#vvg=bb^8^S!16O+6Uu^*KPtxmcEqVm>Zvz+CZB5w&E_Z;zCqp)6R|?V;3I*W( zjJ_!c4BP^}YhG`yeVjf38R{x^0~{OzV@1kd_jz|uXK(+WY4!I5dLDABK*ABX00006 zVoOIv00000008+zyMF)x010qNS#tmY3ljhU3ljkVnw%H_000McNliru;|vD?D-|D( z54HdR8r4ZeK~#9!?VV?MlV$h7e@)X(cMHL?DNrbsDMKiGiB&*Uh9IaQ$g((!Y-LlF z7hEh6t0)30ydW>22twIVkd_T)bf&vao1{(BG> z5&+Bu-Za2+zbJsUz{12BNxlOb`-a6Ehz8yRyesh&Ij|D=C^1G-=zzw)VDSPb05gEV z$2dd*uoU zfJg0s#=2nvcmXyo>qAI@xc3uF_L2rXoP{Kg~3i3Yy-3EUSM`&j3mJUP1V5C)Zh)h zY=aKp7GQp2jO3C78q>fMiayRV3fm7xZbH8(des5V@J4Vve+1DeXg z5(LafzeePa{fY`$XV5%Z4rnTd+U~yuMq{uFziHMbY8nntRAi%*yrA3B5KJ{s0jHK8upoxf;dZ15Jm;(ek7S zMZYMzE-^+Tw@GLsVx?l>HS{?xA&zHmGRXlJqR-_ivk7R-kkewXl8!!7{#9a(q}(c@ zF~Bmy085yVqvz}5#0!mFX(AC5?Bh%ge=%Z_)8Fn^snU{HR8CsHhCkEP{Cvx167H^{ z&myd{Tpp(?f@L7E2nZKyq_n_WE#2uJ?oNZ6ZiqyW_WLrmj>{=!>^ZOChh&=o=#pXD z+H7igOa@K$sD((nj_qME&N>P?I?=7&8Q7s30p8B0(et@#HXJPGr`t9GPZIk0*Dt1m z$H_$6rHcWUNFhff#LbD7&->E2mRpq^|BpmDQ}>nF%7WJkU`Aq$%zXqbis z&uIy9b7I|ae`*E0ROxxnURUz!4w)?(%<6T++b{(EvIF}Kp)yQ_7LP; zn-9rYddlX?nhpXni7}EhRSHc+tTYUL1b3u4@3v!rGizTLE-qNC(9-2|o2~!<8TcSE zMv_$7ABZXmOK3!_bR+uGjFINP%N~tf1wEg)r-=R`Hk+OCJdhX>E8S#Bbo*Bk{|hV< z^Z}z+0T)ZTOIK&%O;c}In=eLv85j`}EBzLIE>H3!X_|;wX<$UGbQO>cykSYe03>A! 
z>k5}^;c$iqI}j)I(fg|rvC`Fs=1vc{X(D2!{^%2+CQxyo7K@R6V*`=+3o~Wh$x;#h zV<9&cI_yl8=o7&f-P_d3$pDKt@V4ROV-0uHKSa!;p#g{mnyPdsg7+IN|EQN3G@;X;4Q|<4D*JEn}|BqU1%5T&MO_fc)Fo0tpc5K z)#$h`vtEMA)hhC`Qb@~8L8VaxBsh6QMlV9Yp!JsohjqoLep^IhH>O7E>!myo89&(QEIs*RdFV{jNg(~T(km+bUH1vyeyxr9RO`Ejrz8vUd7I5y$h1;dX?39sSW`1{GU8*2k zno5!Uq510ToL%a6?8QS;8q`V|MJeaemdbGRt%1nJ&8DE)d9{po!OnyP3LC%7lWAD{ zYY}q~D=0RzvM9QpfV0>YSD)%QiJWZ;oDS8U zsVM+B$}%ngJz31y9WwsRH7_kXotEN)Y*JE^DOOavvu)*}F`NZ?&MivPF5>FthfB3U zn*ry)SIX#E!yBt12myuy66x<{HC6CNwLp;pRB zKlKw{L3IcU?~SWZb(;WZ%yt=ZgP~QZX>FVfw@X=jsD#7W<_`dBRHfu(rchAw_=#nY zrfKenM^Qx4opYcm)!RE_SE~nS$CWZV)o?~)0*#ZAtLD?aMa(~<;Evq9hOV|;LqT>r zchYWAsxqoPD#0jd{!(=X8OZLO$JxaVSD#?3chQXnr$}DTPsd7_xVxB(1!j)P=yVhp z=8~F{j7)BlDqhn-1EABEBhOBzB;zt})dF$x^0zy1)EXVXot5*>P8s`B%x|}p%4KAv zCX-u`i&keccoR&%V55>3kal7V-ZerAY}Ui7jpfnXWqdstT35oxIeT5niUTDiSwOT` zqbegeE0ug%)oc_~WvhI}SvM%ox@k>+Nh(^Z`fRpm>=dDo9qc>5f|;XSDq5^k08or=K8muc8~tc2A`WfpOb@}fMX z(lp96RX;77%%*v`y`!N-dL2b}GOpDEaPjoBN*kwRb!Ys&o%!GC62|V3aWc#NF0?*n z;Rf0HIrlFtR+S_OnuJnXlttRH&G>{gB`~bJ)#dMu*&!p;OT-Pi1=uZW6$RPp{k6by3%qtj|B%9E0nD-}w!SOzqACHHDPg|`z7DHvf^ zAK$5JQOF9&%uEr)Zn0Qs05oL^G81;<8F-GMrrmM#cQncRhuu=DAV-=)k=!!2%e0uX zK}B9FDM!A=w{{BxnnWV@@UeSXwB;J|v(w4Wx2%$2i>oy)N=-tRc9Ec_-SDZ~+9uYs z9@%cuX(=wqL7J6;Mr(Oh4q{6kw46%^DZG7_;P75}NgCS?Fv{XWvZSdfRAm->jTT+i zu2vM0ar$Sx0&5f0yf>~sHEb3Zby+F7QsYt<9MIe&^1M{!hc@D0zYTt&oe;a*PGfUz zxrTzA46^dFt>&7os((hot+U8dFA&tU8{V~B;^b_-?LTxn9VLaiWM-wIDYvfbOsgC9 za*c|tq~9n=K230ozIaw|V0FNh%4K9rQzQy;P4()3u$gKDHtV%xk$6pP-%qEw|TLSC+`hwO7q6maS`k10%fV111Ih<{d`vemd-W!jgx@Lo=Oi@C%G=*ZNLhy69ttRHf zVU6h3u^wV^#fHPh8#y#za0>CGYn^pQPfr>heDHezzy2)}iD+1_7B4*Afrh>!cAvQ^ zyzNk?K$dm^O-ZicT`)B2GIFIUq)Supc3BWqR!7xl$@sQ}Hws4dka?(jfEO*R>DZQ( z|H#u*mD#U8-d>Cz{tQ<)Pv`Ca;bwW;Wf@nv_4^tMuEwL)RBeP+r`1xJlR@&0i{uvO z3x2OtBm78S+=a0(MBwXPxtF9zhsV>zRqpiE4w0~M=2(t?Hl6`Zg3SU??zN-b`F$-V z>6g$|!CH<&me0)_Nn~Va2%h6IpoSaA-)YUqZ+E876OWeE=I7&1nCGM0euBkmT1SNO z!|E9J?my1_wR^abCv1#Dqf(H0b|;#y^CvR&%mo&G 
zzJvYA!X`=UbUJb_{XyX^LkdRC#yn`d2xW00QfUfGl}hk&>g?;nqR~z1*s>PR&gSo% zWf5yn9ojW#+nUDg*?)}aRlCV96*ds2R?5gYvt8fyE4(*uzW<+sp)OOBBTb>OMEKo| zH9GiVSOcEx5sHVqO&OC{y-M*Qow#-~Ah zii?ZQma;JKsbJn1(v0eX)vz?NxK-}J0AHp}7|OGKpW@RE|7CrAvf$THaQhs2g$0xc z2*0`S@cLfF3~xsB#+KXj$6{+nLL1cPKMN-ETkL2$)w2C0^LEw5?0KUZ8)kL2XkaW- z)BKHSk%-P6Tk%V)rtI1q&zKdvZ4#KZ&(-0jeod(6WgSWMR#Bbi;qJyugL-r4=ef)s z($;2RnbyjmTT3H(?fDi~1B{^1sMQ*y+g}6+`Sbo;BRR8v3M1RswOUxBYPxgsy*9)~ zKSeEx>89TNA|?5S3fqAuJ13uMbG{(?R;tm*YSyFyUo4%%o+Tq`Q{BsIU}@kcV#nyl zY?Z_r}&3RbP%MqYlAamJX~nQooi zvhSN&tef1=;(=M!FNEXEy7NqrM!33|CP7TC(elf|>vW%Sh$(y0{`&vv2c=xQnQI%+ z+?5&M9iz+0#q8X37?s+%oD(lk4@SP!pX9Bxm>v~jaj;Bj?#tB$PcdBAFrk)h>%pYk{D+<)V0m7 z6NSa4{J8HLZ~l^M_G=ML)6|p%bKv9G=vt@Z)@h#~xJuZFWqh@17llQ|IOML~QqI2k zWctrIWD&p!3XMqQM8_vvuzmgeeEaURs2<>bPhNL`0k1A)&;Da*G#a}G=KN(TBjz0< zYQq&0N-SqA1h;8CJlq)aTu-Ds=JLUdPgeMyKXVlfnZJ#fCoSiE(lxt-B_m75oG(t$ zZdn35(k-tF(`<%tP@q3CQ$~=mek!j%RlmaT?7Eyyr%9_>@X^;~NONo+7P&&j*E=py zXL>wKPvu$cb((z{p=p!)tXw#Wy-P>YCb;7C#U(q>6Z+C(zT38!61mc5q3JXIAmeu2 zvH1PIjfTd_35;!y?IL!j>r-v`b<-TyPw9{50mmEn=|1|7TFBvJXVGYEZy)Sa#nxfN z84ZoQ$ccC6eL+f^@pasu9`3ySVn0%ToW+cRtsZFa^nd?brq29~OIL5&6g1^J>;oEr zHGfYcyY?5NI;r+Kdj8sug9(A<^406(Tp7|QvL(-_}1^r7zZ@=w1f*97iz zfWI%3$GyOXZ{A^iw@|wW=B2ux)T-%gJ z4pM3Ycez2mT1=lj)b62)8Q!e&)`iZ3paBp&JL~I~HqPX$w+2+WJmBU8BbV>w+1HnF z;&h_fHome1@i9Sh|K{NhX&F}e?Eo%jDJ=_{yW-*D&afeUNZUS#nFCut&|U8N&PLvy zyP8{f(s9V6$;bv?B+co-3;n_>{Tqc+&25#|GNHLE)q?_<{mv_#UOSl=T2-8ux%Ti4 z!bg3?7aM*ezo5uL!@o!;KNt2-Y|i>wT?uO#^hm!=$$Y%=HA`wlc+-Y#SP{*x1tVzv zfOLp=jW@bt!4}2YFcs}v+JkQG>mqi3^hX(=kYPR00Jw<7^y>B``{HJ? 
z`prHSe&>d)jG>Ev=2XHZ2POB&iat+}{+}Ma&@b$<`_|+YDwz1cH0y%qu2ie$$>`w& zxV~iuZ}n~FAmSks-O`U6i#jvq<<Bd10Lhg})As(EHY0L6yov9n*XY}#*9l6PjKOR5-$!KU?MNae|y@=lroknef zbcj|FP1w9ThOM)P(8$-#u3>r7Tg=|^;r#GncUm`-nBS5qmO0M3d#7^+~sy*}>wU8sn&sp6*jph(j0VB;R;lnj2X}k1{ z$$_adO>?iD&6d-5`Zv7Zr2&)1JWE*PdJa0ONEAvn+xK5*g4a+5Y= zSMkwjo5{;BbUa}bibkX5@aYu#PCvkypQ;v^yU_dw+!5*q=Wabk?ZKu?hp>0Kax<5y zA046B=SiHDRk_2E4eS&t<5&IxgadN{VfN0`=@_|uCxgZ<<;3YEv^t&R=|7_6<}3L4 z>oc^Nf0FfAs=PIe5?BT_)Wk*Y{+lB6h>Q*amI5P%9Dwm%Lzy~u5cTUB-;hX}l}m#c z7a4sVtzSKpbS%HKERrW`n687TvP{F?!^sTaoJ_e^ke6)-FkKTD^;bPog|nYWWOPU1 zGeFR$y~|(e!tf!z3GnxIK=V)K%*pd<%>V8(zvT$B6U-%GiY6}VK&1>}W&Fw$Ks(@7 z!#pO4y!Xu!+7DaAu04lQsT}DLcjZ=E0dIeNgvgcWg#acOm;^Mh2$%9x%$@o?ZCf?7TTPQ+q-5*fYkzC^T7^<>m|Z^9#6>+c+rQGznMY)FsKFR| zUdXrXQ5!hg0yJt(Ilmpb#d}+Ba7`&lP;@^qRTCFgsg=w}_UJt#qk9;L6g2te({Vi* z@>~!6e7$WNnsXO3S@7K@c4Y|C?)570o+d78*Q3U19?g1>UwIH{4!j8nwgL5&^#|!Q zdI@{?A2Ue`hNVfGRK~2&j?;emSs{Rt0q+43j|$9VX`1`Ajvw#=Fa-gl%g;8e!Hh|R zY1N{M)oYq!xr!b8uk+gW+d>WBdSI3&F6yznGMFUb>=7B=6j%xL74q$TcR&j!jE>@o zn$@ii8nq$xKj-_aoED~S+7V!)CN3(;*aMkFJ1FB<2^vSA>G*qyHt*aOe`^ zHt$AmMGYJ;T+3$CvUvKhy(k1QcYt9)q$z+gnWni<>$n>H{0{+dp&mr5U{4mlHJH!_ zA%u;HwMb2qohRq39haCBFG%Q10W1MlXyT$wx;#{6>!BWz(ZRqXV4RR|apzh-98Hs1 z6g2+69_%`JgP}j%tnw9TgKPp~G;vXxRdMhtzXgxT=oY}IKsRghW=sXmrkCpT#jj}` zD-YbNw&d-{zelQRploWv^lm%WV9y` z4a@>O9ncu>g8-|5g_^i1xlkuHOGkB&$Y=@rC5cxY&^)&M2)wU}ixPadd$YFT_lS({ z0DJ%TZty;>MgY|4V&LURv1-3<0lgdIY2*#OI4 zOMEsWONB-mzw!{!(lC!(?lxQFAps74Ofxtq8!EEW!d<(p*iHpj%<}tT=XzqchIr<=2 zcMF8(6fj8>7j?!K#@c3}xd)yX(FX?`S^zXzz`MXEYqMMI3K|1A9>7%eoiIGj0F45D zqWxkaQx+W1mC4o~*15nN=&Kcv*u!|*g{HYr>v#e)fN6l25t>E?d=7k|iHnli(-_(v zH21($&){VZdkkoHqmQ>-vB$BqTWIcqr>nu5(-oQLyvevJjTX{gJ4)_9?r-_TobBvHf9tKYnw`W&= iA|CxqxMMW_DgOt9z+M7aZ^(H70000^dJohFV>TA-|aL|B2AbM>rHDh2*0lsJ|O5n=K zmG~MM$emU6R6wAHG}=ph3g9=dqn5EA2oxd+0!7AvK)-;g$nPMKzZeMg!wv+J&jo?l zyz<+OV89=g4mz4@puhh&eV{k66^1Ggjif-4f+s5qDdSki-;yff%&M=R5l6qp!tl_ z^yCr29y=_9s+}XwGQ_fDHczLf)vWexvm+-yQ|4ZKTWDFkMAebglfMe`ZO(V^?VvNe 
zuJ!{vkCj%kf^LJZPG~}{%#aN3O}dpOOvBxD1uOjF&p*$ z*SH5B9bY&10M#1VE{G4tZN0@}ykudi(PfO;QbdgGQazWM66}BkC(Fb=pgal2k@nz^ z1PJoZWZ9o3B7Y~mKjtPfiRF{x2aAJAp|+H@-VXp9kdcgFm?j38iOF<%Z1u1lE((02 z>SV0?=HbF?`HY+F_vJ{@EdN*t5ON~%{CqfCbF?jg?AF}>5+N0pd}I#UJ}ic3OIPA< zs{J8`(FS3uvA*u(vJ|Xu8F8eOAH;N7fD1hfPJ-n14eBRGhNcu28Bq<Z^{EE4Q4y%D~T+neO2b~^3m_>^ILi{VstE%QWO64dTu2g^JX5+QYeRLs%F@QJbTp_EATFzy%CxQ|0d3?uzhfw(28T?$79F2UTz#?*&>r7 zC0@-pLX&(-RZ749H!0qJfm^TJ_5(yCNvXw*`_o%{mTZei)ufa)zeXnd>aYcE|DuO^ z;9H`|(mqSR8T3WCam8Dlq@ptj0tImZbu>+Gm9^@u9ns!}D&6p(t47&&Ov zDMiDv(5xz|79*}#@>~8e1Sd725W>cl@^OZbi{=<*5W&!;B(^_MX3N@xxwIm}Zsd7K zmA?3?pm5{V4V&r*ZZf1ESW>cn!Jv#K^mKF$iXJ z-J;l${X?^$@6dInzfFaYjP#Kt3v0YbVFip)TjDMZ-AC%Z&j%aoe^Tgvh(hmr#YX4%9Ri@Kuqrq8826BAy~tF$U1-ZpmW6(}l>?zx_g^($YO(S_Xs>;V zoz*HGTRnVFe(CYaMyAr|8Hqd{{<$({HT{=e>#GE47dceIss5*in@Y{hCRpjp_i8Qv z1zjXmX~hs>q48v?#d?3akZo+mE+Bwm^22K+bK<2U8%Y;9O;bG``F)weAl*GEf-(Jg zk=*Y|_Qq2I?T}fq0~N?#9N>({zE9jb^av-5YczlV=>;)GL9+zYq%z#h>sW z``YvA>az-Oni;15u!`m+57aPG1rdWE3coTpM3Y8B$XWh^&hsIM*0?qJW7)i4(UJFw z^UA!kBHE1*0>_JsKO~!zTR<@KC7Ne=$tDl1-SN>fG@U zV*`&je&MyAF8^vR@N(hvx%eVoYunNNbBfOE?p0R8M8H$^Y~+`1KeaveN%3R`*0l%x zx4jR#pGO28|DNLXhj}oH=767`&3yA|wXkX#`{}ovc4f&I4cP~2~^qAGoz~6*8&tz{EPkgM|*pr ziniY?4pz4PzWe#nnG8tDOTqPMVyz!YxK%%|HiqswSPDkG0I74QeJ|Oe%z766s6ih{ zL9c?{@1*~XU3m?;1Bh2+mgMdc-F$LGszXm!HfnZf{Z0LGLiK)KM64_6_-EXYj=7JnB-)kZdz*%T*P{wkWuxnB6XYJ4Cj5oJ zJZ0A7%ts9x9mV!}X(J}^#Q*MJQZj)Q-snid=_Tgtn~&LmIE<_FzF2$GQ%rmx|5Hu-8hf0!f0uT# zX^ENVLwZ)YLEI*RzSN_%xj)1yOV@*JQi?iWh}fbzaql}qKu5wP(?rdQ-%bTS`5H;l z4DT{}($MD9ZVKa;X{3vruz%+B@?MUpug5m5xM(mspVgYd&eceGX0EPTEZO<-mxV9W zn3Z|Tcle`clTt>ASaWDMZq|0=vQ_6iJOYaIS^SUA0UQG46%Ms69n_PuI~LUKjFQLaUG^&U&a-qV9tuq#F#0HeSx_ifhvFBn3>5L@^ngv zBm(Bi!OagX6=gsY?Rc?M2zCpzae7*HGw)W@%LvBR*KA$*l!ZN?ZwNz33#!Bd9S8lG(AAEcIgezBAVYg0L!btGgjJAjvC!KQhT9do8u9&nNtndNwXg7fzR-hkWu=E@t zZCLw;_?wjc^U5E-%9nZ|T$_Jtb_R^dN4PLh-XlyX$S)|stE(0NvlLlB z15=Z~To3wrqJ|`!@Ys1XO#ir4el(lGT&SZs%x9d7u9G&cwxPMe8M$}+cXK#t6fXdG 
ziwvHH@QB;?KksJkT-Wi>)9YwIlg`kQ3wCjzC%WYIZsTNrL%94a;G<5<)`LRHg{w{YihM+&{! zP}ZqQVeKDn8V|X+0OtR|gucd~t2QfwS^3YfS}BQS znA%5F>SKs#k48$$2nTAHqs*0wCU#&(eNIw&I))Kr#xjN#Pd^6gkS(d^uZlaIRR~{B z^K5llsi;t0s~EDDjr{)(|A~UXipV-ix#4r6J4?kHP>r*77t90do9nOqPtOXMzdomv ziWr~!(MnZy!)*5HpeGUlprPCc zfc39k2E(}VT2?U+F;-1_&0nukzORLQB-?Mh?iM!);3Udi8#@~wM@Z^~ctriXKP-eA z^Ba+D_mIrgy2NKtB#^_dLHfFjTJx8iO?=6Al=s{ihc+MN8r=6;FP+sYXH6TWnvqnN zCWnr05|@1RzbMP4AT45q%BaXyKfv`gyZN=R#UTx$tUy=}df0>01{P)$!-(+grNOp| zsXMGYfPCa%KE4Wk8nSkuk(t7dKandpjlIQ9Z?;>5X{FLa-a4Cj!@6Ovd5lEPU{ zVRpqv=IX6%H`jNb-mNOXgZ^_pc?NKI4M5q4o{5w%M7oiP(33RuqH-1>ri1hK#^f ze{scid}EDif5>gTExJC__C=Y}GxU?OZGxSaH7 zT}T{mPsL4ULWCsQ7?WM08D9wo?g%EG_z3^T_55YbJ|tYII*CFZ)lQ zh&$a`Nq5T1&>dNc?}VWu{T4R!TrN-lsNL&1#a>>4wnVK2&{_?s1v@uM&8j=xnDy&H z-}aBM=Mzjm+8_RYm=90(-7wBWZ5!6z>^}a4SCfq#Sbw+dax1u2voXbvQHqJm`)W;k zEm7)Xk#YfU@wJAPKo_JT5<0(o+-6~w;CiiG z16?E3y$oD+1EyWCyyOIJ;>Cf;W39X^nRC7Vzl^4qrAmJLh7GNMc~(~sFCi|cUSZCg zV_njMQ7xamxijdP$NGvdQ&-n_?{VIRsoInU7hO^fOLYHLWLm}{Z4koAsYF+Q)bQ>; zz{3c-YtOz;k>+YC+^7b7Ff@%jh6dbg$(FWz8Tt}mATREcmm=Tz&W}^B$LI&o+MPQR z0H!icny%9-e3RID^Efm&Zq+4d_x(*&0pZ7nln$-P89uo4M#j!?~r~fD%@B?efY>2n|Zu z?Fy!x`=zp7Z+O|6{(4cyQ&djNpK)&vA_BeJtv_A#183MFM~bapJ|I7t4pTh)eM9j0 zCIC?krvaG3>biPck;18Z#c#iW(DdFrg0bO+s44pz)>Q17 z@w}c#b^`bgu*Kc1kh+hOa?HFODA^hB0vA3gx*Y-7$oLK$3Y{pHW8?5FB%mnZ*=e*` zMg;`~5^;3J==J#Gg|}k=P96Fn{JmzO!r@?4NXh8Y55d3#q;U2r&qbaq&C1AY%Pz$y zLL9EYgs@V20f7(h+&Ifk=|$<>2mJTYP!o!4e2o~o1J{TQ@7?^4r8-DeYpQ5y>l`ik zee?A6F8_&O0lSoFPCKUCiP0>hcPOf6lrFZ1VZ!AlA|%}_=%`g@UB3ij@os6x0>A6C zwNJUwj->mnmaViaJ~2?m(xlOQ&3CX-SF~ND**Jm@om!jt<6!^6#5tF51qEp_(=}&C zZj0>k69zZV(8bQT_P$lh<@|Q(8xukoLaeT))9MnCoT*WLGl{=l7rF0m$NH+h z^Zxjt$$j&|`&l<^6g5rJw0r$WpTdGY*idWmTHo z^rLTG{ywkb5H>w5oQgzm|(nmqvnGSa_}G^npYT@(YfzT7x{B6O3ml` zzAwDlefZQT@U2e;cEtV7&z_6@<;kJFbF@0WKzYUNsYL{l<>LcMNQSFJqUnL$ax|7d z;Dh5oxGyf;erpl#Zt_Ku(AAMuVhSZ3hS-wlInEdsZA%+1)*TVAo_{>DxaND@yXG78 z=^fb?FH!i_BPlfEnPAWInBe>_$F$FO18H$BrXB4N@0{USTU?7~Fq6OW?98J9g@3I( 
z%90`wx(5d@yzxuVh1&^Ub9BCE07TsLuD{7G$4|M-RmfCJDn+5Vu-=II=yJ==s{)$0 zt)j3Ah)0^{qGBp}n(WScK&aN}eR0l|rVf-q;X;wk!i#UtUE_zYye?3j!-H%59EJZZ zVY(O!wV-%!_IURx&fQ5y3-EpKwU#Ax6jvC?rY-1*SL+`P zWkiF;{rSe&w>&s-N^Pic#AMNlGqc7|-B1@`H4v9%L3(<=wQ@>%+f<5a9d=g&-S2~wFfz>w;u{4#-m&!jXkPm6#ut`V zAceetqob=oPgHBCKAk5g1S2dRx}Le-9)o+@bnhu%1uM+Z>i}z?$Rr~2=I7bl+u=a$ zo8A?u9w3N^F0T$%+yr>);?-<-?>$rxK-mY#k4)oSMe}tcTuk^6!=wN1vdnF;Vnn7l z1m$G#KmyoLjV!m6vnpbNlf`!20wgz?Y_bmG8$7bx6W3327ZwzHY1R2=ZLJm3U37+8 zxNr*rTOZK*(F?hxFcDi8xi^hz)%d&v0*(Nuy*I-d#0)GyZ96^MQF9!UyyYDtGWjS% zw7|e;`_U^e^WEczJL&8IB%c2Gu<%P6nV5MtY0hUI{(;N|J$gy`dwSTB$R4LsKG|)g zNvH4E)+RDZYBN@-wtOci`LJ&rAD}wB`%i#0O804cjh!WXU z()B@6=Fp)Tr_`Fue!Co5jYeIdw-HhgS&I(rzIIXEHwv$U)_E}Wg~IBotITS=b~ev| z-9{qE2J!3}WiUJO{1VuPw4mr+ks$*NG$Zk5t&O&1;VXtx+H9!Khh_$u72Aygj9Ew; z3x{d+medl$-&sMkp&sWBdGLY%BS`UuU8MElVLBA|>FDT-SDtvlWb9sngi^?h*prlk zViGb{-IzY+g;ZIGc8n^7LkMLFfr-mq4;p*93m@WLVPx%FQw-!m0M*-n%P#(DhYSbu z@tq+V5R|?qf8m0iXW&~csER~0aV_jreAot=jg#JgICA6_oN3uX?FHQ`Pjwv<8JubU z=u+R(qz=hOh)NrXNga}+Z&x_WRm=nv#Rj4`VAFrr4=y7<`R93%p#$300~}+Q3iKQf zJy(d^KkBU{+(^2llCtH@>08y&d)@L0@YGIo%lN=f_wKM$&;gi%Hhf z{}?*#VPjg=9MsY_1Z?V$OEWR8XfL7atIvNJ=az>vwaOBOOwRu}mq_*+AwGX``CwtO zw6-5j7rS;f$ibO{;OyphX)QX0f$hC8A2Jd_7mxoP(%JSRwtZPB9btg8C9n zuqNEYKCwfXwMHKsBPYxtIn4bvCUcn@KW8Ne5wwe)Ik&f}vt8xASk5_!*1U}7KMIsC zJV7%+Q^*2*ML|4>!jsvOimrl!?r!k?o|d?{r1V`nHWr-?;>7P=go10y?9J=ux`f+A zg{2i-WH39>ORJgmTNSVE{4;ioPzYKfjktNPfQ9kL4M^$Hv@g6#3VnDVKjPl&z16aZ zn_sH(+Oh$9rXn0EA5H8UB5(9M|Mn-4*um(tmvhl4OLd)p7!tCkaa2Ky;E{*;P_Bi+ zcDx2q&v}Q!5He=q;Xo+v+iSMHJ$8=S9a>#HbJz)Xs zNfsD67T@i#(^?0#SU7`jH0b_?zGgpxVpYznRam~+tB`?>Ca!+JFQ*#3&8lEaw!9^0 zx1F;6Ua5mN4SY^TiZ&pdFv1UuN$I+9{imm9v9Us;MEYSXwr>XeJ#h7ql!xp}lt^tC zFeNFKpnp?J0sBJZ`erBNhcud`n&MuZs4Pg4u#R0JTP7*fkTN3Mexqyx>I7eM68@6Z zsMt41PrY^LfsqjE_y`xA%I7hH&xARAT^?g!fIDd}d5WmTpmu~@GsSQsc~l`~M8|LE z?{j6(5;#m;DzSe-5iIA9MGT(-SsKcY!-+9bh3abP!bfU+1J6c^Do9==BtZYt&c(0) zs|~A3eiKmS5e&@1R}y?+pZtFEJb7XlbNU7&Q~IX4eS`nI*l5KdC#I@uzy=!Com~9! 
zs*~hb_xTzx75U=jn9bc&{`)!wHXuKz=r#VspU{RM+%R;p#1`H*Q(YlDB_IG@zFb4{ zz`WIv7f+*R*^85yxWFltmlW^DjtV26v(p1jCY#wfZrHWgjMtRAf_Hz?qB6#r>o^Q)9R6uY^|$%Zr&!t>SGEhKch13PmHVds5=D91RCNoCx)? zQ|TC?Zl%&BPa#=U-<9tISFekiyUShUfLjM4Vtk3TRJ1=RQs>o!dd71TSZ=R?IupHc zDE%1V7+Bme4aN(^{P{^s#kSgj^?fv+3vicV#=@|x^>ZAwF7E!TMv?at;4|&mp85gr zk#X|`7T$x~9_nX7l$iWXDKnt4)KkJY<~q+hHC5m5s|(!l->E_Z8=+>nQ-gmPaM_g10oM|Y%7mg zBz#O;{sut4si-q?F7F)TP_@=7MeY(HC#L-8rm7cCPP_b{0a&Ln%mK+=)VfMLXN({{ z=7z-cC%t8B(?W2!{dwa7@3)r!9cXV=8{8z;LxsahO@h@p~|_} zUuI$*lYdTwu1}$Qxx|;BJFk<79;S_iKgH`)74iLM zJh0mYXC_QdS6t*a^FuNze{zT|#g&kdOQg@sF*-nL0!le|`NggCdyy+wJz(WY<=p13 zS7M$ytGif-oe`it$cT0&Y%y2WbdjHHxG&O zwCVgAcK{&VG8=g%x5QwWQ2G-EL^|q!#n7IV>hd}{(o_uJINDi})H~)(c(sp;@0 Date: Thu, 12 Dec 2019 20:21:49 +0530 Subject: [PATCH 149/352] Fixes #107 by adding executable permission https://github.com/freedomofpress/securedrop-debian-packaging/issues/107 and also updates the version. --- changelog.md | 4 ++++ securedrop-log | 0 securedrop_log/VERSION | 2 +- 3 files changed, 5 insertions(+), 1 deletion(-) mode change 100644 => 100755 securedrop-log diff --git a/changelog.md b/changelog.md index 9c18f3c15..c62e4db27 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.0.2 + + * Fixes execution permission for securedrop-log command. + ## 0.0.1 * Initial release. 
diff --git a/securedrop-log b/securedrop-log old mode 100644 new mode 100755 diff --git a/securedrop_log/VERSION b/securedrop_log/VERSION index 8acdd82b7..4e379d2bf 100644 --- a/securedrop_log/VERSION +++ b/securedrop_log/VERSION @@ -1 +1 @@ -0.0.1 +0.0.2 From 1ad7e7f940543fd5c5fbd4a2f1bdd8ab55eef210 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 13 Dec 2019 17:14:38 +0530 Subject: [PATCH 150/352] Typo fixes for pypi upload --- MANIFEST.in | 2 +- changelog.md | 4 ++++ securedrop_log/VERSION | 2 +- setup.py | 8 +++----- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index b5ca54051..6d11dc950 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,7 +4,7 @@ include changelog.md include build-requirements.txt include requirements.txt include securedrop_log/*.py -include securdrop_log/VERSION +include securedrop_log/VERSION include setup.py include securedrop-log include securedrop.Log diff --git a/changelog.md b/changelog.md index c62e4db27..b82e78e7a 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.0.3 + + * Fixes typos MANIFEST.in and setup.py + ## 0.0.2 * Fixes execution permission for securedrop-log command. 
diff --git a/securedrop_log/VERSION b/securedrop_log/VERSION index 4e379d2bf..bcab45af1 100644 --- a/securedrop_log/VERSION +++ b/securedrop_log/VERSION @@ -1 +1 @@ -0.0.2 +0.0.3 diff --git a/setup.py b/setup.py index 78592406f..790b992e7 100644 --- a/setup.py +++ b/setup.py @@ -21,16 +21,14 @@ python_requires=">=3.5", url="https://github.com/freedomofpress/securedrop-log", packages=["securedrop_log",], - package_data={ - 'securedrop_log': ['VERSION'], - }, + package_data={"securedrop_log": ["VERSION"],}, classifiers=( "Development Status :: 3 - Alpha", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Intended Audience :: Developers", - "OOperating System :: POSIX :: Linux", + "Operating System :: POSIX :: Linux", ), - data_files = [("sbin",["securedrop-log"])] + data_files=[("sbin", ["securedrop-log"])], ) From 089af705ebbb8b4645a06dc09050bf7656dd2532 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 4 Dec 2019 18:59:36 -0800 Subject: [PATCH 151/352] check if already unlocked and mounted --- securedrop_export/export.py | 61 +++++++++++++++++++++++-------------- 1 file changed, 38 insertions(+), 23 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 00c4a83be..500b9cf78 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -277,25 +277,37 @@ def unlock_luks_volume(self, encryption_key): if 'UUID' in items[0]: self.encrypted_device = 'luks-' + items[1] - # the luks device is not already unlocked - if not os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): - logger.debug('Unlocking luks volume {}'.format(self.encrypted_device)) - p = subprocess.Popen( - ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE - ) - logger.debug('Passing key') - 
p.communicate(input=str.encode(encryption_key, "utf-8")) - rc = p.returncode - if rc != 0: - logger.error('Bad phassphrase for {}'.format(self.encrypted_device)) - self.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) + # the luks device is already unlocked + if os.path.exists(os.path.join('/dev/mapper/', self.encrypted_device)): + logger.debug('Device already unlocked') + return + + logger.debug('Unlocking luks volume {}'.format(self.encrypted_device)) + p = subprocess.Popen( + ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + logger.debug('Passing key') + p.communicate(input=str.encode(encryption_key, "utf-8")) + rc = p.returncode + if rc != 0: + logger.error('Bad phassphrase for {}'.format(self.encrypted_device)) + self.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) except subprocess.CalledProcessError: self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) def mount_volume(self): + # If the drive is already mounted then we don't need to mount it again + output = subprocess.check_output( + ["lsblk", "-o", "MOUNTPOINT", "--noheadings", self.device]) + mountpoint = output.decode('utf-8').strip() + if mountpoint: + logger.debug('The device is already mounted') + self.mountpoint = mountpoint + return + # mount target not created, create folder if not os.path.exists(self.mountpoint): self.safe_check_call( @@ -329,16 +341,19 @@ def copy_submission(self): except (subprocess.CalledProcessError, OSError): self.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) finally: - # Finally, we sync the filesystem, unmount the drive and lock the - # luks volume, and exit 0 logger.info('Syncing filesystems') subprocess.check_call(["sync"]) - logger.info('Unmounting drive from {}'.format(self.mountpoint)) - subprocess.check_call(["sudo", "umount", self.mountpoint]) - logger.info('Locking luks volume {}'.format(self.encrypted_device)) - 
subprocess.check_call( - ["sudo", "cryptsetup", "luksClose", self.encrypted_device] - ) + + if os.path.exists(self.mountpoint): + logger.info('Unmounting drive from {}'.format(self.mountpoint)) + subprocess.check_call(["sudo", "umount", self.mountpoint]) + + if os.path.exists(os.path.join('/dev/mapper', self.encrypted_device)): + logger.info('Locking luks volume {}'.format(self.encrypted_device)) + subprocess.check_call( + ["sudo", "cryptsetup", "luksClose", self.encrypted_device] + ) + logger.info('Deleting temporary directory {}'.format(self.tmpdir)) subprocess.check_call(["rm", "-rf", self.tmpdir]) sys.exit(0) From 6c12a291d672db26ec29bdc0d2244ea81208fc9f Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 17 Dec 2019 16:15:05 -0500 Subject: [PATCH 152/352] app: collect status/exception related functionality/classes --- securedrop_export/exceptions.py | 44 +++++++++++++++++++++++++++++++ securedrop_export/export.py | 46 +-------------------------------- 2 files changed, 45 insertions(+), 45 deletions(-) create mode 100644 securedrop_export/exceptions.py diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py new file mode 100644 index 000000000..a3109e00f --- /dev/null +++ b/securedrop_export/exceptions.py @@ -0,0 +1,44 @@ +from enum import Enum + + +class ExportStatus(Enum): + + # General errors + ERROR_FILE_NOT_FOUND = 'ERROR_FILE_NOT_FOUND' + ERROR_EXTRACTION = 'ERROR_EXTRACTION' + ERROR_METADATA_PARSING = 'ERROR_METADATA_PARSING' + ERROR_ARCHIVE_METADATA = 'ERROR_ARCHIVE_METADATA' + ERROR_USB_CONFIGURATION = 'ERROR_USB_CONFIGURATION' + ERROR_GENERIC = 'ERROR_GENERIC' + + # USB preflight related errors + USB_CONNECTED = 'USB_CONNECTED' + USB_NOT_CONNECTED = 'USB_NOT_CONNECTED' + ERROR_USB_CHECK = 'ERROR_USB_CHECK' + + # USB Disk preflight related errors + USB_ENCRYPTED = 'USB_ENCRYPTED' + USB_ENCRYPTION_NOT_SUPPORTED = 'USB_ENCRYPTION_NOT_SUPPORTED' + USB_DISK_ERROR = 'USB_DISK_ERROR' + + # Printer preflight related errors + 
ERROR_PRINTER_NOT_FOUND = 'ERROR_PRINTER_NOT_FOUND' + ERROR_PRINTER_NOT_SUPPORTED = 'ERROR_PRINTER_NOT_SUPPORTED' + ERROR_PRINTER_DRIVER_UNAVAILABLE = 'ERROR_PRINTER_DRIVER_UNAVAILABLE' + ERROR_PRINTER_INSTALL = 'ERROR_PRINTER_INSTALL' + + # Disk export errors + USB_BAD_PASSPHRASE = 'USB_BAD_PASSPHRASE' + ERROR_USB_MOUNT = 'ERROR_USB_MOUNT' + ERROR_USB_WRITE = 'ERROR_USB_WRITE' + + # Printer export errors + ERROR_PRINT = 'ERROR_PRINT' + + +class TimeoutException(Exception): + pass + + +def handler(s, f): + raise TimeoutException("Timeout") diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 500b9cf78..f4e84bb98 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -13,7 +13,7 @@ import time from typing import List, Optional # noqa: F401 -from enum import Enum +from securedrop_export.exceptions import ExportStatus, handler, TimeoutException PRINTER_NAME = "sdw-printer" PRINTER_WAIT_TIMEOUT = 60 @@ -27,41 +27,6 @@ logger = logging.getLogger(__name__) -class ExportStatus(Enum): - - # General errors - ERROR_FILE_NOT_FOUND = 'ERROR_FILE_NOT_FOUND' - ERROR_EXTRACTION = 'ERROR_EXTRACTION' - ERROR_METADATA_PARSING = 'ERROR_METADATA_PARSING' - ERROR_ARCHIVE_METADATA = 'ERROR_ARCHIVE_METADATA' - ERROR_USB_CONFIGURATION = 'ERROR_USB_CONFIGURATION' - ERROR_GENERIC = 'ERROR_GENERIC' - - # USB preflight related errors - USB_CONNECTED = 'USB_CONNECTED' - USB_NOT_CONNECTED = 'USB_NOT_CONNECTED' - ERROR_USB_CHECK = 'ERROR_USB_CHECK' - - # USB Disk preflight related errors - USB_ENCRYPTED = 'USB_ENCRYPTED' - USB_ENCRYPTION_NOT_SUPPORTED = 'USB_ENCRYPTION_NOT_SUPPORTED' - USB_DISK_ERROR = 'USB_DISK_ERROR' - - # Printer preflight related errors - ERROR_PRINTER_NOT_FOUND = 'ERROR_PRINTER_NOT_FOUND' - ERROR_PRINTER_NOT_SUPPORTED = 'ERROR_PRINTER_NOT_SUPPORTED' - ERROR_PRINTER_DRIVER_UNAVAILABLE = 'ERROR_PRINTER_DRIVER_UNAVAILABLE' - ERROR_PRINTER_INSTALL = 'ERROR_PRINTER_INSTALL' - - # Disk export errors - USB_BAD_PASSPHRASE = 
'USB_BAD_PASSPHRASE' - ERROR_USB_MOUNT = 'ERROR_USB_MOUNT' - ERROR_USB_WRITE = 'ERROR_USB_WRITE' - - # Printer export errors - ERROR_PRINT = 'ERROR_PRINT' - - class Metadata(object): """ Object to parse, validate and store json metadata from the sd-export archive. @@ -528,12 +493,3 @@ def print_file(self, file_to_print): command=["xpp", "-P", self.printer_name, file_to_print], error_message=ExportStatus.ERROR_PRINT.value ) - - -# class ends here -class TimeoutException(Exception): - pass - - -def handler(s, f): - raise TimeoutException("Timeout") From f07ecc8f453d52500ee30f2789bb41ca2bab6bf5 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 17 Dec 2019 18:43:07 -0500 Subject: [PATCH 153/352] app, test: create separate *Action objects --- securedrop_export/export.py | 400 ++-------------------------- securedrop_export/main.py | 56 ++-- securedrop_export/print/__init__.py | 0 securedrop_export/print/actions.py | 229 ++++++++++++++++ securedrop_export/usb/__init__.py | 0 securedrop_export/usb/actions.py | 230 ++++++++++++++++ tests/print/__init__.py | 0 tests/print/test_actions.py | 127 +++++++++ tests/test_export.py | 285 -------------------- tests/test_main.py | 3 + tests/usb/__init__.py | 0 tests/usb/test_actions.py | 203 ++++++++++++++ 12 files changed, 837 insertions(+), 696 deletions(-) create mode 100644 securedrop_export/print/__init__.py create mode 100644 securedrop_export/print/actions.py create mode 100644 securedrop_export/usb/__init__.py create mode 100644 securedrop_export/usb/actions.py create mode 100644 tests/print/__init__.py create mode 100644 tests/print/test_actions.py create mode 100644 tests/test_main.py create mode 100644 tests/usb/__init__.py create mode 100644 tests/usb/test_actions.py diff --git a/securedrop_export/export.py b/securedrop_export/export.py index f4e84bb98..976b1bd13 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -1,28 +1,17 @@ #!/usr/bin/env python3 +import abc import datetime import json 
import logging import os import shutil -import signal import subprocess import sys import tarfile import tempfile -import time -from typing import List, Optional # noqa: F401 -from securedrop_export.exceptions import ExportStatus, handler, TimeoutException - -PRINTER_NAME = "sdw-printer" -PRINTER_WAIT_TIMEOUT = 60 -MOUNTPOINT = "/media/usb" -ENCRYPTED_DEVICE = "encrypted_volume" -BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" -BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" -LASERJET_DRIVER = "/usr/share/cups/drv/hpcups.drv" -LASERJET_PPD = "/usr/share/cups/model/hp-laserjet_6l.ppd" +from securedrop_export.exceptions import ExportStatus logger = logging.getLogger(__name__) @@ -87,13 +76,6 @@ def is_valid(self): class SDExport(object): def __init__(self, archive, config_path): - self.device = None # Optional[str] - self.mountpoint = MOUNTPOINT - self.encrypted_device = ENCRYPTED_DEVICE - - self.printer_name = PRINTER_NAME - self.printer_wait_timeout = PRINTER_WAIT_TIMEOUT - self.archive = archive self.submission_dirname = os.path.basename(self.archive).split(".")[0] self.target_dirname = "sd-export-{}".format( @@ -101,15 +83,13 @@ def __init__(self, archive, config_path): ) self.tmpdir = tempfile.mkdtemp() - def safe_check_call(self, command, error_message): - """ - Safely wrap subprocess.check_output to ensure we always return 0 and - log the error messages - """ + def extract_tarball(self): try: - subprocess.check_call(command) - except subprocess.CalledProcessError as ex: - self.exit_gracefully(msg=error_message, e=ex.output) + logger.info('Extracting tarball {} into {}'.format(self.archive, self.tmpdir)) + with tarfile.open(self.archive) as tar: + tar.extractall(self.tmpdir) + except Exception: + self.exit_gracefully(ExportStatus.ERROR_EXTRACTION.value) def exit_gracefully(self, msg, e=False): """ @@ -135,7 +115,17 @@ def exit_gracefully(self, msg, e=False): # the file with another application sys.exit(0) - def popup_message(self, msg): + def 
safe_check_call(self, command, error_message): + """ + Safely wrap subprocess.check_output to ensure we always return 0 and + log the error messages + """ + try: + subprocess.check_call(command) + except subprocess.CalledProcessError as ex: + self.exit_gracefully(msg=error_message, e=ex.output) + + def popup_message(self, msg: str): self.safe_check_call( command=[ "notify-send", @@ -148,348 +138,14 @@ def popup_message(self, msg): error_message="Error sending notification:" ) - def extract_tarball(self): - try: - logger.info('Extracting tarball {} into {}'.format(self.archive, self.tmpdir)) - with tarfile.open(self.archive) as tar: - tar.extractall(self.tmpdir) - except Exception: - self.exit_gracefully(ExportStatus.ERROR_EXTRACTION.value) - - def check_usb_connected(self, exit=False) -> None: - usb_devices = self._get_connected_usbs() - - if len(usb_devices) == 0: - logger.info('0 USB devices connected') - self.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) - elif len(usb_devices) == 1: - logger.info('1 USB device connected') - self.device = usb_devices[0] - if exit: - self.exit_gracefully(ExportStatus.USB_CONNECTED.value) - elif len(usb_devices) > 1: - logger.info('>1 USB devices connected') - # Return generic error until freedomofpress/securedrop-export/issues/25 - self.exit_gracefully(ExportStatus.ERROR_GENERIC.value) - - def _get_connected_usbs(self) -> List[str]: - logger.info('Performing usb preflight') - # List all block devices attached to VM that are disks and not partitions. 
- try: - lsblk = subprocess.Popen(["lsblk", "-o", "NAME,TYPE"], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - grep = subprocess.Popen(["grep", "disk"], stdin=lsblk.stdout, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - command_output = grep.stdout.readlines() - - # The first word in each element of the command_output list is the device name - attached_devices = [x.decode('utf8').split()[0] for x in command_output] - except subprocess.CalledProcessError: - self.exit_gracefully(ExportStatus.ERROR_GENERIC.value) - - # Determine which are USBs by selecting those block devices that are removable disks. - usb_devices = [] - for device in attached_devices: - try: - removable = subprocess.check_output( - ["cat", "/sys/class/block/{}/removable".format(device)], - stderr=subprocess.PIPE) - is_removable = int(removable.decode('utf8').strip()) - except subprocess.CalledProcessError: - is_removable = False - - if is_removable: - usb_devices.append("/dev/{}".format(device)) - - return usb_devices - - def set_extracted_device_name(self): - try: - device_and_partitions = subprocess.check_output( - ["lsblk", "-o", "TYPE", "--noheadings", self.device], stderr=subprocess.PIPE) - - # we don't support multiple partitions - partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') - if partition_count > 1: - logger.debug("multiple partitions not supported") - self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) - - # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted - self.device = self.device if partition_count == 0 else self.device + '1' - except subprocess.CalledProcessError: - self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) - - def check_luks_volume(self): - # cryptsetup isLuks returns 0 if the device is a luks volume - # subprocess with throw if the device is not luks (rc !=0) - logger.info('Checking if volume is luks-encrypted') - self.set_extracted_device_name() - 
logger.debug("checking if {} is luks encrypted".format(self.device)) - self.safe_check_call( - command=["sudo", "cryptsetup", "isLuks", self.device], - error_message=ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - ) - self.exit_gracefully(ExportStatus.USB_ENCRYPTED.value) - - def unlock_luks_volume(self, encryption_key): - try: - # get the encrypted device name - self.set_extracted_device_name() - luks_header = subprocess.check_output(["sudo", "cryptsetup", "luksDump", self.device]) - luks_header_list = luks_header.decode('utf-8').split('\n') - for line in luks_header_list: - items = line.split('\t') - if 'UUID' in items[0]: - self.encrypted_device = 'luks-' + items[1] - - # the luks device is already unlocked - if os.path.exists(os.path.join('/dev/mapper/', self.encrypted_device)): - logger.debug('Device already unlocked') - return - - logger.debug('Unlocking luks volume {}'.format(self.encrypted_device)) - p = subprocess.Popen( - ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE - ) - logger.debug('Passing key') - p.communicate(input=str.encode(encryption_key, "utf-8")) - rc = p.returncode - if rc != 0: - logger.error('Bad phassphrase for {}'.format(self.encrypted_device)) - self.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) - except subprocess.CalledProcessError: - self.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) - - def mount_volume(self): - # If the drive is already mounted then we don't need to mount it again - output = subprocess.check_output( - ["lsblk", "-o", "MOUNTPOINT", "--noheadings", self.device]) - mountpoint = output.decode('utf-8').strip() - if mountpoint: - logger.debug('The device is already mounted') - self.mountpoint = mountpoint - return - - # mount target not created, create folder - if not os.path.exists(self.mountpoint): - self.safe_check_call( - command=["sudo", "mkdir", self.mountpoint], - 
error_message=ExportStatus.ERROR_USB_MOUNT - ) - - mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) - logger.info('Mounting {}'.format(mapped_device_path)) - self.safe_check_call( - command=["sudo", "mount", mapped_device_path, self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT.value - ) - self.safe_check_call( - command=["sudo", "chown", "-R", "user:user", self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT.value - ) - - def copy_submission(self): - # move files to drive (overwrites files with same filename) and unmount drive - # we don't use safe_check_call here because we must lock and - # unmount the drive as part of the finally block - try: - target_path = os.path.join(self.mountpoint, self.target_dirname) - subprocess.check_call(["mkdir", target_path]) - export_data = os.path.join(self.tmpdir, "export_data/") - logger.info('Copying file to {}'.format(self.target_dirname)) - subprocess.check_call(["cp", "-r", export_data, target_path]) - logger.info('File copied successfully to {}'.format(self.target_dirname)) - self.popup_message("Files exported successfully to disk.") - except (subprocess.CalledProcessError, OSError): - self.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) - finally: - logger.info('Syncing filesystems') - subprocess.check_call(["sync"]) - - if os.path.exists(self.mountpoint): - logger.info('Unmounting drive from {}'.format(self.mountpoint)) - subprocess.check_call(["sudo", "umount", self.mountpoint]) - if os.path.exists(os.path.join('/dev/mapper', self.encrypted_device)): - logger.info('Locking luks volume {}'.format(self.encrypted_device)) - subprocess.check_call( - ["sudo", "cryptsetup", "luksClose", self.encrypted_device] - ) - - logger.info('Deleting temporary directory {}'.format(self.tmpdir)) - subprocess.check_call(["rm", "-rf", self.tmpdir]) - sys.exit(0) - - def wait_for_print(self): - # use lpstat to ensure the job was fully transfered to the printer - # returns True if print was 
successful, otherwise will throw exceptions - signal.signal(signal.SIGALRM, handler) - signal.alarm(self.printer_wait_timeout) - printer_idle_string = "printer {} is idle".format(self.printer_name) - while True: - try: - logger.info('Running lpstat waiting for printer {}'.format(self.printer_name)) - output = subprocess.check_output(["lpstat", "-p", self.printer_name]) - if printer_idle_string in output.decode("utf-8"): - logger.info('Print completed') - return True - else: - time.sleep(5) - except subprocess.CalledProcessError: - self.exit_gracefully(ExportStatus.ERROR_PRINT.value) - except TimeoutException: - logger.error('Timeout waiting for printer {}'.format(self.printer_name)) - self.exit_gracefully(ExportStatus.ERROR_PRINT.value) - return True - - def get_printer_uri(self): - # Get the URI via lpinfo and only accept URIs of supported printers - printer_uri = "" - try: - output = subprocess.check_output(["sudo", "lpinfo", "-v"]) - except subprocess.CalledProcessError: - self.exit_gracefully(ExportStatus.ERROR_PRINTER_URI.value) - - # fetch the usb printer uri - for line in output.split(): - if "usb://" in line.decode("utf-8"): - printer_uri = line.decode("utf-8") - logger.info('lpinfo usb printer: {}'.format(printer_uri)) - - # verify that the printer is supported, else exit - if printer_uri == "": - # No usb printer is connected - logger.info('No usb printers connected') - self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) - elif not any(x in printer_uri for x in ("Brother", "LaserJet")): - # printer url is a make that is unsupported - logger.info('Printer {} is unsupported'.format(printer_uri)) - self.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) - - logger.info('Printer {} is supported'.format(printer_uri)) - return printer_uri - - def install_printer_ppd(self, uri): - if not any(x in uri for x in ("Brother", "LaserJet")): - logger.error("Cannot install printer ppd for unsupported printer: {}".format(uri)) - 
self.exit_gracefully(msg=ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) - return - - if "Brother" in uri: - printer_driver = BRLASER_DRIVER - printer_ppd = BRLASER_PPD - elif "LaserJet" in uri: - printer_driver = LASERJET_DRIVER - printer_ppd = LASERJET_PPD - - # Some drivers don't come with ppd files pre-compiled, we must compile them - self.safe_check_call( - command=[ - "sudo", - "ppdc", - printer_driver, - "-d", - "/usr/share/cups/model/", - ], - error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value - ) - return printer_ppd - - def setup_printer(self, printer_uri, printer_ppd): - # Add the printer using lpadmin - logger.info('Setting up printer name {}'.format(self.printer_name)) - self.safe_check_call( - command=[ - "sudo", - "lpadmin", - "-p", - self.printer_name, - "-v", - printer_uri, - "-P", - printer_ppd, - ], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value - ) - # Activate the printer so that it can receive jobs - logger.info('Activating printer {}'.format(self.printer_name)) - self.safe_check_call( - command=["sudo", "lpadmin", "-p", self.printer_name], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value - ) - # worksaround for version of lpadmin/cups in debian buster: - # see https://forums.developer.apple.com/thread/106112 - self.safe_check_call( - command=["sudo", "cupsaccept", self.printer_name], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value - ) - # A non-zero return code is expected here, but the command is required - # and works as expected. 
- command = ["sudo", "cupsenable", self.printer_name] - try: - subprocess.check_call(command) - except subprocess.CalledProcessError: - pass - - # Allow user to print (without using sudo) - logger.info('Allow user to print {}'.format(self.printer_name)) - self.safe_check_call( - command=["sudo", "lpadmin", "-p", self.printer_name, "-u", "allow:user"], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value - ) - - def print_test_page(self): - logger.info('Printing test page') - self.print_file("/usr/share/cups/data/testprint") - self.popup_message("Printing test page") - - def print_all_files(self): - files_path = os.path.join(self.tmpdir, "export_data/") - files = os.listdir(files_path) - print_count = 0 - for f in files: - file_path = os.path.join(files_path, f) - self.print_file(file_path) - print_count += 1 - msg = "Printing document {} of {}".format(print_count, len(files)) - self.popup_message(msg) - - def is_open_office_file(self, filename): - OPEN_OFFICE_FORMATS = [ - ".doc", - ".docx", - ".xls", - ".xlsx", - ".ppt", - ".pptx", - ".odt", - ".ods", - ".odp", - ] - for extension in OPEN_OFFICE_FORMATS: - if os.path.basename(filename).endswith(extension): - return True - return False - - def print_file(self, file_to_print): - # If the file to print is an (open)office document, we need to call unoconf to - # convert the file to pdf as printer drivers do not support this format - if self.is_open_office_file(file_to_print): - logger.info('Converting Office document to pdf') - folder = os.path.dirname(file_to_print) - converted_filename = file_to_print + ".pdf" - converted_path = os.path.join(folder, converted_filename) - self.safe_check_call( - command=["unoconv", "-o", converted_path, file_to_print], - error_message=ExportStatus.ERROR_PRINT.value - ) - file_to_print = converted_path +class ExportAction(abc.ABC): + """ + This export interface defines the method that export + methods should implement. 
+ """ - logger.info('Sending file to printer {}:{}'.format(self.printer_name, file_to_print)) - self.safe_check_call( - command=["xpp", "-P", self.printer_name, file_to_print], - error_message=ExportStatus.ERROR_PRINT.value - ) + @abc.abstractmethod + def run(self) -> None: + """Run logic""" + pass diff --git a/securedrop_export/main.py b/securedrop_export/main.py index d3b5a0bef..0696af7a8 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -1,7 +1,9 @@ import logging from securedrop_export import export -from securedrop_export.export import ExportStatus +from securedrop_export.exceptions import ExportStatus +from securedrop_export.print.actions import PrintExportAction, PrintTestPageAction +from securedrop_export.usb.actions import USBDiskTestAction, USBExportAction, USBTestAction logger = logging.getLogger(__name__) @@ -14,42 +16,18 @@ def __main__(submission): except Exception: submission.exit_gracefully(ExportStatus.ERROR_METADATA_PARSING.value) - if submission.archive_metadata.is_valid(): - if submission.archive_metadata.export_method == "usb-test": - logger.info('Export archive is usb-test') - submission.check_usb_connected(exit=True) - elif submission.archive_metadata.export_method == "disk": - logger.info('Export archive is disk') - # check_usb_connected looks for the drive, sets the drive to use - submission.check_usb_connected() - logger.info('Unlocking volume') - # exports all documents in the archive to luks-encrypted volume - submission.unlock_luks_volume(submission.archive_metadata.encryption_key) - logger.info('Mounting volume') - submission.mount_volume() - logger.info('Copying submission to drive') - submission.copy_submission() - elif submission.archive_metadata.export_method == "disk-test": - logger.info('Export archive is disk-test') - # check_usb_connected looks for the drive, sets the drive to use - submission.check_usb_connected() - submission.check_luks_volume() - elif submission.archive_metadata.export_method == 
"printer": - logger.info('Export archive is printer') - # prints all documents in the archive - logger.info('Searching for printer') - printer_uri = submission.get_printer_uri() - logger.info('Installing printer drivers') - printer_ppd = submission.install_printer_ppd(printer_uri) - logger.info('Setting up printer') - submission.setup_printer(printer_uri, printer_ppd) - logger.info('Printing files') - submission.print_all_files() - elif submission.archive_metadata.export_method == "printer-test": - # Prints a test page to ensure the printer is functional - printer_uri = submission.get_printer_uri() - printer_ppd = submission.install_printer_ppd(printer_uri) - submission.setup_printer(printer_uri, printer_ppd) - submission.print_test_page() - else: + if not submission.archive_metadata.is_valid(): submission.exit_gracefully(ExportStatus.ERROR_ARCHIVE_METADATA.value) + + if submission.archive_metadata.export_method == "usb-test": + action = USBTestAction(submission) + elif submission.archive_metadata.export_method == "disk": + action = USBExportAction(submission) + elif submission.archive_metadata.export_method == "disk-test": + action = USBDiskTestAction(submission) + elif submission.archive_metadata.export_method == "printer": + action = PrintExportAction(submission) + elif submission.archive_metadata.export_method == "printer-test": + action = PrintTestPageAction(submission) + + action.run() diff --git a/securedrop_export/print/__init__.py b/securedrop_export/print/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py new file mode 100644 index 000000000..f58ff4cd0 --- /dev/null +++ b/securedrop_export/print/actions.py @@ -0,0 +1,229 @@ +import logging +import os +import signal +import subprocess +import time + +from securedrop_export.exceptions import ExportStatus, handler, TimeoutException +from securedrop_export.export import ExportAction + + +PRINTER_NAME = "sdw-printer" 
+PRINTER_WAIT_TIMEOUT = 60 +BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" +BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" +LASERJET_DRIVER = "/usr/share/cups/drv/hpcups.drv" +LASERJET_PPD = "/usr/share/cups/model/hp-laserjet_6l.ppd" + +logger = logging.getLogger(__name__) + + +class PrintAction(ExportAction): + def __init__(self, submission): + self.submission = submission + self.printer_name = PRINTER_NAME + self.printer_wait_timeout = PRINTER_WAIT_TIMEOUT + + def run(self) -> None: + """Run logic""" + raise NotImplementedError + + def wait_for_print(self): + # use lpstat to ensure the job was fully transfered to the printer + # returns True if print was successful, otherwise will throw exceptions + signal.signal(signal.SIGALRM, handler) + signal.alarm(self.printer_wait_timeout) + printer_idle_string = "printer {} is idle".format(self.printer_name) + while True: + try: + logger.info('Running lpstat waiting for printer {}'.format(self.printer_name)) + output = subprocess.check_output(["lpstat", "-p", self.printer_name]) + if printer_idle_string in output.decode("utf-8"): + logger.info('Print completed') + return True + else: + time.sleep(5) + except subprocess.CalledProcessError: + self.submission.exit_gracefully(ExportStatus.ERROR_PRINT.value) + except TimeoutException: + logger.error('Timeout waiting for printer {}'.format(self.printer_name)) + self.submission.exit_gracefully(ExportStatus.ERROR_PRINT.value) + return True + + def get_printer_uri(self): + # Get the URI via lpinfo and only accept URIs of supported printers + printer_uri = "" + try: + output = subprocess.check_output(["sudo", "lpinfo", "-v"]) + except subprocess.CalledProcessError: + self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_URI.value) + + # fetch the usb printer uri + for line in output.split(): + if "usb://" in line.decode("utf-8"): + printer_uri = line.decode("utf-8") + logger.info('lpinfo usb printer: {}'.format(printer_uri)) + + # verify that the printer is supported, 
else exit + if printer_uri == "": + # No usb printer is connected + logger.info('No usb printers connected') + self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) + elif not any(x in printer_uri for x in ("Brother", "LaserJet")): + # printer url is a make that is unsupported + logger.info('Printer {} is unsupported'.format(printer_uri)) + self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + + logger.info('Printer {} is supported'.format(printer_uri)) + return printer_uri + + def install_printer_ppd(self, uri): + if not any(x in uri for x in ("Brother", "LaserJet")): + logger.error("Cannot install printer ppd for unsupported printer: {}".format(uri)) + self.submission.exit_gracefully(msg=ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + return + + if "Brother" in uri: + printer_driver = BRLASER_DRIVER + printer_ppd = BRLASER_PPD + elif "LaserJet" in uri: + printer_driver = LASERJET_DRIVER + printer_ppd = LASERJET_PPD + + # Some drivers don't come with ppd files pre-compiled, we must compile them + self.submission.safe_check_call( + command=[ + "sudo", + "ppdc", + printer_driver, + "-d", + "/usr/share/cups/model/", + ], + error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value + ) + return printer_ppd + + def setup_printer(self, printer_uri, printer_ppd): + # Add the printer using lpadmin + logger.info('Setting up printer name {}'.format(self.printer_name)) + self.submission.safe_check_call( + command=[ + "sudo", + "lpadmin", + "-p", + self.printer_name, + "-v", + printer_uri, + "-P", + printer_ppd, + ], + error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) + # Activate the printer so that it can receive jobs + logger.info('Activating printer {}'.format(self.printer_name)) + self.submission.safe_check_call( + command=["sudo", "lpadmin", "-p", self.printer_name], + error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) + # worksaround for version of lpadmin/cups in debian buster: + # see 
https://forums.developer.apple.com/thread/106112 + self.submission.safe_check_call( + command=["sudo", "cupsaccept", self.printer_name], + error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) + # A non-zero return code is expected here, but the command is required + # and works as expected. + command = ["sudo", "cupsenable", self.printer_name] + try: + subprocess.check_call(command) + except subprocess.CalledProcessError: + pass + + # Allow user to print (without using sudo) + logger.info('Allow user to print {}'.format(self.printer_name)) + self.submission.safe_check_call( + command=["sudo", "lpadmin", "-p", self.printer_name, "-u", "allow:user"], + error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + ) + + def print_test_page(self): + logger.info('Printing test page') + self.print_file("/usr/share/cups/data/testprint") + self.submission.popup_message("Printing test page") + + def print_all_files(self): + files_path = os.path.join(self.submission.tmpdir, "export_data/") + files = os.listdir(files_path) + print_count = 0 + for f in files: + file_path = os.path.join(files_path, f) + self.print_file(file_path) + print_count += 1 + msg = "Printing document {} of {}".format(print_count, len(files)) + self.submission.popup_message(msg) + + def is_open_office_file(self, filename): + OPEN_OFFICE_FORMATS = [ + ".doc", + ".docx", + ".xls", + ".xlsx", + ".ppt", + ".pptx", + ".odt", + ".ods", + ".odp", + ] + for extension in OPEN_OFFICE_FORMATS: + if os.path.basename(filename).endswith(extension): + return True + return False + + def print_file(self, file_to_print): + # If the file to print is an (open)office document, we need to call unoconf to + # convert the file to pdf as printer drivers do not support this format + if self.is_open_office_file(file_to_print): + logger.info('Converting Office document to pdf') + folder = os.path.dirname(file_to_print) + converted_filename = file_to_print + ".pdf" + converted_path = os.path.join(folder, converted_filename) + 
self.submission.safe_check_call( + command=["unoconv", "-o", converted_path, file_to_print], + error_message=ExportStatus.ERROR_PRINT.value + ) + file_to_print = converted_path + + logger.info('Sending file to printer {}:{}'.format(self.printer_name, file_to_print)) + self.submission.safe_check_call( + command=["xpp", "-P", self.printer_name, file_to_print], + error_message=ExportStatus.ERROR_PRINT.value + ) + + +class PrintExportAction(PrintAction): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def run(self): + logger.info('Export archive is printer') + # prints all documents in the archive + logger.info('Searching for printer') + printer_uri = self.get_printer_uri() + logger.info('Installing printer drivers') + printer_ppd = self.install_printer_ppd(printer_uri) + logger.info('Setting up printer') + self.setup_printer(printer_uri, printer_ppd) + logger.info('Printing files') + self.print_all_files() + + +class PrintTestPageAction(PrintAction): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def run(self): + # Prints a test page to ensure the printer is functional + printer_uri = self.get_printer_uri() + printer_ppd = self.install_printer_ppd(printer_uri) + self.setup_printer(printer_uri, printer_ppd) + self.print_test_page() diff --git a/securedrop_export/usb/__init__.py b/securedrop_export/usb/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/securedrop_export/usb/actions.py b/securedrop_export/usb/actions.py new file mode 100644 index 000000000..c2479e895 --- /dev/null +++ b/securedrop_export/usb/actions.py @@ -0,0 +1,230 @@ +import logging +import os +import subprocess +import sys + +from typing import List + +from securedrop_export.export import ExportAction +from securedrop_export.exceptions import ExportStatus + +MOUNTPOINT = "/media/usb" +ENCRYPTED_DEVICE = "encrypted_volume" + +logger = logging.getLogger(__name__) + + +class USBAction(ExportAction): + def __init__(self, 
submission): + self.submission = submission + self.device = None # Optional[str] + self.mountpoint = MOUNTPOINT + self.encrypted_device = ENCRYPTED_DEVICE + + def run(self) -> None: + """Run logic""" + raise NotImplementedError + + def check_usb_connected(self, exit=False) -> None: + usb_devices = self._get_connected_usbs() + + if len(usb_devices) == 0: + logger.info('0 USB devices connected') + self.submission.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) + elif len(usb_devices) == 1: + logger.info('1 USB device connected') + self.device = usb_devices[0] + if exit: + self.submission.exit_gracefully(ExportStatus.USB_CONNECTED.value) + elif len(usb_devices) > 1: + logger.info('>1 USB devices connected') + # Return generic error until freedomofpress/securedrop-export/issues/25 + self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) + + def _get_connected_usbs(self) -> List[str]: + logger.info('Performing usb preflight') + # List all block devices attached to VM that are disks and not partitions. + try: + lsblk = subprocess.Popen(["lsblk", "-o", "NAME,TYPE"], stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + grep = subprocess.Popen(["grep", "disk"], stdin=lsblk.stdout, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + command_output = grep.stdout.readlines() + + # The first word in each element of the command_output list is the device name + attached_devices = [x.decode('utf8').split()[0] for x in command_output] + except subprocess.CalledProcessError: + self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) + + # Determine which are USBs by selecting those block devices that are removable disks. 
+ usb_devices = [] + for device in attached_devices: + try: + removable = subprocess.check_output( + ["cat", "/sys/class/block/{}/removable".format(device)], + stderr=subprocess.PIPE) + is_removable = int(removable.decode('utf8').strip()) + except subprocess.CalledProcessError: + is_removable = False + + if is_removable: + usb_devices.append("/dev/{}".format(device)) + + return usb_devices + + def set_extracted_device_name(self): + try: + device_and_partitions = subprocess.check_output( + ["lsblk", "-o", "TYPE", "--noheadings", self.device], stderr=subprocess.PIPE) + + # we don't support multiple partitions + partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') + if partition_count > 1: + logger.debug("multiple partitions not supported") + self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) + + # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted + self.device = self.device if partition_count == 0 else self.device + '1' + except subprocess.CalledProcessError: + self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) + + def check_luks_volume(self): + # cryptsetup isLuks returns 0 if the device is a luks volume + # subprocess with throw if the device is not luks (rc !=0) + logger.info('Checking if volume is luks-encrypted') + self.set_extracted_device_name() + logger.debug("checking if {} is luks encrypted".format(self.device)) + self.submission.safe_check_call( + command=["sudo", "cryptsetup", "isLuks", self.device], + error_message=ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + ) + self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTED.value) + + def unlock_luks_volume(self, encryption_key): + try: + # get the encrypted device name + self.set_extracted_device_name() + luks_header = subprocess.check_output(["sudo", "cryptsetup", "luksDump", self.device]) + luks_header_list = luks_header.decode('utf-8').split('\n') + for line in luks_header_list: 
+ items = line.split('\t') + if 'UUID' in items[0]: + self.encrypted_device = 'luks-' + items[1] + + # the luks device is already unlocked + if os.path.exists(os.path.join('/dev/mapper/', self.encrypted_device)): + logger.debug('Device already unlocked') + return + + logger.debug('Unlocking luks volume {}'.format(self.encrypted_device)) + p = subprocess.Popen( + ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + logger.debug('Passing key') + p.communicate(input=str.encode(encryption_key, "utf-8")) + rc = p.returncode + if rc != 0: + logger.error('Bad phassphrase for {}'.format(self.encrypted_device)) + self.submission.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) + except subprocess.CalledProcessError: + self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) + + def mount_volume(self): + # If the drive is already mounted then we don't need to mount it again + output = subprocess.check_output( + ["lsblk", "-o", "MOUNTPOINT", "--noheadings", self.device]) + mountpoint = output.decode('utf-8').strip() + if mountpoint: + logger.debug('The device is already mounted') + self.mountpoint = mountpoint + return + + # mount target not created, create folder + if not os.path.exists(self.mountpoint): + self.submission.safe_check_call( + command=["sudo", "mkdir", self.mountpoint], + error_message=ExportStatus.ERROR_USB_MOUNT + ) + + mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) + logger.info('Mounting {}'.format(mapped_device_path)) + self.submission.safe_check_call( + command=["sudo", "mount", mapped_device_path, self.mountpoint], + error_message=ExportStatus.ERROR_USB_MOUNT.value + ) + self.submission.safe_check_call( + command=["sudo", "chown", "-R", "user:user", self.mountpoint], + error_message=ExportStatus.ERROR_USB_MOUNT.value + ) + + def copy_submission(self): + # move files to drive (overwrites files with same 
filename) and unmount drive + # we don't use safe_check_call here because we must lock and + # unmount the drive as part of the finally block + try: + target_path = os.path.join(self.mountpoint, self.submission.target_dirname) + subprocess.check_call(["mkdir", target_path]) + export_data = os.path.join(self.submission.tmpdir, "export_data/") + logger.info('Copying file to {}'.format(self.submission.target_dirname)) + subprocess.check_call(["cp", "-r", export_data, target_path]) + logger.info('File copied successfully to {}'.format(self.submission.target_dirname)) + self.submission.popup_message("Files exported successfully to disk.") + except (subprocess.CalledProcessError, OSError): + self.submission.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) + finally: + logger.info('Syncing filesystems') + subprocess.check_call(["sync"]) + + if os.path.exists(self.mountpoint): + logger.info('Unmounting drive from {}'.format(self.mountpoint)) + subprocess.check_call(["sudo", "umount", self.mountpoint]) + + if os.path.exists(os.path.join('/dev/mapper', self.encrypted_device)): + logger.info('Locking luks volume {}'.format(self.encrypted_device)) + subprocess.check_call( + ["sudo", "cryptsetup", "luksClose", self.encrypted_device] + ) + + logger.info('Deleting temporary directory {}'.format(self.submission.tmpdir)) + subprocess.check_call(["rm", "-rf", self.submission.tmpdir]) + sys.exit(0) + + +class USBTestAction(USBAction): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def run(self): + logger.info('Export archive is usb-test') + self.check_usb_connected(exit=True) + + +class USBDiskTestAction(USBAction): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def run(self): + logger.info('Export archive is disk-test') + # check_usb_connected looks for the drive, sets the drive to use + self.check_usb_connected() + self.check_luks_volume() + + +class USBExportAction(USBAction): + def __init__(self, *args, **kwargs): 
+ super().__init__(*args, **kwargs) + + def run(self): + logger.info('Export archive is disk') + # check_usb_connected looks for the drive, sets the drive to use + self.check_usb_connected() + logger.info('Unlocking volume') + # exports all documents in the archive to luks-encrypted volume + self.unlock_luks_volume(self.submission.archive_metadata.encryption_key) + logger.info('Mounting volume') + self.mount_volume() + logger.info('Copying submission to drive') + self.copy_submission() diff --git a/tests/print/__init__.py b/tests/print/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/print/test_actions.py b/tests/print/test_actions.py new file mode 100644 index 000000000..c500e8f01 --- /dev/null +++ b/tests/print/test_actions.py @@ -0,0 +1,127 @@ +from unittest import mock + +import os +import pytest +from subprocess import CalledProcessError +import sys + +from securedrop_export import export +from securedrop_export.print.actions import PrintExportAction + + +SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa +SAMPLE_OUTPUT_BROTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa +SAMPLE_OUTPUT_LASERJET_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000\nnetwork lpd" # noqa +TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) +def test_get_good_printer_uri_laserjet(mocked_call): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + + result = action.get_printer_uri() + + assert result == 
"usb://Brother/HL-L2320D%20series?serial=A00000A000000" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) +def test_get_good_printer_uri_brother(mocked_call): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + + result = action.get_printer_uri() + assert result == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) +def test_get_bad_printer_uri(mocked_call, capsys, mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + expected_message = "ERROR_PRINTER_NOT_FOUND" + assert export.ExportStatus.ERROR_PRINTER_NOT_FOUND.value == expected_message + mocked_exit = mocker.patch.object(submission, "exit_gracefully", + side_effect=lambda x: sys.exit(0)) + + with pytest.raises(SystemExit): + action.get_printer_uri() + + mocked_exit.assert_called_once_with(expected_message) + + +@pytest.mark.parametrize('open_office_paths', [ + "/tmp/whatver/thisisadoc.doc" + "/home/user/Downloads/thisisadoc.xlsx" + "/home/user/Downloads/file.odt" + "/tmp/tmpJf83j9/secret.pptx" +]) +def test_is_open_office_file(capsys, open_office_paths): + submission = export.SDExport("", TEST_CONFIG) + action = PrintExportAction(submission) + assert action.is_open_office_file(open_office_paths) + + +@pytest.mark.parametrize('open_office_paths', [ + "/tmp/whatver/thisisadoc.doccc" + "/home/user/Downloads/thisisa.xlsx.zip" + "/home/user/Downloads/file.odz" + "/tmp/tmpJf83j9/secret.gpg" +]) +def test_is_not_open_office_file(capsys, open_office_paths): + submission = export.SDExport("", TEST_CONFIG) + action = PrintExportAction(submission) + assert not action.is_open_office_file(open_office_paths) + + +@mock.patch("subprocess.check_call") +def test_install_printer_ppd_laserjet(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + ppd 
= action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") + assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" + + +@mock.patch("subprocess.check_call") +def test_install_printer_ppd_brother(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + ppd = action.install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") + assert ppd == "/usr/share/cups/model/br7030.ppd" + + +def test_install_printer_ppd_error_no_driver(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") + + assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_DRIVER_UNAVAILABLE" + assert mocked_exit.mock_calls[0][2]['e'] is None + + +def test_install_printer_ppd_error_not_supported(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + action.install_printer_ppd("usb://Not/Supported?serial=A00000A000000") + + assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_NOT_SUPPORTED" + + +def test_setup_printer_error(mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = PrintExportAction(submission) + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + action.setup_printer( + "usb://Brother/HL-L2320D%20series?serial=A00000A000000", + "/usr/share/cups/model/br7030.ppd" + ) + + assert 
mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_INSTALL" + assert mocked_exit.mock_calls[0][2]['e'] is None diff --git a/tests/test_export.py b/tests/test_export.py index 9579d4327..95665761f 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -3,20 +3,10 @@ import os import pytest import subprocess # noqa: F401 -import sys import tempfile -from subprocess import CalledProcessError from securedrop_export import export -SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa -SAMPLE_OUTPUT_BROTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa -SAMPLE_OUTPUT_LASERJET_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000\nnetwork lpd" # noqa - -SAMPLE_OUTPUT_NO_PART = b"disk\ncrypt" # noqa -SAMPLE_OUTPUT_ONE_PART = b"disk\npart\ncrypt" # noqa -SAMPLE_OUTPUT_MULTI_PART = b"disk\npart\npart\npart\ncrypt" # noqa -SAMPLE_OUTPUT_USB = b"/dev/sda" # noqa TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") @@ -140,232 +130,6 @@ def test_popup_message(mocked_call): ]) -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) -def test_get_good_printer_uri_laserjet(mocked_call): - submission = export.SDExport("testfile", TEST_CONFIG) - - result = submission.get_printer_uri() - - assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) -def test_get_good_printer_uri_brother(mocked_call): - 
submission = export.SDExport("testfile", TEST_CONFIG) - result = submission.get_printer_uri() - assert result == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) -def test_get_bad_printer_uri(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "ERROR_PRINTER_NOT_FOUND" - assert export.ExportStatus.ERROR_PRINTER_NOT_FOUND.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", - side_effect=lambda x: sys.exit(0)) - - with pytest.raises(SystemExit): - submission.get_printer_uri() - - mocked_exit.assert_called_once_with(expected_message) - - -@pytest.mark.parametrize('open_office_paths', [ - "/tmp/whatver/thisisadoc.doc" - "/home/user/Downloads/thisisadoc.xlsx" - "/home/user/Downloads/file.odt" - "/tmp/tmpJf83j9/secret.pptx" -]) -def test_is_open_office_file(capsys, open_office_paths): - submission = export.SDExport("", TEST_CONFIG) - assert submission.is_open_office_file(open_office_paths) - - -@pytest.mark.parametrize('open_office_paths', [ - "/tmp/whatver/thisisadoc.doccc" - "/home/user/Downloads/thisisa.xlsx.zip" - "/home/user/Downloads/file.odz" - "/tmp/tmpJf83j9/secret.gpg" -]) -def test_is_not_open_office_file(capsys, open_office_paths): - submission = export.SDExport("", TEST_CONFIG) - assert not submission.is_open_office_file(open_office_paths) - - -def test_usb_precheck_disconnected(capsys, mocker): - """Tests the scenario where there are disks connected, but none of them are USB""" - submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = "USB_NOT_CONNECTED" - assert export.ExportStatus.USB_NOT_CONNECTED.value == expected_message - - # Popen call returns lsblk output - command_output = mock.MagicMock() - command_output.stdout = mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock(return_value=[b"sda disk\n", b"sdb disk\n"]) - 
mocker.patch("subprocess.Popen", return_value=command_output) - - # check_output returns removable status - mocker.patch("subprocess.check_output", return_value=[b'0\n', b'0\n']) - - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - mocker.patch("subprocess.check_output", - side_effect=CalledProcessError(1, 'check_output')) - - submission.check_usb_connected(exit=True) - - mocked_exit.assert_called_once_with(expected_message) - assert submission.device is None - - -def test_usb_precheck_connected(capsys, mocker): - """Tests the scenario where there is one USB connected""" - submission = export.SDExport("testfile", TEST_CONFIG) - - # Popen call returns lsblk output - command_output = mock.MagicMock() - command_output.stdout = mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n"]) - mocker.patch("subprocess.Popen", return_value=command_output) - - # check_output returns removable status - mocker.patch("subprocess.check_output", return_value=b"1\n") - - expected_message = "USB_CONNECTED" - assert export.ExportStatus.USB_CONNECTED.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - submission.check_usb_connected(exit=True) - - mocked_exit.assert_called_once_with(expected_message) - assert submission.device == "/dev/sdb" - - -def test_usb_precheck_multiple_devices_connected(capsys, mocker): - """Tests the scenario where there are multiple USB drives connected""" - submission = export.SDExport("testfile", TEST_CONFIG) - - # Popen call returns lsblk output - command_output = mock.MagicMock() - command_output.stdout = mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n", b"sdc disk\n"]) - mocker.patch("subprocess.Popen", return_value=command_output) - - # check_output returns removable status - mocker.patch("subprocess.check_output", return_value=b"1\n") - - expected_message = "ERROR_GENERIC" - 
assert export.ExportStatus.ERROR_GENERIC.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - submission.check_usb_connected(exit=True) - - mocked_exit.assert_called_once_with(expected_message) - assert submission.device is None - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) -def test_extract_device_name_no_part(mocked_call, capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.device = "/dev/sda" - - submission.set_extracted_device_name() - - assert submission.device == "/dev/sda" - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -def test_extract_device_name_single_part(mocked_call, capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.device = "/dev/sda" - - submission.set_extracted_device_name() - - assert submission.device == "/dev/sda1" - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) -def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.device = "/dev/sda" - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - - submission.set_extracted_device_name() - - mocked_exit.assert_called_once_with(expected_message) - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) -@mock.patch("subprocess.check_call", return_value=0) -def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - expected_message = export.ExportStatus.USB_ENCRYPTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - submission.check_luks_volume() - - mocked_exit.assert_called_once_with(expected_message) - - -@mock.patch("subprocess.check_output", 
return_value=SAMPLE_OUTPUT_ONE_PART) -@mock.patch("subprocess.check_call", return_value=0) -def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.device = "/dev/sda" - expected_message = export.ExportStatus.USB_ENCRYPTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - submission.check_luks_volume() - - mocked_exit.assert_called_once_with(expected_message) - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) -def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.device = "/dev/sda" - expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - - # Here we need to mock the exit_gracefully method with a side effect otherwise - # program execution will continue after exit_gracefully and exit_gracefully - # may be called a second time. 
- mocked_exit = mocker.patch.object(submission, "exit_gracefully", - side_effect=lambda x: sys.exit(0)) - - # Output of `lsblk -o TYPE --noheadings DEVICE_NAME` when a drive has multiple - # partitions - multi_partition_lsblk_output = b"disk\npart\npart\n" - mocker.patch("subprocess.check_call", return_value=0) - mocker.patch("subprocess.check_output", return_value=multi_partition_lsblk_output) - - with pytest.raises(SystemExit): - submission.check_luks_volume() - - mocked_exit.assert_called_once_with(expected_message) - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.device = "/dev/sda" - expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" - assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - - mocked_exit = mocker.patch.object(submission, "exit_gracefully", - side_effect=lambda msg, e: sys.exit(0)) - - single_partition_lsblk_output = b"disk\npart\n" - mocker.patch("subprocess.check_output", return_value=single_partition_lsblk_output) - mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) - - with pytest.raises(SystemExit): - submission.check_luks_volume() - - assert mocked_exit.mock_calls[0][2]['msg'] == expected_message - assert mocked_exit.mock_calls[0][2]['e'] is None - - def test_safe_check_call(capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) submission.safe_check_call(['ls'], "this will work") @@ -376,52 +140,3 @@ def test_safe_check_call(capsys, mocker): assert mocked_exit.mock_calls[0][2]['msg'] == expected_message assert mocked_exit.mock_calls[0][2]['e'] is None - - -@mock.patch("subprocess.check_call") -def test_install_printer_ppd_laserjet(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - ppd = 
submission.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") - assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" - - -@mock.patch("subprocess.check_call") -def test_install_printer_ppd_brother(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - ppd = submission.install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") - assert ppd == "/usr/share/cups/model/br7030.ppd" - - -def test_install_printer_ppd_error_no_driver(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) - - submission.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") - - assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_DRIVER_UNAVAILABLE" - assert mocked_exit.mock_calls[0][2]['e'] is None - - -def test_install_printer_ppd_error_not_supported(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) - - submission.install_printer_ppd("usb://Not/Supported?serial=A00000A000000") - - assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_NOT_SUPPORTED" - - -def test_setup_printer_error(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) - - submission.setup_printer( - "usb://Brother/HL-L2320D%20series?serial=A00000A000000", - "/usr/share/cups/model/br7030.ppd" - ) - - assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_INSTALL" - assert mocked_exit.mock_calls[0][2]['e'] is None diff --git a/tests/test_main.py b/tests/test_main.py new file 
mode 100644 index 000000000..d1e43d251 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,3 @@ +from securedrop_export.main import __main__ # noqa: F401 +# This import ensures at least the imports in main.__main__ +# are executed during a test run diff --git a/tests/usb/__init__.py b/tests/usb/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/usb/test_actions.py b/tests/usb/test_actions.py new file mode 100644 index 000000000..011b9386b --- /dev/null +++ b/tests/usb/test_actions.py @@ -0,0 +1,203 @@ +from unittest import mock + +import os +import pytest +import sys + +from subprocess import CalledProcessError + +from securedrop_export import export +from securedrop_export.usb.actions import USBExportAction, USBTestAction + +TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") +SAMPLE_OUTPUT_NO_PART = b"disk\ncrypt" # noqa +SAMPLE_OUTPUT_ONE_PART = b"disk\npart\ncrypt" # noqa +SAMPLE_OUTPUT_MULTI_PART = b"disk\npart\npart\npart\ncrypt" # noqa +SAMPLE_OUTPUT_USB = b"/dev/sda" # noqa + + +def test_usb_precheck_disconnected(capsys, mocker): + """Tests the scenario where there are disks connected, but none of them are USB""" + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBTestAction(submission) + expected_message = "USB_NOT_CONNECTED" + assert export.ExportStatus.USB_NOT_CONNECTED.value == expected_message + + # Popen call returns lsblk output + command_output = mock.MagicMock() + command_output.stdout = mock.MagicMock() + command_output.stdout.readlines = mock.MagicMock(return_value=[b"sda disk\n", b"sdb disk\n"]) + mocker.patch("subprocess.Popen", return_value=command_output) + + # check_output returns removable status + mocker.patch("subprocess.check_output", return_value=[b'0\n', b'0\n']) + + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + + mocker.patch("subprocess.check_output", + side_effect=CalledProcessError(1, 'check_output')) + + 
action.check_usb_connected(exit=True) + + mocked_exit.assert_called_once_with(expected_message) + assert action.device is None + + +def test_usb_precheck_connected(capsys, mocker): + """Tests the scenario where there is one USB connected""" + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBTestAction(submission) + + # Popen call returns lsblk output + command_output = mock.MagicMock() + command_output.stdout = mock.MagicMock() + command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n"]) + mocker.patch("subprocess.Popen", return_value=command_output) + + # check_output returns removable status + mocker.patch("subprocess.check_output", return_value=b"1\n") + + expected_message = "USB_CONNECTED" + assert export.ExportStatus.USB_CONNECTED.value == expected_message + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + + action.check_usb_connected(exit=True) + + mocked_exit.assert_called_once_with(expected_message) + assert action.device == "/dev/sdb" + + +def test_usb_precheck_multiple_devices_connected(capsys, mocker): + """Tests the scenario where there are multiple USB drives connected""" + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBTestAction(submission) + + # Popen call returns lsblk output + command_output = mock.MagicMock() + command_output.stdout = mock.MagicMock() + command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n", b"sdc disk\n"]) + mocker.patch("subprocess.Popen", return_value=command_output) + + # check_output returns removable status + mocker.patch("subprocess.check_output", return_value=b"1\n") + + expected_message = "ERROR_GENERIC" + assert export.ExportStatus.ERROR_GENERIC.value == expected_message + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + + action.check_usb_connected(exit=True) + + mocked_exit.assert_called_once_with(expected_message) + assert action.device is None + + 
+@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) +def test_extract_device_name_no_part(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBExportAction(submission) + + action.device = "/dev/sda" + + action.set_extracted_device_name() + + assert action.device == "/dev/sda" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) +def test_extract_device_name_single_part(mocked_call, capsys): + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBExportAction(submission) + + action.device = "/dev/sda" + + action.set_extracted_device_name() + + assert action.device == "/dev/sda1" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) +def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBExportAction(submission) + action.device = "/dev/sda" + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + + action.set_extracted_device_name() + + mocked_exit.assert_called_once_with(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) +@mock.patch("subprocess.check_call", return_value=0) +def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBExportAction(submission) + + expected_message = export.ExportStatus.USB_ENCRYPTED.value + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + + action.check_luks_volume() + + mocked_exit.assert_called_once_with(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) +@mock.patch("subprocess.check_call", return_value=0) +def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): + submission = 
export.SDExport("testfile", TEST_CONFIG) + action = USBExportAction(submission) + action.device = "/dev/sda" + expected_message = export.ExportStatus.USB_ENCRYPTED.value + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + + action.check_luks_volume() + + mocked_exit.assert_called_once_with(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) +def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBExportAction(submission) + action.device = "/dev/sda" + expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + + # Here we need to mock the exit_gracefully method with a side effect otherwise + # program execution will continue after exit_gracefully and exit_gracefully + # may be called a second time. + mocked_exit = mocker.patch.object(submission, "exit_gracefully", + side_effect=lambda x: sys.exit(0)) + + # Output of `lsblk -o TYPE --noheadings DEVICE_NAME` when a drive has multiple + # partitions + multi_partition_lsblk_output = b"disk\npart\npart\n" + mocker.patch("subprocess.check_call", return_value=0) + mocker.patch("subprocess.check_output", return_value=multi_partition_lsblk_output) + + with pytest.raises(SystemExit): + action.check_luks_volume() + + mocked_exit.assert_called_once_with(expected_message) + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) +def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = USBExportAction(submission) + action.device = "/dev/sda" + expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" + assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + + mocked_exit = mocker.patch.object(submission, "exit_gracefully", + side_effect=lambda msg, e: sys.exit(0)) + + single_partition_lsblk_output = b"disk\npart\n" + 
mocker.patch("subprocess.check_output", return_value=single_partition_lsblk_output) + mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + + with pytest.raises(SystemExit): + action.check_luks_volume() + + assert mocked_exit.mock_calls[0][2]['msg'] == expected_message + assert mocked_exit.mock_calls[0][2]['e'] is None From b1bb5301c9f3114a3dd40aa888962e0a41fe3aad Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 18 Dec 2019 19:10:48 -0500 Subject: [PATCH 154/352] app: rename USB*Actions to Disk*Actions this is more accurate since there are no _strict_ USB requirements in the current export code: removable block devices of other kinds can be exported to --- securedrop_export/{usb => disk}/__init__.py | 0 securedrop_export/{usb => disk}/actions.py | 8 ++++---- securedrop_export/main.py | 6 +++--- tests/{usb => disk}/__init__.py | 0 tests/{usb => disk}/test_actions.py | 22 ++++++++++----------- 5 files changed, 18 insertions(+), 18 deletions(-) rename securedrop_export/{usb => disk}/__init__.py (100%) rename securedrop_export/{usb => disk}/actions.py (98%) rename tests/{usb => disk}/__init__.py (100%) rename tests/{usb => disk}/test_actions.py (93%) diff --git a/securedrop_export/usb/__init__.py b/securedrop_export/disk/__init__.py similarity index 100% rename from securedrop_export/usb/__init__.py rename to securedrop_export/disk/__init__.py diff --git a/securedrop_export/usb/actions.py b/securedrop_export/disk/actions.py similarity index 98% rename from securedrop_export/usb/actions.py rename to securedrop_export/disk/actions.py index c2479e895..02b09f2c3 100644 --- a/securedrop_export/usb/actions.py +++ b/securedrop_export/disk/actions.py @@ -14,7 +14,7 @@ logger = logging.getLogger(__name__) -class USBAction(ExportAction): +class DiskAction(ExportAction): def __init__(self, submission): self.submission = submission self.device = None # Optional[str] @@ -193,7 +193,7 @@ def copy_submission(self): sys.exit(0) -class 
USBTestAction(USBAction): +class USBTestAction(DiskAction): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -202,7 +202,7 @@ def run(self): self.check_usb_connected(exit=True) -class USBDiskTestAction(USBAction): +class DiskTestAction(DiskAction): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -213,7 +213,7 @@ def run(self): self.check_luks_volume() -class USBExportAction(USBAction): +class DiskExportAction(DiskAction): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 0696af7a8..b68dce170 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -3,7 +3,7 @@ from securedrop_export import export from securedrop_export.exceptions import ExportStatus from securedrop_export.print.actions import PrintExportAction, PrintTestPageAction -from securedrop_export.usb.actions import USBDiskTestAction, USBExportAction, USBTestAction +from securedrop_export.disk.actions import DiskTestAction, DiskExportAction, USBTestAction logger = logging.getLogger(__name__) @@ -22,9 +22,9 @@ def __main__(submission): if submission.archive_metadata.export_method == "usb-test": action = USBTestAction(submission) elif submission.archive_metadata.export_method == "disk": - action = USBExportAction(submission) + action = DiskExportAction(submission) elif submission.archive_metadata.export_method == "disk-test": - action = USBDiskTestAction(submission) + action = DiskTestAction(submission) elif submission.archive_metadata.export_method == "printer": action = PrintExportAction(submission) elif submission.archive_metadata.export_method == "printer-test": diff --git a/tests/usb/__init__.py b/tests/disk/__init__.py similarity index 100% rename from tests/usb/__init__.py rename to tests/disk/__init__.py diff --git a/tests/usb/test_actions.py b/tests/disk/test_actions.py similarity index 93% rename from tests/usb/test_actions.py rename to 
tests/disk/test_actions.py index 011b9386b..c34998598 100644 --- a/tests/usb/test_actions.py +++ b/tests/disk/test_actions.py @@ -7,7 +7,7 @@ from subprocess import CalledProcessError from securedrop_export import export -from securedrop_export.usb.actions import USBExportAction, USBTestAction +from securedrop_export.disk.actions import DiskExportAction, DiskTestAction TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") SAMPLE_OUTPUT_NO_PART = b"disk\ncrypt" # noqa @@ -19,7 +19,7 @@ def test_usb_precheck_disconnected(capsys, mocker): """Tests the scenario where there are disks connected, but none of them are USB""" submission = export.SDExport("testfile", TEST_CONFIG) - action = USBTestAction(submission) + action = DiskTestAction(submission) expected_message = "USB_NOT_CONNECTED" assert export.ExportStatus.USB_NOT_CONNECTED.value == expected_message @@ -46,7 +46,7 @@ def test_usb_precheck_disconnected(capsys, mocker): def test_usb_precheck_connected(capsys, mocker): """Tests the scenario where there is one USB connected""" submission = export.SDExport("testfile", TEST_CONFIG) - action = USBTestAction(submission) + action = DiskTestAction(submission) # Popen call returns lsblk output command_output = mock.MagicMock() @@ -70,7 +70,7 @@ def test_usb_precheck_connected(capsys, mocker): def test_usb_precheck_multiple_devices_connected(capsys, mocker): """Tests the scenario where there are multiple USB drives connected""" submission = export.SDExport("testfile", TEST_CONFIG) - action = USBTestAction(submission) + action = DiskTestAction(submission) # Popen call returns lsblk output command_output = mock.MagicMock() @@ -94,7 +94,7 @@ def test_usb_precheck_multiple_devices_connected(capsys, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) def test_extract_device_name_no_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - action = USBExportAction(submission) + action = 
DiskExportAction(submission) action.device = "/dev/sda" @@ -106,7 +106,7 @@ def test_extract_device_name_no_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) def test_extract_device_name_single_part(mocked_call, capsys): submission = export.SDExport("testfile", TEST_CONFIG) - action = USBExportAction(submission) + action = DiskExportAction(submission) action.device = "/dev/sda" @@ -118,7 +118,7 @@ def test_extract_device_name_single_part(mocked_call, capsys): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) - action = USBExportAction(submission) + action = DiskExportAction(submission) action.device = "/dev/sda" mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value @@ -132,7 +132,7 @@ def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): @mock.patch("subprocess.check_call", return_value=0) def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) - action = USBExportAction(submission) + action = DiskExportAction(submission) expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) @@ -146,7 +146,7 @@ def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): @mock.patch("subprocess.check_call", return_value=0) def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) - action = USBExportAction(submission) + action = DiskExportAction(submission) action.device = "/dev/sda" expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) @@ -159,7 
+159,7 @@ def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) - action = USBExportAction(submission) + action = DiskExportAction(submission) action.device = "/dev/sda" expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value @@ -184,7 +184,7 @@ def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) - action = USBExportAction(submission) + action = DiskExportAction(submission) action.device = "/dev/sda" expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value From 55e90284c19decb3ae1dcba7cc70e8c6dbce76a5 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Wed, 18 Dec 2019 19:14:45 -0500 Subject: [PATCH 155/352] app: explain the signal handler in exceptions.py --- securedrop_export/exceptions.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index a3109e00f..1c14bc684 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -40,5 +40,9 @@ class TimeoutException(Exception): pass -def handler(s, f): +def handler(signum, frame): + """ + This is a signal handler used for raising timeouts: + https://docs.python.org/3/library/signal.html#signal.signal + """ raise TimeoutException("Timeout") From bb73ceaff4edeab984e53a44e7a4c1ccdeef2a40 Mon Sep 17 00:00:00 2001 From: John Hensley Date: Wed, 18 Dec 2019 19:57:56 -0500 Subject: [PATCH 156/352] Add quality control tools Added coverage, flake8, mypy, and a few Makefile targets 
for convenience. Replaced test+safety in CI with just `make check` which runs everything. --- .circleci/config.yml | 12 +----- Makefile | 40 ++++++++++++++++--- dev-requirements.in | 10 +++-- dev-requirements.txt | 95 ++++++++++++++++++++++++++++++++++++++------ requirements.txt | 2 +- setup.cfg | 9 +++++ 6 files changed, 136 insertions(+), 32 deletions(-) create mode 100644 setup.cfg diff --git a/.circleci/config.yml b/.circleci/config.yml index 73d3df004..76852d363 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -7,15 +7,7 @@ common-steps: virtualenv .venv source .venv/bin/activate pip install --require-hashes -r dev-requirements.txt - make test - - - &check_python_dependencies_for_vulns - run: - name: Check Python dependencies for CVEs - command: | - set -e - source .venv/bin/activate - make safety + make check - &install_packaging_dependencies run: @@ -81,7 +73,6 @@ jobs: steps: - checkout - *run_tests - - *check_python_dependencies_for_vulns test-buster: docker: @@ -89,7 +80,6 @@ jobs: steps: - checkout - *run_tests - - *check_python_dependencies_for_vulns workflows: version: 2 diff --git a/Makefile b/Makefile index 9b894c3b6..f00f0a7e1 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,13 @@ # Bandit is a static code analysis tool to detect security vulnerabilities in Python applications # https://wiki.openstack.org/wiki/Security/Projects/Bandit +.PHONY: all +all: help + .PHONY: bandit bandit: ## Run bandit with medium level excluding test-related folders pip install --upgrade pip && \ - pip install --upgrade bandit!=1.6.0 && \ - bandit -ll --recursive . 
--exclude tests,.venv + pip install --upgrade bandit!=1.6.0 && \ + bandit -ll --recursive securedrop_proxy .PHONY: safety safety: ## Runs `safety check` to check python dependencies for vulnerabilities @@ -16,16 +19,43 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities || exit 1; \ done +.PHONY: lint +lint: ## Run flake8 + @flake8 securedrop_proxy tests + +.PHONY: mypy +mypy: ## Run mypy static type checker + @mypy --ignore-missing-imports securedrop_proxy + + .PHONY: update-pip-requirements update-pip-requirements: ## Updates all Python requirements files via pip-compile. pip-compile --generate-hashes --output-file dev-requirements.txt dev-requirements.in requirements.in pip-compile --generate-hashes --output-file requirements.txt requirements.in .PHONY: test -test: - python -m unittest -v +test: clean .coverage ## Runs tests with coverage + +.coverage: + @coverage run --source securedrop_proxy -m unittest + +.PHONY: browse-coverage +browse-coverage: .coverage ## Generates and opens HTML coverage report + @coverage html + @xdg-open htmlcov/index.html 2>/dev/null || open htmlcov/index.html 2>/dev/null + +.PHONY: check +check: clean lint test mypy safety bandit ## Runs all tests and code checkers + +.PHONY: clean +clean: ## Clean the workspace of generated resources + @rm -rf .mypy_cache build dist *.egg-info .coverage .eggs docs/_build .pytest_cache lib htmlcov .cache && \ + find . \( -name '*.py[co]' -o -name dropin.cache \) -delete && \ + find . \( -name '*.bak' -o -name dropin.cache \) -delete && \ + find . \( -name '*.tgz' -o -name dropin.cache \) -delete && \ + find . -name __pycache__ -print0 | xargs -0 rm -rf -# Explaination of the below shell command should it ever break. +# Explanation of the below shell command should it ever break. # 1. Set the field separator to ": ##" and any make targets that might appear between : and ## # 2. Use sed-like syntax to remove the make targets # 3. 
Format the split fields into $$1) the target name (in blue) and $$2) the target descrption diff --git a/dev-requirements.in b/dev-requirements.in index 4b9bd528d..9a880c48f 100644 --- a/dev-requirements.in +++ b/dev-requirements.in @@ -1,8 +1,12 @@ -flake8==3.5.0 +coverage==5.0 +flake8==3.6.0 +mccabe==0.6.1 multidict==4.4.2 +mypy==0.701 +mypy-extensions==0.4.1 pip-tools==3.1.0 -pycodestyle==2.3.1 -pyflakes==1.6.0 +pycodestyle==2.4.0 +pyflakes==2.0.0 six==1.11.0 vcrpy==2.0.1 wrapt==1.10.11 diff --git a/dev-requirements.txt b/dev-requirements.txt index aeb1983f6..826afb92f 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --generate-hashes --output-file dev-requirements.txt dev-requirements.in requirements.in +# pip-compile --generate-hashes --output-file=dev-requirements.txt dev-requirements.in requirements.in # certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ @@ -14,9 +14,41 @@ click==7.0 \ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ # via pip-tools -flake8==3.5.0 \ - --hash=sha256:7253265f7abd8b313e3892944044a365e3f4ac3fcdcfb4298f55ee9ddf188ba0 \ - --hash=sha256:c7841163e2b576d435799169b78703ad6ac1bbb0f199994fc05f700b2a90ea37 +coverage==5.0 \ + --hash=sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351 \ + --hash=sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd \ + --hash=sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde \ + --hash=sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898 \ + --hash=sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070 \ + --hash=sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e \ + 
--hash=sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8 \ + --hash=sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0 \ + --hash=sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02 \ + --hash=sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798 \ + --hash=sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466 \ + --hash=sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be \ + --hash=sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d \ + --hash=sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6 \ + --hash=sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207 \ + --hash=sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d \ + --hash=sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b \ + --hash=sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a \ + --hash=sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b \ + --hash=sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be \ + --hash=sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72 \ + --hash=sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d \ + --hash=sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864 \ + --hash=sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f \ + --hash=sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f \ + --hash=sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e \ + --hash=sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1 \ + --hash=sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c \ + --hash=sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca \ + --hash=sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db \ + 
--hash=sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c +flake8==3.6.0 \ + --hash=sha256:6a35f5b8761f45c5513e3405f110a86bea57982c3b75b766ce7b65217abe1670 \ + --hash=sha256:c01f8a3963b3571a8e6bd7a4063359aff90749e160778e03817cd9b71c9e07d2 furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec @@ -25,8 +57,7 @@ idna==2.7 \ --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ - --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ - # via flake8 + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f multidict==4.4.2 \ --hash=sha256:05eeab69bf2b0664644c62bd92fabb045163e5b8d4376a31dfb52ce0210ced7b \ --hash=sha256:0c85880efa7cadb18e3b5eef0aa075dc9c0a3064cbbaef2e20be264b9cf47a64 \ @@ -57,18 +88,33 @@ multidict==4.4.2 \ --hash=sha256:e8848ae3cd6a784c29fae5055028bee9bffcc704d8bcad09bd46b42b44a833e2 \ --hash=sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306 \ --hash=sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd +mypy-extensions==0.4.1 \ + --hash=sha256:37e0e956f41369209a3d5f34580150bcacfabaa57b33a15c0b25f4b5725e0812 \ + --hash=sha256:b16cabe759f55e3409a7d231ebd2841378fb0c27a5d1994719e340e4f429ac3e +mypy==0.701 \ + --hash=sha256:2afe51527b1f6cdc4a5f34fc90473109b22bf7f21086ba3e9451857cf11489e6 \ + --hash=sha256:56a16df3e0abb145d8accd5dbb70eba6c4bd26e2f89042b491faa78c9635d1e2 \ + --hash=sha256:5764f10d27b2e93c84f70af5778941b8f4aa1379b2430f85c827e0f5464e8714 \ + --hash=sha256:5bbc86374f04a3aa817622f98e40375ccb28c4836f36b66706cf3c6ccce86eda \ + --hash=sha256:6a9343089f6377e71e20ca734cd8e7ac25d36478a9df580efabfe9059819bf82 \ + --hash=sha256:6c9851bc4a23dc1d854d3f5dfd5f20a016f8da86bcdbb42687879bb5f86434b0 \ + 
--hash=sha256:b8e85956af3fcf043d6f87c91cbe8705073fc67029ba6e22d3468bfee42c4823 \ + --hash=sha256:b9a0af8fae490306bc112229000aa0c2ccc837b49d29a5c42e088c132a2334dd \ + --hash=sha256:bbf643528e2a55df2c1587008d6e3bda5c0445f1240dfa85129af22ae16d7a9a \ + --hash=sha256:c46ab3438bd21511db0f2c612d89d8344154c0c9494afc7fbc932de514cf8d15 \ + --hash=sha256:f7a83d6bd805855ef83ec605eb01ab4fa42bcef254b13631e451cbb44914a9b0 orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 pip-tools==3.1.0 \ --hash=sha256:31b43e5f8d605fc84f7506199025460abcb98a29d12cc99db268f73e39cf55e5 \ --hash=sha256:b1ceca03b4a48346b2f6870565abb09d8d257d5b1524b4c6b222185bf26c3870 -pycodestyle==2.3.1 \ - --hash=sha256:682256a5b318149ca0d2a9185d365d8864a768a28db66a84a2ea946bcc426766 \ - --hash=sha256:6c4245ade1edfad79c3446fadfc96b0de2759662dc29d07d80a6f27ad1ca6ba9 -pyflakes==1.6.0 \ - --hash=sha256:08bd6a50edf8cffa9fa09a463063c425ecaaf10d1eb0335a7e8b1401aef89e6f \ - --hash=sha256:8d616a382f243dbf19b54743f280b80198be0bca3a5396f1d2e1fca6223e8805 +pycodestyle==2.4.0 \ + --hash=sha256:cbc619d09254895b0d12c2c691e237b2e91e9b2ecf5e84c26b35400f93dcfb83 \ + --hash=sha256:cbfca99bd594a10f674d0cd97a3d802a1fdef635d4361e1a2658de47ed261e3a +pyflakes==2.0.0 \ + --hash=sha256:9a7662ec724d0120012f6e29d6248ae3727d821bba522a0e6b356eff19126a49 \ + --hash=sha256:f661252913bc1dbe7fcfcbf0af0db3f42ab65aabd1a6ca68fe5d466bace94dae pyyaml==5.1 \ --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \ --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \ @@ -87,6 +133,27 @@ requests==2.20.0 \ six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb +typed-ast==1.3.5 \ + 
--hash=sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b \ + --hash=sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d \ + --hash=sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a \ + --hash=sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462 \ + --hash=sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee \ + --hash=sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a \ + --hash=sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4 \ + --hash=sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649 \ + --hash=sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a \ + --hash=sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f \ + --hash=sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7 \ + --hash=sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760 \ + --hash=sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18 \ + --hash=sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616 \ + --hash=sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd \ + --hash=sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21 \ + --hash=sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93 \ + --hash=sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb \ + --hash=sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7 \ + # via mypy urllib3==1.24.3 \ --hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb @@ -108,3 +175,7 @@ yarl==1.2.6 \ --hash=sha256:db6f70a4b09cde813a4807843abaaa60f3b15fb4a2a06f9ae9c311472662daa1 \ --hash=sha256:f17495e6fe3d377e3faac68121caef6f974fcb9e046bc075bcff40d8e5cc69a4 \ 
--hash=sha256:f85900b9cca0c67767bb61b2b9bd53208aaa7373dae633dbe25d179b4bf38aa7 + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. +# setuptools diff --git a/requirements.txt b/requirements.txt index 0cbecb33e..6ac4b0979 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --generate-hashes --output-file requirements.txt requirements.in +# pip-compile --generate-hashes --output-file=requirements.txt requirements.in # certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 000000000..997bc30e0 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,9 @@ +[flake8] +exclude = + .git, + __pycache__, + +max-line-length = 100 + +builtins = + _, From 105e7d446c6064ef215ba1d5905ff8808daf3009 Mon Sep 17 00:00:00 2001 From: John Hensley Date: Wed, 18 Dec 2019 20:07:06 -0500 Subject: [PATCH 157/352] Address complaints from the new quality tools --- securedrop_proxy/callbacks.py | 13 +++++++++---- securedrop_proxy/config.py | 22 +++++++++++++++++----- securedrop_proxy/entrypoint.py | 3 --- securedrop_proxy/version.py | 7 ++++++- 4 files changed, 32 insertions(+), 13 deletions(-) diff --git a/securedrop_proxy/callbacks.py b/securedrop_proxy/callbacks.py index d67bcebdb..0e890d21e 100644 --- a/securedrop_proxy/callbacks.py +++ b/securedrop_proxy/callbacks.py @@ -1,6 +1,8 @@ +import os import subprocess import sys import json +import tempfile import uuid @@ -8,6 +10,7 @@ def err_on_done(res): print(json.dumps(res.__dict__)) sys.exit(1) + # callback for handling non-JSON content. 
in production-like # environments, we want to call `qvm-move-to-vm` (and expressly not # `qvm-move`, since we want to include the destination VM name) to @@ -20,9 +23,11 @@ def on_save(fh, res, conf): fn = str(uuid.uuid4()) try: - subprocess.run(["cp", fh.name, "/tmp/{}".format(fn)]) - if conf.dev is not True: - subprocess.run(['qvm-move-to-vm', conf.target_vm, "/tmp/{}".format(fn)]) + with tempfile.TemporaryDirectory() as tmpdir: + tmpfile = os.path.join(os.path.abspath(tmpdir), fn) + subprocess.run(["cp", fh.name, tmpfile]) + if conf.dev is not True: + subprocess.run(["qvm-move-to-vm", conf.target_vm, tmpfile]) except Exception: res.status = 500 res.headers['Content-Type'] = 'application/json' @@ -32,7 +37,7 @@ def on_save(fh, res, conf): res.headers['Content-Type'] = 'application/json' res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] - res.body = json.dumps({'filename': fn }) + res.body = json.dumps({'filename': fn}) def on_done(res): diff --git a/securedrop_proxy/config.py b/securedrop_proxy/config.py index 461edd5e8..d82cdb459 100644 --- a/securedrop_proxy/config.py +++ b/securedrop_proxy/config.py @@ -1,12 +1,14 @@ import os import yaml + class Conf: scheme = '' host = '' port = 0 dev = False + def read_conf(conf_path, p): if not os.path.isfile(conf_path): @@ -17,13 +19,17 @@ def read_conf(conf_path, p): fh = open(conf_path, 'r') conf_in = yaml.safe_load(fh) except yaml.YAMLError: - p.simple_error(500, 'YAML syntax error while reading configuration file {}'.format(conf_path)) + p.simple_error( + 500, "YAML syntax error while reading configuration file {}".format(conf_path) + ) p.on_done(p.res) except Exception: - p.simple_error(500, 'Error while opening or reading configuration file {}'.format(conf_path)) + p.simple_error( + 500, "Error while opening or reading configuration file {}".format(conf_path) + ) p.on_done(p.res) - req_conf_keys = set(('host','scheme','port')) + req_conf_keys = set(('host', 'scheme', 'port')) missing_keys = 
req_conf_keys - set(conf_in.keys()) if len(missing_keys) > 0: p.simple_error(500, 'Configuration file missing required keys: {}'.format(missing_keys)) @@ -37,8 +43,14 @@ def read_conf(conf_path, p): if 'dev' in conf_in and conf_in['dev'] is True: c.dev = True else: - if 'target_vm' not in conf_in: - p.simple_error(500, 'Configuration file missing `target_vm` key, which is required when not in development mode') + if "target_vm" not in conf_in: + p.simple_error( + 500, + ( + "Configuration file missing `target_vm` key, which is required " + "when not in development mode" + ), + ) p.on_done(p.res) c.target_vm = conf_in['target_vm'] diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index 33c84d56c..70f21a3db 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -6,12 +6,9 @@ # called with exactly one argument: the path to its config file. See # the README for configuration options. -import json import logging import os -import subprocess import sys -import uuid from logging.handlers import TimedRotatingFileHandler diff --git a/securedrop_proxy/version.py b/securedrop_proxy/version.py index 430dd11dc..0ebdbd246 100644 --- a/securedrop_proxy/version.py +++ b/securedrop_proxy/version.py @@ -1,3 +1,8 @@ import pkgutil -version = pkgutil.get_data("securedrop_proxy", "VERSION").decode("utf-8") +version = None +version_content = pkgutil.get_data("securedrop_proxy", "VERSION") +if isinstance(version_content, bytes): + version = version_content.decode("utf-8") +else: + raise ValueError("Could not read VERSION file") From 20064fcca66f0a9c4282022247b7cb1f4d91207f Mon Sep 17 00:00:00 2001 From: John Hensley Date: Mon, 16 Dec 2019 19:29:36 -0500 Subject: [PATCH 158/352] Improve error handling, tests Try to ensure the proxy always returns a valid JSON response. Add specific handling of upstream error responses to proxy.py, and add tests and fixtures for that. Replace the Proxy._on_done static method with an instance method. 
In main.py, bail out upon receiving invalid input JSON, instead of printing an error response and then continuing to try to use it, resulting in printing another error response. Have main.py use the callbacks on the proxy as given, instead of always overwriting them with the defaults from callbacks.py. Rework entrypoint.py so that it should always print a JSON response, and add tests for it. --- fixtures/main_error_response.yaml | 30 +++++ fixtures/main_input_body.yaml | 59 +++++++++ fixtures/main_json_response.yaml | 92 ++++++++++++++ fixtures/main_non_json_response.yaml | 145 ++++++++++++++++++++++ fixtures/proxy_bad_request.yaml | 20 +++ fixtures/proxy_callbacks.yaml | 145 ++++++++++++++++++++++ fixtures/proxy_cannot_connect.yaml | 22 ++++ fixtures/proxy_internal_error.yaml | 22 ++++ fixtures/proxy_internal_server_error.yaml | 20 +++ fixtures/proxy_unofficial_status.yaml | 20 +++ securedrop_proxy/entrypoint.py | 76 ++++++------ securedrop_proxy/main.py | 5 +- securedrop_proxy/proxy.py | 61 ++++++--- tests/files/dev-config.yaml | 5 + tests/test_callbacks.py | 51 ++++++++ tests/test_config.py | 4 + tests/test_entrypoint.py | 128 +++++++++++++++++++ tests/test_main.py | 83 +++++++++++-- tests/test_proxy.py | 132 +++++++++++++++++++- 19 files changed, 1044 insertions(+), 76 deletions(-) create mode 100644 fixtures/main_error_response.yaml create mode 100644 fixtures/main_input_body.yaml create mode 100644 fixtures/main_json_response.yaml create mode 100644 fixtures/main_non_json_response.yaml create mode 100644 fixtures/proxy_bad_request.yaml create mode 100644 fixtures/proxy_callbacks.yaml create mode 100644 fixtures/proxy_cannot_connect.yaml create mode 100644 fixtures/proxy_internal_error.yaml create mode 100644 fixtures/proxy_internal_server_error.yaml create mode 100644 fixtures/proxy_unofficial_status.yaml create mode 100644 tests/files/dev-config.yaml create mode 100644 tests/test_entrypoint.py diff --git a/fixtures/main_error_response.yaml 
b/fixtures/main_error_response.yaml new file mode 100644 index 000000000..30d6fd734 --- /dev/null +++ b/fixtures/main_error_response.yaml @@ -0,0 +1,30 @@ +interactions: +- request: + body: null + headers: + Content-Length: + - '0' + method: '' + uri: https://jsonplaceholder.typicode.com/ + response: + body: + string: "\r\n400 Bad Request\r\n\r\n\ +

400 Bad Request

\r\n
cloudflare
\r\ + \n\r\n\r\n" + headers: + CF-RAY: + - '-' + Connection: + - close + Content-Length: + - '155' + Content-Type: + - text/html + Date: + - Mon, 16 Dec 2019 22:11:39 GMT + Server: + - cloudflare + status: + code: 400 + message: Bad Request +version: 1 diff --git a/fixtures/main_input_body.yaml b/fixtures/main_input_body.yaml new file mode 100644 index 000000000..0ea82b1a2 --- /dev/null +++ b/fixtures/main_input_body.yaml @@ -0,0 +1,59 @@ +interactions: +- request: + body: id=42&title=test + headers: + Content-Length: + - '16' + Content-Type: + - application/x-www-form-urlencoded + method: POST + uri: https://jsonplaceholder.typicode.com/posts + response: + body: + string: "{\n \"id\": 101,\n \"title\": \"test\"\n}" + headers: + Access-Control-Allow-Credentials: + - 'true' + Access-Control-Expose-Headers: + - Location + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 546ab6213f81f11a-IAD + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '34' + Content-Type: + - application/json; charset=utf-8 + Date: + - Tue, 17 Dec 2019 17:45:33 GMT + Etag: + - W/"22-i04alCk7PdGrJ2UKCUwBOO0LB3w" + Expect-CT: + - max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct" + Expires: + - '-1' + Location: + - http://jsonplaceholder.typicode.com/posts/101 + Pragma: + - no-cache + Server: + - cloudflare + Set-Cookie: + - __cfduid=d1fa880178dc2db917f26b8ce4e0d56e41576604733; expires=Thu, 16-Jan-20 + 17:45:33 GMT; path=/; domain=.typicode.com; HttpOnly; SameSite=Lax + Vary: + - Origin, X-HTTP-Method-Override, Accept-Encoding + Via: + - 1.1 vegur + X-Content-Type-Options: + - nosniff + X-Powered-By: + - Express + status: + code: 201 + message: Created +version: 1 diff --git a/fixtures/main_json_response.yaml b/fixtures/main_json_response.yaml new file mode 100644 index 000000000..782bee3e5 --- /dev/null +++ b/fixtures/main_json_response.yaml @@ -0,0 +1,92 @@ +interactions: +- request: + body: null + headers: {} + method: GET + uri: 
https://jsonplaceholder.typicode.com/posts?userId=1 + response: + body: + string: "[\n {\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"sunt aut\ + \ facere repellat provident occaecati excepturi optio reprehenderit\",\n \ + \ \"body\": \"quia et suscipit\\nsuscipit recusandae consequuntur expedita\ + \ et cum\\nreprehenderit molestiae ut ut quas totam\\nnostrum rerum est autem\ + \ sunt rem eveniet architecto\"\n },\n {\n \"userId\": 1,\n \"id\"\ + : 2,\n \"title\": \"qui est esse\",\n \"body\": \"est rerum tempore\ + \ vitae\\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\\nfugiat\ + \ blanditiis voluptate porro vel nihil molestiae ut reiciendis\\nqui aperiam\ + \ non debitis possimus qui neque nisi nulla\"\n },\n {\n \"userId\":\ + \ 1,\n \"id\": 3,\n \"title\": \"ea molestias quasi exercitationem repellat\ + \ qui ipsa sit aut\",\n \"body\": \"et iusto sed quo iure\\nvoluptatem\ + \ occaecati omnis eligendi aut ad\\nvoluptatem doloribus vel accusantium quis\ + \ pariatur\\nmolestiae porro eius odio et labore et velit aut\"\n },\n {\n\ + \ \"userId\": 1,\n \"id\": 4,\n \"title\": \"eum et est occaecati\"\ + ,\n \"body\": \"ullam et saepe reiciendis voluptatem adipisci\\nsit amet\ + \ autem assumenda provident rerum culpa\\nquis hic commodi nesciunt rem tenetur\ + \ doloremque ipsam iure\\nquis sunt voluptatem rerum illo velit\"\n },\n\ + \ {\n \"userId\": 1,\n \"id\": 5,\n \"title\": \"nesciunt quas odio\"\ + ,\n \"body\": \"repudiandae veniam quaerat sunt sed\\nalias aut fugiat\ + \ sit autem sed est\\nvoluptatem omnis possimus esse voluptatibus quis\\nest\ + \ aut tenetur dolor neque\"\n },\n {\n \"userId\": 1,\n \"id\": 6,\n\ + \ \"title\": \"dolorem eum magni eos aperiam quia\",\n \"body\": \"\ + ut aspernatur corporis harum nihil quis provident sequi\\nmollitia nobis aliquid\ + \ molestiae\\nperspiciatis et ea nemo ab reprehenderit accusantium quas\\\ + nvoluptate dolores velit et doloremque molestiae\"\n },\n {\n \"userId\"\ + : 1,\n \"id\": 
7,\n \"title\": \"magnam facilis autem\",\n \"body\"\ + : \"dolore placeat quibusdam ea quo vitae\\nmagni quis enim qui quis quo nemo\ + \ aut saepe\\nquidem repellat excepturi ut quia\\nsunt ut sequi eos ea sed\ + \ quas\"\n },\n {\n \"userId\": 1,\n \"id\": 8,\n \"title\": \"\ + dolorem dolore est ipsam\",\n \"body\": \"dignissimos aperiam dolorem qui\ + \ eum\\nfacilis quibusdam animi sint suscipit qui sint possimus cum\\nquaerat\ + \ magni maiores excepturi\\nipsam ut commodi dolor voluptatum modi aut vitae\"\ + \n },\n {\n \"userId\": 1,\n \"id\": 9,\n \"title\": \"nesciunt\ + \ iure omnis dolorem tempora et accusantium\",\n \"body\": \"consectetur\ + \ animi nesciunt iure dolore\\nenim quia ad\\nveniam autem ut quam aut nobis\\\ + net est aut quod aut provident voluptas autem voluptas\"\n },\n {\n \"\ + userId\": 1,\n \"id\": 10,\n \"title\": \"optio molestias id quia eum\"\ + ,\n \"body\": \"quo et expedita modi cum officia vel magni\\ndoloribus\ + \ qui repudiandae\\nvero nisi sit\\nquos veniam quod sed accusamus veritatis\ + \ error\"\n }\n]" + headers: + Access-Control-Allow-Credentials: + - 'true' + Age: + - '4646' + CF-Cache-Status: + - HIT + CF-RAY: + - 54640259fa6fe0d2-IAD + Cache-Control: + - max-age=14400 + Connection: + - keep-alive + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 16 Dec 2019 22:14:15 GMT + Etag: + - W/"aa6-j2NSH739l9uq40OywFMn7Y0C/iY" + Expect-CT: + - max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct" + Expires: + - '-1' + Pragma: + - no-cache + Server: + - cloudflare + Set-Cookie: + - __cfduid=d8ac1a396d31b9e9c1f816924e5bf186d1576534455; expires=Wed, 15-Jan-20 + 22:14:15 GMT; path=/; domain=.typicode.com; HttpOnly + Transfer-Encoding: + - chunked + Vary: + - Origin, Accept-Encoding + Via: + - 1.1 vegur + X-Content-Type-Options: + - nosniff + X-Powered-By: + - Express + status: + code: 200 + message: OK +version: 1 diff --git a/fixtures/main_non_json_response.yaml 
b/fixtures/main_non_json_response.yaml new file mode 100644 index 000000000..e5e02a920 --- /dev/null +++ b/fixtures/main_non_json_response.yaml @@ -0,0 +1,145 @@ +interactions: +- request: + body: null + headers: {} + method: GET + uri: https://jsonplaceholder.typicode.com/ + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\nJSONPlaceholder - Fake online REST API for developers\n\ + \n\n
\n\n\ + \n Announcement: You can now support\n\ + JSONPlaceholder on GitHub Sponsors!\n\n
\n
\n\n
\n
\n

\nJSONPlaceholder\n\ +

\n

\nFake Online REST API for Testing and Prototyping\n\ +
Serving ~350M requests per month\n
Powered by\nJSON Server\n+\nLowDB\n

\n\n\n\n\n\n
\n
\n\n
\n

Gold\ + \ Sponsors

\n

\n\n\n\n

\n

\nYour Company Logo Here\n

\n
\n\n
\n
\n\ + \n

Intro

\n

\nJSONPlaceholder is a free online REST API that you\ + \ can use whenever you need some fake data.\n
It's great for tutorials,\ + \ testing new libraries, sharing code examples, ...\n

\n\n

Example

\n\ +

\nRun this code in a console or from any site:\n

\n
fetch('https://jsonplaceholder.typicode.com/todos/1')\n\
+        \  .then(response => response.json())\n  .then(json => console.log(json))\n\
+        
\n

\n\n

\n
\nCongrats\ + \ you've made your first call to JSONPlaceholder! \U0001F603 \U0001F389\n\ +

\nTip: you can use\n\nhttp://\n or\n\nhttps://\n\ + when making requests to JSONPlaceholder.\n

\n
\n\n\n

Resources

\n

\nJSONPlaceholder comes with a\ + \ set of 6 common resources:\n

\n\n\n\n\n\n\n\ + \n\n\ + \n\n\n\n\ + \n\n\n\n\ + \n\n\n\n\ + \n\n\n\n\ + \n
\n\ + \ /posts\n100 posts
\n/comments\n500 comments
\n/albums\n100 albums
\n/photos\n5000 photos
\n/todos\n200 todos
\n/users\n10 users
\n

\nNote: resources have relations. For\ + \ example:\nposts have many\ncomments,\nalbums have many\n\ + photos, ... see below for routes examples.\n

\n\n

Routes

\n\ +

\nAll HTTP methods are supported.\n

\n\n\n\n\ + \n\n\n\n\n\n\n\n\n\n\n\ + \n\n\n\n\n\n\n\n\n\n\n\n\n\ + \n\n\n\n\n\n\ + \n\n\n\n\n
GET\n/posts\n
GET\n\ + /posts/1\n
GET\n\ + /posts/1/comments\n
GET\n/comments?postId=1\n\ +
GET\n/posts?userId=1\n\ +
POST/posts
PUT/posts/1
PATCH/posts/1
DELETE/posts/1
\n

\nNote:\ + \ you can view detailed examples\nhere.\n

\n\ + \n

Use your own data

\n\n

\nWith My JSON Server online service and a simple GitHub repo, you can have\ + \ your own online fake REST server in seconds.\n

\n
\n
\n
\n\ + \n\n\n\n\n\ + \n\n\n" + headers: + Access-Control-Allow-Credentials: + - 'true' + Age: + - '5668' + CF-Cache-Status: + - HIT + CF-RAY: + - 5463fe6ccb0ecf04-IAD + Cache-Control: + - public, max-age=14400 + Connection: + - keep-alive + Content-Type: + - text/html; charset=UTF-8 + Date: + - Mon, 16 Dec 2019 22:11:34 GMT + Expect-CT: + - max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct" + Last-Modified: + - Mon, 05 Aug 2019 03:07:14 GMT + Server: + - cloudflare + Set-Cookie: + - __cfduid=d13b55a7f6e786f74d5f9f1f084a183a31576534294; expires=Wed, 15-Jan-20 + 22:11:34 GMT; path=/; domain=.typicode.com; HttpOnly + Transfer-Encoding: + - chunked + Vary: + - Origin, Accept-Encoding + Via: + - 1.1 vegur + X-Powered-By: + - Express + status: + code: 200 + message: OK +version: 1 diff --git a/fixtures/proxy_bad_request.yaml b/fixtures/proxy_bad_request.yaml new file mode 100644 index 000000000..9d71f3dae --- /dev/null +++ b/fixtures/proxy_bad_request.yaml @@ -0,0 +1,20 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + method: GET + uri: http://localhost:8000/bad + response: + body: + string: '' + headers: + Date: + - Mon, 16 Dec 2019 23:44:53 GMT + Server: + - BaseHTTP/0.6 Python/3.7.3 + status: + code: 400 + message: Bad Request +version: 1 diff --git a/fixtures/proxy_callbacks.yaml b/fixtures/proxy_callbacks.yaml new file mode 100644 index 000000000..7224c8cfd --- /dev/null +++ b/fixtures/proxy_callbacks.yaml @@ -0,0 +1,145 @@ +interactions: +- request: + body: null + headers: {} + method: GET + uri: https://jsonplaceholder.typicode.com/ + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\nJSONPlaceholder - Fake online REST API for developers\n\ + \n\n
\n\n\ + \n Announcement: You can now support\n\ + JSONPlaceholder on GitHub Sponsors!\n\n
\n
\n\n
\n
\n

\nJSONPlaceholder\n\ +

\n

\nFake Online REST API for Testing and Prototyping\n\ +
Serving ~350M requests per month\n
Powered by\nJSON Server\n+\nLowDB\n

\n\n\n\n\n\n
\n
\n\n
\n

Gold\ + \ Sponsors

\n

\n\n\n\n

\n

\nYour Company Logo Here\n

\n
\n\n
\n
\n\ + \n

Intro

\n

\nJSONPlaceholder is a free online REST API that you\ + \ can use whenever you need some fake data.\n
It's great for tutorials,\ + \ testing new libraries, sharing code examples, ...\n

\n\n

Example

\n\ +

\nRun this code in a console or from any site:\n

\n
fetch('https://jsonplaceholder.typicode.com/todos/1')\n\
+        \  .then(response => response.json())\n  .then(json => console.log(json))\n\
+        
\n

\n\n

\n
\nCongrats\ + \ you've made your first call to JSONPlaceholder! \U0001F603 \U0001F389\n\ +

\nTip: you can use\n\nhttp://\n or\n\nhttps://\n\ + when making requests to JSONPlaceholder.\n

\n
\n\n\n

Resources

\n

\nJSONPlaceholder comes with a\ + \ set of 6 common resources:\n

\n\n\n\n\n\n\n\ + \n\n\ + \n\n\n\n\ + \n\n\n\n\ + \n\n\n\n\ + \n\n\n\n\ + \n
\n\ + \ /posts\n100 posts
\n/comments\n500 comments
\n/albums\n100 albums
\n/photos\n5000 photos
\n/todos\n200 todos
\n/users\n10 users
\n

\nNote: resources have relations. For\ + \ example:\nposts have many\ncomments,\nalbums have many\n\ + photos, ... see below for routes examples.\n

\n\n

Routes

\n\ +

\nAll HTTP methods are supported.\n

\n\n\n\n\ + \n\n\n\n\n\n\n\n\n\n\n\ + \n\n\n\n\n\n\n\n\n\n\n\n\n\ + \n\n\n\n\n\n\ + \n\n\n\n\n
GET\n/posts\n
GET\n\ + /posts/1\n
GET\n\ + /posts/1/comments\n
GET\n/comments?postId=1\n\ +
GET\n/posts?userId=1\n\ +
POST/posts
PUT/posts/1
PATCH/posts/1
DELETE/posts/1
\n

\nNote:\ + \ you can view detailed examples\nhere.\n

\n\ + \n

Use your own data

\n\n

\nWith My JSON Server online service and a simple GitHub repo, you can have\ + \ your own online fake REST server in seconds.\n

\n
\n
\n
\n\ + \n\n\n\n\n\ + \n\n\n" + headers: + Access-Control-Allow-Credentials: + - 'true' + Age: + - '4432' + CF-Cache-Status: + - HIT + CF-RAY: + - 5469de47682ecf00-IAD + Cache-Control: + - public, max-age=14400 + Connection: + - keep-alive + Content-Type: + - text/html; charset=UTF-8 + Date: + - Tue, 17 Dec 2019 15:18:12 GMT + Expect-CT: + - max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct" + Last-Modified: + - Mon, 05 Aug 2019 03:07:14 GMT + Server: + - cloudflare + Set-Cookie: + - __cfduid=dc33ebab29f22c648b958d03fed5596b51576595892; expires=Thu, 16-Jan-20 + 15:18:12 GMT; path=/; domain=.typicode.com; HttpOnly; SameSite=Lax + Transfer-Encoding: + - chunked + Vary: + - Origin, Accept-Encoding + Via: + - 1.1 vegur + X-Powered-By: + - Express + status: + code: 200 + message: OK +version: 1 diff --git a/fixtures/proxy_cannot_connect.yaml b/fixtures/proxy_cannot_connect.yaml new file mode 100644 index 000000000..9788ab4e4 --- /dev/null +++ b/fixtures/proxy_cannot_connect.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + method: GET + uri: http://localhost:8000/ + response: + body: + string: 'hello + + ' + headers: + Date: + - Tue, 17 Dec 2019 00:06:22 GMT + Server: + - BaseHTTP/0.6 Python/3.7.3 + status: + code: 200 + message: OK +version: 1 diff --git a/fixtures/proxy_internal_error.yaml b/fixtures/proxy_internal_error.yaml new file mode 100644 index 000000000..7295de697 --- /dev/null +++ b/fixtures/proxy_internal_error.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + method: GET + uri: http://localhost:8000/ + response: + body: + string: 'hello + + ' + headers: + Date: + - Tue, 17 Dec 2019 00:00:13 GMT + Server: + - BaseHTTP/0.6 Python/3.7.3 + status: + code: 200 + message: OK +version: 1 diff --git a/fixtures/proxy_internal_server_error.yaml b/fixtures/proxy_internal_server_error.yaml new file mode 100644 index 
000000000..603e62467 --- /dev/null +++ b/fixtures/proxy_internal_server_error.yaml @@ -0,0 +1,20 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + method: GET + uri: http://localhost:8000/crash + response: + body: + string: '' + headers: + Date: + - Mon, 16 Dec 2019 23:58:10 GMT + Server: + - BaseHTTP/0.6 Python/3.7.3 + status: + code: 500 + message: Internal Server Error +version: 1 diff --git a/fixtures/proxy_unofficial_status.yaml b/fixtures/proxy_unofficial_status.yaml new file mode 100644 index 000000000..fa62ef962 --- /dev/null +++ b/fixtures/proxy_unofficial_status.yaml @@ -0,0 +1,20 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + method: GET + uri: http://localhost:8000/teapot + response: + body: + string: '' + headers: + Date: + - Mon, 16 Dec 2019 23:59:52 GMT + Server: + - BaseHTTP/0.6 Python/3.7.3 + status: + code: 418 + message: '' +version: 1 diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index 70f21a3db..fb0a8c1c0 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -6,6 +6,8 @@ # called with exactly one argument: the path to its config file. See # the README for configuration options. 
+import http +import json import logging import os import sys @@ -30,60 +32,55 @@ def start(): ''' try: configure_logging() - except Exception as e: - print(e) - return - - logging.debug('Starting SecureDrop Proxy {}'.format(version)) - # a fresh, new proxy object - p = proxy.Proxy() + logging.debug('Starting SecureDrop Proxy {}'.format(version)) - # set up an error handler early, so we can use it during - # configuration, etc - p.on_done = callbacks.err_on_done + # a fresh, new proxy object + p = proxy.Proxy() - # path to config file must be at argv[1] - if len(sys.argv) != 2: - p.simple_error( - 500, "sd-proxy script not called with path to configuration file" - ) - p.on_done(p.res) + # set up an error handler early, so we can use it during + # configuration, etc + original_on_done = p.on_done + p.on_done = callbacks.err_on_done - # read config. `read_conf` will call `p.on_done` if there is a config - # problem, and will return a Conf object on success. - conf_path = sys.argv[1] - p.conf = config.read_conf(conf_path, p) + # path to config file must be at argv[1] + if len(sys.argv) != 2: + raise ValueError("sd-proxy script not called with path to configuration file") - # read user request from STDIN - incoming = [] - for line in sys.stdin: - incoming.append(line) - incoming = "\n".join(incoming) + # read config. `read_conf` will call `p.on_done` if there is a config + # problem, and will return a Conf object on success. + conf_path = sys.argv[1] + p.conf = config.read_conf(conf_path, p) - main.__main__(incoming, p) + # read user request from STDIN + incoming = [] + for line in sys.stdin: + incoming.append(line) + incoming = "\n".join(incoming) - -def excepthook(*exc_args): - ''' - This function is called in the event of a catastrophic failure. - Log exception and exit cleanly. 
- ''' - logging.error('Unrecoverable error', exc_info=(exc_args)) - sys.__excepthook__(*exc_args) - print('') # force terminal prompt on to a new line - sys.exit(1) + p.on_done = original_on_done + main.__main__(incoming, p) + except Exception as e: + response = { + "status": http.HTTPStatus.INTERNAL_SERVER_ERROR, + "body": json.dumps({ + "error": str(e), + }) + } + print(json.dumps(response)) + sys.exit(1) def configure_logging() -> None: ''' All logging related settings are set up by this function. ''' - log_folder = os.path.join(DEFAULT_HOME, 'logs') + home = os.getenv("SECUREDROP_HOME", DEFAULT_HOME) + log_folder = os.path.join(home, 'logs') if not os.path.exists(log_folder): os.makedirs(log_folder) - log_file = os.path.join(DEFAULT_HOME, 'logs', 'proxy.log') + log_file = os.path.join(home, 'logs', 'proxy.log') # set logging format log_fmt = ('%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) %(levelname)s: %(message)s') @@ -98,6 +95,3 @@ def configure_logging() -> None: log = logging.getLogger() log.setLevel(LOGLEVEL) log.addHandler(handler) - - # override excepthook to capture a log of catastrophic failures. 
- sys.excepthook = excepthook diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py index 6fadafaf5..8986449f4 100644 --- a/securedrop_proxy/main.py +++ b/securedrop_proxy/main.py @@ -21,6 +21,7 @@ def __main__(incoming, p): logging.error(e) p.simple_error(400, 'Invalid JSON in request') p.on_done(p.res) + return req = proxy.Req() try: @@ -38,6 +39,6 @@ def __main__(incoming, p): req.body = client_req['body'] p.req = req - p.on_save = callbacks.on_save - p.on_done = callbacks.on_done + if p.on_save is None: + p.on_save = callbacks.on_save p.proxy() diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index cc30b3214..59a9a3af5 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -1,4 +1,5 @@ import furl +import http import json import logging import requests @@ -6,6 +7,7 @@ import werkzeug import securedrop_proxy.version as version +from securedrop_proxy import callbacks logger = logging.getLogger(__name__) @@ -28,22 +30,21 @@ def __init__(self, status): class Proxy: - @staticmethod - def _on_done(res): - print(json.dumps(res.__dict__)) - - def __init__(self, conf=None, req=Req(), on_save=None, on_done=None): + def __init__(self, conf=None, req=Req(), on_save=None, on_done=None, timeout: float = None): self.conf = conf self.req = req self.res = None self.on_save = on_save if on_done is not None: self.on_done = on_done - else: - self.on_done = self._on_done + + self.timeout = float(timeout) if timeout else 10 self._prepared_request = None + def on_done(self, res): + callbacks.on_done(res) + @staticmethod def valid_path(path): u = furl.furl(path) @@ -75,7 +76,7 @@ def prep_request(self): try: url = furl.furl("{}://{}:{}/{}".format(scheme, host, port, path)) except Exception as e: - logging.error(e) + logger.error(e) self.simple_error(500, "Proxy error while generating URL to request") raise ValueError("Error generating URL from provided values") @@ -118,7 +119,7 @@ def handle_non_json_response(self): self.res = res def 
handle_response(self): - logging.debug('Handling response') + logger.debug("Handling response") ctype = werkzeug.http.parse_options_header(self._presp.headers["content-type"]) @@ -135,23 +136,45 @@ def proxy(self): try: if self.on_save is None: - self.simple_error(400, "Request callback is not set.") - raise ValueError("Request callback is not set.") + self.simple_error( + http.HTTPStatus.BAD_REQUEST, "Request on_save callback is not set." + ) + raise ValueError("Request on_save callback is not set.") self.prep_request() - logging.debug('Sending request') + logger.debug("Sending request") s = requests.Session() - self._presp = s.send(self._prepared_request) + self._presp = s.send(self._prepared_request, timeout=self.timeout) + self._presp.raise_for_status() self.handle_response() - except ValueError as e: - logging.error(e) + logger.error(e) # effectively a 4xx error # we have set self.response to indicate an error pass - - # catch server errors here, handle maybe-differently from - # ValueErrors... - + except requests.exceptions.Timeout as e: + # Timeout covers both ConnectTimeout and ReadTimeout + logger.error(e) + self.simple_error(http.HTTPStatus.GATEWAY_TIMEOUT, "request timed out") + except ( + requests.exceptions.ConnectionError, # covers ProxyError, SSLError + requests.exceptions.TooManyRedirects, + ) as e: + logger.error(e) + self.simple_error(http.HTTPStatus.BAD_GATEWAY, "could not connect to server") + except requests.exceptions.HTTPError as e: + logger.error(e) + try: + self.simple_error( + e.response.status_code, + http.HTTPStatus(e.response.status_code).phrase.lower() + ) + except ValueError: + # Return a generic error message when the response + # status code is not found in http.HTTPStatus. 
+ self.simple_error(e.response.status_code, "unspecified server error") + except Exception as e: + logger.error(e) + self.simple_error(http.HTTPStatus.INTERNAL_SERVER_ERROR, "internal proxy error") self.on_done(self.res) diff --git a/tests/files/dev-config.yaml b/tests/files/dev-config.yaml new file mode 100644 index 000000000..8e72283a2 --- /dev/null +++ b/tests/files/dev-config.yaml @@ -0,0 +1,5 @@ +host: jsonplaceholder.typicode.com +scheme: https +port: 443 +target_vm: compost +dev: True diff --git a/tests/test_callbacks.py b/tests/test_callbacks.py index d0993e473..83c861ba6 100644 --- a/tests/test_callbacks.py +++ b/tests/test_callbacks.py @@ -5,6 +5,8 @@ import unittest from unittest.mock import patch +import vcr + from securedrop_proxy import callbacks from securedrop_proxy import config from securedrop_proxy import proxy @@ -76,3 +78,52 @@ def test_on_save_200_success(self): 'application/json') self.assertEqual(self.res.status, 200) self.assertIn('filename', self.res.body) + + @vcr.use_cassette("fixtures/proxy_callbacks.yaml") + def test_custom_callbacks(self): + """ + Test the handlers in a real proxy request. + """ + conf = config.Conf() + conf.host = 'jsonplaceholder.typicode.com' + conf.scheme = 'https' + conf.port = 443 + + req = proxy.Req() + req.method = "GET" + + on_save_addition = "added by the on_save callback\n" + on_done_addition = "added by the on_done callback\n" + + def on_save(fh, res, conf): + res.headers['Content-Type'] = 'text/plain' + res.body = on_save_addition + + def on_done(res): + res.headers['Content-Type'] = 'text/plain' + res.body += on_done_addition + + p = proxy.Proxy(self.conf, req, on_save=on_save, on_done=on_done) + p.proxy() + + self.assertEqual( + p.res.body, + "{}{}".format(on_save_addition, on_done_addition) + ) + + @vcr.use_cassette("fixtures/proxy_callbacks.yaml") + def test_production_on_save(self): + """ + Test on_save's production file handling. 
+ """ + conf = config.Conf() + conf.host = 'jsonplaceholder.typicode.com' + conf.scheme = 'https' + conf.port = 443 + conf.dev = False + conf.target_vm = "sd-svs-dispvm" + + with patch("subprocess.run") as patched_run: + fh = tempfile.NamedTemporaryFile() + callbacks.on_save(fh, self.res, conf) + self.assertEqual(patched_run.call_args[0][0][0], "qvm-move-to-vm") diff --git a/tests/test_config.py b/tests/test_config.py index 916ad679d..0ae93eab9 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -90,3 +90,7 @@ def err_on_done(res): with self.assertRaises(SystemExit): config.read_conf('tests/files/missing-target-vm.yaml', self.p) + + def test_dev_config(self): + c = config.read_conf('tests/files/dev-config.yaml', self.p) + self.assertTrue(c.dev) diff --git a/tests/test_entrypoint.py b/tests/test_entrypoint.py new file mode 100644 index 000000000..13b1d0425 --- /dev/null +++ b/tests/test_entrypoint.py @@ -0,0 +1,128 @@ +import contextlib +import http +import io +import json +import os +import sys +import tempfile +import unittest.mock + +import vcr +from securedrop_proxy import entrypoint + + +@contextlib.contextmanager +def sdhome(*args, **kwds): + with tempfile.TemporaryDirectory() as tmphome: + os.environ["SECUREDROP_HOME"] = tmphome + try: + yield tmphome + finally: + del os.environ["SECUREDROP_HOME"] + + +class TestEntrypoint(unittest.TestCase): + """ + Test the entrypoint used in production. 
+ """ + + def test_missing_config(self): + config_path = "/tmp/nonexistent-config.yaml" + self.assertFalse(os.path.exists(config_path)) + + output = None + with unittest.mock.patch( + "sys.argv", new_callable=lambda: ["sd-proxy", config_path] + ) as mock_argv, unittest.mock.patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: + with self.assertRaises(SystemExit), sdhome(): + entrypoint.start() + output = mock_stdout.getvalue() + + response = json.loads(output) + self.assertEqual(response["status"], http.HTTPStatus.INTERNAL_SERVER_ERROR) + body = json.loads(response["body"]) + self.assertEqual( + body["error"], "Configuration file does not exist at {}".format(config_path) + ) + + def test_unwritable_log_folder(self): + """ + Tests a permission problem in `configure_logging`. + """ + output = None + with sdhome() as home: + os.chmod(home, 0o0444) + with unittest.mock.patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: + with self.assertRaises(SystemExit): + entrypoint.start() + output = mock_stdout.getvalue() + os.chmod(home, 0o0700) + + response = json.loads(output) + self.assertEqual(response["status"], http.HTTPStatus.INTERNAL_SERVER_ERROR) + body = json.loads(response["body"]) + self.assertIn("Permission denied: ", body["error"]) + + def test_wrong_number_of_arguments(self): + with sdhome() as home: + with unittest.mock.patch( + "sys.argv", new_callable=lambda: ["sd-proxy"] + ) as mock_argv, unittest.mock.patch( + "sys.stdout", new_callable=io.StringIO + ) as mock_stdout: + with self.assertRaises(SystemExit): + entrypoint.start() + output = mock_stdout.getvalue() + + response = json.loads(output) + self.assertEqual(response["status"], http.HTTPStatus.INTERNAL_SERVER_ERROR) + body = json.loads(response["body"]) + self.assertEqual( + body["error"], "sd-proxy script not called with path to configuration file" + ) + + def test_empty_input(self): + config_path = "tests/files/valid-config.yaml" + self.assertTrue(os.path.exists(config_path)) + + with 
sdhome() as home: + with unittest.mock.patch( + "sys.stdin", new_callable=lambda: io.StringIO("") + ) as mock_stdin, unittest.mock.patch( + "sys.stdout", new_callable=io.StringIO + ) as mock_stdout, unittest.mock.patch( + "sys.argv", new_callable=lambda: ["sd-proxy", config_path] + ) as mock_argv: + entrypoint.start() + output = mock_stdout.getvalue() + + response = json.loads(output) + self.assertEqual(response["status"], http.HTTPStatus.BAD_REQUEST) + body = json.loads(response["body"]) + self.assertEqual( + body["error"], "Invalid JSON in request" + ) + + @vcr.use_cassette("fixtures/main_json_response.yaml") + def test_json_response(self): + config_path = "tests/files/valid-config.yaml" + self.assertTrue(os.path.exists(config_path)) + + test_input = { + "method": "GET", + "path_query": "/posts?userId=1", + } + + output = None + with sdhome() as home, unittest.mock.patch( + "sys.stdin", new_callable=lambda: io.StringIO(json.dumps(test_input)) + ) as mock_stding, unittest.mock.patch( + "sys.stdout", new_callable=io.StringIO + ) as mock_stdout, unittest.mock.patch( + "sys.argv", new_callable=lambda: ["sd-proxy", config_path] + ) as mock_argv: + entrypoint.start() + output = mock_stdout.getvalue() + + response = json.loads(output) + self.assertEqual(response["status"], http.HTTPStatus.OK) diff --git a/tests/test_main.py b/tests/test_main.py index 8a4a28e15..1c4946146 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,3 +1,4 @@ +import http from io import StringIO import json import subprocess @@ -5,6 +6,8 @@ import unittest import uuid +import vcr + from securedrop_proxy import config from securedrop_proxy import main from securedrop_proxy import proxy @@ -18,6 +21,7 @@ def setUp(self): self.conf.port = 443 self.conf.dev = True + @vcr.use_cassette('fixtures/main_json_response.yaml') def test_json_response(self): test_input_json = """{ "method": "GET", "path_query": "/posts?userId=1" }""" @@ -32,12 +36,10 @@ def on_save(res, fh, conf): pass def 
on_done(res): - res = res.__dict__ - self.assertEqual(res['status'], 200) + self.assertEqual(res.status, http.HTTPStatus.OK) + print(json.dumps(res.__dict__)) - self.p = proxy.Proxy(self.conf, req, on_save) - self.p.on_done = on_done - self.p.proxy() + self.p = proxy.Proxy(self.conf, req, on_save, on_done) saved_stdout = sys.stdout try: @@ -52,6 +54,7 @@ def on_done(res): for item in json.loads(response['body']): self.assertEqual(item['userId'], 1) + @vcr.use_cassette('fixtures/main_non_json_response.yaml') def test_non_json_response(self): test_input_json = """{ "method": "GET", "path_query": "" }""" @@ -66,7 +69,6 @@ def on_save(fh, res, conf): res.body = json.dumps({'filename': self.fn}) self.p = proxy.Proxy(self.conf, proxy.Req(), on_save) - self.p.proxy() saved_stdout = sys.stdout try: @@ -88,9 +90,9 @@ def on_save(fh, res, conf): saved_file = f.read() # We expect HTML content in the file from the test data - self.assertIn("", saved_file) + self.assertIn("", saved_file) - def test_error_response(self): + def test_input_invalid_json(self): test_input_json = """"foo": "bar", "baz": "bliff" }""" def on_save(fh, res, conf): @@ -101,9 +103,66 @@ def on_done(res): self.assertEqual(res['status'], 400) sys.exit(1) - self.p = proxy.Proxy(self.conf, proxy.Req(), on_save) - self.p.on_done = on_done + p = proxy.Proxy(self.conf, proxy.Req(), on_save, on_done) with self.assertRaises(SystemExit): - self.p.proxy() - main.__main__(test_input_json, self.p) + main.__main__(test_input_json, p) + + def test_input_missing_keys(self): + test_input_json = """{ "foo": "bar", "baz": "bliff" }""" + + def on_save(fh, res, conf): + pass + + def on_done(res): + res = res.__dict__ + self.assertEqual(res['status'], 400) + self.assertEqual(res['body'], '{"error": "Missing keys in request"}') + sys.exit(1) + + p = proxy.Proxy(self.conf, proxy.Req(), on_save, on_done) + with self.assertRaises(SystemExit): + main.__main__(test_input_json, p) + + 
@vcr.use_cassette('fixtures/main_json_response.yaml') + def test_input_headers(self): + test_input = { + "method": "GET", + "path_query": "/posts?userId=1", + "headers": { "X-Test-Header": "th" } + } + + def on_save(fh, res, conf): + pass + + p = proxy.Proxy(self.conf, proxy.Req(), on_save) + main.__main__(json.dumps(test_input), p) + self.assertEqual(p.req.headers, test_input["headers"]) + + @vcr.use_cassette('fixtures/main_input_body.yaml') + def test_input_body(self): + test_input = { + "method": "POST", + "path_query": "/posts", + "body": { "id": 42, "title": "test" } + } + + def on_save(fh, res, conf): + pass + + p = proxy.Proxy(self.conf, proxy.Req(), on_save) + main.__main__(json.dumps(test_input), p) + self.assertEqual(p.req.body, test_input["body"]) + + @vcr.use_cassette('fixtures/main_non_json_response.yaml') + def test_default_callbacks(self): + test_input = { + "method": "GET", + "path_query": "", + } + + p = proxy.Proxy(self.conf, proxy.Req()) + with unittest.mock.patch("securedrop_proxy.callbacks.on_done") as on_done, unittest.mock.patch("securedrop_proxy.callbacks.on_save") as on_save: + main.__main__(json.dumps(test_input), p) + self.assertEqual(on_save.call_count, 1) + self.assertEqual(on_done.call_count, 1) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 6278dce45..bc73b0589 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -1,8 +1,12 @@ +import http import json -import vcr import unittest import uuid +import requests +import vcr + +from securedrop_proxy import callbacks from securedrop_proxy import proxy from securedrop_proxy import config from securedrop_proxy import version @@ -136,7 +140,7 @@ def test_proxy_400_no_handler(self): self.assertEqual(p.res.status, 400) self.assertEqual(p.res.headers['Content-Type'], 'application/json') - self.assertIn('Request callback is not set', + self.assertIn('Request on_save callback is not set', p.res.body) @@ -167,3 +171,127 @@ def test_proxy_500_misconfiguration(self): 
self.assertEqual(p.res.headers['Content-Type'], 'application/json') self.assertIn('Proxy error while generating URL to request', p.res.body) + + +class TestServerErrorHandling(unittest.TestCase): + def setUp(self): + self.conf = config.Conf() + self.conf.host = "localhost" + self.conf.scheme = "http" + self.conf.port = 8000 + + def make_request(self, method="GET", path_query="/", headers=None): + req = proxy.Req() + req.method = method if method else "GET" + req.path_query = path_query if path_query else "/" + req.headers = headers if headers else {"Accept": "application/json"} + return req + + def test_cannot_connect(self): + """ + Test for "502 Bad Gateway" when the server can't be reached. + """ + req = self.make_request() + + conf = config.Conf() + conf.host = "sdproxytest.local" + conf.scheme = "https" + conf.port = 8000 + + p = proxy.Proxy(conf, req, on_save=callbacks.on_save) + p.proxy() + + self.assertEqual(p.res.status, http.HTTPStatus.BAD_GATEWAY) + self.assertIn("application/json", p.res.headers["Content-Type"]) + body = json.loads(p.res.body) + self.assertEqual(body["error"], "could not connect to server") + + def test_server_timeout(self): + """ + Test for "504 Gateway Timeout" when the server times out. + """ + class TimeoutProxy(proxy.Proxy): + """ + Mocks a slow upstream server. + + VCR cassettes cannot represent a request that takes too + long. This Proxy subclass raises the exception that would + cause. 
+ """ + def prep_request(self): + raise requests.exceptions.Timeout('test timeout') + + req = self.make_request(path_query="/tarpit") + p = TimeoutProxy(self.conf, req, on_save=callbacks.on_save, timeout=0.00001) + p.proxy() + + self.assertEqual(p.res.status, http.HTTPStatus.GATEWAY_TIMEOUT) + self.assertIn("application/json", p.res.headers["Content-Type"]) + body = json.loads(p.res.body) + self.assertEqual(body["error"], "request timed out") + + @vcr.use_cassette("fixtures/proxy_bad_request.yaml") + def test_bad_request(self): + """ + Test handling of "400 Bad Request" from the server. + """ + req = self.make_request(path_query="/bad") + p = proxy.Proxy(self.conf, req, on_save=callbacks.on_save) + p.proxy() + + self.assertEqual(p.res.status, http.HTTPStatus.BAD_REQUEST) + self.assertIn("application/json", p.res.headers["Content-Type"]) + body = json.loads(p.res.body) + self.assertEqual(body["error"], http.HTTPStatus.BAD_REQUEST.phrase.lower()) + + @vcr.use_cassette("fixtures/proxy_unofficial_status.yaml") + def test_unofficial_status(self): + """ + Make sure we handle unofficial status codes from the server. + + Should the server ever need to return a status code not in + Python's http.HTTPStatus, the proxy should still return a + proper JSON error response with a generic error message. + """ + req = self.make_request(path_query="/teapot") + p = proxy.Proxy(self.conf, req, on_save=callbacks.on_save) + p.proxy() + + self.assertEqual(p.res.status, 418) + self.assertIn("application/json", p.res.headers["Content-Type"]) + body = json.loads(p.res.body) + self.assertEqual(body["error"], "unspecified server error") + + @vcr.use_cassette("fixtures/proxy_internal_server_error.yaml") + def test_internal_server_error(self): + """ + Test handling of "500 Internal Server Error" from the server. 
+ """ + req = self.make_request(path_query="/crash") + p = proxy.Proxy(self.conf, req, on_save=callbacks.on_save) + p.proxy() + + self.assertEqual(p.res.status, http.HTTPStatus.INTERNAL_SERVER_ERROR) + self.assertIn("application/json", p.res.headers["Content-Type"]) + body = json.loads(p.res.body) + self.assertEqual( + body["error"], + http.HTTPStatus.INTERNAL_SERVER_ERROR.phrase.lower() + ) + + @vcr.use_cassette("fixtures/proxy_internal_error.yaml") + def test_internal_error(self): + """ + Ensure that the proxy returns JSON despite internal errors. + """ + def bad_on_save(self, fh, res, conf): + raise Exception("test internal proxy error") + + req = self.make_request() + p = proxy.Proxy(self.conf, req, on_save=bad_on_save) + p.proxy() + + self.assertEqual(p.res.status, http.HTTPStatus.INTERNAL_SERVER_ERROR) + self.assertIn("application/json", p.res.headers["Content-Type"]) + body = json.loads(p.res.body) + self.assertEqual(body["error"], "internal proxy error") From e2e9bf3ebed4a4803d45d984ad1466cf47419a07 Mon Sep 17 00:00:00 2001 From: John Hensley Date: Wed, 18 Dec 2019 10:45:04 -0500 Subject: [PATCH 159/352] Address review Give test_main.test_input_headers its own fixture. Use "if not callback" instead of "if callback is None". 
--- fixtures/main_input_headers.yaml | 94 ++++++++++++++++++++++++++++++++ securedrop_proxy/main.py | 2 +- securedrop_proxy/proxy.py | 2 +- tests/test_main.py | 2 +- 4 files changed, 97 insertions(+), 3 deletions(-) create mode 100644 fixtures/main_input_headers.yaml diff --git a/fixtures/main_input_headers.yaml b/fixtures/main_input_headers.yaml new file mode 100644 index 000000000..606bfa7ad --- /dev/null +++ b/fixtures/main_input_headers.yaml @@ -0,0 +1,94 @@ +interactions: +- request: + body: null + headers: + X-Test-Header: + - th + method: GET + uri: https://jsonplaceholder.typicode.com/posts?userId=1 + response: + body: + string: "[\n {\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"sunt aut\ + \ facere repellat provident occaecati excepturi optio reprehenderit\",\n \ + \ \"body\": \"quia et suscipit\\nsuscipit recusandae consequuntur expedita\ + \ et cum\\nreprehenderit molestiae ut ut quas totam\\nnostrum rerum est autem\ + \ sunt rem eveniet architecto\"\n },\n {\n \"userId\": 1,\n \"id\"\ + : 2,\n \"title\": \"qui est esse\",\n \"body\": \"est rerum tempore\ + \ vitae\\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\\nfugiat\ + \ blanditiis voluptate porro vel nihil molestiae ut reiciendis\\nqui aperiam\ + \ non debitis possimus qui neque nisi nulla\"\n },\n {\n \"userId\":\ + \ 1,\n \"id\": 3,\n \"title\": \"ea molestias quasi exercitationem repellat\ + \ qui ipsa sit aut\",\n \"body\": \"et iusto sed quo iure\\nvoluptatem\ + \ occaecati omnis eligendi aut ad\\nvoluptatem doloribus vel accusantium quis\ + \ pariatur\\nmolestiae porro eius odio et labore et velit aut\"\n },\n {\n\ + \ \"userId\": 1,\n \"id\": 4,\n \"title\": \"eum et est occaecati\"\ + ,\n \"body\": \"ullam et saepe reiciendis voluptatem adipisci\\nsit amet\ + \ autem assumenda provident rerum culpa\\nquis hic commodi nesciunt rem tenetur\ + \ doloremque ipsam iure\\nquis sunt voluptatem rerum illo velit\"\n },\n\ + \ {\n \"userId\": 1,\n \"id\": 5,\n \"title\": \"nesciunt 
quas odio\"\ + ,\n \"body\": \"repudiandae veniam quaerat sunt sed\\nalias aut fugiat\ + \ sit autem sed est\\nvoluptatem omnis possimus esse voluptatibus quis\\nest\ + \ aut tenetur dolor neque\"\n },\n {\n \"userId\": 1,\n \"id\": 6,\n\ + \ \"title\": \"dolorem eum magni eos aperiam quia\",\n \"body\": \"\ + ut aspernatur corporis harum nihil quis provident sequi\\nmollitia nobis aliquid\ + \ molestiae\\nperspiciatis et ea nemo ab reprehenderit accusantium quas\\\ + nvoluptate dolores velit et doloremque molestiae\"\n },\n {\n \"userId\"\ + : 1,\n \"id\": 7,\n \"title\": \"magnam facilis autem\",\n \"body\"\ + : \"dolore placeat quibusdam ea quo vitae\\nmagni quis enim qui quis quo nemo\ + \ aut saepe\\nquidem repellat excepturi ut quia\\nsunt ut sequi eos ea sed\ + \ quas\"\n },\n {\n \"userId\": 1,\n \"id\": 8,\n \"title\": \"\ + dolorem dolore est ipsam\",\n \"body\": \"dignissimos aperiam dolorem qui\ + \ eum\\nfacilis quibusdam animi sint suscipit qui sint possimus cum\\nquaerat\ + \ magni maiores excepturi\\nipsam ut commodi dolor voluptatum modi aut vitae\"\ + \n },\n {\n \"userId\": 1,\n \"id\": 9,\n \"title\": \"nesciunt\ + \ iure omnis dolorem tempora et accusantium\",\n \"body\": \"consectetur\ + \ animi nesciunt iure dolore\\nenim quia ad\\nveniam autem ut quam aut nobis\\\ + net est aut quod aut provident voluptas autem voluptas\"\n },\n {\n \"\ + userId\": 1,\n \"id\": 10,\n \"title\": \"optio molestias id quia eum\"\ + ,\n \"body\": \"quo et expedita modi cum officia vel magni\\ndoloribus\ + \ qui repudiandae\\nvero nisi sit\\nquos veniam quod sed accusamus veritatis\ + \ error\"\n }\n]" + headers: + Access-Control-Allow-Credentials: + - 'true' + Age: + - '1789' + CF-Cache-Status: + - HIT + CF-RAY: + - 54722d954de5e0ea-IAD + Cache-Control: + - max-age=14400 + Connection: + - keep-alive + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 18 Dec 2019 15:30:26 GMT + Etag: + - W/"aa6-j2NSH739l9uq40OywFMn7Y0C/iY" + Expect-CT: + - 
max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct" + Expires: + - '-1' + Pragma: + - no-cache + Server: + - cloudflare + Set-Cookie: + - __cfduid=d21e159ad4987bc5ba9d781aed2f9db5c1576683026; expires=Fri, 17-Jan-20 + 15:30:26 GMT; path=/; domain=.typicode.com; HttpOnly; SameSite=Lax + Transfer-Encoding: + - chunked + Vary: + - Origin, Accept-Encoding + Via: + - 1.1 vegur + X-Content-Type-Options: + - nosniff + X-Powered-By: + - Express + status: + code: 200 + message: OK +version: 1 diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py index 8986449f4..e67f158ce 100644 --- a/securedrop_proxy/main.py +++ b/securedrop_proxy/main.py @@ -39,6 +39,6 @@ def __main__(incoming, p): req.body = client_req['body'] p.req = req - if p.on_save is None: + if not p.on_save: p.on_save = callbacks.on_save p.proxy() diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 59a9a3af5..dcdcbc095 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -135,7 +135,7 @@ def handle_response(self): def proxy(self): try: - if self.on_save is None: + if not self.on_save: self.simple_error( http.HTTPStatus.BAD_REQUEST, "Request on_save callback is not set." 
) diff --git a/tests/test_main.py b/tests/test_main.py index 1c4946146..05595187c 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -124,7 +124,7 @@ def on_done(res): with self.assertRaises(SystemExit): main.__main__(test_input_json, p) - @vcr.use_cassette('fixtures/main_json_response.yaml') + @vcr.use_cassette('fixtures/main_input_headers.yaml') def test_input_headers(self): test_input = { "method": "GET", From 8cb672b3af7f8f92e654e5f665c6216c3955bebf Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 3 Jan 2020 14:34:33 +0530 Subject: [PATCH 160/352] Fixes lint issues in test files --- tests/test_entrypoint.py | 29 ++++++++++++----------- tests/test_main.py | 50 ++++++++++++++++++++++------------------ 2 files changed, 42 insertions(+), 37 deletions(-) diff --git a/tests/test_entrypoint.py b/tests/test_entrypoint.py index 13b1d0425..8a5b0eaf2 100644 --- a/tests/test_entrypoint.py +++ b/tests/test_entrypoint.py @@ -3,7 +3,6 @@ import io import json import os -import sys import tempfile import unittest.mock @@ -33,7 +32,9 @@ def test_missing_config(self): output = None with unittest.mock.patch( "sys.argv", new_callable=lambda: ["sd-proxy", config_path] - ) as mock_argv, unittest.mock.patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: + ) as mock_argv, unittest.mock.patch( # noqa: F841 + "sys.stdout", new_callable=io.StringIO + ) as mock_stdout: with self.assertRaises(SystemExit), sdhome(): entrypoint.start() output = mock_stdout.getvalue() @@ -52,7 +53,9 @@ def test_unwritable_log_folder(self): output = None with sdhome() as home: os.chmod(home, 0o0444) - with unittest.mock.patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: + with unittest.mock.patch( + "sys.stdout", new_callable=io.StringIO + ) as mock_stdout: with self.assertRaises(SystemExit): entrypoint.start() output = mock_stdout.getvalue() @@ -64,10 +67,10 @@ def test_unwritable_log_folder(self): self.assertIn("Permission denied: ", body["error"]) def 
test_wrong_number_of_arguments(self): - with sdhome() as home: + with sdhome() as home: # noqa: F841 with unittest.mock.patch( "sys.argv", new_callable=lambda: ["sd-proxy"] - ) as mock_argv, unittest.mock.patch( + ) as mock_argv, unittest.mock.patch( # noqa: F841 "sys.stdout", new_callable=io.StringIO ) as mock_stdout: with self.assertRaises(SystemExit): @@ -85,23 +88,21 @@ def test_empty_input(self): config_path = "tests/files/valid-config.yaml" self.assertTrue(os.path.exists(config_path)) - with sdhome() as home: + with sdhome() as home: # noqa: F841 with unittest.mock.patch( "sys.stdin", new_callable=lambda: io.StringIO("") - ) as mock_stdin, unittest.mock.patch( + ) as mock_stdin, unittest.mock.patch( # noqa: F841 "sys.stdout", new_callable=io.StringIO ) as mock_stdout, unittest.mock.patch( "sys.argv", new_callable=lambda: ["sd-proxy", config_path] - ) as mock_argv: + ) as mock_argv: # noqa: F841 entrypoint.start() output = mock_stdout.getvalue() response = json.loads(output) self.assertEqual(response["status"], http.HTTPStatus.BAD_REQUEST) body = json.loads(response["body"]) - self.assertEqual( - body["error"], "Invalid JSON in request" - ) + self.assertEqual(body["error"], "Invalid JSON in request") @vcr.use_cassette("fixtures/main_json_response.yaml") def test_json_response(self): @@ -114,13 +115,13 @@ def test_json_response(self): } output = None - with sdhome() as home, unittest.mock.patch( + with sdhome() as home, unittest.mock.patch( # noqa: F841 "sys.stdin", new_callable=lambda: io.StringIO(json.dumps(test_input)) - ) as mock_stding, unittest.mock.patch( + ) as mock_stding, unittest.mock.patch( # noqa: F841 "sys.stdout", new_callable=io.StringIO ) as mock_stdout, unittest.mock.patch( "sys.argv", new_callable=lambda: ["sd-proxy", config_path] - ) as mock_argv: + ) as mock_argv: # noqa: F841 entrypoint.start() output = mock_stdout.getvalue() diff --git a/tests/test_main.py b/tests/test_main.py index 05595187c..c4e10e230 100644 --- a/tests/test_main.py +++ 
b/tests/test_main.py @@ -16,20 +16,20 @@ class TestMain(unittest.TestCase): def setUp(self): self.conf = config.Conf() - self.conf.host = 'jsonplaceholder.typicode.com' - self.conf.scheme = 'https' + self.conf.host = "jsonplaceholder.typicode.com" + self.conf.scheme = "https" self.conf.port = 443 self.conf.dev = True - @vcr.use_cassette('fixtures/main_json_response.yaml') + @vcr.use_cassette("fixtures/main_json_response.yaml") def test_json_response(self): test_input_json = """{ "method": "GET", "path_query": "/posts?userId=1" }""" req = proxy.Req() - req.method = 'GET' - req.path_query = '' - req.headers = {'Accept': 'application/json'} + req.method = "GET" + req.path_query = "" + req.headers = {"Accept": "application/json"} # Use custom callbacks def on_save(res, fh, conf): @@ -51,10 +51,10 @@ def on_done(res): sys.stdout = saved_stdout response = json.loads(output) - for item in json.loads(response['body']): - self.assertEqual(item['userId'], 1) + for item in json.loads(response["body"]): + self.assertEqual(item["userId"], 1) - @vcr.use_cassette('fixtures/main_non_json_response.yaml') + @vcr.use_cassette("fixtures/main_non_json_response.yaml") def test_non_json_response(self): test_input_json = """{ "method": "GET", "path_query": "" }""" @@ -64,9 +64,9 @@ def on_save(fh, res, conf): subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] - res.headers['Content-Type'] = 'application/json' - res.body = json.dumps({'filename': self.fn}) + res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] + res.headers["Content-Type"] = "application/json" + res.body = json.dumps({"filename": self.fn}) self.p = proxy.Proxy(self.conf, proxy.Req(), on_save) @@ -80,10 +80,10 @@ def on_save(fh, res, conf): sys.stdout = saved_stdout response = json.loads(output) - self.assertEqual(response['status'], 200) + self.assertEqual(response["status"], 200) # The proxy should have created a filename in the 
response body - self.assertIn('filename', response['body']) + self.assertIn("filename", response["body"]) # The file should not be empty with open("/tmp/{}".format(self.fn)) as f: @@ -100,7 +100,7 @@ def on_save(fh, res, conf): def on_done(res): res = res.__dict__ - self.assertEqual(res['status'], 400) + self.assertEqual(res["status"], 400) sys.exit(1) p = proxy.Proxy(self.conf, proxy.Req(), on_save, on_done) @@ -116,20 +116,20 @@ def on_save(fh, res, conf): def on_done(res): res = res.__dict__ - self.assertEqual(res['status'], 400) - self.assertEqual(res['body'], '{"error": "Missing keys in request"}') + self.assertEqual(res["status"], 400) + self.assertEqual(res["body"], '{"error": "Missing keys in request"}') sys.exit(1) p = proxy.Proxy(self.conf, proxy.Req(), on_save, on_done) with self.assertRaises(SystemExit): main.__main__(test_input_json, p) - @vcr.use_cassette('fixtures/main_input_headers.yaml') + @vcr.use_cassette("fixtures/main_input_headers.yaml") def test_input_headers(self): test_input = { "method": "GET", "path_query": "/posts?userId=1", - "headers": { "X-Test-Header": "th" } + "headers": {"X-Test-Header": "th"}, } def on_save(fh, res, conf): @@ -139,12 +139,12 @@ def on_save(fh, res, conf): main.__main__(json.dumps(test_input), p) self.assertEqual(p.req.headers, test_input["headers"]) - @vcr.use_cassette('fixtures/main_input_body.yaml') + @vcr.use_cassette("fixtures/main_input_body.yaml") def test_input_body(self): test_input = { "method": "POST", "path_query": "/posts", - "body": { "id": 42, "title": "test" } + "body": {"id": 42, "title": "test"}, } def on_save(fh, res, conf): @@ -154,7 +154,7 @@ def on_save(fh, res, conf): main.__main__(json.dumps(test_input), p) self.assertEqual(p.req.body, test_input["body"]) - @vcr.use_cassette('fixtures/main_non_json_response.yaml') + @vcr.use_cassette("fixtures/main_non_json_response.yaml") def test_default_callbacks(self): test_input = { "method": "GET", @@ -162,7 +162,11 @@ def test_default_callbacks(self): 
} p = proxy.Proxy(self.conf, proxy.Req()) - with unittest.mock.patch("securedrop_proxy.callbacks.on_done") as on_done, unittest.mock.patch("securedrop_proxy.callbacks.on_save") as on_save: + with unittest.mock.patch( + "securedrop_proxy.callbacks.on_done" + ) as on_done, unittest.mock.patch( + "securedrop_proxy.callbacks.on_save" + ) as on_save: main.__main__(json.dumps(test_input), p) self.assertEqual(on_save.call_count, 1) self.assertEqual(on_done.call_count, 1) From 97eed4b5341c27e37e395885c06c3f9380239efa Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 3 Jan 2020 14:43:00 +0530 Subject: [PATCH 161/352] Fixes mypy error on redefining attribute --- securedrop_proxy/proxy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index dcdcbc095..733d2ed78 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -35,14 +35,14 @@ def __init__(self, conf=None, req=Req(), on_save=None, on_done=None, timeout: fl self.req = req self.res = None self.on_save = on_save - if on_done is not None: + if on_done: self.on_done = on_done self.timeout = float(timeout) if timeout else 10 self._prepared_request = None - def on_done(self, res): + def on_done(self, res): # type: ignore callbacks.on_done(res) @staticmethod From ea20b684af146e817b449872cf5a48eee271ad4f Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 7 Jan 2020 16:22:01 +0530 Subject: [PATCH 162/352] Restructures the code base with more object methods proxy.py Now, Proxy class gets a must conf_path argument, and it creates the inital `conf` attribute from that. Proxy also has default on_save and on_done and err_on_done methods. def handle_response(self) method has a `assert self.res` to mark that res is populated before this call. This is for mypy main.py Mostly has changes from black entrypoint.py No need for fancy dynamic err_call_back, the proxy object will call self.err_call_back if any issue in reading configuration. 
The test cases now have their own configuration files to create the proxy object. Also, to do proper dynamic attachment of any method of Proxy class we are using https://docs.python.org/3/library/types.html#types.MethodType so that our own on_save or on_done or err_on_done will be called during tests. --- securedrop_proxy/callbacks.py | 44 --- securedrop_proxy/config.py | 58 ---- securedrop_proxy/entrypoint.py | 52 ++-- securedrop_proxy/main.py | 33 +-- securedrop_proxy/proxy.py | 165 ++++++++++-- tests/files/badgateway-config.yaml | 5 + tests/files/invalid-config.yaml | 5 + tests/files/local-config.yaml | 5 + tests/test_callbacks.py | 129 --------- tests/test_config.py | 96 ------- tests/test_main.py | 96 ++++--- tests/test_proxy.py | 412 +++++++++++++++++++++-------- 12 files changed, 546 insertions(+), 554 deletions(-) delete mode 100644 securedrop_proxy/callbacks.py delete mode 100644 securedrop_proxy/config.py create mode 100644 tests/files/badgateway-config.yaml create mode 100644 tests/files/invalid-config.yaml create mode 100644 tests/files/local-config.yaml delete mode 100644 tests/test_callbacks.py delete mode 100644 tests/test_config.py diff --git a/securedrop_proxy/callbacks.py b/securedrop_proxy/callbacks.py deleted file mode 100644 index 0e890d21e..000000000 --- a/securedrop_proxy/callbacks.py +++ /dev/null @@ -1,44 +0,0 @@ -import os -import subprocess -import sys -import json -import tempfile -import uuid - - -def err_on_done(res): - print(json.dumps(res.__dict__)) - sys.exit(1) - - -# callback for handling non-JSON content. in production-like -# environments, we want to call `qvm-move-to-vm` (and expressly not -# `qvm-move`, since we want to include the destination VM name) to -# move the content to the target VM. for development and testing, we -# keep the file on the local VM. -# -# In any case, this callback mutates the given result object (in -# `res`) to include the name of the new file, or to indicate errors. 
-def on_save(fh, res, conf): - fn = str(uuid.uuid4()) - - try: - with tempfile.TemporaryDirectory() as tmpdir: - tmpfile = os.path.join(os.path.abspath(tmpdir), fn) - subprocess.run(["cp", fh.name, tmpfile]) - if conf.dev is not True: - subprocess.run(["qvm-move-to-vm", conf.target_vm, tmpfile]) - except Exception: - res.status = 500 - res.headers['Content-Type'] = 'application/json' - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] - res.body = json.dumps({"error": "Unhandled error while handling non-JSON content, sorry"}) - return - - res.headers['Content-Type'] = 'application/json' - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] - res.body = json.dumps({'filename': fn}) - - -def on_done(res): - print(json.dumps(res.__dict__)) diff --git a/securedrop_proxy/config.py b/securedrop_proxy/config.py deleted file mode 100644 index d82cdb459..000000000 --- a/securedrop_proxy/config.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -import yaml - - -class Conf: - scheme = '' - host = '' - port = 0 - dev = False - - -def read_conf(conf_path, p): - - if not os.path.isfile(conf_path): - p.simple_error(500, 'Configuration file does not exist at {}'.format(conf_path)) - p.on_done(p.res) - - try: - fh = open(conf_path, 'r') - conf_in = yaml.safe_load(fh) - except yaml.YAMLError: - p.simple_error( - 500, "YAML syntax error while reading configuration file {}".format(conf_path) - ) - p.on_done(p.res) - except Exception: - p.simple_error( - 500, "Error while opening or reading configuration file {}".format(conf_path) - ) - p.on_done(p.res) - - req_conf_keys = set(('host', 'scheme', 'port')) - missing_keys = req_conf_keys - set(conf_in.keys()) - if len(missing_keys) > 0: - p.simple_error(500, 'Configuration file missing required keys: {}'.format(missing_keys)) - p.on_done(p.res) - - c = Conf() - c.host = conf_in['host'] - c.scheme = conf_in['scheme'] - c.port = conf_in['port'] - - if 'dev' in conf_in and conf_in['dev'] is True: - c.dev = True - 
else: - if "target_vm" not in conf_in: - p.simple_error( - 500, - ( - "Configuration file missing `target_vm` key, which is required " - "when not in development mode" - ), - ) - p.on_done(p.res) - - c.target_vm = conf_in['target_vm'] - - return c diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index fb0a8c1c0..cee130f17 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -14,76 +14,68 @@ from logging.handlers import TimedRotatingFileHandler -from securedrop_proxy import callbacks -from securedrop_proxy import config from securedrop_proxy import main from securedrop_proxy import proxy from securedrop_proxy.version import version DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_proxy") -LOGLEVEL = os.environ.get('LOGLEVEL', 'info').upper() +LOGLEVEL = os.environ.get("LOGLEVEL", "info").upper() -def start(): - ''' +def start() -> None: + """ Set up a new proxy object with an error handler, configuration that we read from argv[1], and the original user request from STDIN. - ''' + """ try: configure_logging() - logging.debug('Starting SecureDrop Proxy {}'.format(version)) - - # a fresh, new proxy object - p = proxy.Proxy() - - # set up an error handler early, so we can use it during - # configuration, etc - original_on_done = p.on_done - p.on_done = callbacks.err_on_done + logging.debug("Starting SecureDrop Proxy {}".format(version)) # path to config file must be at argv[1] if len(sys.argv) != 2: - raise ValueError("sd-proxy script not called with path to configuration file") + raise ValueError( + "sd-proxy script not called with path to configuration file" + ) - # read config. `read_conf` will call `p.on_done` if there is a config + # read config. `read_conf` will call `p.err_on_done` if there is a config # problem, and will return a Conf object on success. 
conf_path = sys.argv[1] - p.conf = config.read_conf(conf_path, p) + # a fresh, new proxy object + p = proxy.Proxy(conf_path=conf_path) # read user request from STDIN - incoming = [] + incoming_lines = [] for line in sys.stdin: - incoming.append(line) - incoming = "\n".join(incoming) + incoming_lines.append(line) + incoming = "\n".join(incoming_lines) - p.on_done = original_on_done main.__main__(incoming, p) except Exception as e: response = { "status": http.HTTPStatus.INTERNAL_SERVER_ERROR, - "body": json.dumps({ - "error": str(e), - }) + "body": json.dumps({"error": str(e)}), } print(json.dumps(response)) sys.exit(1) def configure_logging() -> None: - ''' + """ All logging related settings are set up by this function. - ''' + """ home = os.getenv("SECUREDROP_HOME", DEFAULT_HOME) - log_folder = os.path.join(home, 'logs') + log_folder = os.path.join(home, "logs") if not os.path.exists(log_folder): os.makedirs(log_folder) - log_file = os.path.join(home, 'logs', 'proxy.log') + log_file = os.path.join(home, "logs", "proxy.log") # set logging format - log_fmt = ('%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) %(levelname)s: %(message)s') + log_fmt = ( + "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) %(levelname)s: %(message)s" + ) formatter = logging.Formatter(log_fmt) # define log handlers such as for rotating log files diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py index e67f158ce..69abf48d7 100644 --- a/securedrop_proxy/main.py +++ b/securedrop_proxy/main.py @@ -1,44 +1,45 @@ import json import logging +from typing import Dict, Any -from securedrop_proxy import callbacks from securedrop_proxy import proxy +from securedrop_proxy.proxy import Proxy + logger = logging.getLogger(__name__) -def __main__(incoming, p): - ''' +def __main__(incoming: str, p: Proxy) -> None: + """ Deserialize incoming request in order to build and send a proxy request. 
- ''' - logging.debug('Creating request to be sent by proxy') + """ + logging.debug("Creating request to be sent by proxy") - client_req = None + client_req: Dict[str, Any] = {} try: client_req = json.loads(incoming) except json.decoder.JSONDecodeError as e: logging.error(e) - p.simple_error(400, 'Invalid JSON in request') - p.on_done(p.res) + p.simple_error(400, "Invalid JSON in request") + p.on_done() return req = proxy.Req() try: - req.method = client_req['method'] - req.path_query = client_req['path_query'] + req.method = client_req["method"] + req.path_query = client_req["path_query"] except KeyError as e: logging.error(e) - p.simple_error(400, 'Missing keys in request') - p.on_done(p.res) + p.simple_error(400, "Missing keys in request") + p.on_done() if "headers" in client_req: - req.headers = client_req['headers'] + req.headers = client_req["headers"] if "body" in client_req: - req.body = client_req['body'] + req.body = client_req["body"] p.req = req - if not p.on_save: - p.on_save = callbacks.on_save + p.proxy() diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 733d2ed78..ab295548b 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -6,53 +6,158 @@ import tempfile import werkzeug +import os +import subprocess +import sys +import uuid +import yaml +from typing import Dict, Optional + import securedrop_proxy.version as version -from securedrop_proxy import callbacks +from tempfile import _TemporaryFileWrapper # type: ignore logger = logging.getLogger(__name__) +class Conf: + scheme = "" + host = "" + port = 0 + dev = False + target_vm = "" + + class Req: - def __init__(self): + def __init__(self) -> None: self.method = "" self.path_query = "" - self.body = None - self.headers = {} + self.body = "" + self.headers: Dict[str, str] = {} class Response: - def __init__(self, status): + def __init__(self, status: int) -> None: self.status = status - self.body = None - self.headers = {} + self.body = "" + self.headers: 
Dict[str, str] = {} self.version = version.version class Proxy: - def __init__(self, conf=None, req=Req(), on_save=None, on_done=None, timeout: float = None): - self.conf = conf - self.req = req - self.res = None - self.on_save = on_save - if on_done: - self.on_done = on_done + def __init__( + self, conf_path: str, req: Req = Req(), timeout: float = None, + ) -> None: + # The configuration path for Proxy is a must. + self.read_conf(conf_path) + self.req = req + self.res: Optional[Response] = None self.timeout = float(timeout) if timeout else 10 self._prepared_request = None - def on_done(self, res): # type: ignore - callbacks.on_done(res) + def on_done(self) -> None: + print(json.dumps(self.res.__dict__)) @staticmethod - def valid_path(path): + def valid_path(path: str) -> bool: u = furl.furl(path) if u.host is not None: return False return True + def err_on_done(self): + print(json.dumps(self.res.__dict__)) + sys.exit(1) + + def read_conf(self, conf_path: str) -> None: + + if not os.path.isfile(conf_path): + self.simple_error( + 500, "Configuration file does not exist at {}".format(conf_path) + ) + self.err_on_done() + + try: + with open(conf_path) as fh: + conf_in = yaml.safe_load(fh) + except yaml.YAMLError: + self.simple_error( + 500, + "YAML syntax error while reading configuration file {}".format( + conf_path + ), + ) + self.err_on_done() + except Exception: + self.simple_error( + 500, + "Error while opening or reading configuration file {}".format( + conf_path + ), + ) + self.err_on_done() + + req_conf_keys = set(("host", "scheme", "port")) + missing_keys = req_conf_keys - set(conf_in.keys()) + if len(missing_keys) > 0: + self.simple_error( + 500, "Configuration file missing required keys: {}".format(missing_keys) + ) + self.err_on_done() + + self.conf = Conf() + self.conf.host = conf_in["host"] + self.conf.scheme = conf_in["scheme"] + self.conf.port = conf_in["port"] + + if "dev" in conf_in and conf_in["dev"]: + self.conf.dev = True + else: + if "target_vm" 
not in conf_in: + self.simple_error( + 500, + ( + "Configuration file missing `target_vm` key, which is required " + "when not in development mode" + ), + ) + self.err_on_done() + + self.conf.target_vm = conf_in["target_vm"] + + # callback for handling non-JSON content. in production-like + # environments, we want to call `qvm-move-to-vm` (and expressly not + # `qvm-move`, since we want to include the destination VM name) to + # move the content to the target VM. for development and testing, we + # keep the file on the local VM. + # + # In any case, this callback mutates the given result object (in + # `res`) to include the name of the new file, or to indicate errors. + def on_save(self, fh: _TemporaryFileWrapper, res: Response) -> None: + fn = str(uuid.uuid4()) + + try: + with tempfile.TemporaryDirectory() as tmpdir: + tmpfile = os.path.join(os.path.abspath(tmpdir), fn) + subprocess.run(["cp", fh.name, tmpfile]) + if self.conf.dev is not True: + subprocess.run(["qvm-move-to-vm", self.conf.target_vm, tmpfile]) + except Exception: + res.status = 500 + res.headers["Content-Type"] = "application/json" + res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] + res.body = json.dumps( + {"error": "Unhandled error while handling non-JSON content, sorry"} + ) + return + + res.headers["Content-Type"] = "application/json" + res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] + res.body = json.dumps({"filename": fn}) + def simple_error(self, status, err): res = Response(status) res.body = json.dumps({"error": err}) @@ -60,7 +165,7 @@ def simple_error(self, status, err): self.res = res - def prep_request(self): + def prep_request(self) -> None: scheme = self.conf.scheme host = self.conf.host @@ -83,14 +188,13 @@ def prep_request(self): url.path.normalize() preq = requests.Request(method, url.url) - preq.stream = True preq.headers = self.req.headers preq.data = self.req.body prep = preq.prepare() self._prepared_request = prep - def 
handle_json_response(self): + def handle_json_response(self) -> None: res = Response(self._presp.status_code) @@ -114,11 +218,11 @@ def handle_non_json_response(self): res.headers = self._presp.headers - self.on_save(fh, res, self.conf) + self.on_save(fh, res) self.res = res - def handle_response(self): + def handle_response(self) -> None: logger.debug("Handling response") ctype = werkzeug.http.parse_options_header(self._presp.headers["content-type"]) @@ -128,11 +232,14 @@ def handle_response(self): else: self.handle_non_json_response() + # https://mypy.readthedocs.io/en/latest/kinds_of_types.html#union-types + # To make sure that mypy knows the type of self.res is not None. + assert self.res # headers is a Requests class which doesn't JSON serialize. # coerce it into a normal dict so it will self.res.headers = dict(self.res.headers) - def proxy(self): + def proxy(self) -> None: try: if not self.on_save: @@ -162,13 +269,15 @@ def proxy(self): requests.exceptions.TooManyRedirects, ) as e: logger.error(e) - self.simple_error(http.HTTPStatus.BAD_GATEWAY, "could not connect to server") + self.simple_error( + http.HTTPStatus.BAD_GATEWAY, "could not connect to server" + ) except requests.exceptions.HTTPError as e: logger.error(e) try: self.simple_error( e.response.status_code, - http.HTTPStatus(e.response.status_code).phrase.lower() + http.HTTPStatus(e.response.status_code).phrase.lower(), ) except ValueError: # Return a generic error message when the response @@ -176,5 +285,7 @@ def proxy(self): self.simple_error(e.response.status_code, "unspecified server error") except Exception as e: logger.error(e) - self.simple_error(http.HTTPStatus.INTERNAL_SERVER_ERROR, "internal proxy error") - self.on_done(self.res) + self.simple_error( + http.HTTPStatus.INTERNAL_SERVER_ERROR, "internal proxy error" + ) + self.on_done() diff --git a/tests/files/badgateway-config.yaml b/tests/files/badgateway-config.yaml new file mode 100644 index 000000000..89396443c --- /dev/null +++ 
b/tests/files/badgateway-config.yaml @@ -0,0 +1,5 @@ +host: sdproxytest.local +scheme: https +port: 8000 +target_vm: compost +dev: False diff --git a/tests/files/invalid-config.yaml b/tests/files/invalid-config.yaml new file mode 100644 index 000000000..1338eefc3 --- /dev/null +++ b/tests/files/invalid-config.yaml @@ -0,0 +1,5 @@ +host: jsonplaceholder.typicode.com +scheme: https://http +port: 443 +target_vm: compost +dev: False diff --git a/tests/files/local-config.yaml b/tests/files/local-config.yaml new file mode 100644 index 000000000..7bd20fc78 --- /dev/null +++ b/tests/files/local-config.yaml @@ -0,0 +1,5 @@ +host: localhost +scheme: http +port: 8000 +target_vm: compost +dev: False diff --git a/tests/test_callbacks.py b/tests/test_callbacks.py deleted file mode 100644 index 83c861ba6..000000000 --- a/tests/test_callbacks.py +++ /dev/null @@ -1,129 +0,0 @@ -from io import StringIO -import json -import sys -import tempfile -import unittest -from unittest.mock import patch - -import vcr - -from securedrop_proxy import callbacks -from securedrop_proxy import config -from securedrop_proxy import proxy - - -class TestCallbacks(unittest.TestCase): - def setUp(self): - self.res = proxy.Response(status=200) - self.res.body = "babbys request" - - self.conf = config.Conf() - self.conf.host = 'jsonplaceholder.typicode.com' - self.conf.scheme = 'https' - self.conf.port = 443 - self.conf.dev = True - - def test_err_on_done(self): - saved_stdout = sys.stdout - try: - out = StringIO() - sys.stdout = out - with self.assertRaises(SystemExit): - callbacks.err_on_done(self.res) - output = out.getvalue().strip() - finally: - sys.stdout = saved_stdout - - response = json.loads(output) - self.assertEqual(response['status'], 200) - self.assertEqual(response['body'], 'babbys request') - - def test_on_done(self): - saved_stdout = sys.stdout - try: - out = StringIO() - sys.stdout = out - callbacks.on_done(self.res) - output = out.getvalue().strip() - finally: - sys.stdout = 
saved_stdout - - response = json.loads(output) - self.assertEqual(response['status'], 200) - self.assertEqual(response['body'], 'babbys request') - - def test_on_save_500_unhandled_error(self): - fh = tempfile.NamedTemporaryFile() - - # Let's generate an error and ensure that an appropriate response - # is sent back to the user - with patch("subprocess.run", side_effect=IOError): - callbacks.on_save(fh, self.res, self.conf) - - self.assertEqual(self.res.status, 500) - self.assertEqual(self.res.headers['Content-Type'], - 'application/json') - self.assertEqual(self.res.headers['X-Origin-Content-Type'], - 'application/json') - self.assertIn('Unhandled error', self.res.body) - - def test_on_save_200_success(self): - fh = tempfile.NamedTemporaryFile() - - callbacks.on_save(fh, self.res, self.conf) - - self.assertEqual(self.res.headers['Content-Type'], - 'application/json') - self.assertEqual(self.res.headers['X-Origin-Content-Type'], - 'application/json') - self.assertEqual(self.res.status, 200) - self.assertIn('filename', self.res.body) - - @vcr.use_cassette("fixtures/proxy_callbacks.yaml") - def test_custom_callbacks(self): - """ - Test the handlers in a real proxy request. - """ - conf = config.Conf() - conf.host = 'jsonplaceholder.typicode.com' - conf.scheme = 'https' - conf.port = 443 - - req = proxy.Req() - req.method = "GET" - - on_save_addition = "added by the on_save callback\n" - on_done_addition = "added by the on_done callback\n" - - def on_save(fh, res, conf): - res.headers['Content-Type'] = 'text/plain' - res.body = on_save_addition - - def on_done(res): - res.headers['Content-Type'] = 'text/plain' - res.body += on_done_addition - - p = proxy.Proxy(self.conf, req, on_save=on_save, on_done=on_done) - p.proxy() - - self.assertEqual( - p.res.body, - "{}{}".format(on_save_addition, on_done_addition) - ) - - @vcr.use_cassette("fixtures/proxy_callbacks.yaml") - def test_production_on_save(self): - """ - Test on_save's production file handling. 
- """ - conf = config.Conf() - conf.host = 'jsonplaceholder.typicode.com' - conf.scheme = 'https' - conf.port = 443 - conf.dev = False - conf.target_vm = "sd-svs-dispvm" - - with patch("subprocess.run") as patched_run: - fh = tempfile.NamedTemporaryFile() - callbacks.on_save(fh, self.res, conf) - self.assertEqual(patched_run.call_args[0][0][0], "qvm-move-to-vm") diff --git a/tests/test_config.py b/tests/test_config.py deleted file mode 100644 index 0ae93eab9..000000000 --- a/tests/test_config.py +++ /dev/null @@ -1,96 +0,0 @@ -import sys -import unittest -from unittest.mock import patch - -from securedrop_proxy import proxy -from securedrop_proxy import config - - -class TestConfig(unittest.TestCase): - def setUp(self): - self.p = proxy.Proxy() - - def test_config_file_does_not_exist(self): - def err_on_done(res): - res = res.__dict__ - self.assertEqual(res['status'], 500) - self.assertIn("Configuration file does not exist", - res['body']) - self.assertEqual(res['headers']['Content-Type'], - 'application/json') - sys.exit(1) - - self.p.on_done = err_on_done - with self.assertRaises(SystemExit): - config.read_conf('not/a/real/path', self.p) - - def test_config_file_when_yaml_is_invalid(self): - def err_on_done(res): - res = res.__dict__ - self.assertEqual(res['status'], 500) - self.assertIn("YAML syntax error", res['body']) - self.assertEqual(res['headers']['Content-Type'], - 'application/json') - sys.exit(1) - - self.p.on_done = err_on_done - with self.assertRaises(SystemExit): - config.read_conf('tests/files/invalid_yaml.yaml', self.p) - - def test_config_file_open_generic_exception(self): - def err_on_done(res): - res = res.__dict__ - self.assertEqual(res['status'], 500) - self.assertEqual(res['headers']['Content-Type'], - 'application/json') - sys.exit(1) - - self.p.on_done = err_on_done - - with self.assertRaises(SystemExit): - # Patching open so that we can simulate a non-YAML error - # (e.g. 
permissions) - with patch("builtins.open", side_effect=IOError): - config.read_conf('tests/files/valid-config.yaml', self.p) - - def test_config_has_valid_keys(self): - c = config.read_conf('tests/files/valid-config.yaml', self.p) - - # Verify we have a valid Conf object - self.assertEqual(c.host, 'jsonplaceholder.typicode.com') - self.assertEqual(c.port, 443) - self.assertFalse(c.dev) - self.assertEqual(c.scheme, 'https') - self.assertEqual(c.target_vm, 'compost') - - def test_config_500_when_missing_a_required_key(self): - def err_on_done(res): - res = res.__dict__ - self.assertEqual(res['status'], 500) - self.assertIn("missing required keys", res['body']) - self.assertEqual(res['headers']['Content-Type'], - 'application/json') - sys.exit(1) - - self.p.on_done = err_on_done - - with self.assertRaises(SystemExit): - config.read_conf('tests/files/missing-key.yaml', self.p) - - def test_config_500_when_missing_target_vm(self): - def err_on_done(res): - res = res.__dict__ - self.assertEqual(res['status'], 500) - self.assertIn("missing `target_vm` key", res['body']) - self.assertEqual(res['headers']['Content-Type'], - 'application/json') - sys.exit(1) - - self.p.on_done = err_on_done - - with self.assertRaises(SystemExit): - config.read_conf('tests/files/missing-target-vm.yaml', self.p) - - def test_dev_config(self): - c = config.read_conf('tests/files/dev-config.yaml', self.p) - self.assertTrue(c.dev) diff --git a/tests/test_main.py b/tests/test_main.py index c4e10e230..19bc4e5a8 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -5,21 +5,17 @@ import sys import unittest import uuid +import types import vcr -from securedrop_proxy import config from securedrop_proxy import main from securedrop_proxy import proxy class TestMain(unittest.TestCase): def setUp(self): - self.conf = config.Conf() - self.conf.host = "jsonplaceholder.typicode.com" - self.conf.scheme = "https" - self.conf.port = 443 - self.conf.dev = True + self.conf_path = 
"tests/files/valid-config.yaml" @vcr.use_cassette("fixtures/main_json_response.yaml") def test_json_response(self): @@ -32,14 +28,19 @@ def test_json_response(self): req.headers = {"Accept": "application/json"} # Use custom callbacks - def on_save(res, fh, conf): + def on_save(self, fh, res): pass - def on_done(res): - self.assertEqual(res.status, http.HTTPStatus.OK) - print(json.dumps(res.__dict__)) + def on_done(self): + assert self.res.status == http.HTTPStatus.OK + print(json.dumps(self.res.__dict__)) - self.p = proxy.Proxy(self.conf, req, on_save, on_done) + self.p = proxy.Proxy(self.conf_path, req) + + # Patching on_save and on_done + + self.p.on_done = types.MethodType(on_done, self.p) + self.p.on_save = types.MethodType(on_save, self.p) saved_stdout = sys.stdout try: @@ -59,8 +60,7 @@ def test_non_json_response(self): test_input_json = """{ "method": "GET", "path_query": "" }""" - def on_save(fh, res, conf): - self.fn = str(uuid.uuid4()) + def on_save(self, fh, res): subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) @@ -68,7 +68,11 @@ def on_save(fh, res, conf): res.headers["Content-Type"] = "application/json" res.body = json.dumps({"filename": self.fn}) - self.p = proxy.Proxy(self.conf, proxy.Req(), on_save) + self.p = proxy.Proxy(self.conf_path, proxy.Req()) + + # Patching on_save to tests + self.p.on_save = types.MethodType(on_save, self.p) + self.p.fn = str(uuid.uuid4()) saved_stdout = sys.stdout try: @@ -86,7 +90,7 @@ def on_save(fh, res, conf): self.assertIn("filename", response["body"]) # The file should not be empty - with open("/tmp/{}".format(self.fn)) as f: + with open("/tmp/{}".format(self.p.fn)) as f: saved_file = f.read() # We expect HTML content in the file from the test data @@ -95,15 +99,20 @@ def on_save(fh, res, conf): def test_input_invalid_json(self): test_input_json = """"foo": "bar", "baz": "bliff" }""" - def on_save(fh, res, conf): + def on_save(self, fh, res): pass - def on_done(res): - res = res.__dict__ - 
self.assertEqual(res["status"], 400) + def on_done(self): + res = self.res.__dict__ + assert res["status"] == 400 sys.exit(1) - p = proxy.Proxy(self.conf, proxy.Req(), on_save, on_done) + p = proxy.Proxy(self.conf_path, proxy.Req()) + + # patching on_save and on_done for tests + + p.on_done = types.MethodType(on_done, p) + p.on_save = types.MethodType(on_save, p) with self.assertRaises(SystemExit): main.__main__(test_input_json, p) @@ -111,16 +120,22 @@ def on_done(res): def test_input_missing_keys(self): test_input_json = """{ "foo": "bar", "baz": "bliff" }""" - def on_save(fh, res, conf): + def on_save(self, fh, res): pass - def on_done(res): - res = res.__dict__ - self.assertEqual(res["status"], 400) - self.assertEqual(res["body"], '{"error": "Missing keys in request"}') + def on_done(self): + res = self.res.__dict__ + assert res["status"] == 400 + assert res["body"] == '{"error": "Missing keys in request"}', res sys.exit(1) - p = proxy.Proxy(self.conf, proxy.Req(), on_save, on_done) + p = proxy.Proxy(self.conf_path, proxy.Req()) + + # patching on_save and on_done for tests + + p.on_done = types.MethodType(on_done, p) + p.on_save = types.MethodType(on_save, p) + with self.assertRaises(SystemExit): main.__main__(test_input_json, p) @@ -132,10 +147,10 @@ def test_input_headers(self): "headers": {"X-Test-Header": "th"}, } - def on_save(fh, res, conf): + def on_save(self, fh, res): pass - p = proxy.Proxy(self.conf, proxy.Req(), on_save) + p = proxy.Proxy(self.conf_path, proxy.Req()) main.__main__(json.dumps(test_input), p) self.assertEqual(p.req.headers, test_input["headers"]) @@ -147,10 +162,15 @@ def test_input_body(self): "body": {"id": 42, "title": "test"}, } - def on_save(fh, res, conf): + def on_save(self, fh, res): pass - p = proxy.Proxy(self.conf, proxy.Req(), on_save) + p = proxy.Proxy(self.conf_path, proxy.Req()) + + # Patching on_save for tests + + p.on_save = types.MethodType(on_save, p) + main.__main__(json.dumps(test_input), p) 
self.assertEqual(p.req.body, test_input["body"]) @@ -161,12 +181,10 @@ def test_default_callbacks(self): "path_query": "", } - p = proxy.Proxy(self.conf, proxy.Req()) - with unittest.mock.patch( - "securedrop_proxy.callbacks.on_done" - ) as on_done, unittest.mock.patch( - "securedrop_proxy.callbacks.on_save" - ) as on_save: - main.__main__(json.dumps(test_input), p) - self.assertEqual(on_save.call_count, 1) - self.assertEqual(on_done.call_count, 1) + p = proxy.Proxy(self.conf_path, proxy.Req()) + p.on_done = unittest.mock.MagicMock() + p.on_save = unittest.mock.MagicMock() + + main.__main__(json.dumps(test_input), p) + self.assertEqual(p.on_save.call_count, 1) + self.assertEqual(p.on_done.call_count, 1) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index bc73b0589..410a0738c 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -1,184 +1,158 @@ +import sys import http import json import unittest import uuid +import types +from io import StringIO +import tempfile +from unittest.mock import patch import requests import vcr -from securedrop_proxy import callbacks from securedrop_proxy import proxy -from securedrop_proxy import config from securedrop_proxy import version class TestProxyValidConfig(unittest.TestCase): def setUp(self): - self.conf = config.Conf() - self.conf.host = 'jsonplaceholder.typicode.com' - self.conf.scheme = 'https' - self.conf.port = 443 + self.conf_path = "tests/files/valid-config.yaml" - def on_save(self, fh, res, conf): - self.fn = str(uuid.uuid4()) - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] - res.headers['Content-Type'] = 'application/json' - res.body = json.dumps({'filename': self.fn}) + def on_save(self, fh, res): + res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] + res.headers["Content-Type"] = "application/json" + res.body = json.dumps({"filename": self.fn}) def on_done(self, res): - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] - 
res.headers['Content-Type'] = 'application/json' + res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] + res.headers["Content-Type"] = "application/json" def test_version(self): req = proxy.Req() - req.method = 'GET' - req.path_query = '' - req.headers = {'Accept': 'application/json'} + req.method = "GET" + req.path_query = "" + req.headers = {"Accept": "application/json"} - p = proxy.Proxy() + p = proxy.Proxy(self.conf_path) p.proxy() self.assertEqual(p.res.version, version.version) - def test_400_if_callback_not_set(self): - req = proxy.Req() - req.method = 'GET' - req.path_query = '' - req.headers = {'Accept': 'application/json'} - - p = proxy.Proxy() - p.proxy() - - self.assertEqual(p.res.status, 400) - - @vcr.use_cassette('fixtures/basic_proxy_functionality.yaml') + @vcr.use_cassette("fixtures/basic_proxy_functionality.yaml") def test_proxy_basic_functionality(self): req = proxy.Req() - req.method = 'GET' - req.path_query = '' - req.headers = {'Accept': 'application/json'} - - p = proxy.Proxy(self.conf, req, self.on_save) + req.method = "GET" + req.path_query = "" + req.headers = {"Accept": "application/json"} + + def on_save(self, fh, res): + res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] + res.headers["Content-Type"] = "application/json" + res.body = json.dumps({"filename": self.fn}) + + p = proxy.Proxy(self.conf_path, req) + # Patching on_save for test + p.on_save = types.MethodType(on_save, p) + p.fn = str(uuid.uuid4()) p.proxy() self.assertEqual(p.res.status, 200) - self.assertEqual(p.res.body, json.dumps({'filename': self.fn})) - self.assertEqual(p.res.headers['Content-Type'], 'application/json') + self.assertEqual(p.res.body, json.dumps({"filename": p.fn})) + self.assertEqual(p.res.headers["Content-Type"], "application/json") - @vcr.use_cassette('fixtures/proxy_404.yaml') + @vcr.use_cassette("fixtures/proxy_404.yaml") def test_proxy_produces_404(self): req = proxy.Req() - req.method = 'GET' - req.path_query = 
'/notfound' - req.headers = {'Accept': 'application/json'} + req.method = "GET" + req.path_query = "/notfound" + req.headers = {"Accept": "application/json"} + + p = proxy.Proxy(self.conf_path, req) - p = proxy.Proxy(self.conf, req) - p.on_save = self.on_save - p.on_done = self.on_done p.proxy() self.assertEqual(p.res.status, 404) - self.assertEqual(p.res.headers['Content-Type'], 'application/json') + self.assertEqual(p.res.headers["Content-Type"], "application/json") - @vcr.use_cassette('fixtures/proxy_parameters.yaml') + @vcr.use_cassette("fixtures/proxy_parameters.yaml") def test_proxy_handles_query_params_gracefully(self): req = proxy.Req() - req.method = 'GET' - req.path_query = '/posts?userId=1' - req.headers = {'Accept': 'application/json'} + req.method = "GET" + req.path_query = "/posts?userId=1" + req.headers = {"Accept": "application/json"} + + p = proxy.Proxy(self.conf_path, req) - p = proxy.Proxy(self.conf, req, self.on_save) p.proxy() self.assertEqual(p.res.status, 200) - self.assertIn('application/json', p.res.headers['Content-Type']) + self.assertIn("application/json", p.res.headers["Content-Type"]) body = json.loads(p.res.body) for item in body: - self.assertEqual(item['userId'], 1) + self.assertEqual(item["userId"], 1) # No cassette needed as no network request should be sent def test_proxy_400_bad_path(self): req = proxy.Req() - req.method = 'GET' - req.path_query = 'http://badpath.lol/path' - req.headers = {'Accept': 'application/json'} + req.method = "GET" + req.path_query = "http://badpath.lol/path" + req.headers = {"Accept": "application/json"} + + p = proxy.Proxy(self.conf_path, req) - p = proxy.Proxy(self.conf, req) - p.on_save = self.on_save - p.on_done = self.on_done p.proxy() self.assertEqual(p.res.status, 400) - self.assertEqual(p.res.headers['Content-Type'], 'application/json') - self.assertIn('Path provided in request did not look valid', - p.res.body) + self.assertEqual(p.res.headers["Content-Type"], "application/json") + 
self.assertIn("Path provided in request did not look valid", p.res.body) - @vcr.use_cassette('fixtures/proxy_200_valid_path.yaml') + @vcr.use_cassette("fixtures/proxy_200_valid_path.yaml") def test_proxy_200_valid_path(self): req = proxy.Req() - req.method = 'GET' - req.path_query = '/posts/1' - req.headers = {'Accept': 'application/json'} + req.method = "GET" + req.path_query = "/posts/1" + req.headers = {"Accept": "application/json"} + + p = proxy.Proxy(self.conf_path, req) - p = proxy.Proxy(self.conf, req, self.on_save) p.proxy() self.assertEqual(p.res.status, 200) - self.assertIn('application/json', p.res.headers['Content-Type']) + self.assertIn("application/json", p.res.headers["Content-Type"]) body = json.loads(p.res.body) - self.assertEqual(body['userId'], 1) - - # No cassette needed as no network request should be sent - def test_proxy_400_no_handler(self): - req = proxy.Req() - req.method = 'GET' - req.path_query = 'http://badpath.lol/path' - req.headers = {'Accept': 'application/json'} - - p = proxy.Proxy(self.conf, req) - p.proxy() - - self.assertEqual(p.res.status, 400) - self.assertEqual(p.res.headers['Content-Type'], 'application/json') - self.assertIn('Request on_save callback is not set', - p.res.body) + self.assertEqual(body["userId"], 1) class TestProxyInvalidConfig(unittest.TestCase): def setUp(self): - self.conf = config.Conf() - self.conf.host = 'jsonplaceholder.typicode.com' - self.conf.scheme = 'https://http' # bad - self.conf.port = 443 + self.conf_path = "tests/files/invalid-config.yaml" - def on_save(self, fh, res, conf): + def on_save(self, fh, res): self.fn = str(uuid.uuid4()) - res.headers['X-Origin-Content-Type'] = res.headers['Content-Type'] - res.headers['Content-Type'] = 'application/json' - res.body = json.dumps({'filename': self.fn}) + res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] + res.headers["Content-Type"] = "application/json" + res.body = json.dumps({"filename": self.fn}) # No cassette needed as no 
network request should be sent def test_proxy_500_misconfiguration(self): req = proxy.Req() - req.method = 'GET' - req.path_query = '/posts/1' - req.headers = {'Accept': 'application/json'} + req.method = "GET" + req.path_query = "/posts/1" + req.headers = {"Accept": "application/json"} + + p = proxy.Proxy(self.conf_path, req) - p = proxy.Proxy(self.conf, req, self.on_save) p.proxy() self.assertEqual(p.res.status, 500) - self.assertEqual(p.res.headers['Content-Type'], 'application/json') - self.assertIn('Proxy error while generating URL to request', - p.res.body) + self.assertEqual(p.res.headers["Content-Type"], "application/json") + self.assertIn("Proxy error while generating URL to request", p.res.body) class TestServerErrorHandling(unittest.TestCase): def setUp(self): - self.conf = config.Conf() - self.conf.host = "localhost" - self.conf.scheme = "http" - self.conf.port = 8000 + self.conf_path = "tests/files/local-config.yaml" def make_request(self, method="GET", path_query="/", headers=None): req = proxy.Req() @@ -193,12 +167,7 @@ def test_cannot_connect(self): """ req = self.make_request() - conf = config.Conf() - conf.host = "sdproxytest.local" - conf.scheme = "https" - conf.port = 8000 - - p = proxy.Proxy(conf, req, on_save=callbacks.on_save) + p = proxy.Proxy("tests/files/badgateway-config.yaml", req) p.proxy() self.assertEqual(p.res.status, http.HTTPStatus.BAD_GATEWAY) @@ -210,6 +179,7 @@ def test_server_timeout(self): """ Test for "504 Gateway Timeout" when the server times out. """ + class TimeoutProxy(proxy.Proxy): """ Mocks a slow upstream server. @@ -218,11 +188,12 @@ class TimeoutProxy(proxy.Proxy): long. This Proxy subclass raises the exception that would cause. 
""" + def prep_request(self): - raise requests.exceptions.Timeout('test timeout') + raise requests.exceptions.Timeout("test timeout") req = self.make_request(path_query="/tarpit") - p = TimeoutProxy(self.conf, req, on_save=callbacks.on_save, timeout=0.00001) + p = TimeoutProxy(self.conf_path, req, timeout=0.00001) p.proxy() self.assertEqual(p.res.status, http.HTTPStatus.GATEWAY_TIMEOUT) @@ -236,7 +207,7 @@ def test_bad_request(self): Test handling of "400 Bad Request" from the server. """ req = self.make_request(path_query="/bad") - p = proxy.Proxy(self.conf, req, on_save=callbacks.on_save) + p = proxy.Proxy(self.conf_path, req) p.proxy() self.assertEqual(p.res.status, http.HTTPStatus.BAD_REQUEST) @@ -254,7 +225,7 @@ def test_unofficial_status(self): proper JSON error response with a generic error message. """ req = self.make_request(path_query="/teapot") - p = proxy.Proxy(self.conf, req, on_save=callbacks.on_save) + p = proxy.Proxy(self.conf_path, req) p.proxy() self.assertEqual(p.res.status, 418) @@ -268,15 +239,14 @@ def test_internal_server_error(self): Test handling of "500 Internal Server Error" from the server. """ req = self.make_request(path_query="/crash") - p = proxy.Proxy(self.conf, req, on_save=callbacks.on_save) + p = proxy.Proxy(self.conf_path, req) p.proxy() self.assertEqual(p.res.status, http.HTTPStatus.INTERNAL_SERVER_ERROR) self.assertIn("application/json", p.res.headers["Content-Type"]) body = json.loads(p.res.body) self.assertEqual( - body["error"], - http.HTTPStatus.INTERNAL_SERVER_ERROR.phrase.lower() + body["error"], http.HTTPStatus.INTERNAL_SERVER_ERROR.phrase.lower() ) @vcr.use_cassette("fixtures/proxy_internal_error.yaml") @@ -284,14 +254,226 @@ def test_internal_error(self): """ Ensure that the proxy returns JSON despite internal errors. 
""" + def bad_on_save(self, fh, res, conf): raise Exception("test internal proxy error") req = self.make_request() - p = proxy.Proxy(self.conf, req, on_save=bad_on_save) + p = proxy.Proxy(self.conf_path, req) + + # Patching on_save for tests + p.on_save = types.MethodType(bad_on_save, p) p.proxy() self.assertEqual(p.res.status, http.HTTPStatus.INTERNAL_SERVER_ERROR) self.assertIn("application/json", p.res.headers["Content-Type"]) body = json.loads(p.res.body) self.assertEqual(body["error"], "internal proxy error") + + +class TestProxyMethods(unittest.TestCase): + def setUp(self): + self.res = proxy.Response(status=200) + self.res.body = "babbys request" + + self.conf_path = "tests/files/dev-config.yaml" + + def test_err_on_done(self): + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + with self.assertRaises(SystemExit): + p = proxy.Proxy(self.conf_path) + p.res = self.res + p.err_on_done() + output = out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + response = json.loads(output) + self.assertEqual(response["status"], 200) + self.assertEqual(response["body"], "babbys request") + + def test_on_done(self): + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + p = proxy.Proxy(self.conf_path) + p.res = self.res + p.on_done() + output = out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + response = json.loads(output) + self.assertEqual(response["status"], 200) + self.assertEqual(response["body"], "babbys request") + + def test_on_save_500_unhandled_error(self): + fh = tempfile.NamedTemporaryFile() + + # Let's generate an error and ensure that an appropriate response + # is sent back to the user + with patch("subprocess.run", side_effect=IOError): + p = proxy.Proxy(self.conf_path) + p.on_save(fh, self.res) + + self.assertEqual(self.res.status, 500) + self.assertEqual(self.res.headers["Content-Type"], "application/json") + self.assertEqual(self.res.headers["X-Origin-Content-Type"], 
"application/json") + self.assertIn("Unhandled error", self.res.body) + + def test_on_save_200_success(self): + fh = tempfile.NamedTemporaryFile() + + p = proxy.Proxy(self.conf_path) + p.on_save(fh, self.res) + + self.assertEqual(self.res.headers["Content-Type"], "application/json") + self.assertEqual(self.res.headers["X-Origin-Content-Type"], "application/json") + self.assertEqual(self.res.status, 200) + self.assertIn("filename", self.res.body) + + @vcr.use_cassette("fixtures/proxy_callbacks.yaml") + def test_custom_callbacks(self): + """ + Test the handlers in a real proxy request. + """ + conf = proxy.Conf() + conf.host = "jsonplaceholder.typicode.com" + conf.scheme = "https" + conf.port = 443 + + req = proxy.Req() + req.method = "GET" + + on_save_addition = "added by the on_save callback\n" + on_done_addition = "added by the on_done callback\n" + + def on_save(self, fh, res): + res.headers["Content-Type"] = "text/plain" + res.body = on_save_addition + + def on_done(self): + self.res.headers["Content-Type"] = "text/plain" + self.res.body += on_done_addition + + p = proxy.Proxy(self.conf_path, req) + # Patching for tests + p.conf = conf + p.on_done = types.MethodType(on_done, p) + p.on_save = types.MethodType(on_save, p) + p.proxy() + + self.assertEqual(p.res.body, "{}{}".format(on_save_addition, on_done_addition)) + + @vcr.use_cassette("fixtures/proxy_callbacks.yaml") + def test_production_on_save(self): + """ + Test on_save's production file handling. 
+ """ + conf = proxy.Conf() + conf.host = "jsonplaceholder.typicode.com" + conf.scheme = "https" + conf.port = 443 + conf.dev = False + conf.target_vm = "sd-svs-dispvm" + + with patch("subprocess.run") as patched_run: + fh = tempfile.NamedTemporaryFile() + p = proxy.Proxy(self.conf_path) + # Patching for tests + p.conf = conf + p.on_save(fh, self.res) + self.assertEqual(patched_run.call_args[0][0][0], "qvm-move-to-vm") + + +class TestConfig(unittest.TestCase): + def setUp(self): + self.conf_path = "tests/files/dev-config.yaml" + + def test_config_file_does_not_exist(self): + def err_on_done(self): + res = self.res.__dict__ + assert res["status"] == 500 + assert "Configuration file does not exist" in res["body"] + assert res["headers"]["Content-Type"] == "application/json" + sys.exit(1) + + p = proxy.Proxy(self.conf_path) + p.err_on_done = types.MethodType(err_on_done, p) + with self.assertRaises(SystemExit): + p.read_conf("not/a/real/path") + + def test_config_file_when_yaml_is_invalid(self): + def err_on_done(self): + res = self.res.__dict__ + assert res["status"] == 500 + assert "YAML syntax error" in res["body"] + assert res["headers"]["Content-Type"] == "application/json" + sys.exit(1) + + p = proxy.Proxy(self.conf_path) + p.err_on_done = types.MethodType(err_on_done, p) + with self.assertRaises(SystemExit): + p.read_conf("tests/files/invalid_yaml.yaml") + + def test_config_file_open_generic_exception(self): + def err_on_done(self): + res = self.res.__dict__ + assert res["status"] == 500 + assert res["headers"]["Content-Type"] == "application/json" + sys.exit(1) + + p = proxy.Proxy(self.conf_path) + p.err_on_done = types.MethodType(err_on_done, p) + + with self.assertRaises(SystemExit): + # Patching open so that we can simulate a non-YAML error + # (e.g. 
permissions) + with patch("builtins.open", side_effect=IOError): + p.read_conf("tests/files/valid-config.yaml") + + def test_config_has_valid_keys(self): + p = proxy.Proxy("tests/files/valid-config.yaml") + + # Verify we have a valid Conf object + self.assertEqual(p.conf.host, "jsonplaceholder.typicode.com") + self.assertEqual(p.conf.port, 443) + self.assertFalse(p.conf.dev) + self.assertEqual(p.conf.scheme, "https") + self.assertEqual(p.conf.target_vm, "compost") + + def test_config_500_when_missing_a_required_key(self): + def err_on_done(self): + res = self.res.__dict__ + assert res["status"] == 500 + assert "missing required keys" in res["body"] + assert res["headers"]["Content-Type"] == "application/json" + sys.exit(1) + + p = proxy.Proxy(self.conf_path) + p.err_on_done = types.MethodType(err_on_done, p) + + with self.assertRaises(SystemExit): + p.read_conf("tests/files/missing-key.yaml") + + def test_config_500_when_missing_target_vm(self): + def err_on_done(self): + res = self.res.__dict__ + assert res["status"] == 500 + assert "missing `target_vm` key" in res["body"] + assert res["headers"]["Content-Type"] == "application/json" + sys.exit(1) + + p = proxy.Proxy(self.conf_path) + p.err_on_done = types.MethodType(err_on_done, p) + + with self.assertRaises(SystemExit): + p.read_conf("tests/files/missing-target-vm.yaml") + + def test_dev_config(self): + p = proxy.Proxy("tests/files/dev-config.yaml") + assert p.conf.dev From 51fd0cb74e7ccbbcb6aab0bbdb2cebd79d2b65e7 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 7 Jan 2020 16:38:34 +0530 Subject: [PATCH 163/352] Removes Stretch test and build in CI --- .circleci/config.yml | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 76852d363..ac1812489 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -47,16 +47,6 @@ common-steps: version: 2 jobs: - build-stretch: - docker: - - image: circleci/python:3.5-stretch - steps: - - checkout - 
- *install_packaging_dependencies - - *verify_requirements - - *make_source_tarball - - *build_debian_package - build-buster: docker: - image: circleci/python:3.7-buster @@ -67,13 +57,6 @@ jobs: - *make_source_tarball - *build_debian_package - test-stretch: - docker: - - image: circleci/python:3.5-stretch - steps: - - checkout - - *run_tests - test-buster: docker: - image: circleci/python:3.7-buster @@ -85,7 +68,5 @@ workflows: version: 2 securedrop_proxy_ci: jobs: - - test-stretch - test-buster - - build-stretch - build-buster From 842a65999bafb9d706799661d4da4126a961e9d9 Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 8 Jan 2020 14:53:58 -0500 Subject: [PATCH 164/352] Remove notify-send notification in securedrop-export Since we are now logging and aggregating logs, and that we are disabling notifications, this code is no longer required. --- securedrop_export/disk/actions.py | 1 - securedrop_export/export.py | 13 ------------- securedrop_export/print/actions.py | 4 +--- tests/test_export.py | 14 -------------- 4 files changed, 1 insertion(+), 31 deletions(-) diff --git a/securedrop_export/disk/actions.py b/securedrop_export/disk/actions.py index 02b09f2c3..072cb93fc 100644 --- a/securedrop_export/disk/actions.py +++ b/securedrop_export/disk/actions.py @@ -171,7 +171,6 @@ def copy_submission(self): logger.info('Copying file to {}'.format(self.submission.target_dirname)) subprocess.check_call(["cp", "-r", export_data, target_path]) logger.info('File copied successfully to {}'.format(self.submission.target_dirname)) - self.submission.popup_message("Files exported successfully to disk.") except (subprocess.CalledProcessError, OSError): self.submission.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) finally: diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 976b1bd13..dce797e6b 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -125,19 +125,6 @@ def safe_check_call(self, command, error_message): except 
subprocess.CalledProcessError as ex: self.exit_gracefully(msg=error_message, e=ex.output) - def popup_message(self, msg: str): - self.safe_check_call( - command=[ - "notify-send", - "--expire-time", - "3000", - "--icon", - "/usr/share/securedrop/icons/sd-logo.png", - "SecureDrop: {}".format(msg), - ], - error_message="Error sending notification:" - ) - class ExportAction(abc.ABC): """ diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index f58ff4cd0..d23725235 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -149,7 +149,6 @@ def setup_printer(self, printer_uri, printer_ppd): def print_test_page(self): logger.info('Printing test page') self.print_file("/usr/share/cups/data/testprint") - self.submission.popup_message("Printing test page") def print_all_files(self): files_path = os.path.join(self.submission.tmpdir, "export_data/") @@ -159,8 +158,7 @@ def print_all_files(self): file_path = os.path.join(files_path, f) self.print_file(file_path) print_count += 1 - msg = "Printing document {} of {}".format(print_count, len(files)) - self.submission.popup_message(msg) + logger.info("Printing document {} of {}".format(print_count, len(files))) def is_open_office_file(self, filename): OPEN_OFFICE_FORMATS = [ diff --git a/tests/test_export.py b/tests/test_export.py index 95665761f..a161e75ed 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -1,5 +1,3 @@ -from unittest import mock - import os import pytest import subprocess # noqa: F401 @@ -118,18 +116,6 @@ def test_valid_encryption_config(capsys): assert config.is_valid() -@mock.patch("subprocess.check_call") -def test_popup_message(mocked_call): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.popup_message("hello!") - mocked_call.assert_called_once_with([ - "notify-send", - "--expire-time", "3000", - "--icon", "/usr/share/securedrop/icons/sd-logo.png", - "SecureDrop: hello!" 
- ]) - - def test_safe_check_call(capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) submission.safe_check_call(['ls'], "this will work") From c17bac9730d1ec515d5d89c47d7934949a277605 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 7 Jan 2020 19:09:39 +0530 Subject: [PATCH 165/352] Adds minimal type annotation --- securedrop_proxy/proxy.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index ab295548b..8af6862be 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -46,16 +46,16 @@ def __init__(self, status: int) -> None: class Proxy: def __init__( - self, conf_path: str, req: Req = Req(), timeout: float = None, + self, conf_path: str, req: Req = Req(), timeout: float = 10.0 ) -> None: # The configuration path for Proxy is a must. self.read_conf(conf_path) self.req = req self.res: Optional[Response] = None - self.timeout = float(timeout) if timeout else 10 + self.timeout = float(timeout) - self._prepared_request = None + self._prepared_request: Optional[Req] = None def on_done(self) -> None: print(json.dumps(self.res.__dict__)) @@ -158,7 +158,7 @@ def on_save(self, fh: _TemporaryFileWrapper, res: Response) -> None: res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] res.body = json.dumps({"filename": fn}) - def simple_error(self, status, err): + def simple_error(self, status: int, err: str) -> None: res = Response(status) res.body = json.dumps({"error": err}) res.headers = {"Content-Type": "application/json"} @@ -203,7 +203,7 @@ def handle_json_response(self) -> None: self.res = res - def handle_non_json_response(self): + def handle_non_json_response(self) -> None: res = Response(self._presp.status_code) @@ -242,13 +242,10 @@ def handle_response(self) -> None: def proxy(self) -> None: try: - if not self.on_save: - self.simple_error( - http.HTTPStatus.BAD_REQUEST, "Request on_save callback is not set." 
- ) - raise ValueError("Request on_save callback is not set.") self.prep_request() + # To confirm that we have a prepared request before the proxy call + assert self._prepared_request logger.debug("Sending request") s = requests.Session() self._presp = s.send(self._prepared_request, timeout=self.timeout) From 9e2d0d332bee11f4a5ad9138b431a4c8627d41ae Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 15 Jan 2020 15:28:40 -0800 Subject: [PATCH 166/352] remove stretch test and use ssh for git --- .circleci/config.yml | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ad0cbded6..02f7e1d1e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -20,9 +20,10 @@ common-steps: name: Install Debian packaging dependencies and download wheels command: | mkdir ~/packaging && cd ~/packaging + git config --global --unset url.ssh://git@github.com.insteadof git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd securedrop-debian-packaging - make install-deps && make fetch-wheels + make install-deps PKG_DIR=~/project make requirements - &verify_requirements @@ -72,14 +73,6 @@ jobs: source .venv/bin/activate make safety - test-stretch: - docker: - - image: circleci/python:3.5-stretch - steps: - - checkout - - *install_packages - - *run_tests - test-buster: docker: - image: circleci/python:3.7-buster @@ -103,6 +96,5 @@ workflows: securedrop_export_ci: jobs: - lint - - test-stretch - test-buster - build-buster From 6dc3b1455836cba56255e38dc651c3f196272cd6 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 16 Jan 2020 16:44:13 +0530 Subject: [PATCH 167/352] Fixes CI for git-lfs based package builds --- .circleci/config.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ac1812489..d47571379 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -14,9 +14,10 @@ common-steps: name: Install 
Debian packaging dependencies and download wheels command: | mkdir ~/packaging && cd ~/packaging + git config --global --unset url.ssh://git@github.com.insteadof git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd securedrop-debian-packaging - make install-deps && make fetch-wheels + make install-deps PKG_DIR=~/project make requirements - &verify_requirements From 1d3b75c947faa35e00de90b7898e3bf628ca377f Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Tue, 14 Jan 2020 17:40:59 -0800 Subject: [PATCH 168/352] VM rename per https://github.com/freedomofpress/securedrop-workstation/issues/285 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4d097a2c5..8b28f6d24 100644 --- a/README.md +++ b/README.md @@ -109,7 +109,7 @@ The supported device types for export are as follows, including the possible err - `USB_CHECK_ERROR` if an error occurred during pre-flight 2. `disk-test`: Preflight check that checks for LUKS-encrypted volume that returns: - - `USB_ENCRYPTED` if a LUKS volume is attached to sd-export + - `USB_ENCRYPTED` if a LUKS volume is attached to sd-devices - `USB_ENCRYPTION_NOT_SUPPORTED` if a LUKS volume is not attached or there was any other error - `USB_DISK_ERROR` From c6c1dbe74330add1826a56e540d66b49402252c6 Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Tue, 14 Jan 2020 17:43:03 -0800 Subject: [PATCH 169/352] VM rename per https://github.com/freedomofpress/securedrop-workstation/issues/285 --- tests/test_proxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 410a0738c..f4cd56038 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -379,7 +379,7 @@ def test_production_on_save(self): conf.scheme = "https" conf.port = 443 conf.dev = False - conf.target_vm = "sd-svs-dispvm" + conf.target_vm = "sd-viewer" with patch("subprocess.run") as patched_run: fh = tempfile.NamedTemporaryFile() From 
d3bd8ee57ecfedf1928b44b4d1a40555f011225f Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 14 Jan 2020 15:52:53 -0800 Subject: [PATCH 170/352] add start-vm and printer-preflight check --- securedrop_export/entrypoint.py | 3 +- securedrop_export/exceptions.py | 1 + securedrop_export/export.py | 2 ++ securedrop_export/main.py | 8 ++++- securedrop_export/print/actions.py | 52 ++++++++++++++++++++++++------ 5 files changed, 54 insertions(+), 12 deletions(-) diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index ed1d69bb1..d19bbbed1 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -58,8 +58,9 @@ def start(): main.__main__(my_sub) # Delete extracted achive from tempfile shutil.rmtree(my_sub.tmpdir) - except Exception: + except Exception as e: # exit with 0 return code otherwise the os will attempt to open # the file with another application + logger.error(e) msg = "ERROR_GENERIC" my_sub.exit_gracefully(msg) diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index 1c14bc684..e144a1684 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -22,6 +22,7 @@ class ExportStatus(Enum): USB_DISK_ERROR = 'USB_DISK_ERROR' # Printer preflight related errors + ERROR_MULTIPLE_PRINTERS_FOUND = 'ERROR_MULTIPLE_PRINTERS_FOUND' ERROR_PRINTER_NOT_FOUND = 'ERROR_PRINTER_NOT_FOUND' ERROR_PRINTER_NOT_SUPPORTED = 'ERROR_PRINTER_NOT_SUPPORTED' ERROR_PRINTER_DRIVER_UNAVAILABLE = 'ERROR_PRINTER_DRIVER_UNAVAILABLE' diff --git a/securedrop_export/export.py b/securedrop_export/export.py index dce797e6b..814a034df 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -23,11 +23,13 @@ class Metadata(object): METADATA_FILE = "metadata.json" SUPPORTED_EXPORT_METHODS = [ + "start-vm", "usb-test", # general preflight check "disk", "disk-test", # disk preflight test "printer", "printer-test", # print test page + "printer-preflight" ] 
SUPPORTED_ENCRYPTION_METHODS = ["luks"] diff --git a/securedrop_export/main.py b/securedrop_export/main.py index b68dce170..bbdb25f68 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -2,7 +2,8 @@ from securedrop_export import export from securedrop_export.exceptions import ExportStatus -from securedrop_export.print.actions import PrintExportAction, PrintTestPageAction +from securedrop_export.print.actions import PrintExportAction, PrintTestPageAction, \ + PrintPreflightAction from securedrop_export.disk.actions import DiskTestAction, DiskExportAction, USBTestAction logger = logging.getLogger(__name__) @@ -19,12 +20,17 @@ def __main__(submission): if not submission.archive_metadata.is_valid(): submission.exit_gracefully(ExportStatus.ERROR_ARCHIVE_METADATA.value) + if submission.archive_metadata.export_method == "start-vm": + submission.exit_gracefully('') + if submission.archive_metadata.export_method == "usb-test": action = USBTestAction(submission) elif submission.archive_metadata.export_method == "disk": action = DiskExportAction(submission) elif submission.archive_metadata.export_method == "disk-test": action = DiskTestAction(submission) + elif submission.archive_metadata.export_method == "printer-preflight": + action = PrintPreflightAction(submission) elif submission.archive_metadata.export_method == "printer": action = PrintExportAction(submission) elif submission.archive_metadata.export_method == "printer-test": diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index d23725235..9f2a02625 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -50,6 +50,36 @@ def wait_for_print(self): self.submission.exit_gracefully(ExportStatus.ERROR_PRINT.value) return True + def check_printer_setup(self) -> None: + try: + logger.info('Searching for printer') + output = subprocess.check_output(["sudo", "lpinfo", "-v"]) + printers = [x for x in output.decode('utf-8').split() if 
"usb://" in x] + if not printers: + logger.info('No usb printers connected') + self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) + + supported_printers = \ + [p for p in printers if any(sub in p for sub in ("Brother", "LaserJet"))] + if not supported_printers: + logger.info('{} are unsupported printers'.format(printers)) + self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + + if len(supported_printers) > 1: + logger.info('Too many usb printers connected') + self.submission.exit_gracefully(ExportStatus.ERROR_MULTIPLE_PRINTERS_FOUND.value) + + printer_uri = printers[0] + + logger.info('Installing printer drivers') + printer_ppd = self.install_printer_ppd(printer_uri) + + logger.info('Setting up printer') + self.setup_printer(printer_uri, printer_ppd) + except subprocess.CalledProcessError as e: + logger.error(e) + self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) + def get_printer_uri(self): # Get the URI via lpinfo and only accept URIs of supported printers printer_uri = "" @@ -204,14 +234,8 @@ def __init__(self, *args, **kwargs): def run(self): logger.info('Export archive is printer') + self.check_printer_setup() # prints all documents in the archive - logger.info('Searching for printer') - printer_uri = self.get_printer_uri() - logger.info('Installing printer drivers') - printer_ppd = self.install_printer_ppd(printer_uri) - logger.info('Setting up printer') - self.setup_printer(printer_uri, printer_ppd) - logger.info('Printing files') self.print_all_files() @@ -220,8 +244,16 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def run(self): + logger.info('Export archive is printer-test') + self.check_printer_setup() # Prints a test page to ensure the printer is functional - printer_uri = self.get_printer_uri() - printer_ppd = self.install_printer_ppd(printer_uri) - self.setup_printer(printer_uri, printer_ppd) self.print_test_page() + + +class 
PrintPreflightAction(PrintAction): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def run(self): + logger.info('Export archive is printer-preflight') + self.check_printer_setup() From a2c59172099f5b2eed2faf55569a34f85daf79bd Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 17 Jan 2020 12:36:07 -0800 Subject: [PATCH 171/352] only install drivers if needed --- securedrop_export/print/actions.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index 9f2a02625..41a47368f 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -120,17 +120,19 @@ def install_printer_ppd(self, uri): printer_driver = LASERJET_DRIVER printer_ppd = LASERJET_PPD - # Some drivers don't come with ppd files pre-compiled, we must compile them - self.submission.safe_check_call( - command=[ - "sudo", - "ppdc", - printer_driver, - "-d", - "/usr/share/cups/model/", - ], - error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value - ) + # Compile and install drivers that are not already installed + if not os.path.exists(printer_ppd): + self.submission.safe_check_call( + command=[ + "sudo", + "ppdc", + printer_driver, + "-d", + "/usr/share/cups/model/", + ], + error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value + ) + return printer_ppd def setup_printer(self, printer_uri, printer_ppd): From baae8c019dd40f66bc1bdc6fa7f6425bc9e0bbc6 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Mon, 20 Jan 2020 15:01:58 +0530 Subject: [PATCH 172/352] Adds logging to /var/log/syslog using rsyslog Also adds a new test to test configure_logging function call. 
--- securedrop_proxy/entrypoint.py | 15 ++++++++++++++- tests/test_entrypoint.py | 18 ++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index cee130f17..dbd5fa4c9 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -11,8 +11,9 @@ import logging import os import sys +import platform -from logging.handlers import TimedRotatingFileHandler +from logging.handlers import TimedRotatingFileHandler, SysLogHandler from securedrop_proxy import main from securedrop_proxy import proxy @@ -83,7 +84,19 @@ def configure_logging() -> None: handler.setFormatter(formatter) handler.setLevel(logging.DEBUG) + # For rsyslog handler + if platform.system() != "Linux": # pragma: no cover + syslog_file = "/var/run/syslog" + else: + syslog_file = "/dev/log" + + sysloghandler = SysLogHandler(address=syslog_file) + sysloghandler.setFormatter(formatter) + # set up primary log log = logging.getLogger() log.setLevel(LOGLEVEL) log.addHandler(handler) + + # add the secondard logger + log.addHandler(sysloghandler) diff --git a/tests/test_entrypoint.py b/tests/test_entrypoint.py index 8a5b0eaf2..842cb866a 100644 --- a/tests/test_entrypoint.py +++ b/tests/test_entrypoint.py @@ -3,8 +3,10 @@ import io import json import os +import platform import tempfile import unittest.mock +from unittest.mock import patch import vcr from securedrop_proxy import entrypoint @@ -46,6 +48,22 @@ def test_missing_config(self): body["error"], "Configuration file does not exist at {}".format(config_path) ) + @patch("securedrop_proxy.entrypoint.logging") + @patch("securedrop_proxy.entrypoint.SysLogHandler") + @patch("securedrop_proxy.entrypoint.TimedRotatingFileHandler") + def test_configure_logging(self, mock_log_conf, mock_log_conf_sys, mock_logging): + with sdhome() as homedir: + mock_log_file = os.path.join(homedir, 'logs', 'proxy.log') + entrypoint.configure_logging() + 
mock_log_conf.assert_called_once_with(mock_log_file) + # For rsyslog handler + if platform.system() != "Linux": # pragma: no cover + syslog_file = "/var/run/syslog" + else: + syslog_file = "/dev/log" + mock_log_conf_sys.assert_called_once_with(address=syslog_file) + mock_logging.getLogger.assert_called_once_with() + def test_unwritable_log_folder(self): """ Tests a permission problem in `configure_logging`. From d06df800b8f9866af72561d8d3d55cf1252b071e Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 24 Jan 2020 17:12:28 -0500 Subject: [PATCH 173/352] build-requirements: update for production beta --- build-requirements.txt | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/build-requirements.txt b/build-requirements.txt index 4b17af9f9..bcc35fca1 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -1,10 +1,10 @@ -certifi==2018.10.15 --hash=sha256:a5471c55b011bd45d6155f5c3629310c1d2f1e1a5a899b7e438a223343de583d -chardet==3.0.4 --hash=sha256:9f178988ca4c86e8a319b51aac1185b6fe5192328eb5a163c286f4bf50b7b3d8 -furl==2.0.0 --hash=sha256:cc0eb8998dcc7c5b58bc8625891a9ff563e2765e112024fa3d1e3521481de8b6 -idna==2.7 --hash=sha256:954e65e127d0433a352981f43f291a438423d5b385ebf643c70fd740e0634111 -orderedmultidict==1.0 --hash=sha256:25489716d76d2cc8aa656bfb00cd40b6ca29d5e11ccde0db60c2b46ad52bb40a -pyyaml==5.1 --hash=sha256:b8d80623e9d4e348c59ea726ce3032a2eb15abca6a48d3828362d11c6014a0a7 --hash=sha256:c6dec5d6ffa44a50d83b9c4e8df0443a0a87e4213ecf2e24fcae6ea991f3b0c0 -requests==2.20.0 --hash=sha256:2a539dd6af40a611f3b8eb3f99d3567781352ece1698b2fab42bf4c2218705b5 -six==1.11.0 --hash=sha256:4663c7a1dbed033cfb294f2d534bd6151c0698dc12ecabb4eaa3cb041d758528 -urllib3==1.24.3 --hash=sha256:028309393606e28e640e2031edd27eb969c94f9364b0871912608aaa8e66c96e -werkzeug==0.14.1 --hash=sha256:177ea4248bf0475cbc060edb35a0bdcf6e6daeac9e1296de5ddb3493e5ec15b9 +certifi==2018.10.15 
--hash=sha256:173b19dd31ca7faa50d1fcc0eaf30f5e32e8e99e17d8c7fd4cfc8bc8d94e18a6 +chardet==3.0.4 --hash=sha256:f5632e583a4f61f1e16d0cc98127d241fb11c3c6ddfddee159307d4215186837 +furl==2.0.0 --hash=sha256:1855003e64dcb934556ad79994ba1a3a852da337e353d84d3b4ef75031913451 +idna==2.7 --hash=sha256:491f674364ba3232ed1eb4c1eb7407887f62cef6c300aad7df6e01acd88ffb25 +orderedmultidict==1.0 --hash=sha256:51efddca0b4ae6d885bbafd8ca44e51758166c144cf006dbead5c9394b2a9eae +pyyaml==5.1 --hash=sha256:59ff9ed67bd8be210f91fd20f0ef844ee897d11eb4c19c611a77a37896c124b9 +requests==2.20.0 --hash=sha256:d87b2085783d31d874ac7bc62660e287932aaee7059e80b41b76462eb18d35cc +six==1.11.0 --hash=sha256:aa4ad34049ddff178b533062797fd1db9f0038b7c5c2461a7cde2244300b9f3d +urllib3==1.24.3 --hash=sha256:3d440cbb168e2c963d5099232bdb3f7390bf031b6270dad1bc79751698a1399a +werkzeug==0.14.1 --hash=sha256:eb89d94f6e0d4d7b2efff64c56674dd73f21105fce3a0d6e9b650d780add2e11 From 542d6c3db6b35c1526570cb8a4fd9e9ed122ce9d Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Mon, 27 Jan 2020 18:08:17 +0530 Subject: [PATCH 174/352] Adds rsyslog based logging --- securedrop_export/entrypoint.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index d19bbbed1..a34c6dd25 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -2,8 +2,9 @@ import os import shutil import sys +import platform -from logging.handlers import TimedRotatingFileHandler +from logging.handlers import TimedRotatingFileHandler, SysLogHandler from securedrop_export import __version__ from securedrop_export import export from securedrop_export import main @@ -31,12 +32,23 @@ def configure_logging(): handler = TimedRotatingFileHandler(log_file) handler.setFormatter(formatter) + + # For rsyslog handler + if platform.system() != "Linux": # pragma: no cover + syslog_file = "/var/run/syslog" + else: + syslog_file = "/dev/log" + + sysloghandler 
= SysLogHandler(address=syslog_file) + sysloghandler.setFormatter(formatter) handler.setLevel(logging.DEBUG) # set up primary log log = logging.getLogger() log.setLevel(logging.DEBUG) log.addHandler(handler) + # add the second logger + log.addHandler(sysloghandler) def start(): From 98188f190e364530a518444f6868a1bf62ca3aa8 Mon Sep 17 00:00:00 2001 From: John Hensley Date: Tue, 28 Jan 2020 14:37:32 -0500 Subject: [PATCH 175/352] securedrop-proxy 0.1.6 --- changelog.md | 9 +++++++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index 176c9a111..dcd7b8334 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,14 @@ # Changelog +## 0.1.6 + + * Fixes CI for git-lfs based package builds (#60) + * Rename VMs (#59) + * Restructures the code base with more object methods (#55) + * Add quality control tools (#54) + * Improve error handling, tests (#53) + * Adds buster packaging in CI (#52) + ## 0.1.5 * Update build-requirements.txt to include wheels for Buster diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 9faa1b7a7..c946ee616 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.1.5 +0.1.6 From d15d24e7f1accd1492e222125162d93ce7f6f21e Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Wed, 29 Jan 2020 14:04:58 -0800 Subject: [PATCH 176/352] Update description in README to mirror current implementation --- README.md | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 8b28f6d24..9abfc7190 100644 --- a/README.md +++ b/README.md @@ -98,7 +98,7 @@ For all device types (described in detail below), the following standard error t - `ERROR_EXTRACTION`: Error while extracting the archive - `ERROR_METADATA_PARSING`: The metadata.json file cannot be correctly parsed - `ERROR_ARCHIVE_METADATA`: The metadata failed the check -- `ERROR_USB_CONFIGURATION`: There is no USB controller 
attached to the VM, the dom0 configuration (in `config.json`) or USB device identifier is is misconfigured +- `ERROR_USB_CONFIGURATION`: There is no USB controller attached to the VM - `ERROR_GENERIC`: An uncaught (unexpected) error somewhere in the script. These should not happen unless the code improperly handles errors The supported device types for export are as follows, including the possible errors specific to that device type: @@ -134,22 +134,31 @@ The supported device types for export are as follows, including the possible err ### Export Folder Structure -When exporting to a USB drive (using the disk device in metadata.json), the files will be placed on the drive as follows: The root of the USB drive will contain one folder per source, reflecting their source codename in the client. Documents or messages exported will be copied to that directory, preserving the filename from the server. In case a same file is exported twice, a confirmation window replace/rename/abort. +When exporting to a USB drive, the files will be placed on the drive as follows: The root of the USB drive will contain one `sd-export-[timestamp]` folder, where `[timestamp]` is in the format `YYYYMMDD-hhmmss`. This folder will contain a subfolder `export_data`, which will contain the exported file with its original name as submitted by the source. For example: + +``` +. + +└── sd-export-20200116-003153 + └── export_data + └── secret_memo.pdf +``` + +To support multiple files, in the long term, we are planning to use a folder structure similar to the following, where the journalist designation for a source is used for folder names and message/reply file names. -Example folder structure of USB export drive: ``` . 
-├── cytotoxic payer +├── cytotoxic-payer │ ├── 1-cytotoxic-payer-msg -│ │ └── file-to-export-1.txt +│ │ └── 1-cytotoxic-payer-msg.txt │ ├── 2-cytotoxic-payer-msg -│ │ └── file-to-export-2.txt +│ │ └── 2-cytotoxic-payer-msg.txt │ └── 3-cytotoxic-payer-doc -│ │ └── file-to-export-3.doc -├── grandiloquent pasteboard +│ │ └── interesting_file.doc +├── grandiloquent-pasteboard │ └── 1-grandiloquent-pasteboard-doc -│ │ └── file-to-export-1.doc -└── snug seek +│ │ └── questionable_file.pdf +└── snug-seek ``` From 8fc1a3001dde3a8ed6d5e96a863b0b6a9627f1f6 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 4 Feb 2020 15:16:03 -0500 Subject: [PATCH 177/352] deps: update Werkzeug to 0.16.0 --- build-requirements.txt | 2 +- dev-requirements.txt | 6 +++--- requirements.in | 2 +- requirements.txt | 6 +++--- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/build-requirements.txt b/build-requirements.txt index bcc35fca1..390b2281c 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -7,4 +7,4 @@ pyyaml==5.1 --hash=sha256:59ff9ed67bd8be210f91fd20f0ef844ee897d11eb4c19c611a77a3 requests==2.20.0 --hash=sha256:d87b2085783d31d874ac7bc62660e287932aaee7059e80b41b76462eb18d35cc six==1.11.0 --hash=sha256:aa4ad34049ddff178b533062797fd1db9f0038b7c5c2461a7cde2244300b9f3d urllib3==1.24.3 --hash=sha256:3d440cbb168e2c963d5099232bdb3f7390bf031b6270dad1bc79751698a1399a -werkzeug==0.14.1 --hash=sha256:eb89d94f6e0d4d7b2efff64c56674dd73f21105fce3a0d6e9b650d780add2e11 +werkzeug==0.16.0 --hash=sha256:429de1b931a2a58bf5cfac8447253949f7a930d30a73f2755e0ad0f9824592bf diff --git a/dev-requirements.txt b/dev-requirements.txt index 826afb92f..1153d0469 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -160,9 +160,9 @@ urllib3==1.24.3 \ vcrpy==2.0.1 \ --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \ --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f -werkzeug==0.14.1 \ - 
--hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ - --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b +werkzeug==0.16.0 \ + --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ + --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 wrapt==1.10.11 \ --hash=sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6 yarl==1.2.6 \ diff --git a/requirements.in b/requirements.in index 15019974a..1e2aafcf0 100644 --- a/requirements.in +++ b/requirements.in @@ -7,4 +7,4 @@ PyYAML==5.1 six==1.11.0 requests==2.20.0 urllib3==1.24.3 -Werkzeug==0.14.1 +Werkzeug==0.16.0 diff --git a/requirements.txt b/requirements.txt index 6ac4b0979..38b09c7e3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -40,6 +40,6 @@ six==1.11.0 \ urllib3==1.24.3 \ --hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb -werkzeug==0.14.1 \ - --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ - --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b +werkzeug==0.16.0 \ + --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ + --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 From 29c1cf7caa08a5e3209dfac7abe94f161be1d61f Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 9 Jan 2020 14:47:28 +0530 Subject: [PATCH 178/352] Adds initial scripts to use rsyslog for logging in workstation `sd-rsyslog` is the output plugin of rsyslog to be installed in /usr/sbin `sdlog.conf` is the configuration of rsyslog in /etc/rsyslog.d/ `securedrop-redis-log` is part of the Qrexec service inside of sd-log vm. This will receive the messages from any other vm, and add them into a queue in Redis. 
`securedrop-log-saver` will be the service inside `sd-log` VM, this will monitor the queue, and save any incoming message to the `syslog.log` file of the respective directory for each VM. It also has the Makefile for the project. --- MANIFEST.in | 9 +- Makefile | 40 +++++++++ README.md | 43 +++++++-- VERSION | 1 + build-requirements.txt | 1 + changelog.md | 4 + requirements.in | 1 + requirements.txt | 9 ++ sd-rsyslog | 193 ++++++++++++++++++++++++++++++++++++++++ sd-rsyslog-example.conf | 4 + sdlog.conf | 4 + securedrop-log-saver | 54 +++++++++++ securedrop-log.service | 12 +++ securedrop-redis-log | 46 ++++++++++ securedrop.Log | 2 +- securedrop_log/VERSION | 1 - setup.py | 12 ++- update_version.sh | 10 +-- 18 files changed, 421 insertions(+), 25 deletions(-) create mode 100644 Makefile create mode 100644 VERSION create mode 100644 requirements.in create mode 100644 sd-rsyslog create mode 100644 sd-rsyslog-example.conf create mode 100644 sdlog.conf create mode 100755 securedrop-log-saver create mode 100644 securedrop-log.service create mode 100755 securedrop-redis-log delete mode 100644 securedrop_log/VERSION diff --git a/MANIFEST.in b/MANIFEST.in index 6d11dc950..9596bbc0a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,8 +3,9 @@ include README.md include changelog.md include build-requirements.txt include requirements.txt -include securedrop_log/*.py -include securedrop_log/VERSION -include setup.py -include securedrop-log +include securedrop-log* +include securedrop-redis-log include securedrop.Log +include sd-rsyslog* +include sdlog.conf +include VERSION \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..0a669f0db --- /dev/null +++ b/Makefile @@ -0,0 +1,40 @@ +DEFAULT_GOAL: help +SHELL := /bin/bash + +# Bandit is a static code analysis tool to detect security vulnerabilities in Python applications +# https://wiki.openstack.org/wiki/Security/Projects/Bandit +.PHONY: bandit +bandit: ## Run bandit with medium level 
excluding test-related folders + pip install --upgrade pip && \ + pip install --upgrade bandit!=1.6.0 && \ + bandit -ll --recursive . --exclude tests,.venv + +.PHONY: safety +safety: ## Runs `safety check` to check python dependencies for vulnerabilities + pip install --upgrade safety && \ + for req_file in `find . -type f -name '*requirements.txt'`; do \ + echo "Checking file $$req_file" \ + && safety check --full-report -r $$req_file \ + && echo -e '\n' \ + || exit 1; \ + done + +.PHONY: update-pip-requirements +update-pip-requirements: ## Updates all Python requirements files via pip-compile. + pip-compile --generate-hashes --output-file requirements.txt requirements.in + + +# Explaination of the below shell command should it ever break. +# 1. Set the field separator to ": ##" and any make targets that might appear between : and ## +# 2. Use sed-like syntax to remove the make targets +# 3. Format the split fields into $$1) the target name (in blue) and $$2) the target descrption +# 4. Pass this file as an arg to awk +# 5. Sort it alphabetically +# 6. Format columns with colon as delimiter. +.PHONY: help +help: ## Print this message and exit. + @printf "Makefile for developing and testing the SecureDrop Logging system.\n" + @printf "Subcommands:\n\n" + @awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST) \ + | sort \ + | column -s ':' -t diff --git a/README.md b/README.md index efaf8e8a9..0379fca7b 100644 --- a/README.md +++ b/README.md @@ -24,19 +24,47 @@ Add the following content to `/etc/qubes-rpc/securedrop.Log` /usr/sbin/securedrop-log ``` -and then place `securedrop-log` script to `/usr/sbin/` directory and make sure that -it is executable. +and then place `securedrop-redis-log` and `securedrop-log-saver` scripts to the +virtualenv at `/opt/venvs/securedrop-log` and create links to `/usr/sbin/` +directory and make sure that they are executable. 
This step will be automated via +the Debian package. + + +Copy `securedrop-log.service` file to `/usr/systemd/system` and then + +``` +sudo systemctl daemon-reload +sudo systemctl start redis +sudo systemctl start securedrop-log +``` + +To test the logging, make sure to execute `securedrop-log-saver` from a terminal in `sd-log` +and check the ~/QubesIncomingLogs/vmname/syslog.log file via **tail -f**. + ### To use from any Python code in workvm +Put `sd-rsyslog-example.conf` file to `/etc/sd-rsyslog.conf`, make sure update +it so that is shows the right **localvm** name. + +Copy `sd-rsyslog` executable to **/usr/sbin**, and remember to `chmod +x` +the binary. + +Next, restart the rsyslog service. + +``` +systemctl restart rsyslog +``` + + Here is an example code using Python logging ```Python import logging -from securedrop_log import SecureDropLog +import logging.handlers def main(): - handler = SecureDropLog("workvm", "proxy-debian") + handler = logging.handlers.SysLogHandler(address="/dev/log") logging.basicConfig(level=logging.DEBUG, handlers=[handler]) logger = logging.getLogger("example") @@ -48,8 +76,9 @@ if __name__ == "__main__": ``` -## The journalctl example +Or use the logger command. -You will need `python3-systemd` package for the same. +``` +logger This line should show in the syslog.log file in the sd-log file. +``` -The code is in `journal-example.py` file. 
\ No newline at end of file diff --git a/VERSION b/VERSION new file mode 100644 index 000000000..81340c7e7 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +0.0.4 diff --git a/build-requirements.txt b/build-requirements.txt index e69de29bb..af566a319 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -0,0 +1 @@ +redis==3.3.11 --hash=sha256:022f124431ae16ee3a3a69c8016e3e2b057b4f4e0bfa7787b6271d893890c3cc diff --git a/changelog.md b/changelog.md index b82e78e7a..c9fba47b1 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.0.4 + + * Converts into rsyslog based logging system. + ## 0.0.3 * Fixes typos MANIFEST.in and setup.py diff --git a/requirements.in b/requirements.in new file mode 100644 index 000000000..767bdac05 --- /dev/null +++ b/requirements.in @@ -0,0 +1 @@ +redis==3.3.11 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index e69de29bb..3c896d549 100644 --- a/requirements.txt +++ b/requirements.txt @@ -0,0 +1,9 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes --output-file=requirements.txt requirements.in +# +redis==3.3.11 \ + --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ + --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 diff --git a/sd-rsyslog b/sd-rsyslog new file mode 100644 index 000000000..4f3233ee5 --- /dev/null +++ b/sd-rsyslog @@ -0,0 +1,193 @@ +#!/opt/venvs/securedrop-log/bin/python3 +"""A skeleton for a Python rsyslog output plugin with error handling. +Requires Python 3. + +To integrate a plugin based on this skeleton with rsyslog, configure an +'omprog' action like the following: + action(type="omprog" + binary="/usr/bin/myplugin.py" + output="/var/log/myplugin.log" + confirmMessages="on" + ...) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + -or- + see COPYING.ASL20 in the source distribution + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import sys +import os +import logging +import configparser +from subprocess import Popen, PIPE + +# Global definitions specific to your plugin +process = None + +class RecoverableError(Exception): + """An error that has caused the processing of the current message to + fail, but does not require restarting the plugin. + + An example of such an error would be a temporary loss of connection to + a database or a server. If such an error occurs in the onMessage function, + your plugin should wrap it in a RecoverableError before raising it. + For example: + + try: + # code that connects to a database + except DbConnectionError as e: + raise RecoverableError from e + + Recoverable errors will cause the 'omprog' action to be temporarily + suspended by rsyslog, during a period that can be configured using the + "action.resumeInterval" action parameter. When the action is resumed, + rsyslog will resend the failed message to your plugin. + """ + + +def onInit(): + """Do everything that is needed to initialize processing (e.g. open files, + create handles, connect to systems...). + """ + # Apart from processing the logs received from rsyslog, you want your plugin + # to be able to report its own logs in some way. This will facilitate + # diagnosing problems and debugging your code. Here we set up the standard + # Python logging system to output the logs to stderr. In the rsyslog + # configuration, you can configure the 'omprog' action to capture the stderr + # of your plugin by specifying the action's "output" parameter. 
+ logging.basicConfig(stream=sys.stderr, + level=logging.WARNING, + format='%(asctime)s %(levelname)s %(message)s') + + # This is an example of a debug log. (Note that for debug logs to be + # emitted you must set 'level' to logging.DEBUG above.) + logging.debug("onInit called") + + + global process + if not os.path.exists("/etc/sd-rsyslog.conf"): + print("Please create the configuration file at /etc/sd-rsyslog.conf", file=sys.stderr) + sys.exit(1) + config = configparser.ConfigParser() + config.read('/etc/sd-rsyslog.conf') + logvmname = config['sd-rsyslog']['remotevm'] + localvmname = config['sd-rsyslog']['localvm'] + process = Popen( + ["/usr/lib/qubes/qrexec-client-vm", logvmname, "securedrop.Log"], + stdin=PIPE, + stdout=PIPE, + stderr=PIPE, + ) + process.stdin.write(localvmname.encode("utf-8")) + process.stdin.write(b"\n") + process.stdin.flush() + + +def onMessage(msg): + """Process one log message received from rsyslog (e.g. send it to a + database). If this function raises an error, the message will be retried + by rsyslog. + + Args: + msg (str): the log message. Does NOT include a trailing newline. + + Raises: + RecoverableError: If a recoverable error occurs. The message will be + retried without restarting the plugin. + Exception: If a non-recoverable error occurs. The plugin will be + restarted before retrying the message. + """ + logging.debug("onMessage called") + + # For illustrative purposes, this plugin skeleton appends the received logs + # to a file. When implementing your plugin, remove the following code. + global process + process.stdin.write(msg.encode("utf-8")) + process.stdin.write(b"\n") + process.stdin.flush() + + +def onExit(): + """Do everything that is needed to finish processing (e.g. close files, + handles, disconnect from systems...). This is being called immediately + before exiting. + + This function should not raise any error. If it does, the error will be + logged as a warning and ignored. 
+ """ + logging.debug("onExit called") + + # For illustrative purposes, this plugin skeleton appends the received logs + # to a file. When implementing your plugin, remove the following code. + global process + process.stdin.flush() + + +""" +------------------------------------------------------- +This is plumbing that DOES NOT need to be CHANGED +------------------------------------------------------- +This is the main loop that receives messages from rsyslog via stdin, +invokes the above entrypoints, and provides status codes to rsyslog +via stdout. In most cases, modifying this code should not be necessary. +""" +try: + onInit() +except Exception as e: + # If an error occurs during initialization, log it and terminate. The + # 'omprog' action will eventually restart the program. + logging.exception("Initialization error, exiting program") + sys.exit(1) + +# Tell rsyslog we are ready to start processing messages: +print("OK", flush=True) + +endedWithError = False +try: + line = sys.stdin.readline() + while line: + line = line.rstrip('\n') + try: + onMessage(line) + status = "OK" + except RecoverableError as e: + # Any line written to stdout that is not a status code will be + # treated as a recoverable error by 'omprog', and cause the action + # to be temporarily suspended. In this skeleton, we simply return + # a one-line representation of the Python exception. (If debugging + # is enabled in rsyslog, this line will appear in the debug logs.) + status = repr(e) + # We also log the complete exception to stderr (or to the logging + # handler(s) configured in doInit, if any). + logging.exception(e) + + # Send the status code (or the one-line error message) to rsyslog: + print(status, flush=True) + line = sys.stdin.readline() + +except Exception: + # If a non-recoverable error occurs, log it and terminate. The 'omprog' + # action will eventually restart the program. 
+ logging.exception("Unrecoverable error, exiting program") + endedWithError = True + +try: + onExit() +except Exception: + logging.warning("Exception ignored in onExit", exc_info=True) + +if endedWithError: + sys.exit(1) +else: + sys.exit(0) + diff --git a/sd-rsyslog-example.conf b/sd-rsyslog-example.conf new file mode 100644 index 000000000..b79af60bb --- /dev/null +++ b/sd-rsyslog-example.conf @@ -0,0 +1,4 @@ +[sd-rsyslog] +remotevm = sd-log +localvm = sd-app + diff --git a/sdlog.conf b/sdlog.conf new file mode 100644 index 000000000..be8cac7f6 --- /dev/null +++ b/sdlog.conf @@ -0,0 +1,4 @@ +module(load="omprog") +action(type="omprog" + binary="/usr/sbin/sd-rsyslog" + template="RSYSLOG_TraditionalFileFormat") diff --git a/securedrop-log-saver b/securedrop-log-saver new file mode 100755 index 000000000..3de1c7638 --- /dev/null +++ b/securedrop-log-saver @@ -0,0 +1,54 @@ +#!/opt/venvs/securedrop-log/bin/python3 + +import os +import sys +import redis +import errno + + +def main(): + rclient = redis.Redis() + # This is the cache of open files for each vm + openfiles = {} + try: + while True: + # Wait for the next message + qname, data = rclient.blpop("syslogmsg") + msg = data.decode("utf-8") + vmname, msg_str = msg.split("::", 1) + + if vmname in openfiles: + fh = openfiles[vmname] + else: + # First open a file + filepath = os.path.join( + os.getenv("HOME", "/"), + "QubesIncomingLogs", + f"{vmname}", + "syslog.log", + ) + dirpath = os.path.dirname(filepath) + try: + os.makedirs(dirpath) + except OSError as err: + if err.errno != errno.EEXIST: + raise + fh = open(filepath, "a") + + # cache it for the next call + openfiles[vmname] = fh + + # Now just write and flush + fh.write(msg_str) + fh.write("\n") + fh.flush() + except Exception as e: + print(e, file=sys.stderr) + # Clean up all open files + for k, v in openfiles: + v.close() + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/securedrop-log.service b/securedrop-log.service new file mode 100644 
index 000000000..796df4ec3 --- /dev/null +++ b/securedrop-log.service @@ -0,0 +1,12 @@ +[Unit] +Description=securedrop logging Service +After=network.target + +[Service] +Type=simple +User=user +ExecStart=/usr/sbin/securedrop-log-saver +Restart=on-abort + +[Install] +WantedBy=multi-user.target diff --git a/securedrop-redis-log b/securedrop-redis-log new file mode 100755 index 000000000..740c2f810 --- /dev/null +++ b/securedrop-redis-log @@ -0,0 +1,46 @@ +#!/opt/venvs/securedrop-log/bin/python3 + +from __future__ import print_function + +import tempfile +import io +import sys +import os +import errno +import shutil +import subprocess +import redis +from datetime import datetime + + +def sanitize_line(untrusted_line): + line = bytearray(untrusted_line) + for i, c in enumerate(line): + if c >= 0x20 and c <= 0x7E: + pass + else: + line[i] = 0x2E + return bytearray(line).decode("ascii") + + +stdin = sys.stdin.buffer # python3 + + +rd = redis.Redis() + + +def log(msg, vmname="remote"): + global rd + redis_msg = f"{vmname}::{msg}" + rd.rpush("syslogmsg", redis_msg) + + +# the first line is always the remote vm name +untrusted_line = stdin.readline() +qrexec_remote = untrusted_line.rstrip(b"\n").decode("utf-8") +while True: + untrusted_line = stdin.readline() + if untrusted_line == b"": + break + + log(sanitize_line(untrusted_line.rstrip(b"\n")), qrexec_remote) diff --git a/securedrop.Log b/securedrop.Log index 0ebaf241e..bfb53e420 100644 --- a/securedrop.Log +++ b/securedrop.Log @@ -1 +1 @@ -/usr/sbin/securedrop-log +/usr/sbin/securedrop-redis-log diff --git a/securedrop_log/VERSION b/securedrop_log/VERSION deleted file mode 100644 index bcab45af1..000000000 --- a/securedrop_log/VERSION +++ /dev/null @@ -1 +0,0 @@ -0.0.3 diff --git a/setup.py b/setup.py index 790b992e7..efbe8f282 100644 --- a/setup.py +++ b/setup.py @@ -4,8 +4,8 @@ with open("README.md", "r") as fh: long_description = fh.read() -version = pkgutil.get_data("securedrop_log", "VERSION").decode("utf-8") 
-version = version.strip() +with open("VERSION") as fh: + version = fh.read().strip() setuptools.setup( @@ -20,15 +20,13 @@ install_requires=[], python_requires=">=3.5", url="https://github.com/freedomofpress/securedrop-log", - packages=["securedrop_log",], - package_data={"securedrop_log": ["VERSION"],}, - classifiers=( + classifiers=[ "Development Status :: 3 - Alpha", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Intended Audience :: Developers", "Operating System :: POSIX :: Linux", - ), - data_files=[("sbin", ["securedrop-log"])], + ], + data_files=[("sbin", ["securedrop-log", "securedrop-log-saver", "securedrop-redis-log"])], ) diff --git a/update_version.sh b/update_version.sh index 18c133047..3bd6903ce 100755 --- a/update_version.sh +++ b/update_version.sh @@ -10,17 +10,17 @@ if [ -z "$NEW_VERSION" ]; then exit 1 fi -# Get the old version from securedrop_log/VERSION -OLD_VERSION=$(cat securedrop_log/VERSION) +# Get the old version from VERSION +OLD_VERSION=$(cat VERSION) if [ -z "$OLD_VERSION" ]; then echo "Couldn't find the old version: does this script need to be updated?" exit 1 fi -# Update the version in securedrop_log/VERSION (setup.py is done automatically) +# Update the version in VERSION (setup.py is done automatically) if [[ "$OSTYPE" == "darwin"* ]]; then # The empty '' after sed -i is required on macOS to indicate no backup file should be saved. 
- sed -i '' "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_log/VERSION + sed -i '' "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" VERSION else - sed -i "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" securedrop_log/VERSION + sed -i "s@$(echo "${OLD_VERSION}" | sed 's/\./\\./g')@$NEW_VERSION@g" VERSION fi From 9fbcae497efa1076990ad6f5fcd550e44a2eedf1 Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 6 Feb 2020 11:15:55 -0500 Subject: [PATCH 179/352] ci: Add build-buster job --- .circleci/config.yml | 50 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index dd6ad3702..71dfd2d95 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,3 +1,42 @@ +--- +common-steps: + - &install_packaging_dependencies + run: + name: Install Debian packaging dependencies and download wheels + command: | + mkdir ~/packaging && cd ~/packaging + git config --global --unset url.ssh://git@github.com.insteadof + git clone https://github.com/freedomofpress/securedrop-debian-packaging.git + cd securedrop-debian-packaging + make install-deps + PKG_DIR=~/project make requirements + + - &verify_requirements + run: + name: Ensure that build-requirements.txt and requirements.txt are in sync. + command: | + cd ~/project + # Return 1 if unstaged changes exist (after `make requirements` in the + # previous run step), else return 0. 
+ git diff --quiet + + - &make_source_tarball + run: + name: Tag and make source tarball + command: | + cd ~/project + ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here + python3 setup.py sdist + + - &build_debian_package + run: + name: Build debian package + command: | + cd ~/packaging/securedrop-debian-packaging + export PKG_VERSION=1000.0 + export PKG_PATH=/home/circleci/project/dist/securedrop-log-$PKG_VERSION.tar.gz + make securedrop-log + version: 2 jobs: test: @@ -9,8 +48,19 @@ jobs: name: Run tests command: python3 -m unittest + build-buster: + docker: + - image: circleci/python:3.7-buster + steps: + - checkout + - *install_packaging_dependencies + - *verify_requirements + - *make_source_tarball + - *build_debian_package + workflows: version: 2 per_pr: jobs: - test + - build-buster From 3167b4a242eaf85b01b395ef9ab442eb60684ac1 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 11 Feb 2020 21:42:57 +0530 Subject: [PATCH 180/352] Removes container's upstream virtualenv freedomofpress/securedrop-debian-packaging#142 ^^ related to above PR. --- .circleci/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 02f7e1d1e..d4fb3a49f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,5 +1,10 @@ --- common-steps: + - &removevirtualenv + run: + name: Removes the upstream virtualenv from the original container image + command: sudo pip uninstall virtualenv -y + - &install_packages run: name: Install packages @@ -86,6 +91,7 @@ jobs: - image: circleci/python:3.7-buster steps: - checkout + - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *make_source_tarball From 5ee13531b868d601ef9d9951553dce1e0eef2e32 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 11 Feb 2020 21:45:43 +0530 Subject: [PATCH 181/352] Removes container's upstream virtualenv freedomofpress/securedrop-debian-packaging#142 ^^ related to above PR. 
--- .circleci/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index d47571379..dedca8c84 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,5 +1,10 @@ --- common-steps: + - &removevirtualenv + run: + name: Removes the upstream virtualenv from the original container image + command: sudo pip uninstall virtualenv -y + - &run_tests run: name: Install requirements and run tests @@ -53,6 +58,7 @@ jobs: - image: circleci/python:3.7-buster steps: - checkout + - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *make_source_tarball From 701817ad45f5035af0b72f0941d495a9d9232f93 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 11 Feb 2020 21:48:35 +0530 Subject: [PATCH 182/352] Removes container's upstream virtualenv freedomofpress/securedrop-debian-packaging#142 ^^ related to above PR. --- .circleci/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 71dfd2d95..4d7b27ebb 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,5 +1,10 @@ --- common-steps: + - &removevirtualenv + run: + name: Removes the upstream virtualenv from the original container image + command: sudo pip uninstall virtualenv -y + - &install_packaging_dependencies run: name: Install Debian packaging dependencies and download wheels @@ -53,6 +58,7 @@ jobs: - image: circleci/python:3.7-buster steps: - checkout + - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *make_source_tarball From a9a02ea7ed57f69cb35bc14af97da5f4ba4f37fa Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 11 Feb 2020 16:54:32 +0530 Subject: [PATCH 183/352] Starts the service after redis --- securedrop-log.service | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop-log.service b/securedrop-log.service index 796df4ec3..1eb5adb30 100644 --- a/securedrop-log.service +++ b/securedrop-log.service @@ 
-1,6 +1,6 @@ [Unit] Description=securedrop logging Service -After=network.target +After=network.target redis.service [Service] Type=simple From 8f6116e66d9f9c3ad1c197037a5336ac04ef7b4a Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 13 Feb 2020 11:05:27 -0500 Subject: [PATCH 184/352] securedrop-proxy 0.1.7 --- changelog.md | 4 ++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index dcd7b8334..5b816ff00 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.1.7 + + * Update Werkzeug to 0.16.0 due to CVE-2019-14806 (#64) + ## 0.1.6 * Fixes CI for git-lfs based package builds (#60) diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index c946ee616..11808190d 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.1.6 +0.1.7 From d0a820b39c60836bef9aa7b13866384b038f1ee4 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 18 Feb 2020 11:47:15 -0800 Subject: [PATCH 185/352] combine commands to add printer and accept requests --- securedrop_export/print/actions.py | 36 +++++------------------------- 1 file changed, 5 insertions(+), 31 deletions(-) diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index 41a47368f..a8031f26a 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -70,11 +70,7 @@ def check_printer_setup(self) -> None: self.submission.exit_gracefully(ExportStatus.ERROR_MULTIPLE_PRINTERS_FOUND.value) printer_uri = printers[0] - - logger.info('Installing printer drivers') printer_ppd = self.install_printer_ppd(printer_uri) - - logger.info('Setting up printer') self.setup_printer(printer_uri, printer_ppd) except subprocess.CalledProcessError as e: logger.error(e) @@ -122,6 +118,7 @@ def install_printer_ppd(self, uri): # Compile and install drivers that are not already installed if not os.path.exists(printer_ppd): + logger.info('Installing 
printer drivers') self.submission.safe_check_call( command=[ "sudo", @@ -137,46 +134,23 @@ def install_printer_ppd(self, uri): def setup_printer(self, printer_uri, printer_ppd): # Add the printer using lpadmin - logger.info('Setting up printer name {}'.format(self.printer_name)) + logger.info('Setting up printer {}'.format(self.printer_name)) self.submission.safe_check_call( command=[ "sudo", "lpadmin", "-p", self.printer_name, + "-E", "-v", printer_uri, "-P", printer_ppd, + "-u", + "allow:user" ], error_message=ExportStatus.ERROR_PRINTER_INSTALL.value ) - # Activate the printer so that it can receive jobs - logger.info('Activating printer {}'.format(self.printer_name)) - self.submission.safe_check_call( - command=["sudo", "lpadmin", "-p", self.printer_name], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value - ) - # worksaround for version of lpadmin/cups in debian buster: - # see https://forums.developer.apple.com/thread/106112 - self.submission.safe_check_call( - command=["sudo", "cupsaccept", self.printer_name], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value - ) - # A non-zero return code is expected here, but the command is required - # and works as expected. 
- command = ["sudo", "cupsenable", self.printer_name] - try: - subprocess.check_call(command) - except subprocess.CalledProcessError: - pass - - # Allow user to print (without using sudo) - logger.info('Allow user to print {}'.format(self.printer_name)) - self.submission.safe_check_call( - command=["sudo", "lpadmin", "-p", self.printer_name, "-u", "allow:user"], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value - ) def print_test_page(self): logger.info('Printing test page') From d256353378c3b18ab0b6b9f6d742af2d6675a2c9 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 20 Feb 2020 10:41:03 -0500 Subject: [PATCH 186/352] prod release instructions --- README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 02be24f19..df6198d8c 100644 --- a/README.md +++ b/README.md @@ -51,12 +51,14 @@ the updated dependency to our pip mirror (you can request this in the PR). ## Making a Release -**Note:** These are the release guidelines for pre-production alpha releases. Production release tags must be signed with the SecureDrop release key. - 1. Update versions: `./update_version.sh $new_version_number` and add a new entry in the changelog. 2. Commit the changes with commit message `securedrop-proxy $new_version_number` and make a PR. 3. You should confirm via a manual debian package build and manual testing in Qubes that there are no regressions (this is limited pre-release QA). -4. Once your PR is approved, you can add a tag and push: `git tag $new_version_number`. +4. Once your PR is approved, you can add a tag: `git tag $new_version_number`. +5. Perform the release signing ceremony on the tag. Push the tag. +6. The signer should create the source tarball via `python3 setup.py sdist`. +7. Add a detached signature (with the release key) for the source tarball. +8. Submit the source tarball and signature via PR into this [repository](https://github.com/freedomofpress/securedrop-debian-packaging). 
This tarball will be used by the package builder. #### Configuration From 70eae01e899fcfeb77ddcad34c4d85216cc7b778 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 20 Feb 2020 10:42:49 -0500 Subject: [PATCH 187/352] securedrop-proxy 0.2.0 --- changelog.md | 4 ++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index 5b816ff00..dae1be583 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.2.0 + + * Initial beta release. + ## 0.1.7 * Update Werkzeug to 0.16.0 due to CVE-2019-14806 (#64) diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 11808190d..0ea3a944b 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.1.7 +0.2.0 From d991b4f3c14902b84d24a5e15c35f10ea6964bc1 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 20 Feb 2020 19:03:55 -0500 Subject: [PATCH 188/352] securedrop-export 0.2.0 --- changelog.md | 15 +++++++++++---- securedrop_export/VERSION | 2 +- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/changelog.md b/changelog.md index 02c74ef5a..b4ede824b 100644 --- a/changelog.md +++ b/changelog.md @@ -1,6 +1,13 @@ -securedrop-export (0.1.1-1) unstable; urgency=medium +# Changelog - [ Freedom Of The Press Foundation ] - * Initial release +## 0.2.0 - -- SecureDrop Team Thu, 18 Jul 2019 10:47:38 -0700 + * Initial beta release. + +## 0.1.2 + + * Adds logging (#17). + +## 0.1.1 + + * Initial release. 
diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index d917d3e26..0ea3a944b 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.1.2 +0.2.0 From 0f32743123cf1673362406ac7818eefd9a0b8237 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Thu, 20 Feb 2020 19:08:33 -0500 Subject: [PATCH 189/352] securedrop-log 0.1.0 --- VERSION | 2 +- changelog.md | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 81340c7e7..6e8bf73aa 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.0.4 +0.1.0 diff --git a/changelog.md b/changelog.md index c9fba47b1..1456efc39 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.1.0 + + * Initial beta release. + ## 0.0.4 * Converts into rsyslog based logging system. From 7ed5effb70e7037613fa7ed3fa42f9cd27595c29 Mon Sep 17 00:00:00 2001 From: mickael e Date: Mon, 2 Mar 2020 11:19:27 -0500 Subject: [PATCH 190/352] Set default mime handling of files to open-in-dvm.desktop Uses the same logic of securedrop-client --- MANIFEST.in | 2 + files/mimeapps.list | 297 ++++++++++++++++++++++++++++++++++++++ files/open-in-dvm.desktop | 10 ++ 3 files changed, 309 insertions(+) create mode 100644 files/mimeapps.list create mode 100644 files/open-in-dvm.desktop diff --git a/MANIFEST.in b/MANIFEST.in index 8104f05fc..e1056bffe 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,4 +9,6 @@ include setup.py include files/send-to-usb.desktop include files/application-x-sd-export.xml include files/sd-logo.png +include files/mimeapps.list +include files/open-in-dvm.desktop diff --git a/files/mimeapps.list b/files/mimeapps.list new file mode 100644 index 000000000..63aabd78a --- /dev/null +++ b/files/mimeapps.list @@ -0,0 +1,297 @@ +[Default Applications] +application/x-sd-export=send-to-usb.desktop; +application/x-dia-diagram=open-in-dvm.desktop; +text/x-vcard=open-in-dvm.desktop; +text/directory=open-in-dvm.desktop; +text/calendar=open-in-dvm.desktop; 
+application/x-cd-image=open-in-dvm.desktop; +application/x-raw-disk-image=open-in-dvm.desktop; +application/x-raw-disk-image-xz-compressed=open-in-dvm.desktop; +image/x-compressed-xcf=open-in-dvm.desktop; +image/x-xcf=open-in-dvm.desktop; +image/x-psd=open-in-dvm.desktop; +image/x-fits=open-in-dvm.desktop; +image/bmp=open-in-dvm.desktop; +image/gif=open-in-dvm.desktop; +image/x-icb=open-in-dvm.desktop; +image/x-ico=open-in-dvm.desktop; +image/x-pcx=open-in-dvm.desktop; +image/x-portable-anymap=open-in-dvm.desktop; +image/x-portable-bitmap=open-in-dvm.desktop; +image/x-portable-graymap=open-in-dvm.desktop; +image/x-portable-pixmap=open-in-dvm.desktop; +image/x-xbitmap=open-in-dvm.desktop; +image/x-xpixmap=open-in-dvm.desktop; +image/svg+xml=open-in-dvm.desktop; +application/vnd.ms-word=open-in-dvm.desktop; +application/vnd.wordperfect=open-in-dvm.desktop; +application/vnd.sun.xml.writer=open-in-dvm.desktop; +application/vnd.sun.xml.writer.global=open-in-dvm.desktop; +application/vnd.sun.xml.writer.template=open-in-dvm.desktop; +application/vnd.stardivision.writer=open-in-dvm.desktop; +application/vnd.oasis.opendocument.text=open-in-dvm.desktop; +application/vnd.oasis.opendocument.text-template=open-in-dvm.desktop; +application/vnd.oasis.opendocument.text-web=open-in-dvm.desktop; +application/vnd.oasis.opendocument.text-master=open-in-dvm.desktop; +application/vnd.openxmlformats-officedocument.wordprocessingml.document=open-in-dvm.desktop; +application/vnd.openxmlformats-officedocument.wordprocessingml.template=open-in-dvm.desktop; +application/vnd.ms-excel=open-in-dvm.desktop; +application/vnd.stardivision.calc=open-in-dvm.desktop; +application/vnd.sun.xml.calc=open-in-dvm.desktop; +application/vnd.sun.xml.calc.template=open-in-dvm.desktop; +application/vnd.openxmlformats-officedocument.spreadsheetml.sheet=open-in-dvm.desktop; +application/vnd.openxmlformats-officedocument.spreadsheetml.template=open-in-dvm.desktop; 
+application/vnd.oasis.opendocument.spreadsheet=open-in-dvm.desktop; +application/vnd.oasis.opendocument.spreadsheet-template=open-in-dvm.desktop; +application/vnd.ms-powerpoint=open-in-dvm.desktop; +application/vnd.stardivision.impress=open-in-dvm.desktop; +application/vnd.sun.xml.impress=open-in-dvm.desktop; +application/vnd.sun.xml.impress.template=open-in-dvm.desktop; +application/vnd.oasis.opendocument.presentation=open-in-dvm.desktop; +application/vnd.oasis.opendocument.presentation-template=open-in-dvm.desktop; +application/vnd.openxmlformats-officedocument.presentationml.presentation=open-in-dvm.desktop; +application/vnd.openxmlformats-officedocument.presentationml.template=open-in-dvm.desktop; +application/vnd.stardivision.draw=open-in-dvm.desktop; +application/vnd.sun.xml.draw=open-in-dvm.desktop; +application/vnd.sun.xml.draw.template=open-in-dvm.desktop; +application/vnd.oasis.opendocument.graphics=open-in-dvm.desktop; +application/vnd.oasis.opendocument.graphics-template=open-in-dvm.desktop; +application/vnd.oasis.opendocument.formula=open-in-dvm.desktop; +application/vnd.sun.xml.math=open-in-dvm.desktop; +application/vnd.stardivision.math=open-in-dvm.desktop; +application/vnd.oasis.opendocument.database=open-in-dvm.desktop; +application/vnd.sun.xml.base=open-in-dvm.desktop; +application/pdf=open-in-dvm.desktop; +application/postscript=open-in-dvm.desktop; +application/x-qw=open-in-dvm.desktop; +application/x-gnucash=open-in-dvm.desktop; +application/vnd.lotus-1-2-3=open-in-dvm.desktop; +application/x-oleo=open-in-dvm.desktop; +application/x-gnumeric=open-in-dvm.desktop; +application/x-xbase=open-in-dvm.desktop; +application/x-abiword=open-in-dvm.desktop; +application/x-dvi=open-in-dvm.desktop; +application/x-catalog=open-in-dvm.desktop; +application/x-rpm=open-in-dvm.desktop; +text/csv=open-in-dvm.desktop; +text/plain=open-in-dvm.desktop; +text/html=open-in-dvm.desktop; +application/xhtml+xml=open-in-dvm.desktop; +inode/directory=open-in-dvm.desktop; 
+x-content/blank-cd=open-in-dvm.desktop; +x-content/blank-dvd=open-in-dvm.desktop; +x-content/blank-bd=open-in-dvm.desktop; +x-content/blank-hddvd=open-in-dvm.desktop; +x-content/video-dvd=open-in-dvm.desktop; +x-content/video-vcd=open-in-dvm.desktop; +x-content/video-svcd=open-in-dvm.desktop; +#x-content/video-bluray=open-in-dvm.desktop; +#x-content/video-hddvd=open-in-dvm.desktop; +x-content/audio-cdda=open-in-dvm.desktop; +x-content/audio-dvd=open-in-dvm.desktop; +x-content/audio-player=open-in-dvm.desktop; +x-content/image-dcf=open-in-dvm.desktop; +x-content/image-picturecd=open-in-dvm.desktop; +# URI scheme handlers +x-scheme-handler/mailto=open-in-dvm.desktop; +x-scheme-handler/http=open-in-dvm.desktop; +x-scheme-handler/https=open-in-dvm.desktop; +application/mxf=open-in-dvm.desktop; +application/ogg=open-in-dvm.desktop; +application/ram=open-in-dvm.desktop; +application/sdp=open-in-dvm.desktop; +application/smil=open-in-dvm.desktop; +application/smil+xml=open-in-dvm.desktop; +application/vnd.apple.mpegurl=open-in-dvm.desktop; +application/vnd.ms-wpl=open-in-dvm.desktop; +application/vnd.rn-realmedia=open-in-dvm.desktop; +application/x-extension-m4a=open-in-dvm.desktop; +application/x-extension-mp4=open-in-dvm.desktop; +application/x-flac=open-in-dvm.desktop; +application/x-flash-video=open-in-dvm.desktop; +application/x-matroska=open-in-dvm.desktop; +application/x-netshow-channel=open-in-dvm.desktop; +application/x-ogg=open-in-dvm.desktop; +application/x-quicktime-media-link=open-in-dvm.desktop; +application/x-quicktimeplayer=open-in-dvm.desktop; +application/x-shorten=open-in-dvm.desktop; +application/x-smil=open-in-dvm.desktop; +application/xspf+xml=open-in-dvm.desktop; +audio/3gpp=open-in-dvm.desktop; +audio/ac3=open-in-dvm.desktop; +audio/AMR=open-in-dvm.desktop; +audio/AMR-WB=open-in-dvm.desktop; +audio/basic=open-in-dvm.desktop; +audio/midi=open-in-dvm.desktop; +audio/mp2=open-in-dvm.desktop; +audio/mp4=open-in-dvm.desktop; 
+audio/mpeg=open-in-dvm.desktop; +audio/mpegurl=open-in-dvm.desktop; +audio/ogg=open-in-dvm.desktop; +audio/prs.sid=open-in-dvm.desktop; +audio/vnd.rn-realaudio=open-in-dvm.desktop; +audio/x-aiff=open-in-dvm.desktop; +audio/x-ape=open-in-dvm.desktop; +audio/x-flac=open-in-dvm.desktop; +audio/x-gsm=open-in-dvm.desktop; +audio/x-it=open-in-dvm.desktop; +audio/x-m4a=open-in-dvm.desktop; +audio/x-matroska=open-in-dvm.desktop; +audio/x-mod=open-in-dvm.desktop; +audio/x-mp3=open-in-dvm.desktop; +audio/x-mpeg=open-in-dvm.desktop; +audio/x-mpegurl=open-in-dvm.desktop; +audio/x-ms-asf=open-in-dvm.desktop; +audio/x-ms-asx=open-in-dvm.desktop; +audio/x-ms-wax=open-in-dvm.desktop; +audio/x-ms-wma=open-in-dvm.desktop; +audio/x-musepack=open-in-dvm.desktop; +audio/x-pn-aiff=open-in-dvm.desktop; +audio/x-pn-au=open-in-dvm.desktop; +audio/x-pn-realaudio=open-in-dvm.desktop; +audio/x-pn-realaudio-plugin=open-in-dvm.desktop; +audio/x-pn-wav=open-in-dvm.desktop; +audio/x-pn-windows-acm=open-in-dvm.desktop; +audio/x-realaudio=open-in-dvm.desktop; +audio/x-real-audio=open-in-dvm.desktop; +audio/x-s3m=open-in-dvm.desktop; +audio/x-sbc=open-in-dvm.desktop; +audio/x-scpls=open-in-dvm.desktop; +audio/x-speex=open-in-dvm.desktop; +audio/x-stm=open-in-dvm.desktop; +audio/x-tta=open-in-dvm.desktop; +audio/x-wav=open-in-dvm.desktop; +audio/x-wavpack=open-in-dvm.desktop; +audio/x-vorbis=open-in-dvm.desktop; +audio/x-vorbis+ogg=open-in-dvm.desktop; +audio/x-xm=open-in-dvm.desktop; +image/vnd.rn-realpix=open-in-dvm.desktop; +image/x-pict=open-in-dvm.desktop; +misc/ultravox=open-in-dvm.desktop; +text/google-video-pointer=open-in-dvm.desktop; +text/x-google-video-pointer=open-in-dvm.desktop; +video/3gp=open-in-dvm.desktop; +video/3gpp=open-in-dvm.desktop; +video/dv=open-in-dvm.desktop; +video/divx=open-in-dvm.desktop; +video/fli=open-in-dvm.desktop; +video/flv=open-in-dvm.desktop; +video/mp2t=open-in-dvm.desktop; +video/mp4=open-in-dvm.desktop; +video/mp4v-es=open-in-dvm.desktop; 
+video/mpeg=open-in-dvm.desktop; +video/msvideo=open-in-dvm.desktop; +video/ogg=open-in-dvm.desktop; +video/quicktime=open-in-dvm.desktop; +video/vivo=open-in-dvm.desktop; +video/vnd.divx=open-in-dvm.desktop; +video/vnd.mpegurl=open-in-dvm.desktop; +video/vnd.rn-realvideo=open-in-dvm.desktop; +video/vnd.vivo=open-in-dvm.desktop; +video/webm=open-in-dvm.desktop; +video/x-anim=open-in-dvm.desktop; +video/x-avi=open-in-dvm.desktop; +video/x-flc=open-in-dvm.desktop; +video/x-fli=open-in-dvm.desktop; +video/x-flic=open-in-dvm.desktop; +video/x-flv=open-in-dvm.desktop; +video/x-m4v=open-in-dvm.desktop; +video/x-matroska=open-in-dvm.desktop; +video/x-mpeg=open-in-dvm.desktop; +video/x-mpeg2=open-in-dvm.desktop; +video/x-ms-asf=open-in-dvm.desktop; +video/x-ms-asx=open-in-dvm.desktop; +video/x-msvideo=open-in-dvm.desktop; +video/x-ms-wm=open-in-dvm.desktop; +video/x-ms-wmv=open-in-dvm.desktop; +video/x-ms-wmx=open-in-dvm.desktop; +video/x-ms-wvx=open-in-dvm.desktop; +video/x-nsv=open-in-dvm.desktop; +video/x-ogm+ogg=open-in-dvm.desktop; +video/x-theora+ogg=open-in-dvm.desktop; +video/x-totem-stream=open-in-dvm.desktop; +x-content/video-dvd=open-in-dvm.desktop; +x-content/video-vcd=open-in-dvm.desktop; +x-content/video-svcd=open-in-dvm.desktop; +x-scheme-handler/pnm=open-in-dvm.desktop; +x-scheme-handler/mms=open-in-dvm.desktop; +x-scheme-handler/net=open-in-dvm.desktop; +x-scheme-handler/rtp=open-in-dvm.desktop; +x-scheme-handler/rtmp=open-in-dvm.desktop; +x-scheme-handler/rtsp=open-in-dvm.desktop; +x-scheme-handler/mmsh=open-in-dvm.desktop; +x-scheme-handler/uvox=open-in-dvm.desktop; +x-scheme-handler/icy=open-in-dvm.desktop; +x-scheme-handler/icyx=open-in-dvm.desktop; +application/x-7z-compressed=open-in-dvm.desktop; +application/x-7z-compressed-tar=open-in-dvm.desktop; +application/x-ace=open-in-dvm.desktop; +application/x-alz=open-in-dvm.desktop; +application/x-ar=open-in-dvm.desktop; +application/x-arj=open-in-dvm.desktop; +application/x-bzip=open-in-dvm.desktop; 
+application/x-bzip-compressed-tar=open-in-dvm.desktop; +application/x-bzip1=open-in-dvm.desktop; +application/x-bzip1-compressed-tar=open-in-dvm.desktop; +application/x-cabinet=open-in-dvm.desktop; +application/x-cbr=open-in-dvm.desktop; +application/x-cbz=open-in-dvm.desktop; +application/x-compress=open-in-dvm.desktop; +application/x-compressed-tar=open-in-dvm.desktop; +application/x-cpio=open-in-dvm.desktop; +application/x-deb=open-in-dvm.desktop; +application/x-ear=open-in-dvm.desktop; +application/x-ms-dos-executable=open-in-dvm.desktop; +application/x-gtar=open-in-dvm.desktop; +application/x-gzip=open-in-dvm.desktop; +application/x-gzpostscript=open-in-dvm.desktop; +application/x-java-archive=open-in-dvm.desktop; +application/x-lha=open-in-dvm.desktop; +application/x-lhz=open-in-dvm.desktop; +application/x-lrzip=open-in-dvm.desktop; +application/x-lrzip-compressed-tar=open-in-dvm.desktop; +application/x-lzip=open-in-dvm.desktop; +application/x-lzip-compressed-tar=open-in-dvm.desktop; +application/x-lzma=open-in-dvm.desktop; +application/x-lzma-compressed-tar=open-in-dvm.desktop; +application/x-lzop=open-in-dvm.desktop; +application/x-lzop-compressed-tar=open-in-dvm.desktop; +application/x-ms-wim=open-in-dvm.desktop; +application/x-rar=open-in-dvm.desktop; +application/x-rar-compressed=open-in-dvm.desktop; +application/x-rzip=open-in-dvm.desktop; +application/x-tar=open-in-dvm.desktop; +application/x-tarz=open-in-dvm.desktop; +application/x-stuffit=open-in-dvm.desktop; +application/x-war=open-in-dvm.desktop; +application/x-xz=open-in-dvm.desktop; +application/x-xz-compressed-tar=open-in-dvm.desktop; +application/x-zip=open-in-dvm.desktop; +application/x-zip-compressed=open-in-dvm.desktop; +application/x-zoo=open-in-dvm.desktop; +application/zip=open-in-dvm.desktop; +application/x-archive=open-in-dvm.desktop; +application/vnd.ms-cab-compressed=open-in-dvm.desktop; +application/x-source-rpm=open-in-dvm.desktop; +image/bmp=open-in-dvm.desktop; 
+image/gif=open-in-dvm.desktop; +image/jpeg=open-in-dvm.desktop; +image/jpg=open-in-dvm.desktop; +image/pjpeg=open-in-dvm.desktop; +image/png=open-in-dvm.desktop; +image/tiff=open-in-dvm.desktop; +image/x-bmp=open-in-dvm.desktop; +image/x-gray=open-in-dvm.desktop; +image/x-icb=open-in-dvm.desktop; +image/x-ico=open-in-dvm.desktop; +image/x-png=open-in-dvm.desktop; +image/x-portable-anymap=open-in-dvm.desktop; +image/x-portable-bitmap=open-in-dvm.desktop; +image/x-portable-graymap=open-in-dvm.desktop; +image/x-portable-pixmap=open-in-dvm.desktop; +image/x-xbitmap=open-in-dvm.desktop; +image/x-xpixmap=open-in-dvm.desktop; +image/x-pcx=open-in-dvm.desktop; +image/svg+xml=open-in-dvm.desktop; +image/svg+xml-compressed=open-in-dvm.desktop; +image/vnd.wap.wbmp=open-in-dvm.desktop; diff --git a/files/open-in-dvm.desktop b/files/open-in-dvm.desktop new file mode 100644 index 000000000..64478ed45 --- /dev/null +++ b/files/open-in-dvm.desktop @@ -0,0 +1,10 @@ +[Desktop Entry] +Type=Application +Version=1.0 +Name=Open in Disposable VM +Comment=Open file in a Disposable VM +TryExec=/usr/bin/qvm-open-in-vm +Exec=/usr/bin/qvm-open-in-vm '@dispvm:sd-viewer' %f +Icon=/usr/share/icons/Qubes/dispvm-gray.png +Terminal=false +Categories=Qubes;Utility; From 35a4ce39272f0be0004a0778c7150ab7fb818445 Mon Sep 17 00:00:00 2001 From: mickael e Date: Mon, 2 Mar 2020 11:20:12 -0500 Subject: [PATCH 191/352] securedrop-export 0.2.1 --- changelog.md | 4 ++++ securedrop_export/VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index b4ede824b..74cc058c8 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.2.1 + + * Open files in dvm by default + ## 0.2.0 * Initial beta release. 
diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index 0ea3a944b..0c62199f1 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.2.0 +0.2.1 From 89668e21f486a0b9ad069f9b8f9be2d5fdedaa4d Mon Sep 17 00:00:00 2001 From: Conor Schaefer Date: Mon, 2 Mar 2020 10:03:14 -0800 Subject: [PATCH 192/352] Infers hostname for "localvm" setting Still checks the config first, but if the "localvm" flag isn't set there, defaults to the hostname provided by the system. Useful for simplfying config story between TemplateVMs & AppVMs in Qubes. --- VERSION | 2 +- changelog.md | 4 ++++ sd-rsyslog | 3 ++- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/VERSION b/VERSION index 6e8bf73aa..17e51c385 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.1.0 +0.1.1 diff --git a/changelog.md b/changelog.md index 1456efc39..32937ae6f 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.1.1 + + * Infers hostname from system settings, if no config value found. + ## 0.1.0 * Initial beta release. 
diff --git a/sd-rsyslog b/sd-rsyslog index 4f3233ee5..09432263d 100644 --- a/sd-rsyslog +++ b/sd-rsyslog @@ -30,6 +30,7 @@ import os import logging import configparser from subprocess import Popen, PIPE +from socket import gethostname # Global definitions specific to your plugin process = None @@ -81,7 +82,7 @@ def onInit(): config = configparser.ConfigParser() config.read('/etc/sd-rsyslog.conf') logvmname = config['sd-rsyslog']['remotevm'] - localvmname = config['sd-rsyslog']['localvm'] + localvmname = config['sd-rsyslog'].get('localvm', gethostname()) process = Popen( ["/usr/lib/qubes/qrexec-client-vm", logvmname, "securedrop.Log"], stdin=PIPE, From fdefcdf6e2d2ab1d2dd90602af78eeebd3fc6100 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Fri, 20 Mar 2020 21:06:51 -0400 Subject: [PATCH 193/352] logging: avoid storing original filename of source document --- securedrop_export/print/actions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index a8031f26a..f0bc5adf3 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -197,7 +197,7 @@ def print_file(self, file_to_print): ) file_to_print = converted_path - logger.info('Sending file to printer {}:{}'.format(self.printer_name, file_to_print)) + logger.info('Sending file to printer {}'.format(self.printer_name)) self.submission.safe_check_call( command=["xpp", "-P", self.printer_name, file_to_print], error_message=ExportStatus.ERROR_PRINT.value From ad40a19c475973b332ad64a72dd72db14354e8aa Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 20 Mar 2020 12:01:40 -0400 Subject: [PATCH 194/352] application/x-desktop -> open-in-dvm --- files/mimeapps.list | 1 + 1 file changed, 1 insertion(+) diff --git a/files/mimeapps.list b/files/mimeapps.list index 63aabd78a..886a600b0 100644 --- a/files/mimeapps.list +++ b/files/mimeapps.list @@ -5,6 +5,7 @@ text/x-vcard=open-in-dvm.desktop; 
text/directory=open-in-dvm.desktop; text/calendar=open-in-dvm.desktop; application/x-cd-image=open-in-dvm.desktop; +application/x-desktop=open-in-dvm.desktop; application/x-raw-disk-image=open-in-dvm.desktop; application/x-raw-disk-image-xz-compressed=open-in-dvm.desktop; image/x-compressed-xcf=open-in-dvm.desktop; From 5e2955d6f1a700d611181d1dc1503947aa9eb0cd Mon Sep 17 00:00:00 2001 From: mickael e Date: Mon, 23 Mar 2020 09:20:48 -0400 Subject: [PATCH 195/352] Adds view-only parameter to open-in-dvm.desktop Ensures changes to a file opened in a dvm are not copied back to the calling vm --- files/open-in-dvm.desktop | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/files/open-in-dvm.desktop b/files/open-in-dvm.desktop index 64478ed45..5dd268167 100644 --- a/files/open-in-dvm.desktop +++ b/files/open-in-dvm.desktop @@ -4,7 +4,7 @@ Version=1.0 Name=Open in Disposable VM Comment=Open file in a Disposable VM TryExec=/usr/bin/qvm-open-in-vm -Exec=/usr/bin/qvm-open-in-vm '@dispvm:sd-viewer' %f +Exec=/usr/bin/qvm-open-in-vm --view-only '@dispvm:sd-viewer' %f Icon=/usr/share/icons/Qubes/dispvm-gray.png Terminal=false Categories=Qubes;Utility; From ee0e73adc9c7865c8e2a1cb769559eea2782b006 Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 20 Mar 2020 12:02:13 -0400 Subject: [PATCH 196/352] securedrop-export 0.2.2 --- changelog.md | 4 ++++ securedrop_export/VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index 74cc058c8..84074d1db 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.2.2 + + * Update mimetype handling + ## 0.2.1 * Open files in dvm by default diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index 0c62199f1..ee1372d33 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.2.1 +0.2.2 From 98ace34e4ad66c98425e3c05371a2d0925809c3e Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 27 Mar 2020 17:28:27 -0400 Subject: 
[PATCH 197/352] securedrop-export 0.2.3 --- changelog.md | 5 +++++ securedrop_export/VERSION | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index 84074d1db..dafcbb260 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,10 @@ # Changelog +## 0.2.3 + + * Adds gnome-disks to sd-devices + * Documentation updates + ## 0.2.2 * Update mimetype handling diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index ee1372d33..717903969 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.2.2 +0.2.3 From 4bfc44efbd8412c25d6b643a855d46a20321d8c2 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Mon, 30 Mar 2020 16:33:05 +0530 Subject: [PATCH 198/352] Fixes#68 uses incoming timeout value from JSON Now we are using the incoming timeout value from the JSON input. --- securedrop_proxy/main.py | 3 +++ tests/test_main.py | 42 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py index 69abf48d7..ff979053a 100644 --- a/securedrop_proxy/main.py +++ b/securedrop_proxy/main.py @@ -42,4 +42,7 @@ def __main__(incoming: str, p: Proxy) -> None: p.req = req + if "timeout" in client_req: + p.timeout = client_req["timeout"] + p.proxy() diff --git a/tests/test_main.py b/tests/test_main.py index 19bc4e5a8..05f158623 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -55,6 +55,48 @@ def on_done(self): for item in json.loads(response["body"]): self.assertEqual(item["userId"], 1) + @vcr.use_cassette("fixtures/main_json_response_with_timeout.yaml") + def test_json_response_with_timeout(self): + test_input_json = """{ "method": "GET", + "path_query": "/posts?userId=1", + "timeout": 40.0 }""" + + req = proxy.Req() + req.method = "GET" + req.path_query = "" + req.headers = {"Accept": "application/json"} + + # Use custom callbacks + def on_save(self, fh, res): + pass + + def on_done(self): + assert self.res.status 
== http.HTTPStatus.OK + print(json.dumps(self.res.__dict__)) + + self.p = proxy.Proxy(self.conf_path, req) + + # Patching on_save and on_done + + self.p.on_done = types.MethodType(on_done, self.p) + self.p.on_save = types.MethodType(on_save, self.p) + + saved_stdout = sys.stdout + try: + out = StringIO() + sys.stdout = out + main.__main__(test_input_json, self.p) + output = out.getvalue().strip() + finally: + sys.stdout = saved_stdout + + # Test that the right timeout was set in proxy object + assert self.p.timeout == 40.0 + + response = json.loads(output) + for item in json.loads(response["body"]): + self.assertEqual(item["userId"], 1) + @vcr.use_cassette("fixtures/main_non_json_response.yaml") def test_non_json_response(self): test_input_json = """{ "method": "GET", From bcfa19fa4825f54592899e4eeecb3c2ad31715f0 Mon Sep 17 00:00:00 2001 From: John Hensley Date: Mon, 30 Mar 2020 11:47:52 -0400 Subject: [PATCH 199/352] Increase the proxy's default timeout Increase the default Requests connection and read timeout from ten to 120 seconds. --- securedrop_proxy/proxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 8af6862be..f709b1237 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -46,7 +46,7 @@ def __init__(self, status: int) -> None: class Proxy: def __init__( - self, conf_path: str, req: Req = Req(), timeout: float = 10.0 + self, conf_path: str, req: Req = Req(), timeout: float = 120.0 ) -> None: # The configuration path for Proxy is a must. 
self.read_conf(conf_path) From d97074662d1db0b970ce05e5ca96b2ba3699fb48 Mon Sep 17 00:00:00 2001 From: mickael e Date: Mon, 30 Mar 2020 14:00:36 -0400 Subject: [PATCH 200/352] securedrop-proxy 0.2.1 --- changelog.md | 4 ++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index dae1be583..c5590c3da 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.2.1 + + * Increase default timeout to 120s from 10s (#70) + ## 0.2.0 * Initial beta release. diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 0ea3a944b..0c62199f1 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.2.0 +0.2.1 From 01511033357ce7397bdb812ce58fcefc0315e43a Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 1 Apr 2020 14:52:35 -0400 Subject: [PATCH 201/352] Update pyyaml to 5.3.1 Addresses CVE-2020-1747 --- dev-requirements.txt | 26 +++++++++++++------------- requirements.in | 2 +- requirements.txt | 24 ++++++++++++------------ 3 files changed, 26 insertions(+), 26 deletions(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 1153d0469..5ee12c538 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -115,18 +115,18 @@ pycodestyle==2.4.0 \ pyflakes==2.0.0 \ --hash=sha256:9a7662ec724d0120012f6e29d6248ae3727d821bba522a0e6b356eff19126a49 \ --hash=sha256:f661252913bc1dbe7fcfcbf0af0db3f42ab65aabd1a6ca68fe5d466bace94dae -pyyaml==5.1 \ - --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \ - --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \ - --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \ - --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \ - --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \ - --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \ - 
--hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \ - --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \ - --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \ - --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \ - --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 +pyyaml==5.3.1 \ + --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ + --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ + --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ + --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ + --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ + --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ + --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ + --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ + --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ + --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ + --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a requests==2.20.0 \ --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 @@ -178,4 +178,4 @@ yarl==1.2.6 \ # WARNING: The following packages were not pinned, but pip requires them to be # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. 
-# setuptools +# setuptools==46.1.3 # via flake8 diff --git a/requirements.in b/requirements.in index 1e2aafcf0..1f2566b91 100644 --- a/requirements.in +++ b/requirements.in @@ -3,7 +3,7 @@ chardet==3.0.4 furl==2.0.0 idna==2.7 orderedmultidict==1.0 -PyYAML==5.1 +PyYAML==5.3.1 six==1.11.0 requests==2.20.0 urllib3==1.24.3 diff --git a/requirements.txt b/requirements.txt index 38b09c7e3..214b04d95 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,18 +19,18 @@ idna==2.7 \ orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 -pyyaml==5.1 \ - --hash=sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c \ - --hash=sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95 \ - --hash=sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2 \ - --hash=sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4 \ - --hash=sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad \ - --hash=sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba \ - --hash=sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1 \ - --hash=sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e \ - --hash=sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673 \ - --hash=sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13 \ - --hash=sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19 +pyyaml==5.3.1 \ + --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ + --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ + --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ + --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ + 
--hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ + --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ + --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ + --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ + --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ + --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ + --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a requests==2.20.0 \ --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 From cb4b5ab882de321879ce6e13088f1b072c5345ad Mon Sep 17 00:00:00 2001 From: mickael e Date: Wed, 1 Apr 2020 17:28:00 -0400 Subject: [PATCH 202/352] Update to PyYAML 5.3.1 in build-requirements Use the wheel hosted on pip mirror --- build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-requirements.txt b/build-requirements.txt index 390b2281c..ed7807329 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -3,7 +3,7 @@ chardet==3.0.4 --hash=sha256:f5632e583a4f61f1e16d0cc98127d241fb11c3c6ddfddee1593 furl==2.0.0 --hash=sha256:1855003e64dcb934556ad79994ba1a3a852da337e353d84d3b4ef75031913451 idna==2.7 --hash=sha256:491f674364ba3232ed1eb4c1eb7407887f62cef6c300aad7df6e01acd88ffb25 orderedmultidict==1.0 --hash=sha256:51efddca0b4ae6d885bbafd8ca44e51758166c144cf006dbead5c9394b2a9eae -pyyaml==5.1 --hash=sha256:59ff9ed67bd8be210f91fd20f0ef844ee897d11eb4c19c611a77a37896c124b9 +pyyaml==5.3.1 --hash=sha256:cb4442140d3195f5f799096aa35aadce15f493046135a03668023b80824dd44c requests==2.20.0 --hash=sha256:d87b2085783d31d874ac7bc62660e287932aaee7059e80b41b76462eb18d35cc six==1.11.0 --hash=sha256:aa4ad34049ddff178b533062797fd1db9f0038b7c5c2461a7cde2244300b9f3d urllib3==1.24.3 
--hash=sha256:3d440cbb168e2c963d5099232bdb3f7390bf031b6270dad1bc79751698a1399a From 200c194e7d255f6e336d750070e195f5b16da3d6 Mon Sep 17 00:00:00 2001 From: redshiftzero Date: Tue, 5 May 2020 11:52:02 -0400 Subject: [PATCH 203/352] securedrop-proxy 0.3.0 --- changelog.md | 5 +++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index c5590c3da..bfded302f 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,10 @@ # Changelog +## 0.3.0 + + * Use incoming timeout value from JSON (#69). + * Update PyYaml to 5.3.1 due to CVE-2020-1747 (#73). + ## 0.2.1 * Increase default timeout to 120s from 10s (#70) diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 0c62199f1..0d91a54c7 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.2.1 +0.3.0 From c24a7a7c52f7b85846c24598889ecd3859e17857 Mon Sep 17 00:00:00 2001 From: John Hensley Date: Fri, 29 May 2020 09:57:09 -0400 Subject: [PATCH 204/352] Add the fixture for test_json_response_with_timeout --- fixtures/main_json_response_with_timeout.yaml | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) create mode 100644 fixtures/main_json_response_with_timeout.yaml diff --git a/fixtures/main_json_response_with_timeout.yaml b/fixtures/main_json_response_with_timeout.yaml new file mode 100644 index 000000000..f10ccee63 --- /dev/null +++ b/fixtures/main_json_response_with_timeout.yaml @@ -0,0 +1,94 @@ +interactions: +- request: + body: null + headers: {} + method: GET + uri: https://jsonplaceholder.typicode.com/posts?userId=1 + response: + body: + string: "[\n {\n \"userId\": 1,\n \"id\": 1,\n \"title\": \"sunt aut\ + \ facere repellat provident occaecati excepturi optio reprehenderit\",\n \ + \ \"body\": \"quia et suscipit\\nsuscipit recusandae consequuntur expedita\ + \ et cum\\nreprehenderit molestiae ut ut quas totam\\nnostrum rerum est autem\ + \ sunt rem eveniet architecto\"\n },\n {\n \"userId\": 1,\n 
\"id\"\ + : 2,\n \"title\": \"qui est esse\",\n \"body\": \"est rerum tempore\ + \ vitae\\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\\nfugiat\ + \ blanditiis voluptate porro vel nihil molestiae ut reiciendis\\nqui aperiam\ + \ non debitis possimus qui neque nisi nulla\"\n },\n {\n \"userId\":\ + \ 1,\n \"id\": 3,\n \"title\": \"ea molestias quasi exercitationem repellat\ + \ qui ipsa sit aut\",\n \"body\": \"et iusto sed quo iure\\nvoluptatem\ + \ occaecati omnis eligendi aut ad\\nvoluptatem doloribus vel accusantium quis\ + \ pariatur\\nmolestiae porro eius odio et labore et velit aut\"\n },\n {\n\ + \ \"userId\": 1,\n \"id\": 4,\n \"title\": \"eum et est occaecati\"\ + ,\n \"body\": \"ullam et saepe reiciendis voluptatem adipisci\\nsit amet\ + \ autem assumenda provident rerum culpa\\nquis hic commodi nesciunt rem tenetur\ + \ doloremque ipsam iure\\nquis sunt voluptatem rerum illo velit\"\n },\n\ + \ {\n \"userId\": 1,\n \"id\": 5,\n \"title\": \"nesciunt quas odio\"\ + ,\n \"body\": \"repudiandae veniam quaerat sunt sed\\nalias aut fugiat\ + \ sit autem sed est\\nvoluptatem omnis possimus esse voluptatibus quis\\nest\ + \ aut tenetur dolor neque\"\n },\n {\n \"userId\": 1,\n \"id\": 6,\n\ + \ \"title\": \"dolorem eum magni eos aperiam quia\",\n \"body\": \"\ + ut aspernatur corporis harum nihil quis provident sequi\\nmollitia nobis aliquid\ + \ molestiae\\nperspiciatis et ea nemo ab reprehenderit accusantium quas\\\ + nvoluptate dolores velit et doloremque molestiae\"\n },\n {\n \"userId\"\ + : 1,\n \"id\": 7,\n \"title\": \"magnam facilis autem\",\n \"body\"\ + : \"dolore placeat quibusdam ea quo vitae\\nmagni quis enim qui quis quo nemo\ + \ aut saepe\\nquidem repellat excepturi ut quia\\nsunt ut sequi eos ea sed\ + \ quas\"\n },\n {\n \"userId\": 1,\n \"id\": 8,\n \"title\": \"\ + dolorem dolore est ipsam\",\n \"body\": \"dignissimos aperiam dolorem qui\ + \ eum\\nfacilis quibusdam animi sint suscipit qui sint possimus cum\\nquaerat\ + \ 
magni maiores excepturi\\nipsam ut commodi dolor voluptatum modi aut vitae\"\ + \n },\n {\n \"userId\": 1,\n \"id\": 9,\n \"title\": \"nesciunt\ + \ iure omnis dolorem tempora et accusantium\",\n \"body\": \"consectetur\ + \ animi nesciunt iure dolore\\nenim quia ad\\nveniam autem ut quam aut nobis\\\ + net est aut quod aut provident voluptas autem voluptas\"\n },\n {\n \"\ + userId\": 1,\n \"id\": 10,\n \"title\": \"optio molestias id quia eum\"\ + ,\n \"body\": \"quo et expedita modi cum officia vel magni\\ndoloribus\ + \ qui repudiandae\\nvero nisi sit\\nquos veniam quod sed accusamus veritatis\ + \ error\"\n }\n]" + headers: + Access-Control-Allow-Credentials: + - 'true' + Age: + - '53198' + CF-Cache-Status: + - STALE + CF-RAY: + - 59b0b761ace6fe12-DTW + Cache-Control: + - max-age=14400 + Connection: + - keep-alive + Content-Type: + - application/json; charset=utf-8 + Date: + - Fri, 29 May 2020 13:56:31 GMT + Etag: + - W/"aa6-j2NSH739l9uq40OywFMn7Y0C/iY" + Expect-CT: + - max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct" + Expires: + - '-1' + Pragma: + - no-cache + Server: + - cloudflare + Set-Cookie: + - __cfduid=d259d305f25abe26719e6850ce4b06cae1590760560; expires=Sun, 28-Jun-20 + 13:56:00 GMT; path=/; domain=.typicode.com; HttpOnly; SameSite=Lax + Transfer-Encoding: + - chunked + Vary: + - Origin, Accept-Encoding + Via: + - 1.1 vegur + X-Content-Type-Options: + - nosniff + X-Powered-By: + - Express + cf-request-id: + - 030250f1080000fe12a1a71200000001 + status: + code: 200 + message: OK +version: 1 From f9bcbd3e8b49b737b2b85bf608f48349ddc68a4b Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 17 Jan 2020 15:50:55 +0530 Subject: [PATCH 205/352] Fixes #57 runs black & isort for code formatting To have latest black, we need updated typed-ast, for that we have to upgrade mypy too, and for mypy upgrade to work, we had to upgrade the mypy-extensions. It first runs isort to check if it passes, and then it runs black. 
Both uses 100 as line length. setup.py marks Python version as >= 3.7 as we are testing and running the code only on 3.7 on Debian Buster. Also contains the formatting change in proxy.py for black check to run sucessfully on CI. --- Makefile | 9 +- blackconfig/pyproject.toml | 2 + dev-requirements.in | 8 +- dev-requirements.txt | 205 +++++++++++++++++++++++---------- securedrop_proxy/entrypoint.py | 4 +- securedrop_proxy/proxy.py | 5 +- setup.py | 2 +- 7 files changed, 162 insertions(+), 73 deletions(-) create mode 100644 blackconfig/pyproject.toml diff --git a/Makefile b/Makefile index f00f0a7e1..81dcf37f4 100644 --- a/Makefile +++ b/Makefile @@ -20,13 +20,20 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities done .PHONY: lint -lint: ## Run flake8 +lint: isort black ## Run isort, black and flake8 @flake8 securedrop_proxy tests .PHONY: mypy mypy: ## Run mypy static type checker @mypy --ignore-missing-imports securedrop_proxy +.PHONY: black +black: ## Run black for file formatting + @black --config ./blackconfig/pyproject.toml --check securedrop_proxy tests + +.PHONY: isort +isort: ## Run isort for file formatting + @isort -c -w 100 securedrop_proxy/*.py tests/*.py --diff .PHONY: update-pip-requirements update-pip-requirements: ## Updates all Python requirements files via pip-compile. 
diff --git a/blackconfig/pyproject.toml b/blackconfig/pyproject.toml new file mode 100644 index 000000000..aa4949aa1 --- /dev/null +++ b/blackconfig/pyproject.toml @@ -0,0 +1,2 @@ +[tool.black] +line-length = 100 diff --git a/dev-requirements.in b/dev-requirements.in index 9a880c48f..8f8b1ef46 100644 --- a/dev-requirements.in +++ b/dev-requirements.in @@ -1,10 +1,12 @@ +black==19.10b0 coverage==5.0 flake8==3.6.0 +isort==4.3.21 mccabe==0.6.1 multidict==4.4.2 -mypy==0.701 -mypy-extensions==0.4.1 -pip-tools==3.1.0 +mypy==0.761 +mypy-extensions==0.4.3 +pip-tools==4.3.0 pycodestyle==2.4.0 pyflakes==2.0.0 six==1.11.0 diff --git a/dev-requirements.txt b/dev-requirements.txt index 5ee12c538..f2d820fb3 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -2,18 +2,32 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --generate-hashes --output-file=dev-requirements.txt dev-requirements.in requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=dev-requirements.txt dev-requirements.in requirements.in # +appdirs==1.4.3 \ + --hash=sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92 \ + --hash=sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e \ + # via black +attrs==19.3.0 \ + --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ + --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 \ + # via black +black==19.10b0 \ + --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \ + --hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 \ + # via -r dev-requirements.in certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ - --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a + --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a \ + # via -r requirements.in, requests chardet==3.0.4 
\ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ - --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ + # via -r requirements.in, requests click==7.0 \ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ - # via pip-tools + # via black, pip-tools coverage==5.0 \ --hash=sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351 \ --hash=sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd \ @@ -45,19 +59,28 @@ coverage==5.0 \ --hash=sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c \ --hash=sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca \ --hash=sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db \ - --hash=sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c + --hash=sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c \ + # via -r dev-requirements.in flake8==3.6.0 \ --hash=sha256:6a35f5b8761f45c5513e3405f110a86bea57982c3b75b766ce7b65217abe1670 \ - --hash=sha256:c01f8a3963b3571a8e6bd7a4063359aff90749e160778e03817cd9b71c9e07d2 + --hash=sha256:c01f8a3963b3571a8e6bd7a4063359aff90749e160778e03817cd9b71c9e07d2 \ + # via -r dev-requirements.in furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ - --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec + --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec \ + # via -r requirements.in idna==2.7 \ --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ - --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 + --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 \ + # via -r 
requirements.in, requests, yarl +isort==4.3.21 \ + --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ + --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \ + # via -r dev-requirements.in mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ - --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ + # via -r dev-requirements.in, flake8 multidict==4.4.2 \ --hash=sha256:05eeab69bf2b0664644c62bd92fabb045163e5b8d4376a31dfb52ce0210ced7b \ --hash=sha256:0c85880efa7cadb18e3b5eef0aa075dc9c0a3064cbbaef2e20be264b9cf47a64 \ @@ -87,34 +110,48 @@ multidict==4.4.2 \ --hash=sha256:a596019c3eafb1b0ae07db9f55a08578b43c79adb1fe1ab1fd818430ae59ee6f \ --hash=sha256:e8848ae3cd6a784c29fae5055028bee9bffcc704d8bcad09bd46b42b44a833e2 \ --hash=sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306 \ - --hash=sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd -mypy-extensions==0.4.1 \ - --hash=sha256:37e0e956f41369209a3d5f34580150bcacfabaa57b33a15c0b25f4b5725e0812 \ - --hash=sha256:b16cabe759f55e3409a7d231ebd2841378fb0c27a5d1994719e340e4f429ac3e -mypy==0.701 \ - --hash=sha256:2afe51527b1f6cdc4a5f34fc90473109b22bf7f21086ba3e9451857cf11489e6 \ - --hash=sha256:56a16df3e0abb145d8accd5dbb70eba6c4bd26e2f89042b491faa78c9635d1e2 \ - --hash=sha256:5764f10d27b2e93c84f70af5778941b8f4aa1379b2430f85c827e0f5464e8714 \ - --hash=sha256:5bbc86374f04a3aa817622f98e40375ccb28c4836f36b66706cf3c6ccce86eda \ - --hash=sha256:6a9343089f6377e71e20ca734cd8e7ac25d36478a9df580efabfe9059819bf82 \ - --hash=sha256:6c9851bc4a23dc1d854d3f5dfd5f20a016f8da86bcdbb42687879bb5f86434b0 \ - --hash=sha256:b8e85956af3fcf043d6f87c91cbe8705073fc67029ba6e22d3468bfee42c4823 \ - --hash=sha256:b9a0af8fae490306bc112229000aa0c2ccc837b49d29a5c42e088c132a2334dd \ - 
--hash=sha256:bbf643528e2a55df2c1587008d6e3bda5c0445f1240dfa85129af22ae16d7a9a \ - --hash=sha256:c46ab3438bd21511db0f2c612d89d8344154c0c9494afc7fbc932de514cf8d15 \ - --hash=sha256:f7a83d6bd805855ef83ec605eb01ab4fa42bcef254b13631e451cbb44914a9b0 + --hash=sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd \ + # via -r dev-requirements.in, yarl +mypy-extensions==0.4.3 \ + --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ + --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 \ + # via -r dev-requirements.in, mypy +mypy==0.761 \ + --hash=sha256:0a9a45157e532da06fe56adcfef8a74629566b607fa2c1ac0122d1ff995c748a \ + --hash=sha256:2c35cae79ceb20d47facfad51f952df16c2ae9f45db6cb38405a3da1cf8fc0a7 \ + --hash=sha256:4b9365ade157794cef9685791032521233729cb00ce76b0ddc78749abea463d2 \ + --hash=sha256:53ea810ae3f83f9c9b452582261ea859828a9ed666f2e1ca840300b69322c474 \ + --hash=sha256:634aef60b4ff0f650d3e59d4374626ca6153fcaff96ec075b215b568e6ee3cb0 \ + --hash=sha256:7e396ce53cacd5596ff6d191b47ab0ea18f8e0ec04e15d69728d530e86d4c217 \ + --hash=sha256:7eadc91af8270455e0d73565b8964da1642fe226665dd5c9560067cd64d56749 \ + --hash=sha256:7f672d02fffcbace4db2b05369142e0506cdcde20cea0e07c7c2171c4fd11dd6 \ + --hash=sha256:85baab8d74ec601e86134afe2bcccd87820f79d2f8d5798c889507d1088287bf \ + --hash=sha256:87c556fb85d709dacd4b4cb6167eecc5bbb4f0a9864b69136a0d4640fdc76a36 \ + --hash=sha256:a6bd44efee4dc8c3324c13785a9dc3519b3ee3a92cada42d2b57762b7053b49b \ + --hash=sha256:c6d27bd20c3ba60d5b02f20bd28e20091d6286a699174dfad515636cb09b5a72 \ + --hash=sha256:e2bb577d10d09a2d8822a042a23b8d62bc3b269667c9eb8e60a6edfa000211b1 \ + --hash=sha256:f97a605d7c8bc2c6d1172c2f0d5a65b24142e11a58de689046e62c2d632ca8c1 \ + # via -r dev-requirements.in orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ - --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 
-pip-tools==3.1.0 \ - --hash=sha256:31b43e5f8d605fc84f7506199025460abcb98a29d12cc99db268f73e39cf55e5 \ - --hash=sha256:b1ceca03b4a48346b2f6870565abb09d8d257d5b1524b4c6b222185bf26c3870 + --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 \ + # via -r requirements.in, furl +pathspec==0.7.0 \ + --hash=sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424 \ + --hash=sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96 \ + # via black +pip-tools==4.3.0 \ + --hash=sha256:06efa50b7089b2abbfcf4b47684960538af74669e801e69a557cb8a1c6ad6674 \ + --hash=sha256:79e8137a2b96906ccaed0151e1df42daf386d51abb80286173d112b5296a5775 \ + # via -r dev-requirements.in pycodestyle==2.4.0 \ --hash=sha256:cbc619d09254895b0d12c2c691e237b2e91e9b2ecf5e84c26b35400f93dcfb83 \ - --hash=sha256:cbfca99bd594a10f674d0cd97a3d802a1fdef635d4361e1a2658de47ed261e3a + --hash=sha256:cbfca99bd594a10f674d0cd97a3d802a1fdef635d4361e1a2658de47ed261e3a \ + # via -r dev-requirements.in, flake8 pyflakes==2.0.0 \ --hash=sha256:9a7662ec724d0120012f6e29d6248ae3727d821bba522a0e6b356eff19126a49 \ - --hash=sha256:f661252913bc1dbe7fcfcbf0af0db3f42ab65aabd1a6ca68fe5d466bace94dae + --hash=sha256:f661252913bc1dbe7fcfcbf0af0db3f42ab65aabd1a6ca68fe5d466bace94dae \ + # via -r dev-requirements.in, flake8 pyyaml==5.3.1 \ --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ @@ -126,45 +163,86 @@ pyyaml==5.3.1 \ --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ - --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a + --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ + # via -r requirements.in, vcrpy +regex==2020.1.8 \ 
+ --hash=sha256:07b39bf943d3d2fe63d46281d8504f8df0ff3fe4c57e13d1656737950e53e525 \ + --hash=sha256:0932941cdfb3afcbc26cc3bcf7c3f3d73d5a9b9c56955d432dbf8bbc147d4c5b \ + --hash=sha256:0e182d2f097ea8549a249040922fa2b92ae28be4be4895933e369a525ba36576 \ + --hash=sha256:10671601ee06cf4dc1bc0b4805309040bb34c9af423c12c379c83d7895622bb5 \ + --hash=sha256:23e2c2c0ff50f44877f64780b815b8fd2e003cda9ce817a7fd00dea5600c84a0 \ + --hash=sha256:26ff99c980f53b3191d8931b199b29d6787c059f2e029b2b0c694343b1708c35 \ + --hash=sha256:27429b8d74ba683484a06b260b7bb00f312e7c757792628ea251afdbf1434003 \ + --hash=sha256:3e77409b678b21a056415da3a56abfd7c3ad03da71f3051bbcdb68cf44d3c34d \ + --hash=sha256:4e8f02d3d72ca94efc8396f8036c0d3bcc812aefc28ec70f35bb888c74a25161 \ + --hash=sha256:4eae742636aec40cf7ab98171ab9400393360b97e8f9da67b1867a9ee0889b26 \ + --hash=sha256:6a6ae17bf8f2d82d1e8858a47757ce389b880083c4ff2498dba17c56e6c103b9 \ + --hash=sha256:6a6ba91b94427cd49cd27764679024b14a96874e0dc638ae6bdd4b1a3ce97be1 \ + --hash=sha256:7bcd322935377abcc79bfe5b63c44abd0b29387f267791d566bbb566edfdd146 \ + --hash=sha256:98b8ed7bb2155e2cbb8b76f627b2fd12cf4b22ab6e14873e8641f266e0fb6d8f \ + --hash=sha256:bd25bb7980917e4e70ccccd7e3b5740614f1c408a642c245019cff9d7d1b6149 \ + --hash=sha256:d0f424328f9822b0323b3b6f2e4b9c90960b24743d220763c7f07071e0778351 \ + --hash=sha256:d58e4606da2a41659c84baeb3cfa2e4c87a74cec89a1e7c56bee4b956f9d7461 \ + --hash=sha256:e3cd21cc2840ca67de0bbe4071f79f031c81418deb544ceda93ad75ca1ee9f7b \ + --hash=sha256:e6c02171d62ed6972ca8631f6f34fa3281d51db8b326ee397b9c83093a6b7242 \ + --hash=sha256:e7c7661f7276507bce416eaae22040fd91ca471b5b33c13f8ff21137ed6f248c \ + --hash=sha256:ecc6de77df3ef68fee966bb8cb4e067e84d4d1f397d0ef6fce46913663540d77 \ + # via black requests==2.20.0 \ --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ - --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 + 
--hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 \ + # via -r requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ - --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb -typed-ast==1.3.5 \ - --hash=sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b \ - --hash=sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d \ - --hash=sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a \ - --hash=sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462 \ - --hash=sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee \ - --hash=sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a \ - --hash=sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4 \ - --hash=sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649 \ - --hash=sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a \ - --hash=sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f \ - --hash=sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7 \ - --hash=sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760 \ - --hash=sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18 \ - --hash=sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616 \ - --hash=sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd \ - --hash=sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21 \ - --hash=sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93 \ - --hash=sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb \ - --hash=sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7 \ + --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \ + # via -r dev-requirements.in, -r 
requirements.in, furl, orderedmultidict, pip-tools, vcrpy +toml==0.10.0 \ + --hash=sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c \ + --hash=sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e \ + # via black +typed-ast==1.4.1 \ + --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \ + --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \ + --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \ + --hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \ + --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \ + --hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \ + --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \ + --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \ + --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \ + --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \ + --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \ + --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \ + --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \ + --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \ + --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \ + --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \ + --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \ + --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \ + --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \ + --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \ + 
--hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \ + # via black, mypy +typing-extensions==3.7.4.1 \ + --hash=sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2 \ + --hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \ + --hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575 \ # via mypy urllib3==1.24.3 \ --hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ - --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb + --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb \ + # via -r requirements.in, requests vcrpy==2.0.1 \ --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \ - --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f + --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f \ + # via -r dev-requirements.in werkzeug==0.16.0 \ --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ - --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 + --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 \ + # via -r requirements.in wrapt==1.10.11 \ - --hash=sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6 + --hash=sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6 \ + # via -r dev-requirements.in, vcrpy yarl==1.2.6 \ --hash=sha256:2556b779125621b311844a072e0ed367e8409a18fa12cbd68eb1258d187820f9 \ --hash=sha256:4aec0769f1799a9d4496827292c02a7b1f75c0bab56ab2b60dd94ebb57cbd5ee \ @@ -174,8 +252,11 @@ yarl==1.2.6 \ --hash=sha256:c8cbc21bbfa1dd7d5386d48cc814fe3d35b80f60299cdde9279046f399c3b0d8 \ --hash=sha256:db6f70a4b09cde813a4807843abaaa60f3b15fb4a2a06f9ae9c311472662daa1 \ --hash=sha256:f17495e6fe3d377e3faac68121caef6f974fcb9e046bc075bcff40d8e5cc69a4 \ - 
--hash=sha256:f85900b9cca0c67767bb61b2b9bd53208aaa7373dae633dbe25d179b4bf38aa7 + --hash=sha256:f85900b9cca0c67767bb61b2b9bd53208aaa7373dae633dbe25d179b4bf38aa7 \ + # via -r dev-requirements.in, vcrpy -# WARNING: The following packages were not pinned, but pip requires them to be -# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. -# setuptools==46.1.3 # via flake8 +# The following packages are considered to be unsafe in a requirements file: +setuptools==47.1.1 \ + --hash=sha256:145fa62b9d7bb544fce16e9b5a9bf4ab2032d2f758b7cd674af09a92736aff74 \ + --hash=sha256:74f33f44290f95c5c4a7c13ccc9d6d1a16837fe9dce0acf411dd244e7de95143 \ + # via flake8 diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index dbd5fa4c9..56ea61748 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -26,8 +26,8 @@ def start() -> None: """ - Set up a new proxy object with an error handler, configuration that we read from argv[1], and - the original user request from STDIN. + Set up a new proxy object with an error handler, configuration that we read + from argv[1], and the original user request from STDIN. """ try: configure_logging() diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index f709b1237..6c79d7f27 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -45,10 +45,7 @@ def __init__(self, status: int) -> None: class Proxy: - def __init__( - self, conf_path: str, req: Req = Req(), timeout: float = 120.0 - ) -> None: - # The configuration path for Proxy is a must. 
+ def __init__(self, conf_path: str, req: Req = Req(), timeout: float = 10.0) -> None: self.read_conf(conf_path) self.req = req diff --git a/setup.py b/setup.py index 1e3f4ca6c..5aa3dfd36 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ long_description_content_type="text/markdown", license="GPLv3+", install_requires=["requests", "furl", "pyyaml", "werkzeug"], - python_requires=">=3.5", + python_requires=">=3.7", url="https://github.com/freedomofpress/securedrop-proxy", packages=setuptools.find_packages(exclude=["docs", "tests"]), package_data={ From 787844dc339e090aa0e9ac2241895365522c4119 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 4 Jun 2020 18:29:16 +0530 Subject: [PATCH 206/352] Auto fixes from isort and black --- securedrop_proxy/entrypoint.py | 17 +++++---------- securedrop_proxy/main.py | 4 +--- securedrop_proxy/proxy.py | 40 +++++++++++----------------------- tests/test_entrypoint.py | 7 +++--- tests/test_main.py | 7 +++--- tests/test_proxy.py | 13 +++++------ 6 files changed, 30 insertions(+), 58 deletions(-) diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index 56ea61748..b2f535554 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -10,16 +10,13 @@ import json import logging import os -import sys import platform +import sys +from logging.handlers import SysLogHandler, TimedRotatingFileHandler -from logging.handlers import TimedRotatingFileHandler, SysLogHandler - -from securedrop_proxy import main -from securedrop_proxy import proxy +from securedrop_proxy import main, proxy from securedrop_proxy.version import version - DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_proxy") LOGLEVEL = os.environ.get("LOGLEVEL", "info").upper() @@ -36,9 +33,7 @@ def start() -> None: # path to config file must be at argv[1] if len(sys.argv) != 2: - raise ValueError( - "sd-proxy script not called with path to configuration file" - ) + raise ValueError("sd-proxy script not called 
with path to configuration file") # read config. `read_conf` will call `p.err_on_done` if there is a config # problem, and will return a Conf object on success. @@ -74,9 +69,7 @@ def configure_logging() -> None: log_file = os.path.join(home, "logs", "proxy.log") # set logging format - log_fmt = ( - "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) %(levelname)s: %(message)s" - ) + log_fmt = "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) %(levelname)s: %(message)s" formatter = logging.Formatter(log_fmt) # define log handlers such as for rotating log files diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py index ff979053a..1fd76cf88 100644 --- a/securedrop_proxy/main.py +++ b/securedrop_proxy/main.py @@ -1,10 +1,8 @@ import json import logging -from typing import Dict, Any +from typing import Any, Dict from securedrop_proxy import proxy - - from securedrop_proxy.proxy import Proxy logger = logging.getLogger(__name__) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 6c79d7f27..69a4f4429 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -1,21 +1,20 @@ -import furl import http import json import logging -import requests -import tempfile -import werkzeug - import os import subprocess import sys +import tempfile import uuid -import yaml +from tempfile import _TemporaryFileWrapper # type: ignore from typing import Dict, Optional -import securedrop_proxy.version as version +import furl +import requests +import werkzeug +import yaml -from tempfile import _TemporaryFileWrapper # type: ignore +import securedrop_proxy.version as version logger = logging.getLogger(__name__) @@ -72,9 +71,7 @@ def err_on_done(self): def read_conf(self, conf_path: str) -> None: if not os.path.isfile(conf_path): - self.simple_error( - 500, "Configuration file does not exist at {}".format(conf_path) - ) + self.simple_error(500, "Configuration file does not exist at {}".format(conf_path)) self.err_on_done() try: @@ -82,18 +79,12 @@ def 
read_conf(self, conf_path: str) -> None: conf_in = yaml.safe_load(fh) except yaml.YAMLError: self.simple_error( - 500, - "YAML syntax error while reading configuration file {}".format( - conf_path - ), + 500, "YAML syntax error while reading configuration file {}".format(conf_path), ) self.err_on_done() except Exception: self.simple_error( - 500, - "Error while opening or reading configuration file {}".format( - conf_path - ), + 500, "Error while opening or reading configuration file {}".format(conf_path), ) self.err_on_done() @@ -263,15 +254,12 @@ def proxy(self) -> None: requests.exceptions.TooManyRedirects, ) as e: logger.error(e) - self.simple_error( - http.HTTPStatus.BAD_GATEWAY, "could not connect to server" - ) + self.simple_error(http.HTTPStatus.BAD_GATEWAY, "could not connect to server") except requests.exceptions.HTTPError as e: logger.error(e) try: self.simple_error( - e.response.status_code, - http.HTTPStatus(e.response.status_code).phrase.lower(), + e.response.status_code, http.HTTPStatus(e.response.status_code).phrase.lower(), ) except ValueError: # Return a generic error message when the response @@ -279,7 +267,5 @@ def proxy(self) -> None: self.simple_error(e.response.status_code, "unspecified server error") except Exception as e: logger.error(e) - self.simple_error( - http.HTTPStatus.INTERNAL_SERVER_ERROR, "internal proxy error" - ) + self.simple_error(http.HTTPStatus.INTERNAL_SERVER_ERROR, "internal proxy error") self.on_done() diff --git a/tests/test_entrypoint.py b/tests/test_entrypoint.py index 842cb866a..d12e661fb 100644 --- a/tests/test_entrypoint.py +++ b/tests/test_entrypoint.py @@ -9,6 +9,7 @@ from unittest.mock import patch import vcr + from securedrop_proxy import entrypoint @@ -53,7 +54,7 @@ def test_missing_config(self): @patch("securedrop_proxy.entrypoint.TimedRotatingFileHandler") def test_configure_logging(self, mock_log_conf, mock_log_conf_sys, mock_logging): with sdhome() as homedir: - mock_log_file = os.path.join(homedir, 'logs', 
'proxy.log') + mock_log_file = os.path.join(homedir, "logs", "proxy.log") entrypoint.configure_logging() mock_log_conf.assert_called_once_with(mock_log_file) # For rsyslog handler @@ -71,9 +72,7 @@ def test_unwritable_log_folder(self): output = None with sdhome() as home: os.chmod(home, 0o0444) - with unittest.mock.patch( - "sys.stdout", new_callable=io.StringIO - ) as mock_stdout: + with unittest.mock.patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: with self.assertRaises(SystemExit): entrypoint.start() output = mock_stdout.getvalue() diff --git a/tests/test_main.py b/tests/test_main.py index 05f158623..94a39a2cc 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,16 +1,15 @@ import http -from io import StringIO import json import subprocess import sys +import types import unittest import uuid -import types +from io import StringIO import vcr -from securedrop_proxy import main -from securedrop_proxy import proxy +from securedrop_proxy import main, proxy class TestMain(unittest.TestCase): diff --git a/tests/test_proxy.py b/tests/test_proxy.py index f4cd56038..7a035cfd3 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -1,18 +1,17 @@ -import sys import http import json +import sys +import tempfile +import types import unittest import uuid -import types from io import StringIO -import tempfile from unittest.mock import patch import requests import vcr -from securedrop_proxy import proxy -from securedrop_proxy import version +from securedrop_proxy import proxy, version class TestProxyValidConfig(unittest.TestCase): @@ -245,9 +244,7 @@ def test_internal_server_error(self): self.assertEqual(p.res.status, http.HTTPStatus.INTERNAL_SERVER_ERROR) self.assertIn("application/json", p.res.headers["Content-Type"]) body = json.loads(p.res.body) - self.assertEqual( - body["error"], http.HTTPStatus.INTERNAL_SERVER_ERROR.phrase.lower() - ) + self.assertEqual(body["error"], http.HTTPStatus.INTERNAL_SERVER_ERROR.phrase.lower()) 
@vcr.use_cassette("fixtures/proxy_internal_error.yaml") def test_internal_error(self): From 32e62719148d54d1c264e8b781056803fa46b24a Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Thu, 4 Jun 2020 18:37:59 +0530 Subject: [PATCH 207/352] Adds a .git-blame-ignore-revs file It adds a configuration file to skip the previous commit which has isort and black formatting changes. ``` git config blame.ignoreRevsFile .git-blame-ignore-revs ``` After one executes the above command, `git blame` does not show details for the formatting commit. --- .git-blame-ignore-revs | 1 + 1 file changed, 1 insertion(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..dbeda0ff0 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1 @@ +787844dc339e090aa0e9ac2241895365522c4119 From bf75744fdbc08d75e1e731c3e4ec5c7be17b767c Mon Sep 17 00:00:00 2001 From: mickael e Date: Thu, 1 Oct 2020 15:51:41 -0400 Subject: [PATCH 208/352] Create codeql-analysis.yml --- .github/workflows/codeql-analysis.yml | 71 +++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 000000000..0e907d316 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,71 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic.
+name: "CodeQL" + +on: + push: + branches: [main] + pull_request: + # The branches below must be a subset of the branches above + branches: [main] + schedule: + - cron: '0 3 * * 6' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + # Override automatic language detection by changing the below list + # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] + language: ['python'] + # Learn more... + # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + # We must fetch at least the immediate parents so that if this is + # a pull request then we can checkout the head. + fetch-depth: 2 + + # If this run was triggered by a pull request event, then checkout + # the head of the pull request instead of the merge commit. + - run: git checkout HEAD^2 + if: ${{ github.event_name == 'pull_request' }} + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. 
+ # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 From 00d5ba14576c7551a48877b5d720f568380a1f19 Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Thu, 1 Oct 2020 17:53:51 -0700 Subject: [PATCH 209/352] Use qubesdb-read instead of gethostname This prevents misidentification of Whonix VMs, which always use 'host' as the hostname. --- sd-rsyslog | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/sd-rsyslog b/sd-rsyslog index 09432263d..4f62d95e3 100644 --- a/sd-rsyslog +++ b/sd-rsyslog @@ -30,11 +30,11 @@ import os import logging import configparser from subprocess import Popen, PIPE -from socket import gethostname # Global definitions specific to your plugin process = None + class RecoverableError(Exception): """An error that has caused the processing of the current message to fail, but does not require restarting the plugin. @@ -74,7 +74,6 @@ def onInit(): # emitted you must set 'level' to logging.DEBUG above.) logging.debug("onInit called") - global process if not os.path.exists("/etc/sd-rsyslog.conf"): print("Please create the configuration file at /etc/sd-rsyslog.conf", file=sys.stderr) @@ -82,7 +81,26 @@ def onInit(): config = configparser.ConfigParser() config.read('/etc/sd-rsyslog.conf') logvmname = config['sd-rsyslog']['remotevm'] - localvmname = config['sd-rsyslog'].get('localvm', gethostname()) + localvmname = config['sd-rsyslog'].get('localvm', None) + + # If no localvm name is specified, it must be supplied by Qubes OS. If this + # fails, we exit, to avoid falsely identified logs. 
+ if localvmname is None: + try: + get_vm_name_process = Popen(["/usr/bin/qubesdb-read", "/name"], + stdout=PIPE, stderr=PIPE) + vm_name_output, vm_name_error = get_vm_name_process.communicate() + if vm_name_error != b"": + print("Error obtaining VM name via qubesdb-read:") + print(vm_name_error.decode("utf-8").strip()) + sys.exit(1) + localvmname = vm_name_output.decode("utf-8").strip() + except FileNotFoundError: # not on Qubes? + print("Could not run qubesdb-read command to obtain VM name.") + print("Note that sd-rsyslog must be run on Qubes OS if no") + print("localvm name is specified in the configuration.") + sys.exit(1) + process = Popen( ["/usr/lib/qubes/qrexec-client-vm", logvmname, "securedrop.Log"], stdin=PIPE, @@ -144,7 +162,7 @@ via stdout. In most cases, modifying this code should not be necessary. """ try: onInit() -except Exception as e: +except Exception: # If an error occurs during initialization, log it and terminate. The # 'omprog' action will eventually restart the program. 
logging.exception("Initialization error, exiting program") @@ -191,4 +209,3 @@ if endedWithError: sys.exit(1) else: sys.exit(0) - From df49576e43c28bf77edea0ca98338b06116e398c Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Sat, 3 Oct 2020 22:42:37 -0400 Subject: [PATCH 210/352] Updated dependencies to clear safety checks - updated urllib3 from 1.25.8 to 1.25.10 - updated pip-tools to >=5.0.0 to clear pip-related error - Updated Makefile to pin pip and setuptools in dev requirements --- Makefile | 2 +- dev-requirements.in | 2 +- dev-requirements.txt | 28 ++++++++++++++++------------ requirements.in | 4 ++-- requirements.txt | 38 ++++++++++++++++++++++++-------------- 5 files changed, 44 insertions(+), 30 deletions(-) diff --git a/Makefile b/Makefile index 81dcf37f4..9c754f119 100644 --- a/Makefile +++ b/Makefile @@ -37,7 +37,7 @@ isort: ## Run isort for file formatting .PHONY: update-pip-requirements update-pip-requirements: ## Updates all Python requirements files via pip-compile. - pip-compile --generate-hashes --output-file dev-requirements.txt dev-requirements.in requirements.in + pip-compile --allow-unsafe --generate-hashes --output-file dev-requirements.txt dev-requirements.in requirements.in pip-compile --generate-hashes --output-file requirements.txt requirements.in .PHONY: test diff --git a/dev-requirements.in b/dev-requirements.in index 8f8b1ef46..e4fe026d6 100644 --- a/dev-requirements.in +++ b/dev-requirements.in @@ -6,7 +6,7 @@ mccabe==0.6.1 multidict==4.4.2 mypy==0.761 mypy-extensions==0.4.3 -pip-tools==4.3.0 +pip-tools>=5.0.0 pycodestyle==2.4.0 pyflakes==2.0.0 six==1.11.0 diff --git a/dev-requirements.txt b/dev-requirements.txt index f2d820fb3..2bdc0745f 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -140,9 +140,9 @@ pathspec==0.7.0 \ --hash=sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424 \ --hash=sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96 \ # via black -pip-tools==4.3.0 \ - 
--hash=sha256:06efa50b7089b2abbfcf4b47684960538af74669e801e69a557cb8a1c6ad6674 \ - --hash=sha256:79e8137a2b96906ccaed0151e1df42daf386d51abb80286173d112b5296a5775 \ +pip-tools==5.3.1 \ + --hash=sha256:5672c2b6ca0f1fd803f3b45568c2cf7fadf135b4971e7d665232b2075544c0ef \ + --hash=sha256:73787e23269bf8a9230f376c351297b9037ed0d32ab0f9bef4a187d976acc054 \ # via -r dev-requirements.in pycodestyle==2.4.0 \ --hash=sha256:cbc619d09254895b0d12c2c691e237b2e91e9b2ecf5e84c26b35400f93dcfb83 \ @@ -188,9 +188,9 @@ regex==2020.1.8 \ --hash=sha256:e7c7661f7276507bce416eaae22040fd91ca471b5b33c13f8ff21137ed6f248c \ --hash=sha256:ecc6de77df3ef68fee966bb8cb4e067e84d4d1f397d0ef6fce46913663540d77 \ # via black -requests==2.20.0 \ - --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ - --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 \ +requests==2.22.0 \ + --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ + --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 \ # via -r requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ @@ -228,9 +228,9 @@ typing-extensions==3.7.4.1 \ --hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \ --hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575 \ # via mypy -urllib3==1.24.3 \ - --hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ - --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb \ +urllib3==1.25.10 \ + --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ + --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 \ # via -r requirements.in, requests vcrpy==2.0.1 \ --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \ @@ -256,7 +256,11 @@ yarl==1.2.6 \ # via -r dev-requirements.in, vcrpy # The following 
packages are considered to be unsafe in a requirements file: -setuptools==47.1.1 \ - --hash=sha256:145fa62b9d7bb544fce16e9b5a9bf4ab2032d2f758b7cd674af09a92736aff74 \ - --hash=sha256:74f33f44290f95c5c4a7c13ccc9d6d1a16837fe9dce0acf411dd244e7de95143 \ +pip==20.2.3 \ + --hash=sha256:0f35d63b7245205f4060efe1982f5ea2196aa6e5b26c07669adcf800e2542026 \ + --hash=sha256:30c70b6179711a7c4cf76da89e8a0f5282279dfb0278bec7b94134be92543b6d \ + # via pip-tools +setuptools==50.3.0 \ + --hash=sha256:39060a59d91cf5cf403fa3bacbb52df4205a8c3585e0b9ba4b30e0e19d4c4b18 \ + --hash=sha256:c77b3920663a435c9450d9d971c48f5a7478fca8881b2cd2564e59f970f03536 \ # via flake8 diff --git a/requirements.in b/requirements.in index 1f2566b91..ba72056e0 100644 --- a/requirements.in +++ b/requirements.in @@ -5,6 +5,6 @@ idna==2.7 orderedmultidict==1.0 PyYAML==5.3.1 six==1.11.0 -requests==2.20.0 -urllib3==1.24.3 +requests==2.22.0 +urllib3>=1.25.10 Werkzeug==0.16.0 diff --git a/requirements.txt b/requirements.txt index 214b04d95..a61eb958b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,19 +6,24 @@ # certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ - --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a + --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a \ + # via -r requirements.in, requests chardet==3.0.4 \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ - --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ + # via -r requirements.in, requests furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ - --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec + --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec \ + # via -r requirements.in idna==2.7 \ 
--hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ - --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 + --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 \ + # via -r requirements.in, requests orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ - --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 + --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 \ + # via -r requirements.in, furl pyyaml==5.3.1 \ --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ @@ -30,16 +35,21 @@ pyyaml==5.3.1 \ --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ - --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a -requests==2.20.0 \ - --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \ - --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279 + --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ + # via -r requirements.in +requests==2.22.0 \ + --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ + --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 \ + # via -r requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ - --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb -urllib3==1.24.3 \ - --hash=sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4 \ - --hash=sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb + 
--hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \ + # via -r requirements.in, furl, orderedmultidict +urllib3==1.25.10 \ + --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ + --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 \ + # via -r requirements.in, requests werkzeug==0.16.0 \ --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ - --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 + --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 \ + # via -r requirements.in From c2b295a8668261147a99f7d24550aac566190a5b Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Wed, 7 Oct 2020 22:20:52 -0400 Subject: [PATCH 211/352] updated urllib3 and requests in build requirements --- build-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build-requirements.txt b/build-requirements.txt index ed7807329..85d0a6825 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -4,7 +4,7 @@ furl==2.0.0 --hash=sha256:1855003e64dcb934556ad79994ba1a3a852da337e353d84d3b4ef7 idna==2.7 --hash=sha256:491f674364ba3232ed1eb4c1eb7407887f62cef6c300aad7df6e01acd88ffb25 orderedmultidict==1.0 --hash=sha256:51efddca0b4ae6d885bbafd8ca44e51758166c144cf006dbead5c9394b2a9eae pyyaml==5.3.1 --hash=sha256:cb4442140d3195f5f799096aa35aadce15f493046135a03668023b80824dd44c -requests==2.20.0 --hash=sha256:d87b2085783d31d874ac7bc62660e287932aaee7059e80b41b76462eb18d35cc +requests==2.22.0 --hash=sha256:e3711bd465fd013abe428a8dade2938b578f05d2b06b0ae83daa98869c4548e8 six==1.11.0 --hash=sha256:aa4ad34049ddff178b533062797fd1db9f0038b7c5c2461a7cde2244300b9f3d -urllib3==1.24.3 --hash=sha256:3d440cbb168e2c963d5099232bdb3f7390bf031b6270dad1bc79751698a1399a +urllib3==1.25.10 --hash=sha256:32bcd1ec52a4dd17ede2725ad166c789f50d402870947d6824598915b89ecf70 werkzeug==0.16.0 
--hash=sha256:429de1b931a2a58bf5cfac8447253949f7a930d30a73f2755e0ad0f9824592bf From 3afc03f776ee5df977a6715fbb46637fae737087 Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Thu, 8 Oct 2020 17:06:28 -0700 Subject: [PATCH 212/352] Use logging instead of print --- sd-rsyslog | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/sd-rsyslog b/sd-rsyslog index 4f62d95e3..47bd25d2b 100644 --- a/sd-rsyslog +++ b/sd-rsyslog @@ -76,7 +76,8 @@ def onInit(): global process if not os.path.exists("/etc/sd-rsyslog.conf"): - print("Please create the configuration file at /etc/sd-rsyslog.conf", file=sys.stderr) + logging.exception("Please create the configuration file at /etc/sd-rsyslog.conf", + file=sys.stderr) sys.exit(1) config = configparser.ConfigParser() config.read('/etc/sd-rsyslog.conf') @@ -91,14 +92,14 @@ def onInit(): stdout=PIPE, stderr=PIPE) vm_name_output, vm_name_error = get_vm_name_process.communicate() if vm_name_error != b"": - print("Error obtaining VM name via qubesdb-read:") - print(vm_name_error.decode("utf-8").strip()) + logging.exception("Error obtaining VM name via qubesdb-read:") + logging.exception(vm_name_error.decode("utf-8").strip()) sys.exit(1) localvmname = vm_name_output.decode("utf-8").strip() except FileNotFoundError: # not on Qubes? - print("Could not run qubesdb-read command to obtain VM name.") - print("Note that sd-rsyslog must be run on Qubes OS if no") - print("localvm name is specified in the configuration.") + logging.exception("Could not run qubesdb-read command to obtain VM name.") + logging.exception("Note that sd-rsyslog must be run on Qubes OS if no " + "localvm name is specified in the configuration.") sys.exit(1) process = Popen( From 5256ebc4e55af392dd73b60928fc2a1800c50eb7 Mon Sep 17 00:00:00 2001 From: Conor Schaefer Date: Fri, 9 Oct 2020 14:51:39 -0700 Subject: [PATCH 213/352] Fixes logging exception statement Holdover from the previous refactor from 'print' to 'logging.exception'. 
--- sd-rsyslog | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sd-rsyslog b/sd-rsyslog index 47bd25d2b..e79e86005 100644 --- a/sd-rsyslog +++ b/sd-rsyslog @@ -76,8 +76,7 @@ def onInit(): global process if not os.path.exists("/etc/sd-rsyslog.conf"): - logging.exception("Please create the configuration file at /etc/sd-rsyslog.conf", - file=sys.stderr) + logging.exception("Please create the configuration file at /etc/sd-rsyslog.conf") sys.exit(1) config = configparser.ConfigParser() config.read('/etc/sd-rsyslog.conf') From 34a8cf6e032d6e7cc7b768d8a29f01cabecae08c Mon Sep 17 00:00:00 2001 From: Conor Schaefer Date: Thu, 15 Oct 2020 15:02:44 -0700 Subject: [PATCH 214/352] Removes mimetype associations As part of template consolidation [0], we're moving mimetype associations out of system volumes and into private volumes. Therefore we no longer need these files in the 'securedrop-export' package, as the corresponding files have already been ported to 'securedrop-workstation-config'. 
[0] https://github.com/freedomofpress/securedrop-workstation/issues/471 --- MANIFEST.in | 3 - changelog.md | 3 + files/mimeapps.list | 298 -------------------------------------- files/open-in-dvm.desktop | 10 -- securedrop_export/VERSION | 2 +- 5 files changed, 4 insertions(+), 312 deletions(-) delete mode 100644 files/mimeapps.list delete mode 100644 files/open-in-dvm.desktop diff --git a/MANIFEST.in b/MANIFEST.in index e1056bffe..ad64433d6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,6 +9,3 @@ include setup.py include files/send-to-usb.desktop include files/application-x-sd-export.xml include files/sd-logo.png -include files/mimeapps.list -include files/open-in-dvm.desktop - diff --git a/changelog.md b/changelog.md index dafcbb260..b443f7f94 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,8 @@ # Changelog +## 0.2.4 + * Removes mimetype associations and open-in-dvm desktop file + ## 0.2.3 * Adds gnome-disks to sd-devices diff --git a/files/mimeapps.list b/files/mimeapps.list deleted file mode 100644 index 886a600b0..000000000 --- a/files/mimeapps.list +++ /dev/null @@ -1,298 +0,0 @@ -[Default Applications] -application/x-sd-export=send-to-usb.desktop; -application/x-dia-diagram=open-in-dvm.desktop; -text/x-vcard=open-in-dvm.desktop; -text/directory=open-in-dvm.desktop; -text/calendar=open-in-dvm.desktop; -application/x-cd-image=open-in-dvm.desktop; -application/x-desktop=open-in-dvm.desktop; -application/x-raw-disk-image=open-in-dvm.desktop; -application/x-raw-disk-image-xz-compressed=open-in-dvm.desktop; -image/x-compressed-xcf=open-in-dvm.desktop; -image/x-xcf=open-in-dvm.desktop; -image/x-psd=open-in-dvm.desktop; -image/x-fits=open-in-dvm.desktop; -image/bmp=open-in-dvm.desktop; -image/gif=open-in-dvm.desktop; -image/x-icb=open-in-dvm.desktop; -image/x-ico=open-in-dvm.desktop; -image/x-pcx=open-in-dvm.desktop; -image/x-portable-anymap=open-in-dvm.desktop; -image/x-portable-bitmap=open-in-dvm.desktop; -image/x-portable-graymap=open-in-dvm.desktop; 
-image/x-portable-pixmap=open-in-dvm.desktop; -image/x-xbitmap=open-in-dvm.desktop; -image/x-xpixmap=open-in-dvm.desktop; -image/svg+xml=open-in-dvm.desktop; -application/vnd.ms-word=open-in-dvm.desktop; -application/vnd.wordperfect=open-in-dvm.desktop; -application/vnd.sun.xml.writer=open-in-dvm.desktop; -application/vnd.sun.xml.writer.global=open-in-dvm.desktop; -application/vnd.sun.xml.writer.template=open-in-dvm.desktop; -application/vnd.stardivision.writer=open-in-dvm.desktop; -application/vnd.oasis.opendocument.text=open-in-dvm.desktop; -application/vnd.oasis.opendocument.text-template=open-in-dvm.desktop; -application/vnd.oasis.opendocument.text-web=open-in-dvm.desktop; -application/vnd.oasis.opendocument.text-master=open-in-dvm.desktop; -application/vnd.openxmlformats-officedocument.wordprocessingml.document=open-in-dvm.desktop; -application/vnd.openxmlformats-officedocument.wordprocessingml.template=open-in-dvm.desktop; -application/vnd.ms-excel=open-in-dvm.desktop; -application/vnd.stardivision.calc=open-in-dvm.desktop; -application/vnd.sun.xml.calc=open-in-dvm.desktop; -application/vnd.sun.xml.calc.template=open-in-dvm.desktop; -application/vnd.openxmlformats-officedocument.spreadsheetml.sheet=open-in-dvm.desktop; -application/vnd.openxmlformats-officedocument.spreadsheetml.template=open-in-dvm.desktop; -application/vnd.oasis.opendocument.spreadsheet=open-in-dvm.desktop; -application/vnd.oasis.opendocument.spreadsheet-template=open-in-dvm.desktop; -application/vnd.ms-powerpoint=open-in-dvm.desktop; -application/vnd.stardivision.impress=open-in-dvm.desktop; -application/vnd.sun.xml.impress=open-in-dvm.desktop; -application/vnd.sun.xml.impress.template=open-in-dvm.desktop; -application/vnd.oasis.opendocument.presentation=open-in-dvm.desktop; -application/vnd.oasis.opendocument.presentation-template=open-in-dvm.desktop; -application/vnd.openxmlformats-officedocument.presentationml.presentation=open-in-dvm.desktop; 
-application/vnd.openxmlformats-officedocument.presentationml.template=open-in-dvm.desktop; -application/vnd.stardivision.draw=open-in-dvm.desktop; -application/vnd.sun.xml.draw=open-in-dvm.desktop; -application/vnd.sun.xml.draw.template=open-in-dvm.desktop; -application/vnd.oasis.opendocument.graphics=open-in-dvm.desktop; -application/vnd.oasis.opendocument.graphics-template=open-in-dvm.desktop; -application/vnd.oasis.opendocument.formula=open-in-dvm.desktop; -application/vnd.sun.xml.math=open-in-dvm.desktop; -application/vnd.stardivision.math=open-in-dvm.desktop; -application/vnd.oasis.opendocument.database=open-in-dvm.desktop; -application/vnd.sun.xml.base=open-in-dvm.desktop; -application/pdf=open-in-dvm.desktop; -application/postscript=open-in-dvm.desktop; -application/x-qw=open-in-dvm.desktop; -application/x-gnucash=open-in-dvm.desktop; -application/vnd.lotus-1-2-3=open-in-dvm.desktop; -application/x-oleo=open-in-dvm.desktop; -application/x-gnumeric=open-in-dvm.desktop; -application/x-xbase=open-in-dvm.desktop; -application/x-abiword=open-in-dvm.desktop; -application/x-dvi=open-in-dvm.desktop; -application/x-catalog=open-in-dvm.desktop; -application/x-rpm=open-in-dvm.desktop; -text/csv=open-in-dvm.desktop; -text/plain=open-in-dvm.desktop; -text/html=open-in-dvm.desktop; -application/xhtml+xml=open-in-dvm.desktop; -inode/directory=open-in-dvm.desktop; -x-content/blank-cd=open-in-dvm.desktop; -x-content/blank-dvd=open-in-dvm.desktop; -x-content/blank-bd=open-in-dvm.desktop; -x-content/blank-hddvd=open-in-dvm.desktop; -x-content/video-dvd=open-in-dvm.desktop; -x-content/video-vcd=open-in-dvm.desktop; -x-content/video-svcd=open-in-dvm.desktop; -#x-content/video-bluray=open-in-dvm.desktop; -#x-content/video-hddvd=open-in-dvm.desktop; -x-content/audio-cdda=open-in-dvm.desktop; -x-content/audio-dvd=open-in-dvm.desktop; -x-content/audio-player=open-in-dvm.desktop; -x-content/image-dcf=open-in-dvm.desktop; -x-content/image-picturecd=open-in-dvm.desktop; -# URI scheme 
handlers -x-scheme-handler/mailto=open-in-dvm.desktop; -x-scheme-handler/http=open-in-dvm.desktop; -x-scheme-handler/https=open-in-dvm.desktop; -application/mxf=open-in-dvm.desktop; -application/ogg=open-in-dvm.desktop; -application/ram=open-in-dvm.desktop; -application/sdp=open-in-dvm.desktop; -application/smil=open-in-dvm.desktop; -application/smil+xml=open-in-dvm.desktop; -application/vnd.apple.mpegurl=open-in-dvm.desktop; -application/vnd.ms-wpl=open-in-dvm.desktop; -application/vnd.rn-realmedia=open-in-dvm.desktop; -application/x-extension-m4a=open-in-dvm.desktop; -application/x-extension-mp4=open-in-dvm.desktop; -application/x-flac=open-in-dvm.desktop; -application/x-flash-video=open-in-dvm.desktop; -application/x-matroska=open-in-dvm.desktop; -application/x-netshow-channel=open-in-dvm.desktop; -application/x-ogg=open-in-dvm.desktop; -application/x-quicktime-media-link=open-in-dvm.desktop; -application/x-quicktimeplayer=open-in-dvm.desktop; -application/x-shorten=open-in-dvm.desktop; -application/x-smil=open-in-dvm.desktop; -application/xspf+xml=open-in-dvm.desktop; -audio/3gpp=open-in-dvm.desktop; -audio/ac3=open-in-dvm.desktop; -audio/AMR=open-in-dvm.desktop; -audio/AMR-WB=open-in-dvm.desktop; -audio/basic=open-in-dvm.desktop; -audio/midi=open-in-dvm.desktop; -audio/mp2=open-in-dvm.desktop; -audio/mp4=open-in-dvm.desktop; -audio/mpeg=open-in-dvm.desktop; -audio/mpegurl=open-in-dvm.desktop; -audio/ogg=open-in-dvm.desktop; -audio/prs.sid=open-in-dvm.desktop; -audio/vnd.rn-realaudio=open-in-dvm.desktop; -audio/x-aiff=open-in-dvm.desktop; -audio/x-ape=open-in-dvm.desktop; -audio/x-flac=open-in-dvm.desktop; -audio/x-gsm=open-in-dvm.desktop; -audio/x-it=open-in-dvm.desktop; -audio/x-m4a=open-in-dvm.desktop; -audio/x-matroska=open-in-dvm.desktop; -audio/x-mod=open-in-dvm.desktop; -audio/x-mp3=open-in-dvm.desktop; -audio/x-mpeg=open-in-dvm.desktop; -audio/x-mpegurl=open-in-dvm.desktop; -audio/x-ms-asf=open-in-dvm.desktop; -audio/x-ms-asx=open-in-dvm.desktop; 
-audio/x-ms-wax=open-in-dvm.desktop; -audio/x-ms-wma=open-in-dvm.desktop; -audio/x-musepack=open-in-dvm.desktop; -audio/x-pn-aiff=open-in-dvm.desktop; -audio/x-pn-au=open-in-dvm.desktop; -audio/x-pn-realaudio=open-in-dvm.desktop; -audio/x-pn-realaudio-plugin=open-in-dvm.desktop; -audio/x-pn-wav=open-in-dvm.desktop; -audio/x-pn-windows-acm=open-in-dvm.desktop; -audio/x-realaudio=open-in-dvm.desktop; -audio/x-real-audio=open-in-dvm.desktop; -audio/x-s3m=open-in-dvm.desktop; -audio/x-sbc=open-in-dvm.desktop; -audio/x-scpls=open-in-dvm.desktop; -audio/x-speex=open-in-dvm.desktop; -audio/x-stm=open-in-dvm.desktop; -audio/x-tta=open-in-dvm.desktop; -audio/x-wav=open-in-dvm.desktop; -audio/x-wavpack=open-in-dvm.desktop; -audio/x-vorbis=open-in-dvm.desktop; -audio/x-vorbis+ogg=open-in-dvm.desktop; -audio/x-xm=open-in-dvm.desktop; -image/vnd.rn-realpix=open-in-dvm.desktop; -image/x-pict=open-in-dvm.desktop; -misc/ultravox=open-in-dvm.desktop; -text/google-video-pointer=open-in-dvm.desktop; -text/x-google-video-pointer=open-in-dvm.desktop; -video/3gp=open-in-dvm.desktop; -video/3gpp=open-in-dvm.desktop; -video/dv=open-in-dvm.desktop; -video/divx=open-in-dvm.desktop; -video/fli=open-in-dvm.desktop; -video/flv=open-in-dvm.desktop; -video/mp2t=open-in-dvm.desktop; -video/mp4=open-in-dvm.desktop; -video/mp4v-es=open-in-dvm.desktop; -video/mpeg=open-in-dvm.desktop; -video/msvideo=open-in-dvm.desktop; -video/ogg=open-in-dvm.desktop; -video/quicktime=open-in-dvm.desktop; -video/vivo=open-in-dvm.desktop; -video/vnd.divx=open-in-dvm.desktop; -video/vnd.mpegurl=open-in-dvm.desktop; -video/vnd.rn-realvideo=open-in-dvm.desktop; -video/vnd.vivo=open-in-dvm.desktop; -video/webm=open-in-dvm.desktop; -video/x-anim=open-in-dvm.desktop; -video/x-avi=open-in-dvm.desktop; -video/x-flc=open-in-dvm.desktop; -video/x-fli=open-in-dvm.desktop; -video/x-flic=open-in-dvm.desktop; -video/x-flv=open-in-dvm.desktop; -video/x-m4v=open-in-dvm.desktop; -video/x-matroska=open-in-dvm.desktop; 
-video/x-mpeg=open-in-dvm.desktop; -video/x-mpeg2=open-in-dvm.desktop; -video/x-ms-asf=open-in-dvm.desktop; -video/x-ms-asx=open-in-dvm.desktop; -video/x-msvideo=open-in-dvm.desktop; -video/x-ms-wm=open-in-dvm.desktop; -video/x-ms-wmv=open-in-dvm.desktop; -video/x-ms-wmx=open-in-dvm.desktop; -video/x-ms-wvx=open-in-dvm.desktop; -video/x-nsv=open-in-dvm.desktop; -video/x-ogm+ogg=open-in-dvm.desktop; -video/x-theora+ogg=open-in-dvm.desktop; -video/x-totem-stream=open-in-dvm.desktop; -x-content/video-dvd=open-in-dvm.desktop; -x-content/video-vcd=open-in-dvm.desktop; -x-content/video-svcd=open-in-dvm.desktop; -x-scheme-handler/pnm=open-in-dvm.desktop; -x-scheme-handler/mms=open-in-dvm.desktop; -x-scheme-handler/net=open-in-dvm.desktop; -x-scheme-handler/rtp=open-in-dvm.desktop; -x-scheme-handler/rtmp=open-in-dvm.desktop; -x-scheme-handler/rtsp=open-in-dvm.desktop; -x-scheme-handler/mmsh=open-in-dvm.desktop; -x-scheme-handler/uvox=open-in-dvm.desktop; -x-scheme-handler/icy=open-in-dvm.desktop; -x-scheme-handler/icyx=open-in-dvm.desktop; -application/x-7z-compressed=open-in-dvm.desktop; -application/x-7z-compressed-tar=open-in-dvm.desktop; -application/x-ace=open-in-dvm.desktop; -application/x-alz=open-in-dvm.desktop; -application/x-ar=open-in-dvm.desktop; -application/x-arj=open-in-dvm.desktop; -application/x-bzip=open-in-dvm.desktop; -application/x-bzip-compressed-tar=open-in-dvm.desktop; -application/x-bzip1=open-in-dvm.desktop; -application/x-bzip1-compressed-tar=open-in-dvm.desktop; -application/x-cabinet=open-in-dvm.desktop; -application/x-cbr=open-in-dvm.desktop; -application/x-cbz=open-in-dvm.desktop; -application/x-compress=open-in-dvm.desktop; -application/x-compressed-tar=open-in-dvm.desktop; -application/x-cpio=open-in-dvm.desktop; -application/x-deb=open-in-dvm.desktop; -application/x-ear=open-in-dvm.desktop; -application/x-ms-dos-executable=open-in-dvm.desktop; -application/x-gtar=open-in-dvm.desktop; -application/x-gzip=open-in-dvm.desktop; 
-application/x-gzpostscript=open-in-dvm.desktop; -application/x-java-archive=open-in-dvm.desktop; -application/x-lha=open-in-dvm.desktop; -application/x-lhz=open-in-dvm.desktop; -application/x-lrzip=open-in-dvm.desktop; -application/x-lrzip-compressed-tar=open-in-dvm.desktop; -application/x-lzip=open-in-dvm.desktop; -application/x-lzip-compressed-tar=open-in-dvm.desktop; -application/x-lzma=open-in-dvm.desktop; -application/x-lzma-compressed-tar=open-in-dvm.desktop; -application/x-lzop=open-in-dvm.desktop; -application/x-lzop-compressed-tar=open-in-dvm.desktop; -application/x-ms-wim=open-in-dvm.desktop; -application/x-rar=open-in-dvm.desktop; -application/x-rar-compressed=open-in-dvm.desktop; -application/x-rzip=open-in-dvm.desktop; -application/x-tar=open-in-dvm.desktop; -application/x-tarz=open-in-dvm.desktop; -application/x-stuffit=open-in-dvm.desktop; -application/x-war=open-in-dvm.desktop; -application/x-xz=open-in-dvm.desktop; -application/x-xz-compressed-tar=open-in-dvm.desktop; -application/x-zip=open-in-dvm.desktop; -application/x-zip-compressed=open-in-dvm.desktop; -application/x-zoo=open-in-dvm.desktop; -application/zip=open-in-dvm.desktop; -application/x-archive=open-in-dvm.desktop; -application/vnd.ms-cab-compressed=open-in-dvm.desktop; -application/x-source-rpm=open-in-dvm.desktop; -image/bmp=open-in-dvm.desktop; -image/gif=open-in-dvm.desktop; -image/jpeg=open-in-dvm.desktop; -image/jpg=open-in-dvm.desktop; -image/pjpeg=open-in-dvm.desktop; -image/png=open-in-dvm.desktop; -image/tiff=open-in-dvm.desktop; -image/x-bmp=open-in-dvm.desktop; -image/x-gray=open-in-dvm.desktop; -image/x-icb=open-in-dvm.desktop; -image/x-ico=open-in-dvm.desktop; -image/x-png=open-in-dvm.desktop; -image/x-portable-anymap=open-in-dvm.desktop; -image/x-portable-bitmap=open-in-dvm.desktop; -image/x-portable-graymap=open-in-dvm.desktop; -image/x-portable-pixmap=open-in-dvm.desktop; -image/x-xbitmap=open-in-dvm.desktop; -image/x-xpixmap=open-in-dvm.desktop; 
-image/x-pcx=open-in-dvm.desktop; -image/svg+xml=open-in-dvm.desktop; -image/svg+xml-compressed=open-in-dvm.desktop; -image/vnd.wap.wbmp=open-in-dvm.desktop; diff --git a/files/open-in-dvm.desktop b/files/open-in-dvm.desktop deleted file mode 100644 index 5dd268167..000000000 --- a/files/open-in-dvm.desktop +++ /dev/null @@ -1,10 +0,0 @@ -[Desktop Entry] -Type=Application -Version=1.0 -Name=Open in Disposable VM -Comment=Open file in a Disposable VM -TryExec=/usr/bin/qvm-open-in-vm -Exec=/usr/bin/qvm-open-in-vm --view-only '@dispvm:sd-viewer' %f -Icon=/usr/share/icons/Qubes/dispvm-gray.png -Terminal=false -Categories=Qubes;Utility; diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index 717903969..abd410582 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.2.3 +0.2.4 From 85e3241e34ea92a3da0db008e340a72895a3bdf0 Mon Sep 17 00:00:00 2001 From: Conor Schaefer Date: Tue, 27 Oct 2020 13:00:04 -0700 Subject: [PATCH 215/352] Moves config to private volume The Qubes RPC file hardcodes the filepath to the YAML config file, which contains site-specific information such as the Onion URL for the Journalist Interface. As part of template consolidation [0], we're moving the config file out of the system/root partition and into the private (i.e. /home/) volume, so that the `sd-proxy` AppVM has the config information it needs while sharing a TemplateVM with other components. 
[0] https://github.com/freedomofpress/securedrop-workstation#471 --- qubes/securedrop.Proxy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qubes/securedrop.Proxy b/qubes/securedrop.Proxy index cea1922e4..f8eee7cb2 100755 --- a/qubes/securedrop.Proxy +++ b/qubes/securedrop.Proxy @@ -1 +1 @@ -/usr/bin/sd-proxy /etc/sd-proxy.yaml +/usr/bin/sd-proxy /home/user/.securedrop_proxy/sd-proxy.yaml From 51567adf1190d99594ca32ebeb77171b7a112d7f Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Thu, 29 Oct 2020 16:12:57 -0400 Subject: [PATCH 216/352] Updates version and changelog in preparation for new package --- VERSION | 2 +- changelog.md | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 17e51c385..d917d3e26 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.1.1 +0.1.2 diff --git a/changelog.md b/changelog.md index 32937ae6f..8a5d98b3b 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.1.2 + + * Uses Qubes domain name instead of system hostname. + ## 0.1.1 * Infers hostname from system settings, if no config value found. From 25175d39f5ae9e52feef469c4973042226a48867 Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Thu, 29 Oct 2020 16:39:42 -0400 Subject: [PATCH 217/352] updated version and changelog --- changelog.md | 6 ++++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index bfded302f..7b6a457e4 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,11 @@ # Changelog +## 0.3.1 + + * Moved proxy configuration to private volume (#79) + * Added black and isort checks to standardise code formatting (#61) + * Update urllib3 to version 1.25.10, requests to version 2.22.0, due to CVE-2020-26137 (#76). + ## 0.3.0 * Use incoming timeout value from JSON (#69). 
diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 0d91a54c7..9e11b32fc 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.3.0 +0.3.1 From b3fbfb86de871e20819dc145f6db5409f46c65b1 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 6 Jan 2021 14:32:52 +0530 Subject: [PATCH 218/352] Uses new reproducible wheels from our builds The new wheels are coming in the following PR: https://github.com/freedomofpress/securedrop-debian-packaging/pull/213 These wheels are reproducible, you can build them following our Makefile. --- build-requirements.txt | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/build-requirements.txt b/build-requirements.txt index 85d0a6825..509f24abc 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -1,10 +1,10 @@ -certifi==2018.10.15 --hash=sha256:173b19dd31ca7faa50d1fcc0eaf30f5e32e8e99e17d8c7fd4cfc8bc8d94e18a6 -chardet==3.0.4 --hash=sha256:f5632e583a4f61f1e16d0cc98127d241fb11c3c6ddfddee159307d4215186837 -furl==2.0.0 --hash=sha256:1855003e64dcb934556ad79994ba1a3a852da337e353d84d3b4ef75031913451 -idna==2.7 --hash=sha256:491f674364ba3232ed1eb4c1eb7407887f62cef6c300aad7df6e01acd88ffb25 -orderedmultidict==1.0 --hash=sha256:51efddca0b4ae6d885bbafd8ca44e51758166c144cf006dbead5c9394b2a9eae -pyyaml==5.3.1 --hash=sha256:cb4442140d3195f5f799096aa35aadce15f493046135a03668023b80824dd44c -requests==2.22.0 --hash=sha256:e3711bd465fd013abe428a8dade2938b578f05d2b06b0ae83daa98869c4548e8 -six==1.11.0 --hash=sha256:aa4ad34049ddff178b533062797fd1db9f0038b7c5c2461a7cde2244300b9f3d -urllib3==1.25.10 --hash=sha256:32bcd1ec52a4dd17ede2725ad166c789f50d402870947d6824598915b89ecf70 -werkzeug==0.16.0 --hash=sha256:429de1b931a2a58bf5cfac8447253949f7a930d30a73f2755e0ad0f9824592bf +certifi==2018.10.15 --hash=sha256:2d5538b9d89b3a9cb423b8f08417c657856cab733d560067e864157f71b1c5aa +chardet==3.0.4 --hash=sha256:e5cf39014befb85add77118fdc946f0a3387df7840235ba5d20fd6e3a672410a 
+furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc +idna==2.7 --hash=sha256:a967a752dba5492be34f30f8633cee512d2ec923fe3a2e201d87353b540d2955 +orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f +pyyaml==5.3.1 --hash=sha256:ff021f812a5dbaeb10678462952bb447bf6b322ab54923f5591811056a827c15 +requests==2.22.0 --hash=sha256:83ddbb326afd5524e7fbca582fd9673103652ea3b0c6601ac5ba1a4501f077c9 +six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 +urllib3==1.25.10 --hash=sha256:c78fdeffd1a01640ea99d35851539a4bc91e46a672989c4d96663e3808121389 +werkzeug==0.16.0 --hash=sha256:bd05301a84a9bc3b33f178e53446181879744b74f098ed35850ba21125379be1 From 0329232f1457eebaf07c50d9cce5b31edce3bf20 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Wed, 6 Jan 2021 14:44:45 +0530 Subject: [PATCH 219/352] Uses new reproducible wheels from our builds The new wheels are coming in the following PR: https://github.com/freedomofpress/securedrop-debian-packaging/pull/213 These wheels are reproducible, you can build them following our Makefile. 
--- build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-requirements.txt b/build-requirements.txt index af566a319..b9813f49e 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -1 +1 @@ -redis==3.3.11 --hash=sha256:022f124431ae16ee3a3a69c8016e3e2b057b4f4e0bfa7787b6271d893890c3cc +redis==3.3.11 --hash=sha256:974926675d246ade47a101e305596a0f50282a405a03b7173a70d99c79e6370c From f43623fd78f389ddf71ce729ce1db04c41ee27f5 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 12 Mar 2021 14:23:00 -0800 Subject: [PATCH 220/352] add tests, semgrep and some rulez Signed-off-by: Allie Crevier --- .flake8 | 2 +- .semgrep/custom-rules.yaml | 99 +++++++++++++++++++++++ Makefile | 15 ++++ test-requirements.in | 1 + test-requirements.txt | 157 +++++++++++++++++++++++++++++++------ tests/test_export.py | 127 +++++++++++++++++++++++++++++- 6 files changed, 373 insertions(+), 28 deletions(-) create mode 100644 .semgrep/custom-rules.yaml diff --git a/.flake8 b/.flake8 index 61d908155..7da1f9608 100644 --- a/.flake8 +++ b/.flake8 @@ -1,2 +1,2 @@ [flake8] -max-line-length = 99 +max-line-length = 100 diff --git a/.semgrep/custom-rules.yaml b/.semgrep/custom-rules.yaml new file mode 100644 index 000000000..6793bb298 --- /dev/null +++ b/.semgrep/custom-rules.yaml @@ -0,0 +1,99 @@ +rules: + +- id: tarfile-extractall-traversal + languages: + - python + severity: ERROR + message: Possible path traversal through tarfile.open($PATH).extractall() if the source tar is controlled by an attacker. + patterns: + - pattern: "....extractall(...)" + - pattern-not-inside: | + def safe_extractall(...): + ... + +- id: tarfile-extract-traversal + languages: + - python + severity: ERROR + message: Possible path traversal through tarfile.open($PATH).extract() if the source tar is controlled by an attacker. 
+ patterns: + - pattern: "....extract(...)" + +- id: gzip-extract-traversal + languages: + - python + severity: ERROR + message: Possible path traversal through gzip.open if the source zip file is controlled by an attacker. + patterns: + - pattern: | + with gzip.open(...) as $IN, open(...) as $OUT: + ... + copyfileobj(...) + +- id: gzip-open-insecure + languages: + - python + severity: ERROR + message: Possible path traversal through gzip.open if the source zip file is controlled by an attacker. + patterns: + - pattern: | + with gzip.open(...) as $IN, open(...) as $OUT: + ... + - pattern-not-inside: | + def safe_gzip_extract(...): + ... + +- id: mkdir-insecure + languages: + - python + severity: ERROR + message: Possible path traversal or insecure directory and file permissions through os.mkdir(). Use securedrop_export.utils.safe_mkdir instead. + patterns: + - pattern: "....mkdir(...)" + - pattern-not-inside: | + def safe_mkdir(...): + ... + +- id: makedirs-insecure + languages: + - python + severity: ERROR + message: Possible path traversal or insecure directory and file permissions through os.makedirs(). Use securedrop_export.utils.safe_mkdir instead. + patterns: + - pattern: "....makedirs(...)" + - pattern-not-inside: | + def safe_mkdir(...): + ... + +- id: copy-insecure + languages: + - python + severity: ERROR + message: Possible path traversal or insecure directory and file permissions through shutil.copy(). Use securedrop_export.utils.safe_copy instead. + patterns: + - pattern: "....shutil.copy(...)" + - pattern-not-inside: | + def safe_copy(...): + ... + +- id: copyfileobj-insecure + languages: + - python + severity: ERROR + message: Possible path traversal or insecure directory and file permissions through shutil.copyfileobj(). Use securedrop_export.utils.safe_copyfileobj instead. + patterns: + - pattern: "....shutil.copyfileobj(...)" + - pattern-not-inside: | + def safe_copyfileobj(...): + ... 
+ +- id: move-insecure + languages: + - python + severity: ERROR + message: Possible path traversal or insecure directory and file permissions through shutil.move(). Use securedrop_export.utils.safe_move instead. + patterns: + - pattern: "....shutil.move(...)" + - pattern-not-inside: | + def safe_move(...): + ... diff --git a/Makefile b/Makefile index 762c42ec9..153924c04 100644 --- a/Makefile +++ b/Makefile @@ -24,6 +24,21 @@ test: ## Run tests lint: ## Run linter flake8 securedrop_export/ tests/ +SEMGREP_FLAGS := --exclude "tests/" --error --strict --verbose + +.PHONY: semgrep +semgrep:semgrep-community semgrep-local + +.PHONY: semgrep-community +semgrep-community: + @echo "Running semgrep with semgrep.dev community rules..." + @semgrep $(SEMGREP_FLAGS) --config "p/r2c-security-audit" --config "p/r2c-ci" + +.PHONY: semgrep-local +semgrep-local: + @echo "Running semgrep with local rules..." + @semgrep $(SEMGREP_FLAGS) --config ".semgrep" + # Explaination of the below shell command should it ever break. # 1. Set the field separator to ": ##" and any make targets that might appear between : and ## # 2. 
Use sed-like syntax to remove the make targets diff --git a/test-requirements.in b/test-requirements.in index 6a68d166f..305ef7cbc 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -4,3 +4,4 @@ pip-tools pytest pytest-cov pytest-mock +semgrep==0.42.0 diff --git a/test-requirements.txt b/test-requirements.txt index e18ef566f..8225364f0 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,16 +6,31 @@ # atomicwrites==1.3.0 \ --hash=sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4 \ - --hash=sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6 \ - # via pytest -attrs==19.1.0 \ - --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \ - --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399 \ + --hash=sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6 # via pytest +attrs==20.3.0 \ + --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ + --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 + # via + # jsonschema + # pytest + # semgrep +certifi==2020.12.5 \ + --hash=sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c \ + --hash=sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830 + # via requests +chardet==4.0.0 \ + --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ + --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 + # via requests click==7.0 \ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ - --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ + --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 # via pip-tools +colorama==0.4.4 \ + --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ + --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 + 
# via semgrep coverage==4.5.4 \ --hash=sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6 \ --hash=sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650 \ @@ -48,76 +63,168 @@ coverage==4.5.4 \ --hash=sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c \ --hash=sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7 \ --hash=sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0 \ - --hash=sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025 \ + --hash=sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025 # via pytest-cov entrypoints==0.3 \ --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ - --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ + --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 # via flake8 flake8==3.7.8 \ --hash=sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548 \ --hash=sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696 + # via -r test-requirements.in +idna==2.10 \ + --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ + --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 + # via requests importlib-metadata==0.18 \ --hash=sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7 \ - --hash=sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db \ - # via pluggy, pytest + --hash=sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db + # via + # jsonschema + # pluggy + # pytest +jsonschema==3.2.0 \ + --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ + --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a + # via semgrep +junit-xml==1.9 \ + --hash=sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732 + # via semgrep 
mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ - --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f # via flake8 more-itertools==5.0.0 \ --hash=sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4 \ --hash=sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc \ - --hash=sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9 \ - # via pytest -packaging==19.0 \ - --hash=sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af \ - --hash=sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3 \ + --hash=sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9 # via pytest +packaging==20.9 \ + --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \ + --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a + # via + # pytest + # semgrep pathlib2==2.3.5 \ --hash=sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db \ --hash=sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868 + # via -r test-requirements.in pip-tools==4.2.0 \ --hash=sha256:123174aabf7f4a63dd6e0bfc8aeeb5eaddbecb75a41e9f0dd4c447b1f2de14f7 \ --hash=sha256:5427ea4dcc175649723985fbcace9b2d8f46f9adbcc63bc2d7b247d9bcc74917 + # via -r test-requirements.in pluggy==0.12.0 \ --hash=sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc \ - --hash=sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c \ + --hash=sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c # via pytest py==1.8.0 \ --hash=sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa \ - --hash=sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53 \ + 
--hash=sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53 # via pytest pycodestyle==2.5.0 \ --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ - --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c \ + --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c # via flake8 pyflakes==2.1.1 \ --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \ - --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 \ + --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 # via flake8 pyparsing==2.4.1.1 \ --hash=sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580 \ - --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 \ + --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 # via packaging +pyrsistent==0.17.3 \ + --hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e + # via jsonschema pytest-cov==2.8.1 \ --hash=sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b \ --hash=sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626 + # via -r test-requirements.in pytest-mock==1.12.1 \ --hash=sha256:96a0cebc66e09930be2a15b03333d90b59584d3fb011924f81c14b50ee0afbba \ --hash=sha256:e5381be2608e49547f5e47633c5f81241ebf6206d17ce516a7a18d5a917e3859 + # via -r test-requirements.in pytest==4.6.4 \ --hash=sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae \ --hash=sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6 + # via + # -r test-requirements.in + # pytest-cov + # pytest-mock +requests==2.25.1 \ + --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804 \ + --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e + # via semgrep +ruamel.yaml.clib==0.2.2 \ + 
--hash=sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b \ + --hash=sha256:1236df55e0f73cd138c0eca074ee086136c3f16a97c2ac719032c050f7e0622f \ + --hash=sha256:1f8c0a4577c0e6c99d208de5c4d3fd8aceed9574bb154d7a2b21c16bb924154c \ + --hash=sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91 \ + --hash=sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc \ + --hash=sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7 \ + --hash=sha256:2fd336a5c6415c82e2deb40d08c222087febe0aebe520f4d21910629018ab0f3 \ + --hash=sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7 \ + --hash=sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6 \ + --hash=sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6 \ + --hash=sha256:46d6d20815064e8bb023ea8628cfb7402c0f0e83de2c2227a88097e239a7dffd \ + --hash=sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0 \ + --hash=sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62 \ + --hash=sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99 \ + --hash=sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5 \ + --hash=sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026 \ + --hash=sha256:6c0a5dc52fc74eb87c67374a4e554d4761fd42a4d01390b7e868b30d21f4b8bb \ + --hash=sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2 \ + --hash=sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1 \ + --hash=sha256:75f0ee6839532e52a3a53f80ce64925ed4aed697dd3fa890c4c918f3304bd4f4 \ + --hash=sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b \ + --hash=sha256:8be05be57dc5c7b4a0b24edcaa2f7275866d9c907725226cdde46da09367d923 \ + --hash=sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e \ + --hash=sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c \ + 
--hash=sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988 \ + --hash=sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f \ + --hash=sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5 \ + --hash=sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a \ + --hash=sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1 \ + --hash=sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2 \ + --hash=sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f + # via ruamel.yaml +ruamel.yaml==0.16.10 \ + --hash=sha256:0962fd7999e064c4865f96fb1e23079075f4a2a14849bcdc5cdba53a24f9759b \ + --hash=sha256:099c644a778bf72ffa00524f78dd0b6476bca94a1da344130f4bf3381ce5b954 + # via semgrep +semgrep==0.42.0 \ + --hash=sha256:179741ce6f8f6785d048af5402bb2452a8771d4282f8aa7cb6852a5adad79fe8 \ + --hash=sha256:376b7a25817a24b32302f49656ea0ddcb2e535de2b05fdf42646f0bd4f33957e \ + --hash=sha256:e50ac0028b98f344166d2464853009837aed9abe669deac93fec04b677b97d2c + # via -r test-requirements.in six==1.12.0 \ --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ - --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ - # via more-itertools, packaging, pathlib2, pip-tools, pytest + --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 + # via + # jsonschema + # junit-xml + # more-itertools + # pathlib2 + # pip-tools + # pytest +tqdm==4.59.0 \ + --hash=sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7 \ + --hash=sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33 + # via semgrep +urllib3==1.26.3 \ + --hash=sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80 \ + --hash=sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73 + # via requests wcwidth==0.1.7 \ 
--hash=sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e \ - --hash=sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c \ + --hash=sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c # via pytest zipp==0.5.2 \ --hash=sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a \ - --hash=sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec \ + --hash=sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec # via importlib-metadata + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. +# setuptools diff --git a/tests/test_export.py b/tests/test_export.py index a161e75ed..ec6df0a69 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -1,8 +1,12 @@ import os -import pytest import subprocess # noqa: F401 import tempfile +import json +import pytest +import tarfile +from io import BytesIO + from securedrop_export import export TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") @@ -10,8 +14,127 @@ ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") -def test_exit_gracefully_no_exception(capsys): +def test_extract_tarball(): + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + content = b"test" + file_info = tarfile.TarInfo("some/dirs/file.txt") + file_info.size = len(content) + file_info.mode = 0o777 + 
archive.addfile(file_info, BytesIO(content)) + + dir_info = tarfile.TarInfo("some") + dir_info.type = tarfile.DIRTYPE + dir_info.mode = 0o777 + archive.addfile(dir_info) + + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + assert oct(os.stat(submission.tmpdir).st_mode) == "0o40700" + + submission.extract_tarball() + + extracted_file_path = os.path.join(submission.tmpdir, "some", "dirs", "file.txt") + assert os.path.exists(extracted_file_path) + assert oct(os.stat(extracted_file_path).st_mode) == "0o100600" + + # Subdirectories that are added as members are extracted with 700 permissions + assert oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" + # Subdirectories that are not added as members are extracted with 700 permissions + # because os.umask(0o077) is set in the SDExport constructor. + assert oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) == "0o40700" + + +def test_extract_tarball_with_symlink(): + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + symlink_info = tarfile.TarInfo("symlink") + symlink_info.type = tarfile.SYMTYPE + symlink_info.linkname = "file" + archive.addfile(symlink_info) + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + assert oct(os.stat(submission.tmpdir).st_mode) == "0o40700" + + submission.extract_tarball() + + symlink_path = os.path.join(submission.tmpdir, "symlink") + assert os.path.islink(symlink_path) + + +def test_extract_tarball_raises_if_doing_path_traversal(): + with 
tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + content = b"test" + traversed_file_info = tarfile.TarInfo("../../../../../../../../../tmp/traversed") + traversed_file_info.size = len(content) + archive.addfile(traversed_file_info, BytesIO(content)) + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists('/tmp/traversed') + assert not os.path.exists(os.path.join(submission.tmpdir, "tmp", "traversed")) + + +def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): + """ + This is a contrived path-traversal check because /tmp/traversed2 would have to be created as + another tafile member, so it would be extracted to the extraction path and not to /tmp. + However, it allows us to test that we raise if there is a path traversal attempt via a symlink. 
+ """ + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + content = b"test" + symlink_info = tarfile.TarInfo("symlink") + symlink_info.size = len(content) + symlink_info.type = tarfile.SYMTYPE + symlink_info.linkname = "../../../../../../../../../tmp/traversed2" + archive.addfile(symlink_info, BytesIO(content)) + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists(os.path.join(submission.tmpdir, "symlink")) + +def test_exit_gracefully_no_exception(capsys): submission = export.SDExport("testfile", TEST_CONFIG) test_msg = 'test' From 569bd56e2a87d6421ff66df55f72ebb94bcea36d Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 16 Mar 2021 14:47:20 -0700 Subject: [PATCH 221/352] ensure minimal perms and validate target path --- securedrop_export/entrypoint.py | 10 ++- securedrop_export/export.py | 13 +-- securedrop_export/utils.py | 147 ++++++++++++++++++++++++++++++++ 3 files changed, 161 insertions(+), 9 deletions(-) create mode 100644 securedrop_export/utils.py diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index a34c6dd25..b82e8e3bc 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -8,9 +8,12 @@ from securedrop_export import __version__ from securedrop_export import export from securedrop_export import main +from securedrop_export.utils import safe_mkdir CONFIG_PATH = "/etc/sd-export-config.json" DEFAULT_HOME = 
os.path.join(os.path.expanduser("~"), ".securedrop_export") +LOG_DIR_NAME = "logs" +EXPORT_LOG_FILENAME = "export.log" logger = logging.getLogger(__name__) @@ -19,11 +22,10 @@ def configure_logging(): """ All logging related settings are set up by this function. """ - log_folder = os.path.join(DEFAULT_HOME, 'logs') - if not os.path.exists(log_folder): - os.makedirs(log_folder) + safe_mkdir(DEFAULT_HOME) + safe_mkdir(DEFAULT_HOME, LOG_DIR_NAME) - log_file = os.path.join(DEFAULT_HOME, 'logs', 'export.log') + log_file = os.path.join(DEFAULT_HOME, LOG_DIR_NAME, EXPORT_LOG_FILENAME) # set logging format log_fmt = ('%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) ' diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 814a034df..ab3bf45f2 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -8,10 +8,10 @@ import shutil import subprocess import sys -import tarfile import tempfile from securedrop_export.exceptions import ExportStatus +from securedrop_export.utils import safe_extractall logger = logging.getLogger(__name__) @@ -78,6 +78,7 @@ def is_valid(self): class SDExport(object): def __init__(self, archive, config_path): + os.umask(0o077) self.archive = archive self.submission_dirname = os.path.basename(self.archive).split(".")[0] self.target_dirname = "sd-export-{}".format( @@ -87,10 +88,12 @@ def __init__(self, archive, config_path): def extract_tarball(self): try: - logger.info('Extracting tarball {} into {}'.format(self.archive, self.tmpdir)) - with tarfile.open(self.archive) as tar: - tar.extractall(self.tmpdir) - except Exception: + logger.info( + "Extracting tarball {} into {}".format(self.archive, self.tmpdir) + ) + safe_extractall(self.archive, self.tmpdir, self.tmpdir) + except Exception as ex: + logger.error("Unable to extract tarball: {}".format(ex)) self.exit_gracefully(ExportStatus.ERROR_EXTRACTION.value) def exit_gracefully(self, msg, e=False): diff --git a/securedrop_export/utils.py 
b/securedrop_export/utils.py new file mode 100644 index 000000000..bd219a4fc --- /dev/null +++ b/securedrop_export/utils.py @@ -0,0 +1,147 @@ +import os +import tarfile +from pathlib import Path +from typing import Optional, Union + + +def safe_mkdir( + base_path: Union[Path, str], + relative_path: Union[Optional[Path], Optional[str]] = None, +) -> None: + """ + Safely create directories with restricted 700 permissions inside the base_path directory. The + caller of this function should ensure that base_path comes from a hard-coded string. + + Raises FileNotFoundError if base_path does not already exist or requires more than one new dir + Raises RuntimeError if any dir in relative_path or the last dir of base_path have insecure perms + Raises ValueError if any of the following conditions is true: + * base_dir fails path traversal check, e.g. "/home/../traversed" fails check + * the resolved relative_path is not a subdirectory of base_path + * a child directory in relative_path already exists with permissions other than 700 + """ + base_path = Path(base_path) + if not base_path.is_absolute(): + raise ValueError(f"Base directory '{base_path}' must be an absolute path") + + check_path_traversal(base_path) + + if relative_path: + check_path_traversal(relative_path) + full_path = base_path.joinpath(relative_path) + else: + full_path = base_path + + # Create each parent directory, including base_path, first. + # + # Note: We do not use parents=True because the parent directories will not be created with the + # specified mode. Parents are created using system default permissions, which we modify to be + # 700 via os.umask in the SDExport contructor. Creating directories one-by-one with mode=0o0700 + # is not necessary but adds defense in depth. + relative_path = relative_filepath(full_path, base_path) + for parent in reversed(relative_path.parents): + base_path.joinpath(parent).mkdir(mode=0o0700, exist_ok=True) + + # Now create the full_path directory. 
+ full_path.mkdir(mode=0o0700, exist_ok=True) + + # Check permissions after creating the directories + check_all_permissions(relative_path, base_path) + + +def safe_extractall(archive_file_path: str, dest_path: str, base_path: str) -> None: + """ + Safely extract a file specified by archive_file_path to dest_path. + """ + safe_mkdir(base_path, dest_path) + + with tarfile.open(archive_file_path) as tar: + # Tarfile types include: + # + # FIFO special file (a named pipe) + # Regular file + # Directory + # Symbolic link + # Hard link + # Block device + # Character device + for file_info in tar.getmembers(): + file_info.mode = 0o600 + if file_info.isdir(): + file_info.mode = 0o700 + elif file_info.islnk() or file_info.issym(): + check_path_traversal(file_info.linkname) + else: + check_path_traversal(file_info.name) + + tar.extractall(dest_path) + + +def relative_filepath(filepath: Union[str, Path], base_dir: Union[str, Path]) -> Path: + """ + Raise ValueError if the filepath is not relative to the supplied base_dir or if base_dir is not + an absolute path. + + Note: resolve() will also resolve symlinks, so a symlink such as /tmp/tmp1a2s3d4f/innocent + that points to ../../../../../tmp/traversed will raise a ValueError if the base_dir is the + expected /tmp/tmp1a2s3d4f. + """ + return Path(filepath).resolve().relative_to(base_dir) + + +def check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: + """ + Raise ValueError if filename_or_filepath does any path traversal. This works on filenames, + relative paths, and absolute paths. + """ + filename_or_filepath = Path(filename_or_filepath) + if filename_or_filepath.is_absolute(): + base_path = filename_or_filepath + else: + base_path = Path().resolve() + + try: + relative_path = relative_filepath(filename_or_filepath, base_path) + + # One last check just to cover "weird/../traversals" that may not traverse past the relative + # base, but can still have harmful side effects to the application. 
If this kind of + # traversal is needed, then call relative_filepath instead in order to check that the + # desired traversal does not go past a safe base directory. + if ( + relative_path != filename_or_filepath + and not filename_or_filepath.is_absolute() + ): + raise ValueError + except ValueError: + raise ValueError(f"Unsafe file or directory name: '{filename_or_filepath}'") + + +def check_all_permissions(path: Union[str, Path], base_path: Union[str, Path]) -> None: + """ + Check that the permissions of each directory between base_path and path are set to 700. + """ + base_path = Path(base_path) + full_path = base_path.joinpath(path) + if not full_path.exists(): + return + + Path(full_path).chmod(0o700) + check_dir_permissions(full_path) + + relative_path = relative_filepath(full_path, base_path) + for parent in relative_path.parents: + full_path = base_path.joinpath(parent) + Path(full_path).chmod(0o700) + check_dir_permissions(str(full_path)) + + +def check_dir_permissions(dir_path: Union[str, Path]) -> None: + """ + Check that a directory has ``700`` as the final 3 bytes. Raises a ``RuntimeError`` otherwise. 
+ """ + if os.path.exists(dir_path): + stat_res = os.stat(dir_path).st_mode + masked = stat_res & 0o777 + if masked & 0o077: + raise RuntimeError( + "Unsafe permissions ({}) on {}".format(oct(stat_res), dir_path) + ) From a47574ef58034c060697eb2dc3fc33f4e0f93c2a Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 16 Mar 2021 15:10:48 -0700 Subject: [PATCH 222/352] update ci image --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d4fb3a49f..1bba7c98a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -61,7 +61,7 @@ version: 2 jobs: lint: docker: - - image: circleci/python:3.5 + - image: circleci/python:3.7 steps: - checkout - run: From 03c1640a70e18e07fa4c09cfe36336582986fe83 Mon Sep 17 00:00:00 2001 From: Conor Schaefer Date: Tue, 16 Mar 2021 15:28:06 -0700 Subject: [PATCH 223/352] Updates "py" test dependency to 1.9.0 Closes #68. --- test-requirements.in | 1 + test-requirements.txt | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/test-requirements.in b/test-requirements.in index 305ef7cbc..024c0a86a 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1,6 +1,7 @@ flake8 pathlib2 # required by pytest for python 3.5 pip-tools +py>=1.9.0 pytest pytest-cov pytest-mock diff --git a/test-requirements.txt b/test-requirements.txt index 8225364f0..107e041fc 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -118,10 +118,9 @@ pluggy==0.12.0 \ --hash=sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc \ --hash=sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c # via pytest -py==1.8.0 \ - --hash=sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa \ - --hash=sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53 - # via pytest +py==1.10.0 \ + --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ + 
--hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a pycodestyle==2.5.0 \ --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c From 9e717f07ee1e46de2261b4009b7ea3ce3306bdba Mon Sep 17 00:00:00 2001 From: Conor Schaefer Date: Tue, 16 Mar 2021 15:24:26 -0700 Subject: [PATCH 224/352] Bumps version 0.2.4 -> 0.2.5 --- changelog.md | 3 +++ securedrop_export/VERSION | 2 +- securedrop_export/__init__.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/changelog.md b/changelog.md index b443f7f94..0b31003fa 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,8 @@ # Changelog +## 0.2.5 + * Sets restrictive permissions, validates target paths + ## 0.2.4 * Removes mimetype associations and open-in-dvm desktop file diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index abd410582..3a4036fb4 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.2.4 +0.2.5 diff --git a/securedrop_export/__init__.py b/securedrop_export/__init__.py index 10939f01b..13a85f774 100644 --- a/securedrop_export/__init__.py +++ b/securedrop_export/__init__.py @@ -1 +1 @@ -__version__ = '0.1.2' +__version__ = '0.2.5' From b683bdb8e413df6dd8b3c6508dd543e90acce2ea Mon Sep 17 00:00:00 2001 From: mickael e Date: Tue, 2 Mar 2021 16:09:10 -0500 Subject: [PATCH 225/352] Update PyYAML to 5.4.1 Addresses CVE-2020-14343 --- dev-requirements.txt | 34 ++++++++++++++++++++++------------ requirements.in | 2 +- requirements.txt | 34 ++++++++++++++++++++++------------ 3 files changed, 45 insertions(+), 25 deletions(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index 2bdc0745f..daa7e02d9 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -152,18 +152,28 @@ pyflakes==2.0.0 \ --hash=sha256:9a7662ec724d0120012f6e29d6248ae3727d821bba522a0e6b356eff19126a49 \ 
--hash=sha256:f661252913bc1dbe7fcfcbf0af0db3f42ab65aabd1a6ca68fe5d466bace94dae \ # via -r dev-requirements.in, flake8 -pyyaml==5.3.1 \ - --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ - --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ - --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ - --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ - --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ - --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ - --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ - --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ - --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ - --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ - --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ +pyyaml==5.4.1 \ + --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ + --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ + --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ + --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ + --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ + --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ + --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ + --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ + --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ + --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ + --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ + 
--hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ + --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ + --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ + --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ + --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ + --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ + --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ + --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ + --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ + --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ # via -r requirements.in, vcrpy regex==2020.1.8 \ --hash=sha256:07b39bf943d3d2fe63d46281d8504f8df0ff3fe4c57e13d1656737950e53e525 \ diff --git a/requirements.in b/requirements.in index ba72056e0..8aa528ea4 100644 --- a/requirements.in +++ b/requirements.in @@ -3,7 +3,7 @@ chardet==3.0.4 furl==2.0.0 idna==2.7 orderedmultidict==1.0 -PyYAML==5.3.1 +PyYAML==5.4.1 six==1.11.0 requests==2.22.0 urllib3>=1.25.10 diff --git a/requirements.txt b/requirements.txt index a61eb958b..4b2f9fbe2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,18 +24,28 @@ orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 \ # via -r requirements.in, furl -pyyaml==5.3.1 \ - --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ - --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ - --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ - --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ - 
--hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ - --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ - --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ - --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ - --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ - --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ - --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ +pyyaml==5.4.1 \ + --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ + --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ + --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ + --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ + --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ + --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ + --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ + --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ + --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ + --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ + --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ + --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ + --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ + --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ + --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ + --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ + 
--hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ + --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ + --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ + --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ + --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ # via -r requirements.in requests==2.22.0 \ --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ From 2bd058f7bb039e802bfb9171db5a678bb3b0f9fc Mon Sep 17 00:00:00 2001 From: mickael e Date: Tue, 2 Mar 2021 17:00:21 -0500 Subject: [PATCH 226/352] Adds locally build PyYAML 5.4.1 wheel from securedrop-debian-packaging --- build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-requirements.txt b/build-requirements.txt index 509f24abc..c5c3c27cd 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -3,7 +3,7 @@ chardet==3.0.4 --hash=sha256:e5cf39014befb85add77118fdc946f0a3387df7840235ba5d20 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc idna==2.7 --hash=sha256:a967a752dba5492be34f30f8633cee512d2ec923fe3a2e201d87353b540d2955 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f -pyyaml==5.3.1 --hash=sha256:ff021f812a5dbaeb10678462952bb447bf6b322ab54923f5591811056a827c15 +pyyaml==5.4.1 --hash=sha256:c2ab2737721fe6f5545fa830312fe7b77e74932c8bbd780a17a3d7df2ff65f99 requests==2.22.0 --hash=sha256:83ddbb326afd5524e7fbca582fd9673103652ea3b0c6601ac5ba1a4501f077c9 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.25.10 --hash=sha256:c78fdeffd1a01640ea99d35851539a4bc91e46a672989c4d96663e3808121389 From a5da1db19d70bb6059e359e1300d5bbe05ea3e9a Mon Sep 17 00:00:00 2001 From: mickael e Date: Tue, 2 Mar 2021 17:24:33 -0500 Subject: [PATCH 227/352] Adds 
nightly CI run --- .circleci/config.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index dedca8c84..7b2d242fb 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -77,3 +77,15 @@ workflows: jobs: - test-buster - build-buster + + nightly: + triggers: + - schedule: + cron: "0 6 * * *" + filters: + branches: + only: + - main + jobs: + - test-buster + - build-buster From d6787c7cc7a0890a3d21c7a948ab708ce6b39510 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 23 Mar 2021 14:59:54 -0700 Subject: [PATCH 228/352] validate paths for all tarfile types --- securedrop_export/export.py | 2 +- securedrop_export/utils.py | 22 ++-- tests/test_export.py | 234 +++++++++++++++++++++++++++++++++++- 3 files changed, 242 insertions(+), 16 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index ab3bf45f2..1f8274037 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -91,7 +91,7 @@ def extract_tarball(self): logger.info( "Extracting tarball {} into {}".format(self.archive, self.tmpdir) ) - safe_extractall(self.archive, self.tmpdir, self.tmpdir) + safe_extractall(self.archive, self.tmpdir) except Exception as ex: logger.error("Unable to extract tarball: {}".format(ex)) self.exit_gracefully(ExportStatus.ERROR_EXTRACTION.value) diff --git a/securedrop_export/utils.py b/securedrop_export/utils.py index bd219a4fc..8aeaba86e 100644 --- a/securedrop_export/utils.py +++ b/securedrop_export/utils.py @@ -48,12 +48,10 @@ def safe_mkdir( check_all_permissions(relative_path, base_path) -def safe_extractall(archive_file_path: str, dest_path: str, base_path: str) -> None: +def safe_extractall(archive_file_path: str, dest_path: str) -> None: """ Safely extract a file specified by archive_file_path to dest_path. 
""" - safe_mkdir(base_path, dest_path) - with tarfile.open(archive_file_path) as tar: # Tarfile types include: # @@ -65,13 +63,19 @@ def safe_extractall(archive_file_path: str, dest_path: str, base_path: str) -> N # Block device # Character device for file_info in tar.getmembers(): - file_info.mode = 0o600 - if file_info.isdir(): - file_info.mode = 0o700 - elif file_info.islnk() or file_info.issym(): + file_info.mode = 0o700 if file_info.isdir() else 0o600 + + check_path_traversal(file_info.name) + + # If the path is relative then we don't need to check that it resolves to dest_path + if Path(file_info.name).is_absolute(): + relative_filepath(file_info.name, dest_path) + + if file_info.islnk() or file_info.issym(): check_path_traversal(file_info.linkname) - else: - check_path_traversal(file_info.name) + # If the path is relative then we don't need to check that it resolves to dest_path + if Path(file_info.linkname).is_absolute(): + relative_filepath(file_info.linkname, dest_path) tar.extractall(dest_path) diff --git a/tests/test_export.py b/tests/test_export.py index ec6df0a69..b3caf17be 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -15,6 +15,9 @@ def test_extract_tarball(): + """ + Check that we can successfully extract a valid tarball. + """ with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: @@ -54,6 +57,9 @@ def test_extract_tarball(): def test_extract_tarball_with_symlink(): + """ + Check that we can successfully extract a valid tarball that contains a valid symlink. + """ with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: @@ -79,6 +85,12 @@ def test_extract_tarball_with_symlink(): def test_extract_tarball_raises_if_doing_path_traversal(): + """ + Check that we do not allow tarfile member file to do path traversal via TarInfo.name. 
+ """ + if os.path.exists("/tmp/traversed"): + os.remove("/tmp/traversed") + with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: @@ -100,15 +112,225 @@ def test_extract_tarball_raises_if_doing_path_traversal(): submission.extract_tarball() assert not os.path.exists('/tmp/traversed') - assert not os.path.exists(os.path.join(submission.tmpdir, "tmp", "traversed")) + + +def test_extract_tarball_raises_if_doing_path_traversal_with_dir(): + """ + Check that we do not allow tarfile member directory to do path traversal via TarInfo.name. + """ + if os.path.exists("/tmp/traversed/"): + os.rmdir("/tmp/traversed/") + + if os.path.exists("/tmp/traversed"): + os.remove("/tmp/traversed") + + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + dir_info = tarfile.TarInfo("../../../../../../../../../tmp/traversed") + dir_info.type = tarfile.DIRTYPE + dir_info.mode = 0o777 + archive.addfile(dir_info) + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists('/tmp/traversed') def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): """ - This is a contrived path-traversal check because /tmp/traversed2 would have to be created as - another tafile member, so it would be extracted to the extraction path and not to /tmp. 
- However, it allows us to test that we raise if there is a path traversal attempt via a symlink. + Check that we do not allow tarfile member symlink to do path traversal via TarInfo.name. + """ + if os.path.exists("/tmp/traversed/"): + os.rmdir("/tmp/traversed/") + + if os.path.exists("/tmp/traversed"): + os.remove("/tmp/traversed") + + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + content = b"test" + symlink_info = tarfile.TarInfo("symlink") + symlink_info.size = len(content) + symlink_info.type = tarfile.SYMTYPE + symlink_info.name = "../../../../../../../../../tmp/traversed" + archive.addfile(symlink_info, BytesIO(content)) + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists('/tmp/traversed') + + +def test_extract_tarball_raises_if_doing_path_traversal_with_symlink_linkname(): + """ + Check that we do not allow tarfile member symlink to do path traversal via TarInfo.linkname. 
""" + if os.path.exists("/tmp/traversed/"): + os.rmdir("/tmp/traversed/") + + if os.path.exists("/tmp/traversed"): + os.remove("/tmp/traversed") + + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + content = b"test" + symlink_info = tarfile.TarInfo("symlink") + symlink_info.size = len(content) + symlink_info.type = tarfile.SYMTYPE + symlink_info.linkname = "../../../../../../../../../tmp/traversed" + archive.addfile(symlink_info, BytesIO(content)) + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists('/tmp/traversed') + + +def test_extract_tarball_raises_if_name_has_unsafe_absolute_path(): + """ + Check that we do not allow tarfile member file to specify an unsafe absolute path via + TarInfo.name. 
+ """ + if os.path.exists("/tmp/unsafe"): + os.remove("/tmp/unsafe") + + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + content = b"test" + file_info = tarfile.TarInfo("/tmp/unsafe") + file_info.size = len(content) + file_info.mode = 0o777 + archive.addfile(file_info, BytesIO(content)) + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists('/tmp/unsafe') + + +def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): + """ + Check that we do not allow tarfile member symlink to specify an unsafe absolute path via + TarInfo.name. 
+ """ + if os.path.exists("/tmp/unsafe"): + os.remove("/tmp/unsafe") + + tmp = tempfile.gettempdir() + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + symlink_path = os.path.join(temp_dir, "symlink") + + os.system(f"ln -s {tmp}/unsafe {symlink_path}") # create symlink to "/tmp/unsafe" + + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + archive.add(symlink_path, "symlink") + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists('/tmp/unsafe') + + +def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink_to_dir(): + """ + Check that we do not allow tarfile member symlink to specify an unsafe absolute path via + TarInfo.name. + + Note: Same test as `test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink` + but checks that symlinks to absolute directories are also caught. 
+ """ + if os.path.exists("/tmp/unsafe"): + os.remove("/tmp/unsafe") + + tmp = tempfile.gettempdir() + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, "archive.sd-export") + symlink_path = os.path.join(temp_dir, "symlink") + file_path = os.path.join(temp_dir, "unsafe") + + with open(file_path, "w") as file: + file.write("some-content") + + os.system(f"ln -s {tmp} {symlink_path}") # create symlink to "/tmp" + + with tarfile.open(archive_path, "w:gz") as archive: + metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata_str = json.dumps(metadata) + metadata_bytes = BytesIO(metadata_str.encode("utf-8")) + metadata_file_info = tarfile.TarInfo("metadata.json") + metadata_file_info.size = len(metadata_str) + archive.addfile(metadata_file_info, metadata_bytes) + archive.add(symlink_path, "symlink") + archive.add(file_path, "symlink/unsafe") + archive.close() + + submission = export.SDExport(archive_path, TEST_CONFIG) + + with pytest.raises(SystemExit): + submission.extract_tarball() + + assert not os.path.exists('/tmp/unsafe') + + +def test_extract_tarball_raises_if_linkname_has_unsafe_absolute_path(): + """ + Check that we do not allow tarfile member file to specify an unsafe absolute path via + TarInfo.linkname. 
+ """ + if os.path.exists("/tmp/unsafe"): + os.remove("/tmp/unsafe") + with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: @@ -122,7 +344,7 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): symlink_info = tarfile.TarInfo("symlink") symlink_info.size = len(content) symlink_info.type = tarfile.SYMTYPE - symlink_info.linkname = "../../../../../../../../../tmp/traversed2" + symlink_info.linkname = "/tmp/unsafe" archive.addfile(symlink_info, BytesIO(content)) archive.close() @@ -131,7 +353,7 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists(os.path.join(submission.tmpdir, "symlink")) + assert not os.path.exists('/tmp/unsafe') def test_exit_gracefully_no_exception(capsys): From e6fdccd3d7a1d577aa8fc909f16e89fba7c04bd1 Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 26 Mar 2021 13:32:33 -0400 Subject: [PATCH 229/352] seucredrop-export 0.2.6 --- changelog.md | 3 +++ securedrop_export/VERSION | 2 +- securedrop_export/__init__.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/changelog.md b/changelog.md index 0b31003fa..5653795a1 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,8 @@ # Changelog +## 0.2.6 + * Further validate target paths + ## 0.2.5 * Sets restrictive permissions, validates target paths diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index 3a4036fb4..53a75d673 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.2.5 +0.2.6 diff --git a/securedrop_export/__init__.py b/securedrop_export/__init__.py index 13a85f774..44b18069b 100644 --- a/securedrop_export/__init__.py +++ b/securedrop_export/__init__.py @@ -1 +1 @@ -__version__ = '0.2.5' +__version__ = '0.2.6' From bb2d58ac39c9e67d67b8e9dd3bfab844a74bce8d Mon Sep 17 00:00:00 2001 From: 
Allie Crevier Date: Mon, 5 Apr 2021 14:05:10 -0700 Subject: [PATCH 230/352] use new reproducible wheel from packaging with build --- build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-requirements.txt b/build-requirements.txt index c5c3c27cd..fd3cc471c 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -1,7 +1,7 @@ certifi==2018.10.15 --hash=sha256:2d5538b9d89b3a9cb423b8f08417c657856cab733d560067e864157f71b1c5aa chardet==3.0.4 --hash=sha256:e5cf39014befb85add77118fdc946f0a3387df7840235ba5d20fd6e3a672410a furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc -idna==2.7 --hash=sha256:a967a752dba5492be34f30f8633cee512d2ec923fe3a2e201d87353b540d2955 +idna==2.7 --hash=sha256:69bbcd9c42b0add994610a68202532e9b327992b61344cd76e743ee592725f50 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f pyyaml==5.4.1 --hash=sha256:c2ab2737721fe6f5545fa830312fe7b77e74932c8bbd780a17a3d7df2ff65f99 requests==2.22.0 --hash=sha256:83ddbb326afd5524e7fbca582fd9673103652ea3b0c6601ac5ba1a4501f077c9 From 46618848668bddd97f444ce1e75c732f886f30fa Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Mon, 5 Apr 2021 16:50:48 -0700 Subject: [PATCH 231/352] Refresh all test requirements --- test-requirements.in | 3 + test-requirements.txt | 235 ++++++++++++++++++++++++------------------ 2 files changed, 135 insertions(+), 103 deletions(-) diff --git a/test-requirements.in b/test-requirements.in index 024c0a86a..09ef16b5d 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1,4 +1,5 @@ flake8 +importlib-metadata # otherwise introduced unpinned via flake8 pathlib2 # required by pytest for python 3.5 pip-tools py>=1.9.0 @@ -6,3 +7,5 @@ pytest pytest-cov pytest-mock semgrep==0.42.0 +typing-extensions # otherwise introduced unpinned via importlib-metadata +zipp # otherwise introduced unpinned via pep517 (via pip-tools) diff --git 
a/test-requirements.txt b/test-requirements.txt index 107e041fc..610ede02e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,12 +2,8 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --generate-hashes --output-file=test-requirements.txt test-requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=test-requirements.txt test-requirements.in # -atomicwrites==1.3.0 \ - --hash=sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4 \ - --hash=sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6 - # via pytest attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 @@ -23,67 +19,84 @@ chardet==4.0.0 \ --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 # via requests -click==7.0 \ - --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ - --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 +click==7.1.2 \ + --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \ + --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc # via pip-tools colorama==0.4.4 \ --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 # via semgrep -coverage==4.5.4 \ - --hash=sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6 \ - --hash=sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650 \ - --hash=sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5 \ - --hash=sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d \ - 
--hash=sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351 \ - --hash=sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755 \ - --hash=sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef \ - --hash=sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca \ - --hash=sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca \ - --hash=sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9 \ - --hash=sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc \ - --hash=sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5 \ - --hash=sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f \ - --hash=sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe \ - --hash=sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888 \ - --hash=sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5 \ - --hash=sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce \ - --hash=sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5 \ - --hash=sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e \ - --hash=sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e \ - --hash=sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9 \ - --hash=sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437 \ - --hash=sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1 \ - --hash=sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c \ - --hash=sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24 \ - --hash=sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47 \ - --hash=sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2 \ - --hash=sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28 \ - 
--hash=sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c \ - --hash=sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7 \ - --hash=sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0 \ - --hash=sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025 +coverage==5.5 \ + --hash=sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c \ + --hash=sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6 \ + --hash=sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45 \ + --hash=sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a \ + --hash=sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03 \ + --hash=sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529 \ + --hash=sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a \ + --hash=sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a \ + --hash=sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2 \ + --hash=sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6 \ + --hash=sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759 \ + --hash=sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53 \ + --hash=sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a \ + --hash=sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4 \ + --hash=sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff \ + --hash=sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502 \ + --hash=sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793 \ + --hash=sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb \ + --hash=sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905 \ + 
--hash=sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821 \ + --hash=sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b \ + --hash=sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81 \ + --hash=sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0 \ + --hash=sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b \ + --hash=sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3 \ + --hash=sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184 \ + --hash=sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701 \ + --hash=sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a \ + --hash=sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82 \ + --hash=sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638 \ + --hash=sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5 \ + --hash=sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083 \ + --hash=sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6 \ + --hash=sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90 \ + --hash=sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465 \ + --hash=sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a \ + --hash=sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3 \ + --hash=sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e \ + --hash=sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066 \ + --hash=sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf \ + --hash=sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b \ + --hash=sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae \ + --hash=sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669 \ + 
--hash=sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873 \ + --hash=sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b \ + --hash=sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6 \ + --hash=sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb \ + --hash=sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160 \ + --hash=sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c \ + --hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ + --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ + --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 # via pytest-cov -entrypoints==0.3 \ - --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ - --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 - # via flake8 -flake8==3.7.8 \ - --hash=sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548 \ - --hash=sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696 +flake8==3.9.0 \ + --hash=sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff \ + --hash=sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0 # via -r test-requirements.in idna==2.10 \ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 # via requests -importlib-metadata==0.18 \ - --hash=sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7 \ - --hash=sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db - # via - # jsonschema - # pluggy - # pytest +importlib-metadata==3.10.0 \ + --hash=sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a \ + --hash=sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe + # via -r 
test-requirements.in +iniconfig==1.1.1 \ + --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ + --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 + # via pytest jsonschema==3.2.0 \ --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a @@ -95,11 +108,6 @@ mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f # via flake8 -more-itertools==5.0.0 \ - --hash=sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4 \ - --hash=sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc \ - --hash=sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9 - # via pytest packaging==20.9 \ --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \ --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a @@ -110,43 +118,50 @@ pathlib2==2.3.5 \ --hash=sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db \ --hash=sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868 # via -r test-requirements.in -pip-tools==4.2.0 \ - --hash=sha256:123174aabf7f4a63dd6e0bfc8aeeb5eaddbecb75a41e9f0dd4c447b1f2de14f7 \ - --hash=sha256:5427ea4dcc175649723985fbcace9b2d8f46f9adbcc63bc2d7b247d9bcc74917 +pep517==0.10.0 \ + --hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ + --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 + # via pip-tools +pip-tools==6.0.1 \ + --hash=sha256:3b0c7b95e8d3dfb011bb42cb38f356fcf5d0630480462b59c4d0a112b8d90281 \ + --hash=sha256:50ec26df7710557ab574f19f7511830294999e6121b42b87473b48cb9984d788 # via -r test-requirements.in -pluggy==0.12.0 \ - 
--hash=sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc \ - --hash=sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c +pluggy==0.13.1 \ + --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ + --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d # via pytest py==1.10.0 \ --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a -pycodestyle==2.5.0 \ - --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ - --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c + # via + # -r test-requirements.in + # pytest +pycodestyle==2.7.0 \ + --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ + --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef # via flake8 -pyflakes==2.1.1 \ - --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \ - --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 +pyflakes==2.3.1 \ + --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ + --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db # via flake8 -pyparsing==2.4.1.1 \ - --hash=sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580 \ - --hash=sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265 +pyparsing==2.4.7 \ + --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ + --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b # via packaging pyrsistent==0.17.3 \ --hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e # via jsonschema -pytest-cov==2.8.1 \ - --hash=sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b \ - 
--hash=sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626 +pytest-cov==2.11.1 \ + --hash=sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7 \ + --hash=sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da # via -r test-requirements.in -pytest-mock==1.12.1 \ - --hash=sha256:96a0cebc66e09930be2a15b03333d90b59584d3fb011924f81c14b50ee0afbba \ - --hash=sha256:e5381be2608e49547f5e47633c5f81241ebf6206d17ce516a7a18d5a917e3859 +pytest-mock==3.5.1 \ + --hash=sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e \ + --hash=sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc # via -r test-requirements.in -pytest==4.6.4 \ - --hash=sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae \ - --hash=sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6 +pytest==6.2.3 \ + --hash=sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634 \ + --hash=sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc # via # -r test-requirements.in # pytest-cov @@ -197,33 +212,47 @@ semgrep==0.42.0 \ --hash=sha256:376b7a25817a24b32302f49656ea0ddcb2e535de2b05fdf42646f0bd4f33957e \ --hash=sha256:e50ac0028b98f344166d2464853009837aed9abe669deac93fec04b677b97d2c # via -r test-requirements.in -six==1.12.0 \ - --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ - --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 +six==1.15.0 \ + --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ + --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced # via # jsonschema # junit-xml - # more-itertools # pathlib2 - # pip-tools +toml==0.10.2 \ + --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ + --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via + # pep517 # pytest tqdm==4.59.0 \ 
--hash=sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7 \ --hash=sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33 # via semgrep -urllib3==1.26.3 \ - --hash=sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80 \ - --hash=sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73 +typing-extensions==3.7.4.3 \ + --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ + --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ + --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f + # via -r test-requirements.in +urllib3==1.26.4 \ + --hash=sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df \ + --hash=sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937 # via requests -wcwidth==0.1.7 \ - --hash=sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e \ - --hash=sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c - # via pytest -zipp==0.5.2 \ - --hash=sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a \ - --hash=sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec - # via importlib-metadata +zipp==3.4.1 \ + --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ + --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 + # via + # -r test-requirements.in + # importlib-metadata -# WARNING: The following packages were not pinned, but pip requires them to be -# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. 
-# setuptools +# The following packages are considered to be unsafe in a requirements file: +pip==21.0.1 \ + --hash=sha256:37fd50e056e2aed635dec96594606f0286640489b0db0ce7607f7e51890372d5 \ + --hash=sha256:99bbde183ec5ec037318e774b0d8ae0a64352fe53b2c7fd630be1d07e94f41e5 + # via pip-tools +setuptools==54.2.0 \ + --hash=sha256:aa9c24fb83a9116b8d425e53bec24c7bfdbffc313c2159f9ed036d4a6dd32d7d \ + --hash=sha256:b726461910b9ba30f077880c228bea22121aec50b172edf39eb7ff026c054a11 + # via + # jsonschema + # semgrep From de638b860ddf62342f4cebbc076c0ab928e2030c Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Fri, 9 Apr 2021 09:07:45 -0700 Subject: [PATCH 232/352] Add link to Code of Conduct to README --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index df6198d8c..de9b10490 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +> By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). + ## securedrop workstation proxy [![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-proxy.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-proxy) From 2ba05e2f7bc0a8260376f4b7bcea12104dd6147e Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Fri, 9 Apr 2021 09:18:58 -0700 Subject: [PATCH 233/352] Add link to Code of Conduct to README --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 0379fca7b..888f606c4 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,12 @@ +> By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). + # securedrop-log This is a Python module and qrexec service for logging in Qubes for [SecureDrop](https://securedrop.org). ## How to use/try this? 
-In our example, we will use a vm named *logging* for storing logs, and we will use +In our example, we will use a vm named *logging* for storing logs, and we will use *workvm* to send in logs to the *logging* vm. ### In dom0 @@ -48,7 +50,7 @@ Put `sd-rsyslog-example.conf` file to `/etc/sd-rsyslog.conf`, make sure update it so that is shows the right **localvm** name. Copy `sd-rsyslog` executable to **/usr/sbin**, and remember to `chmod +x` -the binary. +the binary. Next, restart the rsyslog service. @@ -81,4 +83,3 @@ Or use the logger command. ``` logger This line should show in the syslog.log file in the sd-log file. ``` - From 9e4dcf450090f91d526380ce7d944b6cf0ed9f7c Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Fri, 9 Apr 2021 09:12:07 -0700 Subject: [PATCH 234/352] Add link to Code of Conduct to README --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 9abfc7190..7572ee30c 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +> By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). 
+ [![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-export.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-export) # securedrop-export From fcb05d2bb8b41b2aaf538169055e29ec72be98cc Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Tue, 13 Apr 2021 18:25:04 +0530 Subject: [PATCH 235/352] Updates the wheel sha256sum for PyYAML --- build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-requirements.txt b/build-requirements.txt index fd3cc471c..824f7b9bf 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -3,7 +3,7 @@ chardet==3.0.4 --hash=sha256:e5cf39014befb85add77118fdc946f0a3387df7840235ba5d20 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc idna==2.7 --hash=sha256:69bbcd9c42b0add994610a68202532e9b327992b61344cd76e743ee592725f50 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f -pyyaml==5.4.1 --hash=sha256:c2ab2737721fe6f5545fa830312fe7b77e74932c8bbd780a17a3d7df2ff65f99 +pyyaml==5.4.1 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 requests==2.22.0 --hash=sha256:83ddbb326afd5524e7fbca582fd9673103652ea3b0c6601ac5ba1a4501f077c9 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.25.10 --hash=sha256:c78fdeffd1a01640ea99d35851539a4bc91e46a672989c4d96663e3808121389 From e8ec8a3afc5024dacf391de005c80c011833b205 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Thu, 29 Apr 2021 17:42:36 -0700 Subject: [PATCH 236/352] update semgrep and deps --- test-requirements.in | 4 +-- test-requirements.txt | 78 +++++++++++++++++++++---------------------- 2 files changed, 40 insertions(+), 42 deletions(-) diff --git a/test-requirements.in b/test-requirements.in index 09ef16b5d..008c85a3d 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -6,6 +6,6 @@ py>=1.9.0 pytest pytest-cov pytest-mock -semgrep==0.42.0 
+semgrep==0.49.0 typing-extensions # otherwise introduced unpinned via importlib-metadata -zipp # otherwise introduced unpinned via pep517 (via pip-tools) +zipp # otherwise introduced unpinned via pep517 (via pip-tools) \ No newline at end of file diff --git a/test-requirements.txt b/test-requirements.txt index 610ede02e..9a1709c3a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --allow-unsafe --generate-hashes --output-file=test-requirements.txt test-requirements.in +# pip-compile --generate-hashes --output-file=test-requirements.txt test-requirements.in # attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ @@ -81,18 +81,24 @@ coverage==5.5 \ --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 # via pytest-cov -flake8==3.9.0 \ - --hash=sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff \ - --hash=sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0 +flake8==3.9.1 \ + --hash=sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378 \ + --hash=sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a # via -r test-requirements.in idna==2.10 \ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 # via requests -importlib-metadata==3.10.0 \ - --hash=sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a \ - --hash=sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe - # via -r test-requirements.in +importlib-metadata==4.0.1 \ + --hash=sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581 \ + --hash=sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d + # via + # -r 
test-requirements.in + # flake8 + # jsonschema + # pep517 + # pluggy + # pytest iniconfig==1.1.1 \ --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 @@ -101,9 +107,6 @@ jsonschema==3.2.0 \ --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a # via semgrep -junit-xml==1.9 \ - --hash=sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732 - # via semgrep mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f @@ -122,9 +125,9 @@ pep517==0.10.0 \ --hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 # via pip-tools -pip-tools==6.0.1 \ - --hash=sha256:3b0c7b95e8d3dfb011bb42cb38f356fcf5d0630480462b59c4d0a112b8d90281 \ - --hash=sha256:50ec26df7710557ab574f19f7511830294999e6121b42b87473b48cb9984d788 +pip-tools==6.1.0 \ + --hash=sha256:197e3f8839095ccec3ad1ef410e0804c07d9f17dff1c340fb417ca2b63feacc9 \ + --hash=sha256:400bf77e29cca48c31abc210042932bb52dcc138ef4ea4d52c5db429aa8ae6ee # via -r test-requirements.in pluggy==0.13.1 \ --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ @@ -155,9 +158,9 @@ pytest-cov==2.11.1 \ --hash=sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7 \ --hash=sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da # via -r test-requirements.in -pytest-mock==3.5.1 \ - --hash=sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e \ - --hash=sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc +pytest-mock==3.6.0 \ + --hash=sha256:952139a535b5b48ac0bb2f90b5dd36b67c7e1ba92601f3a8012678c4bd7f0bcc \ + 
--hash=sha256:f7c3d42d6287f4e45846c8231c31902b6fa2bea98735af413a43da4cf5b727f1 # via -r test-requirements.in pytest==6.2.3 \ --hash=sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634 \ @@ -203,21 +206,20 @@ ruamel.yaml.clib==0.2.2 \ --hash=sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2 \ --hash=sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f # via ruamel.yaml -ruamel.yaml==0.16.10 \ - --hash=sha256:0962fd7999e064c4865f96fb1e23079075f4a2a14849bcdc5cdba53a24f9759b \ - --hash=sha256:099c644a778bf72ffa00524f78dd0b6476bca94a1da344130f4bf3381ce5b954 +ruamel.yaml==0.17.4 \ + --hash=sha256:44bc6b54fddd45e4bc0619059196679f9e8b79c027f4131bb072e6a22f4d5e28 \ + --hash=sha256:ac79fb25f5476e8e9ed1c53b8a2286d2c3f5dde49eb37dbcee5c7eb6a8415a22 # via semgrep -semgrep==0.42.0 \ - --hash=sha256:179741ce6f8f6785d048af5402bb2452a8771d4282f8aa7cb6852a5adad79fe8 \ - --hash=sha256:376b7a25817a24b32302f49656ea0ddcb2e535de2b05fdf42646f0bd4f33957e \ - --hash=sha256:e50ac0028b98f344166d2464853009837aed9abe669deac93fec04b677b97d2c +semgrep==0.49.0 \ + --hash=sha256:99b2bae87afc824a232063c91524fda2039699733f34ba28fc963f2b76228f1a \ + --hash=sha256:cca38e34824bdf17aeae6e7a8fc3ce0ac441fec51b207783a469fff9994eb08b \ + --hash=sha256:e49c84f452809af7895a09f06ce48bd86fdbb7b09482fef99f528da2afb71b46 # via -r test-requirements.in six==1.15.0 \ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced # via # jsonschema - # junit-xml # pathlib2 toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ @@ -225,15 +227,17 @@ toml==0.10.2 \ # via # pep517 # pytest -tqdm==4.59.0 \ - --hash=sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7 \ - --hash=sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33 +tqdm==4.60.0 \ + 
--hash=sha256:daec693491c52e9498632dfbe9ccfc4882a557f5fa08982db1b4d3adbe0887c3 \ + --hash=sha256:ebdebdb95e3477ceea267decfc0784859aa3df3e27e22d23b83e9b272bf157ae # via semgrep typing-extensions==3.7.4.3 \ --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f - # via -r test-requirements.in + # via + # -r test-requirements.in + # importlib-metadata urllib3==1.26.4 \ --hash=sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df \ --hash=sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937 @@ -244,15 +248,9 @@ zipp==3.4.1 \ # via # -r test-requirements.in # importlib-metadata + # pep517 -# The following packages are considered to be unsafe in a requirements file: -pip==21.0.1 \ - --hash=sha256:37fd50e056e2aed635dec96594606f0286640489b0db0ce7607f7e51890372d5 \ - --hash=sha256:99bbde183ec5ec037318e774b0d8ae0a64352fe53b2c7fd630be1d07e94f41e5 - # via pip-tools -setuptools==54.2.0 \ - --hash=sha256:aa9c24fb83a9116b8d425e53bec24c7bfdbffc313c2159f9ed036d4a6dd32d7d \ - --hash=sha256:b726461910b9ba30f077880c228bea22121aec50b172edf39eb7ff026c054a11 - # via - # jsonschema - # semgrep +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. 
+# pip +# setuptools From 36b95b97e0680c2789e49aa79f0c7fc8314a16e9 Mon Sep 17 00:00:00 2001 From: mickael e Date: Fri, 30 Apr 2021 15:03:16 -0400 Subject: [PATCH 237/352] Also run semgrep in ci --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 153924c04..338e22a92 100644 --- a/Makefile +++ b/Makefile @@ -13,7 +13,7 @@ update-pip-requirements: ## Updates all Python requirements files via pip-compil pip-compile --generate-hashes --output-file test-requirements.txt test-requirements.in .PHONY: check -check: lint test ## Run linter and tests +check: lint semgrep test ## Run linter and tests TESTS ?= tests .PHONY: test From 7a09df7b0b64670e339add27979b73a40192dd6f Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Thu, 29 Apr 2021 17:29:05 -0700 Subject: [PATCH 238/352] make relpath traversal check clearer --- securedrop_export/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/securedrop_export/utils.py b/securedrop_export/utils.py index 8aeaba86e..219d0e963 100644 --- a/securedrop_export/utils.py +++ b/securedrop_export/utils.py @@ -101,7 +101,7 @@ def check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: if filename_or_filepath.is_absolute(): base_path = filename_or_filepath else: - base_path = Path().resolve() + base_path = Path.cwd() # use cwd so we can next ensure relative path does not traverse up try: relative_path = relative_filepath(filename_or_filepath, base_path) From 7d351b28fa5a31b91a9a40bedce8153f2cc6cdd7 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 30 Apr 2021 12:38:10 -0700 Subject: [PATCH 239/352] cleanup project root directory Signed-off-by: Allie Crevier --- .circleci/config.yml | 4 ++-- MANIFEST.in | 4 ++-- Makefile | 2 +- build-requirements.txt => requirements/build-requirements.txt | 0 requirements.txt => requirements/requirements.txt | 0 test-requirements.in => requirements/test-requirements.in | 0 test-requirements.txt => 
requirements/test-requirements.txt | 0 7 files changed, 5 insertions(+), 5 deletions(-) rename build-requirements.txt => requirements/build-requirements.txt (100%) rename requirements.txt => requirements/requirements.txt (100%) rename test-requirements.in => requirements/test-requirements.in (100%) rename test-requirements.txt => requirements/test-requirements.txt (100%) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1bba7c98a..1d00acdbb 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,7 +17,7 @@ common-steps: command: | virtualenv .venv source .venv/bin/activate - pip install --require-hashes -r test-requirements.txt + pip install --require-hashes -r requirements/test-requirements.txt make test - &install_packaging_dependencies @@ -69,7 +69,7 @@ jobs: command: | virtualenv .venv source .venv/bin/activate - pip install --require-hashes -r test-requirements.txt + pip install --require-hashes -r requirements/test-requirements.txt make lint - run: name: Check Python dependencies for CVEs diff --git a/MANIFEST.in b/MANIFEST.in index ad64433d6..4322c8cb2 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,8 +2,8 @@ include LICENSE include README.md include securedrop_export/VERSION include changelog.md -include build-requirements.txt -include requirements.txt +include requirements/build-requirements.txt +include requirements/requirements.txt include securedrop_export/*.py include setup.py include files/send-to-usb.desktop diff --git a/Makefile b/Makefile index 338e22a92..91be0943e 100644 --- a/Makefile +++ b/Makefile @@ -10,7 +10,7 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities .PHONY: update-pip-requirements update-pip-requirements: ## Updates all Python requirements files via pip-compile. 
- pip-compile --generate-hashes --output-file test-requirements.txt test-requirements.in + pip-compile --generate-hashes --output-file requirements/test-requirements.txt requirements/test-requirements.in .PHONY: check check: lint semgrep test ## Run linter and tests diff --git a/build-requirements.txt b/requirements/build-requirements.txt similarity index 100% rename from build-requirements.txt rename to requirements/build-requirements.txt diff --git a/requirements.txt b/requirements/requirements.txt similarity index 100% rename from requirements.txt rename to requirements/requirements.txt diff --git a/test-requirements.in b/requirements/test-requirements.in similarity index 100% rename from test-requirements.in rename to requirements/test-requirements.in diff --git a/test-requirements.txt b/requirements/test-requirements.txt similarity index 100% rename from test-requirements.txt rename to requirements/test-requirements.txt From 93ab598e1eed4f6591a6a9a8b0d3a63c52b37622 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 30 Apr 2021 13:13:05 -0700 Subject: [PATCH 240/352] update pip requirements using requirements directory --- requirements/test-requirements.txt | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/requirements/test-requirements.txt b/requirements/test-requirements.txt index 9a1709c3a..86f75e2a7 100644 --- a/requirements/test-requirements.txt +++ b/requirements/test-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --generate-hashes --output-file=test-requirements.txt test-requirements.in +# pip-compile --generate-hashes --output-file=requirements/test-requirements.txt requirements/test-requirements.in # attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ @@ -84,7 +84,7 @@ coverage==5.5 \ flake8==3.9.1 \ --hash=sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378 \ 
--hash=sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a - # via -r test-requirements.in + # via -r requirements/test-requirements.in idna==2.10 \ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 @@ -93,7 +93,7 @@ importlib-metadata==4.0.1 \ --hash=sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581 \ --hash=sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d # via - # -r test-requirements.in + # -r requirements/test-requirements.in # flake8 # jsonschema # pep517 @@ -120,7 +120,7 @@ packaging==20.9 \ pathlib2==2.3.5 \ --hash=sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db \ --hash=sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868 - # via -r test-requirements.in + # via -r requirements/test-requirements.in pep517==0.10.0 \ --hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 @@ -128,7 +128,7 @@ pep517==0.10.0 \ pip-tools==6.1.0 \ --hash=sha256:197e3f8839095ccec3ad1ef410e0804c07d9f17dff1c340fb417ca2b63feacc9 \ --hash=sha256:400bf77e29cca48c31abc210042932bb52dcc138ef4ea4d52c5db429aa8ae6ee - # via -r test-requirements.in + # via -r requirements/test-requirements.in pluggy==0.13.1 \ --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d @@ -137,7 +137,7 @@ py==1.10.0 \ --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a # via - # -r test-requirements.in + # -r requirements/test-requirements.in # pytest pycodestyle==2.7.0 \ --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ @@ -157,16 +157,16 @@ 
pyrsistent==0.17.3 \ pytest-cov==2.11.1 \ --hash=sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7 \ --hash=sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da - # via -r test-requirements.in + # via -r requirements/test-requirements.in pytest-mock==3.6.0 \ --hash=sha256:952139a535b5b48ac0bb2f90b5dd36b67c7e1ba92601f3a8012678c4bd7f0bcc \ --hash=sha256:f7c3d42d6287f4e45846c8231c31902b6fa2bea98735af413a43da4cf5b727f1 - # via -r test-requirements.in + # via -r requirements/test-requirements.in pytest==6.2.3 \ --hash=sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634 \ --hash=sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc # via - # -r test-requirements.in + # -r requirements/test-requirements.in # pytest-cov # pytest-mock requests==2.25.1 \ @@ -214,7 +214,7 @@ semgrep==0.49.0 \ --hash=sha256:99b2bae87afc824a232063c91524fda2039699733f34ba28fc963f2b76228f1a \ --hash=sha256:cca38e34824bdf17aeae6e7a8fc3ce0ac441fec51b207783a469fff9994eb08b \ --hash=sha256:e49c84f452809af7895a09f06ce48bd86fdbb7b09482fef99f528da2afb71b46 - # via -r test-requirements.in + # via -r requirements/test-requirements.in six==1.15.0 \ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced @@ -236,7 +236,7 @@ typing-extensions==3.7.4.3 \ --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f # via - # -r test-requirements.in + # -r requirements/test-requirements.in # importlib-metadata urllib3==1.26.4 \ --hash=sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df \ @@ -246,7 +246,7 @@ zipp==3.4.1 \ --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 # via - # -r test-requirements.in + # 
-r requirements/test-requirements.in # importlib-metadata # pep517 From e1e6c194c205c32c77c486f8d2b47389abe675a2 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 7 May 2021 14:59:26 -0700 Subject: [PATCH 241/352] add venv make target --- Makefile | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Makefile b/Makefile index 9c754f119..992cc6298 100644 --- a/Makefile +++ b/Makefile @@ -3,6 +3,14 @@ .PHONY: all all: help +.PHONY: venv +venv: + python3 -m venv .venv + ## Good idea to upgrade pip and wheel when you create a new virtual environment. + ## Or you could use the virtualenv command instead. + .venv/bin/pip install --upgrade pip wheel + .venv/bin/pip install --require-hashes -r "dev-requirements.txt" + .PHONY: bandit bandit: ## Run bandit with medium level excluding test-related folders pip install --upgrade pip && \ From 8ba39f20361c3011dd58be7229881005eb61af71 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 7 May 2021 16:53:55 -0700 Subject: [PATCH 242/352] update dev dependencies Signed-off-by: Allie Crevier --- Makefile | 2 +- dev-requirements.in | 30 +-- dev-requirements.txt | 577 +++++++++++++++++++++++++++---------------- requirements.txt | 45 ++-- 4 files changed, 410 insertions(+), 244 deletions(-) diff --git a/Makefile b/Makefile index 992cc6298..68a156801 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,7 @@ isort: ## Run isort for file formatting .PHONY: update-pip-requirements update-pip-requirements: ## Updates all Python requirements files via pip-compile. 
- pip-compile --allow-unsafe --generate-hashes --output-file dev-requirements.txt dev-requirements.in requirements.in + pip-compile --generate-hashes --allow-unsafe --upgrade --output-file dev-requirements.txt dev-requirements.in requirements.in pip-compile --generate-hashes --output-file requirements.txt requirements.in .PHONY: test diff --git a/dev-requirements.in b/dev-requirements.in index e4fe026d6..238652e24 100644 --- a/dev-requirements.in +++ b/dev-requirements.in @@ -1,15 +1,15 @@ -black==19.10b0 -coverage==5.0 -flake8==3.6.0 -isort==4.3.21 -mccabe==0.6.1 -multidict==4.4.2 -mypy==0.761 -mypy-extensions==0.4.3 -pip-tools>=5.0.0 -pycodestyle==2.4.0 -pyflakes==2.0.0 -six==1.11.0 -vcrpy==2.0.1 -wrapt==1.10.11 -yarl==1.2.6 +black +coverage +flake8 +isort +mccabe +multidict +mypy +mypy-extensions +pip-tools +pycodestyle +pyflakes +six +vcrpy +wrapt +yarl diff --git a/dev-requirements.txt b/dev-requirements.txt index daa7e02d9..b14a54c05 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -4,154 +4,219 @@ # # pip-compile --allow-unsafe --generate-hashes --output-file=dev-requirements.txt dev-requirements.in requirements.in # -appdirs==1.4.3 \ - --hash=sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92 \ - --hash=sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e \ +appdirs==1.4.4 \ + --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ + --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 # via black -attrs==19.3.0 \ - --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ - --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 \ - # via black -black==19.10b0 \ - --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \ - --hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 \ +black==21.5b1 \ + 
--hash=sha256:23695358dbcb3deafe7f0a3ad89feee5999a46be5fec21f4f1d108be0bcdb3b1 \ + --hash=sha256:8a60071a0043876a4ae96e6c69bd3a127dad2c1ca7c8083573eb82f92705d008 # via -r dev-requirements.in certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ - --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a \ - # via -r requirements.in, requests + --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a + # via + # -r requirements.in + # requests chardet==3.0.4 \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ - --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ - # via -r requirements.in, requests -click==7.0 \ - --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ - --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ - # via black, pip-tools -coverage==5.0 \ - --hash=sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351 \ - --hash=sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd \ - --hash=sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde \ - --hash=sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898 \ - --hash=sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070 \ - --hash=sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e \ - --hash=sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8 \ - --hash=sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0 \ - --hash=sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02 \ - --hash=sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798 \ - --hash=sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466 \ - --hash=sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be \ - 
--hash=sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d \ - --hash=sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6 \ - --hash=sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207 \ - --hash=sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d \ - --hash=sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b \ - --hash=sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a \ - --hash=sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b \ - --hash=sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be \ - --hash=sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72 \ - --hash=sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d \ - --hash=sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864 \ - --hash=sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f \ - --hash=sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f \ - --hash=sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e \ - --hash=sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1 \ - --hash=sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c \ - --hash=sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca \ - --hash=sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db \ - --hash=sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 + # via + # -r requirements.in + # requests +click==8.0.0 \ + --hash=sha256:7d8c289ee437bcb0316820ccee14aefcb056e58d31830ecab8e47eda6540e136 \ + --hash=sha256:e90e62ced43dc8105fb9a26d62f0d9340b5c8db053a814e25d95c19873ae87db + # via + # black + # pip-tools +coverage==5.5 \ + 
--hash=sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c \ + --hash=sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6 \ + --hash=sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45 \ + --hash=sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a \ + --hash=sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03 \ + --hash=sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529 \ + --hash=sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a \ + --hash=sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a \ + --hash=sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2 \ + --hash=sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6 \ + --hash=sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759 \ + --hash=sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53 \ + --hash=sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a \ + --hash=sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4 \ + --hash=sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff \ + --hash=sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502 \ + --hash=sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793 \ + --hash=sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb \ + --hash=sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905 \ + --hash=sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821 \ + --hash=sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b \ + --hash=sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81 \ + --hash=sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0 \ + --hash=sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b \ + 
--hash=sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3 \ + --hash=sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184 \ + --hash=sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701 \ + --hash=sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a \ + --hash=sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82 \ + --hash=sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638 \ + --hash=sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5 \ + --hash=sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083 \ + --hash=sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6 \ + --hash=sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90 \ + --hash=sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465 \ + --hash=sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a \ + --hash=sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3 \ + --hash=sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e \ + --hash=sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066 \ + --hash=sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf \ + --hash=sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b \ + --hash=sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae \ + --hash=sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669 \ + --hash=sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873 \ + --hash=sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b \ + --hash=sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6 \ + --hash=sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb \ + --hash=sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160 \ + 
--hash=sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c \ + --hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ + --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ + --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 # via -r dev-requirements.in -flake8==3.6.0 \ - --hash=sha256:6a35f5b8761f45c5513e3405f110a86bea57982c3b75b766ce7b65217abe1670 \ - --hash=sha256:c01f8a3963b3571a8e6bd7a4063359aff90749e160778e03817cd9b71c9e07d2 \ +flake8==3.9.2 \ + --hash=sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b \ + --hash=sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907 # via -r dev-requirements.in furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ - --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec \ + --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec # via -r requirements.in idna==2.7 \ --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ - --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 \ - # via -r requirements.in, requests, yarl -isort==4.3.21 \ - --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ - --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \ + --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 + # via + # -r requirements.in + # requests + # yarl +importlib-metadata==4.0.1 \ + --hash=sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581 \ + --hash=sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d + # via + # flake8 + # pep517 +isort==5.8.0 \ + --hash=sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6 \ + --hash=sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d # via -r 
dev-requirements.in mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ - --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ - # via -r dev-requirements.in, flake8 -multidict==4.4.2 \ - --hash=sha256:05eeab69bf2b0664644c62bd92fabb045163e5b8d4376a31dfb52ce0210ced7b \ - --hash=sha256:0c85880efa7cadb18e3b5eef0aa075dc9c0a3064cbbaef2e20be264b9cf47a64 \ - --hash=sha256:136f5a4a6a4adeacc4dc820b8b22f0a378fb74f326e259c54d1817639d1d40a0 \ - --hash=sha256:14906ad3347c7d03e9101749b16611cf2028547716d0840838d3c5e2b3b0f2d3 \ - --hash=sha256:1ade4a3b71b1bf9e90c5f3d034a87fe4949c087ef1f6cd727fdd766fe8bbd121 \ - --hash=sha256:22939a00a511a59f9ecc0158b8db728afef57975ce3782b3a265a319d05b9b12 \ - --hash=sha256:2b86b02d872bc5ba5b3a4530f6a7ba0b541458ab4f7c1429a12ac326231203f7 \ - --hash=sha256:3c11e92c3dfc321014e22fb442bc9eb70e01af30d6ce442026b0c35723448c66 \ - --hash=sha256:4ba3bd26f282b201fdbce351f1c5d17ceb224cbedb73d6e96e6ce391b354aacc \ - --hash=sha256:4c6e78d042e93751f60672989efbd6a6bc54213ed7ff695fff82784bbb9ea035 \ - --hash=sha256:4d80d1901b89cc935a6cf5b9fd89df66565272722fe2e5473168927a9937e0ca \ - --hash=sha256:4fcf71d33178a00cc34a57b29f5dab1734b9ce0f1c97fb34666deefac6f92037 \ - --hash=sha256:52f7670b41d4b4d97866ebc38121de8bcb9813128b7c4942b07794d08193c0ab \ - --hash=sha256:5368e2b7649a26b7253c6c9e53241248aab9da49099442f5be238fde436f18c9 \ - --hash=sha256:5bb65fbb48999044938f0c0508e929b14a9b8bf4939d8263e9ea6691f7b54663 \ - --hash=sha256:60672bb5577472800fcca1ac9dae232d1461db9f20f055184be8ce54b0052572 \ - --hash=sha256:669e9be6d148fc0283f53e17dd140cde4dc7c87edac8319147edd5aa2a830771 \ - --hash=sha256:6a0b7a804e8d1716aa2c72e73210b48be83d25ba9ec5cf52cf91122285707bb1 \ - --hash=sha256:79034ea3da3cf2a815e3e52afdc1f6c1894468c98bdce5d2546fa2342585497f \ - --hash=sha256:79247feeef6abcc11137ad17922e865052f23447152059402fc320f99ff544bb \ - --hash=sha256:81671c2049e6bf42c7fd11a060f8bc58f58b7b3d6f3f951fc0b15e376a6a5a98 
\ - --hash=sha256:82ac4a5cb56cc9280d4ae52c2d2ebcd6e0668dd0f9ef17f0a9d7c82bd61e24fa \ - --hash=sha256:9436267dbbaa49dad18fbbb54f85386b0f5818d055e7b8e01d219661b6745279 \ - --hash=sha256:94e4140bb1343115a1afd6d84ebf8fca5fb7bfb50e1c2cbd6f2fb5d3117ef102 \ - --hash=sha256:a2cab366eae8a0ffe0813fd8e335cf0d6b9bb6c5227315f53bb457519b811537 \ - --hash=sha256:a596019c3eafb1b0ae07db9f55a08578b43c79adb1fe1ab1fd818430ae59ee6f \ - --hash=sha256:e8848ae3cd6a784c29fae5055028bee9bffcc704d8bcad09bd46b42b44a833e2 \ - --hash=sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306 \ - --hash=sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd \ - # via -r dev-requirements.in, yarl + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f + # via + # -r dev-requirements.in + # flake8 +multidict==5.1.0 \ + --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ + --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \ + --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \ + --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \ + --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \ + --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \ + --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \ + --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \ + --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \ + --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \ + --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \ + --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \ + --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \ + 
--hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \ + --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \ + --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \ + --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \ + --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \ + --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \ + --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \ + --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \ + --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \ + --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \ + --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \ + --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \ + --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \ + --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \ + --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \ + --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \ + --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \ + --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \ + --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \ + --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \ + --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \ + --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \ + --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ + --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 + # via + # 
-r dev-requirements.in + # yarl mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ - --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 \ - # via -r dev-requirements.in, mypy -mypy==0.761 \ - --hash=sha256:0a9a45157e532da06fe56adcfef8a74629566b607fa2c1ac0122d1ff995c748a \ - --hash=sha256:2c35cae79ceb20d47facfad51f952df16c2ae9f45db6cb38405a3da1cf8fc0a7 \ - --hash=sha256:4b9365ade157794cef9685791032521233729cb00ce76b0ddc78749abea463d2 \ - --hash=sha256:53ea810ae3f83f9c9b452582261ea859828a9ed666f2e1ca840300b69322c474 \ - --hash=sha256:634aef60b4ff0f650d3e59d4374626ca6153fcaff96ec075b215b568e6ee3cb0 \ - --hash=sha256:7e396ce53cacd5596ff6d191b47ab0ea18f8e0ec04e15d69728d530e86d4c217 \ - --hash=sha256:7eadc91af8270455e0d73565b8964da1642fe226665dd5c9560067cd64d56749 \ - --hash=sha256:7f672d02fffcbace4db2b05369142e0506cdcde20cea0e07c7c2171c4fd11dd6 \ - --hash=sha256:85baab8d74ec601e86134afe2bcccd87820f79d2f8d5798c889507d1088287bf \ - --hash=sha256:87c556fb85d709dacd4b4cb6167eecc5bbb4f0a9864b69136a0d4640fdc76a36 \ - --hash=sha256:a6bd44efee4dc8c3324c13785a9dc3519b3ee3a92cada42d2b57762b7053b49b \ - --hash=sha256:c6d27bd20c3ba60d5b02f20bd28e20091d6286a699174dfad515636cb09b5a72 \ - --hash=sha256:e2bb577d10d09a2d8822a042a23b8d62bc3b269667c9eb8e60a6edfa000211b1 \ - --hash=sha256:f97a605d7c8bc2c6d1172c2f0d5a65b24142e11a58de689046e62c2d632ca8c1 \ + --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 + # via + # -r dev-requirements.in + # black + # mypy +mypy==0.812 \ + --hash=sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e \ + --hash=sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064 \ + --hash=sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c \ + --hash=sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4 \ + --hash=sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97 \ 
+ --hash=sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df \ + --hash=sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8 \ + --hash=sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a \ + --hash=sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56 \ + --hash=sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7 \ + --hash=sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6 \ + --hash=sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5 \ + --hash=sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a \ + --hash=sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521 \ + --hash=sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564 \ + --hash=sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49 \ + --hash=sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66 \ + --hash=sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a \ + --hash=sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119 \ + --hash=sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506 \ + --hash=sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c \ + --hash=sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb # via -r dev-requirements.in orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ - --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 \ - # via -r requirements.in, furl -pathspec==0.7.0 \ - --hash=sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424 \ - --hash=sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96 \ + --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 + # via + # -r requirements.in + # furl +pathspec==0.8.1 \ + 
--hash=sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd \ + --hash=sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d # via black -pip-tools==5.3.1 \ - --hash=sha256:5672c2b6ca0f1fd803f3b45568c2cf7fadf135b4971e7d665232b2075544c0ef \ - --hash=sha256:73787e23269bf8a9230f376c351297b9037ed0d32ab0f9bef4a187d976acc054 \ +pep517==0.10.0 \ + --hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ + --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 + # via pip-tools +pip-tools==6.1.0 \ + --hash=sha256:197e3f8839095ccec3ad1ef410e0804c07d9f17dff1c340fb417ca2b63feacc9 \ + --hash=sha256:400bf77e29cca48c31abc210042932bb52dcc138ef4ea4d52c5db429aa8ae6ee # via -r dev-requirements.in -pycodestyle==2.4.0 \ - --hash=sha256:cbc619d09254895b0d12c2c691e237b2e91e9b2ecf5e84c26b35400f93dcfb83 \ - --hash=sha256:cbfca99bd594a10f674d0cd97a3d802a1fdef635d4361e1a2658de47ed261e3a \ - # via -r dev-requirements.in, flake8 -pyflakes==2.0.0 \ - --hash=sha256:9a7662ec724d0120012f6e29d6248ae3727d821bba522a0e6b356eff19126a49 \ - --hash=sha256:f661252913bc1dbe7fcfcbf0af0db3f42ab65aabd1a6ca68fe5d466bace94dae \ - # via -r dev-requirements.in, flake8 +pycodestyle==2.7.0 \ + --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ + --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef + # via + # -r dev-requirements.in + # flake8 +pyflakes==2.3.1 \ + --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ + --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db + # via + # -r dev-requirements.in + # flake8 pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ @@ -165,112 +230,200 @@ pyyaml==5.4.1 \ --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ 
--hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ + --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ + --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ + --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ + --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ + --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ - # via -r requirements.in, vcrpy -regex==2020.1.8 \ - --hash=sha256:07b39bf943d3d2fe63d46281d8504f8df0ff3fe4c57e13d1656737950e53e525 \ - --hash=sha256:0932941cdfb3afcbc26cc3bcf7c3f3d73d5a9b9c56955d432dbf8bbc147d4c5b \ - --hash=sha256:0e182d2f097ea8549a249040922fa2b92ae28be4be4895933e369a525ba36576 \ - --hash=sha256:10671601ee06cf4dc1bc0b4805309040bb34c9af423c12c379c83d7895622bb5 \ - --hash=sha256:23e2c2c0ff50f44877f64780b815b8fd2e003cda9ce817a7fd00dea5600c84a0 \ - --hash=sha256:26ff99c980f53b3191d8931b199b29d6787c059f2e029b2b0c694343b1708c35 \ - --hash=sha256:27429b8d74ba683484a06b260b7bb00f312e7c757792628ea251afdbf1434003 \ - 
--hash=sha256:3e77409b678b21a056415da3a56abfd7c3ad03da71f3051bbcdb68cf44d3c34d \ - --hash=sha256:4e8f02d3d72ca94efc8396f8036c0d3bcc812aefc28ec70f35bb888c74a25161 \ - --hash=sha256:4eae742636aec40cf7ab98171ab9400393360b97e8f9da67b1867a9ee0889b26 \ - --hash=sha256:6a6ae17bf8f2d82d1e8858a47757ce389b880083c4ff2498dba17c56e6c103b9 \ - --hash=sha256:6a6ba91b94427cd49cd27764679024b14a96874e0dc638ae6bdd4b1a3ce97be1 \ - --hash=sha256:7bcd322935377abcc79bfe5b63c44abd0b29387f267791d566bbb566edfdd146 \ - --hash=sha256:98b8ed7bb2155e2cbb8b76f627b2fd12cf4b22ab6e14873e8641f266e0fb6d8f \ - --hash=sha256:bd25bb7980917e4e70ccccd7e3b5740614f1c408a642c245019cff9d7d1b6149 \ - --hash=sha256:d0f424328f9822b0323b3b6f2e4b9c90960b24743d220763c7f07071e0778351 \ - --hash=sha256:d58e4606da2a41659c84baeb3cfa2e4c87a74cec89a1e7c56bee4b956f9d7461 \ - --hash=sha256:e3cd21cc2840ca67de0bbe4071f79f031c81418deb544ceda93ad75ca1ee9f7b \ - --hash=sha256:e6c02171d62ed6972ca8631f6f34fa3281d51db8b326ee397b9c83093a6b7242 \ - --hash=sha256:e7c7661f7276507bce416eaae22040fd91ca471b5b33c13f8ff21137ed6f248c \ - --hash=sha256:ecc6de77df3ef68fee966bb8cb4e067e84d4d1f397d0ef6fce46913663540d77 \ + --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ + --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ + --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 + # via + # -r requirements.in + # vcrpy +regex==2021.4.4 \ + --hash=sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5 \ + --hash=sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79 \ + --hash=sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31 \ + --hash=sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500 \ + --hash=sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11 \ + --hash=sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14 \ + 
--hash=sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3 \ + --hash=sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439 \ + --hash=sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c \ + --hash=sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82 \ + --hash=sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711 \ + --hash=sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093 \ + --hash=sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a \ + --hash=sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb \ + --hash=sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8 \ + --hash=sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17 \ + --hash=sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000 \ + --hash=sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d \ + --hash=sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480 \ + --hash=sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc \ + --hash=sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0 \ + --hash=sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9 \ + --hash=sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765 \ + --hash=sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e \ + --hash=sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a \ + --hash=sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07 \ + --hash=sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f \ + --hash=sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac \ + --hash=sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7 \ + --hash=sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed \ + 
--hash=sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968 \ + --hash=sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7 \ + --hash=sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2 \ + --hash=sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4 \ + --hash=sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87 \ + --hash=sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8 \ + --hash=sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10 \ + --hash=sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29 \ + --hash=sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605 \ + --hash=sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6 \ + --hash=sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042 # via black requests==2.22.0 \ --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ - --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 \ + --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 # via -r requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ - --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \ - # via -r dev-requirements.in, -r requirements.in, furl, orderedmultidict, pip-tools, vcrpy -toml==0.10.0 \ - --hash=sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c \ - --hash=sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e \ - # via black -typed-ast==1.4.1 \ - --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \ - --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \ - --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \ - 
--hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \ - --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \ - --hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \ - --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \ - --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \ - --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \ - --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \ - --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \ - --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \ - --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \ - --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \ - --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \ - --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \ - --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \ - --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \ - --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \ - --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \ - --hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \ - # via black, mypy -typing-extensions==3.7.4.1 \ - --hash=sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2 \ - --hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \ - --hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575 \ - # via mypy -urllib3==1.25.10 \ - --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ - --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 \ - # via 
-r requirements.in, requests -vcrpy==2.0.1 \ - --hash=sha256:127e79cf7b569d071d1bd761b83f7b62b2ce2a2eb63ceca7aa67cba8f2602ea3 \ - --hash=sha256:57be64aa8e9883a4117d0b15de28af62275c001abcdb00b6dc2d4406073d9a4f \ + --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb + # via + # -r dev-requirements.in + # -r requirements.in + # furl + # orderedmultidict + # vcrpy +toml==0.10.2 \ + --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ + --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via + # black + # pep517 +typed-ast==1.4.3 \ + --hash=sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace \ + --hash=sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff \ + --hash=sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266 \ + --hash=sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528 \ + --hash=sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6 \ + --hash=sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808 \ + --hash=sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4 \ + --hash=sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363 \ + --hash=sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341 \ + --hash=sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04 \ + --hash=sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41 \ + --hash=sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e \ + --hash=sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3 \ + --hash=sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899 \ + --hash=sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805 \ + --hash=sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c \ + 
--hash=sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c \ + --hash=sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39 \ + --hash=sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a \ + --hash=sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3 \ + --hash=sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7 \ + --hash=sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f \ + --hash=sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075 \ + --hash=sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0 \ + --hash=sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40 \ + --hash=sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428 \ + --hash=sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927 \ + --hash=sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3 \ + --hash=sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f \ + --hash=sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65 + # via + # black + # mypy +typing-extensions==3.10.0.0 \ + --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ + --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ + --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 + # via + # black + # importlib-metadata + # mypy + # yarl +urllib3==1.25.11 \ + --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \ + --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e + # via + # -r requirements.in + # requests +vcrpy==4.1.1 \ + --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \ + --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 # via -r dev-requirements.in werkzeug==0.16.0 \ 
--hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ - --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 \ + --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 # via -r requirements.in -wrapt==1.10.11 \ - --hash=sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6 \ - # via -r dev-requirements.in, vcrpy -yarl==1.2.6 \ - --hash=sha256:2556b779125621b311844a072e0ed367e8409a18fa12cbd68eb1258d187820f9 \ - --hash=sha256:4aec0769f1799a9d4496827292c02a7b1f75c0bab56ab2b60dd94ebb57cbd5ee \ - --hash=sha256:55369d95afaacf2fa6b49c84d18b51f1704a6560c432a0f9a1aeb23f7b971308 \ - --hash=sha256:6c098b85442c8fe3303e708bbb775afd0f6b29f77612e8892627bcab4b939357 \ - --hash=sha256:9182cd6f93412d32e009020a44d6d170d2093646464a88aeec2aef50592f8c78 \ - --hash=sha256:c8cbc21bbfa1dd7d5386d48cc814fe3d35b80f60299cdde9279046f399c3b0d8 \ - --hash=sha256:db6f70a4b09cde813a4807843abaaa60f3b15fb4a2a06f9ae9c311472662daa1 \ - --hash=sha256:f17495e6fe3d377e3faac68121caef6f974fcb9e046bc075bcff40d8e5cc69a4 \ - --hash=sha256:f85900b9cca0c67767bb61b2b9bd53208aaa7373dae633dbe25d179b4bf38aa7 \ - # via -r dev-requirements.in, vcrpy +wrapt==1.12.1 \ + --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 + # via + # -r dev-requirements.in + # vcrpy +yarl==1.6.3 \ + --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ + --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \ + --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \ + --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \ + --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \ + --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \ + --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \ + 
--hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \ + --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \ + --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \ + --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \ + --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \ + --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \ + --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \ + --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \ + --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \ + --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \ + --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \ + --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \ + --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \ + --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \ + --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \ + --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \ + --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \ + --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \ + --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \ + --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \ + --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \ + --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \ + --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \ + --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \ + 
--hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \ + --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \ + --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \ + --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \ + --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ + --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 + # via + # -r dev-requirements.in + # vcrpy +zipp==3.4.1 \ + --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ + --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 + # via + # importlib-metadata + # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==20.2.3 \ - --hash=sha256:0f35d63b7245205f4060efe1982f5ea2196aa6e5b26c07669adcf800e2542026 \ - --hash=sha256:30c70b6179711a7c4cf76da89e8a0f5282279dfb0278bec7b94134be92543b6d \ +pip==21.1.1 \ + --hash=sha256:11d095ed5c15265fc5c15cc40a45188675c239fb0f9913b673a33e54ff7d45f0 \ + --hash=sha256:51ad01ddcd8de923533b01a870e7b987c2eb4d83b50b89e1bf102723ff9fed8b # via pip-tools -setuptools==50.3.0 \ - --hash=sha256:39060a59d91cf5cf403fa3bacbb52df4205a8c3585e0b9ba4b30e0e19d4c4b18 \ - --hash=sha256:c77b3920663a435c9450d9d971c48f5a7478fca8881b2cd2564e59f970f03536 \ - # via flake8 diff --git a/requirements.txt b/requirements.txt index 4b2f9fbe2..70515d60d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,24 +6,32 @@ # certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ - --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a \ - # via -r requirements.in, requests + --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a + # via + # -r requirements.in + # requests chardet==3.0.4 \ 
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ - --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ - # via -r requirements.in, requests + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 + # via + # -r requirements.in + # requests furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ - --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec \ + --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec # via -r requirements.in idna==2.7 \ --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ - --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 \ - # via -r requirements.in, requests + --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 + # via + # -r requirements.in + # requests orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ - --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 \ - # via -r requirements.in, furl + --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 + # via + # -r requirements.in + # furl pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ @@ -45,21 +53,26 @@ pyyaml==5.4.1 \ --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ + --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc # via -r requirements.in requests==2.22.0 \ 
--hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ - --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 \ + --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 # via -r requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ - --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \ - # via -r requirements.in, furl, orderedmultidict + --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb + # via + # -r requirements.in + # furl + # orderedmultidict urllib3==1.25.10 \ --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ - --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 \ - # via -r requirements.in, requests + --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 + # via + # -r requirements.in + # requests werkzeug==0.16.0 \ --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ - --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 \ + --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 # via -r requirements.in From 610104dbf9aee27bad7e09eea490c80fd89e4206 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Mon, 17 May 2021 11:56:53 -0700 Subject: [PATCH 243/352] correct errors from running mypy and black --- Makefile | 14 +++++++++++--- blackconfig/pyproject.toml | 2 -- pyproject.toml | 5 +++++ securedrop_proxy/proxy.py | 23 +++++++++++++---------- 4 files changed, 29 insertions(+), 15 deletions(-) delete mode 100644 blackconfig/pyproject.toml create mode 100644 pyproject.toml diff --git a/Makefile b/Makefile index 68a156801..688163fcf 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities done .PHONY: lint -lint: isort black ## Run isort, 
black and flake8 +lint: isort-check black-check ## Run isort, black and flake8 @flake8 securedrop_proxy tests .PHONY: mypy @@ -37,11 +37,19 @@ mypy: ## Run mypy static type checker .PHONY: black black: ## Run black for file formatting - @black --config ./blackconfig/pyproject.toml --check securedrop_proxy tests + @black securedrop_proxy tests + +.PHONY: black-check +black-check: ## Check Python source code formatting with black + @black --check --diff securedrop_proxy tests .PHONY: isort isort: ## Run isort for file formatting - @isort -c -w 100 securedrop_proxy/*.py tests/*.py --diff + @isort securedrop_proxy/*.py tests/*.py + +.PHONY: isort-check +isort-check: ## Check isort for file formatting + @isort --check-only --diff securedrop_proxy/*.py tests/*.py .PHONY: update-pip-requirements update-pip-requirements: ## Updates all Python requirements files via pip-compile. diff --git a/blackconfig/pyproject.toml b/blackconfig/pyproject.toml deleted file mode 100644 index aa4949aa1..000000000 --- a/blackconfig/pyproject.toml +++ /dev/null @@ -1,2 +0,0 @@ -[tool.black] -line-length = 100 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..7eaeaea01 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,5 @@ +[tool.black] +line-length = 100 + +[tool.isort] +line_length = 100 diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 69a4f4429..4e6170233 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -9,7 +9,7 @@ from tempfile import _TemporaryFileWrapper # type: ignore from typing import Dict, Optional -import furl +import furl # type: ignore import requests import werkzeug import yaml @@ -32,14 +32,14 @@ def __init__(self) -> None: self.method = "" self.path_query = "" self.body = "" - self.headers: Dict[str, str] = {} + self.headers = {} # type: Dict[str, str] class Response: def __init__(self, status: int) -> None: self.status = status self.body = "" - self.headers: Dict[str, str] = {} + self.headers = {} 
# type: Dict[str, str] self.version = version.version @@ -48,10 +48,10 @@ def __init__(self, conf_path: str, req: Req = Req(), timeout: float = 10.0) -> N self.read_conf(conf_path) self.req = req - self.res: Optional[Response] = None + self.res = None # type: Optional[Response] self.timeout = float(timeout) - self._prepared_request: Optional[Req] = None + self._prepared_request = None # type: Optional[requests.PreparedRequest] def on_done(self) -> None: print(json.dumps(self.res.__dict__)) @@ -79,12 +79,14 @@ def read_conf(self, conf_path: str) -> None: conf_in = yaml.safe_load(fh) except yaml.YAMLError: self.simple_error( - 500, "YAML syntax error while reading configuration file {}".format(conf_path), + 500, + "YAML syntax error while reading configuration file {}".format(conf_path), ) self.err_on_done() except Exception: self.simple_error( - 500, "Error while opening or reading configuration file {}".format(conf_path), + 500, + "Error while opening or reading configuration file {}".format(conf_path), ) self.err_on_done() @@ -186,7 +188,7 @@ def handle_json_response(self) -> None: res = Response(self._presp.status_code) - res.headers = self._presp.headers + res.headers = dict(self._presp.headers) res.body = self._presp.content.decode() self.res = res @@ -204,7 +206,7 @@ def handle_non_json_response(self) -> None: fh.close() - res.headers = self._presp.headers + res.headers = dict(self._presp.headers) self.on_save(fh, res) @@ -259,7 +261,8 @@ def proxy(self) -> None: logger.error(e) try: self.simple_error( - e.response.status_code, http.HTTPStatus(e.response.status_code).phrase.lower(), + e.response.status_code, + http.HTTPStatus(e.response.status_code).phrase.lower(), ) except ValueError: # Return a generic error message when the response From 36688d6aa4012982b6d339f9904f7ccf3c3a4520 Mon Sep 17 00:00:00 2001 From: Allie Crevier <4522213+creviera@users.noreply.github.com> Date: Mon, 7 Jun 2021 11:19:06 -0700 Subject: [PATCH 244/352] Create SECURITY.md --- 
SECURITY.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..1e84430bb --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,8 @@ +# Security Policy + +## Reporting a Vulnerability + +If you have found a vulnerability, please **DO NOT** file a public issue. Please send us your report privately either via: + +- SecureDrop's public bug bounty program managed by [Bugcrowd](https://bugcrowd.com/freedomofpress) +- Email to security@freedom.press (Optionally GPG-encrypted to [734F6E707434ECA6C007E1AE82BD6C9616DABB79](https://securedrop.org/documents/6/fpf-email.asc) From 83d87a96a34b83bc949a90db58a805e94e1f9f29 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 30 Jul 2021 17:05:29 -0700 Subject: [PATCH 245/352] use latest pip-tools --- README.md | 6 +- dev-requirements.txt | 202 +++++++++++++++++++++++-------------------- requirements.txt | 2 +- 3 files changed, 112 insertions(+), 98 deletions(-) diff --git a/README.md b/README.md index de9b10490..6d80117f1 100644 --- a/README.md +++ b/README.md @@ -27,15 +27,15 @@ server's response to STDOUT. For discussion about the shape of the request and response objects, see https://github.com/freedomofpress/securedrop-workstation/issues/107. -#### Installation Requirements +#### Quick Start To try the proxy script, create a virtual environment and install the requirements. 
In the root of the project directory, run ``` -virtualenv .venv +make venv source .venv/bin/activate -pip install --require-hashes -r dev-requirements.txt +make test ``` #### Update Dependencies diff --git a/dev-requirements.txt b/dev-requirements.txt index b14a54c05..df6ec18dc 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile +# This file is autogenerated by pip-compile with python 3.7 # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=dev-requirements.txt dev-requirements.in requirements.in @@ -8,9 +8,9 @@ appdirs==1.4.4 \ --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 # via black -black==21.5b1 \ - --hash=sha256:23695358dbcb3deafe7f0a3ad89feee5999a46be5fec21f4f1d108be0bcdb3b1 \ - --hash=sha256:8a60071a0043876a4ae96e6c69bd3a127dad2c1ca7c8083573eb82f92705d008 +black==21.7b0 \ + --hash=sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116 \ + --hash=sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219 # via -r dev-requirements.in certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ @@ -24,9 +24,9 @@ chardet==3.0.4 \ # via # -r requirements.in # requests -click==8.0.0 \ - --hash=sha256:7d8c289ee437bcb0316820ccee14aefcb056e58d31830ecab8e47eda6540e136 \ - --hash=sha256:e90e62ced43dc8105fb9a26d62f0d9340b5c8db053a814e25d95c19873ae87db +click==8.0.1 \ + --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ + --hash=sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6 # via # black # pip-tools @@ -99,15 +99,16 @@ idna==2.7 \ # -r requirements.in # requests # yarl -importlib-metadata==4.0.1 \ - --hash=sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581 \ - 
--hash=sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d +importlib-metadata==4.6.1 \ + --hash=sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac \ + --hash=sha256:9f55f560e116f8643ecf2922d9cd3e1c7e8d52e683178fecd9d08f6aa357e11e # via + # click # flake8 # pep517 -isort==5.8.0 \ - --hash=sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6 \ - --hash=sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d +isort==5.9.3 \ + --hash=sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899 \ + --hash=sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2 # via -r dev-requirements.in mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ @@ -156,6 +157,31 @@ multidict==5.1.0 \ # via # -r dev-requirements.in # yarl +mypy==0.910 \ + --hash=sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9 \ + --hash=sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a \ + --hash=sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9 \ + --hash=sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e \ + --hash=sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2 \ + --hash=sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212 \ + --hash=sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b \ + --hash=sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885 \ + --hash=sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150 \ + --hash=sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703 \ + --hash=sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072 \ + --hash=sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457 \ + --hash=sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e \ + 
--hash=sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0 \ + --hash=sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb \ + --hash=sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97 \ + --hash=sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8 \ + --hash=sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811 \ + --hash=sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6 \ + --hash=sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de \ + --hash=sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504 \ + --hash=sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921 \ + --hash=sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d + # via -r dev-requirements.in mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 @@ -163,47 +189,23 @@ mypy-extensions==0.4.3 \ # -r dev-requirements.in # black # mypy -mypy==0.812 \ - --hash=sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e \ - --hash=sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064 \ - --hash=sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c \ - --hash=sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4 \ - --hash=sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97 \ - --hash=sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df \ - --hash=sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8 \ - --hash=sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a \ - --hash=sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56 \ - --hash=sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7 \ - 
--hash=sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6 \ - --hash=sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5 \ - --hash=sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a \ - --hash=sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521 \ - --hash=sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564 \ - --hash=sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49 \ - --hash=sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66 \ - --hash=sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a \ - --hash=sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119 \ - --hash=sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506 \ - --hash=sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c \ - --hash=sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb - # via -r dev-requirements.in orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 # via # -r requirements.in # furl -pathspec==0.8.1 \ - --hash=sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd \ - --hash=sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d +pathspec==0.9.0 \ + --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \ + --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1 # via black -pep517==0.10.0 \ - --hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ - --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 +pep517==0.11.0 \ + --hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ + --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 # via pip-tools 
-pip-tools==6.1.0 \ - --hash=sha256:197e3f8839095ccec3ad1ef410e0804c07d9f17dff1c340fb417ca2b63feacc9 \ - --hash=sha256:400bf77e29cca48c31abc210042932bb52dcc138ef4ea4d52c5db429aa8ae6ee +pip-tools==6.2.0 \ + --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ + --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 # via -r dev-requirements.in pycodestyle==2.7.0 \ --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ @@ -250,48 +252,48 @@ pyyaml==5.4.1 \ # via # -r requirements.in # vcrpy -regex==2021.4.4 \ - --hash=sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5 \ - --hash=sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79 \ - --hash=sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31 \ - --hash=sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500 \ - --hash=sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11 \ - --hash=sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14 \ - --hash=sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3 \ - --hash=sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439 \ - --hash=sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c \ - --hash=sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82 \ - --hash=sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711 \ - --hash=sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093 \ - --hash=sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a \ - --hash=sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb \ - --hash=sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8 \ - --hash=sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17 \ - --hash=sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000 
\ - --hash=sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d \ - --hash=sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480 \ - --hash=sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc \ - --hash=sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0 \ - --hash=sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9 \ - --hash=sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765 \ - --hash=sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e \ - --hash=sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a \ - --hash=sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07 \ - --hash=sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f \ - --hash=sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac \ - --hash=sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7 \ - --hash=sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed \ - --hash=sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968 \ - --hash=sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7 \ - --hash=sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2 \ - --hash=sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4 \ - --hash=sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87 \ - --hash=sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8 \ - --hash=sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10 \ - --hash=sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29 \ - --hash=sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605 \ - --hash=sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6 \ - --hash=sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042 
+regex==2021.7.6 \ + --hash=sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f \ + --hash=sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad \ + --hash=sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a \ + --hash=sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf \ + --hash=sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59 \ + --hash=sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d \ + --hash=sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895 \ + --hash=sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4 \ + --hash=sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3 \ + --hash=sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222 \ + --hash=sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0 \ + --hash=sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c \ + --hash=sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417 \ + --hash=sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d \ + --hash=sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d \ + --hash=sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761 \ + --hash=sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0 \ + --hash=sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026 \ + --hash=sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854 \ + --hash=sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb \ + --hash=sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d \ + --hash=sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068 \ + --hash=sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde \ + 
--hash=sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d \ + --hash=sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec \ + --hash=sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa \ + --hash=sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd \ + --hash=sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b \ + --hash=sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26 \ + --hash=sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2 \ + --hash=sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f \ + --hash=sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694 \ + --hash=sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0 \ + --hash=sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407 \ + --hash=sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874 \ + --hash=sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035 \ + --hash=sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d \ + --hash=sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c \ + --hash=sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5 \ + --hash=sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985 \ + --hash=sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58 # via black requests==2.22.0 \ --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ @@ -309,6 +311,10 @@ six==1.11.0 \ toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via mypy +tomli==1.2.0 \ + --hash=sha256:056f0376bf5a6b182c513f9582c1e5b0487265eb6c48842b69aa9ca1cd5f640a \ + 
--hash=sha256:d60e681734099207a6add7a10326bc2ddd1fdc36c1b0f547d00ef73ac63739c2 # via # black # pep517 @@ -369,6 +375,10 @@ werkzeug==0.16.0 \ --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 # via -r requirements.in +wheel==0.36.2 \ + --hash=sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e \ + --hash=sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e + # via pip-tools wrapt==1.12.1 \ --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 # via @@ -415,15 +425,19 @@ yarl==1.6.3 \ # via # -r dev-requirements.in # vcrpy -zipp==3.4.1 \ - --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ - --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 +zipp==3.5.0 \ + --hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \ + --hash=sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4 # via # importlib-metadata # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==21.1.1 \ - --hash=sha256:11d095ed5c15265fc5c15cc40a45188675c239fb0f9913b673a33e54ff7d45f0 \ - --hash=sha256:51ad01ddcd8de923533b01a870e7b987c2eb4d83b50b89e1bf102723ff9fed8b +pip==21.2.1 \ + --hash=sha256:303a82aaa24cdc01f7ebbd1afc7d1b871a4aa0a88bb5bedef1fa86a3ee44ca0a \ + --hash=sha256:da0ac9d9032d1d7bac69e9e301778f77b8b6626b85203f99edd2b545434d90a7 + # via pip-tools +setuptools==57.4.0 \ + --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ + --hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6 # via pip-tools diff --git a/requirements.txt b/requirements.txt index 70515d60d..9afbecd00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile +# This file is autogenerated by pip-compile with python 
3.7 # To update, run: # # pip-compile --generate-hashes --output-file=requirements.txt requirements.in From 79f552cf33b188ab93b000a49c11f24852338ae3 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 30 Jul 2021 17:06:03 -0700 Subject: [PATCH 246/352] update ci to use `make venv` --- .circleci/config.yml | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7b2d242fb..344b993be 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,17 +1,11 @@ --- common-steps: - - &removevirtualenv - run: - name: Removes the upstream virtualenv from the original container image - command: sudo pip uninstall virtualenv -y - - &run_tests run: name: Install requirements and run tests command: | - virtualenv .venv + make venv source .venv/bin/activate - pip install --require-hashes -r dev-requirements.txt make check - &install_packaging_dependencies @@ -58,7 +52,6 @@ jobs: - image: circleci/python:3.7-buster steps: - checkout - - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *make_source_tarball From d556d671c2f2b4377b00451d7dd71a57b1589ff7 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 30 Jul 2021 17:26:16 -0700 Subject: [PATCH 247/352] fix incompatible type error --- securedrop_proxy/proxy.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 4e6170233..97f168c3e 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -6,8 +6,7 @@ import sys import tempfile import uuid -from tempfile import _TemporaryFileWrapper # type: ignore -from typing import Dict, Optional +from typing import IO, Dict, Optional import furl # type: ignore import requests @@ -126,7 +125,7 @@ def read_conf(self, conf_path: str) -> None: # # In any case, this callback mutates the given result object (in # `res`) to include the name of the new file, or to indicate errors. 
- def on_save(self, fh: _TemporaryFileWrapper, res: Response) -> None: + def on_save(self, fh: IO[bytes], res: Response) -> None: fn = str(uuid.uuid4()) try: From 38e0443376c7c634f1f992fd47501a030a48f272 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 30 Jul 2021 17:38:30 -0700 Subject: [PATCH 248/352] add third-party Library Stubs for newer version of mypy --- dev-requirements.in | 2 ++ dev-requirements.txt | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/dev-requirements.in b/dev-requirements.in index 238652e24..2fb106860 100644 --- a/dev-requirements.in +++ b/dev-requirements.in @@ -9,6 +9,8 @@ mypy-extensions pip-tools pycodestyle pyflakes +types-PyYAML +types-requests six vcrpy wrapt diff --git a/dev-requirements.txt b/dev-requirements.txt index df6ec18dc..3e2cf90a0 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -352,6 +352,14 @@ typed-ast==1.4.3 \ # via # black # mypy +types-pyyaml==5.4.3 \ + --hash=sha256:2e7b81b2b7af751634425107b986086c6ba7cb61270a43a5c290c58be8cdbc3a \ + --hash=sha256:bca83cbfc0be48600a8abf1e3d87fb762a91e6d35d724029a3321dd2dce2ceb1 + # via -r dev-requirements.in +types-requests==2.25.1 \ + --hash=sha256:2d514ee172088a8fc0d554537d6424bd261c18e63195cfe47c410df0de0ed96f \ + --hash=sha256:6e9534281fe5d06ba8116807a8de930b90b6f92dff179f8cbfa2dfdd3bd2c465 + # via -r dev-requirements.in typing-extensions==3.10.0.0 \ --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ From e6dc5fc2f3cb8d27a63e884361f7492ba076f39a Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 30 Jul 2021 17:47:24 -0700 Subject: [PATCH 249/352] add back *removevirtualenv step --- .circleci/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 344b993be..f1821abf3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,5 +1,10 @@ --- common-steps: + - 
&removevirtualenv + run: + name: Removes the upstream virtualenv from the original container image + command: sudo pip uninstall virtualenv -y + - &run_tests run: name: Install requirements and run tests @@ -52,6 +57,7 @@ jobs: - image: circleci/python:3.7-buster steps: - checkout + - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *make_source_tarball From 0743561c069e574d557339b2af892ab3ffa824f1 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 10 Aug 2021 13:54:22 -0700 Subject: [PATCH 250/352] remove some deps and regenerate test-requirements.txt Signed-off-by: Allie Crevier --- Makefile | 7 +- requirements/test-requirements.in | 5 +- requirements/test-requirements.txt | 249 ++++++++++++++++------------- 3 files changed, 143 insertions(+), 118 deletions(-) diff --git a/Makefile b/Makefile index 91be0943e..fd00bb395 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,8 @@ +.PHONY: venv +venv: ## Provision a Python 3 virtualenv for development. + python3 -m venv .venv + .venv/bin/pip install --require-hashes -r "requirements/test-requirements.txt" + .PHONY: safety safety: ## Runs `safety check` to check python dependencies for vulnerabilities pip install --upgrade safety && \ @@ -10,7 +15,7 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities .PHONY: update-pip-requirements update-pip-requirements: ## Updates all Python requirements files via pip-compile. 
- pip-compile --generate-hashes --output-file requirements/test-requirements.txt requirements/test-requirements.in + pip-compile --allow-unsafe --generate-hashes --output-file requirements/test-requirements.txt requirements/test-requirements.in .PHONY: check check: lint semgrep test ## Run linter and tests diff --git a/requirements/test-requirements.in b/requirements/test-requirements.in index 008c85a3d..96e3b86e9 100644 --- a/requirements/test-requirements.in +++ b/requirements/test-requirements.in @@ -1,11 +1,8 @@ flake8 -importlib-metadata # otherwise introduced unpinned via flake8 pathlib2 # required by pytest for python 3.5 pip-tools py>=1.9.0 pytest pytest-cov pytest-mock -semgrep==0.49.0 -typing-extensions # otherwise introduced unpinned via importlib-metadata -zipp # otherwise introduced unpinned via pep517 (via pip-tools) \ No newline at end of file +semgrep==0.49.0 \ No newline at end of file diff --git a/requirements/test-requirements.txt b/requirements/test-requirements.txt index 86f75e2a7..8350b3468 100644 --- a/requirements/test-requirements.txt +++ b/requirements/test-requirements.txt @@ -1,27 +1,27 @@ # -# This file is autogenerated by pip-compile +# This file is autogenerated by pip-compile with python 3.7 # To update, run: # -# pip-compile --generate-hashes --output-file=requirements/test-requirements.txt requirements/test-requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/test-requirements.txt requirements/test-requirements.in # -attrs==20.3.0 \ - --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ - --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 +attrs==21.2.0 \ + --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \ + --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb # via # jsonschema # pytest # semgrep -certifi==2020.12.5 \ - 
--hash=sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c \ - --hash=sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830 +certifi==2021.5.30 \ + --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ + --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 # via requests -chardet==4.0.0 \ - --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ - --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 +charset-normalizer==2.0.4 \ + --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ + --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 # via requests -click==7.1.2 \ - --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \ - --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc +click==8.0.1 \ + --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ + --hash=sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6 # via pip-tools colorama==0.4.4 \ --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ @@ -81,19 +81,19 @@ coverage==5.5 \ --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 # via pytest-cov -flake8==3.9.1 \ - --hash=sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378 \ - --hash=sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a +flake8==3.9.2 \ + --hash=sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b \ + --hash=sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907 # via -r requirements/test-requirements.in -idna==2.10 \ - --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ - 
--hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 +idna==3.2 \ + --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ + --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 # via requests -importlib-metadata==4.0.1 \ - --hash=sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581 \ - --hash=sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d +importlib-metadata==4.6.3 \ + --hash=sha256:0645585859e9a6689c523927a5032f2ba5919f1f7d0e84bd4533312320de1ff9 \ + --hash=sha256:51c6635429c77cf1ae634c997ff9e53ca3438b495f10a55ba28594dd69764a8b # via - # -r requirements/test-requirements.in + # click # flake8 # jsonschema # pep517 @@ -111,23 +111,23 @@ mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f # via flake8 -packaging==20.9 \ - --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \ - --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a +packaging==21.0 \ + --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \ + --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 # via # pytest # semgrep -pathlib2==2.3.5 \ - --hash=sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db \ - --hash=sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868 +pathlib2==2.3.6 \ + --hash=sha256:3a130b266b3a36134dcc79c17b3c7ac9634f083825ca6ea9d8f557ee6195c9c8 \ + --hash=sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f # via -r requirements/test-requirements.in -pep517==0.10.0 \ - --hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ - --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 +pep517==0.11.0 \ + 
--hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ + --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 # via pip-tools -pip-tools==6.1.0 \ - --hash=sha256:197e3f8839095ccec3ad1ef410e0804c07d9f17dff1c340fb417ca2b63feacc9 \ - --hash=sha256:400bf77e29cca48c31abc210042932bb52dcc138ef4ea4d52c5db429aa8ae6ee +pip-tools==6.2.0 \ + --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ + --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 # via -r requirements/test-requirements.in pluggy==0.13.1 \ --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ @@ -151,73 +151,83 @@ pyparsing==2.4.7 \ --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b # via packaging -pyrsistent==0.17.3 \ - --hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e +pyrsistent==0.18.0 \ + --hash=sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2 \ + --hash=sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7 \ + --hash=sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea \ + --hash=sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426 \ + --hash=sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710 \ + --hash=sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1 \ + --hash=sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396 \ + --hash=sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2 \ + --hash=sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680 \ + --hash=sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35 \ + --hash=sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427 \ + 
--hash=sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b \ + --hash=sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b \ + --hash=sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f \ + --hash=sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef \ + --hash=sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c \ + --hash=sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4 \ + --hash=sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d \ + --hash=sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78 \ + --hash=sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b \ + --hash=sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72 # via jsonschema -pytest-cov==2.11.1 \ - --hash=sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7 \ - --hash=sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da - # via -r requirements/test-requirements.in -pytest-mock==3.6.0 \ - --hash=sha256:952139a535b5b48ac0bb2f90b5dd36b67c7e1ba92601f3a8012678c4bd7f0bcc \ - --hash=sha256:f7c3d42d6287f4e45846c8231c31902b6fa2bea98735af413a43da4cf5b727f1 - # via -r requirements/test-requirements.in -pytest==6.2.3 \ - --hash=sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634 \ - --hash=sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc +pytest==6.2.4 \ + --hash=sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b \ + --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890 # via # -r requirements/test-requirements.in # pytest-cov # pytest-mock -requests==2.25.1 \ - --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804 \ - --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e +pytest-cov==2.12.1 \ + 
--hash=sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a \ + --hash=sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7 + # via -r requirements/test-requirements.in +pytest-mock==3.6.1 \ + --hash=sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3 \ + --hash=sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62 + # via -r requirements/test-requirements.in +requests==2.26.0 \ + --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ + --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 # via semgrep -ruamel.yaml.clib==0.2.2 \ - --hash=sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b \ - --hash=sha256:1236df55e0f73cd138c0eca074ee086136c3f16a97c2ac719032c050f7e0622f \ - --hash=sha256:1f8c0a4577c0e6c99d208de5c4d3fd8aceed9574bb154d7a2b21c16bb924154c \ - --hash=sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91 \ - --hash=sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc \ - --hash=sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7 \ - --hash=sha256:2fd336a5c6415c82e2deb40d08c222087febe0aebe520f4d21910629018ab0f3 \ - --hash=sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7 \ - --hash=sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6 \ - --hash=sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6 \ - --hash=sha256:46d6d20815064e8bb023ea8628cfb7402c0f0e83de2c2227a88097e239a7dffd \ - --hash=sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0 \ - --hash=sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62 \ - --hash=sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99 \ - --hash=sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5 \ - --hash=sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026 \ - 
--hash=sha256:6c0a5dc52fc74eb87c67374a4e554d4761fd42a4d01390b7e868b30d21f4b8bb \ - --hash=sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2 \ - --hash=sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1 \ - --hash=sha256:75f0ee6839532e52a3a53f80ce64925ed4aed697dd3fa890c4c918f3304bd4f4 \ - --hash=sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b \ - --hash=sha256:8be05be57dc5c7b4a0b24edcaa2f7275866d9c907725226cdde46da09367d923 \ - --hash=sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e \ - --hash=sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c \ - --hash=sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988 \ - --hash=sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f \ - --hash=sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5 \ - --hash=sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a \ - --hash=sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1 \ - --hash=sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2 \ - --hash=sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f - # via ruamel.yaml -ruamel.yaml==0.17.4 \ - --hash=sha256:44bc6b54fddd45e4bc0619059196679f9e8b79c027f4131bb072e6a22f4d5e28 \ - --hash=sha256:ac79fb25f5476e8e9ed1c53b8a2286d2c3f5dde49eb37dbcee5c7eb6a8415a22 +ruamel.yaml==0.17.10 \ + --hash=sha256:106bc8d6dc6a0ff7c9196a47570432036f41d556b779c6b4e618085f57e39e67 \ + --hash=sha256:ffb9b703853e9e8b7861606dfdab1026cf02505bade0653d1880f4b2db47f815 # via semgrep +ruamel.yaml.clib==0.2.6 \ + --hash=sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd \ + --hash=sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0 \ + --hash=sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277 \ + 
--hash=sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104 \ + --hash=sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd \ + --hash=sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78 \ + --hash=sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99 \ + --hash=sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527 \ + --hash=sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84 \ + --hash=sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7 \ + --hash=sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468 \ + --hash=sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b \ + --hash=sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94 \ + --hash=sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233 \ + --hash=sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb \ + --hash=sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5 \ + --hash=sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe \ + --hash=sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751 \ + --hash=sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502 \ + --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ + --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c + # via ruamel.yaml semgrep==0.49.0 \ --hash=sha256:99b2bae87afc824a232063c91524fda2039699733f34ba28fc963f2b76228f1a \ --hash=sha256:cca38e34824bdf17aeae6e7a8fc3ce0ac441fec51b207783a469fff9994eb08b \ --hash=sha256:e49c84f452809af7895a09f06ce48bd86fdbb7b09482fef99f528da2afb71b46 # via -r requirements/test-requirements.in -six==1.15.0 \ - --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ - --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced 
+six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via # jsonschema # pathlib2 @@ -225,32 +235,45 @@ toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f # via - # pep517 # pytest -tqdm==4.60.0 \ - --hash=sha256:daec693491c52e9498632dfbe9ccfc4882a557f5fa08982db1b4d3adbe0887c3 \ - --hash=sha256:ebdebdb95e3477ceea267decfc0784859aa3df3e27e22d23b83e9b272bf157ae + # pytest-cov +tomli==1.2.1 \ + --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ + --hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 + # via pep517 +tqdm==4.62.0 \ + --hash=sha256:3642d483b558eec80d3c831e23953582c34d7e4540db86d9e5ed9dad238dabc6 \ + --hash=sha256:706dea48ee05ba16e936ee91cb3791cd2ea6da348a0e50b46863ff4363ff4340 # via semgrep -typing-extensions==3.7.4.3 \ - --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ - --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ - --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f - # via - # -r requirements/test-requirements.in - # importlib-metadata -urllib3==1.26.4 \ - --hash=sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df \ - --hash=sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937 +typing-extensions==3.10.0.0 \ + --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ + --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ + --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 + # via importlib-metadata +urllib3==1.26.6 \ + --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ + 
--hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f # via requests -zipp==3.4.1 \ - --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ - --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 +wheel==0.37.0 \ + --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ + --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad + # via pip-tools +zipp==3.5.0 \ + --hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \ + --hash=sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4 # via - # -r requirements/test-requirements.in # importlib-metadata # pep517 -# WARNING: The following packages were not pinned, but pip requires them to be -# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. -# pip -# setuptools +# The following packages are considered to be unsafe in a requirements file: +pip==21.2.3 \ + --hash=sha256:895df6014c2f02f9d278a8ad6e31cdfd312952b4a93c3068d0556964f4490057 \ + --hash=sha256:91e66f2a2702e7d2dcc092ed8c5ebe923e69b9997ea28ba25823943bcd3bf820 + # via pip-tools +setuptools==57.4.0 \ + --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ + --hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6 + # via + # jsonschema + # pip-tools + # semgrep From 2dcc6de0ce9aadc93e85dab54d2be95cc63357e1 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 10 Aug 2021 14:05:19 -0700 Subject: [PATCH 251/352] use latest semgrep --- requirements/test-requirements.in | 2 +- requirements/test-requirements.txt | 19 +++++++++++++++---- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/requirements/test-requirements.in b/requirements/test-requirements.in index 96e3b86e9..ed04d7cb9 100644 --- a/requirements/test-requirements.in +++ b/requirements/test-requirements.in @@ -5,4 +5,4 @@ py>=1.9.0 pytest 
pytest-cov pytest-mock -semgrep==0.49.0 \ No newline at end of file +semgrep \ No newline at end of file diff --git a/requirements/test-requirements.txt b/requirements/test-requirements.txt index 8350b3468..0318a2ab1 100644 --- a/requirements/test-requirements.txt +++ b/requirements/test-requirements.txt @@ -11,6 +11,10 @@ attrs==21.2.0 \ # jsonschema # pytest # semgrep +bracex==2.1.1 \ + --hash=sha256:01f715cd0ed7a622ec8b32322e715813f7574de531f09b70f6f3b2c10f682425 \ + --hash=sha256:64e2a6d14de9c8e022cf40539ac8468ba7c4b99550a2b05fc87fd20e392e568f + # via wcmatch certifi==2021.5.30 \ --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 @@ -121,6 +125,9 @@ pathlib2==2.3.6 \ --hash=sha256:3a130b266b3a36134dcc79c17b3c7ac9634f083825ca6ea9d8f557ee6195c9c8 \ --hash=sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f # via -r requirements/test-requirements.in +peewee==3.14.4 \ + --hash=sha256:9e356b327c2eaec6dd42ecea6f4ddded025793dba906a3d065a0452e726c51a2 + # via semgrep pep517==0.11.0 \ --hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 @@ -220,10 +227,10 @@ ruamel.yaml.clib==0.2.6 \ --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c # via ruamel.yaml -semgrep==0.49.0 \ - --hash=sha256:99b2bae87afc824a232063c91524fda2039699733f34ba28fc963f2b76228f1a \ - --hash=sha256:cca38e34824bdf17aeae6e7a8fc3ce0ac441fec51b207783a469fff9994eb08b \ - --hash=sha256:e49c84f452809af7895a09f06ce48bd86fdbb7b09482fef99f528da2afb71b46 +semgrep==0.61.0 \ + --hash=sha256:04256932d2b234a034948d39bd04349cdc878e2868b91d2039038dd5451248ba \ + --hash=sha256:7dc56bf0ecfd66c86e1e130103d3088b82ff2b5a6c30a204314a7a1760790420 \ + 
--hash=sha256:caa74cce5f61fd18049ac8ac7a4ec2a5e604beb9232d7e26a171b7a2aeeb9d5c # via -r requirements/test-requirements.in six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ @@ -254,6 +261,10 @@ urllib3==1.26.6 \ --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f # via requests +wcmatch==8.2 \ + --hash=sha256:4d54ddb506c90b5a5bba3a96a1cfb0bb07127909e19046a71d689ddfb18c3617 \ + --hash=sha256:9146b1ab9354e0797ef6ef69bc89cb32cb9f46d1b9eeef69c559aeec8f3bffb6 + # via semgrep wheel==0.37.0 \ --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad From caebb466b764c1ca1eab12296831493e92ea64bf Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 10 Aug 2021 15:47:58 -0700 Subject: [PATCH 252/352] remove pathlib2 from in file Signed-off-by: Allie Crevier --- requirements/test-requirements.in | 1 - requirements/test-requirements.txt | 8 +------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/requirements/test-requirements.in b/requirements/test-requirements.in index ed04d7cb9..e0bfaf54a 100644 --- a/requirements/test-requirements.in +++ b/requirements/test-requirements.in @@ -1,5 +1,4 @@ flake8 -pathlib2 # required by pytest for python 3.5 pip-tools py>=1.9.0 pytest diff --git a/requirements/test-requirements.txt b/requirements/test-requirements.txt index 0318a2ab1..9ff54c558 100644 --- a/requirements/test-requirements.txt +++ b/requirements/test-requirements.txt @@ -121,10 +121,6 @@ packaging==21.0 \ # via # pytest # semgrep -pathlib2==2.3.6 \ - --hash=sha256:3a130b266b3a36134dcc79c17b3c7ac9634f083825ca6ea9d8f557ee6195c9c8 \ - --hash=sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f - # via -r requirements/test-requirements.in peewee==3.14.4 \ 
--hash=sha256:9e356b327c2eaec6dd42ecea6f4ddded025793dba906a3d065a0452e726c51a2 # via semgrep @@ -235,9 +231,7 @@ semgrep==0.61.0 \ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # jsonschema - # pathlib2 + # via jsonschema toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f From 89f191f2813daa8cc8adedc3aef55822ba626c67 Mon Sep 17 00:00:00 2001 From: Kushal Das Date: Fri, 13 Aug 2021 11:13:54 +0530 Subject: [PATCH 253/352] Adds upgrade-pip makefile target One can upgrade a dependency via: `PACKAGE=packagename make upgrade-pip` --- Makefile | 3 +++ README.md | 8 ++++++++ 2 files changed, 11 insertions(+) diff --git a/Makefile b/Makefile index 0a669f0db..185bffb50 100644 --- a/Makefile +++ b/Makefile @@ -23,6 +23,9 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities update-pip-requirements: ## Updates all Python requirements files via pip-compile. pip-compile --generate-hashes --output-file requirements.txt requirements.in +.PHONY: upgrade-pip +upgrade-pip: ## Upgrade one single package via pip-compile + pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements.txt requirements.in # Explaination of the below shell command should it ever break. # 1. Set the field separator to ": ##" and any make targets that might appear between : and ## diff --git a/README.md b/README.md index 888f606c4..9bce5f6ba 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,14 @@ This is a Python module and qrexec service for logging in Qubes for [SecureDrop](https://securedrop.org). +## How to upgrade the dependencies? + +To upgrade one single Python dependency, say `redis`, run the following: + +```bash +PACKAGE=redis make upgrade-pip +``` + ## How to use/try this? 
In our example, we will use a vm named *logging* for storing logs, and we will use From fcbd963381a27197ec9515e4eca1e710cc780f01 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 25 Aug 2021 12:16:54 -0700 Subject: [PATCH 254/352] add more helper functions for updating reqs Signed-off-by: Allie Crevier --- Makefile | 32 +++++++++-- requirements.txt | 9 --- requirements/dev-requirements.in | 1 + requirements/dev-requirements.txt | 57 +++++++++++++++++++ .../requirements.in | 0 requirements/requirements.txt | 10 ++++ 6 files changed, 94 insertions(+), 15 deletions(-) delete mode 100644 requirements.txt create mode 100644 requirements/dev-requirements.in create mode 100644 requirements/dev-requirements.txt rename requirements.in => requirements/requirements.in (100%) create mode 100644 requirements/requirements.txt diff --git a/Makefile b/Makefile index 185bffb50..3b8e95fcb 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,12 @@ DEFAULT_GOAL: help SHELL := /bin/bash +.PHONY: venv +venv: ## Provision a Python 3 virtualenv for **development** + python3 -m venv .venv + .venv/bin/pip install --upgrade pip wheel + .venv/bin/pip install --require-hashes -r requirements/dev-requirements.txt + # Bandit is a static code analysis tool to detect security vulnerabilities in Python applications # https://wiki.openstack.org/wiki/Security/Projects/Bandit .PHONY: bandit @@ -19,13 +25,27 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities || exit 1; \ done -.PHONY: update-pip-requirements -update-pip-requirements: ## Updates all Python requirements files via pip-compile. 
- pip-compile --generate-hashes --output-file requirements.txt requirements.in +.PHONY: sync-requirements +sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies + rm -r requirements/dev-requirements.txt && cp requirements/requirements.txt requirements/dev-requirements.txt + pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in + +.PHONY: requirements +requirements: ## Update *requirements.txt files if pinned versions do not comply with the dependency specifications in *requirements.in + pip-compile --generate-hashes --output-file requirements/requirements.txt requirements/requirements.in + $(MAKE) sync-requirements + +.PHONY: update-dependency +update-dependency: ## Add or upgrade a package to the latest version that complies with the dependency specifications in requirements.in + pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements/requirements.txt requirements/requirements.in + $(MAKE) sync-requirements -.PHONY: upgrade-pip -upgrade-pip: ## Upgrade one single package via pip-compile - pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements.txt requirements.in +.PHONY: update-dev-only-dependencies +update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in + $(MAKE) sync-requirements + @while read line; do \ + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in; \ + done < 'requirements/dev-requirements.in' # Explaination of the below shell command should it ever break. # 1. 
Set the field separator to ": ##" and any make targets that might appear between : and ## diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 3c896d549..000000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --generate-hashes --output-file=requirements.txt requirements.in -# -redis==3.3.11 \ - --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ - --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in new file mode 100644 index 000000000..2c2a9f384 --- /dev/null +++ b/requirements/dev-requirements.in @@ -0,0 +1 @@ +pip-tools diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt new file mode 100644 index 000000000..51774d6bc --- /dev/null +++ b/requirements/dev-requirements.txt @@ -0,0 +1,57 @@ +# +# This file is autogenerated by pip-compile with python 3.7 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in +# +click==8.0.1 \ + --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ + --hash=sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6 + # via pip-tools +importlib-metadata==4.6.4 \ + --hash=sha256:7b30a78db2922d78a6f47fb30683156a14f3c6aa5cc23f77cc8967e9ab2d002f \ + --hash=sha256:ed5157fef23a4bc4594615a0dd8eba94b2bb36bf2a343fa3d8bb2fa0a62a99d5 + # via + # click + # pep517 +pep517==0.11.0 \ + --hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ + --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 + # via pip-tools +pip-tools==6.2.0 \ + --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ + 
--hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 + # via -r requirements/dev-requirements.in +redis==3.3.11 \ + --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ + --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 + # via -r requirements/requirements.in +tomli==1.2.1 \ + --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ + --hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 + # via pep517 +typing-extensions==3.10.0.0 \ + --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ + --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ + --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 + # via importlib-metadata +wheel==0.37.0 \ + --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ + --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad + # via pip-tools +zipp==3.5.0 \ + --hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \ + --hash=sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4 + # via + # importlib-metadata + # pep517 + +# The following packages are considered to be unsafe in a requirements file: +pip==21.2.4 \ + --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ + --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 + # via pip-tools +setuptools==57.4.0 \ + --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ + --hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6 + # via pip-tools diff --git a/requirements.in b/requirements/requirements.in similarity index 100% rename from requirements.in rename to requirements/requirements.in diff --git a/requirements/requirements.txt b/requirements/requirements.txt new file mode 100644 index 
000000000..5bf812bb7 --- /dev/null +++ b/requirements/requirements.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile with python 3.7 +# To update, run: +# +# pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in +# +redis==3.3.11 \ + --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ + --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 + # via -r requirements/requirements.in From febef708bbb34b86aa21e7b7c1ab021b47cdf6f8 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 25 Aug 2021 15:40:00 -0700 Subject: [PATCH 255/352] also move build-requirements.txt to requirements directory --- build-requirements.txt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 build-requirements.txt diff --git a/build-requirements.txt b/build-requirements.txt deleted file mode 100644 index b9813f49e..000000000 --- a/build-requirements.txt +++ /dev/null @@ -1 +0,0 @@ -redis==3.3.11 --hash=sha256:974926675d246ade47a101e305596a0f50282a405a03b7173a70d99c79e6370c From c9ad57aea1c06f0f3d73248c61ce361cfa63cba7 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 25 Aug 2021 15:40:15 -0700 Subject: [PATCH 256/352] also move build-requirements.txt to requirements directory --- requirements/build-requirements.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 requirements/build-requirements.txt diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt new file mode 100644 index 000000000..b9813f49e --- /dev/null +++ b/requirements/build-requirements.txt @@ -0,0 +1 @@ +redis==3.3.11 --hash=sha256:974926675d246ade47a101e305596a0f50282a405a03b7173a70d99c79e6370c From 8798ca41672191dde7c7e21eaf69b526f5ba0069 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 25 Aug 2021 15:42:18 -0700 Subject: [PATCH 257/352] add helpers to update dev-only deps and sync with prod Signed-off-by: Allie Crevier --- Makefile | 29 
++++++++++++++----- .../build-requirements.txt | 0 .../dev-requirements.in | 0 .../dev-requirements.txt | 0 .../requirements.in | 0 .../requirements.txt | 22 +++++++------- 6 files changed, 33 insertions(+), 18 deletions(-) rename build-requirements.txt => requirements/build-requirements.txt (100%) rename dev-requirements.in => requirements/dev-requirements.in (100%) rename dev-requirements.txt => requirements/dev-requirements.txt (100%) rename requirements.in => requirements/requirements.in (100%) rename requirements.txt => requirements/requirements.txt (87%) diff --git a/Makefile b/Makefile index 688163fcf..20f7e3fb5 100644 --- a/Makefile +++ b/Makefile @@ -5,9 +5,7 @@ all: help .PHONY: venv venv: - python3 -m venv .venv - ## Good idea to upgrade pip and wheel when you create a new virtual environment. - ## Or you could use the virtualenv command instead. + python3 -m venv .venv ## Provision a Python 3 virtualenv for **development** .venv/bin/pip install --upgrade pip wheel .venv/bin/pip install --require-hashes -r "dev-requirements.txt" @@ -51,10 +49,27 @@ isort: ## Run isort for file formatting isort-check: ## Check isort for file formatting @isort --check-only --diff securedrop_proxy/*.py tests/*.py -.PHONY: update-pip-requirements -update-pip-requirements: ## Updates all Python requirements files via pip-compile. 
- pip-compile --generate-hashes --allow-unsafe --upgrade --output-file dev-requirements.txt dev-requirements.in requirements.in - pip-compile --generate-hashes --output-file requirements.txt requirements.in +.PHONY: sync-requirements +sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies + rm -r requirements/dev-requirements.txt && cp requirements/requirements.txt requirements/dev-requirements.txt + pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in + +.PHONY: requirements +requirements: ## Update *requirements.txt files if pinned versions do not comply with the dependency specifications in *requirements.in + pip-compile --generate-hashes --output-file requirements/requirements.txt requirements/requirements.in + $(MAKE) sync-requirements + +.PHONY: update-dependency +update-dependency: ## Add or upgrade a package to the latest version that complies with the dependency specifications in requirements.in + pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements/requirements.txt requirements/requirements.in + $(MAKE) sync-requirements + +.PHONY: update-dev-only-dependencies +update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in + $(MAKE) sync-requirements + @while read line; do \ + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in; \ + done < 'requirements/dev-requirements.in' .PHONY: test test: clean .coverage ## Runs tests with coverage diff --git a/build-requirements.txt b/requirements/build-requirements.txt similarity index 100% rename from build-requirements.txt rename to requirements/build-requirements.txt diff --git 
a/dev-requirements.in b/requirements/dev-requirements.in similarity index 100% rename from dev-requirements.in rename to requirements/dev-requirements.in diff --git a/dev-requirements.txt b/requirements/dev-requirements.txt similarity index 100% rename from dev-requirements.txt rename to requirements/dev-requirements.txt diff --git a/requirements.in b/requirements/requirements.in similarity index 100% rename from requirements.in rename to requirements/requirements.in diff --git a/requirements.txt b/requirements/requirements.txt similarity index 87% rename from requirements.txt rename to requirements/requirements.txt index 9afbecd00..9e391e742 100644 --- a/requirements.txt +++ b/requirements/requirements.txt @@ -2,35 +2,35 @@ # This file is autogenerated by pip-compile with python 3.7 # To update, run: # -# pip-compile --generate-hashes --output-file=requirements.txt requirements.in +# pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in # certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a # via - # -r requirements.in + # -r requirements/requirements.in # requests chardet==3.0.4 \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 # via - # -r requirements.in + # -r requirements/requirements.in # requests furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec - # via -r requirements.in + # via -r requirements/requirements.in idna==2.7 \ --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 # via - # -r requirements.in + # -r requirements/requirements.in # 
requests orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 # via - # -r requirements.in + # -r requirements/requirements.in # furl pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ @@ -54,25 +54,25 @@ pyyaml==5.4.1 \ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc - # via -r requirements.in + # via -r requirements/requirements.in requests==2.22.0 \ --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 - # via -r requirements.in + # via -r requirements/requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb # via - # -r requirements.in + # -r requirements/requirements.in # furl # orderedmultidict urllib3==1.25.10 \ --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 # via - # -r requirements.in + # -r requirements/requirements.in # requests werkzeug==0.16.0 \ --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 - # via -r requirements.in + # via -r requirements/requirements.in From 352024c75ec6a570bb473ee2766cf28a95b929a1 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 25 Aug 2021 15:47:31 -0700 Subject: [PATCH 258/352] update dev-only dependencies and sync with prod --- requirements/dev-requirements.txt | 192 ++++++++++++++---------------- 1 
file changed, 92 insertions(+), 100 deletions(-) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 3e2cf90a0..dc6989228 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with python 3.7 # To update, run: # -# pip-compile --allow-unsafe --generate-hashes --output-file=dev-requirements.txt dev-requirements.in requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # appdirs==1.4.4 \ --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ @@ -11,18 +11,18 @@ appdirs==1.4.4 \ black==21.7b0 \ --hash=sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116 \ --hash=sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219 - # via -r dev-requirements.in + # via -r requirements/dev-requirements.in certifi==2018.10.15 \ --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a # via - # -r requirements.in + # -r requirements/requirements.in # requests chardet==3.0.4 \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 # via - # -r requirements.in + # -r requirements/requirements.in # requests click==8.0.1 \ --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ @@ -83,25 +83,25 @@ coverage==5.5 \ --hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 - # via -r dev-requirements.in + # via -r requirements/dev-requirements.in flake8==3.9.2 \ 
--hash=sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b \ --hash=sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907 - # via -r dev-requirements.in + # via -r requirements/dev-requirements.in furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec - # via -r requirements.in + # via -r requirements/requirements.in idna==2.7 \ --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 # via - # -r requirements.in + # -r requirements/requirements.in # requests # yarl -importlib-metadata==4.6.1 \ - --hash=sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac \ - --hash=sha256:9f55f560e116f8643ecf2922d9cd3e1c7e8d52e683178fecd9d08f6aa357e11e +importlib-metadata==4.6.4 \ + --hash=sha256:7b30a78db2922d78a6f47fb30683156a14f3c6aa5cc23f77cc8967e9ab2d002f \ + --hash=sha256:ed5157fef23a4bc4594615a0dd8eba94b2bb36bf2a343fa3d8bb2fa0a62a99d5 # via # click # flake8 @@ -109,12 +109,12 @@ importlib-metadata==4.6.1 \ isort==5.9.3 \ --hash=sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899 \ --hash=sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2 - # via -r dev-requirements.in + # via -r requirements/dev-requirements.in mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f # via - # -r dev-requirements.in + # -r requirements/dev-requirements.in # flake8 multidict==5.1.0 \ --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ @@ -155,7 +155,7 @@ multidict==5.1.0 \ --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 # 
via - # -r dev-requirements.in + # -r requirements/dev-requirements.in # yarl mypy==0.910 \ --hash=sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9 \ @@ -181,19 +181,19 @@ mypy==0.910 \ --hash=sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504 \ --hash=sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921 \ --hash=sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d - # via -r dev-requirements.in + # via -r requirements/dev-requirements.in mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 # via - # -r dev-requirements.in + # -r requirements/dev-requirements.in # black # mypy orderedmultidict==1.0 \ --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 # via - # -r requirements.in + # -r requirements/requirements.in # furl pathspec==0.9.0 \ --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \ @@ -206,18 +206,18 @@ pep517==0.11.0 \ pip-tools==6.2.0 \ --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 - # via -r dev-requirements.in + # via -r requirements/dev-requirements.in pycodestyle==2.7.0 \ --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef # via - # -r dev-requirements.in + # -r requirements/dev-requirements.in # flake8 pyflakes==2.3.1 \ --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db # via - # -r dev-requirements.in + # -r requirements/dev-requirements.in # flake8 pyyaml==5.4.1 \ 
--hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ @@ -232,79 +232,71 @@ pyyaml==5.4.1 \ --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ - --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ - --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ - --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ - --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ - --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ - --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ - --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ - --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 + --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc # via - # -r requirements.in + # -r requirements/requirements.in # vcrpy -regex==2021.7.6 \ - 
--hash=sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f \ - --hash=sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad \ - --hash=sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a \ - --hash=sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf \ - --hash=sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59 \ - --hash=sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d \ - --hash=sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895 \ - --hash=sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4 \ - --hash=sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3 \ - --hash=sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222 \ - --hash=sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0 \ - --hash=sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c \ - --hash=sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417 \ - --hash=sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d \ - --hash=sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d \ - --hash=sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761 \ - --hash=sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0 \ - --hash=sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026 \ - --hash=sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854 \ - --hash=sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb \ - --hash=sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d \ - --hash=sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068 \ - --hash=sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde \ - --hash=sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d \ - 
--hash=sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec \ - --hash=sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa \ - --hash=sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd \ - --hash=sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b \ - --hash=sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26 \ - --hash=sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2 \ - --hash=sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f \ - --hash=sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694 \ - --hash=sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0 \ - --hash=sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407 \ - --hash=sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874 \ - --hash=sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035 \ - --hash=sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d \ - --hash=sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c \ - --hash=sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5 \ - --hash=sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985 \ - --hash=sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58 +regex==2021.8.21 \ + --hash=sha256:03840a07a402576b8e3a6261f17eb88abd653ad4e18ec46ef10c9a63f8c99ebd \ + --hash=sha256:06ba444bbf7ede3890a912bd4904bb65bf0da8f0d8808b90545481362c978642 \ + --hash=sha256:1f9974826aeeda32a76648fc677e3125ade379869a84aa964b683984a2dea9f1 \ + --hash=sha256:330836ad89ff0be756b58758878409f591d4737b6a8cef26a162e2a4961c3321 \ + --hash=sha256:38600fd58c2996829480de7d034fb2d3a0307110e44dae80b6b4f9b3d2eea529 \ + --hash=sha256:3a195e26df1fbb40ebee75865f9b64ba692a5824ecb91c078cc665b01f7a9a36 \ + 
--hash=sha256:41acdd6d64cd56f857e271009966c2ffcbd07ec9149ca91f71088574eaa4278a \ + --hash=sha256:45f97ade892ace20252e5ccecdd7515c7df5feeb42c3d2a8b8c55920c3551c30 \ + --hash=sha256:4b0c211c55d4aac4309c3209833c803fada3fc21cdf7b74abedda42a0c9dc3ce \ + --hash=sha256:5d5209c3ba25864b1a57461526ebde31483db295fc6195fdfc4f8355e10f7376 \ + --hash=sha256:615fb5a524cffc91ab4490b69e10ae76c1ccbfa3383ea2fad72e54a85c7d47dd \ + --hash=sha256:61e734c2bcb3742c3f454dfa930ea60ea08f56fd1a0eb52d8cb189a2f6be9586 \ + --hash=sha256:640ccca4d0a6fcc6590f005ecd7b16c3d8f5d52174e4854f96b16f34c39d6cb7 \ + --hash=sha256:6dbd51c3db300ce9d3171f4106da18fe49e7045232630fe3d4c6e37cb2b39ab9 \ + --hash=sha256:71a904da8c9c02aee581f4452a5a988c3003207cb8033db426f29e5b2c0b7aea \ + --hash=sha256:8021dee64899f993f4b5cca323aae65aabc01a546ed44356a0965e29d7893c94 \ + --hash=sha256:8b8d551f1bd60b3e1c59ff55b9e8d74607a5308f66e2916948cafd13480b44a3 \ + --hash=sha256:93f9f720081d97acee38a411e861d4ce84cbc8ea5319bc1f8e38c972c47af49f \ + --hash=sha256:96f0c79a70642dfdf7e6a018ebcbea7ea5205e27d8e019cad442d2acfc9af267 \ + --hash=sha256:9966337353e436e6ba652814b0a957a517feb492a98b8f9d3b6ba76d22301dcc \ + --hash=sha256:a34ba9e39f8269fd66ab4f7a802794ffea6d6ac500568ec05b327a862c21ce23 \ + --hash=sha256:a49f85f0a099a5755d0a2cc6fc337e3cb945ad6390ec892332c691ab0a045882 \ + --hash=sha256:a795829dc522227265d72b25d6ee6f6d41eb2105c15912c230097c8f5bfdbcdc \ + --hash=sha256:a89ca4105f8099de349d139d1090bad387fe2b208b717b288699ca26f179acbe \ + --hash=sha256:ac95101736239260189f426b1e361dc1b704513963357dc474beb0f39f5b7759 \ + --hash=sha256:ae87ab669431f611c56e581679db33b9a467f87d7bf197ac384e71e4956b4456 \ + --hash=sha256:b091dcfee169ad8de21b61eb2c3a75f9f0f859f851f64fdaf9320759a3244239 \ + --hash=sha256:b511c6009d50d5c0dd0bab85ed25bc8ad6b6f5611de3a63a59786207e82824bb \ + --hash=sha256:b79dc2b2e313565416c1e62807c7c25c67a6ff0a0f8d83a318df464555b65948 \ + --hash=sha256:bca14dfcfd9aae06d7d8d7e105539bd77d39d06caaae57a1ce945670bae744e0 \ + 
--hash=sha256:c835c30f3af5c63a80917b72115e1defb83de99c73bc727bddd979a3b449e183 \ + --hash=sha256:ccd721f1d4fc42b541b633d6e339018a08dd0290dc67269df79552843a06ca92 \ + --hash=sha256:d6c2b1d78ceceb6741d703508cd0e9197b34f6bf6864dab30f940f8886e04ade \ + --hash=sha256:d6ec4ae13760ceda023b2e5ef1f9bc0b21e4b0830458db143794a117fdbdc044 \ + --hash=sha256:d8b623fc429a38a881ab2d9a56ef30e8ea20c72a891c193f5ebbddc016e083ee \ + --hash=sha256:ea9753d64cba6f226947c318a923dadaf1e21cd8db02f71652405263daa1f033 \ + --hash=sha256:ebbceefbffae118ab954d3cd6bf718f5790db66152f95202ebc231d58ad4e2c2 \ + --hash=sha256:ecb6e7c45f9cd199c10ec35262b53b2247fb9a408803ed00ee5bb2b54aa626f5 \ + --hash=sha256:ef9326c64349e2d718373415814e754183057ebc092261387a2c2f732d9172b2 \ + --hash=sha256:f93a9d8804f4cec9da6c26c8cfae2c777028b4fdd9f49de0302e26e00bb86504 \ + --hash=sha256:faf08b0341828f6a29b8f7dd94d5cf8cc7c39bfc3e67b78514c54b494b66915a # via black requests==2.22.0 \ --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 - # via -r requirements.in + # via -r requirements/requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb # via - # -r dev-requirements.in - # -r requirements.in + # -r requirements/dev-requirements.in + # -r requirements/requirements.in # furl # orderedmultidict # vcrpy @@ -312,9 +304,9 @@ toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f # via mypy -tomli==1.2.0 \ - --hash=sha256:056f0376bf5a6b182c513f9582c1e5b0487265eb6c48842b69aa9ca1cd5f640a \ - --hash=sha256:d60e681734099207a6add7a10326bc2ddd1fdc36c1b0f547d00ef73ac63739c2 +tomli==1.2.1 \ + --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ + 
--hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 # via # black # pep517 @@ -352,14 +344,14 @@ typed-ast==1.4.3 \ # via # black # mypy -types-pyyaml==5.4.3 \ - --hash=sha256:2e7b81b2b7af751634425107b986086c6ba7cb61270a43a5c290c58be8cdbc3a \ - --hash=sha256:bca83cbfc0be48600a8abf1e3d87fb762a91e6d35d724029a3321dd2dce2ceb1 - # via -r dev-requirements.in -types-requests==2.25.1 \ - --hash=sha256:2d514ee172088a8fc0d554537d6424bd261c18e63195cfe47c410df0de0ed96f \ - --hash=sha256:6e9534281fe5d06ba8116807a8de930b90b6f92dff179f8cbfa2dfdd3bd2c465 - # via -r dev-requirements.in +types-pyyaml==5.4.6 \ + --hash=sha256:745dcb4b1522423026bcc83abb9925fba747f1e8602d902f71a4058f9e7fb662 \ + --hash=sha256:96f8d3d96aa1a18a465e8f6a220e02cff2f52632314845a364ecbacb0aea6e30 + # via -r requirements/dev-requirements.in +types-requests==2.25.6 \ + --hash=sha256:a5a305b43ea57bf64d6731f89816946a405b591eff6de28d4c0fd58422cee779 \ + --hash=sha256:e21541c0f55c066c491a639309159556dd8c5833e49fcde929c4c47bdb0002ee + # via -r requirements/dev-requirements.in typing-extensions==3.10.0.0 \ --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ @@ -369,28 +361,28 @@ typing-extensions==3.10.0.0 \ # importlib-metadata # mypy # yarl -urllib3==1.25.11 \ - --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \ - --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e +urllib3==1.25.10 \ + --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ + --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 # via - # -r requirements.in + # -r requirements/requirements.in # requests vcrpy==4.1.1 \ --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \ --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 - # via -r dev-requirements.in + 
# via -r requirements/dev-requirements.in werkzeug==0.16.0 \ --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 - # via -r requirements.in -wheel==0.36.2 \ - --hash=sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e \ - --hash=sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e + # via -r requirements/requirements.in +wheel==0.37.0 \ + --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ + --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad # via pip-tools wrapt==1.12.1 \ --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 # via - # -r dev-requirements.in + # -r requirements/dev-requirements.in # vcrpy yarl==1.6.3 \ --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ @@ -431,7 +423,7 @@ yarl==1.6.3 \ --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 # via - # -r dev-requirements.in + # -r requirements/dev-requirements.in # vcrpy zipp==3.5.0 \ --hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \ @@ -441,9 +433,9 @@ zipp==3.5.0 \ # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==21.2.1 \ - --hash=sha256:303a82aaa24cdc01f7ebbd1afc7d1b871a4aa0a88bb5bedef1fa86a3ee44ca0a \ - --hash=sha256:da0ac9d9032d1d7bac69e9e301778f77b8b6626b85203f99edd2b545434d90a7 +pip==21.2.4 \ + --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ + --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 # via pip-tools setuptools==57.4.0 \ --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ From 8c32be2798afde7b2c75689c5e814dacefd16dd2 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: 
Wed, 25 Aug 2021 15:57:02 -0700 Subject: [PATCH 259/352] update manifest to include requirements dir --- MANIFEST.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 9596bbc0a..17086b81b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,8 +1,8 @@ include LICENSE include README.md include changelog.md -include build-requirements.txt -include requirements.txt +include requirements/build-requirements.txt +include requirements/requirements.txt include securedrop-log* include securedrop-redis-log include securedrop.Log From f3c876cc4b38104a3c8a36eb4e5e88207d252d5f Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 25 Aug 2021 15:58:18 -0700 Subject: [PATCH 260/352] include requirements dir Signed-off-by: Allie Crevier --- MANIFEST.in | 4 ++-- Makefile | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index b024c622c..1c69ebe30 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,7 +4,7 @@ include securedrop_proxy/VERSION include changelog.md include config-example.yaml include qubes/securedrop.Proxy -include build-requirements.txt -include requirements.txt +include requirements/build-requirements.txt +include requirements/requirements.txt include securedrop_proxy/*.py include setup.py diff --git a/Makefile b/Makefile index 20f7e3fb5..7b5bcd5de 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,7 @@ all: help venv: python3 -m venv .venv ## Provision a Python 3 virtualenv for **development** .venv/bin/pip install --upgrade pip wheel - .venv/bin/pip install --require-hashes -r "dev-requirements.txt" + .venv/bin/pip install --require-hashes -r "requirements/dev-requirements.txt" .PHONY: bandit bandit: ## Run bandit with medium level excluding test-related folders From 4a6a4cbdf83f33979a757e1e629a495621264185 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 27 Aug 2021 12:26:52 -0700 Subject: [PATCH 261/352] add options to update dev-only dependencies and sync with prod 
Signed-off-by: Allie Crevier --- .circleci/config.yml | 4 +- Makefile | 34 ++++++++++--- ...st-requirements.in => dev-requirements.in} | 0 ...-requirements.txt => dev-requirements.txt} | 48 +++++++++---------- requirements/requirements.in | 0 requirements/requirements.txt | 6 +++ 6 files changed, 60 insertions(+), 32 deletions(-) rename requirements/{test-requirements.in => dev-requirements.in} (100%) rename requirements/{test-requirements.txt => dev-requirements.txt} (91%) create mode 100644 requirements/requirements.in diff --git a/.circleci/config.yml b/.circleci/config.yml index 1d00acdbb..49f04010e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,7 +17,7 @@ common-steps: command: | virtualenv .venv source .venv/bin/activate - pip install --require-hashes -r requirements/test-requirements.txt + pip install --require-hashes -r requirements/dev-requirements.txt make test - &install_packaging_dependencies @@ -69,7 +69,7 @@ jobs: command: | virtualenv .venv source .venv/bin/activate - pip install --require-hashes -r requirements/test-requirements.txt + pip install --require-hashes -r requirements/dev-requirements.txt make lint - run: name: Check Python dependencies for CVEs diff --git a/Makefile b/Makefile index fd00bb395..62f1b7101 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,11 @@ +.PHONY: all +all: help + .PHONY: venv -venv: ## Provision a Python 3 virtualenv for development. 
+venv: ## Provision a Python 3 virtualenv for **development** python3 -m venv .venv - .venv/bin/pip install --require-hashes -r "requirements/test-requirements.txt" + .venv/bin/pip install --upgrade pip wheel + .venv/bin/pip install --require-hashes -r requirements/dev-requirements.txt .PHONY: safety safety: ## Runs `safety check` to check python dependencies for vulnerabilities @@ -13,9 +17,27 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities || exit 1; \ done -.PHONY: update-pip-requirements -update-pip-requirements: ## Updates all Python requirements files via pip-compile. - pip-compile --allow-unsafe --generate-hashes --output-file requirements/test-requirements.txt requirements/test-requirements.in +.PHONY: sync-requirements +sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies + rm -r requirements/dev-requirements.txt && cp requirements/requirements.txt requirements/dev-requirements.txt + pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in + +.PHONY: requirements +requirements: ## Update *requirements.txt files if pinned versions do not comply with the dependency specifications in *requirements.in + pip-compile --generate-hashes --output-file requirements/requirements.txt requirements/requirements.in + $(MAKE) sync-requirements + +.PHONY: update-dependency +update-dependency: ## Add or upgrade a package to the latest version that complies with the dependency specifications in requirements.in + pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements/requirements.txt requirements/requirements.in + $(MAKE) sync-requirements + +.PHONY: update-dev-only-dependencies +update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in + 
$(MAKE) sync-requirements + @while read line; do \ + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in; \ + done < 'requirements/dev-requirements.in' .PHONY: check check: lint semgrep test ## Run linter and tests @@ -54,7 +76,7 @@ semgrep-local: .PHONY: help help: ## Print this message and exit. @printf "Makefile for developing and testing the SecureDrop export code.\n" - @printf "Subcommands:\n\n" + @printf "Subcommands:\n" @awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST) \ | sort \ | column -s ':' -t diff --git a/requirements/test-requirements.in b/requirements/dev-requirements.in similarity index 100% rename from requirements/test-requirements.in rename to requirements/dev-requirements.in diff --git a/requirements/test-requirements.txt b/requirements/dev-requirements.txt similarity index 91% rename from requirements/test-requirements.txt rename to requirements/dev-requirements.txt index 9ff54c558..6ecd42395 100644 --- a/requirements/test-requirements.txt +++ b/requirements/dev-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with python 3.7 # To update, run: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/test-requirements.txt requirements/test-requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # attrs==21.2.0 \ --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \ @@ -88,14 +88,14 @@ coverage==5.5 \ flake8==3.9.2 \ --hash=sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b \ --hash=sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907 - # via -r requirements/test-requirements.in + # via -r requirements/dev-requirements.in 
idna==3.2 \ --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 # via requests -importlib-metadata==4.6.3 \ - --hash=sha256:0645585859e9a6689c523927a5032f2ba5919f1f7d0e84bd4533312320de1ff9 \ - --hash=sha256:51c6635429c77cf1ae634c997ff9e53ca3438b495f10a55ba28594dd69764a8b +importlib-metadata==4.7.1 \ + --hash=sha256:9e04bf59076a15a9b6dd9c27806e8fcdf15280ba529c6a8cc3f4d5b4875bdd61 \ + --hash=sha256:c4eb3dec5f697682e383a39701a7de11cd5c02daf8dd93534b69e3e6473f6b1b # via # click # flake8 @@ -131,7 +131,7 @@ pep517==0.11.0 \ pip-tools==6.2.0 \ --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 - # via -r requirements/test-requirements.in + # via -r requirements/dev-requirements.in pluggy==0.13.1 \ --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d @@ -140,7 +140,7 @@ py==1.10.0 \ --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a # via - # -r requirements/test-requirements.in + # -r requirements/dev-requirements.in # pytest pycodestyle==2.7.0 \ --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ @@ -181,24 +181,24 @@ pytest==6.2.4 \ --hash=sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b \ --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890 # via - # -r requirements/test-requirements.in + # -r requirements/dev-requirements.in # pytest-cov # pytest-mock pytest-cov==2.12.1 \ --hash=sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a \ --hash=sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7 - # via -r 
requirements/test-requirements.in + # via -r requirements/dev-requirements.in pytest-mock==3.6.1 \ --hash=sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3 \ --hash=sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62 - # via -r requirements/test-requirements.in + # via -r requirements/dev-requirements.in requests==2.26.0 \ --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 # via semgrep -ruamel.yaml==0.17.10 \ - --hash=sha256:106bc8d6dc6a0ff7c9196a47570432036f41d556b779c6b4e618085f57e39e67 \ - --hash=sha256:ffb9b703853e9e8b7861606dfdab1026cf02505bade0653d1880f4b2db47f815 +ruamel.yaml==0.17.14 \ + --hash=sha256:4185fcfa9e037fea9ffd0bb6172354a03ec98c21e462355d72e068c74e493512 \ + --hash=sha256:b59c548ba6a2a99a97a842db2321c5adf28470d1decb04bdd82ce9535936a2fa # via semgrep ruamel.yaml.clib==0.2.6 \ --hash=sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd \ @@ -223,11 +223,11 @@ ruamel.yaml.clib==0.2.6 \ --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c # via ruamel.yaml -semgrep==0.61.0 \ - --hash=sha256:04256932d2b234a034948d39bd04349cdc878e2868b91d2039038dd5451248ba \ - --hash=sha256:7dc56bf0ecfd66c86e1e130103d3088b82ff2b5a6c30a204314a7a1760790420 \ - --hash=sha256:caa74cce5f61fd18049ac8ac7a4ec2a5e604beb9232d7e26a171b7a2aeeb9d5c - # via -r requirements/test-requirements.in +semgrep==0.63.0 \ + --hash=sha256:777322ce3bbe07a43cc5e8d9ad23691441d3af9785e942134ceb5bdad1eb4902 \ + --hash=sha256:96ca4e2f4439f29f84199aae8a3094480bb624b2466998aa06bd556deff849db \ + --hash=sha256:9fc462d70210a4edfce03d8de3f9bf9f046c58bbbe9a12db6155f756404ef6f5 + # via -r requirements/dev-requirements.in six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ 
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 @@ -242,9 +242,9 @@ tomli==1.2.1 \ --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ --hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 # via pep517 -tqdm==4.62.0 \ - --hash=sha256:3642d483b558eec80d3c831e23953582c34d7e4540db86d9e5ed9dad238dabc6 \ - --hash=sha256:706dea48ee05ba16e936ee91cb3791cd2ea6da348a0e50b46863ff4363ff4340 +tqdm==4.62.2 \ + --hash=sha256:80aead664e6c1672c4ae20dc50e1cdc5e20eeff9b14aa23ecd426375b28be588 \ + --hash=sha256:a4d6d112e507ef98513ac119ead1159d286deab17dffedd96921412c2d236ff5 # via semgrep typing-extensions==3.10.0.0 \ --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ @@ -271,9 +271,9 @@ zipp==3.5.0 \ # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==21.2.3 \ - --hash=sha256:895df6014c2f02f9d278a8ad6e31cdfd312952b4a93c3068d0556964f4490057 \ - --hash=sha256:91e66f2a2702e7d2dcc092ed8c5ebe923e69b9997ea28ba25823943bcd3bf820 +pip==21.2.4 \ + --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ + --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 # via pip-tools setuptools==57.4.0 \ --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ diff --git a/requirements/requirements.in b/requirements/requirements.in new file mode 100644 index 000000000..e69de29bb diff --git a/requirements/requirements.txt b/requirements/requirements.txt index e69de29bb..b78349c53 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -0,0 +1,6 @@ +# +# This file is autogenerated by pip-compile with python 3.7 +# To update, run: +# +# pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in +# From 51b0d24c20800e03aabd457c562a7ec2e354f7d5 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 27 Aug 2021 
12:56:41 -0700 Subject: [PATCH 262/352] update to latest localwheel dependencies Signed-off-by: Allie Crevier --- requirements/build-requirements.txt | 10 +- requirements/dev-requirements.txt | 136 +++++++++++++++------------- requirements/requirements.in | 10 +- requirements/requirements.txt | 40 ++++---- 4 files changed, 106 insertions(+), 90 deletions(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 824f7b9bf..7b00d5770 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -1,10 +1,10 @@ -certifi==2018.10.15 --hash=sha256:2d5538b9d89b3a9cb423b8f08417c657856cab733d560067e864157f71b1c5aa -chardet==3.0.4 --hash=sha256:e5cf39014befb85add77118fdc946f0a3387df7840235ba5d20fd6e3a672410a +certifi==2021.5.30 --hash=sha256:2dee87f82eab7a047280038bacb5bdbc4ee308ae39522ccd9da2ceefe7b6f84e +charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2a536bf2c5b620b1f27d03d74 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc -idna==2.7 --hash=sha256:69bbcd9c42b0add994610a68202532e9b327992b61344cd76e743ee592725f50 +idna==2.8 --hash=sha256:2ac4f96345f5f4fc6ebe59f6254d7bfdb943251ccd3ee7e40fe53739b7c6ef92 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f pyyaml==5.4.1 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 -requests==2.22.0 --hash=sha256:83ddbb326afd5524e7fbca582fd9673103652ea3b0c6601ac5ba1a4501f077c9 +requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 -urllib3==1.25.10 --hash=sha256:c78fdeffd1a01640ea99d35851539a4bc91e46a672989c4d96663e3808121389 +urllib3==1.26.6 --hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 werkzeug==0.16.0 
--hash=sha256:bd05301a84a9bc3b33f178e53446181879744b74f098ed35850ba21125379be1 diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index dc6989228..701f3d1db 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -12,15 +12,15 @@ black==21.7b0 \ --hash=sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116 \ --hash=sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219 # via -r requirements/dev-requirements.in -certifi==2018.10.15 \ - --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ - --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a +certifi==2021.5.30 \ + --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ + --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 # via # -r requirements/requirements.in # requests -chardet==3.0.4 \ - --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ - --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 +charset-normalizer==2.0.4 \ + --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ + --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 # via # -r requirements/requirements.in # requests @@ -92,16 +92,16 @@ furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec # via -r requirements/requirements.in -idna==2.7 \ - --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ - --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 +idna==2.8 \ + --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ + --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c # via # -r requirements/requirements.in # 
requests # yarl -importlib-metadata==4.6.4 \ - --hash=sha256:7b30a78db2922d78a6f47fb30683156a14f3c6aa5cc23f77cc8967e9ab2d002f \ - --hash=sha256:ed5157fef23a4bc4594615a0dd8eba94b2bb36bf2a343fa3d8bb2fa0a62a99d5 +importlib-metadata==4.7.1 \ + --hash=sha256:9e04bf59076a15a9b6dd9c27806e8fcdf15280ba529c6a8cc3f4d5b4875bdd61 \ + --hash=sha256:c4eb3dec5f697682e383a39701a7de11cd5c02daf8dd93534b69e3e6473f6b1b # via # click # flake8 @@ -232,64 +232,72 @@ pyyaml==5.4.1 \ --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ + --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ + --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ + --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ + --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ + --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc + --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ + 
--hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ + --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ + --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 # via # -r requirements/requirements.in # vcrpy -regex==2021.8.21 \ - --hash=sha256:03840a07a402576b8e3a6261f17eb88abd653ad4e18ec46ef10c9a63f8c99ebd \ - --hash=sha256:06ba444bbf7ede3890a912bd4904bb65bf0da8f0d8808b90545481362c978642 \ - --hash=sha256:1f9974826aeeda32a76648fc677e3125ade379869a84aa964b683984a2dea9f1 \ - --hash=sha256:330836ad89ff0be756b58758878409f591d4737b6a8cef26a162e2a4961c3321 \ - --hash=sha256:38600fd58c2996829480de7d034fb2d3a0307110e44dae80b6b4f9b3d2eea529 \ - --hash=sha256:3a195e26df1fbb40ebee75865f9b64ba692a5824ecb91c078cc665b01f7a9a36 \ - --hash=sha256:41acdd6d64cd56f857e271009966c2ffcbd07ec9149ca91f71088574eaa4278a \ - --hash=sha256:45f97ade892ace20252e5ccecdd7515c7df5feeb42c3d2a8b8c55920c3551c30 \ - --hash=sha256:4b0c211c55d4aac4309c3209833c803fada3fc21cdf7b74abedda42a0c9dc3ce \ - --hash=sha256:5d5209c3ba25864b1a57461526ebde31483db295fc6195fdfc4f8355e10f7376 \ - --hash=sha256:615fb5a524cffc91ab4490b69e10ae76c1ccbfa3383ea2fad72e54a85c7d47dd \ - --hash=sha256:61e734c2bcb3742c3f454dfa930ea60ea08f56fd1a0eb52d8cb189a2f6be9586 \ - --hash=sha256:640ccca4d0a6fcc6590f005ecd7b16c3d8f5d52174e4854f96b16f34c39d6cb7 \ - --hash=sha256:6dbd51c3db300ce9d3171f4106da18fe49e7045232630fe3d4c6e37cb2b39ab9 \ - --hash=sha256:71a904da8c9c02aee581f4452a5a988c3003207cb8033db426f29e5b2c0b7aea \ - --hash=sha256:8021dee64899f993f4b5cca323aae65aabc01a546ed44356a0965e29d7893c94 \ - --hash=sha256:8b8d551f1bd60b3e1c59ff55b9e8d74607a5308f66e2916948cafd13480b44a3 \ - --hash=sha256:93f9f720081d97acee38a411e861d4ce84cbc8ea5319bc1f8e38c972c47af49f \ - --hash=sha256:96f0c79a70642dfdf7e6a018ebcbea7ea5205e27d8e019cad442d2acfc9af267 \ - --hash=sha256:9966337353e436e6ba652814b0a957a517feb492a98b8f9d3b6ba76d22301dcc \ - 
--hash=sha256:a34ba9e39f8269fd66ab4f7a802794ffea6d6ac500568ec05b327a862c21ce23 \ - --hash=sha256:a49f85f0a099a5755d0a2cc6fc337e3cb945ad6390ec892332c691ab0a045882 \ - --hash=sha256:a795829dc522227265d72b25d6ee6f6d41eb2105c15912c230097c8f5bfdbcdc \ - --hash=sha256:a89ca4105f8099de349d139d1090bad387fe2b208b717b288699ca26f179acbe \ - --hash=sha256:ac95101736239260189f426b1e361dc1b704513963357dc474beb0f39f5b7759 \ - --hash=sha256:ae87ab669431f611c56e581679db33b9a467f87d7bf197ac384e71e4956b4456 \ - --hash=sha256:b091dcfee169ad8de21b61eb2c3a75f9f0f859f851f64fdaf9320759a3244239 \ - --hash=sha256:b511c6009d50d5c0dd0bab85ed25bc8ad6b6f5611de3a63a59786207e82824bb \ - --hash=sha256:b79dc2b2e313565416c1e62807c7c25c67a6ff0a0f8d83a318df464555b65948 \ - --hash=sha256:bca14dfcfd9aae06d7d8d7e105539bd77d39d06caaae57a1ce945670bae744e0 \ - --hash=sha256:c835c30f3af5c63a80917b72115e1defb83de99c73bc727bddd979a3b449e183 \ - --hash=sha256:ccd721f1d4fc42b541b633d6e339018a08dd0290dc67269df79552843a06ca92 \ - --hash=sha256:d6c2b1d78ceceb6741d703508cd0e9197b34f6bf6864dab30f940f8886e04ade \ - --hash=sha256:d6ec4ae13760ceda023b2e5ef1f9bc0b21e4b0830458db143794a117fdbdc044 \ - --hash=sha256:d8b623fc429a38a881ab2d9a56ef30e8ea20c72a891c193f5ebbddc016e083ee \ - --hash=sha256:ea9753d64cba6f226947c318a923dadaf1e21cd8db02f71652405263daa1f033 \ - --hash=sha256:ebbceefbffae118ab954d3cd6bf718f5790db66152f95202ebc231d58ad4e2c2 \ - --hash=sha256:ecb6e7c45f9cd199c10ec35262b53b2247fb9a408803ed00ee5bb2b54aa626f5 \ - --hash=sha256:ef9326c64349e2d718373415814e754183057ebc092261387a2c2f732d9172b2 \ - --hash=sha256:f93a9d8804f4cec9da6c26c8cfae2c777028b4fdd9f49de0302e26e00bb86504 \ - --hash=sha256:faf08b0341828f6a29b8f7dd94d5cf8cc7c39bfc3e67b78514c54b494b66915a +regex==2021.8.28 \ + --hash=sha256:04f6b9749e335bb0d2f68c707f23bb1773c3fb6ecd10edf0f04df12a8920d468 \ + --hash=sha256:08d74bfaa4c7731b8dac0a992c63673a2782758f7cfad34cf9c1b9184f911354 \ + 
--hash=sha256:0fc1f8f06977c2d4f5e3d3f0d4a08089be783973fc6b6e278bde01f0544ff308 \ + --hash=sha256:121f4b3185feaade3f85f70294aef3f777199e9b5c0c0245c774ae884b110a2d \ + --hash=sha256:1413b5022ed6ac0d504ba425ef02549a57d0f4276de58e3ab7e82437892704fc \ + --hash=sha256:1743345e30917e8c574f273f51679c294effba6ad372db1967852f12c76759d8 \ + --hash=sha256:28fc475f560d8f67cc8767b94db4c9440210f6958495aeae70fac8faec631797 \ + --hash=sha256:31a99a4796bf5aefc8351e98507b09e1b09115574f7c9dbb9cf2111f7220d2e2 \ + --hash=sha256:328a1fad67445550b982caa2a2a850da5989fd6595e858f02d04636e7f8b0b13 \ + --hash=sha256:473858730ef6d6ff7f7d5f19452184cd0caa062a20047f6d6f3e135a4648865d \ + --hash=sha256:4cde065ab33bcaab774d84096fae266d9301d1a2f5519d7bd58fc55274afbf7a \ + --hash=sha256:5f6a808044faae658f546dd5f525e921de9fa409de7a5570865467f03a626fc0 \ + --hash=sha256:610b690b406653c84b7cb6091facb3033500ee81089867ee7d59e675f9ca2b73 \ + --hash=sha256:66256b6391c057305e5ae9209941ef63c33a476b73772ca967d4a2df70520ec1 \ + --hash=sha256:6eebf512aa90751d5ef6a7c2ac9d60113f32e86e5687326a50d7686e309f66ed \ + --hash=sha256:79aef6b5cd41feff359acaf98e040844613ff5298d0d19c455b3d9ae0bc8c35a \ + --hash=sha256:808ee5834e06f57978da3e003ad9d6292de69d2bf6263662a1a8ae30788e080b \ + --hash=sha256:8e44769068d33e0ea6ccdf4b84d80c5afffe5207aa4d1881a629cf0ef3ec398f \ + --hash=sha256:999ad08220467b6ad4bd3dd34e65329dd5d0df9b31e47106105e407954965256 \ + --hash=sha256:9b006628fe43aa69259ec04ca258d88ed19b64791693df59c422b607b6ece8bb \ + --hash=sha256:9d05ad5367c90814099000442b2125535e9d77581855b9bee8780f1b41f2b1a2 \ + --hash=sha256:a577a21de2ef8059b58f79ff76a4da81c45a75fe0bfb09bc8b7bb4293fa18983 \ + --hash=sha256:a617593aeacc7a691cc4af4a4410031654f2909053bd8c8e7db837f179a630eb \ + --hash=sha256:abb48494d88e8a82601af905143e0de838c776c1241d92021e9256d5515b3645 \ + --hash=sha256:ac88856a8cbccfc14f1b2d0b829af354cc1743cb375e7f04251ae73b2af6adf8 \ + --hash=sha256:b4c220a1fe0d2c622493b0a1fd48f8f991998fb447d3cd368033a4b86cf1127a \ + 
--hash=sha256:b844fb09bd9936ed158ff9df0ab601e2045b316b17aa8b931857365ea8586906 \ + --hash=sha256:bdc178caebd0f338d57ae445ef8e9b737ddf8fbc3ea187603f65aec5b041248f \ + --hash=sha256:c206587c83e795d417ed3adc8453a791f6d36b67c81416676cad053b4104152c \ + --hash=sha256:c61dcc1cf9fd165127a2853e2c31eb4fb961a4f26b394ac9fe5669c7a6592892 \ + --hash=sha256:c7cb4c512d2d3b0870e00fbbac2f291d4b4bf2634d59a31176a87afe2777c6f0 \ + --hash=sha256:d4a332404baa6665b54e5d283b4262f41f2103c255897084ec8f5487ce7b9e8e \ + --hash=sha256:d5111d4c843d80202e62b4fdbb4920db1dcee4f9366d6b03294f45ed7b18b42e \ + --hash=sha256:e1e8406b895aba6caa63d9fd1b6b1700d7e4825f78ccb1e5260551d168db38ed \ + --hash=sha256:e8690ed94481f219a7a967c118abaf71ccc440f69acd583cab721b90eeedb77c \ + --hash=sha256:ed283ab3a01d8b53de3a05bfdf4473ae24e43caee7dcb5584e86f3f3e5ab4374 \ + --hash=sha256:ed4b50355b066796dacdd1cf538f2ce57275d001838f9b132fab80b75e8c84dd \ + --hash=sha256:ee329d0387b5b41a5dddbb6243a21cb7896587a651bebb957e2d2bb8b63c0791 \ + --hash=sha256:f3bf1bc02bc421047bfec3343729c4bbbea42605bcfd6d6bfe2c07ade8b12d2a \ + --hash=sha256:f585cbbeecb35f35609edccb95efd95a3e35824cd7752b586503f7e6087303f1 \ + --hash=sha256:f60667673ff9c249709160529ab39667d1ae9fd38634e006bec95611f632e759 # via black -requests==2.22.0 \ - --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ - --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 +requests==2.26.0 \ + --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ + --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 # via -r requirements/requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ @@ -344,9 +352,9 @@ typed-ast==1.4.3 \ # via # black # mypy -types-pyyaml==5.4.6 \ - --hash=sha256:745dcb4b1522423026bcc83abb9925fba747f1e8602d902f71a4058f9e7fb662 \ - 
--hash=sha256:96f8d3d96aa1a18a465e8f6a220e02cff2f52632314845a364ecbacb0aea6e30 +types-pyyaml==5.4.8 \ + --hash=sha256:e084cfc878c8c8b1d4e89011ae5d65ea962fc5745c3d08b931df2aaaa665de96 \ + --hash=sha256:f00ad122a78a7d41b0ad4e3fa481751c2b9bc3f34b8f71ea35232211057dd50f # via -r requirements/dev-requirements.in types-requests==2.25.6 \ --hash=sha256:a5a305b43ea57bf64d6731f89816946a405b591eff6de28d4c0fd58422cee779 \ @@ -361,9 +369,9 @@ typing-extensions==3.10.0.0 \ # importlib-metadata # mypy # yarl -urllib3==1.25.10 \ - --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ - --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 +urllib3==1.26.6 \ + --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ + --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f # via # -r requirements/requirements.in # requests diff --git a/requirements/requirements.in b/requirements/requirements.in index 8aa528ea4..bf59b9850 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -1,10 +1,10 @@ -certifi==2018.10.15 -chardet==3.0.4 +certifi>=2021.5.30 +charset-normalizer>=2.0.4 furl==2.0.0 -idna==2.7 +idna>=2.7 orderedmultidict==1.0 PyYAML==5.4.1 six==1.11.0 -requests==2.22.0 -urllib3>=1.25.10 +requests>=2.26.0 +urllib3>=1.26.5 Werkzeug==0.16.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 9e391e742..5f52ef94d 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -4,15 +4,15 @@ # # pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in # -certifi==2018.10.15 \ - --hash=sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c \ - --hash=sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a +certifi==2021.5.30 \ + --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ + 
--hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 # via # -r requirements/requirements.in # requests -chardet==3.0.4 \ - --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ - --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 +charset-normalizer==2.0.4 \ + --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ + --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 # via # -r requirements/requirements.in # requests @@ -20,9 +20,9 @@ furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec # via -r requirements/requirements.in -idna==2.7 \ - --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \ - --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16 +idna==2.8 \ + --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ + --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c # via # -r requirements/requirements.in # requests @@ -45,19 +45,27 @@ pyyaml==5.4.1 \ --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ + --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ + --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ + --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ 
--hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ + --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ + --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc + --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ + --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ + --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ + --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 # via -r requirements/requirements.in -requests==2.22.0 \ - --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ - --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 +requests==2.26.0 \ + --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ + --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 # via -r requirements/requirements.in six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ @@ -66,9 +74,9 @@ six==1.11.0 \ # -r requirements/requirements.in # furl # orderedmultidict -urllib3==1.25.10 \ - --hash=sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ - --hash=sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 +urllib3==1.26.6 \ + --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ + --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f # via # -r 
requirements/requirements.in # requests From 947857ad5a21b858f66f4179b5225796f39f675d Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 10 Sep 2021 15:31:15 -0700 Subject: [PATCH 263/352] update to idna 3.2 Signed-off-by: Allie Crevier --- requirements/build-requirements.txt | 2 +- requirements/dev-requirements.txt | 46 ++++++++++++++--------------- requirements/requirements.in | 6 ++-- requirements/requirements.txt | 6 ++-- 4 files changed, 30 insertions(+), 30 deletions(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 7b00d5770..2479943ae 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -1,7 +1,7 @@ certifi==2021.5.30 --hash=sha256:2dee87f82eab7a047280038bacb5bdbc4ee308ae39522ccd9da2ceefe7b6f84e charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2a536bf2c5b620b1f27d03d74 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc -idna==2.8 --hash=sha256:2ac4f96345f5f4fc6ebe59f6254d7bfdb943251ccd3ee7e40fe53739b7c6ef92 +idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f pyyaml==5.4.1 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 701f3d1db..558ba47ae 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -4,13 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # -appdirs==1.4.4 \ - --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ - 
--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 - # via black -black==21.7b0 \ - --hash=sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116 \ - --hash=sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219 +black==21.8b0 \ + --hash=sha256:2a0f9a8c2b2a60dbcf1ccb058842fb22bdbbcb2f32c6cc02d9578f90b92ce8b7 \ + --hash=sha256:570608d28aa3af1792b98c4a337dbac6367877b47b12b88ab42095cfc1a627c2 # via -r requirements/dev-requirements.in certifi==2021.5.30 \ --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ @@ -92,16 +88,16 @@ furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec # via -r requirements/requirements.in -idna==2.8 \ - --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ - --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c +idna==3.2 \ + --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ + --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 # via # -r requirements/requirements.in # requests # yarl -importlib-metadata==4.7.1 \ - --hash=sha256:9e04bf59076a15a9b6dd9c27806e8fcdf15280ba529c6a8cc3f4d5b4875bdd61 \ - --hash=sha256:c4eb3dec5f697682e383a39701a7de11cd5c02daf8dd93534b69e3e6473f6b1b +importlib-metadata==4.8.1 \ + --hash=sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15 \ + --hash=sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1 # via # click # flake8 @@ -207,6 +203,10 @@ pip-tools==6.2.0 \ --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 # via -r requirements/dev-requirements.in +platformdirs==2.3.0 \ + --hash=sha256:15b056538719b1c94bdaccb29e5f81879c7f7f0f4a153f46086d155dffcd4f0f \ 
+ --hash=sha256:8003ac87717ae2c7ee1ea5a84a1a61e87f3fbd16eb5aadba194ea30a9019f648 + # via black pycodestyle==2.7.0 \ --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef @@ -352,18 +352,18 @@ typed-ast==1.4.3 \ # via # black # mypy -types-pyyaml==5.4.8 \ - --hash=sha256:e084cfc878c8c8b1d4e89011ae5d65ea962fc5745c3d08b931df2aaaa665de96 \ - --hash=sha256:f00ad122a78a7d41b0ad4e3fa481751c2b9bc3f34b8f71ea35232211057dd50f +types-pyyaml==5.4.10 \ + --hash=sha256:1d9e431e9f1f78a65ea957c558535a3b15ad67ea4912bce48a6c1b613dcf81ad \ + --hash=sha256:f1d1357168988e45fa20c65aecb3911462246a84809015dd889ebf8b1db74124 # via -r requirements/dev-requirements.in types-requests==2.25.6 \ --hash=sha256:a5a305b43ea57bf64d6731f89816946a405b591eff6de28d4c0fd58422cee779 \ --hash=sha256:e21541c0f55c066c491a639309159556dd8c5833e49fcde929c4c47bdb0002ee # via -r requirements/dev-requirements.in -typing-extensions==3.10.0.0 \ - --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ - --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ - --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 +typing-extensions==3.10.0.2 \ + --hash=sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e \ + --hash=sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7 \ + --hash=sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34 # via # black # importlib-metadata @@ -445,7 +445,7 @@ pip==21.2.4 \ --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 # via pip-tools -setuptools==57.4.0 \ - --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ - --hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6 +setuptools==58.0.4 \ + 
--hash=sha256:69cc739bc2662098a68a9bc575cd974a57969e70c1d58ade89d104ab73d79770 \ + --hash=sha256:f10059f0152e0b7fb6b2edd77bcb1ecd4c9ed7048a826eb2d79f72fd2e6e237b # via pip-tools diff --git a/requirements/requirements.in b/requirements/requirements.in index bf59b9850..cede6d809 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -3,8 +3,8 @@ charset-normalizer>=2.0.4 furl==2.0.0 idna>=2.7 orderedmultidict==1.0 -PyYAML==5.4.1 -six==1.11.0 +pyyaml==5.4.1 requests>=2.26.0 +six==1.11.0 urllib3>=1.26.5 -Werkzeug==0.16.0 +werkzeug==0.16.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 5f52ef94d..e24267cbe 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -20,9 +20,9 @@ furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec # via -r requirements/requirements.in -idna==2.8 \ - --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ - --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c +idna==3.2 \ + --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ + --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 # via # -r requirements/requirements.in # requests From c84c4fdc087733c738c5833fce684d9796431c84 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Fri, 10 Sep 2021 16:21:23 -0700 Subject: [PATCH 264/352] update all dev-only dependencies --- requirements/dev-requirements.in | 1 - requirements/dev-requirements.txt | 50 +++++++++++++++---------------- 2 files changed, 24 insertions(+), 27 deletions(-) diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in index e0bfaf54a..e7a7d9159 100644 --- a/requirements/dev-requirements.in +++ b/requirements/dev-requirements.in @@ -1,6 +1,5 @@ flake8 pip-tools -py>=1.9.0 pytest pytest-cov 
pytest-mock diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 6ecd42395..573060ac3 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -93,9 +93,9 @@ idna==3.2 \ --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 # via requests -importlib-metadata==4.7.1 \ - --hash=sha256:9e04bf59076a15a9b6dd9c27806e8fcdf15280ba529c6a8cc3f4d5b4875bdd61 \ - --hash=sha256:c4eb3dec5f697682e383a39701a7de11cd5c02daf8dd93534b69e3e6473f6b1b +importlib-metadata==4.8.1 \ + --hash=sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15 \ + --hash=sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1 # via # click # flake8 @@ -132,16 +132,14 @@ pip-tools==6.2.0 \ --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 # via -r requirements/dev-requirements.in -pluggy==0.13.1 \ - --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ - --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d +pluggy==1.0.0 \ + --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ + --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 # via pytest py==1.10.0 \ --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a - # via - # -r requirements/dev-requirements.in - # pytest + # via pytest pycodestyle==2.7.0 \ --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef @@ -177,9 +175,9 @@ pyrsistent==0.18.0 \ --hash=sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b \ 
--hash=sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72 # via jsonschema -pytest==6.2.4 \ - --hash=sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b \ - --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890 +pytest==6.2.5 \ + --hash=sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89 \ + --hash=sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134 # via # -r requirements/dev-requirements.in # pytest-cov @@ -196,9 +194,9 @@ requests==2.26.0 \ --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 # via semgrep -ruamel.yaml==0.17.14 \ - --hash=sha256:4185fcfa9e037fea9ffd0bb6172354a03ec98c21e462355d72e068c74e493512 \ - --hash=sha256:b59c548ba6a2a99a97a842db2321c5adf28470d1decb04bdd82ce9535936a2fa +ruamel.yaml==0.17.16 \ + --hash=sha256:1a771fc92d3823682b7f0893ad56cb5a5c87c48e62b5399d6f42c8759a583b33 \ + --hash=sha256:ea21da1198c4b41b8e7a259301cc9710d3b972bf8ba52f06218478e6802dd1f1 # via semgrep ruamel.yaml.clib==0.2.6 \ --hash=sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd \ @@ -223,10 +221,10 @@ ruamel.yaml.clib==0.2.6 \ --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c # via ruamel.yaml -semgrep==0.63.0 \ - --hash=sha256:777322ce3bbe07a43cc5e8d9ad23691441d3af9785e942134ceb5bdad1eb4902 \ - --hash=sha256:96ca4e2f4439f29f84199aae8a3094480bb624b2466998aa06bd556deff849db \ - --hash=sha256:9fc462d70210a4edfce03d8de3f9bf9f046c58bbbe9a12db6155f756404ef6f5 +semgrep==0.64.0 \ + --hash=sha256:06b5e9db524b3e5d185b2eb04531936909a803726367634041f617f426e8d30e \ + --hash=sha256:439538d8de173b1489b68625a1f9627db7215307acd37dbe35602dc43a143372 \ + --hash=sha256:7b7b16bb7fc9f92054231984b975da1bbb5e469ad51ec5d3b3db32c421569d43 # via -r 
requirements/dev-requirements.in six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ @@ -246,10 +244,10 @@ tqdm==4.62.2 \ --hash=sha256:80aead664e6c1672c4ae20dc50e1cdc5e20eeff9b14aa23ecd426375b28be588 \ --hash=sha256:a4d6d112e507ef98513ac119ead1159d286deab17dffedd96921412c2d236ff5 # via semgrep -typing-extensions==3.10.0.0 \ - --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ - --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ - --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 +typing-extensions==3.10.0.2 \ + --hash=sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e \ + --hash=sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7 \ + --hash=sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34 # via importlib-metadata urllib3==1.26.6 \ --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ @@ -275,9 +273,9 @@ pip==21.2.4 \ --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 # via pip-tools -setuptools==57.4.0 \ - --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ - --hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6 +setuptools==58.0.4 \ + --hash=sha256:69cc739bc2662098a68a9bc575cd974a57969e70c1d58ade89d104ab73d79770 \ + --hash=sha256:f10059f0152e0b7fb6b2edd77bcb1ecd4c9ed7048a826eb2d79f72fd2e6e237b # via # jsonschema # pip-tools From bdbf0d484ee95c1351ea5f30bb9856a1fc95cba8 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Wed, 27 Oct 2021 12:58:59 -0700 Subject: [PATCH 265/352] update to werkzeug 2.0.2 to improve security of debugger cookies Signed-off-by: Allie Crevier --- requirements/build-requirements.txt | 2 +- requirements/dev-requirements.txt | 6 +++--- 
requirements/requirements.in | 2 +- requirements/requirements.txt | 6 +++--- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 2479943ae..be41d42ce 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -7,4 +7,4 @@ pyyaml==5.4.1 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf94000 requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.26.6 --hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 -werkzeug==0.16.0 --hash=sha256:bd05301a84a9bc3b33f178e53446181879744b74f098ed35850ba21125379be1 +werkzeug==2.0.2 --hash=sha256:55e8ebd03bf69dc51cd986ba7bf3e25f549bb27a22de9d6bdd15c855ba8f1f99 diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 558ba47ae..b1aeb1a56 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -379,9 +379,9 @@ vcrpy==4.1.1 \ --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \ --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 # via -r requirements/dev-requirements.in -werkzeug==0.16.0 \ - --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ - --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 +werkzeug==2.0.2 \ + --hash=sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f \ + --hash=sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a # via -r requirements/requirements.in wheel==0.37.0 \ --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ diff --git a/requirements/requirements.in b/requirements/requirements.in index cede6d809..9354373d7 100644 --- a/requirements/requirements.in +++ 
b/requirements/requirements.in @@ -7,4 +7,4 @@ pyyaml==5.4.1 requests>=2.26.0 six==1.11.0 urllib3>=1.26.5 -werkzeug==0.16.0 +werkzeug>=0.16.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index e24267cbe..cd33d0ed7 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -80,7 +80,7 @@ urllib3==1.26.6 \ # via # -r requirements/requirements.in # requests -werkzeug==0.16.0 \ - --hash=sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7 \ - --hash=sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4 +werkzeug==2.0.2 \ + --hash=sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f \ + --hash=sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a # via -r requirements/requirements.in From e17c9d5aa5e968c727a9ae5d10d3d0bd0db1a1ec Mon Sep 17 00:00:00 2001 From: ro Date: Wed, 19 Jan 2022 16:14:46 -0500 Subject: [PATCH 266/352] Update dev requirements --- requirements/dev-requirements.txt | 359 +++++++++++++++--------------- 1 file changed, 183 insertions(+), 176 deletions(-) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 573060ac3..ea7d8455f 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -4,98 +4,100 @@ # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # -attrs==21.2.0 \ - --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \ - --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb +attrs==21.4.0 \ + --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ + --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd # via # jsonschema # pytest # semgrep -bracex==2.1.1 \ - --hash=sha256:01f715cd0ed7a622ec8b32322e715813f7574de531f09b70f6f3b2c10f682425 \ - 
--hash=sha256:64e2a6d14de9c8e022cf40539ac8468ba7c4b99550a2b05fc87fd20e392e568f +bracex==2.2.1 \ + --hash=sha256:096c4b788bf492f7af4e90ef8b5bcbfb99759ae3415ea1b83c9d29a5ed8f9a94 \ + --hash=sha256:1c8d1296e00ad9a91030ccb4c291f9e4dc7c054f12c707ba3c5ff3e9a81bcd21 # via wcmatch -certifi==2021.5.30 \ - --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ - --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 +certifi==2021.10.8 \ + --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \ + --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 # via requests -charset-normalizer==2.0.4 \ - --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ - --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 +charset-normalizer==2.0.10 \ + --hash=sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd \ + --hash=sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455 # via requests -click==8.0.1 \ - --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ - --hash=sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6 - # via pip-tools +click==8.0.3 \ + --hash=sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3 \ + --hash=sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b + # via + # click-option-group + # pip-tools + # semgrep +click-option-group==0.5.3 \ + --hash=sha256:9653a2297357335d7325a1827e71ac1245d91c97d959346a7decabd4a52d5354 \ + --hash=sha256:a6e924f3c46b657feb5b72679f7e930f8e5b224b766ab35c91ae4019b4e0615e + # via semgrep colorama==0.4.4 \ --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 # via semgrep -coverage==5.5 \ - --hash=sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c \ 
- --hash=sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6 \ - --hash=sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45 \ - --hash=sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a \ - --hash=sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03 \ - --hash=sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529 \ - --hash=sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a \ - --hash=sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a \ - --hash=sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2 \ - --hash=sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6 \ - --hash=sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759 \ - --hash=sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53 \ - --hash=sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a \ - --hash=sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4 \ - --hash=sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff \ - --hash=sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502 \ - --hash=sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793 \ - --hash=sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb \ - --hash=sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905 \ - --hash=sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821 \ - --hash=sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b \ - --hash=sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81 \ - --hash=sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0 \ - --hash=sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b \ - --hash=sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3 \ - 
--hash=sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184 \ - --hash=sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701 \ - --hash=sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a \ - --hash=sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82 \ - --hash=sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638 \ - --hash=sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5 \ - --hash=sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083 \ - --hash=sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6 \ - --hash=sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90 \ - --hash=sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465 \ - --hash=sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a \ - --hash=sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3 \ - --hash=sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e \ - --hash=sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066 \ - --hash=sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf \ - --hash=sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b \ - --hash=sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae \ - --hash=sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669 \ - --hash=sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873 \ - --hash=sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b \ - --hash=sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6 \ - --hash=sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb \ - --hash=sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160 \ - --hash=sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c \ - 
--hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ - --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ - --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 +coverage[toml]==6.2 \ + --hash=sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0 \ + --hash=sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd \ + --hash=sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884 \ + --hash=sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48 \ + --hash=sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76 \ + --hash=sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0 \ + --hash=sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64 \ + --hash=sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685 \ + --hash=sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47 \ + --hash=sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d \ + --hash=sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840 \ + --hash=sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f \ + --hash=sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971 \ + --hash=sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c \ + --hash=sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a \ + --hash=sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de \ + --hash=sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17 \ + --hash=sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4 \ + --hash=sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521 \ + --hash=sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57 \ + 
--hash=sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b \ + --hash=sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282 \ + --hash=sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644 \ + --hash=sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475 \ + --hash=sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d \ + --hash=sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da \ + --hash=sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953 \ + --hash=sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2 \ + --hash=sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e \ + --hash=sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c \ + --hash=sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc \ + --hash=sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64 \ + --hash=sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74 \ + --hash=sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617 \ + --hash=sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3 \ + --hash=sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d \ + --hash=sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa \ + --hash=sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739 \ + --hash=sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8 \ + --hash=sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8 \ + --hash=sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781 \ + --hash=sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58 \ + --hash=sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9 \ + --hash=sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c \ + 
--hash=sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd \ + --hash=sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e \ + --hash=sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49 # via pytest-cov -flake8==3.9.2 \ - --hash=sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b \ - --hash=sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907 +flake8==4.0.1 \ + --hash=sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d \ + --hash=sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d # via -r requirements/dev-requirements.in -idna==3.2 \ - --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ - --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d # via requests -importlib-metadata==4.8.1 \ - --hash=sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15 \ - --hash=sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1 +importlib-metadata==4.2.0 \ + --hash=sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b \ + --hash=sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31 # via # click # flake8 @@ -103,77 +105,81 @@ importlib-metadata==4.8.1 \ # pep517 # pluggy # pytest +importlib-resources==5.4.0 \ + --hash=sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45 \ + --hash=sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b + # via jsonschema iniconfig==1.1.1 \ --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 # via pytest -jsonschema==3.2.0 \ - 
--hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ - --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a +jsonschema==4.4.0 \ + --hash=sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83 \ + --hash=sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823 # via semgrep mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f # via flake8 -packaging==21.0 \ - --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \ - --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 # via # pytest # semgrep -peewee==3.14.4 \ - --hash=sha256:9e356b327c2eaec6dd42ecea6f4ddded025793dba906a3d065a0452e726c51a2 +peewee==3.14.8 \ + --hash=sha256:01bd7f734defb08d7a3346a0c0ca7011bc8d0d685934ec0e001b3371d522ec53 # via semgrep -pep517==0.11.0 \ - --hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ - --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 +pep517==0.12.0 \ + --hash=sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0 \ + --hash=sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161 # via pip-tools -pip-tools==6.2.0 \ - --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ - --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 +pip-tools==6.4.0 \ + --hash=sha256:65553a15b1ba34be5e43889345062e38fb9b219ffa23b084ca0d4c4039b6f53b \ + --hash=sha256:bb2c3272bc229b4a6d25230ebe255823aba1aa466a0d698c48ab7eb5ab7efdc9 # via -r requirements/dev-requirements.in pluggy==1.0.0 \ 
--hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 # via pytest -py==1.10.0 \ - --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ - --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 # via pytest -pycodestyle==2.7.0 \ - --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ - --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef +pycodestyle==2.8.0 \ + --hash=sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20 \ + --hash=sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f # via flake8 -pyflakes==2.3.1 \ - --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ - --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db +pyflakes==2.4.0 \ + --hash=sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c \ + --hash=sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e # via flake8 -pyparsing==2.4.7 \ - --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ - --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b +pyparsing==3.0.6 \ + --hash=sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4 \ + --hash=sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81 # via packaging -pyrsistent==0.18.0 \ - --hash=sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2 \ - --hash=sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7 \ - --hash=sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea \ - 
--hash=sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426 \ - --hash=sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710 \ - --hash=sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1 \ - --hash=sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396 \ - --hash=sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2 \ - --hash=sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680 \ - --hash=sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35 \ - --hash=sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427 \ - --hash=sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b \ - --hash=sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b \ - --hash=sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f \ - --hash=sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef \ - --hash=sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c \ - --hash=sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4 \ - --hash=sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d \ - --hash=sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78 \ - --hash=sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b \ - --hash=sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72 +pyrsistent==0.18.1 \ + --hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \ + --hash=sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc \ + --hash=sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e \ + --hash=sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26 \ + --hash=sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec \ + 
--hash=sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286 \ + --hash=sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045 \ + --hash=sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec \ + --hash=sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8 \ + --hash=sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c \ + --hash=sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca \ + --hash=sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22 \ + --hash=sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a \ + --hash=sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96 \ + --hash=sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc \ + --hash=sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1 \ + --hash=sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07 \ + --hash=sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6 \ + --hash=sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b \ + --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \ + --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6 # via jsonschema pytest==6.2.5 \ --hash=sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89 \ @@ -182,29 +188,33 @@ pytest==6.2.5 \ # -r requirements/dev-requirements.in # pytest-cov # pytest-mock -pytest-cov==2.12.1 \ - --hash=sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a \ - --hash=sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7 +pytest-cov==3.0.0 \ + --hash=sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6 \ + --hash=sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470 # via -r requirements/dev-requirements.in pytest-mock==3.6.1 \ 
--hash=sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3 \ --hash=sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62 # via -r requirements/dev-requirements.in -requests==2.26.0 \ - --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ - --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 +requests==2.27.1 \ + --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \ + --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d # via semgrep -ruamel.yaml==0.17.16 \ - --hash=sha256:1a771fc92d3823682b7f0893ad56cb5a5c87c48e62b5399d6f42c8759a583b33 \ - --hash=sha256:ea21da1198c4b41b8e7a259301cc9710d3b972bf8ba52f06218478e6802dd1f1 +ruamel.yaml==0.17.20 \ + --hash=sha256:4b8a33c1efb2b443a93fcaafcfa4d2e445f8e8c29c528d9f5cdafb7cc9e4004c \ + --hash=sha256:810eef9c46523a3f77479c66267a4708255ebe806a2d540078408c2227f011af # via semgrep ruamel.yaml.clib==0.2.6 \ --hash=sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd \ + --hash=sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee \ --hash=sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0 \ + --hash=sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7 \ --hash=sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277 \ --hash=sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104 \ --hash=sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd \ + --hash=sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0 \ --hash=sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78 \ + --hash=sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de \ --hash=sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99 \ --hash=sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527 \ 
--hash=sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84 \ @@ -221,62 +231,59 @@ ruamel.yaml.clib==0.2.6 \ --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c # via ruamel.yaml -semgrep==0.64.0 \ - --hash=sha256:06b5e9db524b3e5d185b2eb04531936909a803726367634041f617f426e8d30e \ - --hash=sha256:439538d8de173b1489b68625a1f9627db7215307acd37dbe35602dc43a143372 \ - --hash=sha256:7b7b16bb7fc9f92054231984b975da1bbb5e469ad51ec5d3b3db32c421569d43 +semgrep==0.78.0 \ + --hash=sha256:0ae8f6fb9b9d5e9920d25662dd92dd0c20d32752b8cd154c1bb0ceca37b8e380 \ + --hash=sha256:5abb5ecc7a536614a6ee95b2dee762fe50f0cf63a41cc23ef21798c7b94fe1af \ + --hash=sha256:7f58e7a795f8d6d419f6ae13d44565c5075ab08e07054c24832752dd4f20d860 # via -r requirements/dev-requirements.in -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via jsonschema toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via pytest +tomli==2.0.0 \ + --hash=sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224 \ + --hash=sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1 # via - # pytest - # pytest-cov -tomli==1.2.1 \ - --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ - --hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 - # via pep517 -tqdm==4.62.2 \ - --hash=sha256:80aead664e6c1672c4ae20dc50e1cdc5e20eeff9b14aa23ecd426375b28be588 \ - --hash=sha256:a4d6d112e507ef98513ac119ead1159d286deab17dffedd96921412c2d236ff5 + # coverage + # pep517 +tqdm==4.62.3 \ + --hash=sha256:8dd278a422499cd6b727e6ae4061c40b48fce8b76d1ccbf5d34fca9b7f925b0c \ + 
--hash=sha256:d359de7217506c9851b7869f3708d8ee53ed70a1b8edbba4dbcb47442592920d # via semgrep -typing-extensions==3.10.0.2 \ - --hash=sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e \ - --hash=sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7 \ - --hash=sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34 - # via importlib-metadata -urllib3==1.26.6 \ - --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ - --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f +typing-extensions==4.0.1 \ + --hash=sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e \ + --hash=sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b + # via + # importlib-metadata + # jsonschema +urllib3==1.26.8 \ + --hash=sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed \ + --hash=sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c # via requests -wcmatch==8.2 \ - --hash=sha256:4d54ddb506c90b5a5bba3a96a1cfb0bb07127909e19046a71d689ddfb18c3617 \ - --hash=sha256:9146b1ab9354e0797ef6ef69bc89cb32cb9f46d1b9eeef69c559aeec8f3bffb6 +wcmatch==8.3 \ + --hash=sha256:371072912398af61d1e4e78609e18801c6faecd3cb36c54c82556a60abc965db \ + --hash=sha256:7141d2c85314253f16b38cb3d6cc0fb612918d407e1df3ccc2be7c86cc259c22 # via semgrep -wheel==0.37.0 \ - --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ - --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 # via pip-tools -zipp==3.5.0 \ - --hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \ - --hash=sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4 +zipp==3.7.0 \ + 
--hash=sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d \ + --hash=sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375 # via # importlib-metadata + # importlib-resources # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==21.2.4 \ - --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ - --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 +pip==21.3.1 \ + --hash=sha256:deaf32dcd9ab821e359cd8330786bcd077604b5c5730c0b096eda46f95c24a2d \ + --hash=sha256:fd11ba3d0fdb4c07fbc5ecbba0b1b719809420f25038f8ee3cd913d3faa3033a # via pip-tools -setuptools==58.0.4 \ - --hash=sha256:69cc739bc2662098a68a9bc575cd974a57969e70c1d58ade89d104ab73d79770 \ - --hash=sha256:f10059f0152e0b7fb6b2edd77bcb1ecd4c9ed7048a826eb2d79f72fd2e6e237b +setuptools==60.5.0 \ + --hash=sha256:2404879cda71495fc4d5cbc445ed52fdaddf352b36e40be8dcc63147cb4edabe \ + --hash=sha256:68eb94073fc486091447fcb0501efd6560a0e5a1839ba249e5ff3c4c93f05f90 # via - # jsonschema # pip-tools # semgrep From 2ad05528b3abca95140bc3bee73e49ad0e3e0592 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Fri, 28 Jan 2022 14:07:04 -0800 Subject: [PATCH 267/352] Fix unofficial_status test on Python 3.9+ HTTP 418 was added to http.HTTPStatus in Python 3.9, so it no longer fails with "unspecified server error". Use status code 499 as something that should never really be added in the near future. 
--- fixtures/proxy_unofficial_status.yaml | 2 +- tests/test_proxy.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/fixtures/proxy_unofficial_status.yaml b/fixtures/proxy_unofficial_status.yaml index fa62ef962..e0b7a193d 100644 --- a/fixtures/proxy_unofficial_status.yaml +++ b/fixtures/proxy_unofficial_status.yaml @@ -15,6 +15,6 @@ interactions: Server: - BaseHTTP/0.6 Python/3.7.3 status: - code: 418 + code: 499 message: '' version: 1 diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 7a035cfd3..e09509859 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -227,7 +227,7 @@ def test_unofficial_status(self): p = proxy.Proxy(self.conf_path, req) p.proxy() - self.assertEqual(p.res.status, 418) + self.assertEqual(p.res.status, 499) self.assertIn("application/json", p.res.headers["Content-Type"]) body = json.loads(p.res.body) self.assertEqual(body["error"], "unspecified server error") From 3f703d4274fa4c6d25442826f4bb45534a6ecb0d Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Fri, 28 Jan 2022 14:23:44 -0800 Subject: [PATCH 268/352] Reject JSON with duplicate keys Informational finding TOB-SDW-014 from the 2020 SecureDrop Workstation audit recommended explicitly checking for and rejecting duplicate JSON keys to prevent against JSON injection attacks. The new "json" module is a drop-in replacement for the current usage of the standard library's JSON module, except using `loads()` will throw an exception on duplicate keys. Callers should now catch any ValueErrors, which also covers JSONDecodeError. Fixes #84. 
--- securedrop_proxy/entrypoint.py | 3 +-- securedrop_proxy/json.py | 24 ++++++++++++++++++++++++ securedrop_proxy/main.py | 5 ++--- securedrop_proxy/proxy.py | 2 +- tests/test_json.py | 21 +++++++++++++++++++++ 5 files changed, 49 insertions(+), 6 deletions(-) create mode 100644 securedrop_proxy/json.py create mode 100644 tests/test_json.py diff --git a/securedrop_proxy/entrypoint.py b/securedrop_proxy/entrypoint.py index b2f535554..9e881969f 100755 --- a/securedrop_proxy/entrypoint.py +++ b/securedrop_proxy/entrypoint.py @@ -7,14 +7,13 @@ # the README for configuration options. import http -import json import logging import os import platform import sys from logging.handlers import SysLogHandler, TimedRotatingFileHandler -from securedrop_proxy import main, proxy +from securedrop_proxy import json, main, proxy from securedrop_proxy.version import version DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_proxy") diff --git a/securedrop_proxy/json.py b/securedrop_proxy/json.py new file mode 100644 index 000000000..aa8f1c411 --- /dev/null +++ b/securedrop_proxy/json.py @@ -0,0 +1,24 @@ +""" +Wrapper around Python's json to catch duplicate keys (potential JSON injection) + +This was informational finding TOB-SDW-014 in the 2020 audit. 
+""" +import json + +dumps = json.dumps + + +def _check(seq): + d = {} + for key, value in seq: + if key in d: + raise ValueError(f"Key '{key}' found twice in JSON object") + d[key] = value + return d + + +def loads(text: str) -> dict: + """ + Turn a string into a JSON object, but reject duplicate keys + """ + return json.loads(text, object_pairs_hook=_check) diff --git a/securedrop_proxy/main.py b/securedrop_proxy/main.py index 1fd76cf88..040813f1a 100644 --- a/securedrop_proxy/main.py +++ b/securedrop_proxy/main.py @@ -1,8 +1,7 @@ -import json import logging from typing import Any, Dict -from securedrop_proxy import proxy +from securedrop_proxy import json, proxy from securedrop_proxy.proxy import Proxy logger = logging.getLogger(__name__) @@ -17,7 +16,7 @@ def __main__(incoming: str, p: Proxy) -> None: client_req: Dict[str, Any] = {} try: client_req = json.loads(incoming) - except json.decoder.JSONDecodeError as e: + except ValueError as e: logging.error(e) p.simple_error(400, "Invalid JSON in request") p.on_done() diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 97f168c3e..36854bb05 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -1,5 +1,4 @@ import http -import json import logging import os import subprocess @@ -14,6 +13,7 @@ import yaml import securedrop_proxy.version as version +from securedrop_proxy import json logger = logging.getLogger(__name__) diff --git a/tests/test_json.py b/tests/test_json.py new file mode 100644 index 000000000..d268e34ac --- /dev/null +++ b/tests/test_json.py @@ -0,0 +1,21 @@ +import unittest + +from securedrop_proxy import json + + +class JsonTest(unittest.TestCase): + def test_dumps(self): + """Simple check since this is a passthrough to stdlib json""" + self.assertEqual( + json.dumps({"foo": "bar", "baz": ["one"]}), '{"foo": "bar", "baz": ["one"]}' + ) + + def test_loads(self): + # Verify basic loading works + self.assertEqual( + json.loads('{"foo": "bar", "baz": ["one"]}'), 
{"foo": "bar", "baz": ["one"]} + ) + # But duplicate keys are rejected + with self.assertRaises(ValueError) as exc: + json.loads('{"foo": "bar", "foo": "baz"}') + self.assertEqual(str(exc.exception), "Key 'foo' found twice in JSON object") From 3e68158b67cc145caffe4135d998a39ac63e10c3 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Wed, 20 Apr 2022 18:40:05 -0400 Subject: [PATCH 269/352] Explicitly set packages in setup.py This is generally a good practice instead of relying on autodiscovery, plus it's what the other securedrop- packages do. Also it fixes a weird bug with reprotest, in which it can't determine the correct package. Fixes https://github.com/freedomofpress/securedrop-debian-packaging/issues/298. --- setup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.py b/setup.py index efbe8f282..0d3ccec97 100644 --- a/setup.py +++ b/setup.py @@ -19,6 +19,10 @@ license="GPLv3+", install_requires=[], python_requires=">=3.5", + packages=setuptools.find_packages(exclude=["docs", "tests"]), + package_data={ + 'securedrop_log': ['VERSION'], + }, url="https://github.com/freedomofpress/securedrop-log", classifiers=[ "Development Status :: 3 - Alpha", From b13d327f9fa7c07e1db4b2e4fd9dc47db09498e6 Mon Sep 17 00:00:00 2001 From: Michael Z Date: Tue, 10 May 2022 12:21:32 -0400 Subject: [PATCH 270/352] Add executable bit to `securedrop.Log` for logging support on 4.1 --- securedrop.Log | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 securedrop.Log diff --git a/securedrop.Log b/securedrop.Log old mode 100644 new mode 100755 From fc09f5ce648d7d7b67b8a8c2eda46601da99bc40 Mon Sep 17 00:00:00 2001 From: Michael Z Date: Mon, 23 May 2022 13:51:32 -0400 Subject: [PATCH 271/352] Treat ppdc warnings as non-fatal Users have reported printers not working because of `ppdc` warning messages reported in the client (asking them to contact an administrator). 
These warnings do not result in non-zero return codes and are seemingly really just warnings, so no need to get users involved. Fixes #51 --- securedrop_export/export.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 1f8274037..ee7f2c520 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -128,7 +128,13 @@ def safe_check_call(self, command, error_message): try: subprocess.check_call(command) except subprocess.CalledProcessError as ex: - self.exit_gracefully(msg=error_message, e=ex.output) + # ppdc emits warnings which should not be treated as user facing errors + if ex.returncode == 0 and \ + ex.stderr is not None and \ + ex.stderr.startswith("ppdc: Warning"): + logger.info('Encountered warning: {}'.format(ex.output)) + else: + self.exit_gracefully(msg=error_message, e=ex.output) class ExportAction(abc.ABC): From cf9c25a85e35ade653581b0baee92ff13b89e704 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 24 May 2022 13:18:06 -0700 Subject: [PATCH 272/352] add black and bump other dev dependencies --- requirements/dev-requirements.in | 1 + requirements/dev-requirements.txt | 310 +++++++++++++++++++----------- 2 files changed, 195 insertions(+), 116 deletions(-) diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in index e7a7d9159..5200c5299 100644 --- a/requirements/dev-requirements.in +++ b/requirements/dev-requirements.in @@ -1,3 +1,4 @@ +black flake8 pip-tools pytest diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index ea7d8455f..9cc7487ba 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -8,25 +8,59 @@ attrs==21.4.0 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd # via + # glom # jsonschema # pytest # semgrep -bracex==2.2.1 \ - 
--hash=sha256:096c4b788bf492f7af4e90ef8b5bcbfb99759ae3415ea1b83c9d29a5ed8f9a94 \ - --hash=sha256:1c8d1296e00ad9a91030ccb4c291f9e4dc7c054f12c707ba3c5ff3e9a81bcd21 +black==22.3.0 \ + --hash=sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b \ + --hash=sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176 \ + --hash=sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09 \ + --hash=sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a \ + --hash=sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015 \ + --hash=sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79 \ + --hash=sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb \ + --hash=sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20 \ + --hash=sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464 \ + --hash=sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968 \ + --hash=sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82 \ + --hash=sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21 \ + --hash=sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0 \ + --hash=sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265 \ + --hash=sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b \ + --hash=sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a \ + --hash=sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72 \ + --hash=sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce \ + --hash=sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0 \ + --hash=sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a \ + --hash=sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163 \ + 
--hash=sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad \ + --hash=sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d + # via -r requirements/dev-requirements.in +boltons==21.0.0 \ + --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ + --hash=sha256:b9bb7b58b2b420bbe11a6025fdef6d3e5edc9f76a42fb467afe7ca212ef9948b + # via + # face + # glom + # semgrep +bracex==2.3 \ + --hash=sha256:6789a715744bcb3359b53c4012dd94be5ab7669c638affe89f670595a3c73cc0 \ + --hash=sha256:a3ce1d8a9fb7acc887e2e60ac5aa269f243d960c34c3d8a541fb672bdb9aa957 # via wcmatch -certifi==2021.10.8 \ - --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \ - --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 +certifi==2022.5.18.1 \ + --hash=sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7 \ + --hash=sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a # via requests -charset-normalizer==2.0.10 \ - --hash=sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd \ - --hash=sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455 +charset-normalizer==2.0.12 \ + --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ + --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df # via requests -click==8.0.3 \ - --hash=sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3 \ - --hash=sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b +click==8.1.3 \ + --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 # via + # black # click-option-group # pip-tools # semgrep @@ -38,59 +72,65 @@ colorama==0.4.4 \ --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ 
--hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 # via semgrep -coverage[toml]==6.2 \ - --hash=sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0 \ - --hash=sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd \ - --hash=sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884 \ - --hash=sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48 \ - --hash=sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76 \ - --hash=sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0 \ - --hash=sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64 \ - --hash=sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685 \ - --hash=sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47 \ - --hash=sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d \ - --hash=sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840 \ - --hash=sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f \ - --hash=sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971 \ - --hash=sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c \ - --hash=sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a \ - --hash=sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de \ - --hash=sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17 \ - --hash=sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4 \ - --hash=sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521 \ - --hash=sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57 \ - --hash=sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b \ - --hash=sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282 \ - 
--hash=sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644 \ - --hash=sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475 \ - --hash=sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d \ - --hash=sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da \ - --hash=sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953 \ - --hash=sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2 \ - --hash=sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e \ - --hash=sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c \ - --hash=sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc \ - --hash=sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64 \ - --hash=sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74 \ - --hash=sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617 \ - --hash=sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3 \ - --hash=sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d \ - --hash=sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa \ - --hash=sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739 \ - --hash=sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8 \ - --hash=sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8 \ - --hash=sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781 \ - --hash=sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58 \ - --hash=sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9 \ - --hash=sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c \ - --hash=sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd \ - --hash=sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e \ - 
--hash=sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49 +coverage[toml]==6.4 \ + --hash=sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a \ + --hash=sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6 \ + --hash=sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383 \ + --hash=sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f \ + --hash=sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f \ + --hash=sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f \ + --hash=sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c \ + --hash=sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018 \ + --hash=sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720 \ + --hash=sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3 \ + --hash=sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf \ + --hash=sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211 \ + --hash=sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39 \ + --hash=sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95 \ + --hash=sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41 \ + --hash=sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c \ + --hash=sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166 \ + --hash=sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49 \ + --hash=sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce \ + --hash=sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088 \ + --hash=sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6 \ + --hash=sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426 \ + 
--hash=sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df \ + --hash=sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632 \ + --hash=sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3 \ + --hash=sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08 \ + --hash=sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65 \ + --hash=sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea \ + --hash=sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701 \ + --hash=sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5 \ + --hash=sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311 \ + --hash=sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7 \ + --hash=sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d \ + --hash=sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61 \ + --hash=sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c \ + --hash=sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a \ + --hash=sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055 \ + --hash=sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740 \ + --hash=sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45 \ + --hash=sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052 \ + --hash=sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f # via pytest-cov +defusedxml==0.7.1 \ + --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ + --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 + # via semgrep +face==20.1.1 \ + --hash=sha256:3790311a7329e4b0d90baee346eecad54b337629576edf3a246683a5f0d24446 \ + --hash=sha256:7d59ca5ba341316e58cf72c6aff85cca2541cf5056c4af45cb63af9a814bed3e + # via glom flake8==4.0.1 \ 
--hash=sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d \ --hash=sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d # via -r requirements/dev-requirements.in +glom==22.1.0 \ + --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ + --hash=sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772 + # via semgrep idna==3.3 \ --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d @@ -105,39 +145,47 @@ importlib-metadata==4.2.0 \ # pep517 # pluggy # pytest -importlib-resources==5.4.0 \ - --hash=sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45 \ - --hash=sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b - # via jsonschema iniconfig==1.1.1 \ --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 # via pytest -jsonschema==4.4.0 \ - --hash=sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83 \ - --hash=sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823 +jsonschema==3.2.0 \ + --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ + --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a # via semgrep mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f # via flake8 +mypy-extensions==0.4.3 \ + --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ + --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 + # via black packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ 
--hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 # via # pytest # semgrep -peewee==3.14.8 \ - --hash=sha256:01bd7f734defb08d7a3346a0c0ca7011bc8d0d685934ec0e001b3371d522ec53 +pathspec==0.9.0 \ + --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \ + --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1 + # via black +peewee==3.14.10 \ + --hash=sha256:23271422b332c82d30c92597dee905ee831b56c6d99c33e05901e6891c75fe15 # via semgrep pep517==0.12.0 \ --hash=sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0 \ --hash=sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161 # via pip-tools -pip-tools==6.4.0 \ - --hash=sha256:65553a15b1ba34be5e43889345062e38fb9b219ffa23b084ca0d4c4039b6f53b \ - --hash=sha256:bb2c3272bc229b4a6d25230ebe255823aba1aa466a0d698c48ab7eb5ab7efdc9 +pip-tools==6.6.2 \ + --hash=sha256:6b486548e5a139e30e4c4a225b3b7c2d46942a9f6d1a91143c21b1de4d02fd9b \ + --hash=sha256:f638503a9f77d98d9a7d72584b1508d3f82ed019b8fab24f4e5ad078c1b8c95e # via -r requirements/dev-requirements.in +platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via black pluggy==1.0.0 \ --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 @@ -154,9 +202,9 @@ pyflakes==2.4.0 \ --hash=sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c \ --hash=sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e # via flake8 -pyparsing==3.0.6 \ - --hash=sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4 \ - --hash=sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81 +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + 
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc # via packaging pyrsistent==0.18.1 \ --hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \ @@ -181,9 +229,9 @@ pyrsistent==0.18.1 \ --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \ --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6 # via jsonschema -pytest==6.2.5 \ - --hash=sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89 \ - --hash=sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134 +pytest==7.1.2 \ + --hash=sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c \ + --hash=sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45 # via # -r requirements/dev-requirements.in # pytest-cov @@ -192,17 +240,17 @@ pytest-cov==3.0.0 \ --hash=sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6 \ --hash=sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470 # via -r requirements/dev-requirements.in -pytest-mock==3.6.1 \ - --hash=sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3 \ - --hash=sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62 +pytest-mock==3.7.0 \ + --hash=sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534 \ + --hash=sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231 # via -r requirements/dev-requirements.in requests==2.27.1 \ --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \ --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d # via semgrep -ruamel.yaml==0.17.20 \ - --hash=sha256:4b8a33c1efb2b443a93fcaafcfa4d2e445f8e8c29c528d9f5cdafb7cc9e4004c \ - --hash=sha256:810eef9c46523a3f77479c66267a4708255ebe806a2d540078408c2227f011af +ruamel.yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + 
--hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af # via semgrep ruamel.yaml.clib==0.2.6 \ --hash=sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd \ @@ -231,35 +279,65 @@ ruamel.yaml.clib==0.2.6 \ --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c # via ruamel.yaml -semgrep==0.78.0 \ - --hash=sha256:0ae8f6fb9b9d5e9920d25662dd92dd0c20d32752b8cd154c1bb0ceca37b8e380 \ - --hash=sha256:5abb5ecc7a536614a6ee95b2dee762fe50f0cf63a41cc23ef21798c7b94fe1af \ - --hash=sha256:7f58e7a795f8d6d419f6ae13d44565c5075ab08e07054c24832752dd4f20d860 +semgrep==0.93.0 \ + --hash=sha256:02fce22e81e68ded60f67f8cb2979a8014ac5bb7bceb93c2553d8ccc03211259 \ + --hash=sha256:1aa9788e507286694271234cd97f2bb19d741e5ae614f0fbb545715a7d0e872d \ + --hash=sha256:3de9c36cbafef772d453f77da9b6f3c2239975b0a29e0674773bf04a0756b1ec # via -r requirements/dev-requirements.in -toml==0.10.2 \ - --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ - --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f - # via pytest -tomli==2.0.0 \ - --hash=sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224 \ - --hash=sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1 +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via jsonschema +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via + # black # coverage # pep517 -tqdm==4.62.3 \ - --hash=sha256:8dd278a422499cd6b727e6ae4061c40b48fce8b76d1ccbf5d34fca9b7f925b0c \ - --hash=sha256:d359de7217506c9851b7869f3708d8ee53ed70a1b8edbba4dbcb47442592920d + # pytest +tqdm==4.64.0 \ + 
--hash=sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d \ + --hash=sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6 # via semgrep -typing-extensions==4.0.1 \ - --hash=sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e \ - --hash=sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b +typed-ast==1.5.4 \ + --hash=sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2 \ + --hash=sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1 \ + --hash=sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6 \ + --hash=sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62 \ + --hash=sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac \ + --hash=sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d \ + --hash=sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc \ + --hash=sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2 \ + --hash=sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97 \ + --hash=sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35 \ + --hash=sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6 \ + --hash=sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1 \ + --hash=sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4 \ + --hash=sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c \ + --hash=sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e \ + --hash=sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec \ + --hash=sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f \ + --hash=sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72 \ + --hash=sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47 \ + 
--hash=sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72 \ + --hash=sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe \ + --hash=sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6 \ + --hash=sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3 \ + --hash=sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66 + # via black +typing-extensions==4.2.0 \ + --hash=sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708 \ + --hash=sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376 # via + # black # importlib-metadata - # jsonschema -urllib3==1.26.8 \ - --hash=sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed \ - --hash=sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c - # via requests +urllib3==1.26.9 \ + --hash=sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14 \ + --hash=sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e + # via + # requests + # semgrep wcmatch==8.3 \ --hash=sha256:371072912398af61d1e4e78609e18801c6faecd3cb36c54c82556a60abc965db \ --hash=sha256:7141d2c85314253f16b38cb3d6cc0fb612918d407e1df3ccc2be7c86cc259c22 @@ -268,22 +346,22 @@ wheel==0.37.1 \ --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 # via pip-tools -zipp==3.7.0 \ - --hash=sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d \ - --hash=sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375 +zipp==3.8.0 \ + --hash=sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad \ + --hash=sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099 # via # importlib-metadata - # importlib-resources # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==21.3.1 \ - 
--hash=sha256:deaf32dcd9ab821e359cd8330786bcd077604b5c5730c0b096eda46f95c24a2d \ - --hash=sha256:fd11ba3d0fdb4c07fbc5ecbba0b1b719809420f25038f8ee3cd913d3faa3033a +pip==22.1.1 \ + --hash=sha256:8dfb15d8a1c3d3085a4cbe11f29e19527dfaf2ba99354326fd62cec013eaee81 \ + --hash=sha256:e7bcf0b2cbdec2af84cc1b7b79b25fdbd7228fbdb61a4dca0b82810d0ba9d18b # via pip-tools -setuptools==60.5.0 \ - --hash=sha256:2404879cda71495fc4d5cbc445ed52fdaddf352b36e40be8dcc63147cb4edabe \ - --hash=sha256:68eb94073fc486091447fcb0501efd6560a0e5a1839ba249e5ff3c4c93f05f90 +setuptools==62.3.2 \ + --hash=sha256:68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36 \ + --hash=sha256:a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7 # via + # jsonschema # pip-tools # semgrep From 2d7829aae172bbe1a06bae9c945069d43a1ddf40 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 24 May 2022 13:20:34 -0700 Subject: [PATCH 273/352] add black makefile target and apply changes --- Makefile | 10 ++- securedrop_export/__init__.py | 2 +- securedrop_export/disk/actions.py | 121 +++++++++++++++++------------ securedrop_export/entrypoint.py | 9 ++- securedrop_export/exceptions.py | 42 +++++----- securedrop_export/export.py | 20 +++-- securedrop_export/main.py | 15 +++- securedrop_export/print/actions.py | 83 ++++++++++++-------- securedrop_export/utils.py | 4 +- setup.py | 6 +- tests/disk/test_actions.py | 33 +++++--- tests/print/test_actions.py | 71 ++++++++++------- tests/test_export.py | 117 ++++++++++++++++++++-------- tests/test_main.py | 1 + 14 files changed, 335 insertions(+), 199 deletions(-) diff --git a/Makefile b/Makefile index 62f1b7101..ea83824cc 100644 --- a/Makefile +++ b/Makefile @@ -40,7 +40,11 @@ update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the late done < 'requirements/dev-requirements.in' .PHONY: check -check: lint semgrep test ## Run linter and tests +check: lint semgrep test check-black ## Run linter and tests + +.PHONY: check-black 
+check-black: ## Check Python source code formatting with black + @black --check --diff ./ TESTS ?= tests .PHONY: test @@ -51,6 +55,10 @@ test: ## Run tests lint: ## Run linter flake8 securedrop_export/ tests/ +.PHONY: black +black: ## Format Python source code with black + @black ./ + SEMGREP_FLAGS := --exclude "tests/" --error --strict --verbose .PHONY: semgrep diff --git a/securedrop_export/__init__.py b/securedrop_export/__init__.py index 44b18069b..01ef12070 100644 --- a/securedrop_export/__init__.py +++ b/securedrop_export/__init__.py @@ -1 +1 @@ -__version__ = '0.2.6' +__version__ = "0.2.6" diff --git a/securedrop_export/disk/actions.py b/securedrop_export/disk/actions.py index 072cb93fc..9619aba03 100644 --- a/securedrop_export/disk/actions.py +++ b/securedrop_export/disk/actions.py @@ -29,30 +29,37 @@ def check_usb_connected(self, exit=False) -> None: usb_devices = self._get_connected_usbs() if len(usb_devices) == 0: - logger.info('0 USB devices connected') + logger.info("0 USB devices connected") self.submission.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) elif len(usb_devices) == 1: - logger.info('1 USB device connected') + logger.info("1 USB device connected") self.device = usb_devices[0] if exit: self.submission.exit_gracefully(ExportStatus.USB_CONNECTED.value) elif len(usb_devices) > 1: - logger.info('>1 USB devices connected') + logger.info(">1 USB devices connected") # Return generic error until freedomofpress/securedrop-export/issues/25 self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) def _get_connected_usbs(self) -> List[str]: - logger.info('Performing usb preflight') + logger.info("Performing usb preflight") # List all block devices attached to VM that are disks and not partitions. 
try: - lsblk = subprocess.Popen(["lsblk", "-o", "NAME,TYPE"], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - grep = subprocess.Popen(["grep", "disk"], stdin=lsblk.stdout, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + lsblk = subprocess.Popen( + ["lsblk", "-o", "NAME,TYPE"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + grep = subprocess.Popen( + ["grep", "disk"], + stdin=lsblk.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) command_output = grep.stdout.readlines() # The first word in each element of the command_output list is the device name - attached_devices = [x.decode('utf8').split()[0] for x in command_output] + attached_devices = [x.decode("utf8").split()[0] for x in command_output] except subprocess.CalledProcessError: self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) @@ -62,8 +69,9 @@ def _get_connected_usbs(self) -> List[str]: try: removable = subprocess.check_output( ["cat", "/sys/class/block/{}/removable".format(device)], - stderr=subprocess.PIPE) - is_removable = int(removable.decode('utf8').strip()) + stderr=subprocess.PIPE, + ) + is_removable = int(removable.decode("utf8").strip()) except subprocess.CalledProcessError: is_removable = False @@ -75,28 +83,36 @@ def _get_connected_usbs(self) -> List[str]: def set_extracted_device_name(self): try: device_and_partitions = subprocess.check_output( - ["lsblk", "-o", "TYPE", "--noheadings", self.device], stderr=subprocess.PIPE) + ["lsblk", "-o", "TYPE", "--noheadings", self.device], + stderr=subprocess.PIPE, + ) # we don't support multiple partitions - partition_count = device_and_partitions.decode('utf-8').split('\n').count('part') + partition_count = ( + device_and_partitions.decode("utf-8").split("\n").count("part") + ) if partition_count > 1: logger.debug("multiple partitions not supported") - self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) + self.submission.exit_gracefully( + 
ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + ) # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted - self.device = self.device if partition_count == 0 else self.device + '1' + self.device = self.device if partition_count == 0 else self.device + "1" except subprocess.CalledProcessError: - self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value) + self.submission.exit_gracefully( + ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + ) def check_luks_volume(self): # cryptsetup isLuks returns 0 if the device is a luks volume # subprocess with throw if the device is not luks (rc !=0) - logger.info('Checking if volume is luks-encrypted') + logger.info("Checking if volume is luks-encrypted") self.set_extracted_device_name() logger.debug("checking if {} is luks encrypted".format(self.device)) self.submission.safe_check_call( command=["sudo", "cryptsetup", "isLuks", self.device], - error_message=ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + error_message=ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value, ) self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTED.value) @@ -104,30 +120,32 @@ def unlock_luks_volume(self, encryption_key): try: # get the encrypted device name self.set_extracted_device_name() - luks_header = subprocess.check_output(["sudo", "cryptsetup", "luksDump", self.device]) - luks_header_list = luks_header.decode('utf-8').split('\n') + luks_header = subprocess.check_output( + ["sudo", "cryptsetup", "luksDump", self.device] + ) + luks_header_list = luks_header.decode("utf-8").split("\n") for line in luks_header_list: - items = line.split('\t') - if 'UUID' in items[0]: - self.encrypted_device = 'luks-' + items[1] + items = line.split("\t") + if "UUID" in items[0]: + self.encrypted_device = "luks-" + items[1] # the luks device is already unlocked - if os.path.exists(os.path.join('/dev/mapper/', self.encrypted_device)): - logger.debug('Device already unlocked') + if 
os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): + logger.debug("Device already unlocked") return - logger.debug('Unlocking luks volume {}'.format(self.encrypted_device)) + logger.debug("Unlocking luks volume {}".format(self.encrypted_device)) p = subprocess.Popen( ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE + stderr=subprocess.PIPE, ) - logger.debug('Passing key') + logger.debug("Passing key") p.communicate(input=str.encode(encryption_key, "utf-8")) rc = p.returncode if rc != 0: - logger.error('Bad phassphrase for {}'.format(self.encrypted_device)) + logger.error("Bad phassphrase for {}".format(self.encrypted_device)) self.submission.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) except subprocess.CalledProcessError: self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) @@ -135,10 +153,11 @@ def unlock_luks_volume(self, encryption_key): def mount_volume(self): # If the drive is already mounted then we don't need to mount it again output = subprocess.check_output( - ["lsblk", "-o", "MOUNTPOINT", "--noheadings", self.device]) - mountpoint = output.decode('utf-8').strip() + ["lsblk", "-o", "MOUNTPOINT", "--noheadings", self.device] + ) + mountpoint = output.decode("utf-8").strip() if mountpoint: - logger.debug('The device is already mounted') + logger.debug("The device is already mounted") self.mountpoint = mountpoint return @@ -146,18 +165,18 @@ def mount_volume(self): if not os.path.exists(self.mountpoint): self.submission.safe_check_call( command=["sudo", "mkdir", self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT + error_message=ExportStatus.ERROR_USB_MOUNT, ) mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) - logger.info('Mounting {}'.format(mapped_device_path)) + logger.info("Mounting {}".format(mapped_device_path)) self.submission.safe_check_call( command=["sudo", "mount", 
mapped_device_path, self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT.value + error_message=ExportStatus.ERROR_USB_MOUNT.value, ) self.submission.safe_check_call( command=["sudo", "chown", "-R", "user:user", self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT.value + error_message=ExportStatus.ERROR_USB_MOUNT.value, ) def copy_submission(self): @@ -168,26 +187,30 @@ def copy_submission(self): target_path = os.path.join(self.mountpoint, self.submission.target_dirname) subprocess.check_call(["mkdir", target_path]) export_data = os.path.join(self.submission.tmpdir, "export_data/") - logger.info('Copying file to {}'.format(self.submission.target_dirname)) + logger.info("Copying file to {}".format(self.submission.target_dirname)) subprocess.check_call(["cp", "-r", export_data, target_path]) - logger.info('File copied successfully to {}'.format(self.submission.target_dirname)) + logger.info( + "File copied successfully to {}".format(self.submission.target_dirname) + ) except (subprocess.CalledProcessError, OSError): self.submission.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) finally: - logger.info('Syncing filesystems') + logger.info("Syncing filesystems") subprocess.check_call(["sync"]) if os.path.exists(self.mountpoint): - logger.info('Unmounting drive from {}'.format(self.mountpoint)) + logger.info("Unmounting drive from {}".format(self.mountpoint)) subprocess.check_call(["sudo", "umount", self.mountpoint]) - if os.path.exists(os.path.join('/dev/mapper', self.encrypted_device)): - logger.info('Locking luks volume {}'.format(self.encrypted_device)) + if os.path.exists(os.path.join("/dev/mapper", self.encrypted_device)): + logger.info("Locking luks volume {}".format(self.encrypted_device)) subprocess.check_call( ["sudo", "cryptsetup", "luksClose", self.encrypted_device] ) - logger.info('Deleting temporary directory {}'.format(self.submission.tmpdir)) + logger.info( + "Deleting temporary directory {}".format(self.submission.tmpdir) + ) 
subprocess.check_call(["rm", "-rf", self.submission.tmpdir]) sys.exit(0) @@ -197,7 +220,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def run(self): - logger.info('Export archive is usb-test') + logger.info("Export archive is usb-test") self.check_usb_connected(exit=True) @@ -206,7 +229,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def run(self): - logger.info('Export archive is disk-test') + logger.info("Export archive is disk-test") # check_usb_connected looks for the drive, sets the drive to use self.check_usb_connected() self.check_luks_volume() @@ -217,13 +240,13 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def run(self): - logger.info('Export archive is disk') + logger.info("Export archive is disk") # check_usb_connected looks for the drive, sets the drive to use self.check_usb_connected() - logger.info('Unlocking volume') + logger.info("Unlocking volume") # exports all documents in the archive to luks-encrypted volume self.unlock_luks_volume(self.submission.archive_metadata.encryption_key) - logger.info('Mounting volume') + logger.info("Mounting volume") self.mount_volume() - logger.info('Copying submission to drive') + logger.info("Copying submission to drive") self.copy_submission() diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index b82e8e3bc..3bb86baad 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -28,8 +28,9 @@ def configure_logging(): log_file = os.path.join(DEFAULT_HOME, LOG_DIR_NAME, EXPORT_LOG_FILENAME) # set logging format - log_fmt = ('%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) ' - '%(levelname)s: %(message)s') + log_fmt = ( + "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s" + ) formatter = logging.Formatter(log_fmt) handler = TimedRotatingFileHandler(log_file) @@ -60,13 +61,13 @@ def start(): msg = "ERROR_LOGGING" export.SDExport.exit_gracefully(msg) - 
logger.info('Starting SecureDrop Export {}'.format(__version__)) + logger.info("Starting SecureDrop Export {}".format(__version__)) my_sub = export.SDExport(sys.argv[1], CONFIG_PATH) try: # Halt immediately if target file is absent if not os.path.exists(my_sub.archive): - logger.info('Archive is not found {}.'.format(my_sub.archive)) + logger.info("Archive is not found {}.".format(my_sub.archive)) msg = "ERROR_FILE_NOT_FOUND" my_sub.exit_gracefully(msg) main.__main__(my_sub) diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index e144a1684..11855c0ff 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -4,37 +4,37 @@ class ExportStatus(Enum): # General errors - ERROR_FILE_NOT_FOUND = 'ERROR_FILE_NOT_FOUND' - ERROR_EXTRACTION = 'ERROR_EXTRACTION' - ERROR_METADATA_PARSING = 'ERROR_METADATA_PARSING' - ERROR_ARCHIVE_METADATA = 'ERROR_ARCHIVE_METADATA' - ERROR_USB_CONFIGURATION = 'ERROR_USB_CONFIGURATION' - ERROR_GENERIC = 'ERROR_GENERIC' + ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" + ERROR_EXTRACTION = "ERROR_EXTRACTION" + ERROR_METADATA_PARSING = "ERROR_METADATA_PARSING" + ERROR_ARCHIVE_METADATA = "ERROR_ARCHIVE_METADATA" + ERROR_USB_CONFIGURATION = "ERROR_USB_CONFIGURATION" + ERROR_GENERIC = "ERROR_GENERIC" # USB preflight related errors - USB_CONNECTED = 'USB_CONNECTED' - USB_NOT_CONNECTED = 'USB_NOT_CONNECTED' - ERROR_USB_CHECK = 'ERROR_USB_CHECK' + USB_CONNECTED = "USB_CONNECTED" + USB_NOT_CONNECTED = "USB_NOT_CONNECTED" + ERROR_USB_CHECK = "ERROR_USB_CHECK" # USB Disk preflight related errors - USB_ENCRYPTED = 'USB_ENCRYPTED' - USB_ENCRYPTION_NOT_SUPPORTED = 'USB_ENCRYPTION_NOT_SUPPORTED' - USB_DISK_ERROR = 'USB_DISK_ERROR' + USB_ENCRYPTED = "USB_ENCRYPTED" + USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" + USB_DISK_ERROR = "USB_DISK_ERROR" # Printer preflight related errors - ERROR_MULTIPLE_PRINTERS_FOUND = 'ERROR_MULTIPLE_PRINTERS_FOUND' - ERROR_PRINTER_NOT_FOUND = 
'ERROR_PRINTER_NOT_FOUND' - ERROR_PRINTER_NOT_SUPPORTED = 'ERROR_PRINTER_NOT_SUPPORTED' - ERROR_PRINTER_DRIVER_UNAVAILABLE = 'ERROR_PRINTER_DRIVER_UNAVAILABLE' - ERROR_PRINTER_INSTALL = 'ERROR_PRINTER_INSTALL' + ERROR_MULTIPLE_PRINTERS_FOUND = "ERROR_MULTIPLE_PRINTERS_FOUND" + ERROR_PRINTER_NOT_FOUND = "ERROR_PRINTER_NOT_FOUND" + ERROR_PRINTER_NOT_SUPPORTED = "ERROR_PRINTER_NOT_SUPPORTED" + ERROR_PRINTER_DRIVER_UNAVAILABLE = "ERROR_PRINTER_DRIVER_UNAVAILABLE" + ERROR_PRINTER_INSTALL = "ERROR_PRINTER_INSTALL" # Disk export errors - USB_BAD_PASSPHRASE = 'USB_BAD_PASSPHRASE' - ERROR_USB_MOUNT = 'ERROR_USB_MOUNT' - ERROR_USB_WRITE = 'ERROR_USB_WRITE' + USB_BAD_PASSPHRASE = "USB_BAD_PASSPHRASE" + ERROR_USB_MOUNT = "ERROR_USB_MOUNT" + ERROR_USB_WRITE = "ERROR_USB_WRITE" # Printer export errors - ERROR_PRINT = 'ERROR_PRINT' + ERROR_PRINT = "ERROR_PRINT" class TimeoutException(Exception): diff --git a/securedrop_export/export.py b/securedrop_export/export.py index ee7f2c520..81a13d2ee 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -29,7 +29,7 @@ class Metadata(object): "disk-test", # disk preflight test "printer", "printer-test", # print test page - "printer-preflight" + "printer-preflight", ] SUPPORTED_ENCRYPTION_METHODS = ["luks"] @@ -38,28 +38,26 @@ def __init__(self, archive_path): try: with open(self.metadata_path) as f: - logger.info('Parsing archive metadata') + logger.info("Parsing archive metadata") json_config = json.loads(f.read()) self.export_method = json_config.get("device", None) self.encryption_method = json_config.get("encryption_method", None) - self.encryption_key = json_config.get( - "encryption_key", None - ) + self.encryption_key = json_config.get("encryption_key", None) logger.info( - 'Exporting to device {} with encryption_method {}'.format( + "Exporting to device {} with encryption_method {}".format( self.export_method, self.encryption_method ) ) except Exception: - logger.error('Metadata parsing failure') + 
logger.error("Metadata parsing failure") raise def is_valid(self): - logger.info('Validating metadata contents') + logger.info("Validating metadata contents") if self.export_method not in self.SUPPORTED_EXPORT_METHODS: logger.error( - 'Archive metadata: Export method {} is not supported'.format( + "Archive metadata: Export method {} is not supported".format( self.export_method ) ) @@ -68,7 +66,7 @@ def is_valid(self): if self.export_method == "disk": if self.encryption_method not in self.SUPPORTED_ENCRYPTION_METHODS: logger.error( - 'Archive metadata: Encryption method {} is not supported'.format( + "Archive metadata: Encryption method {} is not supported".format( self.encryption_method ) ) @@ -103,7 +101,7 @@ def exit_gracefully(self, msg, e=False): since non-zero exit values will cause system to try alternative solutions for mimetype handling, which we want to avoid. """ - logger.info('Exiting with message: {}'.format(msg)) + logger.info("Exiting with message: {}".format(msg)) if not e: sys.stderr.write(msg) sys.stderr.write("\n") diff --git a/securedrop_export/main.py b/securedrop_export/main.py index bbdb25f68..042c0cd9b 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -2,9 +2,16 @@ from securedrop_export import export from securedrop_export.exceptions import ExportStatus -from securedrop_export.print.actions import PrintExportAction, PrintTestPageAction, \ - PrintPreflightAction -from securedrop_export.disk.actions import DiskTestAction, DiskExportAction, USBTestAction +from securedrop_export.print.actions import ( + PrintExportAction, + PrintTestPageAction, + PrintPreflightAction, +) +from securedrop_export.disk.actions import ( + DiskTestAction, + DiskExportAction, + USBTestAction, +) logger = logging.getLogger(__name__) @@ -21,7 +28,7 @@ def __main__(submission): submission.exit_gracefully(ExportStatus.ERROR_ARCHIVE_METADATA.value) if submission.archive_metadata.export_method == "start-vm": - submission.exit_gracefully('') + 
submission.exit_gracefully("") if submission.archive_metadata.export_method == "usb-test": action = USBTestAction(submission) diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index f0bc5adf3..1235bccec 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -36,38 +36,47 @@ def wait_for_print(self): printer_idle_string = "printer {} is idle".format(self.printer_name) while True: try: - logger.info('Running lpstat waiting for printer {}'.format(self.printer_name)) + logger.info( + "Running lpstat waiting for printer {}".format(self.printer_name) + ) output = subprocess.check_output(["lpstat", "-p", self.printer_name]) if printer_idle_string in output.decode("utf-8"): - logger.info('Print completed') + logger.info("Print completed") return True else: time.sleep(5) except subprocess.CalledProcessError: self.submission.exit_gracefully(ExportStatus.ERROR_PRINT.value) except TimeoutException: - logger.error('Timeout waiting for printer {}'.format(self.printer_name)) + logger.error("Timeout waiting for printer {}".format(self.printer_name)) self.submission.exit_gracefully(ExportStatus.ERROR_PRINT.value) return True def check_printer_setup(self) -> None: try: - logger.info('Searching for printer') + logger.info("Searching for printer") output = subprocess.check_output(["sudo", "lpinfo", "-v"]) - printers = [x for x in output.decode('utf-8').split() if "usb://" in x] + printers = [x for x in output.decode("utf-8").split() if "usb://" in x] if not printers: - logger.info('No usb printers connected') - self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) - - supported_printers = \ - [p for p in printers if any(sub in p for sub in ("Brother", "LaserJet"))] + logger.info("No usb printers connected") + self.submission.exit_gracefully( + ExportStatus.ERROR_PRINTER_NOT_FOUND.value + ) + + supported_printers = [ + p for p in printers if any(sub in p for sub in ("Brother", "LaserJet")) + ] 
if not supported_printers: - logger.info('{} are unsupported printers'.format(printers)) - self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + logger.info("{} are unsupported printers".format(printers)) + self.submission.exit_gracefully( + ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value + ) if len(supported_printers) > 1: - logger.info('Too many usb printers connected') - self.submission.exit_gracefully(ExportStatus.ERROR_MULTIPLE_PRINTERS_FOUND.value) + logger.info("Too many usb printers connected") + self.submission.exit_gracefully( + ExportStatus.ERROR_MULTIPLE_PRINTERS_FOUND.value + ) printer_uri = printers[0] printer_ppd = self.install_printer_ppd(printer_uri) @@ -88,25 +97,31 @@ def get_printer_uri(self): for line in output.split(): if "usb://" in line.decode("utf-8"): printer_uri = line.decode("utf-8") - logger.info('lpinfo usb printer: {}'.format(printer_uri)) + logger.info("lpinfo usb printer: {}".format(printer_uri)) # verify that the printer is supported, else exit if printer_uri == "": # No usb printer is connected - logger.info('No usb printers connected') + logger.info("No usb printers connected") self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) elif not any(x in printer_uri for x in ("Brother", "LaserJet")): # printer url is a make that is unsupported - logger.info('Printer {} is unsupported'.format(printer_uri)) - self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + logger.info("Printer {} is unsupported".format(printer_uri)) + self.submission.exit_gracefully( + ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value + ) - logger.info('Printer {} is supported'.format(printer_uri)) + logger.info("Printer {} is supported".format(printer_uri)) return printer_uri def install_printer_ppd(self, uri): if not any(x in uri for x in ("Brother", "LaserJet")): - logger.error("Cannot install printer ppd for unsupported printer: {}".format(uri)) - 
self.submission.exit_gracefully(msg=ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value) + logger.error( + "Cannot install printer ppd for unsupported printer: {}".format(uri) + ) + self.submission.exit_gracefully( + msg=ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value + ) return if "Brother" in uri: @@ -118,7 +133,7 @@ def install_printer_ppd(self, uri): # Compile and install drivers that are not already installed if not os.path.exists(printer_ppd): - logger.info('Installing printer drivers') + logger.info("Installing printer drivers") self.submission.safe_check_call( command=[ "sudo", @@ -127,14 +142,14 @@ def install_printer_ppd(self, uri): "-d", "/usr/share/cups/model/", ], - error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value + error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value, ) return printer_ppd def setup_printer(self, printer_uri, printer_ppd): # Add the printer using lpadmin - logger.info('Setting up printer {}'.format(self.printer_name)) + logger.info("Setting up printer {}".format(self.printer_name)) self.submission.safe_check_call( command=[ "sudo", @@ -147,13 +162,13 @@ def setup_printer(self, printer_uri, printer_ppd): "-P", printer_ppd, "-u", - "allow:user" + "allow:user", ], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value + error_message=ExportStatus.ERROR_PRINTER_INSTALL.value, ) def print_test_page(self): - logger.info('Printing test page') + logger.info("Printing test page") self.print_file("/usr/share/cups/data/testprint") def print_all_files(self): @@ -187,20 +202,20 @@ def print_file(self, file_to_print): # If the file to print is an (open)office document, we need to call unoconf to # convert the file to pdf as printer drivers do not support this format if self.is_open_office_file(file_to_print): - logger.info('Converting Office document to pdf') + logger.info("Converting Office document to pdf") folder = os.path.dirname(file_to_print) converted_filename = file_to_print + ".pdf" converted_path = 
os.path.join(folder, converted_filename) self.submission.safe_check_call( command=["unoconv", "-o", converted_path, file_to_print], - error_message=ExportStatus.ERROR_PRINT.value + error_message=ExportStatus.ERROR_PRINT.value, ) file_to_print = converted_path - logger.info('Sending file to printer {}'.format(self.printer_name)) + logger.info("Sending file to printer {}".format(self.printer_name)) self.submission.safe_check_call( command=["xpp", "-P", self.printer_name, file_to_print], - error_message=ExportStatus.ERROR_PRINT.value + error_message=ExportStatus.ERROR_PRINT.value, ) @@ -209,7 +224,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def run(self): - logger.info('Export archive is printer') + logger.info("Export archive is printer") self.check_printer_setup() # prints all documents in the archive self.print_all_files() @@ -220,7 +235,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def run(self): - logger.info('Export archive is printer-test') + logger.info("Export archive is printer-test") self.check_printer_setup() # Prints a test page to ensure the printer is functional self.print_test_page() @@ -231,5 +246,5 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def run(self): - logger.info('Export archive is printer-preflight') + logger.info("Export archive is printer-preflight") self.check_printer_setup() diff --git a/securedrop_export/utils.py b/securedrop_export/utils.py index 219d0e963..f5e1229ce 100644 --- a/securedrop_export/utils.py +++ b/securedrop_export/utils.py @@ -101,7 +101,9 @@ def check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: if filename_or_filepath.is_absolute(): base_path = filename_or_filepath else: - base_path = Path.cwd() # use cwd so we can next ensure relative path does not traverse up + base_path = ( + Path.cwd() + ) # use cwd so we can next ensure relative path does not traverse up try: relative_path = 
relative_filepath(filename_or_filepath, base_path) diff --git a/setup.py b/setup.py index c13b851e9..d21514991 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ url="https://github.com/freedomofpress/securedrop-export", packages=setuptools.find_packages(exclude=["docs", "tests"]), package_data={ - 'securedrop_export': ['VERSION'], + "securedrop_export": ["VERSION"], }, classifiers=( "Development Status :: 3 - Alpha", @@ -31,5 +31,7 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", ), - entry_points={"console_scripts": ["send-to-usb = securedrop_export.entrypoint:start"]}, + entry_points={ + "console_scripts": ["send-to-usb = securedrop_export.entrypoint:start"] + }, ) diff --git a/tests/disk/test_actions.py b/tests/disk/test_actions.py index c34998598..d82da2791 100644 --- a/tests/disk/test_actions.py +++ b/tests/disk/test_actions.py @@ -26,16 +26,19 @@ def test_usb_precheck_disconnected(capsys, mocker): # Popen call returns lsblk output command_output = mock.MagicMock() command_output.stdout = mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock(return_value=[b"sda disk\n", b"sdb disk\n"]) + command_output.stdout.readlines = mock.MagicMock( + return_value=[b"sda disk\n", b"sdb disk\n"] + ) mocker.patch("subprocess.Popen", return_value=command_output) # check_output returns removable status - mocker.patch("subprocess.check_output", return_value=[b'0\n', b'0\n']) + mocker.patch("subprocess.check_output", return_value=[b"0\n", b"0\n"]) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_output", - side_effect=CalledProcessError(1, 'check_output')) + mocker.patch( + "subprocess.check_output", side_effect=CalledProcessError(1, "check_output") + ) action.check_usb_connected(exit=True) @@ -75,7 +78,9 @@ def test_usb_precheck_multiple_devices_connected(capsys, mocker): # Popen call returns lsblk output command_output = mock.MagicMock() command_output.stdout = 
mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n", b"sdc disk\n"]) + command_output.stdout.readlines = mock.MagicMock( + return_value=[b"sdb disk\n", b"sdc disk\n"] + ) mocker.patch("subprocess.Popen", return_value=command_output) # check_output returns removable status @@ -166,8 +171,9 @@ def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): # Here we need to mock the exit_gracefully method with a side effect otherwise # program execution will continue after exit_gracefully and exit_gracefully # may be called a second time. - mocked_exit = mocker.patch.object(submission, "exit_gracefully", - side_effect=lambda x: sys.exit(0)) + mocked_exit = mocker.patch.object( + submission, "exit_gracefully", side_effect=lambda x: sys.exit(0) + ) # Output of `lsblk -o TYPE --noheadings DEVICE_NAME` when a drive has multiple # partitions @@ -189,15 +195,18 @@ def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", - side_effect=lambda msg, e: sys.exit(0)) + mocked_exit = mocker.patch.object( + submission, "exit_gracefully", side_effect=lambda msg, e: sys.exit(0) + ) single_partition_lsblk_output = b"disk\npart\n" mocker.patch("subprocess.check_output", return_value=single_partition_lsblk_output) - mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + mocker.patch( + "subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + ) with pytest.raises(SystemExit): action.check_luks_volume() - assert mocked_exit.mock_calls[0][2]['msg'] == expected_message - assert mocked_exit.mock_calls[0][2]['e'] is None + assert mocked_exit.mock_calls[0][2]["msg"] == expected_message + assert mocked_exit.mock_calls[0][2]["e"] is None diff --git a/tests/print/test_actions.py 
b/tests/print/test_actions.py index c500e8f01..176871d1a 100644 --- a/tests/print/test_actions.py +++ b/tests/print/test_actions.py @@ -40,8 +40,9 @@ def test_get_bad_printer_uri(mocked_call, capsys, mocker): action = PrintExportAction(submission) expected_message = "ERROR_PRINTER_NOT_FOUND" assert export.ExportStatus.ERROR_PRINTER_NOT_FOUND.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", - side_effect=lambda x: sys.exit(0)) + mocked_exit = mocker.patch.object( + submission, "exit_gracefully", side_effect=lambda x: sys.exit(0) + ) with pytest.raises(SystemExit): action.get_printer_uri() @@ -49,24 +50,30 @@ def test_get_bad_printer_uri(mocked_call, capsys, mocker): mocked_exit.assert_called_once_with(expected_message) -@pytest.mark.parametrize('open_office_paths', [ - "/tmp/whatver/thisisadoc.doc" - "/home/user/Downloads/thisisadoc.xlsx" - "/home/user/Downloads/file.odt" - "/tmp/tmpJf83j9/secret.pptx" -]) +@pytest.mark.parametrize( + "open_office_paths", + [ + "/tmp/whatver/thisisadoc.doc" + "/home/user/Downloads/thisisadoc.xlsx" + "/home/user/Downloads/file.odt" + "/tmp/tmpJf83j9/secret.pptx" + ], +) def test_is_open_office_file(capsys, open_office_paths): submission = export.SDExport("", TEST_CONFIG) action = PrintExportAction(submission) assert action.is_open_office_file(open_office_paths) -@pytest.mark.parametrize('open_office_paths', [ - "/tmp/whatver/thisisadoc.doccc" - "/home/user/Downloads/thisisa.xlsx.zip" - "/home/user/Downloads/file.odz" - "/tmp/tmpJf83j9/secret.gpg" -]) +@pytest.mark.parametrize( + "open_office_paths", + [ + "/tmp/whatver/thisisadoc.doccc" + "/home/user/Downloads/thisisa.xlsx.zip" + "/home/user/Downloads/file.odz" + "/tmp/tmpJf83j9/secret.gpg" + ], +) def test_is_not_open_office_file(capsys, open_office_paths): submission = export.SDExport("", TEST_CONFIG) action = PrintExportAction(submission) @@ -77,7 +84,9 @@ def test_is_not_open_office_file(capsys, open_office_paths): def 
test_install_printer_ppd_laserjet(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) - ppd = action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") + ppd = action.install_printer_ppd( + "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000" + ) assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" @@ -85,7 +94,9 @@ def test_install_printer_ppd_laserjet(mocker): def test_install_printer_ppd_brother(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) - ppd = action.install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") + ppd = action.install_printer_ppd( + "usb://Brother/HL-L2320D%20series?serial=A00000A000000" + ) assert ppd == "/usr/share/cups/model/br7030.ppd" @@ -93,35 +104,43 @@ def test_install_printer_ppd_error_no_driver(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + mocker.patch( + "subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + ) - action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") + action.install_printer_ppd( + "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + ) - assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_DRIVER_UNAVAILABLE" - assert mocked_exit.mock_calls[0][2]['e'] is None + assert mocked_exit.mock_calls[0][2]["msg"] == "ERROR_PRINTER_DRIVER_UNAVAILABLE" + assert mocked_exit.mock_calls[0][2]["e"] is None def test_install_printer_ppd_error_not_supported(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_call", 
side_effect=CalledProcessError(1, 'check_call')) + mocker.patch( + "subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + ) action.install_printer_ppd("usb://Not/Supported?serial=A00000A000000") - assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_NOT_SUPPORTED" + assert mocked_exit.mock_calls[0][2]["msg"] == "ERROR_PRINTER_NOT_SUPPORTED" def test_setup_printer_error(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.check_call", side_effect=CalledProcessError(1, 'check_call')) + mocker.patch( + "subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + ) action.setup_printer( "usb://Brother/HL-L2320D%20series?serial=A00000A000000", - "/usr/share/cups/model/br7030.ppd" + "/usr/share/cups/model/br7030.ppd", ) - assert mocked_exit.mock_calls[0][2]['msg'] == "ERROR_PRINTER_INSTALL" - assert mocked_exit.mock_calls[0][2]['e'] is None + assert mocked_exit.mock_calls[0][2]["msg"] == "ERROR_PRINTER_INSTALL" + assert mocked_exit.mock_calls[0][2]["e"] is None diff --git a/tests/test_export.py b/tests/test_export.py index b3caf17be..dfaab8667 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -11,7 +11,9 @@ TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") -ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") +ANOTHER_BAD_TEST_CONFIG = os.path.join( + os.path.dirname(__file__), "sd-export-config-bad-2.json" +) def test_extract_tarball(): @@ -21,7 +23,11 @@ def test_extract_tarball(): with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", 
"encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -45,15 +51,22 @@ def test_extract_tarball(): submission.extract_tarball() - extracted_file_path = os.path.join(submission.tmpdir, "some", "dirs", "file.txt") + extracted_file_path = os.path.join( + submission.tmpdir, "some", "dirs", "file.txt" + ) assert os.path.exists(extracted_file_path) assert oct(os.stat(extracted_file_path).st_mode) == "0o100600" # Subdirectories that are added as members are extracted with 700 permissions - assert oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" + assert ( + oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" + ) # Subdirectories that are not added as members are extracted with 700 permissions # because os.umask(0o077) is set in the SDExport constructor. 
- assert oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) == "0o40700" + assert ( + oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) + == "0o40700" + ) def test_extract_tarball_with_symlink(): @@ -63,7 +76,11 @@ def test_extract_tarball_with_symlink(): with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -94,14 +111,20 @@ def test_extract_tarball_raises_if_doing_path_traversal(): with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") metadata_file_info.size = len(metadata_str) archive.addfile(metadata_file_info, metadata_bytes) content = b"test" - traversed_file_info = tarfile.TarInfo("../../../../../../../../../tmp/traversed") + traversed_file_info = tarfile.TarInfo( + "../../../../../../../../../tmp/traversed" + ) traversed_file_info.size = len(content) archive.addfile(traversed_file_info, BytesIO(content)) archive.close() @@ -111,7 +134,7 @@ def test_extract_tarball_raises_if_doing_path_traversal(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/traversed') + assert not os.path.exists("/tmp/traversed") def 
test_extract_tarball_raises_if_doing_path_traversal_with_dir(): @@ -127,7 +150,11 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_dir(): with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -144,7 +171,7 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_dir(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/traversed') + assert not os.path.exists("/tmp/traversed") def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): @@ -160,7 +187,11 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -179,7 +210,7 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/traversed') + assert not os.path.exists("/tmp/traversed") def test_extract_tarball_raises_if_doing_path_traversal_with_symlink_linkname(): @@ -195,7 +226,11 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink_linkname(): with tempfile.TemporaryDirectory() 
as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -214,7 +249,7 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink_linkname(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/traversed') + assert not os.path.exists("/tmp/traversed") def test_extract_tarball_raises_if_name_has_unsafe_absolute_path(): @@ -228,7 +263,11 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path(): with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -246,7 +285,7 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/unsafe') + assert not os.path.exists("/tmp/unsafe") def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): @@ -262,10 +301,16 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): archive_path = os.path.join(temp_dir, "archive.sd-export") symlink_path = os.path.join(temp_dir, "symlink") - os.system(f"ln -s {tmp}/unsafe {symlink_path}") # create symlink to "/tmp/unsafe" + os.system( + f"ln -s 
{tmp}/unsafe {symlink_path}" + ) # create symlink to "/tmp/unsafe" with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -279,7 +324,7 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/unsafe') + assert not os.path.exists("/tmp/unsafe") def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink_to_dir(): @@ -305,7 +350,11 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink_to os.system(f"ln -s {tmp} {symlink_path}") # create symlink to "/tmp" with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -320,7 +369,7 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink_to with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/unsafe') + assert not os.path.exists("/tmp/unsafe") def test_extract_tarball_raises_if_linkname_has_unsafe_absolute_path(): @@ -334,7 +383,11 @@ def test_extract_tarball_raises_if_linkname_has_unsafe_absolute_path(): with tempfile.TemporaryDirectory() as temp_dir: archive_path = os.path.join(temp_dir, "archive.sd-export") with tarfile.open(archive_path, "w:gz") as archive: - metadata = {"device": "disk", "encryption_method": "luks", "encryption_key": "test"} + 
metadata = { + "device": "disk", + "encryption_method": "luks", + "encryption_key": "test", + } metadata_str = json.dumps(metadata) metadata_bytes = BytesIO(metadata_str.encode("utf-8")) metadata_file_info = tarfile.TarInfo("metadata.json") @@ -353,12 +406,12 @@ def test_extract_tarball_raises_if_linkname_has_unsafe_absolute_path(): with pytest.raises(SystemExit): submission.extract_tarball() - assert not os.path.exists('/tmp/unsafe') + assert not os.path.exists("/tmp/unsafe") def test_exit_gracefully_no_exception(capsys): submission = export.SDExport("testfile", TEST_CONFIG) - test_msg = 'test' + test_msg = "test" with pytest.raises(SystemExit) as sysexit: submission.exit_gracefully(test_msg) @@ -373,12 +426,10 @@ def test_exit_gracefully_no_exception(capsys): def test_exit_gracefully_exception(capsys): submission = export.SDExport("testfile", TEST_CONFIG) - test_msg = 'test' + test_msg = "test" with pytest.raises(SystemExit) as sysexit: - submission.exit_gracefully( - test_msg, e=Exception('BANG!') - ) + submission.exit_gracefully(test_msg, e=Exception("BANG!")) # A graceful exit means a return code of 0 assert sysexit.value.code == 0 @@ -463,11 +514,11 @@ def test_valid_encryption_config(capsys): def test_safe_check_call(capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) - submission.safe_check_call(['ls'], "this will work") + submission.safe_check_call(["ls"], "this will work") mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = "uh oh!!!!" 
- submission.safe_check_call(['ls', 'kjdsfhkdjfh'], expected_message) + submission.safe_check_call(["ls", "kjdsfhkdjfh"], expected_message) - assert mocked_exit.mock_calls[0][2]['msg'] == expected_message - assert mocked_exit.mock_calls[0][2]['e'] is None + assert mocked_exit.mock_calls[0][2]["msg"] == expected_message + assert mocked_exit.mock_calls[0][2]["e"] is None diff --git a/tests/test_main.py b/tests/test_main.py index d1e43d251..efa2a6e18 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,3 +1,4 @@ from securedrop_export.main import __main__ # noqa: F401 + # This import ensures at least the imports in main.__main__ # are executed during a test run From 2d39314f374f93486578a06ad854782e92f8811a Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 24 May 2022 19:22:30 -0700 Subject: [PATCH 274/352] have CI run linters --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 49f04010e..85645d619 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -18,7 +18,7 @@ common-steps: virtualenv .venv source .venv/bin/activate pip install --require-hashes -r requirements/dev-requirements.txt - make test + make check --keep-going - &install_packaging_dependencies run: From 252511e484a6142f015dfc9f8b2efad341a74986 Mon Sep 17 00:00:00 2001 From: Allie Crevier Date: Tue, 24 May 2022 19:33:31 -0700 Subject: [PATCH 275/352] rebase onto main and run black against new code --- securedrop_export/export.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index 81a13d2ee..fee15371d 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -127,10 +127,12 @@ def safe_check_call(self, command, error_message): subprocess.check_call(command) except subprocess.CalledProcessError as ex: # ppdc emits warnings which should not be treated as user facing errors - if 
ex.returncode == 0 and \ - ex.stderr is not None and \ - ex.stderr.startswith("ppdc: Warning"): - logger.info('Encountered warning: {}'.format(ex.output)) + if ( + ex.returncode == 0 + and ex.stderr is not None + and ex.stderr.startswith("ppdc: Warning") + ): + logger.info("Encountered warning: {}".format(ex.output)) else: self.exit_gracefully(msg=error_message, e=ex.output) From d82cbe6588d346ad760ded84e3e3e3c0dbe17959 Mon Sep 17 00:00:00 2001 From: ro Date: Wed, 25 May 2022 17:35:11 -0400 Subject: [PATCH 276/352] Use debian:buster CI image --- .circleci/config.yml | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85645d619..fd43d11c4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,21 +1,23 @@ --- common-steps: - - &removevirtualenv + - &install_deps run: - name: Removes the upstream virtualenv from the original container image - command: sudo pip uninstall virtualenv -y - + name: Install base dependencies for Debian python + command: | + set -e + pip uninstall virtualenv -y || true + apt-get update && apt-get install -y sudo make git gnupg python3 python3-venv - &install_packages run: name: Install packages command: | - sudo apt install libnotify-bin + sudo apt install -y libnotify-bin - &run_tests run: name: Install test requirements and run tests command: | - virtualenv .venv + python3 -m venv .venv source .venv/bin/activate pip install --require-hashes -r requirements/dev-requirements.txt make check --keep-going @@ -25,7 +27,8 @@ common-steps: name: Install Debian packaging dependencies and download wheels command: | mkdir ~/packaging && cd ~/packaging - git config --global --unset url.ssh://git@github.com.insteadof + # local builds may not have an SSH url + git config --global --unset url.ssh://git@github.com.insteadof || true git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd securedrop-debian-packaging make 
install-deps @@ -54,20 +57,21 @@ common-steps: command: | cd ~/packaging/securedrop-debian-packaging export PKG_VERSION=1000.0 - export PKG_PATH=/home/circleci/project/dist/securedrop-export-$PKG_VERSION.tar.gz + export PKG_PATH=~/project/dist/securedrop-export-$PKG_VERSION.tar.gz make securedrop-export version: 2 jobs: lint: docker: - - image: circleci/python:3.7 + - image: debian:buster steps: + - *install_deps - checkout - run: name: Install test requirements and run lint command: | - virtualenv .venv + python3 -m venv .venv source .venv/bin/activate pip install --require-hashes -r requirements/dev-requirements.txt make lint @@ -80,18 +84,19 @@ jobs: test-buster: docker: - - image: circleci/python:3.7-buster + - image: debian:buster steps: + - *install_deps - checkout - *install_packages - *run_tests build-buster: docker: - - image: circleci/python:3.7-buster + - image: debian:buster steps: + - *install_deps - checkout - - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *make_source_tarball From a5727222a169a6e8b67d17f2261c6fcae891d8f1 Mon Sep 17 00:00:00 2001 From: Gonzalo Bulnes Guilpain Date: Thu, 26 May 2022 15:07:03 +1000 Subject: [PATCH 277/352] Build Debian package from repository instead of tarball This works around a difference of behaviour of the building tools that causes tarball builds to rely on outdated tooling. --- .circleci/config.yml | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4d7b27ebb..c5667f693 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -25,21 +25,13 @@ common-steps: # previous run step), else return 0. 
git diff --quiet - - &make_source_tarball - run: - name: Tag and make source tarball - command: | - cd ~/project - ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here - python3 setup.py sdist - - &build_debian_package run: name: Build debian package command: | cd ~/packaging/securedrop-debian-packaging export PKG_VERSION=1000.0 - export PKG_PATH=/home/circleci/project/dist/securedrop-log-$PKG_VERSION.tar.gz + export PKG_PATH=~/project/ make securedrop-log version: 2 @@ -61,7 +53,6 @@ jobs: - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - - *make_source_tarball - *build_debian_package workflows: From 78fe3b7c1767e2d7c084f430a9fa79beebc40f10 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Wed, 18 May 2022 12:22:24 -0400 Subject: [PATCH 278/352] Have CI test and build on bullseye too And switch to the standard debian images while we're at it. --- .circleci/config.yml | 36 ++++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index c5667f693..0540c1050 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,16 +1,18 @@ --- common-steps: - - &removevirtualenv + - &run_tests run: - name: Removes the upstream virtualenv from the original container image - command: sudo pip uninstall virtualenv -y + name: Run tests + command: | + apt-get update && apt-get install -y python3 + python3 -m unittest - &install_packaging_dependencies run: name: Install Debian packaging dependencies and download wheels command: | + apt-get update && apt-get install -y git git-lfs make sudo mkdir ~/packaging && cd ~/packaging - git config --global --unset url.ssh://git@github.com.insteadof git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd securedrop-debian-packaging make install-deps @@ -38,19 +40,31 @@ version: 2 jobs: test: docker: - - image: circleci/python:3.7-buster + - image: debian:buster + steps: + - 
checkout + - *run_tests + test-bullseye: + docker: + - image: debian:bullseye steps: - checkout - - run: - name: Run tests - command: python3 -m unittest + - *run_tests build-buster: docker: - - image: circleci/python:3.7-buster + - image: debian:buster + steps: + - checkout + - *install_packaging_dependencies + - *verify_requirements + - *build_debian_package + + build-bullseye: + docker: + - image: debian:bullseye steps: - checkout - - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *build_debian_package @@ -60,4 +74,6 @@ workflows: per_pr: jobs: - test + - test-bullseye - build-buster + - build-bullseye From 357464d76be65761dca5f04dea4651b6f6d45b2a Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Fri, 3 Jun 2022 15:35:20 -0400 Subject: [PATCH 279/352] Add hash for 3.9 pyyaml wheel for bullseye support --- requirements/build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index be41d42ce..37d0982dc 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -3,7 +3,7 @@ charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f -pyyaml==5.4.1 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 +pyyaml==5.4.1 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 --hash=sha256:645773490bf785cd110b4a5e47635990c46219b7c4f01b424f0409cf01d12f2b requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.26.6 
--hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 From 6d105b529b63ca0e19dbd4c879b2281d6a8b5992 Mon Sep 17 00:00:00 2001 From: Michael Z Date: Thu, 30 Jun 2022 16:35:22 -0400 Subject: [PATCH 280/352] Always communicate error messages to clients Previously, exit_gracefully did not share the type of error message with the client if the subprocess exception that called the method captured output. This is counter to what we would expect. Co-authored-by: Allie Crevier --- securedrop_export/export.py | 21 +++++++++++---------- tests/test_export.py | 10 +++++++--- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index fee15371d..4ce348536 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -102,18 +102,19 @@ def exit_gracefully(self, msg, e=False): solutions for mimetype handling, which we want to avoid. """ logger.info("Exiting with message: {}".format(msg)) - if not e: + if e: + logger.error("Captured exception output: {}".format(e.output)) + try: + # If the file archive was extracted, delete before returning + if os.path.isdir(self.tmpdir): + shutil.rmtree(self.tmpdir) + # Do this after deletion to avoid giving the client two error messages in case of the + # block above failing sys.stderr.write(msg) sys.stderr.write("\n") - else: - try: - # If the file archive was extracted, delete before returning - if os.path.isdir(self.tmpdir): - shutil.rmtree(self.tmpdir) - logger.error("{}:{}".format(msg, e.output)) - except Exception as ex: - logger.error("Unhandled exception: {}".format(ex)) - sys.stderr.write(ExportStatus.ERROR_GENERIC.value) + except Exception as ex: + logger.error("Unhandled exception: {}".format(ex)) + sys.stderr.write(ExportStatus.ERROR_GENERIC.value) # exit with 0 return code otherwise the os will attempt to open # the file with another application sys.exit(0) diff --git a/tests/test_export.py b/tests/test_export.py index 
dfaab8667..4c0b81bc3 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -2,6 +2,8 @@ import subprocess # noqa: F401 import tempfile +from unittest import mock + import json import pytest import tarfile @@ -426,16 +428,18 @@ def test_exit_gracefully_no_exception(capsys): def test_exit_gracefully_exception(capsys): submission = export.SDExport("testfile", TEST_CONFIG) - test_msg = "test" + test_msg = "ERROR_GENERIC" with pytest.raises(SystemExit) as sysexit: - submission.exit_gracefully(test_msg, e=Exception("BANG!")) + exception = mock.MagicMock() + exception.output = "BANG!" + submission.exit_gracefully(test_msg, e=exception) # A graceful exit means a return code of 0 assert sysexit.value.code == 0 captured = capsys.readouterr() - assert captured.err == export.ExportStatus.ERROR_GENERIC.value + assert captured.err.rstrip() == export.ExportStatus.ERROR_GENERIC.value assert captured.out == "" From 6fce35b2efa313b3b44d7afd2ee7d253928a8f1e Mon Sep 17 00:00:00 2001 From: Michael Z Date: Thu, 30 Jun 2022 14:21:23 -0400 Subject: [PATCH 281/352] Handle subprocess stderr output in Python 3.9 subprocess.check_call does not raise an exception anymore if there's just output on stderr - hence, we move on to its newer API that allows us to capture stderr and analyze it even if the returncode is 0. It also safely ignores lpadmin driver warnings to allow us to continue to use printer drivers with CUPS. 
--- securedrop_export/export.py | 23 +++++++++++----------- securedrop_export/print/actions.py | 2 ++ tests/disk/test_actions.py | 13 +++++++++---- tests/print/test_actions.py | 10 +++++----- tests/test_export.py | 31 ++++++++++++++++++++++++++---- 5 files changed, 55 insertions(+), 24 deletions(-) diff --git a/securedrop_export/export.py b/securedrop_export/export.py index fee15371d..348380929 100755 --- a/securedrop_export/export.py +++ b/securedrop_export/export.py @@ -118,23 +118,24 @@ def exit_gracefully(self, msg, e=False): # the file with another application sys.exit(0) - def safe_check_call(self, command, error_message): + def safe_check_call(self, command, error_message, ignore_stderr_startswith=None): """ Safely wrap subprocess.check_output to ensure we always return 0 and log the error messages """ try: - subprocess.check_call(command) - except subprocess.CalledProcessError as ex: - # ppdc emits warnings which should not be treated as user facing errors - if ( - ex.returncode == 0 - and ex.stderr is not None - and ex.stderr.startswith("ppdc: Warning") - ): - logger.info("Encountered warning: {}".format(ex.output)) + err = subprocess.run(command, check=True, capture_output=True).stderr + # ppdc and lpadmin may emit warnings we are aware of which should not be treated as + # user facing errors + if ignore_stderr_startswith and err.startswith(ignore_stderr_startswith): + logger.info("Encountered warning: {}".format(err.decode("utf-8"))) + elif err == b"": + # Nothing on stderr and returncode is 0, we're good + pass else: - self.exit_gracefully(msg=error_message, e=ex.output) + self.exit_gracefully(msg=error_message, e=err) + except subprocess.CalledProcessError as ex: + self.exit_gracefully(msg=error_message, e=ex.output) class ExportAction(abc.ABC): diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index 1235bccec..01ffad515 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -143,6 
+143,7 @@ def install_printer_ppd(self, uri): "/usr/share/cups/model/", ], error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value, + ignore_stderr_startswith=b"ppdc: Warning", ) return printer_ppd @@ -165,6 +166,7 @@ def setup_printer(self, printer_uri, printer_ppd): "allow:user", ], error_message=ExportStatus.ERROR_PRINTER_INSTALL.value, + ignore_stderr_startswith=b"lpadmin: Printer drivers", ) def print_test_page(self): diff --git a/tests/disk/test_actions.py b/tests/disk/test_actions.py index d82da2791..9c553a205 100644 --- a/tests/disk/test_actions.py +++ b/tests/disk/test_actions.py @@ -134,11 +134,14 @@ def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) -@mock.patch("subprocess.check_call", return_value=0) def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = DiskExportAction(submission) + command_output = mock.MagicMock() + command_output.stderr = b"" + mocker.patch("subprocess.run", return_value=command_output) + expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) @@ -148,7 +151,6 @@ def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -@mock.patch("subprocess.check_call", return_value=0) def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = DiskExportAction(submission) @@ -156,6 +158,10 @@ def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): expected_message = export.ExportStatus.USB_ENCRYPTED.value mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + command_output = mock.MagicMock() + command_output.stderr = b"" + mocker.patch("subprocess.run", 
return_value=command_output) + action.check_luks_volume() mocked_exit.assert_called_once_with(expected_message) @@ -178,7 +184,6 @@ def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): # Output of `lsblk -o TYPE --noheadings DEVICE_NAME` when a drive has multiple # partitions multi_partition_lsblk_output = b"disk\npart\npart\n" - mocker.patch("subprocess.check_call", return_value=0) mocker.patch("subprocess.check_output", return_value=multi_partition_lsblk_output) with pytest.raises(SystemExit): @@ -202,7 +207,7 @@ def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): single_partition_lsblk_output = b"disk\npart\n" mocker.patch("subprocess.check_output", return_value=single_partition_lsblk_output) mocker.patch( - "subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + "subprocess.run", side_effect=CalledProcessError(1, "run") ) with pytest.raises(SystemExit): diff --git a/tests/print/test_actions.py b/tests/print/test_actions.py index 176871d1a..caeeb8f90 100644 --- a/tests/print/test_actions.py +++ b/tests/print/test_actions.py @@ -80,7 +80,7 @@ def test_is_not_open_office_file(capsys, open_office_paths): assert not action.is_open_office_file(open_office_paths) -@mock.patch("subprocess.check_call") +@mock.patch("subprocess.run") def test_install_printer_ppd_laserjet(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) @@ -90,7 +90,7 @@ def test_install_printer_ppd_laserjet(mocker): assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" -@mock.patch("subprocess.check_call") +@mock.patch("subprocess.run") def test_install_printer_ppd_brother(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) @@ -105,7 +105,7 @@ def test_install_printer_ppd_error_no_driver(mocker): action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) mocker.patch( - 
"subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + "subprocess.run", side_effect=CalledProcessError(1, "run") ) action.install_printer_ppd( @@ -121,7 +121,7 @@ def test_install_printer_ppd_error_not_supported(mocker): action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) mocker.patch( - "subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + "subprocess.run", side_effect=CalledProcessError(1, "run") ) action.install_printer_ppd("usb://Not/Supported?serial=A00000A000000") @@ -134,7 +134,7 @@ def test_setup_printer_error(mocker): action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) mocker.patch( - "subprocess.check_call", side_effect=CalledProcessError(1, "check_call") + "subprocess.run", side_effect=CalledProcessError(1, "run") ) action.setup_printer( diff --git a/tests/test_export.py b/tests/test_export.py index dfaab8667..602a550fc 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -515,10 +515,33 @@ def test_valid_encryption_config(capsys): def test_safe_check_call(capsys, mocker): submission = export.SDExport("testfile", TEST_CONFIG) submission.safe_check_call(["ls"], "this will work") - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) expected_message = "uh oh!!!!" 
- submission.safe_check_call(["ls", "kjdsfhkdjfh"], expected_message) + with pytest.raises(SystemExit) as sysexit: + submission.safe_check_call(["ls", "kjdsfhkdjfh"], expected_message) + + assert sysexit.value.code == 0 + + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + assert captured.out == "" + + # This should work too + submission.safe_check_call( + ["python3", "-c", "import sys;sys.stderr.write('hello')"], + expected_message, + ignore_stderr_startswith=b'hello', + ) - assert mocked_exit.mock_calls[0][2]["msg"] == expected_message - assert mocked_exit.mock_calls[0][2]["e"] is None + with pytest.raises(SystemExit) as sysexit: + submission.safe_check_call( + ["python3", "-c", "import sys;sys.stderr.write('hello\n')"], + expected_message, + ignore_stderr_startswith=b'world', + ) + + assert sysexit.value.code == 0 + + captured = capsys.readouterr() + assert captured.err == "{}\n".format(expected_message) + assert captured.out == "" From 66a292857f47f853132ed81c56cdbcf45699d127 Mon Sep 17 00:00:00 2001 From: ro Date: Mon, 4 Jul 2022 15:59:17 -0400 Subject: [PATCH 282/352] Update changelog and version --- VERSION | 2 +- changelog.md | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/VERSION b/VERSION index d917d3e26..0ea3a944b 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.1.2 +0.2.0 diff --git a/changelog.md b/changelog.md index 8a5d98b3b..9848e301f 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ # Changelog +## 0.2.0 + + * Supports logging in QubesOS 4.1. + ## 0.1.2 * Uses Qubes domain name instead of system hostname. 
From 5abf04712bb0635871befd33a0e5d1c06920142a Mon Sep 17 00:00:00 2001 From: ro Date: Mon, 4 Jul 2022 17:31:06 -0400 Subject: [PATCH 283/352] Bump version to 0.4.0 and update changelog --- changelog.md | 7 +++++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index 7b6a457e4..2453b0d66 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,12 @@ # Changelog +## 0.4.0 + + * Reject JSON with duplicate keys (TOB-SDW-014) (#98) + * Support Debian Bullseye (#100, #97) + * Use reproducible wheels (#81, #85) + * Dependency updates (#82, #88, #91, #93, #95, #96) + ## 0.3.1 * Moved proxy configuration to private volume (#79) diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 9e11b32fc..1d0ba9ea1 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.3.1 +0.4.0 From 06043e6f7ed51f3c0a9d53b7272d1b3c3eb3b48b Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Fri, 3 Jun 2022 16:57:37 -0400 Subject: [PATCH 284/352] Remove optional steps that were only needed for Circle CI images --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fd43d11c4..ecf9ca534 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -5,7 +5,6 @@ common-steps: name: Install base dependencies for Debian python command: | set -e - pip uninstall virtualenv -y || true apt-get update && apt-get install -y sudo make git gnupg python3 python3-venv - &install_packages run: From e5d88a61c664b5b5f3f5e8e5eb030797ef9b2db7 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Fri, 3 Jun 2022 16:57:56 -0400 Subject: [PATCH 285/352] Add bullseye to CI, drop make_source_tarball step --- .circleci/config.yml | 79 ++++++++++++++++++++++++++++++-------------- 1 file changed, 55 insertions(+), 24 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ecf9ca534..20b50b767 100644 --- a/.circleci/config.yml +++ 
b/.circleci/config.yml @@ -5,7 +5,7 @@ common-steps: name: Install base dependencies for Debian python command: | set -e - apt-get update && apt-get install -y sudo make git gnupg python3 python3-venv + apt-get update && apt-get install -y sudo make git git-lfs gnupg python3 python3-venv - &install_packages run: name: Install packages @@ -42,23 +42,34 @@ common-steps: # previous run step), else return 0. git diff --quiet - - &make_source_tarball - run: - name: Tag and make source tarball - command: | - cd ~/project - ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here - python3 setup.py sdist - - &build_debian_package run: name: Build debian package command: | cd ~/packaging/securedrop-debian-packaging export PKG_VERSION=1000.0 - export PKG_PATH=~/project/dist/securedrop-export-$PKG_VERSION.tar.gz + export PKG_PATH=~/project/ make securedrop-export + - &run_lint + run: + name: Install test requirements and run lint + command: | + python3 -m venv .venv + source .venv/bin/activate + pip install --require-hashes -r requirements/dev-requirements.txt + make lint + + - &run_safety + run: + name: Check Python dependencies for CVEs + command: | + set -e + source .venv/bin/activate + make safety + + + version: 2 jobs: lint: @@ -67,19 +78,17 @@ jobs: steps: - *install_deps - checkout - - run: - name: Install test requirements and run lint - command: | - python3 -m venv .venv - source .venv/bin/activate - pip install --require-hashes -r requirements/dev-requirements.txt - make lint - - run: - name: Check Python dependencies for CVEs - command: | - set -e - source .venv/bin/activate - make safety + - *run_lint + - *run_safety + + lint-bullseye: + docker: + - image: debian:buster + steps: + - *install_deps + - checkout + - *run_lint + - *run_safety test-buster: docker: @@ -90,6 +99,15 @@ jobs: - *install_packages - *run_tests + test-bullseye: + docker: + - image: debian:bullseye + steps: + - *install_deps + - checkout + - *install_packages + - 
*run_tests + build-buster: docker: - image: debian:buster @@ -98,13 +116,26 @@ jobs: - checkout - *install_packaging_dependencies - *verify_requirements - - *make_source_tarball - *build_debian_package + build-bullseye: + docker: + - image: debian:bullseye + steps: + - *install_deps + - checkout + - *install_packaging_dependencies + - *verify_requirements + - *build_debian_package + + workflows: version: 2 securedrop_export_ci: jobs: - lint + - lint-bullseye - test-buster + - test-bullseye - build-buster + - build-bullseye From cc451074a41e2f7c20766e998c969b255592b9f1 Mon Sep 17 00:00:00 2001 From: ro Date: Tue, 5 Jul 2022 16:53:27 -0400 Subject: [PATCH 286/352] fix test format --- tests/disk/test_actions.py | 4 +--- tests/print/test_actions.py | 12 +++--------- tests/test_export.py | 4 ++-- 3 files changed, 6 insertions(+), 14 deletions(-) diff --git a/tests/disk/test_actions.py b/tests/disk/test_actions.py index 9c553a205..7d5d24d2a 100644 --- a/tests/disk/test_actions.py +++ b/tests/disk/test_actions.py @@ -206,9 +206,7 @@ def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): single_partition_lsblk_output = b"disk\npart\n" mocker.patch("subprocess.check_output", return_value=single_partition_lsblk_output) - mocker.patch( - "subprocess.run", side_effect=CalledProcessError(1, "run") - ) + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) with pytest.raises(SystemExit): action.check_luks_volume() diff --git a/tests/print/test_actions.py b/tests/print/test_actions.py index caeeb8f90..37b2ea9c4 100644 --- a/tests/print/test_actions.py +++ b/tests/print/test_actions.py @@ -104,9 +104,7 @@ def test_install_printer_ppd_error_no_driver(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch( - "subprocess.run", side_effect=CalledProcessError(1, "run") - ) + mocker.patch("subprocess.run", 
side_effect=CalledProcessError(1, "run")) action.install_printer_ppd( "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" @@ -120,9 +118,7 @@ def test_install_printer_ppd_error_not_supported(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch( - "subprocess.run", side_effect=CalledProcessError(1, "run") - ) + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) action.install_printer_ppd("usb://Not/Supported?serial=A00000A000000") @@ -133,9 +129,7 @@ def test_setup_printer_error(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch( - "subprocess.run", side_effect=CalledProcessError(1, "run") - ) + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) action.setup_printer( "usb://Brother/HL-L2320D%20series?serial=A00000A000000", diff --git a/tests/test_export.py b/tests/test_export.py index 41a51659a..fb6f5868d 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -534,14 +534,14 @@ def test_safe_check_call(capsys, mocker): submission.safe_check_call( ["python3", "-c", "import sys;sys.stderr.write('hello')"], expected_message, - ignore_stderr_startswith=b'hello', + ignore_stderr_startswith=b"hello", ) with pytest.raises(SystemExit) as sysexit: submission.safe_check_call( ["python3", "-c", "import sys;sys.stderr.write('hello\n')"], expected_message, - ignore_stderr_startswith=b'world', + ignore_stderr_startswith=b"world", ) assert sysexit.value.code == 0 From 2bed056b2a026f81bb296f0f79341dbb32ab3831 Mon Sep 17 00:00:00 2001 From: ro Date: Tue, 5 Jul 2022 16:46:51 -0400 Subject: [PATCH 287/352] Bump version number and add changelog entry for 0.3.0 --- changelog.md | 7 +++++++ securedrop_export/VERSION | 2 +- 
securedrop_export/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/changelog.md b/changelog.md index 5653795a1..fa42d2297 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,12 @@ # Changelog +## 0.3.0 + + * Support Debian Bullseye + * Improve error-handling + * Update dependencies + * Refactor path-traversal check for improved readability + ## 0.2.6 * Further validate target paths diff --git a/securedrop_export/VERSION b/securedrop_export/VERSION index 53a75d673..0d91a54c7 100644 --- a/securedrop_export/VERSION +++ b/securedrop_export/VERSION @@ -1 +1 @@ -0.2.6 +0.3.0 diff --git a/securedrop_export/__init__.py b/securedrop_export/__init__.py index 01ef12070..493f7415d 100644 --- a/securedrop_export/__init__.py +++ b/securedrop_export/__init__.py @@ -1 +1 @@ -__version__ = "0.2.6" +__version__ = "0.3.0" From bbe2e9e56b9924e93095bee83df5913db38e9b31 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 1 Sep 2022 18:40:29 -0400 Subject: [PATCH 288/352] Add sha256sum of rebuilt PyYAML wheel on Python 3.10 (bookworm) The wheel is added in . 
--- requirements/build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 37d0982dc..f69c6fe8d 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -3,7 +3,7 @@ charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f -pyyaml==5.4.1 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 --hash=sha256:645773490bf785cd110b4a5e47635990c46219b7c4f01b424f0409cf01d12f2b +pyyaml==5.4.1 --hash=sha256:24ba69a7c05ba63fef9732bf26cc5d328b2089e525ee87fc9ec572c92f88dd46 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 --hash=sha256:645773490bf785cd110b4a5e47635990c46219b7c4f01b424f0409cf01d12f2b requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.26.6 --hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 From f1698a91d49c0168abb697f9882c3730cbb84935 Mon Sep 17 00:00:00 2001 From: Michael Z Date: Thu, 20 Oct 2022 12:00:23 -0400 Subject: [PATCH 289/352] Add RTF printing support Merely adds .rtf to the file name extensions that are to be converted by LibreOffice. 
Fixes #108 --- securedrop_export/print/actions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/actions.py index 01ffad515..78a5e3aa3 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/actions.py @@ -194,6 +194,7 @@ def is_open_office_file(self, filename): ".odt", ".ods", ".odp", + ".rtf", ] for extension in OPEN_OFFICE_FORMATS: if os.path.basename(filename).endswith(extension): From 968c5084b4d6e5d766e3e073b3a523220f5d3546 Mon Sep 17 00:00:00 2001 From: Gonzalo Bulnes Guilpain Date: Wed, 23 Nov 2022 15:14:56 +1100 Subject: [PATCH 290/352] Minor ignore additional virtual env directories Re-generating the dev requirement files can be done by using two vitual environments with Python 3.9 and 3.10 respectively. Using distinctly named directories makes things a little bit quicker. --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 894a44cc0..5c46f9c06 100644 --- a/.gitignore +++ b/.gitignore @@ -89,6 +89,8 @@ venv/ ENV/ env.bak/ venv.bak/ +.venv39 +.venv310 # Spyder project settings .spyderproject From 4b2bb80a6c7949caa624739d53a5561b7057d184 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 1 Sep 2022 20:01:06 -0400 Subject: [PATCH 291/352] Update CI configuration, add bullseye and bookworm jobs This is copy-pasted from securedrop-client, with steps that don't apply here removed. Some of them, like isort and mypy, really should be added to this repository. 
--- .circleci/config.yml | 208 ++++++++++++++++++++++++++----------------- 1 file changed, 124 insertions(+), 84 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 20b50b767..7bdae7873 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,46 +1,87 @@ --- common-steps: - - &install_deps + - &install_testing_dependencies run: - name: Install base dependencies for Debian python + name: Install testing dependencies command: | set -e - apt-get update && apt-get install -y sudo make git git-lfs gnupg python3 python3-venv - - &install_packages + apt update && apt install -y git gnupg make python3-dev gnupg python3-venv libnotify-bin + + - &install_build_dependencies run: - name: Install packages + name: Install build dependencies command: | - sudo apt install -y libnotify-bin + set -e + apt update && apt install -y git make sudo - - &run_tests + - &run_unit_tests run: - name: Install test requirements and run tests + name: Install requirements and run unit tests command: | - python3 -m venv .venv + set -e + make venv source .venv/bin/activate - pip install --require-hashes -r requirements/dev-requirements.txt - make check --keep-going + export PYTHONPATH=$PYTHONPATH:. 
# so alembic can get to Base metadata + make test + + - &run_lint + run: + name: Run lint, type checking, code formatting + command: | + set -e + make venv + source .venv/bin/activate + make check-black lint + + - &check_security + run: + name: Run static analysis on source code to find security issues + command: | + set -e + make venv + source .venv/bin/activate + make semgrep + + - &check_python_dependencies_for_vulnerabilities + run: + name: Check Python dependencies for known vulnerabilities + command: | + set -e + make venv + source .venv/bin/activate + make safety - &install_packaging_dependencies run: - name: Install Debian packaging dependencies and download wheels + name: Install Debian packaging dependencies and download Python wheels command: | + set -x mkdir ~/packaging && cd ~/packaging - # local builds may not have an SSH url + # local builds may not have an ssh url, so || true git config --global --unset url.ssh://git@github.com.insteadof || true git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd securedrop-debian-packaging + apt-get update && apt-get install -y sudo make make install-deps PKG_DIR=~/project make requirements - - &verify_requirements + - &check_packaging_requirements run: - name: Ensure that build-requirements.txt and requirements.txt are in sync. + name: Ensure that the same Python requirements are used for packaging and production. command: | cd ~/project - # Return 1 if unstaged changes exist (after `make requirements` in the - # previous run step), else return 0. - git diff --quiet + # Fail if unstaged changes exist (after `make requirements` in the previous run step). + git diff --ignore-matching-lines=# --exit-code + + - &check_testing_requirements + run: + name: Ensure that the same Python requirements are used for development/testing and production. 
+ command: | + set -e + make venv + source .venv/bin/activate + make requirements + git diff --ignore-matching-lines=# --exit-code - &build_debian_package run: @@ -51,91 +92,90 @@ common-steps: export PKG_PATH=~/project/ make securedrop-export - - &run_lint - run: - name: Install test requirements and run lint - command: | - python3 -m venv .venv - source .venv/bin/activate - pip install --require-hashes -r requirements/dev-requirements.txt - make lint - - - &run_safety - run: - name: Check Python dependencies for CVEs - command: | - set -e - source .venv/bin/activate - make safety - - +version: 2.1 -version: 2 jobs: - lint: - docker: - - image: debian:buster + build: + parameters: ¶meters + image: + type: string + docker: &docker + - image: debian:<< parameters.image >> steps: - - *install_deps + - *install_build_dependencies - checkout - - *run_lint - - *run_safety + - *install_packaging_dependencies + - *check_packaging_requirements + - *build_debian_package - lint-bullseye: - docker: - - image: debian:buster + unit-test: + parameters: *parameters + docker: *docker steps: - - *install_deps + - *install_testing_dependencies - checkout - - *run_lint - - *run_safety + - *run_unit_tests + - store_test_results: + path: test-results - test-buster: - docker: - - image: debian:buster + lint: + parameters: *parameters + docker: *docker steps: - - *install_deps + - *install_testing_dependencies - checkout - - *install_packages - - *run_tests + - *run_lint - test-bullseye: - docker: - - image: debian:bullseye + check-security: + parameters: *parameters + docker: *docker steps: - - *install_deps + - *install_testing_dependencies - checkout - - *install_packages - - *run_tests + - *check_security - build-buster: - docker: - - image: debian:buster + check-python-security: + parameters: *parameters + docker: *docker steps: - - *install_deps + - *install_testing_dependencies - checkout - - *install_packaging_dependencies - - *verify_requirements - - *build_debian_package + - 
*check_python_dependencies_for_vulnerabilities - build-bullseye: - docker: - - image: debian:bullseye + check-testing-requirements: + parameters: *parameters + docker: *docker steps: - - *install_deps + - *install_testing_dependencies - checkout - - *install_packaging_dependencies - - *verify_requirements - - *build_debian_package - + - *check_testing_requirements workflows: - version: 2 securedrop_export_ci: - jobs: - - lint - - lint-bullseye - - test-buster - - test-bullseye - - build-buster - - build-bullseye + jobs: &jobs + - unit-test: + matrix: &matrix + parameters: + image: + - bullseye + - bookworm + - lint: + matrix: *matrix + - check-testing-requirements: + matrix: *matrix + - check-security: + matrix: *matrix + - check-python-security: + matrix: *matrix + - build: + matrix: *matrix + + nightly: + triggers: + - schedule: + cron: "0 6 * * *" + filters: + branches: + only: + - main + jobs: *jobs From 5424a2f8b4602ee51c4956785fa5f3afaa151fcd Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 22 Nov 2022 15:19:51 -0800 Subject: [PATCH 292/352] Update dev-only dependencies and sync requirements. 
--- requirements/dev-requirements.txt | 566 ++++++++++++++++-------------- requirements/requirements.txt | 2 +- 2 files changed, 307 insertions(+), 261 deletions(-) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 9cc7487ba..49ef8edae 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.7 +# This file is autogenerated by pip-compile with python 3.9 # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in @@ -12,30 +12,28 @@ attrs==21.4.0 \ # jsonschema # pytest # semgrep -black==22.3.0 \ - --hash=sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b \ - --hash=sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176 \ - --hash=sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09 \ - --hash=sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a \ - --hash=sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015 \ - --hash=sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79 \ - --hash=sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb \ - --hash=sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20 \ - --hash=sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464 \ - --hash=sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968 \ - --hash=sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82 \ - --hash=sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21 \ - --hash=sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0 \ - --hash=sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265 \ - 
--hash=sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b \ - --hash=sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a \ - --hash=sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72 \ - --hash=sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce \ - --hash=sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0 \ - --hash=sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a \ - --hash=sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163 \ - --hash=sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad \ - --hash=sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d +black==22.10.0 \ + --hash=sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7 \ + --hash=sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6 \ + --hash=sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650 \ + --hash=sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb \ + --hash=sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d \ + --hash=sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d \ + --hash=sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de \ + --hash=sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395 \ + --hash=sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae \ + --hash=sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa \ + --hash=sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef \ + --hash=sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383 \ + --hash=sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66 \ + --hash=sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87 \ + 
--hash=sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d \ + --hash=sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0 \ + --hash=sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b \ + --hash=sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458 \ + --hash=sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4 \ + --hash=sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1 \ + --hash=sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff # via -r requirements/dev-requirements.in boltons==21.0.0 \ --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ @@ -44,17 +42,21 @@ boltons==21.0.0 \ # face # glom # semgrep -bracex==2.3 \ - --hash=sha256:6789a715744bcb3359b53c4012dd94be5ab7669c638affe89f670595a3c73cc0 \ - --hash=sha256:a3ce1d8a9fb7acc887e2e60ac5aa269f243d960c34c3d8a541fb672bdb9aa957 +bracex==2.3.post1 \ + --hash=sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73 \ + --hash=sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693 # via wcmatch -certifi==2022.5.18.1 \ - --hash=sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7 \ - --hash=sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a +build==0.9.0 \ + --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ + --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 + # via pip-tools +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests -charset-normalizer==2.0.12 \ - --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ - --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df +charset-normalizer==2.1.1 \ + 
--hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f # via requests click==8.1.3 \ --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ @@ -64,98 +66,101 @@ click==8.1.3 \ # click-option-group # pip-tools # semgrep -click-option-group==0.5.3 \ - --hash=sha256:9653a2297357335d7325a1827e71ac1245d91c97d959346a7decabd4a52d5354 \ - --hash=sha256:a6e924f3c46b657feb5b72679f7e930f8e5b224b766ab35c91ae4019b4e0615e +click-option-group==0.5.5 \ + --hash=sha256:0f8ca79bc9b1d6fcaafdbe194b17ba1a2dde44ddf19087235c3efed2ad288143 \ + --hash=sha256:78ee474f07a0ca0ef6c0317bb3ebe79387aafb0c4a1e03b1d8b2b0be1e42fc78 # via semgrep -colorama==0.4.4 \ - --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ - --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 # via semgrep -coverage[toml]==6.4 \ - --hash=sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a \ - --hash=sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6 \ - --hash=sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383 \ - --hash=sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f \ - --hash=sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f \ - --hash=sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f \ - --hash=sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c \ - --hash=sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018 \ - --hash=sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720 \ - --hash=sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3 \ - 
--hash=sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf \ - --hash=sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211 \ - --hash=sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39 \ - --hash=sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95 \ - --hash=sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41 \ - --hash=sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c \ - --hash=sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166 \ - --hash=sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49 \ - --hash=sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce \ - --hash=sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088 \ - --hash=sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6 \ - --hash=sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426 \ - --hash=sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df \ - --hash=sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632 \ - --hash=sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3 \ - --hash=sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08 \ - --hash=sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65 \ - --hash=sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea \ - --hash=sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701 \ - --hash=sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5 \ - --hash=sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311 \ - --hash=sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7 \ - --hash=sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d \ - --hash=sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61 \ - 
--hash=sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c \ - --hash=sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a \ - --hash=sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055 \ - --hash=sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740 \ - --hash=sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45 \ - --hash=sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052 \ - --hash=sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f +coverage[toml]==6.5.0 \ + --hash=sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79 \ + --hash=sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a \ + --hash=sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f \ + --hash=sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a \ + --hash=sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa \ + --hash=sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398 \ + --hash=sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba \ + --hash=sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d \ + --hash=sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf \ + --hash=sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b \ + --hash=sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518 \ + --hash=sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d \ + --hash=sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795 \ + --hash=sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2 \ + --hash=sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e \ + --hash=sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32 \ + 
--hash=sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745 \ + --hash=sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b \ + --hash=sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e \ + --hash=sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d \ + --hash=sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f \ + --hash=sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660 \ + --hash=sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62 \ + --hash=sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6 \ + --hash=sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04 \ + --hash=sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c \ + --hash=sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5 \ + --hash=sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef \ + --hash=sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc \ + --hash=sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae \ + --hash=sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578 \ + --hash=sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466 \ + --hash=sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4 \ + --hash=sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91 \ + --hash=sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0 \ + --hash=sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4 \ + --hash=sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b \ + --hash=sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe \ + --hash=sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b \ + --hash=sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75 \ + 
--hash=sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b \ + --hash=sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c \ + --hash=sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72 \ + --hash=sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b \ + --hash=sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f \ + --hash=sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e \ + --hash=sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53 \ + --hash=sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3 \ + --hash=sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84 \ + --hash=sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987 # via pytest-cov defusedxml==0.7.1 \ --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 # via semgrep -face==20.1.1 \ - --hash=sha256:3790311a7329e4b0d90baee346eecad54b337629576edf3a246683a5f0d24446 \ - --hash=sha256:7d59ca5ba341316e58cf72c6aff85cca2541cf5056c4af45cb63af9a814bed3e +exceptiongroup==1.0.4 \ + --hash=sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828 \ + --hash=sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec + # via pytest +face==22.0.0 \ + --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ + --hash=sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d # via glom -flake8==4.0.1 \ - --hash=sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d \ - --hash=sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d +flake8==5.0.4 \ + --hash=sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db \ + --hash=sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248 # via -r 
requirements/dev-requirements.in glom==22.1.0 \ --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ --hash=sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772 # via semgrep -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.2.0 \ - --hash=sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b \ - --hash=sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31 - # via - # click - # flake8 - # jsonschema - # pep517 - # pluggy - # pytest iniconfig==1.1.1 \ --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 # via pytest -jsonschema==3.2.0 \ - --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ - --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a +jsonschema==4.17.0 \ + --hash=sha256:5bfcf2bca16a087ade17e02b282d34af7ccd749ef76241e7f9bd7c0cb8a9424d \ + --hash=sha256:f660066c3966db7d6daeaea8a75e0b68237a48e51cf49882087757bb59916248 # via semgrep -mccabe==0.6.1 \ - --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ - --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f +mccabe==0.7.0 \ + --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ + --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via flake8 mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ @@ -165,203 +170,244 @@ packaging==21.3 \ 
--hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 # via + # build # pytest # semgrep -pathspec==0.9.0 \ - --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \ - --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1 +pathspec==0.10.2 \ + --hash=sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5 \ + --hash=sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0 # via black -peewee==3.14.10 \ - --hash=sha256:23271422b332c82d30c92597dee905ee831b56c6d99c33e05901e6891c75fe15 +peewee==3.15.4 \ + --hash=sha256:2581520c8dfbacd9d580c2719ae259f0637a9e46eda47dfc0ce01864c6366205 # via semgrep -pep517==0.12.0 \ - --hash=sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0 \ - --hash=sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161 - # via pip-tools -pip-tools==6.6.2 \ - --hash=sha256:6b486548e5a139e30e4c4a225b3b7c2d46942a9f6d1a91143c21b1de4d02fd9b \ - --hash=sha256:f638503a9f77d98d9a7d72584b1508d3f82ed019b8fab24f4e5ad078c1b8c95e +pep517==0.13.0 \ + --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ + --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 + # via build +pip-tools==6.10.0 \ + --hash=sha256:57ac98392548f5ca96c2831927deec3035efe81ff476e3c744bd474ca9c6a1f2 \ + --hash=sha256:7f9f7356052db6942b5aaabc8eba29983591ca0ad75affbf2f0a25d9361be624 # via -r requirements/dev-requirements.in -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via black pluggy==1.0.0 \ 
--hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 # via pytest -py==1.11.0 \ - --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # via pytest -pycodestyle==2.8.0 \ - --hash=sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20 \ - --hash=sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f +pycodestyle==2.9.1 \ + --hash=sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785 \ + --hash=sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b # via flake8 -pyflakes==2.4.0 \ - --hash=sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c \ - --hash=sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e +pyflakes==2.5.0 \ + --hash=sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2 \ + --hash=sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3 # via flake8 pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc # via packaging -pyrsistent==0.18.1 \ - --hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \ - --hash=sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc \ - --hash=sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e \ - --hash=sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26 \ - --hash=sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec \ - --hash=sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286 \ - --hash=sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045 \ - 
--hash=sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec \ - --hash=sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8 \ - --hash=sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c \ - --hash=sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca \ - --hash=sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22 \ - --hash=sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a \ - --hash=sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96 \ - --hash=sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc \ - --hash=sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1 \ - --hash=sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07 \ - --hash=sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6 \ - --hash=sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b \ - --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \ - --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6 +pyrsistent==0.19.2 \ + --hash=sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed \ + --hash=sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb \ + --hash=sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a \ + --hash=sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95 \ + --hash=sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712 \ + --hash=sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73 \ + --hash=sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41 \ + --hash=sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b \ + --hash=sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78 \ + 
--hash=sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab \ + --hash=sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308 \ + --hash=sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425 \ + --hash=sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2 \ + --hash=sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e \ + --hash=sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6 \ + --hash=sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2 \ + --hash=sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a \ + --hash=sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291 \ + --hash=sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584 \ + --hash=sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a \ + --hash=sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0 \ + --hash=sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770 # via jsonschema -pytest==7.1.2 \ - --hash=sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c \ - --hash=sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45 +pytest==7.2.0 \ + --hash=sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71 \ + --hash=sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59 # via # -r requirements/dev-requirements.in # pytest-cov # pytest-mock -pytest-cov==3.0.0 \ - --hash=sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6 \ - --hash=sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470 +pytest-cov==4.0.0 \ + --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ + --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 # via -r requirements/dev-requirements.in -pytest-mock==3.7.0 \ - 
--hash=sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534 \ - --hash=sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231 +pytest-mock==3.10.0 \ + --hash=sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b \ + --hash=sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f # via -r requirements/dev-requirements.in -requests==2.27.1 \ - --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \ - --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d +python-lsp-jsonrpc==1.0.0 \ + --hash=sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7 \ + --hash=sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd # via semgrep -ruamel.yaml==0.17.21 \ +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via semgrep +ruamel-yaml==0.17.21 \ --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af # via semgrep -ruamel.yaml.clib==0.2.6 \ - --hash=sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd \ - --hash=sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee \ - --hash=sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0 \ - --hash=sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7 \ - --hash=sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277 \ - --hash=sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104 \ - --hash=sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd \ - --hash=sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0 \ - --hash=sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78 \ - 
--hash=sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de \ - --hash=sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99 \ - --hash=sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527 \ - --hash=sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84 \ - --hash=sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7 \ - --hash=sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468 \ - --hash=sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b \ - --hash=sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94 \ - --hash=sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233 \ - --hash=sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb \ - --hash=sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5 \ - --hash=sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe \ - --hash=sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751 \ - --hash=sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502 \ - --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ - --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c - # via ruamel.yaml -semgrep==0.93.0 \ - --hash=sha256:02fce22e81e68ded60f67f8cb2979a8014ac5bb7bceb93c2553d8ccc03211259 \ - --hash=sha256:1aa9788e507286694271234cd97f2bb19d741e5ae614f0fbb545715a7d0e872d \ - --hash=sha256:3de9c36cbafef772d453f77da9b6f3c2239975b0a29e0674773bf04a0756b1ec +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + 
--hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + 
--hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml +semgrep==0.122.0 \ + --hash=sha256:6116391b0c8c87581d9d72113702b6f8c2938d799cdae7d71a845ec89249566c \ + --hash=sha256:a4c7400eb8bec9fe8df25520d1ffcb5d78b87c73dc654f1c2aec1195789bc611 \ + --hash=sha256:c7002b9aba97deb6677f4cabfa5dcc8faef2808ce6a6f28ecdd70cd8e90b01b5 \ + --hash=sha256:e3fb9956e2bb926cfeff52deafe4cec24d5f1e91fe6d3fc4f81e86ec452b2ad5 # via -r requirements/dev-requirements.in -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via jsonschema tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via # black + # build # coverage # pep517 # pytest -tqdm==4.64.0 \ - --hash=sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d \ - --hash=sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6 + # semgrep +tqdm==4.64.1 \ + --hash=sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4 \ + --hash=sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1 # via semgrep -typed-ast==1.5.4 \ - --hash=sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2 \ - --hash=sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1 \ - --hash=sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6 \ - --hash=sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62 \ - 
--hash=sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac \ - --hash=sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d \ - --hash=sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc \ - --hash=sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2 \ - --hash=sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97 \ - --hash=sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35 \ - --hash=sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6 \ - --hash=sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1 \ - --hash=sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4 \ - --hash=sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c \ - --hash=sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e \ - --hash=sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec \ - --hash=sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f \ - --hash=sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72 \ - --hash=sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47 \ - --hash=sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72 \ - --hash=sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe \ - --hash=sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6 \ - --hash=sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3 \ - --hash=sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66 - # via black -typing-extensions==4.2.0 \ - --hash=sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708 \ - --hash=sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + 
--hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via # black - # importlib-metadata -urllib3==1.26.9 \ - --hash=sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14 \ - --hash=sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e + # semgrep +ujson==5.5.0 \ + --hash=sha256:0762a4fdf86e01f3f8d8b6b7158d01fdd870799ff3f402b676e358fcd879e7eb \ + --hash=sha256:10095160dbe6bba8059ad6677a01da251431f4c68041bf796dcac0956b34f8f7 \ + --hash=sha256:1a485117f97312bef45f5d79d2ff97eff4da503b8a04f3691f59d31141686459 \ + --hash=sha256:1cef44ea4973344baed3d50a5da4a8843de3a6af7dea7fadf0a594e53ce5892f \ + --hash=sha256:1dc2f46c31ef22b0aaa28cd71be897bea271e700636658d573df9c43c49ebbd0 \ + --hash=sha256:21678d7e068707e4d54bdfeb8c250ebc548b51e499aed778b22112ca31a79669 \ + --hash=sha256:278aa9d7cb56435c96d19f5d702e026bcf69f824e24b41e9b52706abd3565837 \ + --hash=sha256:2ab011e3556a9a1d9461bd686870c527327765ed02fe53550531d6609a8a33ff \ + --hash=sha256:2d90414e3b4b44b39825049185959488e084ea7fcaf6124afd5c00893938b09d \ + --hash=sha256:2e506ecf89b6b9d304362ccef770831ec242a52c89dab1b4aabf1ab0eb1d5ed6 \ + --hash=sha256:33cd9084fefc74cbacf88c92fd260b61211e00bcde38d640c369e5dc34a2b4e1 \ + --hash=sha256:3b74467564814fbce322427a5664e6bcc7dae6dbc8acbef76300fe43ca4072ab \ + --hash=sha256:3f3f4240d99d55eb97cb012e9adf401f5ed9cd827af0341ac44603832202b0d2 \ + --hash=sha256:3fe1aea596f9539fc20cd9e52f098c842afc090168824fd4ca9744fe13151a03 \ + --hash=sha256:4a8cb3c8637006c5bd8237ebb5992a76ba06e39988ad5cff2096227443e8fd6a \ + --hash=sha256:4ef4ab8352861b99bd7fedb1fc6df3ea7f7d5216c789ba6d859e4ea06f1a4c45 \ + --hash=sha256:5035bb997d163f346c22abcec75190e7e756a5349e7c708bd3d5fd7066a9a854 \ + --hash=sha256:593a0f6fb0e186c5ba65465ed6f6215a30d1efa898c25e74de1c8577a1bff6d0 \ + --hash=sha256:59cdcd934385f36e8bd76aedc234371cc75c848d95bdce804ac8aa8744cfeffa \ + --hash=sha256:5a9b1320d8363a42d857fae8065a2174d38217cdd58cd8dc4f48d54e0591271e \ + 
--hash=sha256:5f9681ec4c60d0da590552427d770636d9079038c30b265f507ccde23caa7823 \ + --hash=sha256:5fd797a4837ba10671954e7c09010cec7aca67e09d193f4920a16beea5f66f65 \ + --hash=sha256:6019e3480d933d3698f2ecb4b46d64bfadd64e718f04fac36e681f3254b49a93 \ + --hash=sha256:603607f56a0ee84d9cd2c7e9b1d29b18a70684b94ee34f07b9ffe8dc9c8a9f81 \ + --hash=sha256:60a4b481978ea2aad8fe8af1ecc271624d01b3cf4b09e9b643dd2fe19c07634c \ + --hash=sha256:6b9812638d7aa8ecda2e8e1513fb4da999249603bffab7439a5f8f0bb362b0db \ + --hash=sha256:6c7ae6e0778ab9610f5e80e0595957d101ab8de18c32a8c053a19943ef4831d0 \ + --hash=sha256:6f83be8257b2f2dd6dea5ee62cd28db90584da7a7af1fba77a2102fc7943638a \ + --hash=sha256:701e81e047f5c0cffd4ac828efca68b0bd270c616654966a051e9a5f836b385e \ + --hash=sha256:703fd69d9cb21d6ec2086789df9be2cf8140a76ff127050c24007ea8940dcd3b \ + --hash=sha256:7471d4486f23518cff343f1eec6c68d1b977ed74c3e6cc3e1ac896b9b7d68645 \ + --hash=sha256:765d46f3d5e7a1d48075035e2d1a9164f683e3fccde834ca04602e6c588835bc \ + --hash=sha256:7a09d203983104918c62f2eef9406f24c355511f9217967df23e70fa7f5b54ff \ + --hash=sha256:7c20cc83b0df47129ec6ed8a47fa7dcfc309c5bad029464004162738502568bb \ + --hash=sha256:7d7cfac2547c93389fa303fc0c0eb6698825564e8389c41c9b60009c746207b6 \ + --hash=sha256:7d87c817b292efb748f1974f37e8bb8a8772ef92f05f84e507159360814bcc3f \ + --hash=sha256:8141f654432cf75144d6103bfac2286b8adf23467201590b173a74535d6be22d \ + --hash=sha256:849f2ff40264152f25589cb48ddb4a43d14db811f841ec73989bfc0c8c4853fa \ + --hash=sha256:880c84ce59f49776cf120f77e7ca04877c97c6887917078dbc369eb47004d7cf \ + --hash=sha256:94874584b733a18b310b0e954d53168e62cd4a0fd9db85b1903f0902a7eb33e8 \ + --hash=sha256:95603eff711b8f3b9596e1c961dbeb745a792ba1904141612f194e07edd71e5f \ + --hash=sha256:9585892091ae86045135d6a6129a644142d6a51b23e1428bb5de6d10bc0ce0c7 \ + --hash=sha256:977bf5be704a88d46bf5b228df8b44521b1f3119d741062191608b3a6a38f224 \ + --hash=sha256:9cdc46859024501c20ab74ad542cdf2f08b94b5ce384f2f569483fa3ed926d04 \ + 
--hash=sha256:a34a5f034b339f69ef7f6a134c22d04b92e07b6ddc1dd65382e7e4ec65d6437d \ + --hash=sha256:a655f7b755cfc5c07f2116b6dcf0ba148c89adef9a6d40c1b0f1fada878c4345 \ + --hash=sha256:a7d12f2d2df195c8c4e49d2cdbad640353a856c62ca2c624d8b47aa33b65a2a2 \ + --hash=sha256:abfe83e082c9208891e2158c1b5044a650ecec408b823bf6bf16cd7f8085cafa \ + --hash=sha256:b25077a971c7da47bd6846a912a747f6963776d90720c88603b1b55d81790780 \ + --hash=sha256:bf416a93e1331820c77e3429df26946dbd4fe105e9b487cd2d1b7298b75784a8 \ + --hash=sha256:c04ae27e076d81a3839047d8eed57c1e17e361640616fd520d752375e3ba8f0c \ + --hash=sha256:d5bea13c73f36c4346808df3fa806596163a7962b6d28001ca2a391cab856089 \ + --hash=sha256:d75bef34e69e7effb7b4849e3f830e3174d2cc6ec7273503fdde111c222dc9b3 \ + --hash=sha256:d93940664a5ccfd79f72dcb939b0c31a3479889f14f0eb95ec52976f8c0cae7d \ + --hash=sha256:d9c89c521dc90c7564358e525f849b93ad1d710553c1491f66b8cce8113bc901 \ + --hash=sha256:e0b36257dc90194784531c3b922d8d31fb2b4d8e5adfd27aff4eee7174176365 \ + --hash=sha256:e1135264bcd40965cd35b0869e36952f54825024befdc7a923df9a7d83cfd800 \ + --hash=sha256:e510d288e613d6927796dfb728e13e4530fc83b9ccac5888a21f7860486eab21 \ + --hash=sha256:ee9a2c9a4b2421e77f8fe33ed0621dea03c66c710707553020b1e32f3afb6240 \ + --hash=sha256:f19f11055ba2961eb39bdb1ff15763a53fca4fa0b5b624da3c7a528e83cdd09c \ + --hash=sha256:f26544bc10c83a2ff9aa2e093500c1b473f327faae31fb468d591e5823333376 \ + --hash=sha256:f4875cafc9a6482c04c7df52a725d1c41beb74913c0ff4ec8f189f1954a2afe9 \ + --hash=sha256:f5179088ef6487c475604b7898731a6ddeeada7702cfb2162155b016703a8475 \ + --hash=sha256:f63d1ae1ca17bb2c847e298c7bcf084a73d56d434b4c50509fb93a4b4300b0b2 \ + --hash=sha256:ff4928dc1e9704b567171c16787238201fdbf023665573c12c02146fe1e02eec + # via python-lsp-jsonrpc +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 # via # requests # semgrep -wcmatch==8.3 \ - 
--hash=sha256:371072912398af61d1e4e78609e18801c6faecd3cb36c54c82556a60abc965db \ - --hash=sha256:7141d2c85314253f16b38cb3d6cc0fb612918d407e1df3ccc2be7c86cc259c22 +wcmatch==8.4.1 \ + --hash=sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7 \ + --hash=sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943 # via semgrep -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via pip-tools -zipp==3.8.0 \ - --hash=sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad \ - --hash=sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099 - # via - # importlib-metadata - # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==22.1.1 \ - --hash=sha256:8dfb15d8a1c3d3085a4cbe11f29e19527dfaf2ba99354326fd62cec013eaee81 \ - --hash=sha256:e7bcf0b2cbdec2af84cc1b7b79b25fdbd7228fbdb61a4dca0b82810d0ba9d18b +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 + # via pip-tools +setuptools==65.6.0 \ + --hash=sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840 \ + --hash=sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d # via pip-tools -setuptools==62.3.2 \ - --hash=sha256:68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36 \ - --hash=sha256:a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7 - # via - # jsonschema - # pip-tools - # semgrep diff --git a/requirements/requirements.txt b/requirements/requirements.txt index b78349c53..b7f4b2d8b 100644 --- a/requirements/requirements.txt +++ 
b/requirements/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.7 +# This file is autogenerated by pip-compile with python 3.9 # To update, run: # # pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in From 68723028b012b3ad455d468faecba22e3569de84 Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 22 Nov 2022 17:26:18 -0800 Subject: [PATCH 293/352] Ensure Makefile includes version-specific requirements files Use VERSION_CODENAME script --- .circleci/config.yml | 5 +++++ Makefile | 14 +++++++++++--- scripts/codename | 15 +++++++++++++++ 3 files changed, 31 insertions(+), 3 deletions(-) create mode 100755 scripts/codename diff --git a/.circleci/config.yml b/.circleci/config.yml index 7bdae7873..0aa6fb8a4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,6 +19,7 @@ common-steps: name: Install requirements and run unit tests command: | set -e + export VERSION_CODENAME=$(~/project/scripts/codename) make venv source .venv/bin/activate export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata @@ -29,6 +30,7 @@ common-steps: name: Run lint, type checking, code formatting command: | set -e + export VERSION_CODENAME=$(~/project/scripts/codename) make venv source .venv/bin/activate make check-black lint @@ -38,6 +40,7 @@ common-steps: name: Run static analysis on source code to find security issues command: | set -e + export VERSION_CODENAME=$(~/project/scripts/codename) make venv source .venv/bin/activate make semgrep @@ -47,6 +50,7 @@ common-steps: name: Check Python dependencies for known vulnerabilities command: | set -e + export VERSION_CODENAME=$(~/project/scripts/codename) make venv source .venv/bin/activate make safety @@ -78,6 +82,7 @@ common-steps: name: Ensure that the same Python requirements are used for development/testing and production. 
command: | set -e + export VERSION_CODENAME=$(~/project/scripts/codename) make venv source .venv/bin/activate make requirements diff --git a/Makefile b/Makefile index ea83824cc..d56b2ee7b 100644 --- a/Makefile +++ b/Makefile @@ -1,11 +1,13 @@ .PHONY: all all: help +VERSION_CODENAME ?= bullseye + .PHONY: venv venv: ## Provision a Python 3 virtualenv for **development** python3 -m venv .venv .venv/bin/pip install --upgrade pip wheel - .venv/bin/pip install --require-hashes -r requirements/dev-requirements.txt + .venv/bin/pip install --require-hashes -r requirements/dev-${VERSION_CODENAME}-requirements.txt .PHONY: safety safety: ## Runs `safety check` to check python dependencies for vulnerabilities @@ -19,8 +21,14 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities .PHONY: sync-requirements sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies - rm -r requirements/dev-requirements.txt && cp requirements/requirements.txt requirements/dev-requirements.txt - pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in + if test -f "requirements/dev-bullseye-requirements.txt"; then rm -r requirements/dev-bullseye-requirements.txt; fi + if test -f "requirements/dev-bookworm-requirements.txt"; then rm -r requirements/dev-bookworm-requirements.txt; fi + $(MAKE) dev-requirements + +.PHONY: dev-requirements +dev-requirements: ## Update dev-*requirements.txt files if pinned versions do not comply with the dependency specifications in dev-*requirements.in + pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in + pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in .PHONY: requirements requirements: ## Update *requirements.txt files 
if pinned versions do not comply with the dependency specifications in *requirements.in diff --git a/scripts/codename b/scripts/codename new file mode 100755 index 000000000..261793bc5 --- /dev/null +++ b/scripts/codename @@ -0,0 +1,15 @@ +#!/bin/bash +# Returns the Debian version's codename (e.g. "bullseye") in a way that should +# work across both released versions and unreleased ones. +# See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1008735 + +source /etc/os-release + +if [[ "$VERSION_CODENAME" != "" ]]; then + echo $VERSION_CODENAME +else + # PRETTY_NAME="Debian GNU/Linux bookworm/sid" + # Use awk to split on spaces and / + echo $PRETTY_NAME | awk '{split($0, a, "[ /]"); print a[4]}' +fi + From 56456941faf4a7fb177b147cb69ba92a0d6de3d2 Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 22 Nov 2022 17:39:44 -0800 Subject: [PATCH 294/352] Separate requirements files for Bullseye and Bookworm Update rMmakefile venv and requirements targets to be version-aware --- Makefile | 7 +- ...ements.in => dev-bookworm-requirements.in} | 0 requirements/dev-bookworm-requirements.txt | 411 ++++++++++++++++++ requirements/dev-bullseye-requirements.in | 7 + ...ents.txt => dev-bullseye-requirements.txt} | 22 +- 5 files changed, 434 insertions(+), 13 deletions(-) rename requirements/{dev-requirements.in => dev-bookworm-requirements.in} (100%) create mode 100644 requirements/dev-bookworm-requirements.txt create mode 100644 requirements/dev-bullseye-requirements.in rename requirements/{dev-requirements.txt => dev-bullseye-requirements.txt} (97%) diff --git a/Makefile b/Makefile index d56b2ee7b..f535a9292 100644 --- a/Makefile +++ b/Makefile @@ -44,8 +44,11 @@ update-dependency: ## Add or upgrade a package to the latest version that compl update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in $(MAKE) sync-requirements @while read line; do \ - pip-compile 
--allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in; \ - done < 'requirements/dev-requirements.in' + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-${VERSION_CODENAME}-requirements.txt requirements/requirements.in requirements/dev-bookworm-requirements.in; \ + done < 'requirements/dev-bullseye-requirements.in' + @while read line; do \ + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-${VERSION_CODENAME}-requirements.txt requirements/requirements.in requirements/dev-bookworm-requirements.in; \ + done < 'requirements/dev-bookworm-requirements.in' .PHONY: check check: lint semgrep test check-black ## Run linter and tests diff --git a/requirements/dev-requirements.in b/requirements/dev-bookworm-requirements.in similarity index 100% rename from requirements/dev-requirements.in rename to requirements/dev-bookworm-requirements.in diff --git a/requirements/dev-bookworm-requirements.txt b/requirements/dev-bookworm-requirements.txt new file mode 100644 index 000000000..fd153a461 --- /dev/null +++ b/requirements/dev-bookworm-requirements.txt @@ -0,0 +1,411 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in +# +attrs==21.4.0 \ + --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ + --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd + # via + # glom + # jsonschema + # pytest + # semgrep +black==22.10.0 \ + --hash=sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7 \ + --hash=sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6 \ + 
--hash=sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650 \ + --hash=sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb \ + --hash=sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d \ + --hash=sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d \ + --hash=sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de \ + --hash=sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395 \ + --hash=sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae \ + --hash=sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa \ + --hash=sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef \ + --hash=sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383 \ + --hash=sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66 \ + --hash=sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87 \ + --hash=sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d \ + --hash=sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0 \ + --hash=sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b \ + --hash=sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458 \ + --hash=sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4 \ + --hash=sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1 \ + --hash=sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff + # via -r requirements/dev-bookworm-requirements.in +boltons==21.0.0 \ + --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ + --hash=sha256:b9bb7b58b2b420bbe11a6025fdef6d3e5edc9f76a42fb467afe7ca212ef9948b + # via + # face + # glom + # semgrep +bracex==2.3.post1 \ + --hash=sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73 \ + 
--hash=sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693 + # via wcmatch +build==0.9.0 \ + --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ + --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 + # via pip-tools +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 + # via requests +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.1.3 \ + --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 + # via + # black + # click-option-group + # pip-tools + # semgrep +click-option-group==0.5.5 \ + --hash=sha256:0f8ca79bc9b1d6fcaafdbe194b17ba1a2dde44ddf19087235c3efed2ad288143 \ + --hash=sha256:78ee474f07a0ca0ef6c0317bb3ebe79387aafb0c4a1e03b1d8b2b0be1e42fc78 + # via semgrep +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via semgrep +coverage[toml]==6.5.0 \ + --hash=sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79 \ + --hash=sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a \ + --hash=sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f \ + --hash=sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a \ + --hash=sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa \ + --hash=sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398 \ + --hash=sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba \ + 
--hash=sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d \ + --hash=sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf \ + --hash=sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b \ + --hash=sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518 \ + --hash=sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d \ + --hash=sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795 \ + --hash=sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2 \ + --hash=sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e \ + --hash=sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32 \ + --hash=sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745 \ + --hash=sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b \ + --hash=sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e \ + --hash=sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d \ + --hash=sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f \ + --hash=sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660 \ + --hash=sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62 \ + --hash=sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6 \ + --hash=sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04 \ + --hash=sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c \ + --hash=sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5 \ + --hash=sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef \ + --hash=sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc \ + --hash=sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae \ + --hash=sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578 \ + 
--hash=sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466 \ + --hash=sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4 \ + --hash=sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91 \ + --hash=sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0 \ + --hash=sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4 \ + --hash=sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b \ + --hash=sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe \ + --hash=sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b \ + --hash=sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75 \ + --hash=sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b \ + --hash=sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c \ + --hash=sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72 \ + --hash=sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b \ + --hash=sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f \ + --hash=sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e \ + --hash=sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53 \ + --hash=sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3 \ + --hash=sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84 \ + --hash=sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987 + # via pytest-cov +defusedxml==0.7.1 \ + --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ + --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 + # via semgrep +exceptiongroup==1.0.4 \ + --hash=sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828 \ + --hash=sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec + # via pytest 
+face==22.0.0 \ + --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ + --hash=sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d + # via glom +flake8==5.0.4 \ + --hash=sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db \ + --hash=sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248 + # via -r requirements/dev-bookworm-requirements.in +glom==22.1.0 \ + --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ + --hash=sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772 + # via semgrep +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 + # via requests +iniconfig==1.1.1 \ + --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ + --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 + # via pytest +jsonschema==4.17.1 \ + --hash=sha256:05b2d22c83640cde0b7e0aa329ca7754fbd98ea66ad8ae24aa61328dfe057fa3 \ + --hash=sha256:410ef23dcdbca4eaedc08b850079179883c2ed09378bd1f760d4af4aacfa28d7 + # via semgrep +mccabe==0.7.0 \ + --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ + --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e + # via flake8 +mypy-extensions==0.4.3 \ + --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ + --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 + # via black +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # build + # pytest + # semgrep +pathspec==0.10.2 \ + --hash=sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5 \ + 
--hash=sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0 + # via black +peewee==3.15.4 \ + --hash=sha256:2581520c8dfbacd9d580c2719ae259f0637a9e46eda47dfc0ce01864c6366205 + # via semgrep +pep517==0.13.0 \ + --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ + --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 + # via build +pip-tools==6.10.0 \ + --hash=sha256:57ac98392548f5ca96c2831927deec3035efe81ff476e3c744bd474ca9c6a1f2 \ + --hash=sha256:7f9f7356052db6942b5aaabc8eba29983591ca0ad75affbf2f0a25d9361be624 + # via -r requirements/dev-bookworm-requirements.in +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 + # via black +pluggy==1.0.0 \ + --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ + --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 + # via pytest +pycodestyle==2.9.1 \ + --hash=sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785 \ + --hash=sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b + # via flake8 +pyflakes==2.5.0 \ + --hash=sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2 \ + --hash=sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3 + # via flake8 +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyrsistent==0.19.2 \ + --hash=sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed \ + --hash=sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb \ + --hash=sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a \ + --hash=sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95 \ + 
--hash=sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712 \ + --hash=sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73 \ + --hash=sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41 \ + --hash=sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b \ + --hash=sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78 \ + --hash=sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab \ + --hash=sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308 \ + --hash=sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425 \ + --hash=sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2 \ + --hash=sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e \ + --hash=sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6 \ + --hash=sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2 \ + --hash=sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a \ + --hash=sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291 \ + --hash=sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584 \ + --hash=sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a \ + --hash=sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0 \ + --hash=sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770 + # via jsonschema +pytest==7.2.0 \ + --hash=sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71 \ + --hash=sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59 + # via + # -r requirements/dev-bookworm-requirements.in + # pytest-cov + # pytest-mock +pytest-cov==4.0.0 \ + --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ + --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 + # via -r 
requirements/dev-bookworm-requirements.in +pytest-mock==3.10.0 \ + --hash=sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b \ + --hash=sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f + # via -r requirements/dev-bookworm-requirements.in +python-lsp-jsonrpc==1.0.0 \ + --hash=sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7 \ + --hash=sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd + # via semgrep +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via semgrep +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via semgrep +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 
\ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + # via ruamel-yaml +semgrep==0.122.0 \ + --hash=sha256:6116391b0c8c87581d9d72113702b6f8c2938d799cdae7d71a845ec89249566c \ + --hash=sha256:a4c7400eb8bec9fe8df25520d1ffcb5d78b87c73dc654f1c2aec1195789bc611 \ + --hash=sha256:c7002b9aba97deb6677f4cabfa5dcc8faef2808ce6a6f28ecdd70cd8e90b01b5 \ + 
--hash=sha256:e3fb9956e2bb926cfeff52deafe4cec24d5f1e91fe6d3fc4f81e86ec452b2ad5 + # via -r requirements/dev-bookworm-requirements.in +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via + # black + # build + # coverage + # pep517 + # pytest + # semgrep +tqdm==4.64.1 \ + --hash=sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4 \ + --hash=sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1 + # via semgrep +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e + # via semgrep +ujson==5.5.0 \ + --hash=sha256:0762a4fdf86e01f3f8d8b6b7158d01fdd870799ff3f402b676e358fcd879e7eb \ + --hash=sha256:10095160dbe6bba8059ad6677a01da251431f4c68041bf796dcac0956b34f8f7 \ + --hash=sha256:1a485117f97312bef45f5d79d2ff97eff4da503b8a04f3691f59d31141686459 \ + --hash=sha256:1cef44ea4973344baed3d50a5da4a8843de3a6af7dea7fadf0a594e53ce5892f \ + --hash=sha256:1dc2f46c31ef22b0aaa28cd71be897bea271e700636658d573df9c43c49ebbd0 \ + --hash=sha256:21678d7e068707e4d54bdfeb8c250ebc548b51e499aed778b22112ca31a79669 \ + --hash=sha256:278aa9d7cb56435c96d19f5d702e026bcf69f824e24b41e9b52706abd3565837 \ + --hash=sha256:2ab011e3556a9a1d9461bd686870c527327765ed02fe53550531d6609a8a33ff \ + --hash=sha256:2d90414e3b4b44b39825049185959488e084ea7fcaf6124afd5c00893938b09d \ + --hash=sha256:2e506ecf89b6b9d304362ccef770831ec242a52c89dab1b4aabf1ab0eb1d5ed6 \ + --hash=sha256:33cd9084fefc74cbacf88c92fd260b61211e00bcde38d640c369e5dc34a2b4e1 \ + --hash=sha256:3b74467564814fbce322427a5664e6bcc7dae6dbc8acbef76300fe43ca4072ab \ + --hash=sha256:3f3f4240d99d55eb97cb012e9adf401f5ed9cd827af0341ac44603832202b0d2 \ + --hash=sha256:3fe1aea596f9539fc20cd9e52f098c842afc090168824fd4ca9744fe13151a03 \ + 
--hash=sha256:4a8cb3c8637006c5bd8237ebb5992a76ba06e39988ad5cff2096227443e8fd6a \ + --hash=sha256:4ef4ab8352861b99bd7fedb1fc6df3ea7f7d5216c789ba6d859e4ea06f1a4c45 \ + --hash=sha256:5035bb997d163f346c22abcec75190e7e756a5349e7c708bd3d5fd7066a9a854 \ + --hash=sha256:593a0f6fb0e186c5ba65465ed6f6215a30d1efa898c25e74de1c8577a1bff6d0 \ + --hash=sha256:59cdcd934385f36e8bd76aedc234371cc75c848d95bdce804ac8aa8744cfeffa \ + --hash=sha256:5a9b1320d8363a42d857fae8065a2174d38217cdd58cd8dc4f48d54e0591271e \ + --hash=sha256:5f9681ec4c60d0da590552427d770636d9079038c30b265f507ccde23caa7823 \ + --hash=sha256:5fd797a4837ba10671954e7c09010cec7aca67e09d193f4920a16beea5f66f65 \ + --hash=sha256:6019e3480d933d3698f2ecb4b46d64bfadd64e718f04fac36e681f3254b49a93 \ + --hash=sha256:603607f56a0ee84d9cd2c7e9b1d29b18a70684b94ee34f07b9ffe8dc9c8a9f81 \ + --hash=sha256:60a4b481978ea2aad8fe8af1ecc271624d01b3cf4b09e9b643dd2fe19c07634c \ + --hash=sha256:6b9812638d7aa8ecda2e8e1513fb4da999249603bffab7439a5f8f0bb362b0db \ + --hash=sha256:6c7ae6e0778ab9610f5e80e0595957d101ab8de18c32a8c053a19943ef4831d0 \ + --hash=sha256:6f83be8257b2f2dd6dea5ee62cd28db90584da7a7af1fba77a2102fc7943638a \ + --hash=sha256:701e81e047f5c0cffd4ac828efca68b0bd270c616654966a051e9a5f836b385e \ + --hash=sha256:703fd69d9cb21d6ec2086789df9be2cf8140a76ff127050c24007ea8940dcd3b \ + --hash=sha256:7471d4486f23518cff343f1eec6c68d1b977ed74c3e6cc3e1ac896b9b7d68645 \ + --hash=sha256:765d46f3d5e7a1d48075035e2d1a9164f683e3fccde834ca04602e6c588835bc \ + --hash=sha256:7a09d203983104918c62f2eef9406f24c355511f9217967df23e70fa7f5b54ff \ + --hash=sha256:7c20cc83b0df47129ec6ed8a47fa7dcfc309c5bad029464004162738502568bb \ + --hash=sha256:7d7cfac2547c93389fa303fc0c0eb6698825564e8389c41c9b60009c746207b6 \ + --hash=sha256:7d87c817b292efb748f1974f37e8bb8a8772ef92f05f84e507159360814bcc3f \ + --hash=sha256:8141f654432cf75144d6103bfac2286b8adf23467201590b173a74535d6be22d \ + --hash=sha256:849f2ff40264152f25589cb48ddb4a43d14db811f841ec73989bfc0c8c4853fa \ + 
--hash=sha256:880c84ce59f49776cf120f77e7ca04877c97c6887917078dbc369eb47004d7cf \ + --hash=sha256:94874584b733a18b310b0e954d53168e62cd4a0fd9db85b1903f0902a7eb33e8 \ + --hash=sha256:95603eff711b8f3b9596e1c961dbeb745a792ba1904141612f194e07edd71e5f \ + --hash=sha256:9585892091ae86045135d6a6129a644142d6a51b23e1428bb5de6d10bc0ce0c7 \ + --hash=sha256:977bf5be704a88d46bf5b228df8b44521b1f3119d741062191608b3a6a38f224 \ + --hash=sha256:9cdc46859024501c20ab74ad542cdf2f08b94b5ce384f2f569483fa3ed926d04 \ + --hash=sha256:a34a5f034b339f69ef7f6a134c22d04b92e07b6ddc1dd65382e7e4ec65d6437d \ + --hash=sha256:a655f7b755cfc5c07f2116b6dcf0ba148c89adef9a6d40c1b0f1fada878c4345 \ + --hash=sha256:a7d12f2d2df195c8c4e49d2cdbad640353a856c62ca2c624d8b47aa33b65a2a2 \ + --hash=sha256:abfe83e082c9208891e2158c1b5044a650ecec408b823bf6bf16cd7f8085cafa \ + --hash=sha256:b25077a971c7da47bd6846a912a747f6963776d90720c88603b1b55d81790780 \ + --hash=sha256:bf416a93e1331820c77e3429df26946dbd4fe105e9b487cd2d1b7298b75784a8 \ + --hash=sha256:c04ae27e076d81a3839047d8eed57c1e17e361640616fd520d752375e3ba8f0c \ + --hash=sha256:d5bea13c73f36c4346808df3fa806596163a7962b6d28001ca2a391cab856089 \ + --hash=sha256:d75bef34e69e7effb7b4849e3f830e3174d2cc6ec7273503fdde111c222dc9b3 \ + --hash=sha256:d93940664a5ccfd79f72dcb939b0c31a3479889f14f0eb95ec52976f8c0cae7d \ + --hash=sha256:d9c89c521dc90c7564358e525f849b93ad1d710553c1491f66b8cce8113bc901 \ + --hash=sha256:e0b36257dc90194784531c3b922d8d31fb2b4d8e5adfd27aff4eee7174176365 \ + --hash=sha256:e1135264bcd40965cd35b0869e36952f54825024befdc7a923df9a7d83cfd800 \ + --hash=sha256:e510d288e613d6927796dfb728e13e4530fc83b9ccac5888a21f7860486eab21 \ + --hash=sha256:ee9a2c9a4b2421e77f8fe33ed0621dea03c66c710707553020b1e32f3afb6240 \ + --hash=sha256:f19f11055ba2961eb39bdb1ff15763a53fca4fa0b5b624da3c7a528e83cdd09c \ + --hash=sha256:f26544bc10c83a2ff9aa2e093500c1b473f327faae31fb468d591e5823333376 \ + --hash=sha256:f4875cafc9a6482c04c7df52a725d1c41beb74913c0ff4ec8f189f1954a2afe9 \ + 
--hash=sha256:f5179088ef6487c475604b7898731a6ddeeada7702cfb2162155b016703a8475 \ + --hash=sha256:f63d1ae1ca17bb2c847e298c7bcf084a73d56d434b4c50509fb93a4b4300b0b2 \ + --hash=sha256:ff4928dc1e9704b567171c16787238201fdbf023665573c12c02146fe1e02eec + # via python-lsp-jsonrpc +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # semgrep +wcmatch==8.4.1 \ + --hash=sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7 \ + --hash=sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943 + # via semgrep +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 + # via pip-tools + +# The following packages are considered to be unsafe in a requirements file: +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 + # via pip-tools +setuptools==65.6.0 \ + --hash=sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840 \ + --hash=sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d + # via pip-tools diff --git a/requirements/dev-bullseye-requirements.in b/requirements/dev-bullseye-requirements.in new file mode 100644 index 000000000..5200c5299 --- /dev/null +++ b/requirements/dev-bullseye-requirements.in @@ -0,0 +1,7 @@ +black +flake8 +pip-tools +pytest +pytest-cov +pytest-mock +semgrep \ No newline at end of file diff --git a/requirements/dev-requirements.txt b/requirements/dev-bullseye-requirements.txt similarity index 97% rename from requirements/dev-requirements.txt rename to requirements/dev-bullseye-requirements.txt index 49ef8edae..3271ab69c 100644 --- a/requirements/dev-requirements.txt +++ 
b/requirements/dev-bullseye-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with python 3.9 # To update, run: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in # attrs==21.4.0 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ @@ -34,7 +34,7 @@ black==22.10.0 \ --hash=sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4 \ --hash=sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1 \ --hash=sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff - # via -r requirements/dev-requirements.in + # via -r requirements/dev-bullseye-requirements.in boltons==21.0.0 \ --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ --hash=sha256:b9bb7b58b2b420bbe11a6025fdef6d3e5edc9f76a42fb467afe7ca212ef9948b @@ -141,7 +141,7 @@ face==22.0.0 \ flake8==5.0.4 \ --hash=sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db \ --hash=sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248 - # via -r requirements/dev-requirements.in + # via -r requirements/dev-bullseye-requirements.in glom==22.1.0 \ --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ --hash=sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772 @@ -154,9 +154,9 @@ iniconfig==1.1.1 \ --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 # via pytest -jsonschema==4.17.0 \ - --hash=sha256:5bfcf2bca16a087ade17e02b282d34af7ccd749ef76241e7f9bd7c0cb8a9424d \ - --hash=sha256:f660066c3966db7d6daeaea8a75e0b68237a48e51cf49882087757bb59916248 +jsonschema==4.17.1 \ 
+ --hash=sha256:05b2d22c83640cde0b7e0aa329ca7754fbd98ea66ad8ae24aa61328dfe057fa3 \ + --hash=sha256:410ef23dcdbca4eaedc08b850079179883c2ed09378bd1f760d4af4aacfa28d7 # via semgrep mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ @@ -187,7 +187,7 @@ pep517==0.13.0 \ pip-tools==6.10.0 \ --hash=sha256:57ac98392548f5ca96c2831927deec3035efe81ff476e3c744bd474ca9c6a1f2 \ --hash=sha256:7f9f7356052db6942b5aaabc8eba29983591ca0ad75affbf2f0a25d9361be624 - # via -r requirements/dev-requirements.in + # via -r requirements/dev-bullseye-requirements.in platformdirs==2.5.4 \ --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 @@ -236,17 +236,17 @@ pytest==7.2.0 \ --hash=sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71 \ --hash=sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59 # via - # -r requirements/dev-requirements.in + # -r requirements/dev-bullseye-requirements.in # pytest-cov # pytest-mock pytest-cov==4.0.0 \ --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 - # via -r requirements/dev-requirements.in + # via -r requirements/dev-bullseye-requirements.in pytest-mock==3.10.0 \ --hash=sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b \ --hash=sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f - # via -r requirements/dev-requirements.in + # via -r requirements/dev-bullseye-requirements.in python-lsp-jsonrpc==1.0.0 \ --hash=sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7 \ --hash=sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd @@ -299,7 +299,7 @@ semgrep==0.122.0 \ --hash=sha256:a4c7400eb8bec9fe8df25520d1ffcb5d78b87c73dc654f1c2aec1195789bc611 \ 
--hash=sha256:c7002b9aba97deb6677f4cabfa5dcc8faef2808ce6a6f28ecdd70cd8e90b01b5 \ --hash=sha256:e3fb9956e2bb926cfeff52deafe4cec24d5f1e91fe6d3fc4f81e86ec452b2ad5 - # via -r requirements/dev-requirements.in + # via -r requirements/dev-bullseye-requirements.in tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f From 502c95868d1bed3daa08909f7d0cadb0d3632471 Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 29 Nov 2022 10:57:00 -0800 Subject: [PATCH 295/352] Ensure correct OS version builds requirements file. Fix requirements Makefile target. --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index f535a9292..aaa0624b8 100644 --- a/Makefile +++ b/Makefile @@ -33,7 +33,7 @@ dev-requirements: ## Update dev-*requirements.txt files if pinned versions do n .PHONY: requirements requirements: ## Update *requirements.txt files if pinned versions do not comply with the dependency specifications in *requirements.in pip-compile --generate-hashes --output-file requirements/requirements.txt requirements/requirements.in - $(MAKE) sync-requirements + $(MAKE) dev-requirements .PHONY: update-dependency update-dependency: ## Add or upgrade a package to the latest version that complies with the dependency specifications in requirements.in @@ -44,10 +44,10 @@ update-dependency: ## Add or upgrade a package to the latest version that compl update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in $(MAKE) sync-requirements @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-${VERSION_CODENAME}-requirements.txt requirements/requirements.in requirements/dev-bookworm-requirements.in; \ + pip-compile --allow-unsafe 
--generate-hashes --upgrade-package $file --output-file requirements/dev-bullseye-requirements.txt requirements/requirements.in requirements/dev-bullseye-requirements.in; \ done < 'requirements/dev-bullseye-requirements.in' @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-${VERSION_CODENAME}-requirements.txt requirements/requirements.in requirements/dev-bookworm-requirements.in; \ + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-bookworm-requirements.txt requirements/requirements.in requirements/dev-bookworm-requirements.in; \ done < 'requirements/dev-bookworm-requirements.in' .PHONY: check From d4140a4c6dead195477b4ac32bf4500fcf533627 Mon Sep 17 00:00:00 2001 From: Ro Date: Fri, 16 Dec 2022 11:26:27 -0800 Subject: [PATCH 296/352] Update dependencies --- requirements/dev-requirements.txt | 696 ++++++++++++++++-------------- requirements/requirements.in | 2 +- requirements/requirements.txt | 8 +- 3 files changed, 387 insertions(+), 319 deletions(-) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index b1aeb1a56..6dc72691a 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -1,16 +1,30 @@ # -# This file is autogenerated by pip-compile with python 3.7 +# This file is autogenerated by pip-compile with python 3.9 # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # -black==21.8b0 \ - --hash=sha256:2a0f9a8c2b2a60dbcf1ccb058842fb22bdbbcb2f32c6cc02d9578f90b92ce8b7 \ - --hash=sha256:570608d28aa3af1792b98c4a337dbac6367877b47b12b88ab42095cfc1a627c2 +black==22.12.0 \ + --hash=sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320 \ + --hash=sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351 \ + 
--hash=sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350 \ + --hash=sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f \ + --hash=sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf \ + --hash=sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148 \ + --hash=sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4 \ + --hash=sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d \ + --hash=sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc \ + --hash=sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d \ + --hash=sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2 \ + --hash=sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f # via -r requirements/dev-requirements.in -certifi==2021.5.30 \ - --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ - --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 +build==0.9.0 \ + --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ + --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 + # via pip-tools +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via # -r requirements/requirements.in # requests @@ -20,69 +34,67 @@ charset-normalizer==2.0.4 \ # via # -r requirements/requirements.in # requests -click==8.0.1 \ - --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ - --hash=sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6 +click==8.1.3 \ + --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 # via # black # pip-tools -coverage==5.5 \ - 
--hash=sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c \ - --hash=sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6 \ - --hash=sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45 \ - --hash=sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a \ - --hash=sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03 \ - --hash=sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529 \ - --hash=sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a \ - --hash=sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a \ - --hash=sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2 \ - --hash=sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6 \ - --hash=sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759 \ - --hash=sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53 \ - --hash=sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a \ - --hash=sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4 \ - --hash=sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff \ - --hash=sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502 \ - --hash=sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793 \ - --hash=sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb \ - --hash=sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905 \ - --hash=sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821 \ - --hash=sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b \ - --hash=sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81 \ - --hash=sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0 \ - --hash=sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b \ - 
--hash=sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3 \ - --hash=sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184 \ - --hash=sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701 \ - --hash=sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a \ - --hash=sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82 \ - --hash=sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638 \ - --hash=sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5 \ - --hash=sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083 \ - --hash=sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6 \ - --hash=sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90 \ - --hash=sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465 \ - --hash=sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a \ - --hash=sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3 \ - --hash=sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e \ - --hash=sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066 \ - --hash=sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf \ - --hash=sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b \ - --hash=sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae \ - --hash=sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669 \ - --hash=sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873 \ - --hash=sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b \ - --hash=sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6 \ - --hash=sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb \ - --hash=sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160 \ - 
--hash=sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c \ - --hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ - --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ - --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 +coverage==6.5.0 \ + --hash=sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79 \ + --hash=sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a \ + --hash=sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f \ + --hash=sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a \ + --hash=sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa \ + --hash=sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398 \ + --hash=sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba \ + --hash=sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d \ + --hash=sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf \ + --hash=sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b \ + --hash=sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518 \ + --hash=sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d \ + --hash=sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795 \ + --hash=sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2 \ + --hash=sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e \ + --hash=sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32 \ + --hash=sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745 \ + --hash=sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b \ + --hash=sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e \ + 
--hash=sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d \ + --hash=sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f \ + --hash=sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660 \ + --hash=sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62 \ + --hash=sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6 \ + --hash=sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04 \ + --hash=sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c \ + --hash=sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5 \ + --hash=sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef \ + --hash=sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc \ + --hash=sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae \ + --hash=sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578 \ + --hash=sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466 \ + --hash=sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4 \ + --hash=sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91 \ + --hash=sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0 \ + --hash=sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4 \ + --hash=sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b \ + --hash=sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe \ + --hash=sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b \ + --hash=sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75 \ + --hash=sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b \ + --hash=sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c \ + --hash=sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72 \ + 
--hash=sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b \ + --hash=sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f \ + --hash=sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e \ + --hash=sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53 \ + --hash=sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3 \ + --hash=sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84 \ + --hash=sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987 # via -r requirements/dev-requirements.in -flake8==3.9.2 \ - --hash=sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b \ - --hash=sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907 +flake8==6.0.0 \ + --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ + --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 # via -r requirements/dev-requirements.in furl==2.0.0 \ --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ @@ -95,88 +107,125 @@ idna==3.2 \ # -r requirements/requirements.in # requests # yarl -importlib-metadata==4.8.1 \ - --hash=sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15 \ - --hash=sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1 - # via - # click - # flake8 - # pep517 -isort==5.9.3 \ - --hash=sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899 \ - --hash=sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2 +isort==5.11.2 \ + --hash=sha256:dd8bbc5c0990f2a095d754e50360915f73b4c26fc82733eb5bfc6b48396af4d2 \ + --hash=sha256:e486966fba83f25b8045f8dd7455b0a0d1e4de481e1d7ce4669902d9fb85e622 # via -r requirements/dev-requirements.in -mccabe==0.6.1 \ - --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ - 
--hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f +mccabe==0.7.0 \ + --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ + --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via # -r requirements/dev-requirements.in # flake8 -multidict==5.1.0 \ - --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ - --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \ - --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \ - --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \ - --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \ - --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \ - --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \ - --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \ - --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \ - --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \ - --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \ - --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \ - --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \ - --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \ - --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \ - --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \ - --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \ - --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \ - --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \ - --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \ - 
--hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \ - --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \ - --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \ - --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \ - --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \ - --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \ - --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \ - --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \ - --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \ - --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \ - --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \ - --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \ - --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \ - --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \ - --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \ - --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ - --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 +multidict==6.0.3 \ + --hash=sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce \ + --hash=sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce \ + --hash=sha256:114a4ab3e5cfbc56c4b6697686ecb92376c7e8c56893ef20547921552f8bdf57 \ + --hash=sha256:12e0d396faa6dc55ff5379eee54d1df3b508243ff15bfc8295a6ec7a4483a335 \ + --hash=sha256:190626ced82d4cc567a09e7346340d380154a493bac6905e0095d8158cdf1e38 \ + --hash=sha256:1f5d5129a937af4e3c4a1d6c139f4051b7d17d43276cefdd8d442a7031f7eef2 \ + 
--hash=sha256:21e1ce0b187c4e93112304dcde2aa18922fdbe8fb4f13d8aa72a5657bce0563a \ + --hash=sha256:24e8d513bfcaadc1f8b0ebece3ff50961951c54b07d5a775008a882966102418 \ + --hash=sha256:2523a29006c034687eccd3ee70093a697129a3ffe8732535d3b2df6a4ecc279d \ + --hash=sha256:26fbbe17f8a7211b623502d2bf41022a51da3025142401417c765bf9a56fed4c \ + --hash=sha256:2b66d61966b12e6bba500e5cbb2c721a35e119c30ee02495c5629bd0e91eea30 \ + --hash=sha256:2cf5d19e12eff855aa198259c0b02fd3f5d07e1291fbd20279c37b3b0e6c9852 \ + --hash=sha256:2cfda34b7cb99eacada2072e0f69c0ad3285cb6f8e480b11f2b6d6c1c6f92718 \ + --hash=sha256:3541882266247c7cd3dba78d6ef28dbe704774df60c9e4231edaa4493522e614 \ + --hash=sha256:36df958b15639e40472adaa4f0c2c7828fe680f894a6b48c4ce229f59a6a798b \ + --hash=sha256:38d394814b39be1c36ac709006d39d50d72a884f9551acd9c8cc1ffae3fc8c4e \ + --hash=sha256:4159fc1ec9ede8ab93382e0d6ba9b1b3d23c72da39a834db7a116986605c7ab4 \ + --hash=sha256:445c0851a1cbc1f2ec3b40bc22f9c4a235edb3c9a0906122a9df6ea8d51f886c \ + --hash=sha256:47defc0218682281a52fb1f6346ebb8b68b17538163a89ea24dfe4da37a8a9a3 \ + --hash=sha256:4cc5c8cd205a9810d16a5cd428cd81bac554ad1477cb87f4ad722b10992e794d \ + --hash=sha256:4ccf55f28066b4f08666764a957c2b7c241c7547b0921d69c7ceab5f74fe1a45 \ + --hash=sha256:4fb3fe591956d8841882c463f934c9f7485cfd5f763a08c0d467b513dc18ef89 \ + --hash=sha256:526f8397fc124674b8f39748680a0ff673bd6a715fecb4866716d36e380f015f \ + --hash=sha256:578bfcb16f4b8675ef71b960c00f174b0426e0eeb796bab6737389d8288eb827 \ + --hash=sha256:5b51969503709415a35754954c2763f536a70b8bf7360322b2edb0c0a44391f6 \ + --hash=sha256:5e58ec0375803526d395f6f7e730ecc45d06e15f68f7b9cdbf644a2918324e51 \ + --hash=sha256:62db44727d0befea68e8ad2881bb87a9cfb6b87d45dd78609009627167f37b69 \ + --hash=sha256:67090b17a0a5be5704fd109f231ee73cefb1b3802d41288d6378b5df46ae89ba \ + --hash=sha256:6cd14e61f0da2a2cfb9fe05bfced2a1ed7063ce46a7a8cd473be4973de9a7f91 \ + --hash=sha256:70740c2bc9ab1c99f7cdcb104f27d16c63860c56d51c5bf0ef82fc1d892a2131 \ + 
--hash=sha256:73009ea04205966d47e16d98686ac5c438af23a1bb30b48a2c5da3423ec9ce37 \ + --hash=sha256:791458a1f7d1b4ab3bd9e93e0dcd1d59ef7ee9aa051dcd1ea030e62e49b923fd \ + --hash=sha256:7f9511e48bde6b995825e8d35e434fc96296cf07a25f4aae24ff9162be7eaa46 \ + --hash=sha256:81c3d597591b0940e04949e4e4f79359b2d2e542a686ba0da5e25de33fec13e0 \ + --hash=sha256:8230a39bae6c2e8a09e4da6bace5064693b00590a4a213e38f9a9366da10e7dd \ + --hash=sha256:8b92a9f3ab904397a33b193000dc4de7318ea175c4c460a1e154c415f9008e3d \ + --hash=sha256:94cbe5535ef150546b8321aebea22862a3284da51e7b55f6f95b7d73e96d90ee \ + --hash=sha256:960ce1b790952916e682093788696ef7e33ac6a97482f9b983abdc293091b531 \ + --hash=sha256:99341ca1f1db9e7f47914cb2461305665a662383765ced6f843712564766956d \ + --hash=sha256:9aac6881454a750554ed4b280a839dcf9e2133a9d12ab4d417d673fb102289b7 \ + --hash=sha256:9d359b0a962e052b713647ac1f13eabf2263167b149ed1e27d5c579f5c8c7d2c \ + --hash=sha256:9dbab2a7e9c073bc9538824a01f5ed689194db7f55f2b8102766873e906a6c1a \ + --hash=sha256:a27b029caa3b555a4f3da54bc1e718eb55fcf1a11fda8bf0132147b476cf4c08 \ + --hash=sha256:a8b817d4ed68fd568ec5e45dd75ddf30cc72a47a6b41b74d5bb211374c296f5e \ + --hash=sha256:ad7d66422b9cc51125509229693d27e18c08f2dea3ac9de408d821932b1b3759 \ + --hash=sha256:b46e79a9f4db53897d17bc64a39d1c7c2be3e3d4f8dba6d6730a2b13ddf0f986 \ + --hash=sha256:baa96a3418e27d723064854143b2f414a422c84cc87285a71558722049bebc5a \ + --hash=sha256:beeca903e4270b4afcd114f371a9602240dc143f9e944edfea00f8d4ad56c40d \ + --hash=sha256:c2a1168e5aa7c72499fb03c850e0f03f624fa4a5c8d2e215c518d0a73872eb64 \ + --hash=sha256:c5790cc603456b6dcf8a9a4765f666895a6afddc88b3d3ba7b53dea2b6e23116 \ + --hash=sha256:cb4a08f0aaaa869f189ffea0e17b86ad0237b51116d494da15ef7991ee6ad2d7 \ + --hash=sha256:cd5771e8ea325f85cbb361ddbdeb9ae424a68e5dfb6eea786afdcd22e68a7d5d \ + --hash=sha256:ce8e51774eb03844588d3c279adb94efcd0edeccd2f97516623292445bcc01f9 \ + --hash=sha256:d09daf5c6ce7fc6ed444c9339bbde5ea84e2534d1ca1cd37b60f365c77f00dea \ + 
--hash=sha256:d0e798b072cf2aab9daceb43d97c9c527a0c7593e67a7846ad4cc6051de1e303 \ + --hash=sha256:d325d61cac602976a5d47b19eaa7d04e3daf4efce2164c630219885087234102 \ + --hash=sha256:d408172519049e36fb6d29672f060dc8461fc7174eba9883c7026041ef9bfb38 \ + --hash=sha256:d52442e7c951e4c9ee591d6047706e66923d248d83958bbf99b8b19515fffaef \ + --hash=sha256:dc4cfef5d899f5f1a15f3d2ac49f71107a01a5a2745b4dd53fa0cede1419385a \ + --hash=sha256:df7b4cee3ff31b3335aba602f8d70dbc641e5b7164b1e9565570c9d3c536a438 \ + --hash=sha256:e068dfeadbce63072b2d8096486713d04db4946aad0a0f849bd4fc300799d0d3 \ + --hash=sha256:e07c24018986fb00d6e7eafca8fcd6e05095649e17fcf0e33a592caaa62a78b9 \ + --hash=sha256:e0bce9f7c30e7e3a9e683f670314c0144e8d34be6b7019e40604763bd278d84f \ + --hash=sha256:e1925f78a543b94c3d46274c66a366fee8a263747060220ed0188e5f3eeea1c0 \ + --hash=sha256:e322c94596054352f5a02771eec71563c018b15699b961aba14d6dd943367022 \ + --hash=sha256:e4a095e18847c12ec20e55326ab8782d9c2d599400a3a2f174fab4796875d0e2 \ + --hash=sha256:e5a811aab1b4aea0b4be669363c19847a8c547510f0e18fb632956369fdbdf67 \ + --hash=sha256:eddf604a3de2ace3d9a4e4d491be7562a1ac095a0a1c95a9ec5781ef0273ef11 \ + --hash=sha256:ee9b1cae9a6c5d023e5a150f6f6b9dbb3c3bbc7887d6ee07d4c0ecb49a473734 \ + --hash=sha256:f1650ea41c408755da5eed52ac6ccbc8938ccc3e698d81e6f6a1be02ff2a0945 \ + --hash=sha256:f2c0957b3e8c66c10d27272709a5299ab3670a0f187c9428f3b90d267119aedb \ + --hash=sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000 \ + --hash=sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253 \ + --hash=sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544 # via # -r requirements/dev-requirements.in # yarl -mypy==0.910 \ - --hash=sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9 \ - --hash=sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a \ - --hash=sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9 \ - 
--hash=sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e \ - --hash=sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2 \ - --hash=sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212 \ - --hash=sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b \ - --hash=sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885 \ - --hash=sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150 \ - --hash=sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703 \ - --hash=sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072 \ - --hash=sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457 \ - --hash=sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e \ - --hash=sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0 \ - --hash=sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb \ - --hash=sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97 \ - --hash=sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8 \ - --hash=sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811 \ - --hash=sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6 \ - --hash=sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de \ - --hash=sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504 \ - --hash=sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921 \ - --hash=sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d +mypy==0.991 \ + --hash=sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d \ + --hash=sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6 \ + --hash=sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf \ + 
--hash=sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f \ + --hash=sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813 \ + --hash=sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33 \ + --hash=sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad \ + --hash=sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05 \ + --hash=sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297 \ + --hash=sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06 \ + --hash=sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd \ + --hash=sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243 \ + --hash=sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305 \ + --hash=sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476 \ + --hash=sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711 \ + --hash=sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70 \ + --hash=sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5 \ + --hash=sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461 \ + --hash=sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab \ + --hash=sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c \ + --hash=sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d \ + --hash=sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135 \ + --hash=sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93 \ + --hash=sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648 \ + --hash=sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a \ + --hash=sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb \ + --hash=sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3 \ + 
--hash=sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372 \ + --hash=sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb \ + --hash=sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef # via -r requirements/dev-requirements.in mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ @@ -191,31 +240,35 @@ orderedmultidict==1.0 \ # via # -r requirements/requirements.in # furl -pathspec==0.9.0 \ - --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \ - --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1 +packaging==22.0 \ + --hash=sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3 \ + --hash=sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3 + # via build +pathspec==0.10.3 \ + --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6 \ + --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6 # via black -pep517==0.11.0 \ - --hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ - --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 - # via pip-tools -pip-tools==6.2.0 \ - --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ - --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 +pep517==0.13.0 \ + --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ + --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 + # via build +pip-tools==6.12.0 \ + --hash=sha256:8e22fbc84ede7ca522ba4b033c4fcf6a6419adabc75d24747be3d8262504489a \ + --hash=sha256:f441603c63b16f4af0dd5026f7522a49eddec2bc8a4a4979af44e1f6b0a1c13e # via -r requirements/dev-requirements.in -platformdirs==2.3.0 \ - --hash=sha256:15b056538719b1c94bdaccb29e5f81879c7f7f0f4a153f46086d155dffcd4f0f \ - 
--hash=sha256:8003ac87717ae2c7ee1ea5a84a1a61e87f3fbd16eb5aadba194ea30a9019f648 +platformdirs==2.6.0 \ + --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca \ + --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e # via black -pycodestyle==2.7.0 \ - --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ - --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef +pycodestyle==2.10.0 \ + --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ + --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 # via # -r requirements/dev-requirements.in # flake8 -pyflakes==2.3.1 \ - --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ - --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db +pyflakes==3.0.1 \ + --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ + --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd # via # -r requirements/dev-requirements.in # flake8 @@ -252,49 +305,6 @@ pyyaml==5.4.1 \ # via # -r requirements/requirements.in # vcrpy -regex==2021.8.28 \ - --hash=sha256:04f6b9749e335bb0d2f68c707f23bb1773c3fb6ecd10edf0f04df12a8920d468 \ - --hash=sha256:08d74bfaa4c7731b8dac0a992c63673a2782758f7cfad34cf9c1b9184f911354 \ - --hash=sha256:0fc1f8f06977c2d4f5e3d3f0d4a08089be783973fc6b6e278bde01f0544ff308 \ - --hash=sha256:121f4b3185feaade3f85f70294aef3f777199e9b5c0c0245c774ae884b110a2d \ - --hash=sha256:1413b5022ed6ac0d504ba425ef02549a57d0f4276de58e3ab7e82437892704fc \ - --hash=sha256:1743345e30917e8c574f273f51679c294effba6ad372db1967852f12c76759d8 \ - --hash=sha256:28fc475f560d8f67cc8767b94db4c9440210f6958495aeae70fac8faec631797 \ - --hash=sha256:31a99a4796bf5aefc8351e98507b09e1b09115574f7c9dbb9cf2111f7220d2e2 \ - --hash=sha256:328a1fad67445550b982caa2a2a850da5989fd6595e858f02d04636e7f8b0b13 \ - 
--hash=sha256:473858730ef6d6ff7f7d5f19452184cd0caa062a20047f6d6f3e135a4648865d \ - --hash=sha256:4cde065ab33bcaab774d84096fae266d9301d1a2f5519d7bd58fc55274afbf7a \ - --hash=sha256:5f6a808044faae658f546dd5f525e921de9fa409de7a5570865467f03a626fc0 \ - --hash=sha256:610b690b406653c84b7cb6091facb3033500ee81089867ee7d59e675f9ca2b73 \ - --hash=sha256:66256b6391c057305e5ae9209941ef63c33a476b73772ca967d4a2df70520ec1 \ - --hash=sha256:6eebf512aa90751d5ef6a7c2ac9d60113f32e86e5687326a50d7686e309f66ed \ - --hash=sha256:79aef6b5cd41feff359acaf98e040844613ff5298d0d19c455b3d9ae0bc8c35a \ - --hash=sha256:808ee5834e06f57978da3e003ad9d6292de69d2bf6263662a1a8ae30788e080b \ - --hash=sha256:8e44769068d33e0ea6ccdf4b84d80c5afffe5207aa4d1881a629cf0ef3ec398f \ - --hash=sha256:999ad08220467b6ad4bd3dd34e65329dd5d0df9b31e47106105e407954965256 \ - --hash=sha256:9b006628fe43aa69259ec04ca258d88ed19b64791693df59c422b607b6ece8bb \ - --hash=sha256:9d05ad5367c90814099000442b2125535e9d77581855b9bee8780f1b41f2b1a2 \ - --hash=sha256:a577a21de2ef8059b58f79ff76a4da81c45a75fe0bfb09bc8b7bb4293fa18983 \ - --hash=sha256:a617593aeacc7a691cc4af4a4410031654f2909053bd8c8e7db837f179a630eb \ - --hash=sha256:abb48494d88e8a82601af905143e0de838c776c1241d92021e9256d5515b3645 \ - --hash=sha256:ac88856a8cbccfc14f1b2d0b829af354cc1743cb375e7f04251ae73b2af6adf8 \ - --hash=sha256:b4c220a1fe0d2c622493b0a1fd48f8f991998fb447d3cd368033a4b86cf1127a \ - --hash=sha256:b844fb09bd9936ed158ff9df0ab601e2045b316b17aa8b931857365ea8586906 \ - --hash=sha256:bdc178caebd0f338d57ae445ef8e9b737ddf8fbc3ea187603f65aec5b041248f \ - --hash=sha256:c206587c83e795d417ed3adc8453a791f6d36b67c81416676cad053b4104152c \ - --hash=sha256:c61dcc1cf9fd165127a2853e2c31eb4fb961a4f26b394ac9fe5669c7a6592892 \ - --hash=sha256:c7cb4c512d2d3b0870e00fbbac2f291d4b4bf2634d59a31176a87afe2777c6f0 \ - --hash=sha256:d4a332404baa6665b54e5d283b4262f41f2103c255897084ec8f5487ce7b9e8e \ - --hash=sha256:d5111d4c843d80202e62b4fdbb4920db1dcee4f9366d6b03294f45ed7b18b42e \ - 
--hash=sha256:e1e8406b895aba6caa63d9fd1b6b1700d7e4825f78ccb1e5260551d168db38ed \ - --hash=sha256:e8690ed94481f219a7a967c118abaf71ccc440f69acd583cab721b90eeedb77c \ - --hash=sha256:ed283ab3a01d8b53de3a05bfdf4473ae24e43caee7dcb5584e86f3f3e5ab4374 \ - --hash=sha256:ed4b50355b066796dacdd1cf538f2ce57275d001838f9b132fab80b75e8c84dd \ - --hash=sha256:ee329d0387b5b41a5dddbb6243a21cb7896587a651bebb957e2d2bb8b63c0791 \ - --hash=sha256:f3bf1bc02bc421047bfec3343729c4bbbea42605bcfd6d6bfe2c07ade8b12d2a \ - --hash=sha256:f585cbbeecb35f35609edccb95efd95a3e35824cd7752b586503f7e6087303f1 \ - --hash=sha256:f60667673ff9c249709160529ab39667d1ae9fd38634e006bec95611f632e759 - # via black requests==2.26.0 \ --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 @@ -308,144 +318,202 @@ six==1.11.0 \ # furl # orderedmultidict # vcrpy -toml==0.10.2 \ - --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ - --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f - # via mypy -tomli==1.2.1 \ - --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ - --hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 - # via - # black - # pep517 -typed-ast==1.4.3 \ - --hash=sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace \ - --hash=sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff \ - --hash=sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266 \ - --hash=sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528 \ - --hash=sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6 \ - --hash=sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808 \ - --hash=sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4 \ - 
--hash=sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363 \ - --hash=sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341 \ - --hash=sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04 \ - --hash=sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41 \ - --hash=sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e \ - --hash=sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3 \ - --hash=sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899 \ - --hash=sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805 \ - --hash=sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c \ - --hash=sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c \ - --hash=sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39 \ - --hash=sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a \ - --hash=sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3 \ - --hash=sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7 \ - --hash=sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f \ - --hash=sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075 \ - --hash=sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0 \ - --hash=sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40 \ - --hash=sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428 \ - --hash=sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927 \ - --hash=sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3 \ - --hash=sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f \ - --hash=sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65 +tomli==2.0.1 \ + 
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via # black + # build # mypy -types-pyyaml==5.4.10 \ - --hash=sha256:1d9e431e9f1f78a65ea957c558535a3b15ad67ea4912bce48a6c1b613dcf81ad \ - --hash=sha256:f1d1357168988e45fa20c65aecb3911462246a84809015dd889ebf8b1db74124 +types-pyyaml==6.0.12.2 \ + --hash=sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b \ + --hash=sha256:6840819871c92deebe6a2067fb800c11b8a063632eb4e3e755914e7ab3604e83 # via -r requirements/dev-requirements.in -types-requests==2.25.6 \ - --hash=sha256:a5a305b43ea57bf64d6731f89816946a405b591eff6de28d4c0fd58422cee779 \ - --hash=sha256:e21541c0f55c066c491a639309159556dd8c5833e49fcde929c4c47bdb0002ee +types-requests==2.28.11.5 \ + --hash=sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9 \ + --hash=sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a # via -r requirements/dev-requirements.in -typing-extensions==3.10.0.2 \ - --hash=sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e \ - --hash=sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7 \ - --hash=sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34 +types-urllib3==1.26.25.4 \ + --hash=sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49 \ + --hash=sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee + # via types-requests +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via # black - # importlib-metadata # mypy - # yarl urllib3==1.26.6 \ --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f # via # -r requirements/requirements.in # 
requests -vcrpy==4.1.1 \ - --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \ - --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 +vcrpy==4.2.1 \ + --hash=sha256:7cd3e81a2c492e01c281f180bcc2a86b520b173d2b656cb5d89d99475423e013 \ + --hash=sha256:efac3e2e0b2af7686f83a266518180af7a048619b2f696e7bad9520f5e2eac09 # via -r requirements/dev-requirements.in werkzeug==2.0.2 \ --hash=sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f \ --hash=sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a # via -r requirements/requirements.in -wheel==0.37.0 \ - --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ - --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via pip-tools -wrapt==1.12.1 \ - --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 +wrapt==1.14.1 \ + --hash=sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3 \ + --hash=sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b \ + --hash=sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4 \ + --hash=sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2 \ + --hash=sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656 \ + --hash=sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3 \ + --hash=sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff \ + --hash=sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310 \ + --hash=sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a \ + --hash=sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57 \ + 
--hash=sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069 \ + --hash=sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383 \ + --hash=sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe \ + --hash=sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87 \ + --hash=sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d \ + --hash=sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b \ + --hash=sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907 \ + --hash=sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f \ + --hash=sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0 \ + --hash=sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28 \ + --hash=sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1 \ + --hash=sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853 \ + --hash=sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc \ + --hash=sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3 \ + --hash=sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3 \ + --hash=sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164 \ + --hash=sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1 \ + --hash=sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c \ + --hash=sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1 \ + --hash=sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7 \ + --hash=sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1 \ + --hash=sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320 \ + --hash=sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed \ + --hash=sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1 \ + 
--hash=sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248 \ + --hash=sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c \ + --hash=sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456 \ + --hash=sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77 \ + --hash=sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef \ + --hash=sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1 \ + --hash=sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7 \ + --hash=sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86 \ + --hash=sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4 \ + --hash=sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d \ + --hash=sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d \ + --hash=sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8 \ + --hash=sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5 \ + --hash=sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471 \ + --hash=sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00 \ + --hash=sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68 \ + --hash=sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3 \ + --hash=sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d \ + --hash=sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735 \ + --hash=sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d \ + --hash=sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569 \ + --hash=sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7 \ + --hash=sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59 \ + --hash=sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5 \ + 
--hash=sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb \ + --hash=sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b \ + --hash=sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f \ + --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462 \ + --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015 \ + --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af # via # -r requirements/dev-requirements.in # vcrpy -yarl==1.6.3 \ - --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ - --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \ - --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \ - --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \ - --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \ - --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \ - --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \ - --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \ - --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \ - --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \ - --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \ - --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \ - --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \ - --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \ - --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \ - --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \ - --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \ - 
--hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \ - --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \ - --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \ - --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \ - --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \ - --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \ - --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \ - --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \ - --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \ - --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \ - --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \ - --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \ - --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \ - --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \ - --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \ - --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \ - --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \ - --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \ - --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ - --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 +yarl==1.8.2 \ + --hash=sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87 \ + --hash=sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89 \ + --hash=sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a \ + 
--hash=sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08 \ + --hash=sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996 \ + --hash=sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077 \ + --hash=sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901 \ + --hash=sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e \ + --hash=sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee \ + --hash=sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574 \ + --hash=sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165 \ + --hash=sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634 \ + --hash=sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229 \ + --hash=sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b \ + --hash=sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f \ + --hash=sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7 \ + --hash=sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf \ + --hash=sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89 \ + --hash=sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0 \ + --hash=sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1 \ + --hash=sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe \ + --hash=sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf \ + --hash=sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76 \ + --hash=sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951 \ + --hash=sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863 \ + --hash=sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06 \ + --hash=sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562 \ + 
--hash=sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6 \ + --hash=sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c \ + --hash=sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e \ + --hash=sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1 \ + --hash=sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3 \ + --hash=sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3 \ + --hash=sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778 \ + --hash=sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8 \ + --hash=sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2 \ + --hash=sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b \ + --hash=sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d \ + --hash=sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f \ + --hash=sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c \ + --hash=sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581 \ + --hash=sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918 \ + --hash=sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c \ + --hash=sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e \ + --hash=sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220 \ + --hash=sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37 \ + --hash=sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739 \ + --hash=sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77 \ + --hash=sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6 \ + --hash=sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42 \ + --hash=sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946 \ + 
--hash=sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5 \ + --hash=sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d \ + --hash=sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146 \ + --hash=sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a \ + --hash=sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83 \ + --hash=sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef \ + --hash=sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80 \ + --hash=sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588 \ + --hash=sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5 \ + --hash=sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2 \ + --hash=sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef \ + --hash=sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826 \ + --hash=sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05 \ + --hash=sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516 \ + --hash=sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0 \ + --hash=sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4 \ + --hash=sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2 \ + --hash=sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0 \ + --hash=sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd \ + --hash=sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8 \ + --hash=sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b \ + --hash=sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1 \ + --hash=sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c # via # -r requirements/dev-requirements.in # vcrpy -zipp==3.5.0 \ - 
--hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \ - --hash=sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4 - # via - # importlib-metadata - # pep517 # The following packages are considered to be unsafe in a requirements file: -pip==21.2.4 \ - --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ - --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 # via pip-tools -setuptools==58.0.4 \ - --hash=sha256:69cc739bc2662098a68a9bc575cd974a57969e70c1d58ade89d104ab73d79770 \ - --hash=sha256:f10059f0152e0b7fb6b2edd77bcb1ecd4c9ed7048a826eb2d79f72fd2e6e237b +setuptools==65.6.3 \ + --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ + --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 # via pip-tools diff --git a/requirements/requirements.in b/requirements/requirements.in index 9354373d7..66c290ef2 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -1,4 +1,4 @@ -certifi>=2021.5.30 +certifi>=2022.12.07 charset-normalizer>=2.0.4 furl==2.0.0 idna>=2.7 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index cd33d0ed7..94b944bd8 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -1,12 +1,12 @@ # -# This file is autogenerated by pip-compile with python 3.7 +# This file is autogenerated by pip-compile with python 3.9 # To update, run: # # pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in # -certifi==2021.5.30 \ - --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ - --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 +certifi==2022.12.7 \ + 
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via # -r requirements/requirements.in # requests From 9484a1ff1a079241b8f4089baf087ce8166fbf57 Mon Sep 17 00:00:00 2001 From: Ro Date: Fri, 16 Dec 2022 12:02:14 -0800 Subject: [PATCH 297/352] Update developer requirements --- requirements/dev-bookworm-requirements.txt | 256 ++++++++++----------- requirements/dev-bullseye-requirements.txt | 256 ++++++++++----------- 2 files changed, 246 insertions(+), 266 deletions(-) diff --git a/requirements/dev-bookworm-requirements.txt b/requirements/dev-bookworm-requirements.txt index fd153a461..46cccac92 100644 --- a/requirements/dev-bookworm-requirements.txt +++ b/requirements/dev-bookworm-requirements.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in requirements/requirements.in # attrs==21.4.0 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ @@ -12,28 +12,19 @@ attrs==21.4.0 \ # jsonschema # pytest # semgrep -black==22.10.0 \ - --hash=sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7 \ - --hash=sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6 \ - --hash=sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650 \ - --hash=sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb \ - --hash=sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d \ - 
--hash=sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d \ - --hash=sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de \ - --hash=sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395 \ - --hash=sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae \ - --hash=sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa \ - --hash=sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef \ - --hash=sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383 \ - --hash=sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66 \ - --hash=sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87 \ - --hash=sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d \ - --hash=sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0 \ - --hash=sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b \ - --hash=sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458 \ - --hash=sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4 \ - --hash=sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1 \ - --hash=sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff +black==22.12.0 \ + --hash=sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320 \ + --hash=sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351 \ + --hash=sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350 \ + --hash=sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f \ + --hash=sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf \ + --hash=sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148 \ + --hash=sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4 \ + 
--hash=sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d \ + --hash=sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc \ + --hash=sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d \ + --hash=sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2 \ + --hash=sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f # via -r requirements/dev-bookworm-requirements.in boltons==21.0.0 \ --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ @@ -50,9 +41,9 @@ build==0.9.0 \ --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 # via pip-tools -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -138,9 +129,9 @@ face==22.0.0 \ --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ --hash=sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d # via glom -flake8==5.0.4 \ - --hash=sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db \ - --hash=sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248 +flake8==6.0.0 \ + --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ + --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 # via -r requirements/dev-bookworm-requirements.in glom==22.1.0 \ --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ @@ -154,9 +145,9 
@@ iniconfig==1.1.1 \ --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 # via pytest -jsonschema==4.17.1 \ - --hash=sha256:05b2d22c83640cde0b7e0aa329ca7754fbd98ea66ad8ae24aa61328dfe057fa3 \ - --hash=sha256:410ef23dcdbca4eaedc08b850079179883c2ed09378bd1f760d4af4aacfa28d7 +jsonschema==4.17.3 \ + --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ + --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 # via semgrep mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ @@ -173,9 +164,9 @@ packaging==21.3 \ # build # pytest # semgrep -pathspec==0.10.2 \ - --hash=sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5 \ - --hash=sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0 +pathspec==0.10.3 \ + --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6 \ + --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6 # via black peewee==3.15.4 \ --hash=sha256:2581520c8dfbacd9d580c2719ae259f0637a9e46eda47dfc0ce01864c6366205 @@ -184,25 +175,29 @@ pep517==0.13.0 \ --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 # via build -pip-tools==6.10.0 \ - --hash=sha256:57ac98392548f5ca96c2831927deec3035efe81ff476e3c744bd474ca9c6a1f2 \ - --hash=sha256:7f9f7356052db6942b5aaabc8eba29983591ca0ad75affbf2f0a25d9361be624 +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 + # via pip-tools +pip-tools==6.12.0 \ + --hash=sha256:8e22fbc84ede7ca522ba4b033c4fcf6a6419adabc75d24747be3d8262504489a \ + --hash=sha256:f441603c63b16f4af0dd5026f7522a49eddec2bc8a4a4979af44e1f6b0a1c13e # 
via -r requirements/dev-bookworm-requirements.in -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==2.6.0 \ + --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca \ + --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e # via black pluggy==1.0.0 \ --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 # via pytest -pycodestyle==2.9.1 \ - --hash=sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785 \ - --hash=sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b +pycodestyle==2.10.0 \ + --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ + --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 # via flake8 -pyflakes==2.5.0 \ - --hash=sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2 \ - --hash=sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3 +pyflakes==3.0.1 \ + --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ + --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd # via flake8 pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -269,6 +264,7 @@ ruamel-yaml-clib==0.2.7 \ --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ 
--hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ @@ -294,12 +290,16 @@ ruamel-yaml-clib==0.2.7 \ --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 # via ruamel-yaml -semgrep==0.122.0 \ - --hash=sha256:6116391b0c8c87581d9d72113702b6f8c2938d799cdae7d71a845ec89249566c \ - --hash=sha256:a4c7400eb8bec9fe8df25520d1ffcb5d78b87c73dc654f1c2aec1195789bc611 \ - --hash=sha256:c7002b9aba97deb6677f4cabfa5dcc8faef2808ce6a6f28ecdd70cd8e90b01b5 \ - --hash=sha256:e3fb9956e2bb926cfeff52deafe4cec24d5f1e91fe6d3fc4f81e86ec452b2ad5 +semgrep==1.2.0 \ + --hash=sha256:31f5f764ff114e2e56b3a93b09829f738cb9e287af7479e2c4714c77b10dc5c0 \ + --hash=sha256:cba38f882c9fedd00462247474a991715d5c8faf169e38cfbf299c7c89ccad55 \ + --hash=sha256:d7b9ccffab1cbecb7870e6792dc274f6a63133910150f33b6ba07d28f5cf00d5 \ + --hash=sha256:e04dbc4a95ddfc9b07550b09b88f61c5c7d81817fac1c86683d8c2534514ac6c # via -r requirements/dev-bookworm-requirements.in +setuptools==65.6.3 \ + --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ + --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 + # via pip-tools tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f @@ -318,76 +318,76 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via semgrep -ujson==5.5.0 \ - --hash=sha256:0762a4fdf86e01f3f8d8b6b7158d01fdd870799ff3f402b676e358fcd879e7eb \ - --hash=sha256:10095160dbe6bba8059ad6677a01da251431f4c68041bf796dcac0956b34f8f7 \ - 
--hash=sha256:1a485117f97312bef45f5d79d2ff97eff4da503b8a04f3691f59d31141686459 \ - --hash=sha256:1cef44ea4973344baed3d50a5da4a8843de3a6af7dea7fadf0a594e53ce5892f \ - --hash=sha256:1dc2f46c31ef22b0aaa28cd71be897bea271e700636658d573df9c43c49ebbd0 \ - --hash=sha256:21678d7e068707e4d54bdfeb8c250ebc548b51e499aed778b22112ca31a79669 \ - --hash=sha256:278aa9d7cb56435c96d19f5d702e026bcf69f824e24b41e9b52706abd3565837 \ - --hash=sha256:2ab011e3556a9a1d9461bd686870c527327765ed02fe53550531d6609a8a33ff \ - --hash=sha256:2d90414e3b4b44b39825049185959488e084ea7fcaf6124afd5c00893938b09d \ - --hash=sha256:2e506ecf89b6b9d304362ccef770831ec242a52c89dab1b4aabf1ab0eb1d5ed6 \ - --hash=sha256:33cd9084fefc74cbacf88c92fd260b61211e00bcde38d640c369e5dc34a2b4e1 \ - --hash=sha256:3b74467564814fbce322427a5664e6bcc7dae6dbc8acbef76300fe43ca4072ab \ - --hash=sha256:3f3f4240d99d55eb97cb012e9adf401f5ed9cd827af0341ac44603832202b0d2 \ - --hash=sha256:3fe1aea596f9539fc20cd9e52f098c842afc090168824fd4ca9744fe13151a03 \ - --hash=sha256:4a8cb3c8637006c5bd8237ebb5992a76ba06e39988ad5cff2096227443e8fd6a \ - --hash=sha256:4ef4ab8352861b99bd7fedb1fc6df3ea7f7d5216c789ba6d859e4ea06f1a4c45 \ - --hash=sha256:5035bb997d163f346c22abcec75190e7e756a5349e7c708bd3d5fd7066a9a854 \ - --hash=sha256:593a0f6fb0e186c5ba65465ed6f6215a30d1efa898c25e74de1c8577a1bff6d0 \ - --hash=sha256:59cdcd934385f36e8bd76aedc234371cc75c848d95bdce804ac8aa8744cfeffa \ - --hash=sha256:5a9b1320d8363a42d857fae8065a2174d38217cdd58cd8dc4f48d54e0591271e \ - --hash=sha256:5f9681ec4c60d0da590552427d770636d9079038c30b265f507ccde23caa7823 \ - --hash=sha256:5fd797a4837ba10671954e7c09010cec7aca67e09d193f4920a16beea5f66f65 \ - --hash=sha256:6019e3480d933d3698f2ecb4b46d64bfadd64e718f04fac36e681f3254b49a93 \ - --hash=sha256:603607f56a0ee84d9cd2c7e9b1d29b18a70684b94ee34f07b9ffe8dc9c8a9f81 \ - --hash=sha256:60a4b481978ea2aad8fe8af1ecc271624d01b3cf4b09e9b643dd2fe19c07634c \ - --hash=sha256:6b9812638d7aa8ecda2e8e1513fb4da999249603bffab7439a5f8f0bb362b0db \ - 
--hash=sha256:6c7ae6e0778ab9610f5e80e0595957d101ab8de18c32a8c053a19943ef4831d0 \ - --hash=sha256:6f83be8257b2f2dd6dea5ee62cd28db90584da7a7af1fba77a2102fc7943638a \ - --hash=sha256:701e81e047f5c0cffd4ac828efca68b0bd270c616654966a051e9a5f836b385e \ - --hash=sha256:703fd69d9cb21d6ec2086789df9be2cf8140a76ff127050c24007ea8940dcd3b \ - --hash=sha256:7471d4486f23518cff343f1eec6c68d1b977ed74c3e6cc3e1ac896b9b7d68645 \ - --hash=sha256:765d46f3d5e7a1d48075035e2d1a9164f683e3fccde834ca04602e6c588835bc \ - --hash=sha256:7a09d203983104918c62f2eef9406f24c355511f9217967df23e70fa7f5b54ff \ - --hash=sha256:7c20cc83b0df47129ec6ed8a47fa7dcfc309c5bad029464004162738502568bb \ - --hash=sha256:7d7cfac2547c93389fa303fc0c0eb6698825564e8389c41c9b60009c746207b6 \ - --hash=sha256:7d87c817b292efb748f1974f37e8bb8a8772ef92f05f84e507159360814bcc3f \ - --hash=sha256:8141f654432cf75144d6103bfac2286b8adf23467201590b173a74535d6be22d \ - --hash=sha256:849f2ff40264152f25589cb48ddb4a43d14db811f841ec73989bfc0c8c4853fa \ - --hash=sha256:880c84ce59f49776cf120f77e7ca04877c97c6887917078dbc369eb47004d7cf \ - --hash=sha256:94874584b733a18b310b0e954d53168e62cd4a0fd9db85b1903f0902a7eb33e8 \ - --hash=sha256:95603eff711b8f3b9596e1c961dbeb745a792ba1904141612f194e07edd71e5f \ - --hash=sha256:9585892091ae86045135d6a6129a644142d6a51b23e1428bb5de6d10bc0ce0c7 \ - --hash=sha256:977bf5be704a88d46bf5b228df8b44521b1f3119d741062191608b3a6a38f224 \ - --hash=sha256:9cdc46859024501c20ab74ad542cdf2f08b94b5ce384f2f569483fa3ed926d04 \ - --hash=sha256:a34a5f034b339f69ef7f6a134c22d04b92e07b6ddc1dd65382e7e4ec65d6437d \ - --hash=sha256:a655f7b755cfc5c07f2116b6dcf0ba148c89adef9a6d40c1b0f1fada878c4345 \ - --hash=sha256:a7d12f2d2df195c8c4e49d2cdbad640353a856c62ca2c624d8b47aa33b65a2a2 \ - --hash=sha256:abfe83e082c9208891e2158c1b5044a650ecec408b823bf6bf16cd7f8085cafa \ - --hash=sha256:b25077a971c7da47bd6846a912a747f6963776d90720c88603b1b55d81790780 \ - --hash=sha256:bf416a93e1331820c77e3429df26946dbd4fe105e9b487cd2d1b7298b75784a8 \ - 
--hash=sha256:c04ae27e076d81a3839047d8eed57c1e17e361640616fd520d752375e3ba8f0c \ - --hash=sha256:d5bea13c73f36c4346808df3fa806596163a7962b6d28001ca2a391cab856089 \ - --hash=sha256:d75bef34e69e7effb7b4849e3f830e3174d2cc6ec7273503fdde111c222dc9b3 \ - --hash=sha256:d93940664a5ccfd79f72dcb939b0c31a3479889f14f0eb95ec52976f8c0cae7d \ - --hash=sha256:d9c89c521dc90c7564358e525f849b93ad1d710553c1491f66b8cce8113bc901 \ - --hash=sha256:e0b36257dc90194784531c3b922d8d31fb2b4d8e5adfd27aff4eee7174176365 \ - --hash=sha256:e1135264bcd40965cd35b0869e36952f54825024befdc7a923df9a7d83cfd800 \ - --hash=sha256:e510d288e613d6927796dfb728e13e4530fc83b9ccac5888a21f7860486eab21 \ - --hash=sha256:ee9a2c9a4b2421e77f8fe33ed0621dea03c66c710707553020b1e32f3afb6240 \ - --hash=sha256:f19f11055ba2961eb39bdb1ff15763a53fca4fa0b5b624da3c7a528e83cdd09c \ - --hash=sha256:f26544bc10c83a2ff9aa2e093500c1b473f327faae31fb468d591e5823333376 \ - --hash=sha256:f4875cafc9a6482c04c7df52a725d1c41beb74913c0ff4ec8f189f1954a2afe9 \ - --hash=sha256:f5179088ef6487c475604b7898731a6ddeeada7702cfb2162155b016703a8475 \ - --hash=sha256:f63d1ae1ca17bb2c847e298c7bcf084a73d56d434b4c50509fb93a4b4300b0b2 \ - --hash=sha256:ff4928dc1e9704b567171c16787238201fdbf023665573c12c02146fe1e02eec +ujson==5.6.0 \ + --hash=sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b \ + --hash=sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169 \ + --hash=sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f \ + --hash=sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555 \ + --hash=sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983 \ + --hash=sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc \ + --hash=sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e \ + --hash=sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085 \ + 
--hash=sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77 \ + --hash=sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6 \ + --hash=sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e \ + --hash=sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e \ + --hash=sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570 \ + --hash=sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275 \ + --hash=sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6 \ + --hash=sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b \ + --hash=sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99 \ + --hash=sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035 \ + --hash=sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522 \ + --hash=sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679 \ + --hash=sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847 \ + --hash=sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c \ + --hash=sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888 \ + --hash=sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851 \ + --hash=sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f \ + --hash=sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b \ + --hash=sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a \ + --hash=sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf \ + --hash=sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057 \ + --hash=sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e \ + --hash=sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6 \ + --hash=sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458 \ + 
--hash=sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765 \ + --hash=sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9 \ + --hash=sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf \ + --hash=sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804 \ + --hash=sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab \ + --hash=sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490 \ + --hash=sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5 \ + --hash=sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059 \ + --hash=sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d \ + --hash=sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3 \ + --hash=sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10 \ + --hash=sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b \ + --hash=sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e \ + --hash=sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74 \ + --hash=sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f \ + --hash=sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709 \ + --hash=sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56 \ + --hash=sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405 \ + --hash=sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6 \ + --hash=sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490 \ + --hash=sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2 \ + --hash=sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a \ + --hash=sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7 \ + --hash=sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c \ + 
--hash=sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a \ + --hash=sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7 \ + --hash=sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60 \ + --hash=sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d \ + --hash=sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7 \ + --hash=sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04 \ + --hash=sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a \ + --hash=sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e \ + --hash=sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82 # via python-lsp-jsonrpc -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.13 \ + --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ + --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 # via # requests # semgrep @@ -399,13 +399,3 @@ wheel==0.38.4 \ --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via pip-tools - -# The following packages are considered to be unsafe in a requirements file: -pip==22.3.1 \ - --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ - --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 - # via pip-tools -setuptools==65.6.0 \ - --hash=sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840 \ - --hash=sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d - # via pip-tools diff --git a/requirements/dev-bullseye-requirements.txt b/requirements/dev-bullseye-requirements.txt index 3271ab69c..36465c3fb 100644 --- 
a/requirements/dev-bullseye-requirements.txt +++ b/requirements/dev-bullseye-requirements.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in requirements/requirements.in # attrs==21.4.0 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ @@ -12,28 +12,19 @@ attrs==21.4.0 \ # jsonschema # pytest # semgrep -black==22.10.0 \ - --hash=sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7 \ - --hash=sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6 \ - --hash=sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650 \ - --hash=sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb \ - --hash=sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d \ - --hash=sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d \ - --hash=sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de \ - --hash=sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395 \ - --hash=sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae \ - --hash=sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa \ - --hash=sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef \ - --hash=sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383 \ - --hash=sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66 \ - --hash=sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87 \ - 
--hash=sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d \ - --hash=sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0 \ - --hash=sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b \ - --hash=sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458 \ - --hash=sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4 \ - --hash=sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1 \ - --hash=sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff +black==22.12.0 \ + --hash=sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320 \ + --hash=sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351 \ + --hash=sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350 \ + --hash=sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f \ + --hash=sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf \ + --hash=sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148 \ + --hash=sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4 \ + --hash=sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d \ + --hash=sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc \ + --hash=sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d \ + --hash=sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2 \ + --hash=sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f # via -r requirements/dev-bullseye-requirements.in boltons==21.0.0 \ --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ @@ -50,9 +41,9 @@ build==0.9.0 \ --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 # via pip-tools -certifi==2022.9.24 \ - 
--hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -138,9 +129,9 @@ face==22.0.0 \ --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ --hash=sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d # via glom -flake8==5.0.4 \ - --hash=sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db \ - --hash=sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248 +flake8==6.0.0 \ + --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ + --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 # via -r requirements/dev-bullseye-requirements.in glom==22.1.0 \ --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ @@ -154,9 +145,9 @@ iniconfig==1.1.1 \ --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 # via pytest -jsonschema==4.17.1 \ - --hash=sha256:05b2d22c83640cde0b7e0aa329ca7754fbd98ea66ad8ae24aa61328dfe057fa3 \ - --hash=sha256:410ef23dcdbca4eaedc08b850079179883c2ed09378bd1f760d4af4aacfa28d7 +jsonschema==4.17.3 \ + --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ + --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 # via semgrep mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ @@ -173,9 +164,9 @@ packaging==21.3 \ # build # pytest # semgrep -pathspec==0.10.2 \ - 
--hash=sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5 \ - --hash=sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0 +pathspec==0.10.3 \ + --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6 \ + --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6 # via black peewee==3.15.4 \ --hash=sha256:2581520c8dfbacd9d580c2719ae259f0637a9e46eda47dfc0ce01864c6366205 @@ -184,25 +175,29 @@ pep517==0.13.0 \ --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 # via build -pip-tools==6.10.0 \ - --hash=sha256:57ac98392548f5ca96c2831927deec3035efe81ff476e3c744bd474ca9c6a1f2 \ - --hash=sha256:7f9f7356052db6942b5aaabc8eba29983591ca0ad75affbf2f0a25d9361be624 +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 + # via pip-tools +pip-tools==6.12.0 \ + --hash=sha256:8e22fbc84ede7ca522ba4b033c4fcf6a6419adabc75d24747be3d8262504489a \ + --hash=sha256:f441603c63b16f4af0dd5026f7522a49eddec2bc8a4a4979af44e1f6b0a1c13e # via -r requirements/dev-bullseye-requirements.in -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==2.6.0 \ + --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca \ + --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e # via black pluggy==1.0.0 \ --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 # via pytest -pycodestyle==2.9.1 \ - --hash=sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785 \ - 
--hash=sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b +pycodestyle==2.10.0 \ + --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ + --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 # via flake8 -pyflakes==2.5.0 \ - --hash=sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2 \ - --hash=sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3 +pyflakes==3.0.1 \ + --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ + --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd # via flake8 pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -269,6 +264,7 @@ ruamel-yaml-clib==0.2.7 \ --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ @@ -294,12 +290,16 @@ ruamel-yaml-clib==0.2.7 \ --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 # via ruamel-yaml -semgrep==0.122.0 \ - --hash=sha256:6116391b0c8c87581d9d72113702b6f8c2938d799cdae7d71a845ec89249566c \ - --hash=sha256:a4c7400eb8bec9fe8df25520d1ffcb5d78b87c73dc654f1c2aec1195789bc611 \ - --hash=sha256:c7002b9aba97deb6677f4cabfa5dcc8faef2808ce6a6f28ecdd70cd8e90b01b5 \ - --hash=sha256:e3fb9956e2bb926cfeff52deafe4cec24d5f1e91fe6d3fc4f81e86ec452b2ad5 +semgrep==1.2.0 \ + 
--hash=sha256:31f5f764ff114e2e56b3a93b09829f738cb9e287af7479e2c4714c77b10dc5c0 \ + --hash=sha256:cba38f882c9fedd00462247474a991715d5c8faf169e38cfbf299c7c89ccad55 \ + --hash=sha256:d7b9ccffab1cbecb7870e6792dc274f6a63133910150f33b6ba07d28f5cf00d5 \ + --hash=sha256:e04dbc4a95ddfc9b07550b09b88f61c5c7d81817fac1c86683d8c2534514ac6c # via -r requirements/dev-bullseye-requirements.in +setuptools==65.6.3 \ + --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ + --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 + # via pip-tools tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f @@ -320,76 +320,76 @@ typing-extensions==4.4.0 \ # via # black # semgrep -ujson==5.5.0 \ - --hash=sha256:0762a4fdf86e01f3f8d8b6b7158d01fdd870799ff3f402b676e358fcd879e7eb \ - --hash=sha256:10095160dbe6bba8059ad6677a01da251431f4c68041bf796dcac0956b34f8f7 \ - --hash=sha256:1a485117f97312bef45f5d79d2ff97eff4da503b8a04f3691f59d31141686459 \ - --hash=sha256:1cef44ea4973344baed3d50a5da4a8843de3a6af7dea7fadf0a594e53ce5892f \ - --hash=sha256:1dc2f46c31ef22b0aaa28cd71be897bea271e700636658d573df9c43c49ebbd0 \ - --hash=sha256:21678d7e068707e4d54bdfeb8c250ebc548b51e499aed778b22112ca31a79669 \ - --hash=sha256:278aa9d7cb56435c96d19f5d702e026bcf69f824e24b41e9b52706abd3565837 \ - --hash=sha256:2ab011e3556a9a1d9461bd686870c527327765ed02fe53550531d6609a8a33ff \ - --hash=sha256:2d90414e3b4b44b39825049185959488e084ea7fcaf6124afd5c00893938b09d \ - --hash=sha256:2e506ecf89b6b9d304362ccef770831ec242a52c89dab1b4aabf1ab0eb1d5ed6 \ - --hash=sha256:33cd9084fefc74cbacf88c92fd260b61211e00bcde38d640c369e5dc34a2b4e1 \ - --hash=sha256:3b74467564814fbce322427a5664e6bcc7dae6dbc8acbef76300fe43ca4072ab \ - --hash=sha256:3f3f4240d99d55eb97cb012e9adf401f5ed9cd827af0341ac44603832202b0d2 \ - 
--hash=sha256:3fe1aea596f9539fc20cd9e52f098c842afc090168824fd4ca9744fe13151a03 \ - --hash=sha256:4a8cb3c8637006c5bd8237ebb5992a76ba06e39988ad5cff2096227443e8fd6a \ - --hash=sha256:4ef4ab8352861b99bd7fedb1fc6df3ea7f7d5216c789ba6d859e4ea06f1a4c45 \ - --hash=sha256:5035bb997d163f346c22abcec75190e7e756a5349e7c708bd3d5fd7066a9a854 \ - --hash=sha256:593a0f6fb0e186c5ba65465ed6f6215a30d1efa898c25e74de1c8577a1bff6d0 \ - --hash=sha256:59cdcd934385f36e8bd76aedc234371cc75c848d95bdce804ac8aa8744cfeffa \ - --hash=sha256:5a9b1320d8363a42d857fae8065a2174d38217cdd58cd8dc4f48d54e0591271e \ - --hash=sha256:5f9681ec4c60d0da590552427d770636d9079038c30b265f507ccde23caa7823 \ - --hash=sha256:5fd797a4837ba10671954e7c09010cec7aca67e09d193f4920a16beea5f66f65 \ - --hash=sha256:6019e3480d933d3698f2ecb4b46d64bfadd64e718f04fac36e681f3254b49a93 \ - --hash=sha256:603607f56a0ee84d9cd2c7e9b1d29b18a70684b94ee34f07b9ffe8dc9c8a9f81 \ - --hash=sha256:60a4b481978ea2aad8fe8af1ecc271624d01b3cf4b09e9b643dd2fe19c07634c \ - --hash=sha256:6b9812638d7aa8ecda2e8e1513fb4da999249603bffab7439a5f8f0bb362b0db \ - --hash=sha256:6c7ae6e0778ab9610f5e80e0595957d101ab8de18c32a8c053a19943ef4831d0 \ - --hash=sha256:6f83be8257b2f2dd6dea5ee62cd28db90584da7a7af1fba77a2102fc7943638a \ - --hash=sha256:701e81e047f5c0cffd4ac828efca68b0bd270c616654966a051e9a5f836b385e \ - --hash=sha256:703fd69d9cb21d6ec2086789df9be2cf8140a76ff127050c24007ea8940dcd3b \ - --hash=sha256:7471d4486f23518cff343f1eec6c68d1b977ed74c3e6cc3e1ac896b9b7d68645 \ - --hash=sha256:765d46f3d5e7a1d48075035e2d1a9164f683e3fccde834ca04602e6c588835bc \ - --hash=sha256:7a09d203983104918c62f2eef9406f24c355511f9217967df23e70fa7f5b54ff \ - --hash=sha256:7c20cc83b0df47129ec6ed8a47fa7dcfc309c5bad029464004162738502568bb \ - --hash=sha256:7d7cfac2547c93389fa303fc0c0eb6698825564e8389c41c9b60009c746207b6 \ - --hash=sha256:7d87c817b292efb748f1974f37e8bb8a8772ef92f05f84e507159360814bcc3f \ - --hash=sha256:8141f654432cf75144d6103bfac2286b8adf23467201590b173a74535d6be22d \ - 
--hash=sha256:849f2ff40264152f25589cb48ddb4a43d14db811f841ec73989bfc0c8c4853fa \ - --hash=sha256:880c84ce59f49776cf120f77e7ca04877c97c6887917078dbc369eb47004d7cf \ - --hash=sha256:94874584b733a18b310b0e954d53168e62cd4a0fd9db85b1903f0902a7eb33e8 \ - --hash=sha256:95603eff711b8f3b9596e1c961dbeb745a792ba1904141612f194e07edd71e5f \ - --hash=sha256:9585892091ae86045135d6a6129a644142d6a51b23e1428bb5de6d10bc0ce0c7 \ - --hash=sha256:977bf5be704a88d46bf5b228df8b44521b1f3119d741062191608b3a6a38f224 \ - --hash=sha256:9cdc46859024501c20ab74ad542cdf2f08b94b5ce384f2f569483fa3ed926d04 \ - --hash=sha256:a34a5f034b339f69ef7f6a134c22d04b92e07b6ddc1dd65382e7e4ec65d6437d \ - --hash=sha256:a655f7b755cfc5c07f2116b6dcf0ba148c89adef9a6d40c1b0f1fada878c4345 \ - --hash=sha256:a7d12f2d2df195c8c4e49d2cdbad640353a856c62ca2c624d8b47aa33b65a2a2 \ - --hash=sha256:abfe83e082c9208891e2158c1b5044a650ecec408b823bf6bf16cd7f8085cafa \ - --hash=sha256:b25077a971c7da47bd6846a912a747f6963776d90720c88603b1b55d81790780 \ - --hash=sha256:bf416a93e1331820c77e3429df26946dbd4fe105e9b487cd2d1b7298b75784a8 \ - --hash=sha256:c04ae27e076d81a3839047d8eed57c1e17e361640616fd520d752375e3ba8f0c \ - --hash=sha256:d5bea13c73f36c4346808df3fa806596163a7962b6d28001ca2a391cab856089 \ - --hash=sha256:d75bef34e69e7effb7b4849e3f830e3174d2cc6ec7273503fdde111c222dc9b3 \ - --hash=sha256:d93940664a5ccfd79f72dcb939b0c31a3479889f14f0eb95ec52976f8c0cae7d \ - --hash=sha256:d9c89c521dc90c7564358e525f849b93ad1d710553c1491f66b8cce8113bc901 \ - --hash=sha256:e0b36257dc90194784531c3b922d8d31fb2b4d8e5adfd27aff4eee7174176365 \ - --hash=sha256:e1135264bcd40965cd35b0869e36952f54825024befdc7a923df9a7d83cfd800 \ - --hash=sha256:e510d288e613d6927796dfb728e13e4530fc83b9ccac5888a21f7860486eab21 \ - --hash=sha256:ee9a2c9a4b2421e77f8fe33ed0621dea03c66c710707553020b1e32f3afb6240 \ - --hash=sha256:f19f11055ba2961eb39bdb1ff15763a53fca4fa0b5b624da3c7a528e83cdd09c \ - --hash=sha256:f26544bc10c83a2ff9aa2e093500c1b473f327faae31fb468d591e5823333376 \ - 
--hash=sha256:f4875cafc9a6482c04c7df52a725d1c41beb74913c0ff4ec8f189f1954a2afe9 \ - --hash=sha256:f5179088ef6487c475604b7898731a6ddeeada7702cfb2162155b016703a8475 \ - --hash=sha256:f63d1ae1ca17bb2c847e298c7bcf084a73d56d434b4c50509fb93a4b4300b0b2 \ - --hash=sha256:ff4928dc1e9704b567171c16787238201fdbf023665573c12c02146fe1e02eec +ujson==5.6.0 \ + --hash=sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b \ + --hash=sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169 \ + --hash=sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f \ + --hash=sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555 \ + --hash=sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983 \ + --hash=sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc \ + --hash=sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e \ + --hash=sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085 \ + --hash=sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77 \ + --hash=sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6 \ + --hash=sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e \ + --hash=sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e \ + --hash=sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570 \ + --hash=sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275 \ + --hash=sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6 \ + --hash=sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b \ + --hash=sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99 \ + --hash=sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035 \ + --hash=sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522 \ + 
--hash=sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679 \ + --hash=sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847 \ + --hash=sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c \ + --hash=sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888 \ + --hash=sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851 \ + --hash=sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f \ + --hash=sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b \ + --hash=sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a \ + --hash=sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf \ + --hash=sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057 \ + --hash=sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e \ + --hash=sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6 \ + --hash=sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458 \ + --hash=sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765 \ + --hash=sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9 \ + --hash=sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf \ + --hash=sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804 \ + --hash=sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab \ + --hash=sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490 \ + --hash=sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5 \ + --hash=sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059 \ + --hash=sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d \ + --hash=sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3 \ + --hash=sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10 \ + 
--hash=sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b \ + --hash=sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e \ + --hash=sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74 \ + --hash=sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f \ + --hash=sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709 \ + --hash=sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56 \ + --hash=sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405 \ + --hash=sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6 \ + --hash=sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490 \ + --hash=sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2 \ + --hash=sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a \ + --hash=sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7 \ + --hash=sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c \ + --hash=sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a \ + --hash=sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7 \ + --hash=sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60 \ + --hash=sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d \ + --hash=sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7 \ + --hash=sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04 \ + --hash=sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a \ + --hash=sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e \ + --hash=sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82 # via python-lsp-jsonrpc -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - 
--hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.13 \ + --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ + --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 # via # requests # semgrep @@ -401,13 +401,3 @@ wheel==0.38.4 \ --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via pip-tools - -# The following packages are considered to be unsafe in a requirements file: -pip==22.3.1 \ - --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ - --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 - # via pip-tools -setuptools==65.6.0 \ - --hash=sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840 \ - --hash=sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d - # via pip-tools From a5afefe3180cdf907345ba5e719374810996b19a Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Wed, 21 Dec 2022 11:13:33 -0500 Subject: [PATCH 298/352] update CI build env to 3.9-bullseye --- .circleci/config.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f1821abf3..a771f86f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -52,9 +52,9 @@ common-steps: version: 2 jobs: - build-buster: + build-bullseye: docker: - - image: circleci/python:3.7-buster + - image: circleci/python:3.9-bullseye steps: - checkout - *removevirtualenv @@ -63,9 +63,9 @@ jobs: - *make_source_tarball - *build_debian_package - test-buster: + test-bullseye: docker: - - image: circleci/python:3.7-buster + - image: circleci/python:3.9-bullseye steps: - checkout - *run_tests @@ -74,8 +74,8 @@ workflows: version: 2 securedrop_proxy_ci: jobs: - - test-buster - - build-buster + - test-bullseye + - build-bullseye nightly: triggers: 
@@ -86,5 +86,5 @@ workflows: only: - main jobs: - - test-buster - - build-buster + - test-bullseye + - build-bullseye From e8d617b0f303923abf32cec11d2bae2043e4c6f5 Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Wed, 21 Dec 2022 11:19:27 -0500 Subject: [PATCH 299/352] Used Python 3-style type syntax to clear flake8 errors. --- securedrop_proxy/proxy.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 36854bb05..e5a7c2da7 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -31,14 +31,14 @@ def __init__(self) -> None: self.method = "" self.path_query = "" self.body = "" - self.headers = {} # type: Dict[str, str] + self.headers: Dict[str, str] = {} class Response: def __init__(self, status: int) -> None: self.status = status self.body = "" - self.headers = {} # type: Dict[str, str] + self.headers: Dict[str, str] = {} self.version = version.version @@ -47,10 +47,10 @@ def __init__(self, conf_path: str, req: Req = Req(), timeout: float = 10.0) -> N self.read_conf(conf_path) self.req = req - self.res = None # type: Optional[Response] + self.res: Optional[Response] = None self.timeout = float(timeout) - self._prepared_request = None # type: Optional[requests.PreparedRequest] + self._prepared_request: Optional[requests.PreparedRequest] = None def on_done(self) -> None: print(json.dumps(self.res.__dict__)) From 51828f02e664dcdaaeec40d0a7f1b1e19f85a1a9 Mon Sep 17 00:00:00 2001 From: Kevin O'Gorman Date: Wed, 21 Dec 2022 15:33:11 -0500 Subject: [PATCH 300/352] Updated certifi in build-requirements.txt --- requirements/build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index f69c6fe8d..af35aa35d 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -1,4 +1,4 @@ -certifi==2021.5.30 
--hash=sha256:2dee87f82eab7a047280038bacb5bdbc4ee308ae39522ccd9da2ceefe7b6f84e +certifi==2022.12.7 --hash=sha256:7f205a1a4f02f4970fb5d0e16457964bb30d6b678a766515278bc56e6eeb645f charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2a536bf2c5b620b1f27d03d74 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 From be061b381664fa51a38cd5dad55e5af866a5ce53 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Tue, 10 Jan 2023 16:28:19 -0500 Subject: [PATCH 301/352] Use relative import for unittest.mock.patch() It's already imported, we just weren't using it. --- tests/test_entrypoint.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/test_entrypoint.py b/tests/test_entrypoint.py index d12e661fb..58102ca47 100644 --- a/tests/test_entrypoint.py +++ b/tests/test_entrypoint.py @@ -5,7 +5,7 @@ import os import platform import tempfile -import unittest.mock +import unittest from unittest.mock import patch import vcr @@ -33,9 +33,9 @@ def test_missing_config(self): self.assertFalse(os.path.exists(config_path)) output = None - with unittest.mock.patch( + with patch( "sys.argv", new_callable=lambda: ["sd-proxy", config_path] - ) as mock_argv, unittest.mock.patch( # noqa: F841 + ) as mock_argv, patch( # noqa: F841 "sys.stdout", new_callable=io.StringIO ) as mock_stdout: with self.assertRaises(SystemExit), sdhome(): @@ -72,7 +72,7 @@ def test_unwritable_log_folder(self): output = None with sdhome() as home: os.chmod(home, 0o0444) - with unittest.mock.patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: + with patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: with self.assertRaises(SystemExit): entrypoint.start() output = mock_stdout.getvalue() @@ -85,9 +85,9 @@ def test_unwritable_log_folder(self): def test_wrong_number_of_arguments(self): with sdhome() as home: 
# noqa: F841 - with unittest.mock.patch( + with patch( "sys.argv", new_callable=lambda: ["sd-proxy"] - ) as mock_argv, unittest.mock.patch( # noqa: F841 + ) as mock_argv, patch( # noqa: F841 "sys.stdout", new_callable=io.StringIO ) as mock_stdout: with self.assertRaises(SystemExit): @@ -106,11 +106,11 @@ def test_empty_input(self): self.assertTrue(os.path.exists(config_path)) with sdhome() as home: # noqa: F841 - with unittest.mock.patch( + with patch( "sys.stdin", new_callable=lambda: io.StringIO("") - ) as mock_stdin, unittest.mock.patch( # noqa: F841 + ) as mock_stdin, patch( # noqa: F841 "sys.stdout", new_callable=io.StringIO - ) as mock_stdout, unittest.mock.patch( + ) as mock_stdout, patch( "sys.argv", new_callable=lambda: ["sd-proxy", config_path] ) as mock_argv: # noqa: F841 entrypoint.start() @@ -132,11 +132,11 @@ def test_json_response(self): } output = None - with sdhome() as home, unittest.mock.patch( # noqa: F841 + with sdhome() as home, patch( # noqa: F841 "sys.stdin", new_callable=lambda: io.StringIO(json.dumps(test_input)) - ) as mock_stding, unittest.mock.patch( # noqa: F841 + ) as mock_stding, patch( # noqa: F841 "sys.stdout", new_callable=io.StringIO - ) as mock_stdout, unittest.mock.patch( + ) as mock_stdout, patch( "sys.argv", new_callable=lambda: ["sd-proxy", config_path] ) as mock_argv: # noqa: F841 entrypoint.start() From 7ae672fae32f073123d1ffa804ac74d0e707152e Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Tue, 10 Jan 2023 16:24:33 -0500 Subject: [PATCH 302/352] Refactor test_unwritable_log_folder to use mocking Manually manipulating the permissions doesn't work when tests are being run as root, as we'd like to switch CI to. Instead we can simply mock the PermissionsError being thrown to test the error handling behavior regardless of the actual filesystem and permissions state. 
--- tests/test_entrypoint.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/test_entrypoint.py b/tests/test_entrypoint.py index 58102ca47..0e5af9b92 100644 --- a/tests/test_entrypoint.py +++ b/tests/test_entrypoint.py @@ -69,14 +69,14 @@ def test_unwritable_log_folder(self): """ Tests a permission problem in `configure_logging`. """ - output = None - with sdhome() as home: - os.chmod(home, 0o0444) - with patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: - with self.assertRaises(SystemExit): - entrypoint.start() - output = mock_stdout.getvalue() - os.chmod(home, 0o0700) + with sdhome(): + with patch( + "os.makedirs", side_effect=PermissionError("[Errno 13] Permission denied: '/foo'") + ): + with patch("sys.stdout", new_callable=io.StringIO) as mock_stdout: + with self.assertRaises(SystemExit): + entrypoint.start() + output = mock_stdout.getvalue() response = json.loads(output) self.assertEqual(response["status"], http.HTTPStatus.INTERNAL_SERVER_ERROR) From 70dda4f64da5d17c26fea344362da245df257a28 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 9 Jan 2023 11:58:17 -0500 Subject: [PATCH 303/352] Use plain Debian containers for CI --- .circleci/config.yml | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a771f86f1..abb89ba3e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,14 +1,10 @@ --- common-steps: - - &removevirtualenv - run: - name: Removes the upstream virtualenv from the original container image - command: sudo pip uninstall virtualenv -y - - &run_tests run: name: Install requirements and run tests command: | + apt-get update && apt-get install -y make python3-venv make venv source .venv/bin/activate make check @@ -17,8 +13,8 @@ common-steps: run: name: Install Debian packaging dependencies and download wheels command: | + apt-get update && apt-get install -y git git-lfs make sudo mkdir ~/packaging && 
cd ~/packaging - git config --global --unset url.ssh://git@github.com.insteadof git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd securedrop-debian-packaging make install-deps @@ -47,17 +43,16 @@ common-steps: command: | cd ~/packaging/securedrop-debian-packaging export PKG_VERSION=1000.0 - export PKG_PATH=/home/circleci/project/dist/securedrop-proxy-$PKG_VERSION.tar.gz + export PKG_PATH=~/project make securedrop-proxy version: 2 jobs: build-bullseye: docker: - - image: circleci/python:3.9-bullseye + - image: debian:bullseye steps: - checkout - - *removevirtualenv - *install_packaging_dependencies - *verify_requirements - *make_source_tarball @@ -65,7 +60,7 @@ jobs: test-bullseye: docker: - - image: circleci/python:3.9-bullseye + - image: debian:bullseye steps: - checkout - *run_tests From 0b503c7797907e89b1b0c632cd61cb033e44f7d4 Mon Sep 17 00:00:00 2001 From: ro Date: Tue, 17 May 2022 17:01:56 -0400 Subject: [PATCH 304/352] Separate CLI wrapper into its own module. Add specific Export Exceptions. Reorganize Print actions into methods in service class. Use commands to separate different types of export and print actions. Rename SDExport to Archive. Use methods instead of classes for each export routine. Move ExportException to common directory and add Command enum for supported export commands. 
--- securedrop_export/archive.py | 84 ++++ securedrop_export/disk/actions.py | 252 ------------ securedrop_export/disk/cli.py | 380 ++++++++++++++++++ securedrop_export/disk/service.py | 94 +++++ securedrop_export/disk/status.py | 22 + securedrop_export/disk/volume.py | 57 +++ securedrop_export/entrypoint.py | 4 +- securedrop_export/enums.py | 40 ++ securedrop_export/exceptions.py | 52 +-- securedrop_export/export.py | 151 ------- securedrop_export/main.py | 99 +++-- .../print/{actions.py => service.py} | 187 +++++---- securedrop_export/print/status.py | 13 + securedrop_export/utils.py | 13 +- setup.py | 4 +- tests/disk/test_actions.py | 272 +++++-------- tests/disk/test_cli.py | 376 +++++++++++++++++ tests/disk/test_status.py | 0 tests/disk/test_volume.py | 47 +++ tests/print/test_actions.py | 12 +- tests/test_export.py | 33 +- 21 files changed, 1427 insertions(+), 765 deletions(-) create mode 100755 securedrop_export/archive.py delete mode 100644 securedrop_export/disk/actions.py create mode 100644 securedrop_export/disk/cli.py create mode 100644 securedrop_export/disk/service.py create mode 100644 securedrop_export/disk/status.py create mode 100644 securedrop_export/disk/volume.py create mode 100644 securedrop_export/enums.py delete mode 100755 securedrop_export/export.py rename securedrop_export/print/{actions.py => service.py} (58%) create mode 100644 securedrop_export/print/status.py create mode 100644 tests/disk/test_cli.py create mode 100644 tests/disk/test_status.py create mode 100644 tests/disk/test_volume.py diff --git a/securedrop_export/archive.py b/securedrop_export/archive.py new file mode 100755 index 000000000..76e18f61c --- /dev/null +++ b/securedrop_export/archive.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 + +import abc +import datetime +import json +import logging +import os +import shutil +import subprocess +import sys +import tempfile + +from securedrop_export.enums import Command +from securedrop_export.exceptions import ExportStatus +from 
securedrop_export.utils import safe_extractall + +logger = logging.getLogger(__name__) + + +class Metadata(object): + """ + Object to parse, validate and store json metadata from the sd-export archive. + """ + + METADATA_FILE = "metadata.json" + + SUPPORTED_ENCRYPTION_METHODS = ["luks"] + + def __init__(self, archive_path): + # Calling create_and_validate() is the preferred way to initialize + self.metadata_path = os.path.join(archive_path, self.METADATA_FILE) + + @staticmethod + def create_and_validate(cls, archive_path) -> 'Metadata': + """ + Create and validate metadata object. Raise ExportException for invalid metadata. + """ + md = cls(archive_path) + + try: + with open(md.metadata_path) as f: + logger.info("Parsing archive metadata") + json_config = json.loads(f.read()) + md.export_method = json_config.get("device", None) + md.encryption_method = json_config.get("encryption_method", None) + md.encryption_key = json_config.get("encryption_key", None) + logger.info( + "Exporting to device {} with encryption_method {}".format( + md.export_method, md.encryption_method + ) + ) + + # Validate metadata - this will fail if command is not in list of supported commands + md.command = Commmand.value_of(md.export_method) + if md.command is Commmand.EXPORT and not md.encryption_method in md.SUPPORTED_ENCRYPTION_METHODS: + logger.error("Unsuported encryption method") + raise ExportException(ExportStatus.ERROR_ARCHIVE_METADATA) + + except Exception as ex: + logger.error("Metadata parsing failure") + raise ExportException(ExportStatus.ERROR_METADATA_PARSING) from ex + + return md + + +class Archive(object): + def __init__(self, archive, config_path): + os.umask(0o077) + self.archive = archive + self.submission_dirname = os.path.basename(self.archive).split(".")[0] + self.target_dirname = "sd-export-{}".format( + datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + ) + self.tmpdir = tempfile.mkdtemp() + + def extract_tarball(self): + try: + logger.info("Extracting tarball {} 
into {}".format(self.archive, self.tmpdir)) + safe_extractall(self.archive, self.tmpdir) + except Exception as ex: + logger.error("Unable to extract tarball: {}".format(ex)) + raise ExportException(ExportStatus.ERROR_EXTRACTION) from ex + + \ No newline at end of file diff --git a/securedrop_export/disk/actions.py b/securedrop_export/disk/actions.py deleted file mode 100644 index 9619aba03..000000000 --- a/securedrop_export/disk/actions.py +++ /dev/null @@ -1,252 +0,0 @@ -import logging -import os -import subprocess -import sys - -from typing import List - -from securedrop_export.export import ExportAction -from securedrop_export.exceptions import ExportStatus - -MOUNTPOINT = "/media/usb" -ENCRYPTED_DEVICE = "encrypted_volume" - -logger = logging.getLogger(__name__) - - -class DiskAction(ExportAction): - def __init__(self, submission): - self.submission = submission - self.device = None # Optional[str] - self.mountpoint = MOUNTPOINT - self.encrypted_device = ENCRYPTED_DEVICE - - def run(self) -> None: - """Run logic""" - raise NotImplementedError - - def check_usb_connected(self, exit=False) -> None: - usb_devices = self._get_connected_usbs() - - if len(usb_devices) == 0: - logger.info("0 USB devices connected") - self.submission.exit_gracefully(ExportStatus.USB_NOT_CONNECTED.value) - elif len(usb_devices) == 1: - logger.info("1 USB device connected") - self.device = usb_devices[0] - if exit: - self.submission.exit_gracefully(ExportStatus.USB_CONNECTED.value) - elif len(usb_devices) > 1: - logger.info(">1 USB devices connected") - # Return generic error until freedomofpress/securedrop-export/issues/25 - self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) - - def _get_connected_usbs(self) -> List[str]: - logger.info("Performing usb preflight") - # List all block devices attached to VM that are disks and not partitions. 
- try: - lsblk = subprocess.Popen( - ["lsblk", "-o", "NAME,TYPE"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - grep = subprocess.Popen( - ["grep", "disk"], - stdin=lsblk.stdout, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - command_output = grep.stdout.readlines() - - # The first word in each element of the command_output list is the device name - attached_devices = [x.decode("utf8").split()[0] for x in command_output] - except subprocess.CalledProcessError: - self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) - - # Determine which are USBs by selecting those block devices that are removable disks. - usb_devices = [] - for device in attached_devices: - try: - removable = subprocess.check_output( - ["cat", "/sys/class/block/{}/removable".format(device)], - stderr=subprocess.PIPE, - ) - is_removable = int(removable.decode("utf8").strip()) - except subprocess.CalledProcessError: - is_removable = False - - if is_removable: - usb_devices.append("/dev/{}".format(device)) - - return usb_devices - - def set_extracted_device_name(self): - try: - device_and_partitions = subprocess.check_output( - ["lsblk", "-o", "TYPE", "--noheadings", self.device], - stderr=subprocess.PIPE, - ) - - # we don't support multiple partitions - partition_count = ( - device_and_partitions.decode("utf-8").split("\n").count("part") - ) - if partition_count > 1: - logger.debug("multiple partitions not supported") - self.submission.exit_gracefully( - ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - ) - - # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted - self.device = self.device if partition_count == 0 else self.device + "1" - except subprocess.CalledProcessError: - self.submission.exit_gracefully( - ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value - ) - - def check_luks_volume(self): - # cryptsetup isLuks returns 0 if the device is a luks volume - # subprocess with throw if the device is not luks (rc !=0) - 
logger.info("Checking if volume is luks-encrypted") - self.set_extracted_device_name() - logger.debug("checking if {} is luks encrypted".format(self.device)) - self.submission.safe_check_call( - command=["sudo", "cryptsetup", "isLuks", self.device], - error_message=ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value, - ) - self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTED.value) - - def unlock_luks_volume(self, encryption_key): - try: - # get the encrypted device name - self.set_extracted_device_name() - luks_header = subprocess.check_output( - ["sudo", "cryptsetup", "luksDump", self.device] - ) - luks_header_list = luks_header.decode("utf-8").split("\n") - for line in luks_header_list: - items = line.split("\t") - if "UUID" in items[0]: - self.encrypted_device = "luks-" + items[1] - - # the luks device is already unlocked - if os.path.exists(os.path.join("/dev/mapper/", self.encrypted_device)): - logger.debug("Device already unlocked") - return - - logger.debug("Unlocking luks volume {}".format(self.encrypted_device)) - p = subprocess.Popen( - ["sudo", "cryptsetup", "luksOpen", self.device, self.encrypted_device], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - logger.debug("Passing key") - p.communicate(input=str.encode(encryption_key, "utf-8")) - rc = p.returncode - if rc != 0: - logger.error("Bad phassphrase for {}".format(self.encrypted_device)) - self.submission.exit_gracefully(ExportStatus.USB_BAD_PASSPHRASE.value) - except subprocess.CalledProcessError: - self.submission.exit_gracefully(ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED) - - def mount_volume(self): - # If the drive is already mounted then we don't need to mount it again - output = subprocess.check_output( - ["lsblk", "-o", "MOUNTPOINT", "--noheadings", self.device] - ) - mountpoint = output.decode("utf-8").strip() - if mountpoint: - logger.debug("The device is already mounted") - self.mountpoint = mountpoint - return - - # mount target not created, create 
folder - if not os.path.exists(self.mountpoint): - self.submission.safe_check_call( - command=["sudo", "mkdir", self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT, - ) - - mapped_device_path = os.path.join("/dev/mapper/", self.encrypted_device) - logger.info("Mounting {}".format(mapped_device_path)) - self.submission.safe_check_call( - command=["sudo", "mount", mapped_device_path, self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT.value, - ) - self.submission.safe_check_call( - command=["sudo", "chown", "-R", "user:user", self.mountpoint], - error_message=ExportStatus.ERROR_USB_MOUNT.value, - ) - - def copy_submission(self): - # move files to drive (overwrites files with same filename) and unmount drive - # we don't use safe_check_call here because we must lock and - # unmount the drive as part of the finally block - try: - target_path = os.path.join(self.mountpoint, self.submission.target_dirname) - subprocess.check_call(["mkdir", target_path]) - export_data = os.path.join(self.submission.tmpdir, "export_data/") - logger.info("Copying file to {}".format(self.submission.target_dirname)) - subprocess.check_call(["cp", "-r", export_data, target_path]) - logger.info( - "File copied successfully to {}".format(self.submission.target_dirname) - ) - except (subprocess.CalledProcessError, OSError): - self.submission.exit_gracefully(ExportStatus.ERROR_USB_WRITE.value) - finally: - logger.info("Syncing filesystems") - subprocess.check_call(["sync"]) - - if os.path.exists(self.mountpoint): - logger.info("Unmounting drive from {}".format(self.mountpoint)) - subprocess.check_call(["sudo", "umount", self.mountpoint]) - - if os.path.exists(os.path.join("/dev/mapper", self.encrypted_device)): - logger.info("Locking luks volume {}".format(self.encrypted_device)) - subprocess.check_call( - ["sudo", "cryptsetup", "luksClose", self.encrypted_device] - ) - - logger.info( - "Deleting temporary directory {}".format(self.submission.tmpdir) - ) - 
subprocess.check_call(["rm", "-rf", self.submission.tmpdir]) - sys.exit(0) - - -class USBTestAction(DiskAction): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self): - logger.info("Export archive is usb-test") - self.check_usb_connected(exit=True) - - -class DiskTestAction(DiskAction): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self): - logger.info("Export archive is disk-test") - # check_usb_connected looks for the drive, sets the drive to use - self.check_usb_connected() - self.check_luks_volume() - - -class DiskExportAction(DiskAction): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self): - logger.info("Export archive is disk") - # check_usb_connected looks for the drive, sets the drive to use - self.check_usb_connected() - logger.info("Unlocking volume") - # exports all documents in the archive to luks-encrypted volume - self.unlock_luks_volume(self.submission.archive_metadata.encryption_key) - logger.info("Mounting volume") - self.mount_volume() - logger.info("Copying submission to drive") - self.copy_submission() diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py new file mode 100644 index 000000000..792abe8f7 --- /dev/null +++ b/securedrop_export/disk/cli.py @@ -0,0 +1,380 @@ +import datetime +import json +import logging +import os +import shutil +import subprocess +import tempfile +import sys + +from typing import List, Optional + +from .volume import EncryptionScheme, Volume +from .exceptions import ExportException +from .status import Status + +logger = logging.getLogger(__name__) + + +class CLI: + """ + A Python wrapper for various shell commands required to detect, map, and + mount Export devices. 
+ """ + + # Default mountpoint (unless drive is already mounted manually by the user) + _DEFAULT_MOUNTPOINT = "/media/usb" + + def get_connected_devices(self) -> List[str]: + """ + List all block devices attached to VM that are disks and not partitions. + Return list of all removable connected block devices. + + Raise ExportException if any commands fail. + """ + try: + lsblk = subprocess.Popen( + ["lsblk", "-o", "NAME,TYPE"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + grep = subprocess.Popen( + ["grep", "disk"], stdin=lsblk.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + command_output = grep.stdout.readlines() + + # The first word in each element of the command_output list is the device name + attached_devices = [x.decode("utf8").split()[0] for x in command_output] + + except subprocess.CalledProcessError as ex: + raise ExportException(status=Status.DEVICE_ERROR) from ex + + # Determine which are USBs by selecting those block devices that are removable disks. + usb_devices = [] + for device in attached_devices: + is_removable = False + try: + removable = subprocess.check_output( + ["cat", f"/sys/class/block/{device}/removable"], stderr=subprocess.PIPE + ) + + # 0 for non-removable device, 1 for removable + is_removable = int(removable.decode("utf8").strip()) + + except subprocess.CalledProcessError: + # Not a removable device + continue + + if is_removable: + usb_devices.append(f"/dev/{device}") + + return usb_devices + + def get_partitioned_device(self, blkid: str) -> str: + """ + Given a string representing a block device, return string that includes correct partition + (such as "/dev/sda" or "/dev/sda1"). + + Raise ExportException if partition check fails or device has unsupported partition scheme + (currently, multiple partitions are unsupported). 
+ """ + try: + + device_and_partitions = subprocess.check_output( + ["lsblk", "-o", "TYPE", "--noheadings", blkid], stderr=subprocess.PIPE + ) + + if device_and_partitions: + partition_count = device_and_partitions.decode("utf-8").split("\n").count("part") + if partition_count > 1: + # We don't currently support devices with multiple partitions + logger.error( + f"Multiple partitions not supported (found {partition_count} partitions on {blkid})" + ) + raise ExportException(status=Status.INVALID_DEVICE_DETECTED) + + # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted + if partition_count == 1: + blkid += "1" + + return blkid + + else: + # lsblk did not return output we could process + raise ExportException(status=Status.DEVICE_ERROR) + + except subprocess.CalledProcessError as ex: + logger.error(f"Error checking block device {blkid}") + raise ExportException(status=Status.DEVICE_ERROR) from ex + + def is_luks_volume(self, device: str) -> bool: + """ + Given a string representing a volume (/dev/sdX or /dev/sdX1), return True if volume is + LUKS-encrypted, otherwise False. + """ + isLuks = False + + try: + logger.debug(f"Checking if {device} is luks encrypted") + + # cryptsetup isLuks returns 0 if the device is a luks volume + # subprocess will throw if the device is not luks (rc !=0) + subprocess.check_call(["sudo", "cryptsetup", "isLuks", device]) + + # Status.LEGACY_USB_ENCRYPTED + isLuks = True + + except subprocess.CalledProcessError as ex: + # Not necessarily an error state, just means the volume is not LUKS encrypted + logger.debug(f"{device} is not LUKS-encrypted") + + return isLuks + + def _get_luks_name_from_headers(self, device: str) -> str: + """ + Dump LUKS header and determine name of volume. + + Raise ExportException if errors encountered during attempt to parse LUKS headers. 
+ """ + try: + luks_header = subprocess.check_output(["sudo", "cryptsetup", "luksDump", device]) + if luks_header: + luks_header_list = luks_header.decode("utf-8").split("\n") + for line in luks_header_list: + items = line.split("\t") + if "UUID" in items[0]: + return "luks-" + items[1] + else: + logger.error( + f"Failed to dump LUKS headers; {device} may not be correctly formatted" + ) + raise ExportException(status=Status.INVALID_DEVICE_DETECTED) + except subprocess.CalledProcessError as ex: + raise ExportException(status=Status.DEVICE_ERROR) from ex + + def get_luks_volume(self, device: str) -> Volume: + """ + Given a string corresponding to a LUKS-partitioned volume, return a corresponding Volume + object. + + If LUKS volume is already mounted, existing mountpoint will be preserved. + If LUKS volume is unlocked but not mounted, volume will be mounted at _DEFAULT_MOUNTPOINT. + + If device is still locked, mountpoint will not be set. Once the decryption passphrase is + available, call unlock_luks_volume(), passing the Volume object and passphrase, to + unlock the volume. + + Raise ExportException if errors are encountered. + """ + try: + mapped_name = self._get_luks_name_from_headers(device) + + # Setting the mapped_name does not mean the device has already been unlocked. + luks_volume = Volume( + device_name=device, mapped_name=mapped_name, encryption=EncryptionScheme.LUKS + ) + + # If the device has been unlocked, we can see if it's mounted and + # use the existing mountpoint, or mount it ourselves. + if os.path.exists(os.path.join("/dev/mapper/", mapped_name)): + return self.mount_volume(luks_volume) + + # It's still locked + else: + return luks_volume + + except ExportException: + logger.error("Failed to return luks volume") + raise + + def unlock_luks_volume(self, volume: Volume, decryption_key: str) -> Volume: + """ + Unlock a LUKS-encrypted volume. + + Raise ExportException if errors are encountered during device unlocking. 
+ """ + if not volume.encryption is EncryptionScheme.LUKS: + logger.error("Must call unlock_luks_volume() on LUKS-encrypted device") + raise ExportException(Status.DEVICE_ERROR) + + try: + logger.debug("Unlocking luks volume {}".format(volume.device_name)) + p = subprocess.Popen( + ["sudo", "cryptsetup", "luksOpen", volume.device_name, volume.mapped_name], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + logger.debug("Passing key") + p.communicate(input=str.encode(decryption_key, "utf-8")) + rc = p.returncode + + if rc == 0: + return Volume( + device_name=volume.device_name, mapped_name=volume.mapped_name, encryption=EncryptionScheme.LUKS + ) + else: + logger.error("Bad volume passphrase") + raise ExportException(Status.ERROR_UNLOCK_LUKS) + + except subprocess.CalledProcessError as ex: + raise ExportException(Status.DEVICE_ERROR) from ex + + def _get_mountpoint(self, volume: Volume) -> Optional[str]: + """ + Check for existing mountpoint. + Raise ExportException if errors encountered during command. + """ + try: + output = subprocess.check_output( + ["lsblk", "-o", "MOUNTPOINT", "--noheadings", volume.device_name] + ) + return output.decode("utf-8").strip() + + except subprocess.CalledProcessError as ex: + logger.error(ex) + raise ExportException(Status.ERROR_MOUNT) from ex + + def mount_volume(self, volume: Volume) -> Volume: + """ + Given an unlocked LUKS volume, return a mounted LUKS volume. + + If volume is already mounted, mountpoint is not changed. Otherwise, + volume is mounted at _DEFAULT_MOUNTPOINT. + + Raises ExportException if errors are encountered during mounting. + """ + if not volume.unlocked: + raise ExportException("Unlock volume before mounting") + + mountpoint = self._get_mountpoint(volume) + + if mountpoint: + logger.debug("The device is already mounted") + if volume.mountpoint is not mountpoint: + # This should not happen, but if a user edits their veracrypt drive mountpoint on the fly. 
+ logger.warning(f"Mountpoint was inaccurate, updating") + + volume.mountpoint = mountpoint + + else: + if not os.path.exists(_DEFAULT_MOUNTPOINT): + try: + subprocess.check_call(["sudo", "mkdir", _DEFAULT_MOUNTPOINT]) + except subprocess.CalledProcessError as ex: + logger.error(ex) + raise ExportException(Status.ERROR_MOUNT) from ex + + # Mount device /dev/mapper/{mapped_name} at /media/usb/ + mapped_device_path = os.path.join(device.MAPPED_VOLUME_PREFIX, device.mapped_name) + + try: + logger.debug(f"Mounting volume {volume.device_name} at {_DEFAULT_MOUNTPOINT}") + subprocess.check_call(["sudo", "mount", mapped_device_path, _DEFAULT_MOUNTPOINT]) + subprocess.check_call(["sudo", "chown", "-R", "user:user", _DEFAULT_MOUNTPOINT]) + + volume.mountpoint = _DEFAULT_MOUNTPOINT + + except subprocess.CalledProcessError as ex: + logger.error(ex) + raise ExportException(Status.ERROR_MOUNT) from ex + + return volume + + def write_data_to_device( + self, submission_tmpdir: str, submission_target_dirname: str, device: Volume + ): + """ + Move files to drive (overwrites files with same filename) and unmount drive. + Drive is unmounted and files are cleaned up as part of the `finally` block to ensure + that cleanup happens even if export fails or only partially succeeds. + + The calling method *must* handle ExportException and exit with sys.exit(0) so that + another program does not attempt to open the submission. 
+ """ + + try: + target_path = os.path.join(device.mountpoint, submission_target_dirname) + subprocess.check_call(["mkdir", target_path]) + + export_data = os.path.join(submission_tmpdir, "export_data/") + logger.info("Copying file to {}".format(submission_target_dirname)) + + subprocess.check_call(["cp", "-r", export_data, target_path]) + logger.info("File copied successfully to {}".format(submission_target_dirname)) + + except (subprocess.CalledProcessError, OSError) as ex: + raise ExportException(status=Status.ERROR_EXPORT) from ex + + finally: + self.cleanup_drive_and_tmpdir(device, submission_tmpdir) + + def cleanup_drive_and_tmpdir(self, volume: Volume, submission_tmpdir: str): + """ + Post-export cleanup method. Unmount and lock drive and remove temporary + directory. Currently called at end of `write_data_to_device()` to ensure + device is always locked after export. + + Raise ExportException if errors during cleanup are encountered. + """ + logger.info("Syncing filesystems") + try: + subprocess.check_call(["sync"]) + umounted = self._unmount_volume(volume) + if umounted: + self._close_luks_volume(umounted) + self._remove_temp_directory(submission_tmpdir) + + except subprocess.CalledProcessError as ex: + logger.error("Error syncing filesystem") + raise ExportException(Status.ERROR_EXPORT_CLEANUP) from ex + + def _unmount_volume(self, volume: Volume) -> Volume: + """ + Helper. Unmount volume + """ + if os.path.exists(volume.mountpoint): + logger.debug(f"Unmounting drive from {volume.mountpoint}") + try: + subprocess.check_call(["sudo", "umount", volume.mountpoint]) + volume.mountpoint = None + + except subprocess.CalledProcessError as ex: + logger.error("Error unmounting device") + raise ExportException(Status.ERROR_MOUNT) from ex + else: + logger.info("Mountpoint does not exist; volume was already unmounted") + + def _close_luks_volume(self, unlocked_device: Volume) -> None: + """ + Helper. 
Close LUKS volume + """ + if os.path.exists(os.path.join("/dev/mapper", unlocked_device.mapped_name)): + logger.debug("Locking luks volume {}".format(unlocked_device)) + try: + subprocess.check_call( + ["sudo", "cryptsetup", "luksClose", unlocked_device.mapped_name] + ) + + except subprocess.CalledProcessError as ex: + logger.error("Error closing device") + raise ExportException(Status.DEVICE_ERROR) from ex + + def _remove_temp_directory(self, tmpdir: str): + """ + Helper. Remove temporary directory used during archive export. + """ + logger.debug(f"Deleting temporary directory {tmpdir}") + try: + subprocess.check_call(["rm", "-rf", tmpdir]) + except subprocess.CalledProcessError as ex: + logger.error("Error removing temporary directory") + raise ExportException(Status.DEVICE_ERROR) from ex + + def write_status(self, status: Status): + """ + Write string to stdout. + """ + if status: + sys.stdout.write(status.value) + sys.stdout.write("\n") + else: + logger.warning("No status value supplied") diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py new file mode 100644 index 000000000..23fb10145 --- /dev/null +++ b/securedrop_export/disk/service.py @@ -0,0 +1,94 @@ +import logging +import os +import subprocess +import sys + +from typing import List + +from securedrop_export.export import Archive +from securedrop_export.exceptions import ExportException + +from .cli import CLI +from .status import Status + +logger = logging.getLogger(__name__) + + +class Service(): + + def __init__(self, submission): + self.submission = submission + self.cli = CLI() + + def usb_test(self): + """ + Check if single USB is inserted. 
+ """ + logger.info("Export archive is usb-test") + status = Status.LEGACY_ERROR_GENERIC + + try: + all_devices = self.cli.get_connected_devices() + num_devices = len(all_devices) + + if num_devices == 0: + raise ExportException(Status.LEGACY_USB_NOT_CONNECTED) + elif num_devices == 1: + status = Status.LEGACY_USB_CONNECTED + elif num_devices > 1: + raise ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + + except ExportException: + raise + + + def disk_format_test(self): + """ + Check if volume is correctly formatted for export. + """ + try: + all_devices = self.cli.get_connected_devices() + + if len(all_devices) == 1: + device = self.cli.get_partitioned_device(all_devices) + if self.cli.is_luks_volume(device): + status = Status.LEGACY_USB_ENCRYPTED + else: + raise ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + + except ExportException: + raise + + + def export(self): + """ + Export all files to target device. + """ + logger.info("Export archive is disk") + + try: + all_devices = self.cli.get_connected_devices() + + if len(all_devices) == 1: + device = self.cli.get_partitioned_device(all_devices[0]) + + # Decide what kind of volume it is + if self.cli.is_luks_volume(device): + volume = self.cli.get_luks_volume(device) + if not volume.writable: + unlocked = self.cli.unlock_luks_volume( + volume, self.submission.archive_metadata.encryption_key + ) + mounted = self.cli.mount_volume(unlocked) + + logger.debug(f"Export submission to {mounted.mountpoint}") + self.cli.write_data_to_device(self.submission.tmpdir, self.submission.target_dirname, mounted) + + else: + # Another kind of drive: VeraCrypt/TC, or unsupported + logger.error(f"Export failed because {device} is not supported") + raise ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + + except ExportException as ex: + raise + diff --git a/securedrop_export/disk/status.py b/securedrop_export/disk/status.py new file mode 100644 index 000000000..d8472642b --- /dev/null +++ 
b/securedrop_export/disk/status.py @@ -0,0 +1,22 @@ +from securedrop_export.exceptions import ExportEnum + +class Status(ExportEnum): + + LEGACY_ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" + LEGACY_ERROR_EXTRACTION = "ERROR_EXTRACTION" + LEGACY_ERROR_METADATA_PARSING = "ERROR_METADATA_PARSING" + LEGACY_ERROR_ARCHIVE_METADATA = "ERROR_ARCHIVE_METADATA" + LEGACY_ERROR_GENERIC = "ERROR_GENERIC" + + # Legacy USB preflight related + LEGACY_USB_CONNECTED = "USB_CONNECTED" + LEGACY_ERROR_USB_CHECK = "ERROR_USB_CHECK" + + # Legacy USB Disk preflight related errors + LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" + LEGACY_USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" + + # Legacy Disk export errors + LEGACY_USB_BAD_PASSPHRASE = "USB_BAD_PASSPHRASE" + LEGACY_ERROR_USB_MOUNT = "ERROR_USB_MOUNT" + LEGACY_ERROR_USB_WRITE = "ERROR_USB_WRITE" diff --git a/securedrop_export/disk/volume.py b/securedrop_export/disk/volume.py new file mode 100644 index 000000000..a3049e16e --- /dev/null +++ b/securedrop_export/disk/volume.py @@ -0,0 +1,57 @@ +from enum import Enum +import os + + +class EncryptionScheme(Enum): + """ + Supported encryption schemes. + """ + + UNKNOWN = 0 + LUKS = 1 + + +class Volume: + + MAPPED_VOLUME_PREFIX = "/dev/mapper/" + + """ + A volume on a removable device. + Volumes have a device name ("/dev/sdX"), a mapped name ("/dev/mapper/xxx"), an encryption scheme, + and a mountpoint if they are mounted. 
+ """ + + def __init__( + self, + device_name: str, + mapped_name: str, + encryption: EncryptionScheme, + mountpoint: str = None, + ): + self.device_name = device_name + self.mapped_name = mapped_name + self.mountpoint = mountpoint + self.encryption = encryption + + @property + def encryption(self): + return self._encryption + + @encryption.setter + def encryption(self, scheme: EncryptionScheme): + if scheme: + self._encryption = scheme + else: + self._encryption = EncryptionScheme.UNKNOWN + + @property + def writable(self) -> bool: + return self.unlocked and self.mountpoint is not None + + @property + def unlocked(self) -> bool: + return ( + self.mapped_name is not None + and self.encryption is not EncryptionScheme.UNKNOWN + and os.path.exists(os.path.join(self.MAPPED_VOLUME_PREFIX, self.mapped_name)) + ) diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index 3bb86baad..5ed1dd7f4 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -28,9 +28,7 @@ def configure_logging(): log_file = os.path.join(DEFAULT_HOME, LOG_DIR_NAME, EXPORT_LOG_FILENAME) # set logging format - log_fmt = ( - "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s" - ) + log_fmt = "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s" formatter = logging.Formatter(log_fmt) handler = TimedRotatingFileHandler(log_file) diff --git a/securedrop_export/enums.py b/securedrop_export/enums.py new file mode 100644 index 000000000..7396b3adf --- /dev/null +++ b/securedrop_export/enums.py @@ -0,0 +1,40 @@ +from enum import Enum +from typing import TypeVar, Type + +T = TypeVar('T', bound=ExportEnum) + +class ExportEnum(Enum): + """ + Parent class for export and print statuses. 
+ """ + @classmethod + def value_of(cls: Type[T], target: str) -> T: + for key, value in cls.__members__.items(): + if key == target: + return value + # Don't print the value since we don't know what it is + raise ValueError("No valid entry found for provided value") + + +class Command(ExportEnum): + """ + All supported commands. + + Values are as supplied by the calling VM (sd-app), and a change in any values require + corresponding changes in the calling VM. + """ + PRINTER_PREFLIGHT = "printer-preflight" + PRINTER_TEST = "printer-test" + PRINT = "printer" + CHECK_USBS = "usb-test" + CHECK_VOLUME = "disk-test" + EXPORT = "disk" + START_VM = "" + + @classmethod + def printer_actions(cls): + return (cls.PRINTER_PREFLIGHT, cls.PRINTER_TEST, cls.PRINT) + + @classmethod + def export_actions(cls): + return (cls.EXPORT, cls.CHECK_USBS, cls.CHECK_VOLUME) diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index 11855c0ff..282844b31 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -1,43 +1,33 @@ -from enum import Enum +import logging +from typing import Optional -class ExportStatus(Enum): +from .enums import ExportEnum - # General errors - ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" - ERROR_EXTRACTION = "ERROR_EXTRACTION" - ERROR_METADATA_PARSING = "ERROR_METADATA_PARSING" - ERROR_ARCHIVE_METADATA = "ERROR_ARCHIVE_METADATA" - ERROR_USB_CONFIGURATION = "ERROR_USB_CONFIGURATION" - ERROR_GENERIC = "ERROR_GENERIC" +logger = logging.getLogger(__name__) - # USB preflight related errors - USB_CONNECTED = "USB_CONNECTED" - USB_NOT_CONNECTED = "USB_NOT_CONNECTED" - ERROR_USB_CHECK = "ERROR_USB_CHECK" - # USB Disk preflight related errors - USB_ENCRYPTED = "USB_ENCRYPTED" - USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" - USB_DISK_ERROR = "USB_DISK_ERROR" - - # Printer preflight related errors - ERROR_MULTIPLE_PRINTERS_FOUND = "ERROR_MULTIPLE_PRINTERS_FOUND" - ERROR_PRINTER_NOT_FOUND = 
"ERROR_PRINTER_NOT_FOUND" - ERROR_PRINTER_NOT_SUPPORTED = "ERROR_PRINTER_NOT_SUPPORTED" - ERROR_PRINTER_DRIVER_UNAVAILABLE = "ERROR_PRINTER_DRIVER_UNAVAILABLE" - ERROR_PRINTER_INSTALL = "ERROR_PRINTER_INSTALL" +class ExportException(Exception): + """ + Base class for exceptions encountered during export. + """ - # Disk export errors - USB_BAD_PASSPHRASE = "USB_BAD_PASSPHRASE" - ERROR_USB_MOUNT = "ERROR_USB_MOUNT" - ERROR_USB_WRITE = "ERROR_USB_WRITE" + def __init__(self, *args, **kwargs): + super().__init__(*args) + self._status = kwargs.get("status") - # Printer export errors - ERROR_PRINT = "ERROR_PRINT" + @property + def status(self) -> Optional[ExportEnum]: + try: + return ExportEnum.value_of(self._status) + except ValueError: + logger.error( + "Unexpected value passed to ExportException (ExportEnum is required)." + ) + pass # Don't return a status -class TimeoutException(Exception): +class TimeoutException(ExportException): pass diff --git a/securedrop_export/export.py b/securedrop_export/export.py deleted file mode 100755 index 02d12c642..000000000 --- a/securedrop_export/export.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/env python3 - -import abc -import datetime -import json -import logging -import os -import shutil -import subprocess -import sys -import tempfile - -from securedrop_export.exceptions import ExportStatus -from securedrop_export.utils import safe_extractall - -logger = logging.getLogger(__name__) - - -class Metadata(object): - """ - Object to parse, validate and store json metadata from the sd-export archive. 
- """ - - METADATA_FILE = "metadata.json" - SUPPORTED_EXPORT_METHODS = [ - "start-vm", - "usb-test", # general preflight check - "disk", - "disk-test", # disk preflight test - "printer", - "printer-test", # print test page - "printer-preflight", - ] - SUPPORTED_ENCRYPTION_METHODS = ["luks"] - - def __init__(self, archive_path): - self.metadata_path = os.path.join(archive_path, self.METADATA_FILE) - - try: - with open(self.metadata_path) as f: - logger.info("Parsing archive metadata") - json_config = json.loads(f.read()) - self.export_method = json_config.get("device", None) - self.encryption_method = json_config.get("encryption_method", None) - self.encryption_key = json_config.get("encryption_key", None) - logger.info( - "Exporting to device {} with encryption_method {}".format( - self.export_method, self.encryption_method - ) - ) - - except Exception: - logger.error("Metadata parsing failure") - raise - - def is_valid(self): - logger.info("Validating metadata contents") - if self.export_method not in self.SUPPORTED_EXPORT_METHODS: - logger.error( - "Archive metadata: Export method {} is not supported".format( - self.export_method - ) - ) - return False - - if self.export_method == "disk": - if self.encryption_method not in self.SUPPORTED_ENCRYPTION_METHODS: - logger.error( - "Archive metadata: Encryption method {} is not supported".format( - self.encryption_method - ) - ) - return False - return True - - -class SDExport(object): - def __init__(self, archive, config_path): - os.umask(0o077) - self.archive = archive - self.submission_dirname = os.path.basename(self.archive).split(".")[0] - self.target_dirname = "sd-export-{}".format( - datetime.datetime.now().strftime("%Y%m%d-%H%M%S") - ) - self.tmpdir = tempfile.mkdtemp() - - def extract_tarball(self): - try: - logger.info( - "Extracting tarball {} into {}".format(self.archive, self.tmpdir) - ) - safe_extractall(self.archive, self.tmpdir) - except Exception as ex: - logger.error("Unable to extract tarball: 
{}".format(ex)) - self.exit_gracefully(ExportStatus.ERROR_EXTRACTION.value) - - def exit_gracefully(self, msg, e=False): - """ - Utility to print error messages, mostly used during debugging, - then exits successfully despite the error. Always exits 0, - since non-zero exit values will cause system to try alternative - solutions for mimetype handling, which we want to avoid. - """ - logger.info("Exiting with message: {}".format(msg)) - if e: - logger.error("Captured exception output: {}".format(e.output)) - try: - # If the file archive was extracted, delete before returning - if os.path.isdir(self.tmpdir): - shutil.rmtree(self.tmpdir) - # Do this after deletion to avoid giving the client two error messages in case of the - # block above failing - sys.stderr.write(msg) - sys.stderr.write("\n") - except Exception as ex: - logger.error("Unhandled exception: {}".format(ex)) - sys.stderr.write(ExportStatus.ERROR_GENERIC.value) - # exit with 0 return code otherwise the os will attempt to open - # the file with another application - sys.exit(0) - - def safe_check_call(self, command, error_message, ignore_stderr_startswith=None): - """ - Safely wrap subprocess.check_output to ensure we always return 0 and - log the error messages - """ - try: - err = subprocess.run(command, check=True, capture_output=True).stderr - # ppdc and lpadmin may emit warnings we are aware of which should not be treated as - # user facing errors - if ignore_stderr_startswith and err.startswith(ignore_stderr_startswith): - logger.info("Encountered warning: {}".format(err.decode("utf-8"))) - elif err == b"": - # Nothing on stderr and returncode is 0, we're good - pass - else: - self.exit_gracefully(msg=error_message, e=err) - except subprocess.CalledProcessError as ex: - self.exit_gracefully(msg=error_message, e=ex.output) - - -class ExportAction(abc.ABC): - """ - This export interface defines the method that export - methods should implement. 
- """ - - @abc.abstractmethod - def run(self) -> None: - """Run logic""" - pass diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 042c0cd9b..27600f87d 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -1,18 +1,18 @@ import logging +import sys from securedrop_export import export -from securedrop_export.exceptions import ExportStatus -from securedrop_export.print.actions import ( - PrintExportAction, - PrintTestPageAction, - PrintPreflightAction, -) +from securedrop_export.exceptions import Command, ExportStatus, PrintStatus + from securedrop_export.disk.actions import ( DiskTestAction, DiskExportAction, USBTestAction, ) +from securedrop_export.disk.service import Service as ExportService +from securedrop_export.print.service import Service as PrintService + logger = logging.getLogger(__name__) @@ -22,25 +22,72 @@ def __main__(submission): try: submission.archive_metadata = export.Metadata(submission.tmpdir) except Exception: - submission.exit_gracefully(ExportStatus.ERROR_METADATA_PARSING.value) + exit_gracefully(submission, ExportStatus.ERROR_METADATA_PARSING) if not submission.archive_metadata.is_valid(): - submission.exit_gracefully(ExportStatus.ERROR_ARCHIVE_METADATA.value) - - if submission.archive_metadata.export_method == "start-vm": - submission.exit_gracefully("") - - if submission.archive_metadata.export_method == "usb-test": - action = USBTestAction(submission) - elif submission.archive_metadata.export_method == "disk": - action = DiskExportAction(submission) - elif submission.archive_metadata.export_method == "disk-test": - action = DiskTestAction(submission) - elif submission.archive_metadata.export_method == "printer-preflight": - action = PrintPreflightAction(submission) - elif submission.archive_metadata.export_method == "printer": - action = PrintExportAction(submission) - elif submission.archive_metadata.export_method == "printer-test": - action = PrintTestPageAction(submission) - - action.run() + 
exit_gracefully(submission, ExportStatus.ERROR_ARCHIVE_METADATA) + + try: + command = Command.value_of(submission.archive_metadata.export_method) + + if command is Command.START_VM: + # No further operations + exit_gracefully(submission, command) + else: + status = None + try: + if command in Command.printer_actions(): + service = ExportService(submission) + status = service.run(command) + + elif command in Command.export_actions(): + service = PrintService(submission) + status = service.run(command) + + except ExportException as ex: + if ex.status: + status = ex.status + + finally: + exit_gracefully(submission, status) + + except ValueError: + # An unsupported command was sent from the calling VM + logger.error("Unsuported command, exiting") + exit_gracefully(submission) + + +def exit_gracefully(submission: SDExport, status: Status=None, e=None): + """ + Utility to print error messages, mostly used during debugging, + then exits successfully despite the error. Always exits 0, + since non-zero exit values will cause system to try alternative + solutions for mimetype handling, which we want to avoid. + """ + logger.info("Exiting with message: {}".format(msg)) + if e: + logger.error("Captured exception output: {}".format(e.output)) + try: + # If the file archive was extracted, delete before returning + if os.path.isdir(submission.tmpdir): + shutil.rmtree(submission.tmpdir) + # Do this after deletion to avoid giving the client two error messages in case of the + # block above failing + write_status(status) + except Exception as ex: + logger.error("Unhandled exception: {}".format(ex)) + write_status(ExportStatus.LEGACY_ERROR_GENERIC) + # exit with 0 return code otherwise the os will attempt to open + # the file with another application + sys.exit(0) + + +def _write_status(self, status: Status): + """ + Write string to stderr. 
+ """ + if status: + sys.stderr.write(status.value) + sys.stderr.write("\n") + else: + logger.info("No status value supplied") diff --git a/securedrop_export/print/actions.py b/securedrop_export/print/service.py similarity index 58% rename from securedrop_export/print/actions.py rename to securedrop_export/print/service.py index 78a5e3aa3..98af6a0b1 100644 --- a/securedrop_export/print/actions.py +++ b/securedrop_export/print/service.py @@ -3,10 +3,10 @@ import signal import subprocess import time +from enum import Enum -from securedrop_export.exceptions import ExportStatus, handler, TimeoutException -from securedrop_export.export import ExportAction - +from .exceptions import ExportStatus, handler, TimeoutException +from .status import Status PRINTER_NAME = "sdw-printer" PRINTER_WAIT_TIMEOUT = 60 @@ -18,27 +18,55 @@ logger = logging.getLogger(__name__) -class PrintAction(ExportAction): +class Service(): + """ + Printer service + """ + def __init__(self, submission): self.submission = submission self.printer_name = PRINTER_NAME self.printer_wait_timeout = PRINTER_WAIT_TIMEOUT - def run(self) -> None: - """Run logic""" - raise NotImplementedError - - def wait_for_print(self): - # use lpstat to ensure the job was fully transfered to the printer - # returns True if print was successful, otherwise will throw exceptions + def print(self): + """ + Routine to print all files. + Throws ExportException if an error is encountered. + """ + logger.info("Printing all files from archive") + self._check_printer_setup() + self._print_all_files() + + def printer_preflight(self): + """ + Routine to perform preflight printer testing. + + Throws ExportException if an error is encoutered. + """ + logger.info("Running printer preflight") + self._check_printer_setup() + + def printer_test(self): + """ + Routine to print a test page. + + Throws ExportException if an error is encountered. 
+ """ + logger.info("Printing test page") + self._check_printer_setup() + self._print_test_page() + + def _wait_for_print(self): + """ + Use lpstat to ensure the job was fully transfered to the printer + Return True if print was successful, otherwise throw ExportException. + """ signal.signal(signal.SIGALRM, handler) signal.alarm(self.printer_wait_timeout) printer_idle_string = "printer {} is idle".format(self.printer_name) while True: try: - logger.info( - "Running lpstat waiting for printer {}".format(self.printer_name) - ) + logger.info("Running lpstat waiting for printer {}".format(self.printer_name)) output = subprocess.check_output(["lpstat", "-p", self.printer_name]) if printer_idle_string in output.decode("utf-8"): logger.info("Print completed") @@ -46,52 +74,54 @@ def wait_for_print(self): else: time.sleep(5) except subprocess.CalledProcessError: - self.submission.exit_gracefully(ExportStatus.ERROR_PRINT.value) + raise ExportException(Status.ERROR_PRINT) except TimeoutException: logger.error("Timeout waiting for printer {}".format(self.printer_name)) - self.submission.exit_gracefully(ExportStatus.ERROR_PRINT.value) + raise ExportException(Status.ERROR_PRINT) return True - def check_printer_setup(self) -> None: + def _check_printer_setup(self) -> None: + """ + Check printer setup. + Raise ExportException if supported setup is not found. 
+ """ try: logger.info("Searching for printer") output = subprocess.check_output(["sudo", "lpinfo", "-v"]) printers = [x for x in output.decode("utf-8").split() if "usb://" in x] if not printers: logger.info("No usb printers connected") - self.submission.exit_gracefully( - ExportStatus.ERROR_PRINTER_NOT_FOUND.value - ) + raise ExportException(Status.ERROR_PRINTER_NOT_FOUND) supported_printers = [ p for p in printers if any(sub in p for sub in ("Brother", "LaserJet")) ] if not supported_printers: logger.info("{} are unsupported printers".format(printers)) - self.submission.exit_gracefully( - ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value - ) + raise ExportException(Status.ERROR_PRINTER_NOT_SUPPORTED) if len(supported_printers) > 1: logger.info("Too many usb printers connected") - self.submission.exit_gracefully( - ExportStatus.ERROR_MULTIPLE_PRINTERS_FOUND.value - ) + raise ExportException(Status.ERROR_MULTIPLE_PRINTERS_FOUND) printer_uri = printers[0] - printer_ppd = self.install_printer_ppd(printer_uri) + printer_ppd = self._install_printer_ppd(printer_uri) self.setup_printer(printer_uri, printer_ppd) except subprocess.CalledProcessError as e: logger.error(e) - self.submission.exit_gracefully(ExportStatus.ERROR_GENERIC.value) + raise ExportException(Status.ERROR_GENERIC) + + def _get_printer_uri(self) -> str: + """ + Get the URI via lpinfo. Only accept URIs of supported printers. - def get_printer_uri(self): - # Get the URI via lpinfo and only accept URIs of supported printers + Raise ExportException if supported setup is not found. 
+ """ printer_uri = "" try: output = subprocess.check_output(["sudo", "lpinfo", "-v"]) except subprocess.CalledProcessError: - self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_URI.value) + raise ExportException(Status.ERROR_PRINTER_URI) # fetch the usb printer uri for line in output.split(): @@ -99,29 +129,23 @@ def get_printer_uri(self): printer_uri = line.decode("utf-8") logger.info("lpinfo usb printer: {}".format(printer_uri)) - # verify that the printer is supported, else exit + # verify that the printer is supported, else throw if printer_uri == "": # No usb printer is connected logger.info("No usb printers connected") - self.submission.exit_gracefully(ExportStatus.ERROR_PRINTER_NOT_FOUND.value) + raise ExportException(Status.ERROR_PRINTER_NOT_FOUND) elif not any(x in printer_uri for x in ("Brother", "LaserJet")): # printer url is a make that is unsupported logger.info("Printer {} is unsupported".format(printer_uri)) - self.submission.exit_gracefully( - ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value - ) + raise ExportException(Status.ERROR_PRINTER_NOT_SUPPORTED) logger.info("Printer {} is supported".format(printer_uri)) return printer_uri def install_printer_ppd(self, uri): if not any(x in uri for x in ("Brother", "LaserJet")): - logger.error( - "Cannot install printer ppd for unsupported printer: {}".format(uri) - ) - self.submission.exit_gracefully( - msg=ExportStatus.ERROR_PRINTER_NOT_SUPPORTED.value - ) + logger.error("Cannot install printer ppd for unsupported printer: {}".format(uri)) + raise ExportException(Status.ERROR_PRINTER_NOT_SUPPORTED) return if "Brother" in uri: @@ -134,7 +158,7 @@ def install_printer_ppd(self, uri): # Compile and install drivers that are not already installed if not os.path.exists(printer_ppd): logger.info("Installing printer drivers") - self.submission.safe_check_call( + self._safe_check_call( command=[ "sudo", "ppdc", @@ -142,16 +166,16 @@ def install_printer_ppd(self, uri): "-d", "/usr/share/cups/model/", ], - 
error_message=ExportStatus.ERROR_PRINTER_DRIVER_UNAVAILABLE.value, + error_message=Status.ERROR_PRINTER_DRIVER_UNAVAILABLE.value, ignore_stderr_startswith=b"ppdc: Warning", ) return printer_ppd - def setup_printer(self, printer_uri, printer_ppd): + def _setup_printer(self, printer_uri, printer_ppd): # Add the printer using lpadmin logger.info("Setting up printer {}".format(self.printer_name)) - self.submission.safe_check_call( + self._safe_check_call( command=[ "sudo", "lpadmin", @@ -165,25 +189,25 @@ def setup_printer(self, printer_uri, printer_ppd): "-u", "allow:user", ], - error_message=ExportStatus.ERROR_PRINTER_INSTALL.value, + error_message=Status.ERROR_PRINTER_INSTALL.value, ignore_stderr_startswith=b"lpadmin: Printer drivers", ) - def print_test_page(self): + def _print_test_page(self): logger.info("Printing test page") - self.print_file("/usr/share/cups/data/testprint") + self._print_file("/usr/share/cups/data/testprint") - def print_all_files(self): + def _print_all_files(self): files_path = os.path.join(self.submission.tmpdir, "export_data/") files = os.listdir(files_path) print_count = 0 for f in files: file_path = os.path.join(files_path, f) - self.print_file(file_path) + self._print_file(file_path) print_count += 1 logger.info("Printing document {} of {}".format(print_count, len(files))) - def is_open_office_file(self, filename): + def _is_open_office_file(self, filename): OPEN_OFFICE_FORMATS = [ ".doc", ".docx", @@ -201,7 +225,7 @@ def is_open_office_file(self, filename): return True return False - def print_file(self, file_to_print): + def _print_file(self, file_to_print): # If the file to print is an (open)office document, we need to call unoconf to # convert the file to pdf as printer drivers do not support this format if self.is_open_office_file(file_to_print): @@ -209,45 +233,36 @@ def print_file(self, file_to_print): folder = os.path.dirname(file_to_print) converted_filename = file_to_print + ".pdf" converted_path = os.path.join(folder, 
converted_filename) - self.submission.safe_check_call( + self.safe_check_call( command=["unoconv", "-o", converted_path, file_to_print], - error_message=ExportStatus.ERROR_PRINT.value, + error_message=Status.ERROR_PRINT.value, ) file_to_print = converted_path logger.info("Sending file to printer {}".format(self.printer_name)) - self.submission.safe_check_call( + + # todo + self._safe_check_call( command=["xpp", "-P", self.printer_name, file_to_print], - error_message=ExportStatus.ERROR_PRINT.value, + error_message=Status.ERROR_PRINT.value, ) -class PrintExportAction(PrintAction): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self): - logger.info("Export archive is printer") - self.check_printer_setup() - # prints all documents in the archive - self.print_all_files() - - -class PrintTestPageAction(PrintAction): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self): - logger.info("Export archive is printer-test") - self.check_printer_setup() - # Prints a test page to ensure the printer is functional - self.print_test_page() - - -class PrintPreflightAction(PrintAction): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self): - logger.info("Export archive is printer-preflight") - self.check_printer_setup() + def _safe_check_call(self, command: str, status: Status, ignore_stderr_startswith=None): + """ + Safely wrap subprocess.check_output to ensure we always return 0 and + log the error messages + """ + try: + err = subprocess.run(command, check=True, capture_output=True).stderr + # ppdc and lpadmin may emit warnings we are aware of which should not be treated as + # user facing errors + if ignore_stderr_startswith and err.startswith(ignore_stderr_startswith): + logger.info("Encountered warning: {}".format(err.decode("utf-8"))) + elif err == b"": + # Nothing on stderr and returncode is 0, we're good + pass + else: + raise ExportException(status=status, 
e=err) + except subprocess.CalledProcessError as ex: + raise ExportException(status=status, e=ex.output) diff --git a/securedrop_export/print/status.py b/securedrop_export/print/status.py new file mode 100644 index 000000000..86e3bf7ff --- /dev/null +++ b/securedrop_export/print/status.py @@ -0,0 +1,13 @@ +from securedrop_export.exceptions import ExportEnum + +class Status(ExportEnum): + + # Printer preflight related errors + ERROR_MULTIPLE_PRINTERS_FOUND = "ERROR_MULTIPLE_PRINTERS_FOUND" + ERROR_PRINTER_NOT_FOUND = "ERROR_PRINTER_NOT_FOUND" + ERROR_PRINTER_NOT_SUPPORTED = "ERROR_PRINTER_NOT_SUPPORTED" + ERROR_PRINTER_DRIVER_UNAVAILABLE = "ERROR_PRINTER_DRIVER_UNAVAILABLE" + ERROR_PRINTER_INSTALL = "ERROR_PRINTER_INSTALL" + + # Printer export errors + ERROR_PRINT = "ERROR_PRINT" diff --git a/securedrop_export/utils.py b/securedrop_export/utils.py index f5e1229ce..a60eca09f 100644 --- a/securedrop_export/utils.py +++ b/securedrop_export/utils.py @@ -101,9 +101,7 @@ def check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: if filename_or_filepath.is_absolute(): base_path = filename_or_filepath else: - base_path = ( - Path.cwd() - ) # use cwd so we can next ensure relative path does not traverse up + base_path = Path.cwd() # use cwd so we can next ensure relative path does not traverse up try: relative_path = relative_filepath(filename_or_filepath, base_path) @@ -112,10 +110,7 @@ def check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: # base, but can still have harmful side effects to the application. If this kind of # traversal is needed, then call relative_filepath instead in order to check that the # desired traversal does not go past a safe base directory. 
- if ( - relative_path != filename_or_filepath - and not filename_or_filepath.is_absolute() - ): + if relative_path != filename_or_filepath and not filename_or_filepath.is_absolute(): raise ValueError except ValueError: raise ValueError(f"Unsafe file or directory name: '{filename_or_filepath}'") @@ -148,6 +143,4 @@ def check_dir_permissions(dir_path: Union[str, Path]) -> None: stat_res = os.stat(dir_path).st_mode masked = stat_res & 0o777 if masked & 0o077: - raise RuntimeError( - "Unsafe permissions ({}) on {}".format(oct(stat_res), dir_path) - ) + raise RuntimeError("Unsafe permissions ({}) on {}".format(oct(stat_res), dir_path)) diff --git a/setup.py b/setup.py index d21514991..e64b673fa 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,5 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", ), - entry_points={ - "console_scripts": ["send-to-usb = securedrop_export.entrypoint:start"] - }, + entry_points={"console_scripts": ["send-to-usb = securedrop_export.entrypoint:start"]}, ) diff --git a/tests/disk/test_actions.py b/tests/disk/test_actions.py index 7d5d24d2a..099836773 100644 --- a/tests/disk/test_actions.py +++ b/tests/disk/test_actions.py @@ -1,215 +1,149 @@ +import pytest from unittest import mock import os import pytest import sys +import tempfile +import subprocess from subprocess import CalledProcessError +from securedrop_export.disk.exceptions import ExportException +from securedrop_export.disk.status import Status + from securedrop_export import export -from securedrop_export.disk.actions import DiskExportAction, DiskTestAction +from securedrop_export.disk.actions import DiskExportAction, DiskTestAction, USBTestAction TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") -SAMPLE_OUTPUT_NO_PART = b"disk\ncrypt" # noqa -SAMPLE_OUTPUT_ONE_PART = b"disk\npart\ncrypt" # noqa -SAMPLE_OUTPUT_MULTI_PART = b"disk\npart\npart\npart\ncrypt" # noqa +SAMPLE_OUTPUT_LSBLK_NO_PART = b"disk\ncrypt" # noqa 
+SAMPLE_OUTPUT_LSBLK_ONE_PART = b"disk\npart\ncrypt" # noqa +SAMPLE_OUTPUT_LSBLK_MULTI_PART = b"disk\npart\npart\npart\ncrypt" # noqa SAMPLE_OUTPUT_USB = b"/dev/sda" # noqa -def test_usb_precheck_disconnected(capsys, mocker): - """Tests the scenario where there are disks connected, but none of them are USB""" - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskTestAction(submission) - expected_message = "USB_NOT_CONNECTED" - assert export.ExportStatus.USB_NOT_CONNECTED.value == expected_message - - # Popen call returns lsblk output - command_output = mock.MagicMock() - command_output.stdout = mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock( - return_value=[b"sda disk\n", b"sdb disk\n"] - ) - mocker.patch("subprocess.Popen", return_value=command_output) - - # check_output returns removable status - mocker.patch("subprocess.check_output", return_value=[b"0\n", b"0\n"]) - - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - mocker.patch( - "subprocess.check_output", side_effect=CalledProcessError(1, "check_output") - ) - - action.check_usb_connected(exit=True) - - mocked_exit.assert_called_once_with(expected_message) - assert action.device is None - - -def test_usb_precheck_connected(capsys, mocker): - """Tests the scenario where there is one USB connected""" - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskTestAction(submission) - - # Popen call returns lsblk output - command_output = mock.MagicMock() - command_output.stdout = mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock(return_value=[b"sdb disk\n"]) - mocker.patch("subprocess.Popen", return_value=command_output) - - # check_output returns removable status - mocker.patch("subprocess.check_output", return_value=b"1\n") - - expected_message = "USB_CONNECTED" - assert export.ExportStatus.USB_CONNECTED.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", 
return_value=0) - - action.check_usb_connected(exit=True) - - mocked_exit.assert_called_once_with(expected_message) - assert action.device == "/dev/sdb" - - -def test_usb_precheck_multiple_devices_connected(capsys, mocker): - """Tests the scenario where there are multiple USB drives connected""" - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskTestAction(submission) - - # Popen call returns lsblk output - command_output = mock.MagicMock() - command_output.stdout = mock.MagicMock() - command_output.stdout.readlines = mock.MagicMock( - return_value=[b"sdb disk\n", b"sdc disk\n"] - ) - mocker.patch("subprocess.Popen", return_value=command_output) - - # check_output returns removable status - mocker.patch("subprocess.check_output", return_value=b"1\n") - - expected_message = "ERROR_GENERIC" - assert export.ExportStatus.ERROR_GENERIC.value == expected_message - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - action.check_usb_connected(exit=True) - - mocked_exit.assert_called_once_with(expected_message) - assert action.device is None - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) -def test_extract_device_name_no_part(mocked_call, capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskExportAction(submission) - - action.device = "/dev/sda" - - action.set_extracted_device_name() - - assert action.device == "/dev/sda" +class TestExportAction: + def _setup_submission(self) -> export.SDExport: + """ + Helper method to set up stub export object + """ + submission = export.SDExport("testfile", TEST_CONFIG) + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write('{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}') + submission.archive_metadata = export.Metadata(temp_folder) -@mock.patch("subprocess.check_output", 
return_value=SAMPLE_OUTPUT_ONE_PART) -def test_extract_device_name_single_part(mocked_call, capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskExportAction(submission) + return submission - action.device = "/dev/sda" + @mock.patch("sys.exit") + @mock.patch("securedrop_export.disk.actions.CLI") + def test_run_usbtestaction(self, mock_cli, mock_sys,): - action.set_extracted_device_name() + mock_cli.write_status = mock.MagicMock() + usb = USBTestAction(self._setup_submission()) - assert action.device == "/dev/sda1" + usb.run() + mock_cli.write_status.assert_called_once_with(Status.LEGACY_USB_CONNECTED) -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) -def test_extract_device_name_multiple_part(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskExportAction(submission) - action.device = "/dev/sda" - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + @mock.patch("securedrop_export.disk.actions.CLI") + def test_run_usbtestaction_error(self, mock_cli, capsys): + mock_cli.get_connected_devices.side_effect = ExportException(Status.LEGACY_ERROR_USB_CHECK) + usb = USBTestAction(self._setup_submission()) - action.set_extracted_device_name() + mock_cli.write_status = mock.MagicMock() + + usb.run() + mock_cli.write_status.assert_called_once_with(Status.LEGACY_ERROR_USB_CHECK) - mocked_exit.assert_called_once_with(expected_message) + @mock.patch("sys.exit") + @mock.patch("securedrop_export.disk.actions.CLI") + def test_run_disktestaction(self, mock_sys, mock_cli): + mock_cli.is_luks_volume.return_value=True + mock_cli.write_status = mock.MagicMock() -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PART) -def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = 
DiskExportAction(submission) + test_export = DiskTestAction(self._setup_submission()) + test_export.run() - command_output = mock.MagicMock() - command_output.stderr = b"" - mocker.patch("subprocess.run", return_value=command_output) + mock_cli.write_status.assert_called_once_with(Status.SUCCESS_EXPORT) - expected_message = export.ExportStatus.USB_ENCRYPTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LSBLK_NO_PART) + @mock.patch("subprocess.check_call", return_value=0) + def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = DiskExportAction(submission) - action.check_luks_volume() + command_output = mock.MagicMock() + command_output.stderr = b"" + mocker.patch("subprocess.run", return_value=command_output) - mocked_exit.assert_called_once_with(expected_message) + expected_message = Status.LEGACY_USB_ENCRYPTED.value + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + @mock.patch("sys.exit") + @mock.patch("securedrop_export.disk.actions.CLI") + def test_run_disktestaction_error(self, mock_cli, mocker): + mock_cli.patch("get_partitioned_device", side_effect=ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED)) -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -def test_luks_precheck_encrypted_single_part(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskExportAction(submission) - action.device = "/dev/sda" - expected_message = export.ExportStatus.USB_ENCRYPTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) + status_mock = mock_cli.patch("write_status") + test_export = DiskTestAction(self._setup_submission()) + test_export.run() + status_mock.assert_called_once_with(Status.LEGACY_ERROR_USB_WRITE) - command_output = 
mock.MagicMock() - command_output.stderr = b"" - mocker.patch("subprocess.run", return_value=command_output) + @mock.patch("sys.exit") + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LSBLK_ONE_PART) + @mock.patch("subprocess.check_call", return_value=0) + def test_luks_precheck_encrypted_single_part(mocked_call, mock_output, capsys, mocker): + submission = export.SDExport("testfile", TEST_CONFIG) + action = DiskTestAction(submission) + action.device = "/dev/sda" + expected_message = Status.LEGACY_USB_ENCRYPTED.value + mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - action.check_luks_volume() + command_output = mock.MagicMock() + command_output.stderr = b"" + mocker.patch("subprocess.run", return_value=command_output) - mocked_exit.assert_called_once_with(expected_message) + action.run() + @mock.patch("sys.exit") + @mock.patch("securedrop_export.disk.actions.CLI") + def test_run_diskexportaction(self, mock_cli, mock_sys): -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_MULTI_PART) -def test_luks_precheck_encrypted_multi_part(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskExportAction(submission) - action.device = "/dev/sda" - expected_message = export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + mock_cli.patch("is_luks_volume", return_value=True) + status_mock = mock_cli.patch("write_status") - # Here we need to mock the exit_gracefully method with a side effect otherwise - # program execution will continue after exit_gracefully and exit_gracefully - # may be called a second time. 
- mocked_exit = mocker.patch.object( - submission, "exit_gracefully", side_effect=lambda x: sys.exit(0) - ) + test_export = DiskExportAction(self._setup_submission()) + test_export.run() - # Output of `lsblk -o TYPE --noheadings DEVICE_NAME` when a drive has multiple - # partitions - multi_partition_lsblk_output = b"disk\npart\npart\n" - mocker.patch("subprocess.check_output", return_value=multi_partition_lsblk_output) + status_mock.assert_called_once_with(Status.SUCCESS_EXPORT) - with pytest.raises(SystemExit): - action.check_luks_volume() + @mock.patch("sys.exit") + @mock.patch("securedrop_export.disk.actions.CLI") + def test_run_diskexportaction_disk_not_supported(self, mock_cli, mock_sys): - mocked_exit.assert_called_once_with(expected_message) + mock_cli.patch("get_partitioned_device") + mock_cli.patch("is_luks_volume", return_value=False) + status_mock = mock_cli.patch("write_status") + test_export = DiskExportAction(self._setup_submission()) + test_export.run() -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_ONE_PART) -def test_luks_precheck_encrypted_luks_error(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskExportAction(submission) - action.device = "/dev/sda" - expected_message = "USB_ENCRYPTION_NOT_SUPPORTED" - assert expected_message == export.ExportStatus.USB_ENCRYPTION_NOT_SUPPORTED.value + status_mock.assert_called_once_with(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - mocked_exit = mocker.patch.object( - submission, "exit_gracefully", side_effect=lambda msg, e: sys.exit(0) - ) + @mock.patch("sys.exit") + @mock.patch("securedrop_export.disk.actions.CLI") + def test_run_diskexportaction_not_supported(self, mock_sys, mock_cli): - single_partition_lsblk_output = b"disk\npart\n" - mocker.patch("subprocess.check_output", return_value=single_partition_lsblk_output) - mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) + status_mock = mock_cli.patch("write_status") 
+ mock_cli.patch("get_partitioned_device") + mock_cli.is_luks_volume.return_value=True + mock_cli.write_data_to_device.side_effect = Status.LEGACY_ERROR_USB_WRITE - with pytest.raises(SystemExit): - action.check_luks_volume() + test_export = DiskExportAction(self._setup_submission()) + test_export.run() - assert mocked_exit.mock_calls[0][2]["msg"] == expected_message - assert mocked_exit.mock_calls[0][2]["e"] is None + status_mock.assert_called_once_with(Status.LEGACY_ERROR_USB_WRITE) diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py new file mode 100644 index 000000000..7560630f7 --- /dev/null +++ b/tests/disk/test_cli.py @@ -0,0 +1,376 @@ +import pytest +from unittest import mock + +import os +import pytest +import sys + +import subprocess + +from securedrop_export.disk.cli import CLI +from securedrop_export.disk.volume import EncryptionScheme, Volume +from securedrop_export.disk.exceptions import ExportException +from securedrop_export.disk.status import Status + +from securedrop_export import export + +TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") + +_DEFAULT_USB_DEVICE = "/dev/sda" +_DEFAULT_USB_DEVICE_ONE_PART = "/dev/sda1" + +_PRETEND_LUKS_ID = "luks-id-123456" + +# Sample stdout from shell commands +_SAMPLE_OUTPUT_NO_PART = b"disk\ncrypt" # noqa +_SAMPLE_OUTPUT_ONE_PART = b"disk\npart\ncrypt" # noqa +_SAMPLE_OUTPUT_MULTI_PART = b"disk\npart\npart\npart\ncrypt" # noqa +_SAMPLE_OUTPUT_USB = b"/dev/sda" # noqa + +_SAMPLE_LUKS_HEADER = b"\n\nUUID:\t123456-DEADBEEF" # noqa + + +class TestCli: + """ + Test the CLI wrapper that handless identification and locking/unlocking of + USB volumes. + """ + + def _setup_usb_devices(self, mocker, disks, is_removable): + """ + Helper function to set up mocked shell calls representing + the search for attached USB devices. 
+ The original calls are `lsblk | grep disk` and + `cat /sys/class/block/{disk}/removable` + + Parameters: + disks (byte array): Array of disk names separated by newline. + is_removable (byte array): Array of removable status results (1 for removable) separated by newline + """ + + # Patch commandline calls to `lsblk | grep disk` + command_output = mock.MagicMock() + command_output.stdout = mock.MagicMock() + command_output.stdout.readlines = mock.MagicMock(return_value=disks) + mocker.patch("subprocess.Popen", return_value=command_output) + + # Patch commandline call to 'cat /sys/class/block/{device}/removable' + + # Using side_effect with an iterable allows for different return value each time, + # which matches what would happen if iterating through list of devices + mocker.patch("subprocess.check_output", side_effect=is_removable) + + def test_get_connected_devices(self, mocker): + disks = [b"sda disk\n", b"sdb disk\n"] + removable = [b"1\n", b"1\n"] + + self._setup_usb_devices(mocker, disks, removable) + cli = CLI() + result = cli.get_connected_devices() + + assert result[0] == "/dev/sda" and result[1] == "/dev/sdb" + + @mock.patch("subprocess.Popen", side_effect=subprocess.CalledProcessError(1, "Popen")) + def test_get_connected_devices_error(self, mocked_subprocess): + cli = CLI() + + with pytest.raises(ExportException): + cli.get_connected_devices() + + @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_NO_PART) + def test_get_partitioned_device_no_partition(self, mocked_call): + cli = CLI() + + result = cli.get_partitioned_device(_DEFAULT_USB_DEVICE) + assert result == _DEFAULT_USB_DEVICE + + @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_ONE_PART) + def test_get_partitioned_device_one_partition(self, mocked_call): + cli = CLI() + + result = cli.get_partitioned_device(_DEFAULT_USB_DEVICE) + assert result == _DEFAULT_USB_DEVICE+"1" + + @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_MULTI_PART) + def 
test_get_partitioned_device_multi_partition(self, mocked_call): + cli = CLI() + + with pytest.raises(ExportException): + result = cli.get_partitioned_device(_SAMPLE_OUTPUT_MULTI_PART) + + @mock.patch( + "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output") + ) + def test_get_partitioned_device_multi_partition_error(self, mocked_call): + cli = CLI() + + # Make sure we wrap CalledProcessError and throw our own exception + with pytest.raises(ExportException): + cli.get_partitioned_device(_DEFAULT_USB_DEVICE) + + @mock.patch("subprocess.check_call", return_value=0) + def test_is_luks_volume_true(self, mocked_call): + cli = CLI() + + # `sudo cryptsetup isLuks` returns 0 if true + assert cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) + + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test_is_luks_volume_false(self, mocked_subprocess): + cli = CLI() + + # `sudo cryptsetup isLuks` returns 1 if false; CalledProcessError is thrown + assert not cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) + + @mock.patch("subprocess.check_output", return_value=_SAMPLE_LUKS_HEADER) + def test__get_luks_name_from_headers(self, mocked_subprocess): + cli = CLI() + + result = cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + + assert result is not None and result.split("-")[1] in _SAMPLE_LUKS_HEADER.decode("utf8") + + @mock.patch("subprocess.check_output", return_value=b"corrupted-or-invalid-header\n") + def test__get_luks_name_from_headers_error(self, mocked_subprocess): + cli = CLI() + + with pytest.raises(ExportException): + result = cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + + @mock.patch( + "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output") + ) + def test__get_luks_name_from_headers_error(self, mocked_subprocess): + cli = CLI() + + with pytest.raises(ExportException): + result = cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + + 
@mock.patch("os.path.exists", return_value=True) + @mock.patch("subprocess.check_output", return_value=_SAMPLE_LUKS_HEADER) + def test_get_luks_volume_already_unlocked(self, mocked_subprocess, mocked_os_call): + cli = CLI() + result = cli.get_luks_volume(_DEFAULT_USB_DEVICE_ONE_PART) + + assert result.encryption is EncryptionScheme.LUKS + assert result.unlocked + + @mock.patch("os.path.exists", return_value=True) + def test__unlock_luks_volume_success(self, mocker): + cli = CLI() + + mock_popen = mocker.MagicMock() + mock_popen_communicate = mocker.MagicMock() + mock_popen.returncode = 0 + + mocker.patch("subprocess.Popen", return_value=mock_popen) + mocker.patch("subprocess.Popen.communicate", return_value=mock_popen_communicate) + + mapped_name = "luks-id-123456" + vol = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=mapped_name, encryption=EncryptionScheme.LUKS) + key = "A key!&8*%_ A KEY" + result = cli.unlock_luks_volume(vol, key) + assert vol.unlocked + + def test_unlock_luks_volume_not_luks(self, mocker): + cli = CLI() + + mock_popen = mocker.MagicMock() + mock_popen.communicate = mocker.MagicMock() + mock_popen.communicate.returncode = 1 # An error unlocking + + mocker.patch("subprocess.Popen", mock_popen) + + vol = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.UNKNOWN) + key = "a key!" + mapped_name = "luks-id-123456" + + with pytest.raises(ExportException): + cli.unlock_luks_volume(vol, key) + + def test_unlock_luks_volume_passphrase_failure(self, mocker): + cli = CLI() + + mock_popen = mocker.MagicMock() + mock_popen.communicate = mocker.MagicMock() + mock_popen.communicate.returncode = 1 # An error unlocking + + mocker.patch("subprocess.Popen", mock_popen) + + vol = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS) + key = "a key!" 
+ mapped_name = "luks-id-123456" + + with pytest.raises(ExportException): + cli.unlock_luks_volume(vol, key) + + @mock.patch("subprocess.Popen", side_effect=subprocess.CalledProcessError("1", "Popen")) + def test_unlock_luks_volume_luksOpen_exception(self, mocked_subprocess): + cli = CLI() + pd = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS) + key = "a key!" + mapped_name = "luks-id-123456" + + with pytest.raises(ExportException): + cli.unlock_luks_volume(pd, key) + + @mock.patch("subprocess.check_output", return_value=b"\n") + @mock.patch("subprocess.check_call", return_value=0) + def test_mount_volume(self, mocked_output, mocked_call): + cli = CLI() + vol = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + result = cli.mount_volume(vol) + + @mock.patch("subprocess.check_output", return_value=b"/dev/pretend/luks-id-123456\n") + @mock.patch("subprocess.check_call", return_value=0) + def test_mount_volume_already_mounted(self, mocked_output, mocked_call): + cli = CLI() + md = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + result = cli.mount_volume(md) + + @mock.patch("subprocess.check_output", return_value=b"\n") + @mock.patch("subprocess.check_call", return_value=0) + def test_mount_volume_mkdir(self, mocked_output, mocked_subprocess): + cli = CLI() + md = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + result = cli.mount_volume(md) + + assert result.mapped_name == _PRETEND_LUKS_ID + + @mock.patch("subprocess.check_output", return_value=b"\n") + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test_mount_volume_error(self, mocked_subprocess, mocked_output): + cli = CLI() + + md = Volume( + 
device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + + with pytest.raises(ExportException): + cli.mount_volume(md) + + @mock.patch("os.path.exists", return_value=True) + @mock.patch("subprocess.check_call", return_value=0) + def test__unmount_volume(self, mocked_subprocess, mocked_mountpath): + cli = CLI() + + mounted = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + mountpoint=cli._DEFAULT_MOUNTPOINT, + encryption=EncryptionScheme.LUKS, + ) + + result = cli._unmount_volume(mounted) + + + @mock.patch("os.path.exists", return_value=True) + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test__unmount_volume_error(self, mocked_subprocess, mocked_mountpath): + cli = CLI() + + mounted = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + mountpoint=cli._DEFAULT_MOUNTPOINT, + encryption=EncryptionScheme.LUKS, + ) + + with pytest.raises(ExportException): + result = cli._unmount_volume(mounted) + + @mock.patch("os.path.exists", return_value=True) + @mock.patch("subprocess.check_call", return_value=0) + def test__close_luks_volume(self, mocked_subprocess, mocked_os_call): + cli = CLI() + + mapped = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + + # If call completes without error, drive was successfully closed with luksClose + cli._close_luks_volume(mapped) + + @mock.patch("os.path.exists", return_value=True) + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test__close_luks_volume_error(self, mocked_subprocess, mocked_os_call): + cli = CLI() + + mapped = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + + with pytest.raises(ExportException): + cli._close_luks_volume(mapped) + + 
@mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test__remove_temp_directory_error(self, mocked_subprocess): + cli = CLI() + + with pytest.raises(ExportException): + cli._remove_temp_directory("tmp") + + @mock.patch("subprocess.check_call", return_value=0) + def test_write_to_disk(self, mock_check_call): + cli = CLI() + + vol = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + mountpoint=cli._DEFAULT_MOUNTPOINT, + encryption=EncryptionScheme.LUKS, + ) + + submission = export.SDExport("testfile", TEST_CONFIG) + + cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) + + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test_write_to_disk_error_still_does_cleanup(self, mock_call, mocker): + cli = CLI() + cli.cleanup_drive_and_tmpdir = mocker.MagicMock() + + vol = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + mountpoint=cli._DEFAULT_MOUNTPOINT, + encryption=EncryptionScheme.LUKS, + ) + submission = export.SDExport("testfile", TEST_CONFIG) + + with pytest.raises(ExportException): + cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) + cleanup_mock.assert_called_once() + + @pytest.mark.parametrize("status", [s for s in Status]) + def test_write_status(self, status, capsys): + cli = CLI() + + cli.write_status(status) + captured = capsys.readouterr() + assert captured.out == status.value + "\n" + + @pytest.mark.parametrize("invalid_status", ["foo", ";ls", "&& echo 0"]) + def test_write_status_error(self, invalid_status, capsys): + cli = CLI() + + with pytest.raises(ValueError): + cli.write_status(Status.value_of(invalid_status)) diff --git a/tests/disk/test_status.py b/tests/disk/test_status.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/disk/test_volume.py b/tests/disk/test_volume.py new file mode 100644 index 
000000000..8651bdbef --- /dev/null +++ b/tests/disk/test_volume.py @@ -0,0 +1,47 @@ +import pytest +from unittest import mock + +from securedrop_export.disk.volume import Volume, EncryptionScheme + + +class TestVolume: + def test_overwrite_valid_encryption_scheme(self): + volume = Volume(device_name="/dev/sda", mapped_name="pretend-luks-mapper-id", encryption=EncryptionScheme.LUKS) + assert volume.encryption is EncryptionScheme.LUKS + volume.encryption = None + assert volume.encryption is EncryptionScheme.UNKNOWN + + @mock.patch("os.path.exists", return_value=True) + def test_is_unlocked_true(self, mock_os_path): + volume = Volume( + device_name="/dev/sda1", mapped_name="pretend-luks-mapper-id", encryption=EncryptionScheme.LUKS + ) + + assert volume.unlocked + + @mock.patch("os.path.exists", return_value=False) + def test_is_unlocked_false_no_path(self, mock_os_path): + volume = Volume( + device_name="/dev/sda1", mapped_name="pretend-luks-mapper-id", encryption=EncryptionScheme.LUKS + ) + + assert not volume.unlocked + + @mock.patch("os.path.exists", return_value=True) + def test_writable_false(self, mock_os_path): + vol = Volume( + device_name="dev/sda1", mapped_name="pretend-luks-id", encryption=EncryptionScheme.LUKS + ) + + assert not vol.writable + + @mock.patch("os.path.exists", return_value=True) + def test_writable_false(self, mock_os_path): + vol = Volume( + device_name="dev/sda1", + mapped_name="pretend-luks-id", + encryption=EncryptionScheme.LUKS, + mountpoint="/media/usb", + ) + + assert vol.writable diff --git a/tests/print/test_actions.py b/tests/print/test_actions.py index 37b2ea9c4..17c3397fb 100644 --- a/tests/print/test_actions.py +++ b/tests/print/test_actions.py @@ -84,9 +84,7 @@ def test_is_not_open_office_file(capsys, open_office_paths): def test_install_printer_ppd_laserjet(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) - ppd = action.install_printer_ppd( - 
"usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000" - ) + ppd = action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" @@ -94,9 +92,7 @@ def test_install_printer_ppd_laserjet(mocker): def test_install_printer_ppd_brother(mocker): submission = export.SDExport("testfile", TEST_CONFIG) action = PrintExportAction(submission) - ppd = action.install_printer_ppd( - "usb://Brother/HL-L2320D%20series?serial=A00000A000000" - ) + ppd = action.install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") assert ppd == "/usr/share/cups/model/br7030.ppd" @@ -106,9 +102,7 @@ def test_install_printer_ppd_error_no_driver(mocker): mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) - action.install_printer_ppd( - "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" - ) + action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") assert mocked_exit.mock_calls[0][2]["msg"] == "ERROR_PRINTER_DRIVER_UNAVAILABLE" assert mocked_exit.mock_calls[0][2]["e"] is None diff --git a/tests/test_export.py b/tests/test_export.py index fb6f5868d..4b596c9a7 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -13,9 +13,7 @@ TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") -ANOTHER_BAD_TEST_CONFIG = os.path.join( - os.path.dirname(__file__), "sd-export-config-bad-2.json" -) +ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") def test_extract_tarball(): @@ -53,22 +51,15 @@ def test_extract_tarball(): submission.extract_tarball() - extracted_file_path = os.path.join( - submission.tmpdir, "some", "dirs", "file.txt" - ) + extracted_file_path = os.path.join(submission.tmpdir, "some", "dirs", 
"file.txt") assert os.path.exists(extracted_file_path) assert oct(os.stat(extracted_file_path).st_mode) == "0o100600" # Subdirectories that are added as members are extracted with 700 permissions - assert ( - oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" - ) + assert oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" # Subdirectories that are not added as members are extracted with 700 permissions # because os.umask(0o077) is set in the SDExport constructor. - assert ( - oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) - == "0o40700" - ) + assert oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) == "0o40700" def test_extract_tarball_with_symlink(): @@ -124,9 +115,7 @@ def test_extract_tarball_raises_if_doing_path_traversal(): metadata_file_info.size = len(metadata_str) archive.addfile(metadata_file_info, metadata_bytes) content = b"test" - traversed_file_info = tarfile.TarInfo( - "../../../../../../../../../tmp/traversed" - ) + traversed_file_info = tarfile.TarInfo("../../../../../../../../../tmp/traversed") traversed_file_info.size = len(content) archive.addfile(traversed_file_info, BytesIO(content)) archive.close() @@ -303,9 +292,7 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): archive_path = os.path.join(temp_dir, "archive.sd-export") symlink_path = os.path.join(temp_dir, "symlink") - os.system( - f"ln -s {tmp}/unsafe {symlink_path}" - ) # create symlink to "/tmp/unsafe" + os.system(f"ln -s {tmp}/unsafe {symlink_path}") # create symlink to "/tmp/unsafe" with tarfile.open(archive_path, "w:gz") as archive: metadata = { @@ -489,9 +476,7 @@ def test_invalid_encryption_config(capsys): temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: - f.write( - '{"device": "disk", "encryption_method": "base64", "encryption_key": "hunter1"}' - ) + f.write('{"device": 
"disk", "encryption_method": "base64", "encryption_key": "hunter1"}') config = export.Metadata(temp_folder) @@ -505,9 +490,7 @@ def test_valid_encryption_config(capsys): temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) with open(metadata, "w") as f: - f.write( - '{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}' - ) + f.write('{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}') config = export.Metadata(temp_folder) From f7442aea66f5d2a8bbf6cdb983ddc8828d21e916 Mon Sep 17 00:00:00 2001 From: Ro Date: Wed, 28 Sep 2022 16:52:57 -0700 Subject: [PATCH 305/352] Use Commands to start export Services. Refactor test suite to match new export and print services and CLI wrapper. --- securedrop_export/archive.py | 65 +++++--- securedrop_export/disk/cli.py | 55 +++---- securedrop_export/disk/new_service.py | 127 +++++++++++++++ securedrop_export/disk/new_status.py | 20 +++ securedrop_export/disk/service.py | 78 +++++++--- securedrop_export/disk/status.py | 13 +- securedrop_export/entrypoint.py | 10 +- securedrop_export/enums.py | 11 -- securedrop_export/exceptions.py | 17 +- securedrop_export/main.py | 181 ++++++++++++++++------ securedrop_export/print/service.py | 77 ++++----- securedrop_export/print/status.py | 7 +- securedrop_export/utils.py | 51 ++++-- tests/disk/test_actions.py | 149 ------------------ tests/disk/test_cli.py | 32 ++-- tests/disk/test_service.py | 137 ++++++++++++++++ tests/print/test_actions.py | 134 ---------------- tests/print/test_service.py | 130 ++++++++++++++++ tests/{test_export.py => test_archive.py} | 159 ++++++------------- tests/test_main.py | 64 +++++++- tests/test_util.py | 39 +++++ 21 files changed, 935 insertions(+), 621 deletions(-) create mode 100644 securedrop_export/disk/new_service.py create mode 100644 securedrop_export/disk/new_status.py delete mode 100644 tests/disk/test_actions.py create mode 100644 tests/disk/test_service.py 
delete mode 100644 tests/print/test_actions.py create mode 100644 tests/print/test_service.py rename tests/{test_export.py => test_archive.py} (78%) create mode 100644 tests/test_util.py diff --git a/securedrop_export/archive.py b/securedrop_export/archive.py index 76e18f61c..f5a780b23 100755 --- a/securedrop_export/archive.py +++ b/securedrop_export/archive.py @@ -10,57 +10,80 @@ import sys import tempfile -from securedrop_export.enums import Command -from securedrop_export.exceptions import ExportStatus +from securedrop_export.exceptions import ExportException +from securedrop_export.enums import Command, ExportEnum from securedrop_export.utils import safe_extractall logger = logging.getLogger(__name__) +class Status(ExportEnum): + ERROR_ARCHIVE_METADATA = "ERROR_ARCHIVE_METADATA" + ERROR_METADATA_PARSING = "ERROR_METADATA_PARSING" + ERROR_EXTRACTION = "ERROR_EXTRACTION" + class Metadata(object): """ Object to parse, validate and store json metadata from the sd-export archive. + + Create a Metadata object by using the `create_and_validate()` method to + ensure well-formed and valid metadata. """ METADATA_FILE = "metadata.json" - SUPPORTED_ENCRYPTION_METHODS = ["luks"] - def __init__(self, archive_path): - # Calling create_and_validate() is the preferred way to initialize + # Slightly underhanded way of ensuring that a Metadata object is not instantiated + # directly; instead, the create_and_validate() method is used + __key = object() + + + def __init__(self, key, archive_path): + if not key == Metadata.__key: + raise ValueError("Must use create_and_validate() to create Metadata object") + + # Initialize self.metadata_path = os.path.join(archive_path, self.METADATA_FILE) - @staticmethod + + @classmethod def create_and_validate(cls, archive_path) -> 'Metadata': """ Create and validate metadata object. Raise ExportException for invalid metadata. 
""" - md = cls(archive_path) + md = Metadata(cls.__key, archive_path) + md.validate() + return md + + + def validate(self): + """ + Validate Metadata. + Throw ExportException if invalid state is found. + """ try: - with open(md.metadata_path) as f: + with open(self.metadata_path) as f: logger.info("Parsing archive metadata") json_config = json.loads(f.read()) - md.export_method = json_config.get("device", None) - md.encryption_method = json_config.get("encryption_method", None) - md.encryption_key = json_config.get("encryption_key", None) + self.export_method = json_config.get("device", None) + self.encryption_method = json_config.get("encryption_method", None) + self.encryption_key = json_config.get("encryption_key", None) logger.info( "Exporting to device {} with encryption_method {}".format( - md.export_method, md.encryption_method + self.export_method, self.encryption_method ) ) - # Validate metadata - this will fail if command is not in list of supported commands - md.command = Commmand.value_of(md.export_method) - if md.command is Commmand.EXPORT and not md.encryption_method in md.SUPPORTED_ENCRYPTION_METHODS: - logger.error("Unsuported encryption method") - raise ExportException(ExportStatus.ERROR_ARCHIVE_METADATA) + # Validate metadata - this will fail if command is not in list of supported commands + self.command = Command(self.export_method) + if self.command is Command.EXPORT and not self.encryption_method in self.SUPPORTED_ENCRYPTION_METHODS: + logger.error("Unsuported encryption method") + raise ExportException(sdstatus=Status.ERROR_ARCHIVE_METADATA) except Exception as ex: logger.error("Metadata parsing failure") - raise ExportException(ExportStatus.ERROR_METADATA_PARSING) from ex - - return md + raise ExportException(sdstatus=Status.ERROR_METADATA_PARSING) from ex class Archive(object): @@ -79,6 +102,6 @@ def extract_tarball(self): safe_extractall(self.archive, self.tmpdir) except Exception as ex: logger.error("Unable to extract tarball: 
{}".format(ex)) - raise ExportException(ExportStatus.ERROR_EXTRACTION) from ex + raise ExportException(sdstatus=Status.ERROR_EXTRACTION) from ex \ No newline at end of file diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py index 792abe8f7..579a102e5 100644 --- a/securedrop_export/disk/cli.py +++ b/securedrop_export/disk/cli.py @@ -9,9 +9,10 @@ from typing import List, Optional +from securedrop_export.exceptions import ExportException + from .volume import EncryptionScheme, Volume -from .exceptions import ExportException -from .status import Status +from .new_status import Status logger = logging.getLogger(__name__) @@ -45,7 +46,7 @@ def get_connected_devices(self) -> List[str]: attached_devices = [x.decode("utf8").split()[0] for x in command_output] except subprocess.CalledProcessError as ex: - raise ExportException(status=Status.DEVICE_ERROR) from ex + raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex # Determine which are USBs by selecting those block devices that are removable disks. 
usb_devices = [] @@ -89,7 +90,7 @@ def get_partitioned_device(self, blkid: str) -> str: logger.error( f"Multiple partitions not supported (found {partition_count} partitions on {blkid}" ) - raise ExportException(status=Status.INVALID_DEVICE_DETECTED) + raise ExportException(sdstatus=Status.INVALID_DEVICE_DETECTED) # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted if partition_count == 1: @@ -99,11 +100,11 @@ def get_partitioned_device(self, blkid: str) -> str: else: # lsblk did not return output we could process - raise ExportException(status=Status.DEVICE_ERROR) + raise ExportException(sdstatus=Status.DEVICE_ERROR) except subprocess.CalledProcessError as ex: logger.error(f"Error checking block deivce {blkid}") - raise ExportException(status=Status.DEVICE_ERROR) from ex + raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex def is_luks_volume(self, device: str) -> bool: """ @@ -146,9 +147,9 @@ def _get_luks_name_from_headers(self, device: str) -> str: logger.error( f"Failed to dump LUKS headers; {device} may not be correctly formatted" ) - raise ExportException(status=Status.INVALID_DEVICE_DETECTED) + raise ExportException(sdstatus=Status.INVALID_DEVICE_DETECTED) except subprocess.CalledProcessError as ex: - raise ExportException(Status=Status.DEVICE_ERROR) from ex + raise ExportException(sdtatus=Status.DEVICE_ERROR) from ex def get_luks_volume(self, device: str) -> Volume: """ @@ -193,7 +194,7 @@ def unlock_luks_volume(self, volume: Volume, decryption_key: str) -> Volume: """ if not volume.encryption is EncryptionScheme.LUKS: logger.error("Must call unlock_luks_volume() on LUKS-encrypted device") - raise ExportException(Status.DEVICE_ERROR) + raise ExportException(sdstatus=Status.DEVICE_ERROR) try: logger.debug("Unlocking luks volume {}".format(volume.device_name)) @@ -213,10 +214,10 @@ def unlock_luks_volume(self, volume: Volume, decryption_key: str) -> Volume: ) else: logger.error("Bad volume passphrase") - raise 
ExportException(Status.ERROR_UNLOCK_LUKS) + raise ExportException(sdstatus=Status.ERROR_UNLOCK_LUKS) except subprocess.CalledProcessError as ex: - raise ExportException(Status.DEVICE_ERROR) from ex + raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex def _get_mountpoint(self, volume: Volume) -> Optional[str]: """ @@ -231,7 +232,7 @@ def _get_mountpoint(self, volume: Volume) -> Optional[str]: except subprocess.CalledProcessError as ex: logger.error(ex) - raise ExportException(Status.ERROR_MOUNT) from ex + raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex def mount_volume(self, volume: Volume) -> Volume: """ @@ -243,7 +244,7 @@ def mount_volume(self, volume: Volume) -> Volume: Raises ExportException if errors are encountered during mounting. """ if not volume.unlocked: - raise ExportException("Unlock volume before mounting") + raise ExportException(sdstatus=Status.ERROR_MOUNT) mountpoint = self._get_mountpoint(volume) @@ -256,26 +257,26 @@ def mount_volume(self, volume: Volume) -> Volume: volume.mountpoint = mountpoint else: - if not os.path.exists(_DEFAULT_MOUNTPOINT): + if not os.path.exists(self._DEFAULT_MOUNTPOINT): try: - subprocess.check_call(["sudo", "mkdir", _DEFAULT_MOUNTPOINT]) + subprocess.check_call(["sudo", "mkdir", self._DEFAULT_MOUNTPOINT]) except subprocess.CalledProcessError as ex: logger.error(ex) - raise ExportException(Status.ERROR_MOUNT) from ex + raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex # Mount device /dev/mapper/{mapped_name} at /media/usb/ - mapped_device_path = os.path.join(device.MAPPED_VOLUME_PREFIX, device.mapped_name) + mapped_device_path = os.path.join(volume.MAPPED_VOLUME_PREFIX, volume.mapped_name) try: - logger.debug(f"Mounting volume {volume.device_name} at {_DEFAULT_MOUNTPOINT}") - subprocess.check_call(["sudo", "mount", mapped_device_path, _DEFAULT_MOUNTPOINT]) - subprocess.check_call(["sudo", "chown", "-R", "user:user", _DEFAULT_MOUNTPOINT]) + logger.debug(f"Mounting volume {volume.device_name} 
at {self._DEFAULT_MOUNTPOINT}") + subprocess.check_call(["sudo", "mount", mapped_device_path, self._DEFAULT_MOUNTPOINT]) + subprocess.check_call(["sudo", "chown", "-R", "user:user", self._DEFAULT_MOUNTPOINT]) - volume.mountpoint = _DEFAULT_MOUNTPOINT + volume.mountpoint = self._DEFAULT_MOUNTPOINT except subprocess.CalledProcessError as ex: logger.error(ex) - raise ExportException(Status.ERROR_MOUNT) from ex + raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex return volume @@ -302,7 +303,7 @@ def write_data_to_device( logger.info("File copied successfully to {}".format(submission_target_dirname)) except (subprocess.CalledProcessError, OSError) as ex: - raise ExportException(status=Status.ERROR_EXPORT) from ex + raise ExportException(sdstatus=Status.ERROR_EXPORT) from ex finally: self.cleanup_drive_and_tmpdir(device, submission_tmpdir) @@ -325,7 +326,7 @@ def cleanup_drive_and_tmpdir(self, volume: Volume, submission_tmpdir: str): except subprocess.CalledProcessError as ex: logger.error("Error syncing filesystem") - raise ExportException(Status.ERROR_EXPORT_CLEANUP) from ex + raise ExportException(sdstatus=Status.ERROR_EXPORT_CLEANUP) from ex def _unmount_volume(self, volume: Volume) -> Volume: """ @@ -339,7 +340,7 @@ def _unmount_volume(self, volume: Volume) -> Volume: except subprocess.CalledProcessError as ex: logger.error("Error unmounting device") - raise ExportException(Status.ERROR_MOUNT) from ex + raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex else: logger.info("Mountpoint does not exist; volume was already unmounted") @@ -356,7 +357,7 @@ def _close_luks_volume(self, unlocked_device: Volume) -> None: except subprocess.CalledProcessError as ex: logger.error("Error closing device") - raise ExportException(Status.DEVICE_ERROR) from ex + raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex def _remove_temp_directory(self, tmpdir: str): """ @@ -367,7 +368,7 @@ def _remove_temp_directory(self, tmpdir: str): subprocess.check_call(["rm", 
"-rf", tmpdir]) except subprocess.CalledProcessError as ex: logger.error("Error removing temporary directory") - raise ExportException(Status.DEVICE_ERROR) from ex + raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex def write_status(self, status: Status): """ diff --git a/securedrop_export/disk/new_service.py b/securedrop_export/disk/new_service.py new file mode 100644 index 000000000..1084f751a --- /dev/null +++ b/securedrop_export/disk/new_service.py @@ -0,0 +1,127 @@ +import logging +import os +import subprocess +import sys + +from enum import Enum + +from typing import List + +from securedrop_export.archive import Archive + +from .cli import CLI +from .status import Status +from .volume import EncryptionScheme, Volume +from securedrop_export.exceptions import ExportException + + +logger = logging.getLogger(__name__) + + +class Service: + """ + Checks that can be performed against the device(s). + This is the "API" portion of the export workflow. + """ + + def __init__(self, cli: CLI): + self.cli = cli + + def run(self, arg: str) -> Status: + """ + Run export actions. + """ + + def scan_all_devices(self) -> Status: + """ + Check all connected devices and return current device + status. + """ + try: + all_devices = self.cli.get_connected_devices() + number_devices = len(all_devices) + + if number_devices == 0: + return Status.NO_DEVICE_DETECTED + elif number_devices > 1: + return Status.MULTI_DEVICE_DETECTED + else: + return scan_single_device(all_devices[0]) + + except ExportException: + logger.error(ex) + return Status.DEVICE_ERROR # Could not assess devices + + def scan_single_device(self, str: blkid) -> Status: + """ + Given a string representing a single block device, see if it + is a suitable export target and return information about its state. 
+ """ + try: + target = self.cli.get_partitioned_device(blkid) + + # See if it's a LUKS drive + if self.cli.is_luks_volume(target): + + # Returns Volume or throws ExportException + self.volume = self.cli.get_luks_volume(target) + + # See if it's unlocked and mounted + if self.volume.writable: + logger.debug("LUKS device is already mounted") + return Status.DEVICE_WRITABLE + else: + # Prompt for passphrase + return Status.DEVICE_LOCKED + else: + # Might be VeraCrypt, might be madness + logger.info("LUKS drive not found") + + # Currently we don't support anything other than LUKS. + # In future, we will support TC/VC volumes as well + return Status.INVALID_DEVICE_DETECTED + + except ExportException as ex: + logger.error(ex) + if ex.sdstatus: + return ex.sdstatus + else: + return Status.DEVICE_ERROR + + def unlock_device(self, passphrase: str, volume: Volume) -> Status: + """ + Given provided passphrase, unlock target volume. Currently, + LUKS volumes are supported. + """ + if volume: + try: + self.volume = self.cli.unlock_luks_volume(volume, passphrase) + + if volume.writable: + return Status.DEVICE_WRITABLE + else: + return Status.ERROR_UNLOCK_LUKS + + except ExportException as ex: + logger.error(ex) + return Status.ERROR_UNLOCK_LUKS + else: + # Trying to unlock devices before having an active device + logger.warning("Tried to unlock_device but no current volume detected.") + return Status.NO_DEVICE_DETECTED + + def write_to_device(self, volume: Volume, data: Archive) -> Status: + """ + Export data to volume. CLI unmounts and locks volume on completion, even + if export was unsuccessful. 
+ """ + try: + self.cli.write_data_to_device(data.tmpdir, data.target_dirname, volume) + return Status.SUCCESS_EXPORT + + except ExportException as ex: + logger.error(ex) + if ex.sdstatus: + return ex.sdstatus + else: + return Status.ERROR_EXPORT diff --git a/securedrop_export/disk/new_status.py b/securedrop_export/disk/new_status.py new file mode 100644 index 000000000..d6f8dadda --- /dev/null +++ b/securedrop_export/disk/new_status.py @@ -0,0 +1,20 @@ +from securedrop_export.enums import ExportEnum + +class Status(ExportEnum): + + NO_DEVICE_DETECTED = "NO_DEVICE_DETECTED" + INVALID_DEVICE_DETECTED = "INVALID_DEVICE_DETECTED" # Multi partitioned, not encrypted, etc + MULTI_DEVICE_DETECTED = "MULTI_DEVICE_DETECTED" # Not currently supported + + DEVICE_LOCKED = "DEVICE_LOCKED" # One device detected, and it's locked + DEVICE_WRITABLE = "DEVICE_WRITABLE" # One device detected, and it's unlocked (and mounted) + + ERROR_UNLOCK_LUKS = "ERROR_UNLOCK_LUKS" + ERROR_UNLOCK_GENERIC = "ERROR_UNLOCK_GENERIC" + ERROR_MOUNT = "ERROR_MOUNT" # Unlocked but not mounted + + SUCCESS_EXPORT = "SUCCESS_EXPORT" + ERROR_EXPORT = "ERROR_EXPORT" # Could not write to disk + ERROR_EXPORT_CLEANUP = "ERROR_EXPORT_CLEANUP" # If export succeeds but drives were not properly unmounted + + DEVICE_ERROR = "DEVICE_ERROR" # Something went wrong while trying to check the device diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index 23fb10145..56531aa13 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -5,22 +5,23 @@ from typing import List -from securedrop_export.export import Archive +from securedrop_export.archive import Archive from securedrop_export.exceptions import ExportException from .cli import CLI from .status import Status +from .new_status import Status as NewStatus logger = logging.getLogger(__name__) -class Service(): +class Service: - def __init__(self, submission): + def __init__(self, submission, cli=None): 
self.submission = submission - self.cli = CLI() + self.cli = cli or CLI() - def usb_test(self): + def check_connected_devices(self) -> Status: """ Check if single USB is inserted. """ @@ -32,17 +33,18 @@ def usb_test(self): num_devices = len(all_devices) if num_devices == 0: - raise ExportException(Status.LEGACY_USB_NOT_CONNECTED) + raise ExportException(sdstatus=Status.LEGACY_USB_NOT_CONNECTED) elif num_devices == 1: - status = Status.LEGACY_USB_CONNECTED + return Status.LEGACY_USB_CONNECTED elif num_devices > 1: - raise ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - except ExportException: - raise + except ExportException as ex: + # Use legacy status instead of new status values + raise ExportException(sdstatus=Status.LEGACY_ERROR_GENERIC) from ex - def disk_format_test(self): + def check_disk_format(self) -> Status: """ Check if volume is correctly formatted for export. """ @@ -51,16 +53,25 @@ def disk_format_test(self): if len(all_devices) == 1: device = self.cli.get_partitioned_device(all_devices) - if self.cli.is_luks_volume(device): - status = Status.LEGACY_USB_ENCRYPTED - else: - raise ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + if not self.cli.is_luks_volume(device): + raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + # We can support checking if a drive is already unlocked, but for + # backwards compatibility, this is the only expected status + # at this stage + return Status.LEGACY_USB_ENCRYPTED - except ExportException: - raise + except ExportException as ex: + # Return legacy status values for now for ongoing client compatibility + if ex.sdstatus in [s for s in NewStatus]: + status = self._legacy_status(ex.sdstatus) + raise ExportException(sdstatus=status) + elif ex.sdstatus: + raise + else: + raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - def export(self): + def export(self) -> Status: 
""" Export all files to target device. """ @@ -83,12 +94,37 @@ def export(self): logger.debug(f"Export submission to {mounted.mountpoint}") self.cli.write_data_to_device(self.submission.tmpdir, self.submission.target_dirname, mounted) + return Status.SUCCESS_EXPORT else: - # Another kind of drive: VeraCrypt/TC, or unsupported + # Another kind of drive: VeraCrypt/TC, or unsupported. + # For now this is an error--in future there will be support + # for additional encryption formats logger.error(f"Export failed because {device} is not supported") - raise ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) except ExportException as ex: - raise + # Return legacy status values for now for ongoing client compatibility + if ex.sdstatus in [s for s in NewStatus]: + status = self._legacy_status(ex.sdstatus) + raise ExportException(sdstatus=status) + elif ex.sdstatus: + raise + else: + raise ExportException(sdstatus=Status.LEGACY_ERROR_GENERIC) + + def _legacy_status(self, status: NewStatus): + """ + Backwards-compatibility - status values that client (@0.7.0) is expecting. 
+ """ + if status is NewStatus.ERROR_MOUNT: + return Status.LEGACY_ERROR_USB_MOUNT + elif status in [NewStatus.ERROR_EXPORT, NewStatus.ERROR_EXPORT_CLEANUP]: + return Status.LEGACY_ERROR_USB_WRITE + elif status in [NewStatus.ERROR_UNLOCK_LUKS, NewStatus.ERROR_UNLOCK_GENERIC]: + return Status.LEGACY_USB_BAD_PASSPHRASE + elif status in [NewStatus.INVALID_DEVICE_DETECTED, NewStatus.MULTI_DEVICE_DETECTED]: + return Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + else: + return Status.LEGACY_ERROR_GENERIC diff --git a/securedrop_export/disk/status.py b/securedrop_export/disk/status.py index d8472642b..e4be49d36 100644 --- a/securedrop_export/disk/status.py +++ b/securedrop_export/disk/status.py @@ -1,22 +1,21 @@ -from securedrop_export.exceptions import ExportEnum +from securedrop_export.enums import ExportEnum class Status(ExportEnum): - LEGACY_ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" - LEGACY_ERROR_EXTRACTION = "ERROR_EXTRACTION" - LEGACY_ERROR_METADATA_PARSING = "ERROR_METADATA_PARSING" - LEGACY_ERROR_ARCHIVE_METADATA = "ERROR_ARCHIVE_METADATA" LEGACY_ERROR_GENERIC = "ERROR_GENERIC" # Legacy USB preflight related - LEGACY_USB_CONNECTED = "USB_CONNECTED" + LEGACY_USB_CONNECTED = "USB_CONNECTED" # Success LEGACY_ERROR_USB_CHECK = "ERROR_USB_CHECK" # Legacy USB Disk preflight related errors - LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" + LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" # Success LEGACY_USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" # Legacy Disk export errors LEGACY_USB_BAD_PASSPHRASE = "USB_BAD_PASSPHRASE" LEGACY_ERROR_USB_MOUNT = "ERROR_USB_MOUNT" LEGACY_ERROR_USB_WRITE = "ERROR_USB_WRITE" + + # New + SUCCESS_EXPORT = "SUCCESS_EXPORT" \ No newline at end of file diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py index 5ed1dd7f4..4ae235d1f 100755 --- a/securedrop_export/entrypoint.py +++ b/securedrop_export/entrypoint.py @@ -6,7 +6,7 @@ from logging.handlers import TimedRotatingFileHandler, SysLogHandler from 
securedrop_export import __version__ -from securedrop_export import export +from securedrop_export.archive import Archive from securedrop_export import main from securedrop_export.utils import safe_mkdir @@ -57,17 +57,17 @@ def start(): configure_logging() except Exception: msg = "ERROR_LOGGING" - export.SDExport.exit_gracefully(msg) + main._exit_gracefully(None, msg) logger.info("Starting SecureDrop Export {}".format(__version__)) - my_sub = export.SDExport(sys.argv[1], CONFIG_PATH) + my_sub = Archive(sys.argv[1], CONFIG_PATH) try: # Halt immediately if target file is absent if not os.path.exists(my_sub.archive): logger.info("Archive is not found {}.".format(my_sub.archive)) msg = "ERROR_FILE_NOT_FOUND" - my_sub.exit_gracefully(msg) + main._exit_gracefully(my_sub, msg) main.__main__(my_sub) # Delete extracted achive from tempfile shutil.rmtree(my_sub.tmpdir) @@ -76,4 +76,4 @@ def start(): # the file with another application logger.error(e) msg = "ERROR_GENERIC" - my_sub.exit_gracefully(msg) + main._exit_gracefully(my_sub, msg) diff --git a/securedrop_export/enums.py b/securedrop_export/enums.py index 7396b3adf..26d7a9cb1 100644 --- a/securedrop_export/enums.py +++ b/securedrop_export/enums.py @@ -1,20 +1,9 @@ from enum import Enum -from typing import TypeVar, Type - -T = TypeVar('T', bound=ExportEnum) class ExportEnum(Enum): """ Parent class for export and print statuses. 
""" - @classmethod - def value_of(cls: Type[T], target: str) -> T: - for key, value in cls.__members__.items(): - if key == target: - return value - # Don't print the value since we don't know what it is - raise ValueError("No valid entry found for provided value") - class Command(ExportEnum): """ diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index 282844b31..b13e79d36 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -1,5 +1,4 @@ import logging - from typing import Optional from .enums import ExportEnum @@ -10,22 +9,14 @@ class ExportException(Exception): """ Base class for exceptions encountered during export. + In order to make use of additional attributes `sdstatus` and `sderror`, + pass them as keyword arguments when raising ExportException. """ def __init__(self, *args, **kwargs): super().__init__(*args) - self._status = kwargs.get("status") - - @property - def status(self) -> Optional[ExportEnum]: - try: - return ExportEnum.value_of(self._status) - except ValueError: - logger.error( - "Unexpected value passed to ExportException (ExportEnum is required)." 
- ) - pass # Don't return a status - + self.sdstatus = kwargs.get("sdstatus") + self.sderror = kwargs.get("sderror") class TimeoutException(ExportException): pass diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 27600f87d..3b198187f 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -1,88 +1,180 @@ +import os +import shutil +import platform import logging import sys +import subprocess -from securedrop_export import export -from securedrop_export.exceptions import Command, ExportStatus, PrintStatus - -from securedrop_export.disk.actions import ( - DiskTestAction, - DiskExportAction, - USBTestAction, -) +from securedrop_export.archive import Archive, Metadata +from securedrop_export.enums import Command, ExportEnum from securedrop_export.disk.service import Service as ExportService from securedrop_export.print.service import Service as PrintService +from logging.handlers import TimedRotatingFileHandler, SysLogHandler +from securedrop_export import __version__ +from securedrop_export.utils import safe_mkdir + +CONFIG_PATH = "/etc/sd-export-config.json" +DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_export") +LOG_DIR_NAME = "logs" +EXPORT_LOG_FILENAME = "export.log" + logger = logging.getLogger(__name__) +class Status(ExportEnum): + """ + Errors initializing export + """ + ERROR_LOGGING = "ERROR_LOGGING" + ERROR_GENERIC = "ERROR_GENERIC" + ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" -def __main__(submission): - submission.extract_tarball() +def start(): try: - submission.archive_metadata = export.Metadata(submission.tmpdir) + configure_logging() except Exception: - exit_gracefully(submission, ExportStatus.ERROR_METADATA_PARSING) + _exit_gracefully(submission=None, status=Status.ERROR_LOGGING) - if not submission.archive_metadata.is_valid(): - exit_gracefully(submission, ExportStatus.ERROR_ARCHIVE_METADATA) + logger.info("Starting SecureDrop Export {}".format(__version__)) + data = Archive(sys.argv[1], 
CONFIG_PATH) try: - command = Command.value_of(submission.archive_metadata.export_method) + # Halt immediately if target file is absent + if not os.path.exists(data.archive): + logger.info("Archive is not found {}.".format(data.archive)) + _exit_gracefully(data, Status.ERROR_FILE_NOT_FOUND) + + # The main event. Extract archive and either print or export to disk. + # Includes cleanup logic, which removes any temporary directories associated with + # the archive. + _extract_and_run(data) + + except Exception as e: + _exit_gracefully(data, Status.ERROR_GENERIC, e.output) + + +def _configure_logging(): + """ + All logging related settings are set up by this function. + """ + safe_mkdir(DEFAULT_HOME) + safe_mkdir(DEFAULT_HOME, LOG_DIR_NAME) + + log_file = os.path.join(DEFAULT_HOME, LOG_DIR_NAME, EXPORT_LOG_FILENAME) + + # set logging format + log_fmt = "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s" + formatter = logging.Formatter(log_fmt) + + handler = TimedRotatingFileHandler(log_file) + handler.setFormatter(formatter) + + # For rsyslog handler + if platform.system() != "Linux": # pragma: no cover + syslog_file = "/var/run/syslog" + else: + syslog_file = "/dev/log" - if command is Command.START_VM: - # No further operations - exit_gracefully(submission, command) + sysloghandler = SysLogHandler(address=syslog_file) + sysloghandler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + + # set up primary log + log = logging.getLogger() + log.setLevel(logging.DEBUG) + log.addHandler(handler) + # add the second logger + log.addHandler(sysloghandler) + + +def _extract_and_run(submission: Archive): + """ + Extract tarball and metadata and run appropriate command + based on metadata instruction. 
+ """ + status = Status.ERROR_GENERIC + stacktrace = None + + try: + submission.extract_tarball() + + # Validates metadata and ensures requested action is supported + submission.archive_metadata = Metadata.create_and_validate(submission.tmpdir) + + # If we just wanted to start the VM, our work here is done + if submission.archive_metadata.command is Command.START_VM: + _exit_gracefully(submission) else: - status = None - try: - if command in Command.printer_actions(): - service = ExportService(submission) - status = service.run(command) + status = _start_service(submission, command) + + except ExportException as ex: + status = ex.sdstatus + stacktrace = ex.output - elif command in Command.export_actions(): - service = PrintService(submission) - status = service.run(command) + except Exception as exc: + # All exceptions are wrapped in ExportException, but we are being cautious + logger.error("Encountered exception during export, exiting") + status = Status.ERROR_GENERIC + stacktrace = exc.output + + finally: + _exit_gracefully(submission, status, stacktrace) - except ExportException as ex: - if ex.status: - status = ex.status - finally: - exit_gracefully(submission, status) +def _start_service(submission: Archive, cmd: Command) -> Status: + """ + Start print or export routine. 
+ """ + if cmd in Command.printer_actions(): + service = PrintService(submission) - except ValueError: - # An unsupported command was sent from the calling VM - logger.error("Unsuported command, exiting") - exit_gracefully(submission) + if cmd is Commmand.PRINTER: + return service.print() + elif cmd is Commmand.PRINTER_TEST: + return service.printer_preflight() + elif cmd is Commmand.PRINTER_TEST: + return service.printer_test() + elif cmd in Command.export_actions(): + service = ExportService(submission) -def exit_gracefully(submission: SDExport, status: Status=None, e=None): + if cmd is Commmand.EXPORT: + return service.export() + elif cmd is Commmand.CHECK_USBS: + return service.check_connected_devices() + elif cmd is Commmand.CHECK_VOLUME: + return service.checK_disk_format() + + +def _exit_gracefully(submission: Archive, status: Status=None, e=None): """ Utility to print error messages, mostly used during debugging, then exits successfully despite the error. Always exits 0, since non-zero exit values will cause system to try alternative solutions for mimetype handling, which we want to avoid. 
""" - logger.info("Exiting with message: {}".format(msg)) + logger.info(f"Exiting with status: {status.value}") if e: logger.error("Captured exception output: {}".format(e.output)) try: # If the file archive was extracted, delete before returning - if os.path.isdir(submission.tmpdir): + if submission and os.path.isdir(submission.tmpdir): shutil.rmtree(submission.tmpdir) # Do this after deletion to avoid giving the client two error messages in case of the # block above failing - write_status(status) + _write_status(status) except Exception as ex: logger.error("Unhandled exception: {}".format(ex)) - write_status(ExportStatus.LEGACY_ERROR_GENERIC) - # exit with 0 return code otherwise the os will attempt to open - # the file with another application - sys.exit(0) + _write_status(Status.ERROR_GENERIC) + finally: + # exit with 0 return code otherwise the os will attempt to open + # the file with another application + sys.exit(0) -def _write_status(self, status: Status): +def _write_status(status: Status): """ Write string to stderr. 
""" @@ -91,3 +183,4 @@ def _write_status(self, status: Status): sys.stderr.write("\n") else: logger.info("No status value supplied") + diff --git a/securedrop_export/print/service.py b/securedrop_export/print/service.py index 98af6a0b1..f36045c1d 100644 --- a/securedrop_export/print/service.py +++ b/securedrop_export/print/service.py @@ -3,9 +3,9 @@ import signal import subprocess import time -from enum import Enum -from .exceptions import ExportStatus, handler, TimeoutException +from securedrop_export.exceptions import handler, TimeoutException, ExportException +from securedrop_export.utils import safe_check_call from .status import Status PRINTER_NAME = "sdw-printer" @@ -18,7 +18,7 @@ logger = logging.getLogger(__name__) -class Service(): +class Service: """ Printer service """ @@ -28,7 +28,7 @@ def __init__(self, submission): self.printer_name = PRINTER_NAME self.printer_wait_timeout = PRINTER_WAIT_TIMEOUT - def print(self): + def print(self) -> Status: """ Routine to print all files. Throws ExportException if an error is encountered. @@ -36,8 +36,9 @@ def print(self): logger.info("Printing all files from archive") self._check_printer_setup() self._print_all_files() + return Status.PRINT_SUCCESS - def printer_preflight(self): + def printer_preflight(self) -> Status: """ Routine to perform preflight printer testing. @@ -45,8 +46,9 @@ def printer_preflight(self): """ logger.info("Running printer preflight") self._check_printer_setup() + return Status.PREFLIGHT_SUCCESS - def printer_test(self): + def printer_test(self) -> Status: """ Routine to print a test page. 
@@ -55,6 +57,7 @@ def printer_test(self): logger.info("Printing test page") self._check_printer_setup() self._print_test_page() + return Status.PRINT_SUCCESS def _wait_for_print(self): """ @@ -74,10 +77,10 @@ def _wait_for_print(self): else: time.sleep(5) except subprocess.CalledProcessError: - raise ExportException(Status.ERROR_PRINT) + raise ExportException(sdstatus=Status.ERROR_PRINT) except TimeoutException: logger.error("Timeout waiting for printer {}".format(self.printer_name)) - raise ExportException(Status.ERROR_PRINT) + raise ExportException(sdstatus=Status.ERROR_PRINT) return True def _check_printer_setup(self) -> None: @@ -91,25 +94,25 @@ def _check_printer_setup(self) -> None: printers = [x for x in output.decode("utf-8").split() if "usb://" in x] if not printers: logger.info("No usb printers connected") - raise ExportException(Status.ERROR_PRINTER_NOT_FOUND) + raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_FOUND) supported_printers = [ p for p in printers if any(sub in p for sub in ("Brother", "LaserJet")) ] if not supported_printers: logger.info("{} are unsupported printers".format(printers)) - raise ExportException(Status.ERROR_PRINTER_NOT_SUPPORTED) + raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_SUPPORTED) if len(supported_printers) > 1: logger.info("Too many usb printers connected") - raise ExportException(Status.ERROR_MULTIPLE_PRINTERS_FOUND) + raise ExportException(sdstatus=Status.ERROR_MULTIPLE_PRINTERS_FOUND) printer_uri = printers[0] printer_ppd = self._install_printer_ppd(printer_uri) self.setup_printer(printer_uri, printer_ppd) except subprocess.CalledProcessError as e: logger.error(e) - raise ExportException(Status.ERROR_GENERIC) + raise ExportException(sdstatus=Status.ERROR_GENERIC) def _get_printer_uri(self) -> str: """ @@ -121,7 +124,7 @@ def _get_printer_uri(self) -> str: try: output = subprocess.check_output(["sudo", "lpinfo", "-v"]) except subprocess.CalledProcessError: - raise 
ExportException(Status.ERROR_PRINTER_URI) + raise ExportException(sdstatus=Status.ERROR_PRINTER_URI) # fetch the usb printer uri for line in output.split(): @@ -133,19 +136,19 @@ def _get_printer_uri(self) -> str: if printer_uri == "": # No usb printer is connected logger.info("No usb printers connected") - raise ExportException(Status.ERROR_PRINTER_NOT_FOUND) + raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_FOUND) elif not any(x in printer_uri for x in ("Brother", "LaserJet")): # printer url is a make that is unsupported logger.info("Printer {} is unsupported".format(printer_uri)) - raise ExportException(Status.ERROR_PRINTER_NOT_SUPPORTED) + raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_SUPPORTED) logger.info("Printer {} is supported".format(printer_uri)) return printer_uri - def install_printer_ppd(self, uri): + def _install_printer_ppd(self, uri): if not any(x in uri for x in ("Brother", "LaserJet")): logger.error("Cannot install printer ppd for unsupported printer: {}".format(uri)) - raise ExportException(Status.ERROR_PRINTER_NOT_SUPPORTED) + raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_SUPPORTED) return if "Brother" in uri: @@ -158,7 +161,7 @@ def install_printer_ppd(self, uri): # Compile and install drivers that are not already installed if not os.path.exists(printer_ppd): logger.info("Installing printer drivers") - self._safe_check_call( + safe_check_call( command=[ "sudo", "ppdc", @@ -166,7 +169,7 @@ def install_printer_ppd(self, uri): "-d", "/usr/share/cups/model/", ], - error_message=Status.ERROR_PRINTER_DRIVER_UNAVAILABLE.value, + error_status=Status.ERROR_PRINTER_DRIVER_UNAVAILABLE, ignore_stderr_startswith=b"ppdc: Warning", ) @@ -175,7 +178,7 @@ def install_printer_ppd(self, uri): def _setup_printer(self, printer_uri, printer_ppd): # Add the printer using lpadmin logger.info("Setting up printer {}".format(self.printer_name)) - self._safe_check_call( + safe_check_call( command=[ "sudo", "lpadmin", @@ -189,7 +192,7 @@ def 
_setup_printer(self, printer_uri, printer_ppd): "-u", "allow:user", ], - error_message=Status.ERROR_PRINTER_INSTALL.value, + error_status=Status.ERROR_PRINTER_INSTALL, ignore_stderr_startswith=b"lpadmin: Printer drivers", ) @@ -228,41 +231,21 @@ def _is_open_office_file(self, filename): def _print_file(self, file_to_print): # If the file to print is an (open)office document, we need to call unoconf to # convert the file to pdf as printer drivers do not support this format - if self.is_open_office_file(file_to_print): + if self._is_open_office_file(file_to_print): logger.info("Converting Office document to pdf") folder = os.path.dirname(file_to_print) converted_filename = file_to_print + ".pdf" converted_path = os.path.join(folder, converted_filename) - self.safe_check_call( + safe_check_call( command=["unoconv", "-o", converted_path, file_to_print], - error_message=Status.ERROR_PRINT.value, + error_status=Status.ERROR_PRINT, ) file_to_print = converted_path logger.info("Sending file to printer {}".format(self.printer_name)) - - # todo - self._safe_check_call( + + safe_check_call( command=["xpp", "-P", self.printer_name, file_to_print], - error_message=Status.ERROR_PRINT.value, + error_status=Status.ERROR_PRINT, ) - - def _safe_check_call(self, command: str, status: Status, ignore_stderr_startswith=None): - """ - Safely wrap subprocess.check_output to ensure we always return 0 and - log the error messages - """ - try: - err = subprocess.run(command, check=True, capture_output=True).stderr - # ppdc and lpadmin may emit warnings we are aware of which should not be treated as - # user facing errors - if ignore_stderr_startswith and err.startswith(ignore_stderr_startswith): - logger.info("Encountered warning: {}".format(err.decode("utf-8"))) - elif err == b"": - # Nothing on stderr and returncode is 0, we're good - pass - else: - raise ExportException(status=status, e=err) - except subprocess.CalledProcessError as ex: - raise ExportException(status=status, e=ex.output) 
diff --git a/securedrop_export/print/status.py b/securedrop_export/print/status.py index 86e3bf7ff..a8c5221de 100644 --- a/securedrop_export/print/status.py +++ b/securedrop_export/print/status.py @@ -1,4 +1,4 @@ -from securedrop_export.exceptions import ExportEnum +from securedrop_export.enums import ExportEnum class Status(ExportEnum): @@ -11,3 +11,8 @@ class Status(ExportEnum): # Printer export errors ERROR_PRINT = "ERROR_PRINT" + + # New + PREFLIGHT_SUCCESS = "PRINTER_PREFLIGHT_SUCCESS" + TEST_SUCCESS = "PRINTER_TEST_SUCCESS" + PRINT_SUCCESS = "PRINTER_SUCCESS" diff --git a/securedrop_export/utils.py b/securedrop_export/utils.py index a60eca09f..9de1cf979 100644 --- a/securedrop_export/utils.py +++ b/securedrop_export/utils.py @@ -2,6 +2,14 @@ import tarfile from pathlib import Path from typing import Optional, Union +import subprocess +import logging + +from securedrop_export.enums import ExportEnum as Status +from securedrop_export.exceptions import ExportException + + +logger = logging.getLogger(__name__) def safe_mkdir( @@ -23,10 +31,10 @@ def safe_mkdir( if not base_path.is_absolute(): raise ValueError(f"Base directory '{base_path}' must be an absolute path") - check_path_traversal(base_path) + _check_path_traversal(base_path) if relative_path: - check_path_traversal(relative_path) + _check_path_traversal(relative_path) full_path = base_path.joinpath(relative_path) else: full_path = base_path @@ -35,7 +43,7 @@ def safe_mkdir( # # Note: We do not use parents=True because the parent directories will not be created with the # specified mode. Parents are created using system default permissions, which we modify to be - # 700 via os.umask in the SDExport contructor. Creating directories one-by-one with mode=0o0700 + # 700 via os.umask in the Archive contructor. Creating directories one-by-one with mode=0o0700 # is not necessary but adds defense in depth. 
relative_path = relative_filepath(full_path, base_path) for parent in reversed(relative_path.parents): @@ -45,7 +53,7 @@ def safe_mkdir( full_path.mkdir(mode=0o0700, exist_ok=True) # Check permissions after creating the directories - check_all_permissions(relative_path, base_path) + _check_all_permissions(relative_path, base_path) def safe_extractall(archive_file_path: str, dest_path: str) -> None: @@ -65,14 +73,14 @@ def safe_extractall(archive_file_path: str, dest_path: str) -> None: for file_info in tar.getmembers(): file_info.mode = 0o700 if file_info.isdir() else 0o600 - check_path_traversal(file_info.name) + _check_path_traversal(file_info.name) # If the path is relative then we don't need to check that it resolves to dest_path if Path(file_info.name).is_absolute(): relative_filepath(file_info.name, dest_path) if file_info.islnk() or file_info.issym(): - check_path_traversal(file_info.linkname) + _check_path_traversal(file_info.linkname) # If the path is relative then we don't need to check that it resolves to dest_path if Path(file_info.linkname).is_absolute(): relative_filepath(file_info.linkname, dest_path) @@ -92,7 +100,7 @@ def relative_filepath(filepath: Union[str, Path], base_dir: Union[str, Path]) -> return Path(filepath).resolve().relative_to(base_dir) -def check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: +def _check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: """ Raise ValueError if filename_or_filepath does any path traversal. This works on filenames, relative paths, and absolute paths. 
@@ -116,7 +124,7 @@ def check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: raise ValueError(f"Unsafe file or directory name: '{filename_or_filepath}'") -def check_all_permissions(path: Union[str, Path], base_path: Union[str, Path]) -> None: +def _check_all_permissions(path: Union[str, Path], base_path: Union[str, Path]) -> None: """ Check that the permissions of each directory between base_path and path are set to 700. """ @@ -126,16 +134,16 @@ def check_all_permissions(path: Union[str, Path], base_path: Union[str, Path]) - return Path(full_path).chmod(0o700) - check_dir_permissions(full_path) + _check_dir_permissions(full_path) relative_path = relative_filepath(full_path, base_path) for parent in relative_path.parents: full_path = base_path.joinpath(parent) Path(full_path).chmod(0o700) - check_dir_permissions(str(full_path)) + _check_dir_permissions(str(full_path)) -def check_dir_permissions(dir_path: Union[str, Path]) -> None: +def _check_dir_permissions(dir_path: Union[str, Path]) -> None: """ Check that a directory has ``700`` as the final 3 bytes. Raises a ``RuntimeError`` otherwise. """ @@ -144,3 +152,24 @@ def check_dir_permissions(dir_path: Union[str, Path]) -> None: masked = stat_res & 0o777 if masked & 0o077: raise RuntimeError("Unsafe permissions ({}) on {}".format(oct(stat_res), dir_path)) + + + +def safe_check_call(command: str, error_status: Status, ignore_stderr_startswith=None): + """ + Wrap subprocess.check_output to ensure we wrap CalledProcessError and return + our own exception, and log the error messages. 
+ """ + try: + err = subprocess.run(command, check=True, capture_output=True).stderr + # ppdc and lpadmin may emit warnings we are aware of which should not be treated as + # user facing errors + if ignore_stderr_startswith and err.startswith(ignore_stderr_startswith): + logger.info("Encountered warning: {}".format(err.decode("utf-8"))) + elif err == b"": + # Nothing on stderr and returncode is 0, we're good + pass + else: + raise ExportException(sdstatus=error_status, sderror=err) + except subprocess.CalledProcessError as ex: + raise ExportException(sdstatus=error_status, sderror=ex.output) \ No newline at end of file diff --git a/tests/disk/test_actions.py b/tests/disk/test_actions.py deleted file mode 100644 index 099836773..000000000 --- a/tests/disk/test_actions.py +++ /dev/null @@ -1,149 +0,0 @@ -import pytest -from unittest import mock - -import os -import pytest -import sys -import tempfile - -import subprocess -from subprocess import CalledProcessError - -from securedrop_export.disk.exceptions import ExportException -from securedrop_export.disk.status import Status - -from securedrop_export import export -from securedrop_export.disk.actions import DiskExportAction, DiskTestAction, USBTestAction - -TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") -SAMPLE_OUTPUT_LSBLK_NO_PART = b"disk\ncrypt" # noqa -SAMPLE_OUTPUT_LSBLK_ONE_PART = b"disk\npart\ncrypt" # noqa -SAMPLE_OUTPUT_LSBLK_MULTI_PART = b"disk\npart\npart\npart\ncrypt" # noqa -SAMPLE_OUTPUT_USB = b"/dev/sda" # noqa - - -class TestExportAction: - def _setup_submission(self) -> export.SDExport: - """ - Helper method to set up stub export object - """ - submission = export.SDExport("testfile", TEST_CONFIG) - temp_folder = tempfile.mkdtemp() - metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) - with open(metadata, "w") as f: - f.write('{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}') - - submission.archive_metadata = 
export.Metadata(temp_folder) - - return submission - - @mock.patch("sys.exit") - @mock.patch("securedrop_export.disk.actions.CLI") - def test_run_usbtestaction(self, mock_cli, mock_sys,): - - mock_cli.write_status = mock.MagicMock() - usb = USBTestAction(self._setup_submission()) - - usb.run() - mock_cli.write_status.assert_called_once_with(Status.LEGACY_USB_CONNECTED) - - - @mock.patch("securedrop_export.disk.actions.CLI") - def test_run_usbtestaction_error(self, mock_cli, capsys): - mock_cli.get_connected_devices.side_effect = ExportException(Status.LEGACY_ERROR_USB_CHECK) - usb = USBTestAction(self._setup_submission()) - - mock_cli.write_status = mock.MagicMock() - - usb.run() - mock_cli.write_status.assert_called_once_with(Status.LEGACY_ERROR_USB_CHECK) - - @mock.patch("sys.exit") - @mock.patch("securedrop_export.disk.actions.CLI") - def test_run_disktestaction(self, mock_sys, mock_cli): - - mock_cli.is_luks_volume.return_value=True - mock_cli.write_status = mock.MagicMock() - - test_export = DiskTestAction(self._setup_submission()) - test_export.run() - - mock_cli.write_status.assert_called_once_with(Status.SUCCESS_EXPORT) - - @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LSBLK_NO_PART) - @mock.patch("subprocess.check_call", return_value=0) - def test_luks_precheck_encrypted_fde(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskExportAction(submission) - - command_output = mock.MagicMock() - command_output.stderr = b"" - mocker.patch("subprocess.run", return_value=command_output) - - expected_message = Status.LEGACY_USB_ENCRYPTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - @mock.patch("sys.exit") - @mock.patch("securedrop_export.disk.actions.CLI") - def test_run_disktestaction_error(self, mock_cli, mocker): - mock_cli.patch("get_partitioned_device", side_effect=ExportException(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED)) - - status_mock = 
mock_cli.patch("write_status") - test_export = DiskTestAction(self._setup_submission()) - test_export.run() - status_mock.assert_called_once_with(Status.LEGACY_ERROR_USB_WRITE) - - @mock.patch("sys.exit") - @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LSBLK_ONE_PART) - @mock.patch("subprocess.check_call", return_value=0) - def test_luks_precheck_encrypted_single_part(mocked_call, mock_output, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = DiskTestAction(submission) - action.device = "/dev/sda" - expected_message = Status.LEGACY_USB_ENCRYPTED.value - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - - command_output = mock.MagicMock() - command_output.stderr = b"" - mocker.patch("subprocess.run", return_value=command_output) - - action.run() - - @mock.patch("sys.exit") - @mock.patch("securedrop_export.disk.actions.CLI") - def test_run_diskexportaction(self, mock_cli, mock_sys): - - mock_cli.patch("is_luks_volume", return_value=True) - status_mock = mock_cli.patch("write_status") - - test_export = DiskExportAction(self._setup_submission()) - test_export.run() - - status_mock.assert_called_once_with(Status.SUCCESS_EXPORT) - - @mock.patch("sys.exit") - @mock.patch("securedrop_export.disk.actions.CLI") - def test_run_diskexportaction_disk_not_supported(self, mock_cli, mock_sys): - - mock_cli.patch("get_partitioned_device") - mock_cli.patch("is_luks_volume", return_value=False) - status_mock = mock_cli.patch("write_status") - - test_export = DiskExportAction(self._setup_submission()) - test_export.run() - - status_mock.assert_called_once_with(Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - - @mock.patch("sys.exit") - @mock.patch("securedrop_export.disk.actions.CLI") - def test_run_diskexportaction_not_supported(self, mock_sys, mock_cli): - - status_mock = mock_cli.patch("write_status") - mock_cli.patch("get_partitioned_device") - mock_cli.is_luks_volume.return_value=True - 
mock_cli.write_data_to_device.side_effect = Status.LEGACY_ERROR_USB_WRITE - - test_export = DiskExportAction(self._setup_submission()) - test_export.run() - - status_mock.assert_called_once_with(Status.LEGACY_ERROR_USB_WRITE) diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index 7560630f7..a26bb70f4 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -7,12 +7,13 @@ import subprocess +from securedrop_export.enums import ExportEnum from securedrop_export.disk.cli import CLI from securedrop_export.disk.volume import EncryptionScheme, Volume -from securedrop_export.disk.exceptions import ExportException +from securedrop_export.exceptions import ExportException from securedrop_export.disk.status import Status -from securedrop_export import export +from securedrop_export.archive import Archive TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") @@ -214,9 +215,10 @@ def test_unlock_luks_volume_luksOpen_exception(self, mocked_subprocess): with pytest.raises(ExportException): cli.unlock_luks_volume(pd, key) + @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=b"\n") @mock.patch("subprocess.check_call", return_value=0) - def test_mount_volume(self, mocked_output, mocked_call): + def test_mount_volume(self, mocked_output, mocked_call, mocked_path): cli = CLI() vol = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -225,9 +227,10 @@ def test_mount_volume(self, mocked_output, mocked_call): ) result = cli.mount_volume(vol) + @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=b"/dev/pretend/luks-id-123456\n") @mock.patch("subprocess.check_call", return_value=0) - def test_mount_volume_already_mounted(self, mocked_output, mocked_call): + def test_mount_volume_already_mounted(self, mocked_output, mocked_call, mocked_path): cli = CLI() md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -236,9 +239,10 @@ def 
test_mount_volume_already_mounted(self, mocked_output, mocked_call): ) result = cli.mount_volume(md) + @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=b"\n") @mock.patch("subprocess.check_call", return_value=0) - def test_mount_volume_mkdir(self, mocked_output, mocked_subprocess): + def test_mount_volume_mkdir(self, mocked_output, mocked_subprocess, mocked_path): cli = CLI() md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -339,7 +343,7 @@ def test_write_to_disk(self, mock_check_call): encryption=EncryptionScheme.LUKS, ) - submission = export.SDExport("testfile", TEST_CONFIG) + submission = Archive("testfile", TEST_CONFIG) cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) @@ -354,23 +358,9 @@ def test_write_to_disk_error_still_does_cleanup(self, mock_call, mocker): mountpoint=cli._DEFAULT_MOUNTPOINT, encryption=EncryptionScheme.LUKS, ) - submission = export.SDExport("testfile", TEST_CONFIG) + submission = Archive("testfile", TEST_CONFIG) with pytest.raises(ExportException): cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) cleanup_mock.assert_called_once() - @pytest.mark.parametrize("status", [s for s in Status]) - def test_write_status(self, status, capsys): - cli = CLI() - - cli.write_status(status) - captured = capsys.readouterr() - assert captured.out == status.value + "\n" - - @pytest.mark.parametrize("invalid_status", ["foo", ";ls", "&& echo 0"]) - def test_write_status_error(self, invalid_status, capsys): - cli = CLI() - - with pytest.raises(ValueError): - cli.write_status(Status.value_of(invalid_status)) diff --git a/tests/disk/test_service.py b/tests/disk/test_service.py new file mode 100644 index 000000000..9a418593b --- /dev/null +++ b/tests/disk/test_service.py @@ -0,0 +1,137 @@ +import pytest +from unittest import mock + +import os +import pytest +import sys +import tempfile + +import subprocess +from subprocess import CalledProcessError + 
+from securedrop_export.enums import ExportEnum +from securedrop_export.exceptions import ExportException +from securedrop_export.disk.status import Status +from securedrop_export.disk.new_status import Status as NewStatus +from securedrop_export.disk.volume import Volume, EncryptionScheme + +from securedrop_export.archive import Archive, Metadata +from securedrop_export.disk.service import Service +from securedrop_export.disk.cli import CLI + +TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") +SAMPLE_OUTPUT_LSBLK_NO_PART = b"disk\ncrypt" # noqa +SAMPLE_OUTPUT_USB = "/dev/sda" # noqa +SAMPLE_OUTPUT_USB_PARTITIONED = "/dev/sda1" + +class TestExportService: + + @classmethod + def setup_class(cls): + cls.mock_cli = mock.MagicMock(CLI) + cls.mock_submission = cls._setup_submission() + + cls.mock_luks_volume_unmounted = Volume(device_name=SAMPLE_OUTPUT_USB, mapped_name="fake-luks-id-123456", encryption=EncryptionScheme.LUKS) + cls.mock_luks_volume_mounted = Volume(device_name=SAMPLE_OUTPUT_USB, mapped_name="fake-luks-id-123456", mountpoint="/media/usb", encryption=EncryptionScheme.LUKS) + + cls.service = Service(cls.mock_submission, cls.mock_cli) + + @classmethod + def teardown_class(cls): + cls.mock_cli = None + cls.mock_submission = None + cls.service = None + + @classmethod + def _setup_submission(cls) -> Archive: + """ + Helper method to set up sample archive + """ + submission = Archive("testfile", TEST_CONFIG) + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write('{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}') + + submission.archive_metadata = Metadata.create_and_validate(temp_folder) + + return submission + + def setup_method(self, method): + """ + By default, mock CLI will return the "happy path" of a correctly-formatted LUKS drive. 
+ Override this behaviour in the target method as required, for example to simulate CLI + errors. `teardown_method()` will reset the side effects so they do not affect subsequent + test methods. + """ + self.mock_cli.get_connected_devices.return_value = [SAMPLE_OUTPUT_USB] + self.mock_cli.get_partitioned_device.return_value = SAMPLE_OUTPUT_USB_PARTITIONED + self.mock_cli.get_luks_volume.return_value = self.mock_luks_volume_unmounted + self.mock_cli.mount_volume.return_value = self.mock_luks_volume_mounted + + def teardown_method(self, method): + self.mock_cli.reset_mock(return_value=True, side_effect=True) + + def test_check_usb(self): + status = self.service.check_connected_devices() + + assert status is Status.LEGACY_USB_CONNECTED + + def test_check_usb_error_no_devices(self): + self.mock_cli.get_connected_devices.side_effect = ExportException(sdstatus=NewStatus.NO_DEVICE_DETECTED) + + with pytest.raises(ExportException) as ex: + self.service.check_connected_devices() + + assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC + + def test_check_usb_error_multiple_devices(self): + self.mock_cli.get_connected_devices.side_effect = ExportException(sdstatus=NewStatus.MULTI_DEVICE_DETECTED) + + with pytest.raises(ExportException) as ex: + self.service.check_connected_devices() + + assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC + + def test_check_usb_error_while_checking(self): + self.mock_cli.get_connected_devices.side_effect = ExportException(sdstatus=Status.LEGACY_ERROR_USB_CHECK) + + with pytest.raises(ExportException) as ex: + self.service.check_connected_devices() + + assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC + + def test_check_disk_format(self): + status = self.service.check_disk_format() + + assert status is Status.LEGACY_USB_ENCRYPTED + + def test_check_disk_format_error(self): + self.mock_cli.get_partitioned_device.side_effect=ExportException(sdstatus=NewStatus.INVALID_DEVICE_DETECTED) + + with pytest.raises(ExportException) as ex: + 
self.service.check_disk_format() + + # We still return the legacy status for now + assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + + def test_export(self): + status = self.service.export() + assert status is Status.SUCCESS_EXPORT + + def test_export_disk_not_supported(self): + self.mock_cli.is_luks_volume.return_value = False + + with pytest.raises(ExportException) as ex: + self.service.export() + + assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + + def test_export_write_error(self): + self.mock_cli.is_luks_volume.return_value=True + self.mock_cli.write_data_to_device.side_effect = ExportException(sdstatus=Status.LEGACY_ERROR_USB_WRITE) + + with pytest.raises(ExportException) as ex: + self.service.export() + + assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_WRITE diff --git a/tests/print/test_actions.py b/tests/print/test_actions.py deleted file mode 100644 index 17c3397fb..000000000 --- a/tests/print/test_actions.py +++ /dev/null @@ -1,134 +0,0 @@ -from unittest import mock - -import os -import pytest -from subprocess import CalledProcessError -import sys - -from securedrop_export import export -from securedrop_export.print.actions import PrintExportAction - - -SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa -SAMPLE_OUTPUT_BROTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa -SAMPLE_OUTPUT_LASERJET_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000\nnetwork lpd" # noqa -TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) -def 
test_get_good_printer_uri_laserjet(mocked_call): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - - result = action.get_printer_uri() - - assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) -def test_get_good_printer_uri_brother(mocked_call): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - - result = action.get_printer_uri() - assert result == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" - - -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) -def test_get_bad_printer_uri(mocked_call, capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - expected_message = "ERROR_PRINTER_NOT_FOUND" - assert export.ExportStatus.ERROR_PRINTER_NOT_FOUND.value == expected_message - mocked_exit = mocker.patch.object( - submission, "exit_gracefully", side_effect=lambda x: sys.exit(0) - ) - - with pytest.raises(SystemExit): - action.get_printer_uri() - - mocked_exit.assert_called_once_with(expected_message) - - -@pytest.mark.parametrize( - "open_office_paths", - [ - "/tmp/whatver/thisisadoc.doc" - "/home/user/Downloads/thisisadoc.xlsx" - "/home/user/Downloads/file.odt" - "/tmp/tmpJf83j9/secret.pptx" - ], -) -def test_is_open_office_file(capsys, open_office_paths): - submission = export.SDExport("", TEST_CONFIG) - action = PrintExportAction(submission) - assert action.is_open_office_file(open_office_paths) - - -@pytest.mark.parametrize( - "open_office_paths", - [ - "/tmp/whatver/thisisadoc.doccc" - "/home/user/Downloads/thisisa.xlsx.zip" - "/home/user/Downloads/file.odz" - "/tmp/tmpJf83j9/secret.gpg" - ], -) -def test_is_not_open_office_file(capsys, open_office_paths): - submission = export.SDExport("", TEST_CONFIG) - action = PrintExportAction(submission) - assert not 
action.is_open_office_file(open_office_paths) - - -@mock.patch("subprocess.run") -def test_install_printer_ppd_laserjet(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - ppd = action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") - assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" - - -@mock.patch("subprocess.run") -def test_install_printer_ppd_brother(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - ppd = action.install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") - assert ppd == "/usr/share/cups/model/br7030.ppd" - - -def test_install_printer_ppd_error_no_driver(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) - - action.install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") - - assert mocked_exit.mock_calls[0][2]["msg"] == "ERROR_PRINTER_DRIVER_UNAVAILABLE" - assert mocked_exit.mock_calls[0][2]["e"] is None - - -def test_install_printer_ppd_error_not_supported(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) - - action.install_printer_ppd("usb://Not/Supported?serial=A00000A000000") - - assert mocked_exit.mock_calls[0][2]["msg"] == "ERROR_PRINTER_NOT_SUPPORTED" - - -def test_setup_printer_error(mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - action = PrintExportAction(submission) - mocked_exit = mocker.patch.object(submission, "exit_gracefully", return_value=0) - mocker.patch("subprocess.run", 
side_effect=CalledProcessError(1, "run")) - - action.setup_printer( - "usb://Brother/HL-L2320D%20series?serial=A00000A000000", - "/usr/share/cups/model/br7030.ppd", - ) - - assert mocked_exit.mock_calls[0][2]["msg"] == "ERROR_PRINTER_INSTALL" - assert mocked_exit.mock_calls[0][2]["e"] is None diff --git a/tests/print/test_service.py b/tests/print/test_service.py new file mode 100644 index 000000000..569ecdd2c --- /dev/null +++ b/tests/print/test_service.py @@ -0,0 +1,130 @@ +from unittest import mock + +import os +import pytest +from subprocess import CalledProcessError +import sys + +from securedrop_export.exceptions import ExportException +from securedrop_export.archive import Archive +from securedrop_export.print.service import Service +from securedrop_export.print.service import Status + + +SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa +SAMPLE_OUTPUT_BROTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa +SAMPLE_OUTPUT_LASERJET_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000\nnetwork lpd" # noqa +TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) +def test_get_good_printer_uri_laserjet(mocked_call): + submission = Archive("testfile", TEST_CONFIG) + service = Service(submission) + + result = service._get_printer_uri() + + assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) +def test_get_good_printer_uri_brother(mocked_call): + submission = Archive("testfile", TEST_CONFIG) + service = 
Service(submission) + + result = service._get_printer_uri() + assert result == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + + +@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) +def test_get_bad_printer_uri(mocked_call, capsys, mocker): + submission = Archive("testfile", TEST_CONFIG) + service = Service(submission) + expected_status = Status.ERROR_PRINTER_NOT_FOUND # todo + + with pytest.raises(ExportException): + service._get_printer_uri() + + +@pytest.mark.parametrize( + "open_office_paths", + [ + "/tmp/whatver/thisisadoc.doc" + "/home/user/Downloads/thisisadoc.xlsx" + "/home/user/Downloads/file.odt" + "/tmp/tmpJf83j9/secret.pptx" + ], +) +def test_is_open_office_file(capsys, open_office_paths): + submission = Archive("", TEST_CONFIG) + service = Service(submission) + assert service._is_open_office_file(open_office_paths) + + +@pytest.mark.parametrize( + "open_office_paths", + [ + "/tmp/whatver/thisisadoc.doccc" + "/home/user/Downloads/thisisa.xlsx.zip" + "/home/user/Downloads/file.odz" + "/tmp/tmpJf83j9/secret.gpg" + ], +) +def test_is_not_open_office_file(capsys, open_office_paths): + submission = Archive("", TEST_CONFIG) + service = Service(submission) + assert not service._is_open_office_file(open_office_paths) + + +@mock.patch("subprocess.run") +def test_install_printer_ppd_laserjet(mocker): + submission = Archive("testfile", TEST_CONFIG) + service = Service(submission) + ppd = service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") + assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" + + +@mock.patch("subprocess.run") +def test_install_printer_ppd_brother(mocker): + submission = Archive("testfile", TEST_CONFIG) + service = Service(submission) + ppd = service._install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") + assert ppd == "/usr/share/cups/model/br7030.ppd" + + +def test_install_printer_ppd_error_no_driver(mocker): + submission = Archive("testfile", 
TEST_CONFIG) + service = Service(submission) + + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) + + with pytest.raises(ExportException) as ex: + service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") + + assert ex.value.sdstatus is Status.ERROR_PRINTER_DRIVER_UNAVAILABLE + + +def test_install_printer_ppd_error_not_supported(mocker): + submission = Archive("testfile", TEST_CONFIG) + service = Service(submission) + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) + + with pytest.raises(ExportException) as ex: + service._install_printer_ppd("usb://Not/Supported?serial=A00000A000000") + + assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED + + +def test_setup_printer_error(mocker): + submission = Archive("testfile", TEST_CONFIG) + service = Service(submission) + + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) + + with pytest.raises(ExportException) as ex: + service._setup_printer( + "usb://Brother/HL-L2320D%20series?serial=A00000A000000", + "/usr/share/cups/model/br7030.ppd", + ) + + assert ex.value.sdstatus is Status.ERROR_PRINTER_INSTALL \ No newline at end of file diff --git a/tests/test_export.py b/tests/test_archive.py similarity index 78% rename from tests/test_export.py rename to tests/test_archive.py index 4b596c9a7..0437e94ff 100644 --- a/tests/test_export.py +++ b/tests/test_archive.py @@ -9,7 +9,8 @@ import tarfile from io import BytesIO -from securedrop_export import export +from securedrop_export.exceptions import ExportException +from securedrop_export.archive import Archive, Metadata, Status TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") @@ -46,7 +47,7 @@ def test_extract_tarball(): archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) assert 
oct(os.stat(submission.tmpdir).st_mode) == "0o40700" submission.extract_tarball() @@ -58,7 +59,7 @@ def test_extract_tarball(): # Subdirectories that are added as members are extracted with 700 permissions assert oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" # Subdirectories that are not added as members are extracted with 700 permissions - # because os.umask(0o077) is set in the SDExport constructor. + # because os.umask(0o077) is set in the Archive constructor. assert oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) == "0o40700" @@ -85,7 +86,7 @@ def test_extract_tarball_with_symlink(): archive.addfile(symlink_info) archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) assert oct(os.stat(submission.tmpdir).st_mode) == "0o40700" submission.extract_tarball() @@ -120,9 +121,9 @@ def test_extract_tarball_raises_if_doing_path_traversal(): archive.addfile(traversed_file_info, BytesIO(content)) archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) - with pytest.raises(SystemExit): + with pytest.raises(ExportException): # prev: SystemExit submission.extract_tarball() assert not os.path.exists("/tmp/traversed") @@ -157,9 +158,9 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_dir(): archive.addfile(dir_info) archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) - with pytest.raises(SystemExit): + with pytest.raises(ExportException): # prev: SystemExit submission.extract_tarball() assert not os.path.exists("/tmp/traversed") @@ -196,9 +197,9 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): archive.addfile(symlink_info, BytesIO(content)) archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) - with 
pytest.raises(SystemExit): + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/traversed") @@ -235,9 +236,9 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink_linkname(): archive.addfile(symlink_info, BytesIO(content)) archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) - with pytest.raises(SystemExit): + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/traversed") @@ -271,9 +272,9 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path(): archive.addfile(file_info, BytesIO(content)) archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) - with pytest.raises(SystemExit): + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/unsafe") @@ -308,9 +309,9 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): archive.add(symlink_path, "symlink") archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) - with pytest.raises(SystemExit): + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/unsafe") @@ -353,9 +354,9 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink_to archive.add(file_path, "symlink/unsafe") archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + submission = Archive(archive_path, TEST_CONFIG) - with pytest.raises(SystemExit): + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/unsafe") @@ -390,145 +391,87 @@ def test_extract_tarball_raises_if_linkname_has_unsafe_absolute_path(): archive.addfile(symlink_info, BytesIO(content)) archive.close() - submission = export.SDExport(archive_path, TEST_CONFIG) + 
submission = Archive(archive_path, TEST_CONFIG) - with pytest.raises(SystemExit): + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/unsafe") -def test_exit_gracefully_no_exception(capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - test_msg = "test" - - with pytest.raises(SystemExit) as sysexit: - submission.exit_gracefully(test_msg) - - # A graceful exit means a return code of 0 - assert sysexit.value.code == 0 - - captured = capsys.readouterr() - assert captured.err == "{}\n".format(test_msg) - assert captured.out == "" - - -def test_exit_gracefully_exception(capsys): - submission = export.SDExport("testfile", TEST_CONFIG) - test_msg = "ERROR_GENERIC" - - with pytest.raises(SystemExit) as sysexit: - exception = mock.MagicMock() - exception.output = "BANG!" - submission.exit_gracefully(test_msg, e=exception) - - # A graceful exit means a return code of 0 - assert sysexit.value.code == 0 - - captured = capsys.readouterr() - assert captured.err.rstrip() == export.ExportStatus.ERROR_GENERIC.value - assert captured.out == "" - - def test_empty_config(capsys): - export.SDExport("testfile", TEST_CONFIG) + Archive("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() - metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write("{}") - config = export.Metadata(temp_folder) - - assert not config.is_valid() + with pytest.raises(ExportException) as ex: + config = Metadata.create_and_validate(temp_folder) def test_valid_printer_test_config(capsys): - export.SDExport("testfile", TEST_CONFIG) + Archive("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() - metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write('{"device": "printer-test"}') - config = export.Metadata(temp_folder) + 
config = Metadata.create_and_validate(temp_folder) - assert config.is_valid() assert config.encryption_key is None assert config.encryption_method is None def test_valid_printer_config(capsys): - export.SDExport("", TEST_CONFIG) + Archive("", TEST_CONFIG) temp_folder = tempfile.mkdtemp() - metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write('{"device": "printer"}') - config = export.Metadata(temp_folder) + config = Metadata.create_and_validate(temp_folder) - assert config.is_valid() assert config.encryption_key is None assert config.encryption_method is None def test_invalid_encryption_config(capsys): - export.SDExport("testfile", TEST_CONFIG) + Archive("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() - metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write('{"device": "disk", "encryption_method": "base64", "encryption_key": "hunter1"}') - config = export.Metadata(temp_folder) + with pytest.raises(ExportException) as ex: + config = Metadata.create_and_validate(temp_folder) - assert config.encryption_key == "hunter1" - assert config.encryption_method == "base64" - assert not config.is_valid() + assert ex.value.sdstatus is Status.ERROR_ARCHIVE_METADATA +def test_malforned_config(capsys): + Archive("testfile", TEST_CONFIG) + + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write('{"device": "asdf", "encryption_method": "OHNO"}') + + with pytest.raises(ExportException) as ex: + config = Metadata.create_and_validate(temp_folder) + + assert ex.value.sdstatus is Status.ERROR_METADATA_PARSING def test_valid_encryption_config(capsys): - export.SDExport("testfile", TEST_CONFIG) + Archive("testfile", TEST_CONFIG) temp_folder = tempfile.mkdtemp() - 
metadata = os.path.join(temp_folder, export.Metadata.METADATA_FILE) + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: f.write('{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}') - config = export.Metadata(temp_folder) + config = Metadata.create_and_validate(temp_folder) assert config.encryption_key == "hunter1" assert config.encryption_method == "luks" - assert config.is_valid() - - -def test_safe_check_call(capsys, mocker): - submission = export.SDExport("testfile", TEST_CONFIG) - submission.safe_check_call(["ls"], "this will work") - expected_message = "uh oh!!!!" - - with pytest.raises(SystemExit) as sysexit: - submission.safe_check_call(["ls", "kjdsfhkdjfh"], expected_message) - - assert sysexit.value.code == 0 - - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) - assert captured.out == "" - - # This should work too - submission.safe_check_call( - ["python3", "-c", "import sys;sys.stderr.write('hello')"], - expected_message, - ignore_stderr_startswith=b"hello", - ) - - with pytest.raises(SystemExit) as sysexit: - submission.safe_check_call( - ["python3", "-c", "import sys;sys.stderr.write('hello\n')"], - expected_message, - ignore_stderr_startswith=b"world", - ) - assert sysexit.value.code == 0 - captured = capsys.readouterr() - assert captured.err == "{}\n".format(expected_message) - assert captured.out == "" diff --git a/tests/test_main.py b/tests/test_main.py index efa2a6e18..aa020acc6 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,4 +1,66 @@ -from securedrop_export.main import __main__ # noqa: F401 +import pytest +from unittest import mock +import os +#from securedrop_export.main import __main__, _exit_gracefully # noqa: F401 +from securedrop_export.main import Status, _extract_and_run, _exit_gracefully, _write_status # noqa: F401 +from securedrop_export.archive import Archive # This import ensures at least the imports in 
main.__main__ # are executed during a test run + +TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") +BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") +ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") + + +class TestMain(): + + def test_exit_gracefully_no_exception(self, capsys): + submission = Archive("testfile", TEST_CONFIG) + + with pytest.raises(SystemExit) as sysexit: + _exit_gracefully(submission, Status.ERROR_GENERIC) + + # A graceful exit means a return code of 0 + assert sysexit.value.code == 0 + + captured = capsys.readouterr() + assert captured.err == "{}\n".format(Status.ERROR_GENERIC.value) + assert captured.out == "" + + + def test_exit_gracefully_exception(self, capsys): + submission = Archive("testfile", TEST_CONFIG) + + with pytest.raises(SystemExit) as sysexit: + exception = mock.MagicMock() + exception.output = "BANG!" + _exit_gracefully(submission, Status.ERROR_GENERIC, e=exception) + + # A graceful exit means a return code of 0 + assert sysexit.value.code == 0 + + captured = capsys.readouterr() + assert captured.err.rstrip() == Status.ERROR_GENERIC.value + assert captured.out == "" + + + @pytest.mark.parametrize("status", [s for s in Status]) + def test_write_status(self, status, capsys): + _write_status(status) + captured = capsys.readouterr() + assert captured.err == status.value + "\n" + + @pytest.mark.parametrize("invalid_status", ["foo", ";ls", "&& echo 0"]) + def test_write_status_error(self, invalid_status, capsys): + + with pytest.raises(ValueError): + _write_status(Status(invalid_status)) + + + def test__extract_and_run(self): + pass + + + def test__extract_and_run_failure(self): + pass diff --git a/tests/test_util.py b/tests/test_util.py new file mode 100644 index 000000000..297f08b93 --- /dev/null +++ b/tests/test_util.py @@ -0,0 +1,39 @@ +import pytest + +from securedrop_export import utils +from securedrop_export.enums 
import ExportEnum +from securedrop_export.exceptions import ExportException + +class FakeStatus(ExportEnum): + OH_NO = "Oh No!" + NO_PROBLEM = "No Problem!" + +class TestUtil: + + def test_safe_check_call(self): + # This works, since `ls` is a valid comand + utils.safe_check_call(["ls"], FakeStatus.NO_PROBLEM) + + def test_safe_check_call_invalid_call(self): + with pytest.raises(ExportException) as ex: + utils.safe_check_call(["ls", "kjdsfhkdjfh"], FakeStatus.OH_NO) + + assert ex.value.sdstatus is FakeStatus.OH_NO + + def test_safe_check_call_write_to_stderr_and_ignore_error(self): + utils.safe_check_call( + ["python3", "-c", "import sys;sys.stderr.write('hello')"], + FakeStatus.NO_PROBLEM, + ignore_stderr_startswith=b"hello", + ) + + def test_safe_check_call_write_to_stderr_wrong_ignore_param(self): + # This one writes to stderr and ignores the wrong string, so we expect an exception + with pytest.raises(ExportException) as ex: + utils.safe_check_call( + ["python3", "-c", "import sys;sys.stderr.write('hello\n')"], + FakeStatus.OH_NO, + ignore_stderr_startswith=b"world", + ) + + assert ex.value.sdstatus is FakeStatus.OH_NO \ No newline at end of file From 4141484b9a8dea1e50e6d5110772d90a8568de09 Mon Sep 17 00:00:00 2001 From: Ro Date: Mon, 3 Oct 2022 20:12:55 -0700 Subject: [PATCH 306/352] Remove entrypoint.py. 
Update setup.py to launch script from main.py --- securedrop_export/entrypoint.py | 79 --------------------------------- securedrop_export/main.py | 25 ++++++----- setup.py | 2 +- 3 files changed, 16 insertions(+), 90 deletions(-) delete mode 100755 securedrop_export/entrypoint.py diff --git a/securedrop_export/entrypoint.py b/securedrop_export/entrypoint.py deleted file mode 100755 index 4ae235d1f..000000000 --- a/securedrop_export/entrypoint.py +++ /dev/null @@ -1,79 +0,0 @@ -import logging -import os -import shutil -import sys -import platform - -from logging.handlers import TimedRotatingFileHandler, SysLogHandler -from securedrop_export import __version__ -from securedrop_export.archive import Archive -from securedrop_export import main -from securedrop_export.utils import safe_mkdir - -CONFIG_PATH = "/etc/sd-export-config.json" -DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_export") -LOG_DIR_NAME = "logs" -EXPORT_LOG_FILENAME = "export.log" - -logger = logging.getLogger(__name__) - - -def configure_logging(): - """ - All logging related settings are set up by this function. 
- """ - safe_mkdir(DEFAULT_HOME) - safe_mkdir(DEFAULT_HOME, LOG_DIR_NAME) - - log_file = os.path.join(DEFAULT_HOME, LOG_DIR_NAME, EXPORT_LOG_FILENAME) - - # set logging format - log_fmt = "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s" - formatter = logging.Formatter(log_fmt) - - handler = TimedRotatingFileHandler(log_file) - handler.setFormatter(formatter) - - # For rsyslog handler - if platform.system() != "Linux": # pragma: no cover - syslog_file = "/var/run/syslog" - else: - syslog_file = "/dev/log" - - sysloghandler = SysLogHandler(address=syslog_file) - sysloghandler.setFormatter(formatter) - handler.setLevel(logging.DEBUG) - - # set up primary log - log = logging.getLogger() - log.setLevel(logging.DEBUG) - log.addHandler(handler) - # add the second logger - log.addHandler(sysloghandler) - - -def start(): - try: - configure_logging() - except Exception: - msg = "ERROR_LOGGING" - main._exit_gracefully(None, msg) - - logger.info("Starting SecureDrop Export {}".format(__version__)) - my_sub = Archive(sys.argv[1], CONFIG_PATH) - - try: - # Halt immediately if target file is absent - if not os.path.exists(my_sub.archive): - logger.info("Archive is not found {}.".format(my_sub.archive)) - msg = "ERROR_FILE_NOT_FOUND" - main._exit_gracefully(my_sub, msg) - main.__main__(my_sub) - # Delete extracted achive from tempfile - shutil.rmtree(my_sub.tmpdir) - except Exception as e: - # exit with 0 return code otherwise the os will attempt to open - # the file with another application - logger.error(e) - msg = "ERROR_GENERIC" - main._exit_gracefully(my_sub, msg) diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 3b198187f..6f74f5f4b 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -24,16 +24,21 @@ class Status(ExportEnum): """ - Errors initializing export + Status values that can occur during initialization. 
""" ERROR_LOGGING = "ERROR_LOGGING" ERROR_GENERIC = "ERROR_GENERIC" ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" -def start(): +def entrypoint(): + """ + Entrypoint for setuptools. + Configure logging, extract tarball, and run desired export service, + exiting with return code 0. + """ try: - configure_logging() + _configure_logging() except Exception: _exit_gracefully(submission=None, status=Status.ERROR_LOGGING) @@ -46,7 +51,7 @@ def start(): logger.info("Archive is not found {}.".format(data.archive)) _exit_gracefully(data, Status.ERROR_FILE_NOT_FOUND) - # The main event. Extract archive and either print or export to disk. + # Extract archive and either print or export to disk. # Includes cleanup logic, which removes any temporary directories associated with # the archive. _extract_and_run(data) @@ -91,10 +96,11 @@ def _configure_logging(): def _extract_and_run(submission: Archive): """ - Extract tarball and metadata and run appropriate command - based on metadata instruction. + Extract tarball and metadata and run appropriate command based on metadata instruction. + Always exits by writing status, if applicable, to stdout. 
+ """ - status = Status.ERROR_GENERIC + status = None stacktrace = None try: @@ -110,7 +116,7 @@ def _extract_and_run(submission: Archive): status = _start_service(submission, command) except ExportException as ex: - status = ex.sdstatus + status = ex.value.sdstatus stacktrace = ex.output except Exception as exc: @@ -145,7 +151,7 @@ def _start_service(submission: Archive, cmd: Command) -> Status: elif cmd is Commmand.CHECK_USBS: return service.check_connected_devices() elif cmd is Commmand.CHECK_VOLUME: - return service.checK_disk_format() + return service.check_disk_format() def _exit_gracefully(submission: Archive, status: Status=None, e=None): @@ -183,4 +189,3 @@ def _write_status(status: Status): sys.stderr.write("\n") else: logger.info("No status value supplied") - diff --git a/setup.py b/setup.py index e64b673fa..485a88084 100644 --- a/setup.py +++ b/setup.py @@ -31,5 +31,5 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", ), - entry_points={"console_scripts": ["send-to-usb = securedrop_export.entrypoint:start"]}, + entry_points={"console_scripts": ["send-to-usb = securedrop_export.main:entrypoint"]}, ) From b1e12bff9b740d29a60233055987c6c2051372ca Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 4 Oct 2022 09:57:13 -0700 Subject: [PATCH 307/352] Fix metadata validation method. Add Archive and Metadata test coverage, improve CLI test coverage. 
--- securedrop_export/archive.py | 24 +- securedrop_export/{enums.py => command.py} | 15 +- securedrop_export/disk/cli.py | 84 +++--- securedrop_export/disk/new_status.py | 4 +- securedrop_export/disk/service.py | 24 +- securedrop_export/disk/status.py | 8 +- securedrop_export/exceptions.py | 2 - securedrop_export/main.py | 50 ++-- securedrop_export/print/service.py | 25 +- securedrop_export/print/status.py | 4 +- securedrop_export/status.py | 10 + securedrop_export/utils.py | 21 -- tests/disk/test_cli.py | 320 +++++++++++++++------ tests/disk/test_service.py | 63 +++- tests/print/test_service.py | 176 ++++++------ tests/test_archive.py | 17 ++ tests/test_exceptions.py | 20 ++ tests/test_main.py | 13 +- tests/test_util.py | 33 +-- 19 files changed, 548 insertions(+), 365 deletions(-) rename securedrop_export/{enums.py => command.py} (53%) create mode 100644 securedrop_export/status.py create mode 100644 tests/test_exceptions.py diff --git a/securedrop_export/archive.py b/securedrop_export/archive.py index f5a780b23..c2b192e83 100755 --- a/securedrop_export/archive.py +++ b/securedrop_export/archive.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -import abc import datetime import json import logging @@ -11,13 +10,13 @@ import tempfile from securedrop_export.exceptions import ExportException -from securedrop_export.enums import Command, ExportEnum +from securedrop_export.status import BaseStatus +from securedrop_export.command import Command from securedrop_export.utils import safe_extractall logger = logging.getLogger(__name__) - -class Status(ExportEnum): +class Status(BaseStatus): ERROR_ARCHIVE_METADATA = "ERROR_ARCHIVE_METADATA" ERROR_METADATA_PARSING = "ERROR_METADATA_PARSING" ERROR_EXTRACTION = "ERROR_EXTRACTION" @@ -38,7 +37,7 @@ class Metadata(object): __key = object() - def __init__(self, key, archive_path): + def __init__(self, key: object, archive_path: str): if not key == Metadata.__key: raise ValueError("Must use create_and_validate() to create Metadata 
object") @@ -75,16 +74,19 @@ def validate(self): ) ) - # Validate metadata - this will fail if command is not in list of supported commands - self.command = Command(self.export_method) - if self.command is Command.EXPORT and not self.encryption_method in self.SUPPORTED_ENCRYPTION_METHODS: - logger.error("Unsuported encryption method") - raise ExportException(sdstatus=Status.ERROR_ARCHIVE_METADATA) - except Exception as ex: logger.error("Metadata parsing failure") raise ExportException(sdstatus=Status.ERROR_METADATA_PARSING) from ex + # Validate metadata - this will fail if command is not in list of supported commands + try: + self.command = Command(self.export_method) + if self.command is Command.EXPORT and not self.encryption_method in self.SUPPORTED_ENCRYPTION_METHODS: + logger.error("Unsupported encryption method") + raise ExportException(sdstatus=Status.ERROR_ARCHIVE_METADATA) + except ValueError as v: + raise ExportException(sdstatus=Status.ERROR_METADATA_PARSING) from v + class Archive(object): def __init__(self, archive, config_path): diff --git a/securedrop_export/enums.py b/securedrop_export/command.py similarity index 53% rename from securedrop_export/enums.py rename to securedrop_export/command.py index 26d7a9cb1..382b4feff 100644 --- a/securedrop_export/enums.py +++ b/securedrop_export/command.py @@ -1,11 +1,6 @@ from enum import Enum -class ExportEnum(Enum): - """ - Parent class for export and print statuses. - """ - -class Command(ExportEnum): +class Command(Enum): """ All supported commands. 
@@ -19,11 +14,3 @@ class Command(ExportEnum): CHECK_VOLUME = "disk-test" EXPORT = "disk" START_VM = "" - - @classmethod - def printer_actions(cls): - return (cls.PRINTER_PREFLIGHT, cls.PRINTER_TEST, cls.PRINT) - - @classmethod - def export_actions(cls): - return (cls.EXPORT, cls.CHECK_USBS, cls.CHECK_VOLUME) diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py index 579a102e5..8f9dc6d43 100644 --- a/securedrop_export/disk/cli.py +++ b/securedrop_export/disk/cli.py @@ -21,6 +21,9 @@ class CLI: """ A Python wrapper for various shell commands required to detect, map, and mount Export devices. + + CLI callers must handle ExportException and all exceptions and exit with + sys.exit(0) so that another program does not attempt to open the submission. """ # Default mountpoint (unless drive is already mounted manually by the user) @@ -48,7 +51,12 @@ def get_connected_devices(self) -> List[str]: except subprocess.CalledProcessError as ex: raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex - # Determine which are USBs by selecting those block devices that are removable disks. + return self._get_removable_devices(attached_devices) + + def _get_removable_devices(self, attached_devices: List[str]) -> List[str]: + """ + Determine which block devices are USBs by selecting those that are removable. 
+ """ usb_devices = [] for device in attached_devices: is_removable = False @@ -120,7 +128,6 @@ def is_luks_volume(self, device: str) -> bool: # subprocess will throw if the device is not luks (rc !=0) subprocess.check_call(["sudo", "cryptsetup", "isLuks", device]) - # Status.LEGACY_USB_ENCRYPTED isLuks = True except subprocess.CalledProcessError as ex: @@ -143,13 +150,13 @@ def _get_luks_name_from_headers(self, device: str) -> str: items = line.split("\t") if "UUID" in items[0]: return "luks-" + items[1] - else: - logger.error( - f"Failed to dump LUKS headers; {device} may not be correctly formatted" - ) - raise ExportException(sdstatus=Status.INVALID_DEVICE_DETECTED) + + # If no header or no UUID field, we can't use this drive + logger.error(f"Failed to get UUID from LUKS header; {device} may not be correctly formatted") + raise ExportException(sdstatus=Status.INVALID_DEVICE_DETECTED) except subprocess.CalledProcessError as ex: - raise ExportException(sdtatus=Status.DEVICE_ERROR) from ex + logger.error(f"Failed to dump LUKS header") + raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex def get_luks_volume(self, device: str) -> Volume: """ @@ -241,7 +248,7 @@ def mount_volume(self, volume: Volume) -> Volume: If volume is already mounted, mountpoint is not changed. Otherwise, volume is mounted at _DEFAULT_MOUNTPOINT. - Raises ExportException if errors are encountered during mounting. + Raise ExportException if errors are encountered during mounting. """ if not volume.unlocked: raise ExportException(sdstatus=Status.ERROR_MOUNT) @@ -251,33 +258,44 @@ def mount_volume(self, volume: Volume) -> Volume: if mountpoint: logger.debug("The device is already mounted") if volume.mountpoint is not mountpoint: - # This should not happen, but if a user edits their veracrypt drive mountpoint on the fly. 
logger.warning(f"Mountpoint was inaccurate, updating") volume.mountpoint = mountpoint + return volume else: - if not os.path.exists(self._DEFAULT_MOUNTPOINT): - try: - subprocess.check_call(["sudo", "mkdir", self._DEFAULT_MOUNTPOINT]) - except subprocess.CalledProcessError as ex: - logger.error(ex) - raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex + return self._mount_at_mountpoint(volume, self._DEFAULT_MOUNTPOINT) - # Mount device /dev/mapper/{mapped_name} at /media/usb/ - mapped_device_path = os.path.join(volume.MAPPED_VOLUME_PREFIX, volume.mapped_name) - try: - logger.debug(f"Mounting volume {volume.device_name} at {self._DEFAULT_MOUNTPOINT}") - subprocess.check_call(["sudo", "mount", mapped_device_path, self._DEFAULT_MOUNTPOINT]) - subprocess.check_call(["sudo", "chown", "-R", "user:user", self._DEFAULT_MOUNTPOINT]) - - volume.mountpoint = self._DEFAULT_MOUNTPOINT + def _mount_at_mountpoint(self, volume: Volume, mountpoint: str) -> Volume: + """ + Mount a volume at the supplied mountpoint, creating the mountpoint directory and + adjusting permissions (user:user) if need be. `mountpoint` must be a full path. + Return Volume object. + Raise ExportException if unable to mount volume at target mountpoint. 
+ """ + if not os.path.exists(mountpoint): + try: + subprocess.check_call(["sudo", "mkdir", mountpoint]) except subprocess.CalledProcessError as ex: logger.error(ex) raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex + # Mount device /dev/mapper/{mapped_name} at /media/usb/ + mapped_device_path = os.path.join(volume.MAPPED_VOLUME_PREFIX, volume.mapped_name) + + try: + logger.debug(f"Mounting volume {volume.device_name} at {mountpoint}") + subprocess.check_call(["sudo", "mount", mapped_device_path, mountpoint]) + subprocess.check_call(["sudo", "chown", "-R", "user:user", mountpoint]) + + volume.mountpoint = mountpoint + + except subprocess.CalledProcessError as ex: + logger.error(ex) + raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex + return volume def write_data_to_device( @@ -287,11 +305,7 @@ def write_data_to_device( Move files to drive (overwrites files with same filename) and unmount drive. Drive is unmounted and files are cleaned up as part of the `finally` block to ensure that cleanup happens even if export fails or only partially succeeds. - - The calling method *must* handle ExportException and exit with sys.exit(0) so that - another program does not attempt to open the submission. """ - try: target_path = os.path.join(device.mountpoint, submission_target_dirname) subprocess.check_call(["mkdir", target_path]) @@ -340,10 +354,12 @@ def _unmount_volume(self, volume: Volume) -> Volume: except subprocess.CalledProcessError as ex: logger.error("Error unmounting device") - raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex + raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex else: logger.info("Mountpoint does not exist; volume was already unmounted") + return volume + def _close_luks_volume(self, unlocked_device: Volume) -> None: """ Helper. 
Close LUKS volume @@ -369,13 +385,3 @@ def _remove_temp_directory(self, tmpdir: str): except subprocess.CalledProcessError as ex: logger.error("Error removing temporary directory") raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex - - def write_status(self, status: Status): - """ - Write string to stdout. - """ - if status: - sys.stdout.write(status.value) - sys.stdout.write("\n") - else: - logger.warning("No status value supplied") diff --git a/securedrop_export/disk/new_status.py b/securedrop_export/disk/new_status.py index d6f8dadda..2bb0c242a 100644 --- a/securedrop_export/disk/new_status.py +++ b/securedrop_export/disk/new_status.py @@ -1,6 +1,6 @@ -from securedrop_export.enums import ExportEnum +from securedrop_export.status import BaseStatus -class Status(ExportEnum): +class Status(BaseStatus): NO_DEVICE_DETECTED = "NO_DEVICE_DETECTED" INVALID_DEVICE_DETECTED = "INVALID_DEVICE_DETECTED" # Multi partitioned, not encrypted, etc diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index 56531aa13..adabf4e4e 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -26,22 +26,21 @@ def check_connected_devices(self) -> Status: Check if single USB is inserted. 
""" logger.info("Export archive is usb-test") - status = Status.LEGACY_ERROR_GENERIC try: all_devices = self.cli.get_connected_devices() num_devices = len(all_devices) - if num_devices == 0: - raise ExportException(sdstatus=Status.LEGACY_USB_NOT_CONNECTED) - elif num_devices == 1: - return Status.LEGACY_USB_CONNECTED - elif num_devices > 1: - raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - except ExportException as ex: # Use legacy status instead of new status values - raise ExportException(sdstatus=Status.LEGACY_ERROR_GENERIC) from ex + raise ExportException(sdstatus=Status.LEGACY_ERROR_USB_CHECK) from ex + + if num_devices == 0: + raise ExportException(sdstatus=Status.LEGACY_USB_NOT_CONNECTED) + elif num_devices == 1: + return Status.LEGACY_USB_CONNECTED + elif num_devices > 1: + raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) def check_disk_format(self) -> Status: @@ -104,6 +103,7 @@ def export(self) -> Status: raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) except ExportException as ex: + print(ex) # Return legacy status values for now for ongoing client compatibility if ex.sdstatus in [s for s in NewStatus]: status = self._legacy_status(ex.sdstatus) @@ -114,7 +114,7 @@ def export(self) -> Status: raise ExportException(sdstatus=Status.LEGACY_ERROR_GENERIC) - def _legacy_status(self, status: NewStatus): + def _legacy_status(self, status: NewStatus) -> Status: """ Backwards-compatibility - status values that client (@0.7.0) is expecting. 
""" @@ -126,5 +126,7 @@ def _legacy_status(self, status: NewStatus): return Status.LEGACY_USB_BAD_PASSPHRASE elif status in [NewStatus.INVALID_DEVICE_DETECTED, NewStatus.MULTI_DEVICE_DETECTED]: return Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + # The other status values, such as Status.NO_DEVICE_DETECTED, are not returned by the + # CLI, so we don't need to check for them here else: - return Status.LEGACY_ERROR_GENERIC + return Status.LEGACY_ERROR_GENERIC \ No newline at end of file diff --git a/securedrop_export/disk/status.py b/securedrop_export/disk/status.py index e4be49d36..4a3aa8881 100644 --- a/securedrop_export/disk/status.py +++ b/securedrop_export/disk/status.py @@ -1,17 +1,21 @@ -from securedrop_export.enums import ExportEnum +from securedrop_export.status import BaseStatus -class Status(ExportEnum): +class Status(BaseStatus): LEGACY_ERROR_GENERIC = "ERROR_GENERIC" # Legacy USB preflight related LEGACY_USB_CONNECTED = "USB_CONNECTED" # Success + LEGACY_USB_NOT_CONNECTED = "USB_NOT_CONNECTED" LEGACY_ERROR_USB_CHECK = "ERROR_USB_CHECK" # Legacy USB Disk preflight related errors LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" # Success LEGACY_USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" + #@todo - this can be raised during disk format check + LEGACY_USB_DISK_ERROR = "USB_DISK_ERROR" + # Legacy Disk export errors LEGACY_USB_BAD_PASSPHRASE = "USB_BAD_PASSPHRASE" LEGACY_ERROR_USB_MOUNT = "ERROR_USB_MOUNT" diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index b13e79d36..d740fc36b 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -1,8 +1,6 @@ import logging from typing import Optional -from .enums import ExportEnum - logger = logging.getLogger(__name__) diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 6f74f5f4b..9860bbe12 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -6,7 +6,8 @@ import subprocess from securedrop_export.archive import 
Archive, Metadata -from securedrop_export.enums import Command, ExportEnum +from securedrop_export.command import Command +from securedrop_export.status import BaseStatus from securedrop_export.disk.service import Service as ExportService from securedrop_export.print.service import Service as PrintService @@ -22,7 +23,7 @@ logger = logging.getLogger(__name__) -class Status(ExportEnum): +class Status(BaseStatus): """ Status values that can occur during initialization. """ @@ -30,10 +31,9 @@ class Status(ExportEnum): ERROR_GENERIC = "ERROR_GENERIC" ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" - def entrypoint(): """ - Entrypoint for setuptools. + Entrypoint method (Note: a method is required for setuptools). Configure logging, extract tarball, and run desired export service, exiting with return code 0. """ @@ -97,8 +97,7 @@ def _configure_logging(): def _extract_and_run(submission: Archive): """ Extract tarball and metadata and run appropriate command based on metadata instruction. - Always exits by writing status, if applicable, to stdout. - + Always exits with return code 0 and writes exit status, if applicable, to stderr. """ status = None stacktrace = None @@ -131,28 +130,23 @@ def _extract_and_run(submission: Archive): def _start_service(submission: Archive, cmd: Command) -> Status: """ - Start print or export routine. + Start print or export service. 
""" - if cmd in Command.printer_actions(): - service = PrintService(submission) - - if cmd is Commmand.PRINTER: - return service.print() - elif cmd is Commmand.PRINTER_TEST: - return service.printer_preflight() - elif cmd is Commmand.PRINTER_TEST: - return service.printer_test() - - elif cmd in Command.export_actions(): - service = ExportService(submission) - - if cmd is Commmand.EXPORT: - return service.export() - elif cmd is Commmand.CHECK_USBS: - return service.check_connected_devices() - elif cmd is Commmand.CHECK_VOLUME: - return service.check_disk_format() - + # Print Routines + if cmd is Commmand.PRINTER: + return PrintService(submission).print() + elif cmd is Commmand.PRINTER_TEST: + return PrintService(submission).printer_preflight() + elif cmd is Commmand.PRINTER_TEST: + return PrintService(submission).printer_test() + + # Export routines + elif cmd is Commmand.EXPORT: + return ExportService(submission).export() + elif cmd is Commmand.CHECK_USBS: + return ExportService(submission).check_connected_devices() + elif cmd is Commmand.CHECK_VOLUME: + return ExportService(submission).check_disk_format() def _exit_gracefully(submission: Archive, status: Status=None, e=None): """ @@ -180,7 +174,7 @@ def _exit_gracefully(submission: Archive, status: Status=None, e=None): sys.exit(0) -def _write_status(status: Status): +def _write_status(status: BaseStatus): """ Write string to stderr. 
""" diff --git a/securedrop_export/print/service.py b/securedrop_export/print/service.py index f36045c1d..a8e22d24c 100644 --- a/securedrop_export/print/service.py +++ b/securedrop_export/print/service.py @@ -5,7 +5,6 @@ import time from securedrop_export.exceptions import handler, TimeoutException, ExportException -from securedrop_export.utils import safe_check_call from .status import Status PRINTER_NAME = "sdw-printer" @@ -161,7 +160,7 @@ def _install_printer_ppd(self, uri): # Compile and install drivers that are not already installed if not os.path.exists(printer_ppd): logger.info("Installing printer drivers") - safe_check_call( + self.safe_check_call( command=[ "sudo", "ppdc", @@ -244,8 +243,28 @@ def _print_file(self, file_to_print): logger.info("Sending file to printer {}".format(self.printer_name)) - safe_check_call( + self.safe_check_call( command=["xpp", "-P", self.printer_name, file_to_print], error_status=Status.ERROR_PRINT, ) + + def safe_check_call(command: str, error_status: Status, ignore_stderr_startswith=None): + """ + Wrap subprocess.check_output to ensure we wrap CalledProcessError and return + our own exception, and log the error messages. 
+ """ + try: + err = subprocess.run(command, check=True, capture_output=True).stderr + # ppdc and lpadmin may emit warnings we are aware of which should not be treated as + # user facing errors + if ignore_stderr_startswith and err.startswith(ignore_stderr_startswith): + logger.info("Encountered warning: {}".format(err.decode("utf-8"))) + elif err == b"": + # Nothing on stderr and returncode is 0, we're good + pass + else: + raise ExportException(sdstatus=error_status, sderror=err) + except subprocess.CalledProcessError as ex: + raise ExportException(sdstatus=error_status, sderror=ex.output) + diff --git a/securedrop_export/print/status.py b/securedrop_export/print/status.py index a8c5221de..fef0dbdf1 100644 --- a/securedrop_export/print/status.py +++ b/securedrop_export/print/status.py @@ -1,6 +1,6 @@ -from securedrop_export.enums import ExportEnum +from securedrop_export.status import BaseStatus -class Status(ExportEnum): +class Status(BaseStatus): # Printer preflight related errors ERROR_MULTIPLE_PRINTERS_FOUND = "ERROR_MULTIPLE_PRINTERS_FOUND" diff --git a/securedrop_export/status.py b/securedrop_export/status.py new file mode 100644 index 000000000..29b304574 --- /dev/null +++ b/securedrop_export/status.py @@ -0,0 +1,10 @@ +from enum import Enum + +class BaseStatus(Enum): + """ + Base class for export and print statuses. A Status represents a string that can be returned + to the calling VM via stderr to provide diagnostic information about the success of a call. + Status values are defined in subclasses in their respective packages. A full list is available + in the project's README. 
+ """ + pass diff --git a/securedrop_export/utils.py b/securedrop_export/utils.py index 9de1cf979..c7f9557d4 100644 --- a/securedrop_export/utils.py +++ b/securedrop_export/utils.py @@ -5,7 +5,6 @@ import subprocess import logging -from securedrop_export.enums import ExportEnum as Status from securedrop_export.exceptions import ExportException @@ -153,23 +152,3 @@ def _check_dir_permissions(dir_path: Union[str, Path]) -> None: if masked & 0o077: raise RuntimeError("Unsafe permissions ({}) on {}".format(oct(stat_res), dir_path)) - - -def safe_check_call(command: str, error_status: Status, ignore_stderr_startswith=None): - """ - Wrap subprocess.check_output to ensure we wrap CalledProcessError and return - our own exception, and log the error messages. - """ - try: - err = subprocess.run(command, check=True, capture_output=True).stderr - # ppdc and lpadmin may emit warnings we are aware of which should not be treated as - # user facing errors - if ignore_stderr_startswith and err.startswith(ignore_stderr_startswith): - logger.info("Encountered warning: {}".format(err.decode("utf-8"))) - elif err == b"": - # Nothing on stderr and returncode is 0, we're good - pass - else: - raise ExportException(sdstatus=error_status, sderror=err) - except subprocess.CalledProcessError as ex: - raise ExportException(sdstatus=error_status, sderror=ex.output) \ No newline at end of file diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index a26bb70f4..bd92e4abd 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -3,15 +3,13 @@ import os import pytest -import sys - import subprocess +import sys -from securedrop_export.enums import ExportEnum from securedrop_export.disk.cli import CLI from securedrop_export.disk.volume import EncryptionScheme, Volume from securedrop_export.exceptions import ExportException -from securedrop_export.disk.status import Status +from securedrop_export.disk.new_status import Status from securedrop_export.archive import Archive @@ 
-36,6 +34,13 @@ class TestCli: Test the CLI wrapper that handless identification and locking/unlocking of USB volumes. """ + @classmethod + def setup_class(cls): + cls.cli = CLI() + + @classmethod + def teardown_class(cls): + cls.cli = None def _setup_usb_devices(self, mocker, disks, is_removable): """ @@ -66,100 +71,134 @@ def test_get_connected_devices(self, mocker): removable = [b"1\n", b"1\n"] self._setup_usb_devices(mocker, disks, removable) - cli = CLI() - result = cli.get_connected_devices() + result = self.cli.get_connected_devices() assert result[0] == "/dev/sda" and result[1] == "/dev/sdb" + @mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output")) + def test_get_removable_devices_none_removable(self, mocker): + disks = [b"sda disk\n", b"sdb disk\n"] + removable = [b"0\n", b"0\n"] + + self._setup_usb_devices(mocker, disks, removable) + + result = self.cli._get_removable_devices(disks) + assert len(result) == 0 + @mock.patch("subprocess.Popen", side_effect=subprocess.CalledProcessError(1, "Popen")) def test_get_connected_devices_error(self, mocked_subprocess): - cli = CLI() with pytest.raises(ExportException): - cli.get_connected_devices() + self.cli.get_connected_devices() @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_NO_PART) def test_get_partitioned_device_no_partition(self, mocked_call): - cli = CLI() - - result = cli.get_partitioned_device(_DEFAULT_USB_DEVICE) - assert result == _DEFAULT_USB_DEVICE + assert self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) == _DEFAULT_USB_DEVICE @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_ONE_PART) def test_get_partitioned_device_one_partition(self, mocked_call): - cli = CLI() - - result = cli.get_partitioned_device(_DEFAULT_USB_DEVICE) - assert result == _DEFAULT_USB_DEVICE+"1" + assert self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) == _DEFAULT_USB_DEVICE+"1" @mock.patch("subprocess.check_output", 
return_value=_SAMPLE_OUTPUT_MULTI_PART) def test_get_partitioned_device_multi_partition(self, mocked_call): - cli = CLI() + + with pytest.raises(ExportException) as ex: + self.cli.get_partitioned_device(_SAMPLE_OUTPUT_MULTI_PART) - with pytest.raises(ExportException): - result = cli.get_partitioned_device(_SAMPLE_OUTPUT_MULTI_PART) + assert ex.value.sdstatus is Status.INVALID_DEVICE_DETECTED + + @mock.patch("subprocess.check_output", return_value=None) + def test_get_partitioned_device_lsblk_error(self, mocked_subprocess): + with pytest.raises(ExportException) as ex: + self.cli.get_partitioned_device(_SAMPLE_OUTPUT_ONE_PART) + + assert ex.value.sdstatus is Status.DEVICE_ERROR @mock.patch( "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output") ) def test_get_partitioned_device_multi_partition_error(self, mocked_call): - cli = CLI() - + # Make sure we wrap CalledProcessError and throw our own exception - with pytest.raises(ExportException): - cli.get_partitioned_device(_DEFAULT_USB_DEVICE) + with pytest.raises(ExportException) as ex: + self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) + + assert ex.value.sdstatus is Status.DEVICE_ERROR @mock.patch("subprocess.check_call", return_value=0) def test_is_luks_volume_true(self, mocked_call): - cli = CLI() - + # `sudo cryptsetup isLuks` returns 0 if true - assert cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) + assert self.cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) def test_is_luks_volume_false(self, mocked_subprocess): - cli = CLI() # `sudo cryptsetup isLuks` returns 1 if false; CalledProcessError is thrown - assert not cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) + assert not self.cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) @mock.patch("subprocess.check_output", return_value=_SAMPLE_LUKS_HEADER) def test__get_luks_name_from_headers(self, mocked_subprocess): - cli = CLI() - - result = 
cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + result = self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) assert result is not None and result.split("-")[1] in _SAMPLE_LUKS_HEADER.decode("utf8") @mock.patch("subprocess.check_output", return_value=b"corrupted-or-invalid-header\n") def test__get_luks_name_from_headers_error(self, mocked_subprocess): - cli = CLI() - with pytest.raises(ExportException): - result = cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + with pytest.raises(ExportException) as ex: + self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + + assert ex.value.sdstatus is Status.INVALID_DEVICE_DETECTED + + @mock.patch("subprocess.check_output", return_value=b"\n") + def test__get_luks_name_from_headers_error_no_header(self, mocked_subprocess): + + with pytest.raises(ExportException) as ex: + self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + + assert ex.value.sdstatus is Status.INVALID_DEVICE_DETECTED + + @mock.patch("subprocess.check_output", return_value=None) + def test__get_luks_name_from_headers_error_nothing_returned(self, mocked_subprocess): + + with pytest.raises(ExportException) as ex: + self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + + assert ex.value.sdstatus is Status.INVALID_DEVICE_DETECTED @mock.patch( "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output") ) def test__get_luks_name_from_headers_error(self, mocked_subprocess): - cli = CLI() - with pytest.raises(ExportException): - result = cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) + self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=_SAMPLE_LUKS_HEADER) def test_get_luks_volume_already_unlocked(self, mocked_subprocess, mocked_os_call): - cli = CLI() - result = cli.get_luks_volume(_DEFAULT_USB_DEVICE_ONE_PART) + result = self.cli.get_luks_volume(_DEFAULT_USB_DEVICE_ONE_PART) assert 
result.encryption is EncryptionScheme.LUKS assert result.unlocked - @mock.patch("os.path.exists", return_value=True) - def test__unlock_luks_volume_success(self, mocker): - cli = CLI() + @mock.patch("os.path.exists", return_value=False) + @mock.patch("subprocess.check_output", return_value=_SAMPLE_LUKS_HEADER) + def test_get_luks_volume_still_locked(self, mocked_subprocess, mocked_os_call): + result = self.cli.get_luks_volume(_DEFAULT_USB_DEVICE_ONE_PART) + assert result.encryption is EncryptionScheme.LUKS + assert not result.unlocked + + @mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError("check_output", 1)) + def test_get_luks_volume_error(self, mocked_subprocess): + with pytest.raises(ExportException) as ex: + self.cli.get_luks_volume(_DEFAULT_USB_DEVICE_ONE_PART) + + assert ex.value.sdstatus is Status.DEVICE_ERROR + + @mock.patch("os.path.exists", return_value=True) + def test_unlock_luks_volume_success(self, mock_path, mocker): mock_popen = mocker.MagicMock() mock_popen_communicate = mocker.MagicMock() mock_popen.returncode = 0 @@ -169,13 +208,12 @@ def test__unlock_luks_volume_success(self, mocker): mapped_name = "luks-id-123456" vol = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=mapped_name, encryption=EncryptionScheme.LUKS) - key = "A key!&8*%_ A KEY" - result = cli.unlock_luks_volume(vol, key) + key = "a_key&_!" + result = self.cli.unlock_luks_volume(vol, key) assert vol.unlocked + @mock.patch("os.path.exists", return_value=True) def test_unlock_luks_volume_not_luks(self, mocker): - cli = CLI() - mock_popen = mocker.MagicMock() mock_popen.communicate = mocker.MagicMock() mock_popen.communicate.returncode = 1 # An error unlocking @@ -186,12 +224,12 @@ def test_unlock_luks_volume_not_luks(self, mocker): key = "a key!" 
mapped_name = "luks-id-123456" - with pytest.raises(ExportException): - cli.unlock_luks_volume(vol, key) + with pytest.raises(ExportException) as ex: + self.cli.unlock_luks_volume(vol, key) - def test_unlock_luks_volume_passphrase_failure(self, mocker): - cli = CLI() + assert ex.value.sdstatus is Status.DEVICE_ERROR + def test_unlock_luks_volume_passphrase_failure(self, mocker): mock_popen = mocker.MagicMock() mock_popen.communicate = mocker.MagicMock() mock_popen.communicate.returncode = 1 # An error unlocking @@ -203,105 +241,129 @@ def test_unlock_luks_volume_passphrase_failure(self, mocker): mapped_name = "luks-id-123456" with pytest.raises(ExportException): - cli.unlock_luks_volume(vol, key) + self.cli.unlock_luks_volume(vol, key) @mock.patch("subprocess.Popen", side_effect=subprocess.CalledProcessError("1", "Popen")) def test_unlock_luks_volume_luksOpen_exception(self, mocked_subprocess): - cli = CLI() pd = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS) key = "a key!" 
mapped_name = "luks-id-123456" - with pytest.raises(ExportException): - cli.unlock_luks_volume(pd, key) + with pytest.raises(ExportException) as ex: + self.cli.unlock_luks_volume(pd, key) + + assert ex.value.sdstatus is Status.DEVICE_ERROR @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=b"\n") @mock.patch("subprocess.check_call", return_value=0) - def test_mount_volume(self, mocked_output, mocked_call, mocked_path): - cli = CLI() + def test_mount_volume(self, mocked_call, mocked_output, mocked_path): vol = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - result = cli.mount_volume(vol) + result = self.cli.mount_volume(vol) + assert vol.mountpoint is self.cli._DEFAULT_MOUNTPOINT @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=b"/dev/pretend/luks-id-123456\n") @mock.patch("subprocess.check_call", return_value=0) def test_mount_volume_already_mounted(self, mocked_output, mocked_call, mocked_path): - cli = CLI() md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - result = cli.mount_volume(md) + result = self.cli.mount_volume(md) + assert result.mountpoint == "/dev/pretend/luks-id-123456" @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=b"\n") @mock.patch("subprocess.check_call", return_value=0) def test_mount_volume_mkdir(self, mocked_output, mocked_subprocess, mocked_path): - cli = CLI() md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - result = cli.mount_volume(md) - - assert result.mapped_name == _PRETEND_LUKS_ID + assert self.cli.mount_volume(md).mapped_name == _PRETEND_LUKS_ID @mock.patch("subprocess.check_output", return_value=b"\n") @mock.patch("subprocess.check_call", 
side_effect=subprocess.CalledProcessError(1, "check_call")) def test_mount_volume_error(self, mocked_subprocess, mocked_output): - cli = CLI() + md = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + with pytest.raises(ExportException) as ex: + self.cli.mount_volume(md) + + assert ex.value.sdstatus is Status.ERROR_MOUNT + + @mock.patch("os.path.exists", return_value=False) + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test_mount_at_mountpoint_mkdir_error(self, mocked_subprocess, mocked_path): md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - with pytest.raises(ExportException): - cli.mount_volume(md) + with pytest.raises(ExportException) as ex: + volume = self.cli._mount_at_mountpoint(md, self.cli._DEFAULT_MOUNTPOINT) + assert not volume.writable + + assert ex.value.sdstatus is Status.ERROR_MOUNT + + @mock.patch("os.path.exists", return_value=True) + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test_mount_at_mountpoint_mounting_error(self, mocked_subprocess, mocked_path): + md = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + + with pytest.raises(ExportException) as ex: + volume = self.cli._mount_at_mountpoint(md, self.cli._DEFAULT_MOUNTPOINT) + assert not volume.writable + + assert ex.value.sdstatus is Status.ERROR_MOUNT @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_call", return_value=0) def test__unmount_volume(self, mocked_subprocess, mocked_mountpath): - cli = CLI() - mounted = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, - mountpoint=cli._DEFAULT_MOUNTPOINT, + mountpoint=self.cli._DEFAULT_MOUNTPOINT, encryption=EncryptionScheme.LUKS, ) - result = 
cli._unmount_volume(mounted) - + result = self.cli._unmount_volume(mounted) + assert result.mountpoint is None @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) def test__unmount_volume_error(self, mocked_subprocess, mocked_mountpath): - cli = CLI() - mounted = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, - mountpoint=cli._DEFAULT_MOUNTPOINT, + mountpoint=self.cli._DEFAULT_MOUNTPOINT, encryption=EncryptionScheme.LUKS, ) - with pytest.raises(ExportException): - result = cli._unmount_volume(mounted) + with pytest.raises(ExportException) as ex: + self.cli._unmount_volume(mounted) + + assert ex.value.sdstatus is Status.DEVICE_ERROR @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_call", return_value=0) def test__close_luks_volume(self, mocked_subprocess, mocked_os_call): - cli = CLI() - mapped = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, @@ -309,58 +371,126 @@ def test__close_luks_volume(self, mocked_subprocess, mocked_os_call): ) # If call completes without error, drive was successfully closed with luksClose - cli._close_luks_volume(mapped) + self.cli._close_luks_volume(mapped) @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) def test__close_luks_volume_error(self, mocked_subprocess, mocked_os_call): - cli = CLI() - mapped = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - with pytest.raises(ExportException): - cli._close_luks_volume(mapped) + with pytest.raises(ExportException) as ex: + self.cli._close_luks_volume(mapped) + + assert ex.value.sdstatus is Status.DEVICE_ERROR @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) def 
test__remove_temp_directory_error(self, mocked_subprocess): - cli = CLI() - with pytest.raises(ExportException): - cli._remove_temp_directory("tmp") + self.cli._remove_temp_directory("tmp") @mock.patch("subprocess.check_call", return_value=0) def test_write_to_disk(self, mock_check_call): - cli = CLI() + # Temporarily patch a method, to later assert it is called + patch = mock.patch.object(self.cli, "cleanup_drive_and_tmpdir") + patch.return_value = mock.MagicMock() + patch.start() vol = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, - mountpoint=cli._DEFAULT_MOUNTPOINT, + mountpoint=self.cli._DEFAULT_MOUNTPOINT, encryption=EncryptionScheme.LUKS, ) submission = Archive("testfile", TEST_CONFIG) - cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) + self.cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) + self.cli.cleanup_drive_and_tmpdir.assert_called_once() + + # Don't want to patch it indefinitely though, that will mess with the other tests + patch.stop() @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) - def test_write_to_disk_error_still_does_cleanup(self, mock_call, mocker): - cli = CLI() - cli.cleanup_drive_and_tmpdir = mocker.MagicMock() + def test_write_to_disk_error_still_does_cleanup(self, mock_call): + # see above - patch internal method only for this test + patch = mock.patch.object(self.cli, "cleanup_drive_and_tmpdir") + patch.return_value = mock.MagicMock() + patch.start() vol = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, - mountpoint=cli._DEFAULT_MOUNTPOINT, + mountpoint=self.cli._DEFAULT_MOUNTPOINT, encryption=EncryptionScheme.LUKS, ) submission = Archive("testfile", TEST_CONFIG) with pytest.raises(ExportException): - cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) - cleanup_mock.assert_called_once() + self.cli.write_data_to_device(submission.tmpdir, 
submission.target_dirname, vol) + self.cli.cleanup_drive_and_tmpdir.assert_called_once() + + patch.stop() + + @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + def test_cleanup_drive_and_tmpdir_error(self, mocked_subprocess): + submission = Archive("testfile", TEST_CONFIG) + mock_volume = mock.MagicMock(Volume) + + with pytest.raises(ExportException) as ex: + self.cli.cleanup_drive_and_tmpdir(mock_volume, submission.tmpdir) + assert ex.value.sdstatus is Status.ERROR_EXPORT_CLEANUP + + @mock.patch("os.path.exists", return_value=False) + @mock.patch("subprocess.check_call", return_value=0) + def test_cleanup_drive_and_tmpdir(self, mock_subprocess, mocked_path): + submission = Archive("testfile", TEST_CONFIG) + vol = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + mountpoint=self.cli._DEFAULT_MOUNTPOINT, + encryption=EncryptionScheme.LUKS, + ) + + close_patch = mock.patch.object(self.cli, "_close_luks_volume") + remove_tmpdir_patch = mock.patch.object(self.cli, "_remove_temp_directory") + + close_mock = close_patch.start() + rm_tpdir_mock = remove_tmpdir_patch.start() + + # That was all setup. 
Here's our test + self.cli.cleanup_drive_and_tmpdir(vol, submission.tmpdir) + + close_mock.assert_called_once_with(vol) + rm_tpdir_mock.assert_called_once_with(submission.tmpdir) + + # Undo patch changes + close_patch.stop() + remove_tmpdir_patch.stop() + + @mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output")) + def test_mountpoint_error(self, mock_subprocess): + with pytest.raises(ExportException) as ex: + self.cli._get_mountpoint(Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS)) + + assert ex.value.sdstatus is Status.ERROR_MOUNT + + @mock.patch("os.path.exists", return_value=False) + def test_mount_mkdir_fails(self, mocked_path): + mock_mountpoint = mock.patch.object(self.cli, "_get_mountpoint") + mock_mountpoint.return_value = None + # mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output")) + + mock_volume = mock.MagicMock() + mock_volume.device_name = _DEFAULT_USB_DEVICE_ONE_PART + mock_volume.mapped_name = _PRETEND_LUKS_ID + mock_volume.EncryptionScheme = EncryptionScheme.LUKS + mock_volume.unlocked = True + + with pytest.raises(ExportException) as ex: + self.cli.mount_volume(mock_volume) + assert ex.value.sdstatus is Status.ERROR_MOUNT \ No newline at end of file diff --git a/tests/disk/test_service.py b/tests/disk/test_service.py index 9a418593b..1454cbf0c 100644 --- a/tests/disk/test_service.py +++ b/tests/disk/test_service.py @@ -9,7 +9,6 @@ import subprocess from subprocess import CalledProcessError -from securedrop_export.enums import ExportEnum from securedrop_export.exceptions import ExportException from securedrop_export.disk.status import Status from securedrop_export.disk.new_status import Status as NewStatus @@ -77,29 +76,37 @@ def test_check_usb(self): assert status is Status.LEGACY_USB_CONNECTED - def test_check_usb_error_no_devices(self): - self.mock_cli.get_connected_devices.side_effect = 
ExportException(sdstatus=NewStatus.NO_DEVICE_DETECTED) + def test_no_devices_connected(self): + self.mock_cli.get_connected_devices.return_value = [] + with pytest.raises(ExportException) as ex: + self.service.check_connected_devices() + assert ex.value.sdstatus is Status.LEGACY_USB_NOT_CONNECTED + + def test_too_many_devices_connected(self): + self.mock_cli.get_connected_devices.return_value = [SAMPLE_OUTPUT_USB, "/dev/sdb"] with pytest.raises(ExportException) as ex: self.service.check_connected_devices() - assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC + assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED - def test_check_usb_error_multiple_devices(self): - self.mock_cli.get_connected_devices.side_effect = ExportException(sdstatus=NewStatus.MULTI_DEVICE_DETECTED) + def test_device_is_not_luks(self): + self.mock_cli.is_luks_volume.return_value = False + # When VeraCrypt is supported, this will no longer be an exception + # and the return status will change with pytest.raises(ExportException) as ex: - self.service.check_connected_devices() + self.service.check_disk_format() - assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC + assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED - def test_check_usb_error_while_checking(self): + def test_check_usb_error(self): self.mock_cli.get_connected_devices.side_effect = ExportException(sdstatus=Status.LEGACY_ERROR_USB_CHECK) with pytest.raises(ExportException) as ex: self.service.check_connected_devices() - assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC + assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_CHECK def test_check_disk_format(self): status = self.service.check_disk_format() @@ -135,3 +142,39 @@ def test_export_write_error(self): self.service.export() assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_WRITE + + def test_export_throws_new_exception_return_legacy_status(self): + self.mock_cli.get_connected_devices.side_effect = 
ExportException(sdstatus=NewStatus.ERROR_MOUNT) + + with pytest.raises(ExportException) as ex: + self.service.export() + + assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_MOUNT + + @mock.patch("os.path.exists", return_value=True) + def test_write_error_returns_legacy_status(self, mock_path): + self.mock_cli.is_luks_volume.return_value=True + self.mock_cli.write_data_to_device.side_effect = ExportException(sdstatus=NewStatus.ERROR_EXPORT) + + with pytest.raises(ExportException) as ex: + self.service.export() + + assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_WRITE + + @mock.patch("os.path.exists", return_value=True) + def test_unlock_error_returns_legacy_status(self, mock_path): + self.mock_cli.unlock_luks_volume.side_effect = ExportException(sdstatus=NewStatus.ERROR_UNLOCK_LUKS) + + with pytest.raises(ExportException) as ex: + self.service.export() + + assert ex.value.sdstatus is Status.LEGACY_USB_BAD_PASSPHRASE + + @mock.patch("os.path.exists", return_value=True) + def test_unexpected_error_returns_legacy_status_generic(self, mock_path): + self.mock_cli.unlock_luks_volume.side_effect = ExportException(sdstatus=NewStatus.DEVICE_ERROR) + + with pytest.raises(ExportException) as ex: + self.service.export() + + assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC \ No newline at end of file diff --git a/tests/print/test_service.py b/tests/print/test_service.py index 569ecdd2c..317cc992d 100644 --- a/tests/print/test_service.py +++ b/tests/print/test_service.py @@ -8,8 +8,7 @@ from securedrop_export.exceptions import ExportException from securedrop_export.archive import Archive from securedrop_export.print.service import Service -from securedrop_export.print.service import Status - +from securedrop_export.print.status import Status SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BROTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork 
ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa @@ -17,114 +16,121 @@ TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) -def test_get_good_printer_uri_laserjet(mocked_call): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) - - result = service._get_printer_uri() +class PrinterTest: - assert result == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" + @classmethod + def setup_class(cls): + cls.submission = Archive("testfile", TEST_CONFIG) + cls.service = Service(submission) + @classmethod + def teardown_class(cls): + cls.service = None + cls.submission = None -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) -def test_get_good_printer_uri_brother(mocked_call): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) + def test_get_good_printer_uri_laserjet(mocked_call): + assert self.service._get_printer_uri() == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" - result = service._get_printer_uri() - assert result == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) + def test_get_good_printer_uri_brother(mocked_call): + assert self.service._get_printer_uri() == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" -@mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) -def test_get_bad_printer_uri(mocked_call, capsys, mocker): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) - expected_status = Status.ERROR_PRINTER_NOT_FOUND # todo + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) + def 
test_get_bad_printer_uri(mocked_call, capsys, mocker): + with pytest.raises(ExportException) as ex: + self.service._get_printer_uri() - with pytest.raises(ExportException): - service._get_printer_uri() + assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_FOUND + @pytest.mark.parametrize( + "open_office_paths", + [ + "/tmp/whatver/thisisadoc.doc" + "/home/user/Downloads/thisisadoc.xlsx" + "/home/user/Downloads/file.odt" + "/tmp/tmpJf83j9/secret.pptx" + ], + ) + def test_is_open_office_file(capsys, open_office_paths): + assert self.service._is_open_office_file(open_office_paths) -@pytest.mark.parametrize( - "open_office_paths", - [ - "/tmp/whatver/thisisadoc.doc" - "/home/user/Downloads/thisisadoc.xlsx" - "/home/user/Downloads/file.odt" - "/tmp/tmpJf83j9/secret.pptx" - ], -) -def test_is_open_office_file(capsys, open_office_paths): - submission = Archive("", TEST_CONFIG) - service = Service(submission) - assert service._is_open_office_file(open_office_paths) + @pytest.mark.parametrize( + "open_office_paths", + [ + "/tmp/whatver/thisisadoc.doccc" + "/home/user/Downloads/thisisa.xlsx.zip" + "/home/user/Downloads/file.odz" + "/tmp/tmpJf83j9/secret.gpg" + ], + ) + def test_is_not_open_office_file(capsys, open_office_paths): + assert not self.service._is_open_office_file(open_office_paths) + @mock.patch("subprocess.run") + def test_install_printer_ppd_laserjet(mocker): + ppd = self.service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") + assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" -@pytest.mark.parametrize( - "open_office_paths", - [ - "/tmp/whatver/thisisadoc.doccc" - "/home/user/Downloads/thisisa.xlsx.zip" - "/home/user/Downloads/file.odz" - "/tmp/tmpJf83j9/secret.gpg" - ], -) -def test_is_not_open_office_file(capsys, open_office_paths): - submission = Archive("", TEST_CONFIG) - service = Service(submission) - assert not service._is_open_office_file(open_office_paths) + @mock.patch("subprocess.run") + def 
test_install_printer_ppd_brother(mocker): + ppd = self.service._install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") + assert ppd == "/usr/share/cups/model/br7030.ppd" -@mock.patch("subprocess.run") -def test_install_printer_ppd_laserjet(mocker): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) - ppd = service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") - assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" + def test_install_printer_ppd_error_no_driver(mocker): + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) + with pytest.raises(ExportException) as ex: + self.service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") -@mock.patch("subprocess.run") -def test_install_printer_ppd_brother(mocker): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) - ppd = service._install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") - assert ppd == "/usr/share/cups/model/br7030.ppd" + assert ex.value.sdstatus is Status.ERROR_PRINTER_DRIVER_UNAVAILABLE + def test_install_printer_ppd_error_not_supported(mocker): + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) -def test_install_printer_ppd_error_no_driver(mocker): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) + with pytest.raises(ExportException) as ex: + self.service._install_printer_ppd("usb://Not/Supported?serial=A00000A000000") - mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) + assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED - with pytest.raises(ExportException) as ex: - service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") + def test_setup_printer_error(mocker): + mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) - assert ex.value.sdstatus is 
Status.ERROR_PRINTER_DRIVER_UNAVAILABLE + with pytest.raises(ExportException) as ex: + self.service._setup_printer( + "usb://Brother/HL-L2320D%20series?serial=A00000A000000", + "/usr/share/cups/model/br7030.ppd", + ) + assert ex.value.sdstatus is Status.ERROR_PRINTER_INSTALL -def test_install_printer_ppd_error_not_supported(mocker): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) - mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) - with pytest.raises(ExportException) as ex: - service._install_printer_ppd("usb://Not/Supported?serial=A00000A000000") + def test_safe_check_call(self): + # This works, since `ls` is a valid comand + self.service.safe_check_call(["ls"], Status.TEST_SUCCESS) - assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED + def test_safe_check_call_invalid_call(self): + with pytest.raises(ExportException) as ex: + self.service.safe_check_call(["ls", "kjdsfhkdjfh"], Status.ERROR_PRINT) + assert ex.value.sdstatus is FakeStatus.ERROR_PRINT -def test_setup_printer_error(mocker): - submission = Archive("testfile", TEST_CONFIG) - service = Service(submission) - - mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) - - with pytest.raises(ExportException) as ex: - service._setup_printer( - "usb://Brother/HL-L2320D%20series?serial=A00000A000000", - "/usr/share/cups/model/br7030.ppd", + def test_safe_check_call_write_to_stderr_and_ignore_error(self): + self.service.safe_check_call( + ["python3", "-c", "import sys;sys.stderr.write('hello')"], + Status.TEST_SUCCESS, + ignore_stderr_startswith=b"hello", ) - assert ex.value.sdstatus is Status.ERROR_PRINTER_INSTALL \ No newline at end of file + def test_safe_check_call_write_to_stderr_wrong_ignore_param(self): + # This one writes to stderr and ignores the wrong string, so we expect an exception + with pytest.raises(ExportException) as ex: + self.service.safe_check_call( + ["python3", "-c", "import 
sys;sys.stderr.write('hello\n')"], + Status.ERROR_PRINT, + ignore_stderr_startswith=b"world", + ) + + assert ex.value.sdstatus is Status.ERROR_PRINT \ No newline at end of file diff --git a/tests/test_archive.py b/tests/test_archive.py index 0437e94ff..eb45b5dd3 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -475,3 +475,20 @@ def test_valid_encryption_config(capsys): assert config.encryption_method == "luks" +def test_cannot_use_metadata_constructor(): + """ + Require the `create_and_validate()` method for returning a Metadata object + """ + with pytest.raises(ValueError): + Metadata(object(), tempfile.mkdtemp()) + + +@mock.patch("json.loads", side_effect=json.decoder.JSONDecodeError("ugh", "badjson", 0)) +def test_metadata_parsing_error(mock_json): + """ + Handle exception caused when loading metadata JSON + """ + with pytest.raises(ExportException) as ex: + Metadata.create_and_validate(tempfile.mkdtemp()) + + assert ex.value.sdstatus is Status.ERROR_METADATA_PARSING \ No newline at end of file diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py new file mode 100644 index 000000000..577fae13a --- /dev/null +++ b/tests/test_exceptions.py @@ -0,0 +1,20 @@ +import pytest +import signal + +from securedrop_export.exceptions import handler, TimeoutException + +def test_handler(): + signal.signal(signal.SIGALRM, handler) + signal.setitimer(signal.ITIMER_REAL, 0.001) + + with pytest.raises(TimeoutException) as ex: + _run_handler_routine() + +def _run_handler_routine(): + try: + while True: + continue + except TimeoutException: + raise + + \ No newline at end of file diff --git a/tests/test_main.py b/tests/test_main.py index aa020acc6..2254d158f 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -2,16 +2,10 @@ from unittest import mock import os -#from securedrop_export.main import __main__, _exit_gracefully # noqa: F401 -from securedrop_export.main import Status, _extract_and_run, _exit_gracefully, _write_status # noqa: F401 +from 
securedrop_export.main import Status, entrypoint, _extract_and_run, _exit_gracefully, _write_status # noqa: F401 from securedrop_export.archive import Archive -# This import ensures at least the imports in main.__main__ -# are executed during a test run TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") -BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") -ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") - class TestMain(): @@ -51,7 +45,7 @@ def test_write_status(self, status, capsys): captured = capsys.readouterr() assert captured.err == status.value + "\n" - @pytest.mark.parametrize("invalid_status", ["foo", ";ls", "&& echo 0"]) + @pytest.mark.parametrize("invalid_status", ["foo", ";ls", "&& echo 0", None]) def test_write_status_error(self, invalid_status, capsys): with pytest.raises(ValueError): @@ -64,3 +58,6 @@ def test__extract_and_run(self): def test__extract_and_run_failure(self): pass + + def test_entrypoint(self): + pass diff --git a/tests/test_util.py b/tests/test_util.py index 297f08b93..5a1f56aa9 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -1,39 +1,8 @@ import pytest from securedrop_export import utils -from securedrop_export.enums import ExportEnum from securedrop_export.exceptions import ExportException -class FakeStatus(ExportEnum): - OH_NO = "Oh No!" - NO_PROBLEM = "No Problem!" 
- class TestUtil: - def test_safe_check_call(self): - # This works, since `ls` is a valid comand - utils.safe_check_call(["ls"], FakeStatus.NO_PROBLEM) - - def test_safe_check_call_invalid_call(self): - with pytest.raises(ExportException) as ex: - utils.safe_check_call(["ls", "kjdsfhkdjfh"], FakeStatus.OH_NO) - - assert ex.value.sdstatus is FakeStatus.OH_NO - - def test_safe_check_call_write_to_stderr_and_ignore_error(self): - utils.safe_check_call( - ["python3", "-c", "import sys;sys.stderr.write('hello')"], - FakeStatus.NO_PROBLEM, - ignore_stderr_startswith=b"hello", - ) - - def test_safe_check_call_write_to_stderr_wrong_ignore_param(self): - # This one writes to stderr and ignores the wrong string, so we expect an exception - with pytest.raises(ExportException) as ex: - utils.safe_check_call( - ["python3", "-c", "import sys;sys.stderr.write('hello\n')"], - FakeStatus.OH_NO, - ignore_stderr_startswith=b"world", - ) - - assert ex.value.sdstatus is FakeStatus.OH_NO \ No newline at end of file + pass #todo \ No newline at end of file From b52e68b665606dc7e59816f69b4c907045ebd4c2 Mon Sep 17 00:00:00 2001 From: Ro Date: Thu, 6 Oct 2022 17:42:50 -0700 Subject: [PATCH 308/352] Rename utils.py to directory_util, add test coverage, show new name in semgrep rules. 
i#add directory_util.py test coverage, show new name in semgrep rules --- .semgrep/custom-rules.yaml | 4 +- securedrop_export/archive.py | 2 +- .../{utils.py => directory_util.py} | 3 - securedrop_export/main.py | 2 +- tests/test_directory_util.py | 65 +++++++++++++++++++ tests/test_util.py | 8 --- 6 files changed, 69 insertions(+), 15 deletions(-) rename securedrop_export/{utils.py => directory_util.py} (99%) create mode 100644 tests/test_directory_util.py delete mode 100644 tests/test_util.py diff --git a/.semgrep/custom-rules.yaml b/.semgrep/custom-rules.yaml index 6793bb298..38f60f003 100644 --- a/.semgrep/custom-rules.yaml +++ b/.semgrep/custom-rules.yaml @@ -47,7 +47,7 @@ rules: languages: - python severity: ERROR - message: Possible path traversal or insecure directory and file permissions through os.mkdir(). Use securedrop_export.utils.safe_mkdir instead. + message: Possible path traversal or insecure directory and file permissions through os.mkdir(). Use securedrop_export.directory_util.safe_mkdir instead. patterns: - pattern: "....mkdir(...)" - pattern-not-inside: | @@ -58,7 +58,7 @@ rules: languages: - python severity: ERROR - message: Possible path traversal or insecure directory and file permissions through os.makedirs(). Use securedrop_export.utils.safe_mkdir instead. + message: Possible path traversal or insecure directory and file permissions through os.makedirs(). Use securedrop_export.directory_util.safe_mkdir instead. 
patterns: - pattern: "....makedirs(...)" - pattern-not-inside: | diff --git a/securedrop_export/archive.py b/securedrop_export/archive.py index c2b192e83..10c62bb95 100755 --- a/securedrop_export/archive.py +++ b/securedrop_export/archive.py @@ -12,7 +12,7 @@ from securedrop_export.exceptions import ExportException from securedrop_export.status import BaseStatus from securedrop_export.command import Command -from securedrop_export.utils import safe_extractall +from securedrop_export.directory_util import safe_extractall logger = logging.getLogger(__name__) diff --git a/securedrop_export/utils.py b/securedrop_export/directory_util.py similarity index 99% rename from securedrop_export/utils.py rename to securedrop_export/directory_util.py index c7f9557d4..b4608851c 100644 --- a/securedrop_export/utils.py +++ b/securedrop_export/directory_util.py @@ -5,9 +5,6 @@ import subprocess import logging -from securedrop_export.exceptions import ExportException - - logger = logging.getLogger(__name__) diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 9860bbe12..13b46e15d 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -8,13 +8,13 @@ from securedrop_export.archive import Archive, Metadata from securedrop_export.command import Command from securedrop_export.status import BaseStatus +from securedrop_export.directory_util import safe_mkdir from securedrop_export.disk.service import Service as ExportService from securedrop_export.print.service import Service as PrintService from logging.handlers import TimedRotatingFileHandler, SysLogHandler from securedrop_export import __version__ -from securedrop_export.utils import safe_mkdir CONFIG_PATH = "/etc/sd-export-config.json" DEFAULT_HOME = os.path.join(os.path.expanduser("~"), ".securedrop_export") diff --git a/tests/test_directory_util.py b/tests/test_directory_util.py new file mode 100644 index 000000000..18eb6dd2a --- /dev/null +++ b/tests/test_directory_util.py @@ -0,0 +1,65 @@ 
+import pytest +import os + +from pathlib import Path +from securedrop_export import directory_util +from securedrop_export.exceptions import ExportException + +class TestUtil: + + _TMPDIR_PATH = "/tmp/pretendium/" + _REL_TRAVERSAL = "../../../whee" + _SAFE_RELPATH = "./hi" + _SAFE_RELPATH2 = "yay/a/path" + _UNSAFE_RELPATH = "lgtm/../ohwait" + + def setup_method(self, method): + pass + + def teadown_method(self, method): + if (os.path.exists(self._TMPDIR_PATH)): + os.remove(self._TMPDIR_PATH) + + def test_safe_mkdir_error_base_relpath(self): + with pytest.raises(ValueError): + directory_util.safe_mkdir(base_path=Path(".")) + + def test_safe_mkdir_error_basepath_path_traversal(self): + with pytest.raises(ValueError): + directory_util.safe_mkdir(f"{self._TMPDIR_PATH}{self._REL_TRAVERSAL}") + + def test_safe_mkdir_error_relpath_path_traversal(self): + with pytest.raises(ValueError): + directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._REL_TRAVERSAL}") + + def test_safe_mkdir_success(self): + directory_util.safe_mkdir(f"{self._TMPDIR_PATH}") + + def test_safe_mkdir_success_with_relpath(self): + directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._SAFE_RELPATH}") + + assert (os.path.exists(f"{self._TMPDIR_PATH}{self._SAFE_RELPATH}")) + + def test_safe_mkdir_success_another_relpath(self): + directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._SAFE_RELPATH2}") + + assert (os.path.exists(f"{self._TMPDIR_PATH}{self._SAFE_RELPATH2}")) + + def test_safe_mkdir_weird_path(self): + with pytest.raises(ValueError): + directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._UNSAFE_RELPATH}") + + def test__check_all_permissions_path_missing(self): + with pytest.raises(ValueError): + directory_util._check_all_permissions(f"{self._TMPDIR_PATH}", f"{self._SAFE_RELPATH}") + + def test_check_dir_perms_unsafe(self): + path = Path(f"{self._TMPDIR_PATH}{self._SAFE_RELPATH}") + + directory_util.safe_mkdir(path) + + # Not what we want, ever + path.chmod(0o666) + + with 
pytest.raises(RuntimeError): + directory_util._check_dir_permissions(path) diff --git a/tests/test_util.py b/tests/test_util.py deleted file mode 100644 index 5a1f56aa9..000000000 --- a/tests/test_util.py +++ /dev/null @@ -1,8 +0,0 @@ -import pytest - -from securedrop_export import utils -from securedrop_export.exceptions import ExportException - -class TestUtil: - - pass #todo \ No newline at end of file From 5e398ad61fafa45589f55355718ac5f4d90fb0cc Mon Sep 17 00:00:00 2001 From: Ro Date: Thu, 6 Oct 2022 17:43:04 -0700 Subject: [PATCH 309/352] Remove unused config file param from Archive and clean up test_archive.py --- securedrop_export/archive.py | 2 +- securedrop_export/main.py | 3 +-- tests/disk/test_cli.py | 10 +++----- tests/disk/test_service.py | 3 +-- tests/sd-export-config-bad-2.json | 3 --- tests/sd-export-config-bad.json | 3 --- tests/sd-export-config.json | 3 --- tests/test_archive.py | 41 ++++++++++++++----------------- tests/test_main.py | 6 ++--- 9 files changed, 27 insertions(+), 47 deletions(-) delete mode 100644 tests/sd-export-config-bad-2.json delete mode 100644 tests/sd-export-config-bad.json delete mode 100644 tests/sd-export-config.json diff --git a/securedrop_export/archive.py b/securedrop_export/archive.py index 10c62bb95..0cbb98273 100755 --- a/securedrop_export/archive.py +++ b/securedrop_export/archive.py @@ -89,7 +89,7 @@ def validate(self): class Archive(object): - def __init__(self, archive, config_path): + def __init__(self, archive): os.umask(0o077) self.archive = archive self.submission_dirname = os.path.basename(self.archive).split(".")[0] diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 13b46e15d..408173649 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -16,7 +16,6 @@ from logging.handlers import TimedRotatingFileHandler, SysLogHandler from securedrop_export import __version__ -CONFIG_PATH = "/etc/sd-export-config.json" DEFAULT_HOME = os.path.join(os.path.expanduser("~"), 
".securedrop_export") LOG_DIR_NAME = "logs" EXPORT_LOG_FILENAME = "export.log" @@ -43,7 +42,7 @@ def entrypoint(): _exit_gracefully(submission=None, status=Status.ERROR_LOGGING) logger.info("Starting SecureDrop Export {}".format(__version__)) - data = Archive(sys.argv[1], CONFIG_PATH) + data = Archive(sys.argv[1]) try: # Halt immediately if target file is absent diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index bd92e4abd..55d79a40e 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -13,8 +13,6 @@ from securedrop_export.archive import Archive -TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") - _DEFAULT_USB_DEVICE = "/dev/sda" _DEFAULT_USB_DEVICE_ONE_PART = "/dev/sda1" @@ -406,7 +404,7 @@ def test_write_to_disk(self, mock_check_call): encryption=EncryptionScheme.LUKS, ) - submission = Archive("testfile", TEST_CONFIG) + submission = Archive("testfile") self.cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) self.cli.cleanup_drive_and_tmpdir.assert_called_once() @@ -427,7 +425,7 @@ def test_write_to_disk_error_still_does_cleanup(self, mock_call): mountpoint=self.cli._DEFAULT_MOUNTPOINT, encryption=EncryptionScheme.LUKS, ) - submission = Archive("testfile", TEST_CONFIG) + submission = Archive("testfile") with pytest.raises(ExportException): self.cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) @@ -437,7 +435,7 @@ def test_write_to_disk_error_still_does_cleanup(self, mock_call): @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) def test_cleanup_drive_and_tmpdir_error(self, mocked_subprocess): - submission = Archive("testfile", TEST_CONFIG) + submission = Archive("testfile") mock_volume = mock.MagicMock(Volume) with pytest.raises(ExportException) as ex: @@ -447,7 +445,7 @@ def test_cleanup_drive_and_tmpdir_error(self, mocked_subprocess): @mock.patch("os.path.exists", return_value=False) 
@mock.patch("subprocess.check_call", return_value=0) def test_cleanup_drive_and_tmpdir(self, mock_subprocess, mocked_path): - submission = Archive("testfile", TEST_CONFIG) + submission = Archive("testfile") vol = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, diff --git a/tests/disk/test_service.py b/tests/disk/test_service.py index 1454cbf0c..6cda02716 100644 --- a/tests/disk/test_service.py +++ b/tests/disk/test_service.py @@ -18,7 +18,6 @@ from securedrop_export.disk.service import Service from securedrop_export.disk.cli import CLI -TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") SAMPLE_OUTPUT_LSBLK_NO_PART = b"disk\ncrypt" # noqa SAMPLE_OUTPUT_USB = "/dev/sda" # noqa SAMPLE_OUTPUT_USB_PARTITIONED = "/dev/sda1" @@ -46,7 +45,7 @@ def _setup_submission(cls) -> Archive: """ Helper method to set up sample archive """ - submission = Archive("testfile", TEST_CONFIG) + submission = Archive("testfile") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: diff --git a/tests/sd-export-config-bad-2.json b/tests/sd-export-config-bad-2.json deleted file mode 100644 index f69e25b7a..000000000 --- a/tests/sd-export-config-bad-2.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "no_pci_bus_id": "nope" -} diff --git a/tests/sd-export-config-bad.json b/tests/sd-export-config-bad.json deleted file mode 100644 index f7cbf8d7a..000000000 --- a/tests/sd-export-config-bad.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pciishf. 
i3u 2 -} diff --git a/tests/sd-export-config.json b/tests/sd-export-config.json deleted file mode 100644 index d1167cf5a..000000000 --- a/tests/sd-export-config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pci_bus_id": "2" -} diff --git a/tests/test_archive.py b/tests/test_archive.py index eb45b5dd3..4e840abf3 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -12,11 +12,6 @@ from securedrop_export.exceptions import ExportException from securedrop_export.archive import Archive, Metadata, Status -TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") -BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad.json") -ANOTHER_BAD_TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config-bad-2.json") - - def test_extract_tarball(): """ Check that we can successfully extract a valid tarball. @@ -47,7 +42,7 @@ def test_extract_tarball(): archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) assert oct(os.stat(submission.tmpdir).st_mode) == "0o40700" submission.extract_tarball() @@ -86,7 +81,7 @@ def test_extract_tarball_with_symlink(): archive.addfile(symlink_info) archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) assert oct(os.stat(submission.tmpdir).st_mode) == "0o40700" submission.extract_tarball() @@ -121,9 +116,9 @@ def test_extract_tarball_raises_if_doing_path_traversal(): archive.addfile(traversed_file_info, BytesIO(content)) archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) - with pytest.raises(ExportException): # prev: SystemExit + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/traversed") @@ -158,9 +153,9 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_dir(): archive.addfile(dir_info) archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = 
Archive(archive_path) - with pytest.raises(ExportException): # prev: SystemExit + with pytest.raises(ExportException): submission.extract_tarball() assert not os.path.exists("/tmp/traversed") @@ -197,7 +192,7 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink(): archive.addfile(symlink_info, BytesIO(content)) archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) with pytest.raises(ExportException): submission.extract_tarball() @@ -236,7 +231,7 @@ def test_extract_tarball_raises_if_doing_path_traversal_with_symlink_linkname(): archive.addfile(symlink_info, BytesIO(content)) archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) with pytest.raises(ExportException): submission.extract_tarball() @@ -272,7 +267,7 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path(): archive.addfile(file_info, BytesIO(content)) archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) with pytest.raises(ExportException): submission.extract_tarball() @@ -309,7 +304,7 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): archive.add(symlink_path, "symlink") archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) with pytest.raises(ExportException): submission.extract_tarball() @@ -354,7 +349,7 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink_to archive.add(file_path, "symlink/unsafe") archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) with pytest.raises(ExportException): submission.extract_tarball() @@ -391,7 +386,7 @@ def test_extract_tarball_raises_if_linkname_has_unsafe_absolute_path(): archive.addfile(symlink_info, BytesIO(content)) archive.close() - submission = Archive(archive_path, TEST_CONFIG) + submission = Archive(archive_path) with 
pytest.raises(ExportException): submission.extract_tarball() @@ -400,7 +395,7 @@ def test_extract_tarball_raises_if_linkname_has_unsafe_absolute_path(): def test_empty_config(capsys): - Archive("testfile", TEST_CONFIG) + Archive("testfile") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -411,7 +406,7 @@ def test_empty_config(capsys): def test_valid_printer_test_config(capsys): - Archive("testfile", TEST_CONFIG) + Archive("testfile") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -424,7 +419,7 @@ def test_valid_printer_test_config(capsys): def test_valid_printer_config(capsys): - Archive("", TEST_CONFIG) + Archive("") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: @@ -437,7 +432,7 @@ def test_valid_printer_config(capsys): def test_invalid_encryption_config(capsys): - Archive("testfile", TEST_CONFIG) + Archive("testfile") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) @@ -450,7 +445,7 @@ def test_invalid_encryption_config(capsys): assert ex.value.sdstatus is Status.ERROR_ARCHIVE_METADATA def test_malforned_config(capsys): - Archive("testfile", TEST_CONFIG) + Archive("testfile") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) @@ -463,7 +458,7 @@ def test_malforned_config(capsys): assert ex.value.sdstatus is Status.ERROR_METADATA_PARSING def test_valid_encryption_config(capsys): - Archive("testfile", TEST_CONFIG) + Archive("testfile") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: diff --git a/tests/test_main.py b/tests/test_main.py index 2254d158f..e309ec305 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -5,12 +5,10 @@ from securedrop_export.main 
import Status, entrypoint, _extract_and_run, _exit_gracefully, _write_status # noqa: F401 from securedrop_export.archive import Archive -TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") - class TestMain(): def test_exit_gracefully_no_exception(self, capsys): - submission = Archive("testfile", TEST_CONFIG) + submission = Archive("testfile") with pytest.raises(SystemExit) as sysexit: _exit_gracefully(submission, Status.ERROR_GENERIC) @@ -24,7 +22,7 @@ def test_exit_gracefully_no_exception(self, capsys): def test_exit_gracefully_exception(self, capsys): - submission = Archive("testfile", TEST_CONFIG) + submission = Archive("testfile") with pytest.raises(SystemExit) as sysexit: exception = mock.MagicMock() From 65920dcd0a48d38f12bb340fe7d4930227857206 Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 11 Oct 2022 15:52:21 -0700 Subject: [PATCH 310/352] Apply black and linter. Improve test coverage. TimeoutException no longer inherits from ExportException. Small fix to export metadata and get_partitioned_devices. 
--- securedrop_export/archive.py | 80 +++---- securedrop_export/command.py | 4 +- securedrop_export/directory_util.py | 15 +- securedrop_export/disk/cli.py | 126 ++++++---- securedrop_export/disk/new_service.py | 17 +- securedrop_export/disk/new_status.py | 25 +- securedrop_export/disk/service.py | 62 +++-- securedrop_export/disk/status.py | 9 +- securedrop_export/disk/volume.py | 8 +- securedrop_export/exceptions.py | 4 +- securedrop_export/main.py | 162 +++++++------ securedrop_export/print/service.py | 73 +++--- securedrop_export/print/status.py | 6 +- securedrop_export/status.py | 2 + setup.py | 4 +- tests/disk/test_cli.py | 185 ++++++++++----- tests/disk/test_service.py | 82 ++++--- tests/disk/test_volume.py | 21 +- tests/print/test_service.py | 317 +++++++++++++++++++++++--- tests/test_archive.py | 79 ++++--- tests/test_directory_util.py | 55 +++-- tests/test_exceptions.py | 6 +- tests/test_main.py | 166 ++++++++++++-- 23 files changed, 1058 insertions(+), 450 deletions(-) diff --git a/securedrop_export/archive.py b/securedrop_export/archive.py index 0cbb98273..2ec50b5c8 100755 --- a/securedrop_export/archive.py +++ b/securedrop_export/archive.py @@ -4,9 +4,6 @@ import json import logging import os -import shutil -import subprocess -import sys import tempfile from securedrop_export.exceptions import ExportException @@ -16,51 +13,26 @@ logger = logging.getLogger(__name__) + class Status(BaseStatus): ERROR_ARCHIVE_METADATA = "ERROR_ARCHIVE_METADATA" ERROR_METADATA_PARSING = "ERROR_METADATA_PARSING" ERROR_EXTRACTION = "ERROR_EXTRACTION" + class Metadata(object): """ Object to parse, validate and store json metadata from the sd-export archive. - - Create a Metadata object by using the `create_and_validate()` method to - ensure well-formed and valid metadata. 
""" METADATA_FILE = "metadata.json" SUPPORTED_ENCRYPTION_METHODS = ["luks"] - # Slightly underhanded way of ensuring that a Metadata object is not instantiated - # directly; instead, the create_and_validate() method is used - __key = object() - - - def __init__(self, key: object, archive_path: str): - if not key == Metadata.__key: - raise ValueError("Must use create_and_validate() to create Metadata object") - - # Initialize + def __init__(self, archive_path: str): self.metadata_path = os.path.join(archive_path, self.METADATA_FILE) - - @classmethod - def create_and_validate(cls, archive_path) -> 'Metadata': - """ - Create and validate metadata object. Raise ExportException for invalid metadata. - """ - md = Metadata(cls.__key, archive_path) - md.validate() - - return md - - - def validate(self): - """ - Validate Metadata. - Throw ExportException if invalid state is found. - """ + def validate(self) -> "Metadata": + # Read metadata json and set relevant attributes try: with open(self.metadata_path) as f: logger.info("Parsing archive metadata") @@ -69,7 +41,7 @@ def validate(self): self.encryption_method = json_config.get("encryption_method", None) self.encryption_key = json_config.get("encryption_key", None) logger.info( - "Exporting to device {} with encryption_method {}".format( + "Target: {}, encryption_method {}".format( self.export_method, self.encryption_method ) ) @@ -78,32 +50,52 @@ def validate(self): logger.error("Metadata parsing failure") raise ExportException(sdstatus=Status.ERROR_METADATA_PARSING) from ex - # Validate metadata - this will fail if command is not in list of supported commands - try: + # Validate action - fails if command is not in list of supported commands + try: + logger.debug("Validate export action") self.command = Command(self.export_method) - if self.command is Command.EXPORT and not self.encryption_method in self.SUPPORTED_ENCRYPTION_METHODS: + if ( + self.command is Command.EXPORT + and self.encryption_method not in 
self.SUPPORTED_ENCRYPTION_METHODS + ): logger.error("Unsupported encryption method") raise ExportException(sdstatus=Status.ERROR_ARCHIVE_METADATA) except ValueError as v: - raise ExportException(sdstatus=Status.ERROR_METADATA_PARSING) from v + raise ExportException(sdstatus=Status.ERROR_ARCHIVE_METADATA) from v + + return self class Archive(object): - def __init__(self, archive): + def __init__(self, archive_path: str): os.umask(0o077) - self.archive = archive - self.submission_dirname = os.path.basename(self.archive).split(".")[0] + self.archive = archive_path self.target_dirname = "sd-export-{}".format( datetime.datetime.now().strftime("%Y%m%d-%H%M%S") ) self.tmpdir = tempfile.mkdtemp() - def extract_tarball(self): + def extract_tarball(self) -> "Archive": + """ + Extract tarball, checking for path traversal, and return Archive object. + """ try: - logger.info("Extracting tarball {} into {}".format(self.archive, self.tmpdir)) + logger.info( + "Extracting tarball {} into {}".format(self.archive, self.tmpdir) + ) safe_extractall(self.archive, self.tmpdir) + return self except Exception as ex: logger.error("Unable to extract tarball: {}".format(ex)) raise ExportException(sdstatus=Status.ERROR_EXTRACTION) from ex - \ No newline at end of file + def set_metadata(self, metadata: Metadata) -> "Archive": + """ + Set relevant metadata attributes for a given archive. + """ + self.command = metadata.command + if self.command is Command.EXPORT: + # When we support multiple encryption types, we will also want to add the + # encryption_method here + self.encryption_key = metadata.encryption_key + return self diff --git a/securedrop_export/command.py b/securedrop_export/command.py index 382b4feff..06a31677d 100644 --- a/securedrop_export/command.py +++ b/securedrop_export/command.py @@ -1,12 +1,14 @@ from enum import Enum + class Command(Enum): """ All supported commands. 
- Values are as supplied by the calling VM (sd-app), and a change in any values require + Values are as supplied by the calling VM (sd-app), and a change in any values requires corresponding changes in the calling VM. """ + PRINTER_PREFLIGHT = "printer-preflight" PRINTER_TEST = "printer-test" PRINT = "printer" diff --git a/securedrop_export/directory_util.py b/securedrop_export/directory_util.py index b4608851c..a2a866c7e 100644 --- a/securedrop_export/directory_util.py +++ b/securedrop_export/directory_util.py @@ -2,7 +2,6 @@ import tarfile from pathlib import Path from typing import Optional, Union -import subprocess import logging logger = logging.getLogger(__name__) @@ -105,7 +104,9 @@ def _check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: if filename_or_filepath.is_absolute(): base_path = filename_or_filepath else: - base_path = Path.cwd() # use cwd so we can next ensure relative path does not traverse up + base_path = ( + Path.cwd() + ) # use cwd so we can next ensure relative path does not traverse up try: relative_path = relative_filepath(filename_or_filepath, base_path) @@ -114,7 +115,10 @@ def _check_path_traversal(filename_or_filepath: Union[str, Path]) -> None: # base, but can still have harmful side effects to the application. If this kind of # traversal is needed, then call relative_filepath instead in order to check that the # desired traversal does not go past a safe base directory. 
- if relative_path != filename_or_filepath and not filename_or_filepath.is_absolute(): + if ( + relative_path != filename_or_filepath + and not filename_or_filepath.is_absolute() + ): raise ValueError except ValueError: raise ValueError(f"Unsafe file or directory name: '{filename_or_filepath}'") @@ -147,5 +151,6 @@ def _check_dir_permissions(dir_path: Union[str, Path]) -> None: stat_res = os.stat(dir_path).st_mode masked = stat_res & 0o777 if masked & 0o077: - raise RuntimeError("Unsafe permissions ({}) on {}".format(oct(stat_res), dir_path)) - + raise RuntimeError( + "Unsafe permissions ({}) on {}".format(oct(stat_res), dir_path) + ) diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py index 8f9dc6d43..7abb33b02 100644 --- a/securedrop_export/disk/cli.py +++ b/securedrop_export/disk/cli.py @@ -1,11 +1,6 @@ -import datetime -import json import logging import os -import shutil import subprocess -import tempfile -import sys from typing import List, Optional @@ -36,12 +31,18 @@ def get_connected_devices(self) -> List[str]: Raise ExportException if any commands fail. """ + logger.info("Checking connected volumes") try: lsblk = subprocess.Popen( - ["lsblk", "-o", "NAME,TYPE"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ["lsblk", "-o", "NAME,TYPE"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, ) grep = subprocess.Popen( - ["grep", "disk"], stdin=lsblk.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ["grep", "disk"], + stdin=lsblk.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, ) command_output = grep.stdout.readlines() @@ -57,12 +58,14 @@ def _get_removable_devices(self, attached_devices: List[str]) -> List[str]: """ Determine which block devices are USBs by selecting those that are removable. 
""" + logger.info("Checking removable devices") usb_devices = [] for device in attached_devices: is_removable = False try: removable = subprocess.check_output( - ["cat", f"/sys/class/block/{device}/removable"], stderr=subprocess.PIPE + ["cat", f"/sys/class/block/{device}/removable"], + stderr=subprocess.PIPE, ) # 0 for non-removable device, 1 for removable @@ -75,6 +78,7 @@ def _get_removable_devices(self, attached_devices: List[str]) -> List[str]: if is_removable: usb_devices.append(f"/dev/{device}") + logger.info(f"{len(usb_devices)} connected") return usb_devices def get_partitioned_device(self, blkid: str) -> str: @@ -85,30 +89,40 @@ def get_partitioned_device(self, blkid: str) -> str: Raise ExportException if partition check fails or device has unsupported partition scheme (currently, multiple partitions are unsupported). """ - try: + device_and_partitions = self._check_partitions(blkid) - device_and_partitions = subprocess.check_output( - ["lsblk", "-o", "TYPE", "--noheadings", blkid], stderr=subprocess.PIPE + if device_and_partitions: + partition_count = ( + device_and_partitions.decode("utf-8").split("\n").count("part") ) + logger.debug(f"Counted {partition_count} partitions") + if partition_count > 1: + # We don't currently support devices with multiple partitions + logger.error( + f"Multiple partitions not supported ({partition_count} partitions" + f" on {blkid})" + ) + raise ExportException(sdstatus=Status.INVALID_DEVICE_DETECTED) - if device_and_partitions: - partition_count = device_and_partitions.decode("utf-8").split("\n").count("part") - if partition_count > 1: - # We don't currently support devices with multiple partitions - logger.error( - f"Multiple partitions not supported (found {partition_count} partitions on {blkid}" - ) - raise ExportException(sdstatus=Status.INVALID_DEVICE_DETECTED) + # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted + if partition_count == 1: + logger.debug("One partition found") + blkid 
+= "1" - # redefine device to /dev/sda if disk is encrypted, /dev/sda1 if partition encrypted - if partition_count == 1: - blkid += "1" + return blkid - return blkid + else: + # lsblk did not return output we could process + logger.error("Error checking device partitions") + raise ExportException(sdstatus=Status.DEVICE_ERROR) - else: - # lsblk did not return output we could process - raise ExportException(sdstatus=Status.DEVICE_ERROR) + def _check_partitions(self, blkid: str) -> str: + try: + logger.debug(f"Checking device partitions on {blkid}") + device_and_partitions = subprocess.check_output( + ["lsblk", "-o", "TYPE", "--noheadings", blkid], stderr=subprocess.PIPE + ) + return device_and_partitions except subprocess.CalledProcessError as ex: logger.error(f"Error checking block deivce {blkid}") @@ -122,7 +136,7 @@ def is_luks_volume(self, device: str) -> bool: isLuks = False try: - logger.debug(f"Checking if {device} is luks encrypted") + logger.debug("Checking if target device is luks encrypted") # cryptsetup isLuks returns 0 if the device is a luks volume # subprocess will throw if the device is not luks (rc !=0) @@ -130,9 +144,9 @@ def is_luks_volume(self, device: str) -> bool: isLuks = True - except subprocess.CalledProcessError as ex: + except subprocess.CalledProcessError: # Not necessarily an error state, just means the volume is not LUKS encrypted - logger.debug(f"{device} is not LUKS-encrypted") + logger.info("Target device is not LUKS-encrypted") return isLuks @@ -142,8 +156,11 @@ def _get_luks_name_from_headers(self, device: str) -> str: Raise ExportException if errors encounterd during attempt to parse LUKS headers. 
""" + logger.debug("Get LUKS name from headers") try: - luks_header = subprocess.check_output(["sudo", "cryptsetup", "luksDump", device]) + luks_header = subprocess.check_output( + ["sudo", "cryptsetup", "luksDump", device] + ) if luks_header: luks_header_list = luks_header.decode("utf-8").split("\n") for line in luks_header_list: @@ -151,11 +168,13 @@ def _get_luks_name_from_headers(self, device: str) -> str: if "UUID" in items[0]: return "luks-" + items[1] - # If no header or no UUID field, we can't use this drive - logger.error(f"Failed to get UUID from LUKS header; {device} may not be correctly formatted") + # If no header or no UUID field, we can't use this drive + logger.error( + f"Failed to get UUID from LUKS header; {device} may not be correctly formatted" + ) raise ExportException(sdstatus=Status.INVALID_DEVICE_DETECTED) except subprocess.CalledProcessError as ex: - logger.error(f"Failed to dump LUKS header") + logger.error("Failed to dump LUKS header") raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex def get_luks_volume(self, device: str) -> Volume: @@ -174,10 +193,13 @@ def get_luks_volume(self, device: str) -> Volume: """ try: mapped_name = self._get_luks_name_from_headers(device) + logger.debug(f"Mapped name is {mapped_name}") # Setting the mapped_name does not mean the device has already been unlocked. luks_volume = Volume( - device_name=device, mapped_name=mapped_name, encryption=EncryptionScheme.LUKS + device_name=device, + mapped_name=mapped_name, + encryption=EncryptionScheme.LUKS, ) # If the device has been unlocked, we can see if it's mounted and @@ -199,14 +221,20 @@ def unlock_luks_volume(self, volume: Volume, decryption_key: str) -> Volume: Raise ExportException if errors are encountered during device unlocking. 
""" - if not volume.encryption is EncryptionScheme.LUKS: + if volume.encryption is not EncryptionScheme.LUKS: logger.error("Must call unlock_luks_volume() on LUKS-encrypted device") raise ExportException(sdstatus=Status.DEVICE_ERROR) try: logger.debug("Unlocking luks volume {}".format(volume.device_name)) p = subprocess.Popen( - ["sudo", "cryptsetup", "luksOpen", volume.device_name, volume.mapped_name], + [ + "sudo", + "cryptsetup", + "luksOpen", + volume.device_name, + volume.mapped_name, + ], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, @@ -217,7 +245,9 @@ def unlock_luks_volume(self, volume: Volume, decryption_key: str) -> Volume: if rc == 0: return Volume( - device_name=volume.device_name, mapped_name=volume.mapped_name, encryption=EncryptionScheme.LUKS + device_name=volume.device_name, + mapped_name=volume.mapped_name, + encryption=EncryptionScheme.LUKS, ) else: logger.error("Bad volume passphrase") @@ -231,6 +261,7 @@ def _get_mountpoint(self, volume: Volume) -> Optional[str]: Check for existing mountpoint. Raise ExportException if errors encountered during command. """ + logger.debug("Checking mountpoint") try: output = subprocess.check_output( ["lsblk", "-o", "MOUNTPOINT", "--noheadings", volume.device_name] @@ -251,22 +282,23 @@ def mount_volume(self, volume: Volume) -> Volume: Raise ExportException if errors are encountered during mounting. 
""" if not volume.unlocked: + logger.error("Volume is not unlocked.") raise ExportException(sdstatus=Status.ERROR_MOUNT) mountpoint = self._get_mountpoint(volume) if mountpoint: - logger.debug("The device is already mounted") + logger.info("The device is already mounted") if volume.mountpoint is not mountpoint: - logger.warning(f"Mountpoint was inaccurate, updating") + logger.warning("Mountpoint was inaccurate, updating") volume.mountpoint = mountpoint return volume else: + logger.info("Mount volume at default mountpoint") return self._mount_at_mountpoint(volume, self._DEFAULT_MOUNTPOINT) - def _mount_at_mountpoint(self, volume: Volume, mountpoint: str) -> Volume: """ Mount a volume at the supplied mountpoint, creating the mountpoint directory and @@ -283,10 +315,12 @@ def _mount_at_mountpoint(self, volume: Volume, mountpoint: str) -> Volume: raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex # Mount device /dev/mapper/{mapped_name} at /media/usb/ - mapped_device_path = os.path.join(volume.MAPPED_VOLUME_PREFIX, volume.mapped_name) + mapped_device_path = os.path.join( + volume.MAPPED_VOLUME_PREFIX, volume.mapped_name + ) try: - logger.debug(f"Mounting volume {volume.device_name} at {mountpoint}") + logger.info(f"Mounting volume at {mountpoint}") subprocess.check_call(["sudo", "mount", mapped_device_path, mountpoint]) subprocess.check_call(["sudo", "chown", "-R", "user:user", mountpoint]) @@ -311,10 +345,12 @@ def write_data_to_device( subprocess.check_call(["mkdir", target_path]) export_data = os.path.join(submission_tmpdir, "export_data/") - logger.info("Copying file to {}".format(submission_target_dirname)) + logger.debug("Copying file to {}".format(submission_target_dirname)) subprocess.check_call(["cp", "-r", export_data, target_path]) - logger.info("File copied successfully to {}".format(submission_target_dirname)) + logger.info( + "File copied successfully to {}".format(submission_target_dirname) + ) except (subprocess.CalledProcessError, OSError) as 
ex: raise ExportException(sdstatus=Status.ERROR_EXPORT) from ex @@ -330,7 +366,7 @@ def cleanup_drive_and_tmpdir(self, volume: Volume, submission_tmpdir: str): Raise ExportException if errors during cleanup are encoutered. """ - logger.info("Syncing filesystems") + logger.debug("Syncing filesystems") try: subprocess.check_call(["sync"]) umounted = self._unmount_volume(volume) diff --git a/securedrop_export/disk/new_service.py b/securedrop_export/disk/new_service.py index 1084f751a..b5702a474 100644 --- a/securedrop_export/disk/new_service.py +++ b/securedrop_export/disk/new_service.py @@ -1,17 +1,10 @@ import logging -import os -import subprocess -import sys - -from enum import Enum - -from typing import List from securedrop_export.archive import Archive from .cli import CLI from .status import Status -from .volume import EncryptionScheme, Volume +from .volume import Volume from securedrop_export.exceptions import ExportException @@ -29,7 +22,7 @@ def __init__(self, cli: CLI): def run(self, arg: str) -> Status: """ - Run export actions. + Run export actions. """ def scan_all_devices(self) -> Status: @@ -46,13 +39,13 @@ def scan_all_devices(self) -> Status: elif number_devices > 1: return Status.MULTI_DEVICE_DETECTED else: - return scan_single_device(all_devices[0]) + return self.scan_single_device(all_devices[0]) - except ExportException: + except ExportException as ex: logger.error(ex) return Status.DEVICE_ERROR # Could not assess devices - def scan_single_device(self, str: blkid) -> Status: + def scan_single_device(self, blkid: str) -> Status: """ Given a string representing a single block device, see if it is a suitable export target and return information about its state. 
diff --git a/securedrop_export/disk/new_status.py b/securedrop_export/disk/new_status.py index 2bb0c242a..285d9f8b9 100644 --- a/securedrop_export/disk/new_status.py +++ b/securedrop_export/disk/new_status.py @@ -1,20 +1,29 @@ from securedrop_export.status import BaseStatus + class Status(BaseStatus): NO_DEVICE_DETECTED = "NO_DEVICE_DETECTED" - INVALID_DEVICE_DETECTED = "INVALID_DEVICE_DETECTED" # Multi partitioned, not encrypted, etc - MULTI_DEVICE_DETECTED = "MULTI_DEVICE_DETECTED" # Not currently supported + INVALID_DEVICE_DETECTED = ( + "INVALID_DEVICE_DETECTED" # Multi partitioned, not encrypted, etc + ) + MULTI_DEVICE_DETECTED = "MULTI_DEVICE_DETECTED" # Not currently supported - DEVICE_LOCKED = "DEVICE_LOCKED" # One device detected, and it's locked - DEVICE_WRITABLE = "DEVICE_WRITABLE" # One device detected, and it's unlocked (and mounted) + DEVICE_LOCKED = "DEVICE_LOCKED" # One device detected, and it's locked + DEVICE_WRITABLE = ( + "DEVICE_WRITABLE" # One device detected, and it's unlocked (and mounted) + ) ERROR_UNLOCK_LUKS = "ERROR_UNLOCK_LUKS" ERROR_UNLOCK_GENERIC = "ERROR_UNLOCK_GENERIC" - ERROR_MOUNT = "ERROR_MOUNT" # Unlocked but not mounted + ERROR_MOUNT = "ERROR_MOUNT" # Unlocked but not mounted SUCCESS_EXPORT = "SUCCESS_EXPORT" - ERROR_EXPORT = "ERROR_EXPORT" # Could not write to disk - ERROR_EXPORT_CLEANUP = "ERROR_EXPORT_CLEANUP" # If export succeeds but drives were not properly unmounted + ERROR_EXPORT = "ERROR_EXPORT" # Could not write to disk + + # export succeeds but drives were not properly unmounted + ERROR_EXPORT_CLEANUP = "ERROR_EXPORT_CLEANUP" - DEVICE_ERROR = "DEVICE_ERROR" # Something went wrong while trying to check the device + DEVICE_ERROR = ( + "DEVICE_ERROR" # Something went wrong while trying to check the device + ) diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index adabf4e4e..e87386a91 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -1,11 +1,5 @@ 
import logging -import os -import subprocess -import sys -from typing import List - -from securedrop_export.archive import Archive from securedrop_export.exceptions import ExportException from .cli import CLI @@ -16,7 +10,6 @@ class Service: - def __init__(self, submission, cli=None): self.submission = submission self.cli = cli or CLI() @@ -32,6 +25,7 @@ def check_connected_devices(self) -> Status: num_devices = len(all_devices) except ExportException as ex: + logger.error(f"Error encountered during USB check: {ex.sdstatus.value}") # Use legacy status instead of new status values raise ExportException(sdstatus=Status.LEGACY_ERROR_USB_CHECK) from ex @@ -42,7 +36,6 @@ def check_connected_devices(self) -> Status: elif num_devices > 1: raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - def check_disk_format(self) -> Status: """ Check if volume is correctly formatted for export. @@ -51,15 +44,21 @@ def check_disk_format(self) -> Status: all_devices = self.cli.get_connected_devices() if len(all_devices) == 1: - device = self.cli.get_partitioned_device(all_devices) + device = self.cli.get_partitioned_device(all_devices[0]) + logger.info("Check if LUKS") if not self.cli.is_luks_volume(device): - raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + raise ExportException( + sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + ) # We can support checking if a drive is already unlocked, but for # backwards compatibility, this is the only expected status - # at this stage + # at this stage return Status.LEGACY_USB_ENCRYPTED except ExportException as ex: + logger.error( + f"Error encountered during disk format check: {ex.sdstatus.value}" + ) # Return legacy status values for now for ongoing client compatibility if ex.sdstatus in [s for s in NewStatus]: status = self._legacy_status(ex.sdstatus) @@ -67,10 +66,9 @@ def check_disk_format(self) -> Status: elif ex.sdstatus: raise else: - raise 
ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - + raise ExportException(sdstatus=Status.LEGACY_USB_DISK_ERROR) - def export(self) -> Status: + def export(self): """ Export all files to target device. """ @@ -83,27 +81,38 @@ def export(self) -> Status: device = self.cli.get_partitioned_device(all_devices[0]) # Decide what kind of volume it is + logger.info("Check if LUKS") if self.cli.is_luks_volume(device): volume = self.cli.get_luks_volume(device) + logger.info("Check if writable") if not volume.writable: - unlocked = self.cli.unlock_luks_volume( - volume, self.submission.archive_metadata.encryption_key + logger.info("Not writable-will try unlocking") + volume = self.cli.unlock_luks_volume( + volume, self.submission.encryption_key ) - mounted = self.cli.mount_volume(unlocked) + volume = self.cli.mount_volume(volume) - logger.debug(f"Export submission to {mounted.mountpoint}") - self.cli.write_data_to_device(self.submission.tmpdir, self.submission.target_dirname, mounted) - return Status.SUCCESS_EXPORT + logger.info(f"Export submission to {volume.mountpoint}") + self.cli.write_data_to_device( + self.submission.tmpdir, self.submission.target_dirname, volume + ) + # This is SUCCESS_EXPORT, but the 0.7.0 client is not expecting + # a return status from a successful export operation. + # When the client is updated, we will return SUCCESS_EXPORT here. else: # Another kind of drive: VeraCrypt/TC, or unsupported. 
# For now this is an error--in future there will be support # for additional encryption formats logger.error(f"Export failed because {device} is not supported") - raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) + raise ExportException( + sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + ) except ExportException as ex: - print(ex) + logger.error( + f"Error encountered during disk format check: {ex.sdstatus.value}" + ) # Return legacy status values for now for ongoing client compatibility if ex.sdstatus in [s for s in NewStatus]: status = self._legacy_status(ex.sdstatus) @@ -113,20 +122,23 @@ def export(self) -> Status: else: raise ExportException(sdstatus=Status.LEGACY_ERROR_GENERIC) - def _legacy_status(self, status: NewStatus) -> Status: """ Backwards-compatibility - status values that client (@0.7.0) is expecting. """ + logger.info(f"Convert to legacy: {status.value}") if status is NewStatus.ERROR_MOUNT: return Status.LEGACY_ERROR_USB_MOUNT elif status in [NewStatus.ERROR_EXPORT, NewStatus.ERROR_EXPORT_CLEANUP]: return Status.LEGACY_ERROR_USB_WRITE elif status in [NewStatus.ERROR_UNLOCK_LUKS, NewStatus.ERROR_UNLOCK_GENERIC]: return Status.LEGACY_USB_BAD_PASSPHRASE - elif status in [NewStatus.INVALID_DEVICE_DETECTED, NewStatus.MULTI_DEVICE_DETECTED]: + elif status in [ + NewStatus.INVALID_DEVICE_DETECTED, + NewStatus.MULTI_DEVICE_DETECTED, + ]: return Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED # The other status values, such as Status.NO_DEVICE_DETECTED, are not returned by the # CLI, so we don't need to check for them here else: - return Status.LEGACY_ERROR_GENERIC \ No newline at end of file + return Status.LEGACY_ERROR_GENERIC diff --git a/securedrop_export/disk/status.py b/securedrop_export/disk/status.py index 4a3aa8881..fa0bdf869 100644 --- a/securedrop_export/disk/status.py +++ b/securedrop_export/disk/status.py @@ -1,19 +1,20 @@ from securedrop_export.status import BaseStatus + class Status(BaseStatus): LEGACY_ERROR_GENERIC = 
"ERROR_GENERIC" # Legacy USB preflight related - LEGACY_USB_CONNECTED = "USB_CONNECTED" # Success + LEGACY_USB_CONNECTED = "USB_CONNECTED" # Success LEGACY_USB_NOT_CONNECTED = "USB_NOT_CONNECTED" LEGACY_ERROR_USB_CHECK = "ERROR_USB_CHECK" # Legacy USB Disk preflight related errors - LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" # Success + LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" # Success LEGACY_USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" - #@todo - this can be raised during disk format check + # Can be raised during disk format check LEGACY_USB_DISK_ERROR = "USB_DISK_ERROR" # Legacy Disk export errors @@ -22,4 +23,4 @@ class Status(BaseStatus): LEGACY_ERROR_USB_WRITE = "ERROR_USB_WRITE" # New - SUCCESS_EXPORT = "SUCCESS_EXPORT" \ No newline at end of file + SUCCESS_EXPORT = "SUCCESS_EXPORT" diff --git a/securedrop_export/disk/volume.py b/securedrop_export/disk/volume.py index a3049e16e..c6bc2f8ae 100644 --- a/securedrop_export/disk/volume.py +++ b/securedrop_export/disk/volume.py @@ -17,8 +17,8 @@ class Volume: """ A volume on a removable device. - Volumes have a device name ("/dev/sdX"), a mapped name ("/dev/mapper/xxx"), an encryption scheme, - and a mountpoint if they are mounted. + Volumes have a device name ("/dev/sdX"), a mapped name ("/dev/mapper/xxx"), an encryption + scheme, and a mountpoint if they are mounted. 
""" def __init__( @@ -53,5 +53,7 @@ def unlocked(self) -> bool: return ( self.mapped_name is not None and self.encryption is not EncryptionScheme.UNKNOWN - and os.path.exists(os.path.join(self.MAPPED_VOLUME_PREFIX, self.mapped_name)) + and os.path.exists( + os.path.join(self.MAPPED_VOLUME_PREFIX, self.mapped_name) + ) ) diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index d740fc36b..78c0519c8 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -1,5 +1,4 @@ import logging -from typing import Optional logger = logging.getLogger(__name__) @@ -16,7 +15,8 @@ def __init__(self, *args, **kwargs): self.sdstatus = kwargs.get("sdstatus") self.sderror = kwargs.get("sderror") -class TimeoutException(ExportException): + +class TimeoutException(Exception): pass diff --git a/securedrop_export/main.py b/securedrop_export/main.py index 408173649..dca8e3f9a 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -3,12 +3,12 @@ import platform import logging import sys -import subprocess from securedrop_export.archive import Archive, Metadata from securedrop_export.command import Command from securedrop_export.status import BaseStatus from securedrop_export.directory_util import safe_mkdir +from securedrop_export.exceptions import ExportException from securedrop_export.disk.service import Service as ExportService from securedrop_export.print.service import Service as PrintService @@ -22,139 +22,136 @@ logger = logging.getLogger(__name__) + class Status(BaseStatus): """ Status values that can occur during initialization. """ + ERROR_LOGGING = "ERROR_LOGGING" ERROR_GENERIC = "ERROR_GENERIC" ERROR_FILE_NOT_FOUND = "ERROR_FILE_NOT_FOUND" + def entrypoint(): """ Entrypoint method (Note: a method is required for setuptools). Configure logging, extract tarball, and run desired export service, exiting with return code 0. 
""" - try: - _configure_logging() - except Exception: - _exit_gracefully(submission=None, status=Status.ERROR_LOGGING) - - logger.info("Starting SecureDrop Export {}".format(__version__)) - data = Archive(sys.argv[1]) + status, stacktrace, submission = None, None, None try: - # Halt immediately if target file is absent - if not os.path.exists(data.archive): - logger.info("Archive is not found {}.".format(data.archive)) - _exit_gracefully(data, Status.ERROR_FILE_NOT_FOUND) - - # Extract archive and either print or export to disk. - # Includes cleanup logic, which removes any temporary directories associated with - # the archive. - _extract_and_run(data) - - except Exception as e: - _exit_gracefully(data, Status.ERROR_GENERIC, e.output) - - -def _configure_logging(): - """ - All logging related settings are set up by this function. - """ - safe_mkdir(DEFAULT_HOME) - safe_mkdir(DEFAULT_HOME, LOG_DIR_NAME) + _configure_logging() + logger.info("Starting SecureDrop Export {}".format(__version__)) - log_file = os.path.join(DEFAULT_HOME, LOG_DIR_NAME, EXPORT_LOG_FILENAME) + data_path = sys.argv[1] - # set logging format - log_fmt = "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " "%(levelname)s: %(message)s" - formatter = logging.Formatter(log_fmt) + # Halt if target file is absent + if not os.path.exists(data_path): + logger.info("Archive is not found {}.".format(data_path)) + status = Status.ERROR_FILE_NOT_FOUND - handler = TimedRotatingFileHandler(log_file) - handler.setFormatter(formatter) + else: + logger.debug("Extract tarball") + submission = Archive(data_path).extract_tarball() + logger.debug("Validate metadata") + metadata = Metadata(submission.tmpdir).validate() + logger.info("Archive extraction and metadata validation successful") + + # If all we're doing is starting the vm, we're done; otherwise, + # run the appropriate print or export routine + if metadata.command is not Command.START_VM: + submission.set_metadata(metadata) + logger.info(f"Start 
{metadata.command.value} service") + status = _start_service(submission) - # For rsyslog handler - if platform.system() != "Linux": # pragma: no cover - syslog_file = "/var/run/syslog" - else: - syslog_file = "/dev/log" + except ExportException as ex: + logger.error(f"Encountered exception {ex.sdstatus.value}, exiting") + status = ex.sdstatus + stacktrace = ex.output - sysloghandler = SysLogHandler(address=syslog_file) - sysloghandler.setFormatter(formatter) - handler.setLevel(logging.DEBUG) + except Exception as exc: + logger.error("Encountered exception during export, exiting") + status = Status.ERROR_GENERIC + stacktrace = exc.output - # set up primary log - log = logging.getLogger() - log.setLevel(logging.DEBUG) - log.addHandler(handler) - # add the second logger - log.addHandler(sysloghandler) + finally: + _exit_gracefully(submission, status=status, e=stacktrace) -def _extract_and_run(submission: Archive): +def _configure_logging(): """ - Extract tarball and metadata and run appropriate command based on metadata instruction. - Always exits with return code 0 and writes exit status, if applicable, to stderr. + All logging related settings are set up by this function. 
""" - status = None - stacktrace = None - try: - submission.extract_tarball() + safe_mkdir(DEFAULT_HOME) + safe_mkdir(DEFAULT_HOME, LOG_DIR_NAME) - # Validates metadata and ensures requested action is supported - submission.archive_metadata = Metadata.create_and_validate(submission.tmpdir) + log_file = os.path.join(DEFAULT_HOME, LOG_DIR_NAME, EXPORT_LOG_FILENAME) - # If we just wanted to start the VM, our work here is done - if submission.archive_metadata.command is Command.START_VM: - _exit_gracefully(submission) - else: - status = _start_service(submission, command) + # set logging format + log_fmt = ( + "%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) " + "%(levelname)s: %(message)s" + ) + formatter = logging.Formatter(log_fmt) - except ExportException as ex: - status = ex.value.sdstatus - stacktrace = ex.output + handler = TimedRotatingFileHandler(log_file) + handler.setFormatter(formatter) - except Exception as exc: - # All exceptions are wrapped in ExportException, but we are being cautious - logger.error("Encountered exception during export, exiting") - status = Status.ERROR_GENERIC - stacktrace = exc.output - - finally: - _exit_gracefully(submission, status, stacktrace) + # For rsyslog handler + if platform.system() != "Linux": # pragma: no cover + syslog_file = "/var/run/syslog" + else: + syslog_file = "/dev/log" + + sysloghandler = SysLogHandler(address=syslog_file) + sysloghandler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + + # set up primary log + log = logging.getLogger() + log.setLevel(logging.DEBUG) + log.addHandler(handler) + # add the second logger + log.addHandler(sysloghandler) + except Exception as ex: + raise ExportException(sdstatus=Status.ERROR_LOGGING) from ex -def _start_service(submission: Archive, cmd: Command) -> Status: +def _start_service(submission: Archive) -> Status: """ Start print or export service. 
""" # Print Routines - if cmd is Commmand.PRINTER: + if submission.command is Command.PRINT: return PrintService(submission).print() - elif cmd is Commmand.PRINTER_TEST: + elif submission.command is Command.PRINTER_PREFLIGHT: return PrintService(submission).printer_preflight() - elif cmd is Commmand.PRINTER_TEST: + elif submission.command is Command.PRINTER_TEST: return PrintService(submission).printer_test() # Export routines - elif cmd is Commmand.EXPORT: + elif submission.command is Command.EXPORT: return ExportService(submission).export() - elif cmd is Commmand.CHECK_USBS: + elif submission.command is Command.CHECK_USBS: return ExportService(submission).check_connected_devices() - elif cmd is Commmand.CHECK_VOLUME: + elif submission.command is Command.CHECK_VOLUME: return ExportService(submission).check_disk_format() -def _exit_gracefully(submission: Archive, status: Status=None, e=None): + +def _exit_gracefully(submission: Archive, status: BaseStatus = None, e: str = None): """ Utility to print error messages, mostly used during debugging, then exits successfully despite the error. Always exits 0, since non-zero exit values will cause system to try alternative solutions for mimetype handling, which we want to avoid. """ - logger.info(f"Exiting with status: {status.value}") + if status: + logger.info(f"Exit gracefully with status: {status.value}") + else: + logger.info("Exit gracefully (no status code supplied)") if e: logger.error("Captured exception output: {}".format(e.output)) try: @@ -178,6 +175,7 @@ def _write_status(status: BaseStatus): Write string to stderr. 
""" if status: + logger.info(f"Write status {status.value}") sys.stderr.write(status.value) sys.stderr.write("\n") else: diff --git a/securedrop_export/print/service.py b/securedrop_export/print/service.py index a8e22d24c..4cfad872f 100644 --- a/securedrop_export/print/service.py +++ b/securedrop_export/print/service.py @@ -7,13 +7,6 @@ from securedrop_export.exceptions import handler, TimeoutException, ExportException from .status import Status -PRINTER_NAME = "sdw-printer" -PRINTER_WAIT_TIMEOUT = 60 -BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" -BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" -LASERJET_DRIVER = "/usr/share/cups/drv/hpcups.drv" -LASERJET_PPD = "/usr/share/cups/model/hp-laserjet_6l.ppd" - logger = logging.getLogger(__name__) @@ -22,12 +15,19 @@ class Service: Printer service """ + PRINTER_NAME = "sdw-printer" + PRINTER_WAIT_TIMEOUT = 60 + BRLASER_DRIVER = "/usr/share/cups/drv/brlaser.drv" + BRLASER_PPD = "/usr/share/cups/model/br7030.ppd" + LASERJET_DRIVER = "/usr/share/cups/drv/hpcups.drv" + LASERJET_PPD = "/usr/share/cups/model/hp-laserjet_6l.ppd" + def __init__(self, submission): self.submission = submission - self.printer_name = PRINTER_NAME - self.printer_wait_timeout = PRINTER_WAIT_TIMEOUT + self.printer_name = self.PRINTER_NAME + self.printer_wait_timeout = self.PRINTER_WAIT_TIMEOUT - def print(self) -> Status: + def print(self): """ Routine to print all files. Throws ExportException if an error is encountered. @@ -35,40 +35,49 @@ def print(self) -> Status: logger.info("Printing all files from archive") self._check_printer_setup() self._print_all_files() - return Status.PRINT_SUCCESS + # When client can accept new print statuses, we will return + # a success status here + # return Status.PRINT_SUCCESS - def printer_preflight(self) -> Status: + def printer_preflight(self): """ Routine to perform preflight printer testing. Throws ExportException if an error is encoutered. 
""" - logger.info("Running printer preflight") + logger.info("Running printer preflight") self._check_printer_setup() - return Status.PREFLIGHT_SUCCESS + # When client can accept new print statuses, we will return + # a success status here + # return Status.PREFLIGHT_SUCCESS - def printer_test(self) -> Status: + def printer_test(self): """ Routine to print a test page. Throws ExportException if an error is encountered. """ - logger.info("Printing test page") + logger.info("Printing test page") self._check_printer_setup() self._print_test_page() - return Status.PRINT_SUCCESS + # When client can accept new print statuses, we will return + # a success status here + # return Status.TEST_SUCCESS def _wait_for_print(self): """ Use lpstat to ensure the job was fully transfered to the printer Return True if print was successful, otherwise throw ExportException. + Currently, the handler `handler` is defined in `exceptions.py`. """ signal.signal(signal.SIGALRM, handler) signal.alarm(self.printer_wait_timeout) printer_idle_string = "printer {} is idle".format(self.printer_name) while True: try: - logger.info("Running lpstat waiting for printer {}".format(self.printer_name)) + logger.info( + "Running lpstat waiting for printer {}".format(self.printer_name) + ) output = subprocess.check_output(["lpstat", "-p", self.printer_name]) if printer_idle_string in output.decode("utf-8"): logger.info("Print completed") @@ -108,10 +117,10 @@ def _check_printer_setup(self) -> None: printer_uri = printers[0] printer_ppd = self._install_printer_ppd(printer_uri) - self.setup_printer(printer_uri, printer_ppd) + self._setup_printer(printer_uri, printer_ppd) except subprocess.CalledProcessError as e: logger.error(e) - raise ExportException(sdstatus=Status.ERROR_GENERIC) + raise ExportException(sdstatus=Status.ERROR_UNKNOWN) def _get_printer_uri(self) -> str: """ @@ -123,6 +132,7 @@ def _get_printer_uri(self) -> str: try: output = subprocess.check_output(["sudo", "lpinfo", "-v"]) except 
subprocess.CalledProcessError: + logger.error("Error attempting to retrieve printer uri with lpinfo") raise ExportException(sdstatus=Status.ERROR_PRINTER_URI) # fetch the usb printer uri @@ -146,16 +156,17 @@ def _get_printer_uri(self) -> str: def _install_printer_ppd(self, uri): if not any(x in uri for x in ("Brother", "LaserJet")): - logger.error("Cannot install printer ppd for unsupported printer: {}".format(uri)) + logger.error( + "Cannot install printer ppd for unsupported printer: {}".format(uri) + ) raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_SUPPORTED) - return if "Brother" in uri: - printer_driver = BRLASER_DRIVER - printer_ppd = BRLASER_PPD + printer_driver = self.BRLASER_DRIVER + printer_ppd = self.BRLASER_PPD elif "LaserJet" in uri: - printer_driver = LASERJET_DRIVER - printer_ppd = LASERJET_PPD + printer_driver = self.LASERJET_DRIVER + printer_ppd = self.LASERJET_PPD # Compile and install drivers that are not already installed if not os.path.exists(printer_ppd): @@ -177,7 +188,7 @@ def _install_printer_ppd(self, uri): def _setup_printer(self, printer_uri, printer_ppd): # Add the printer using lpadmin logger.info("Setting up printer {}".format(self.printer_name)) - safe_check_call( + self.safe_check_call( command=[ "sudo", "lpadmin", @@ -235,7 +246,7 @@ def _print_file(self, file_to_print): folder = os.path.dirname(file_to_print) converted_filename = file_to_print + ".pdf" converted_path = os.path.join(folder, converted_filename) - safe_check_call( + self.safe_check_call( command=["unoconv", "-o", converted_path, file_to_print], error_status=Status.ERROR_PRINT, ) @@ -248,8 +259,9 @@ def _print_file(self, file_to_print): error_status=Status.ERROR_PRINT, ) - - def safe_check_call(command: str, error_status: Status, ignore_stderr_startswith=None): + def safe_check_call( + self, command: str, error_status: Status, ignore_stderr_startswith=None + ): """ Wrap subprocess.check_output to ensure we wrap CalledProcessError and return our own 
exception, and log the error messages. @@ -267,4 +279,3 @@ def safe_check_call(command: str, error_status: Status, ignore_stderr_startswith raise ExportException(sdstatus=error_status, sderror=err) except subprocess.CalledProcessError as ex: raise ExportException(sdstatus=error_status, sderror=ex.output) - diff --git a/securedrop_export/print/status.py b/securedrop_export/print/status.py index fef0dbdf1..5ec81c8a3 100644 --- a/securedrop_export/print/status.py +++ b/securedrop_export/print/status.py @@ -1,5 +1,6 @@ from securedrop_export.status import BaseStatus + class Status(BaseStatus): # Printer preflight related errors @@ -8,11 +9,14 @@ class Status(BaseStatus): ERROR_PRINTER_NOT_SUPPORTED = "ERROR_PRINTER_NOT_SUPPORTED" ERROR_PRINTER_DRIVER_UNAVAILABLE = "ERROR_PRINTER_DRIVER_UNAVAILABLE" ERROR_PRINTER_INSTALL = "ERROR_PRINTER_INSTALL" + ERROR_PRINTER_URI = "ERROR_PRINTER_URI" # new - # Printer export errors + # Print error ERROR_PRINT = "ERROR_PRINT" # New PREFLIGHT_SUCCESS = "PRINTER_PREFLIGHT_SUCCESS" TEST_SUCCESS = "PRINTER_TEST_SUCCESS" PRINT_SUCCESS = "PRINTER_SUCCESS" + + ERROR_UNKNOWN = "ERROR_GENERIC" # Unknown printer error, backwards-compatible diff --git a/securedrop_export/status.py b/securedrop_export/status.py index 29b304574..bc3d29d97 100644 --- a/securedrop_export/status.py +++ b/securedrop_export/status.py @@ -1,5 +1,6 @@ from enum import Enum + class BaseStatus(Enum): """ Base class for export and print statuses. A Status represents a string that can be returned @@ -7,4 +8,5 @@ class BaseStatus(Enum): Status values are defined in subclasses in their respective packages. A full list is available in the project's README. 
""" + pass diff --git a/setup.py b/setup.py index 485a88084..b04979ddb 100644 --- a/setup.py +++ b/setup.py @@ -31,5 +31,7 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", ), - entry_points={"console_scripts": ["send-to-usb = securedrop_export.main:entrypoint"]}, + entry_points={ + "console_scripts": ["send-to-usb = securedrop_export.main:entrypoint"] + }, ) diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index 55d79a40e..d174dc445 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -1,10 +1,7 @@ import pytest from unittest import mock -import os -import pytest import subprocess -import sys from securedrop_export.disk.cli import CLI from securedrop_export.disk.volume import EncryptionScheme, Volume @@ -32,6 +29,7 @@ class TestCli: Test the CLI wrapper that handless identification and locking/unlocking of USB volumes. """ + @classmethod def setup_class(cls): cls.cli = CLI() @@ -49,7 +47,8 @@ def _setup_usb_devices(self, mocker, disks, is_removable): Parameters: disks (byte array): Array of disk names separated by newline. 
- is_removable (byte array): Array of removable status results (1 for removable) separated by newline + is_removable (byte array): Array of removable status results (1 for removable), + separated by newline """ # Patch commandline calls to `lsblk | grep disk` @@ -58,7 +57,7 @@ def _setup_usb_devices(self, mocker, disks, is_removable): command_output.stdout.readlines = mock.MagicMock(return_value=disks) mocker.patch("subprocess.Popen", return_value=command_output) - # Pactch commandline call to 'cat /sys/class/block/{device}/removable' + # Patch commandline call to 'cat /sys/class/block/{device}/removable' # Using side_effect with an iterable allows for different return value each time, # which matches what would happen if iterating through list of devices @@ -73,7 +72,10 @@ def test_get_connected_devices(self, mocker): assert result[0] == "/dev/sda" and result[1] == "/dev/sdb" - @mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output")) + @mock.patch( + "subprocess.check_output", + side_effect=subprocess.CalledProcessError(1, "check_output"), + ) def test_get_removable_devices_none_removable(self, mocker): disks = [b"sda disk\n", b"sdb disk\n"] removable = [b"0\n", b"0\n"] @@ -83,7 +85,9 @@ def test_get_removable_devices_none_removable(self, mocker): result = self.cli._get_removable_devices(disks) assert len(result) == 0 - @mock.patch("subprocess.Popen", side_effect=subprocess.CalledProcessError(1, "Popen")) + @mock.patch( + "subprocess.Popen", side_effect=subprocess.CalledProcessError(1, "Popen") + ) def test_get_connected_devices_error(self, mocked_subprocess): with pytest.raises(ExportException): @@ -91,15 +95,20 @@ def test_get_connected_devices_error(self, mocked_subprocess): @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_NO_PART) def test_get_partitioned_device_no_partition(self, mocked_call): - assert self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) == _DEFAULT_USB_DEVICE + assert ( + 
self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) == _DEFAULT_USB_DEVICE + ) @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_ONE_PART) def test_get_partitioned_device_one_partition(self, mocked_call): - assert self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) == _DEFAULT_USB_DEVICE+"1" + assert ( + self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) + == _DEFAULT_USB_DEVICE + "1" + ) @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_MULTI_PART) def test_get_partitioned_device_multi_partition(self, mocked_call): - + with pytest.raises(ExportException) as ex: self.cli.get_partitioned_device(_SAMPLE_OUTPUT_MULTI_PART) @@ -113,10 +122,11 @@ def test_get_partitioned_device_lsblk_error(self, mocked_subprocess): assert ex.value.sdstatus is Status.DEVICE_ERROR @mock.patch( - "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output") + "subprocess.check_output", + side_effect=subprocess.CalledProcessError(1, "check_output"), ) def test_get_partitioned_device_multi_partition_error(self, mocked_call): - + # Make sure we wrap CalledProcessError and throw our own exception with pytest.raises(ExportException) as ex: self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) @@ -125,11 +135,14 @@ def test_get_partitioned_device_multi_partition_error(self, mocked_call): @mock.patch("subprocess.check_call", return_value=0) def test_is_luks_volume_true(self, mocked_call): - + # `sudo cryptsetup isLuks` returns 0 if true assert self.cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test_is_luks_volume_false(self, mocked_subprocess): # `sudo cryptsetup isLuks` returns 1 if false; CalledProcessError is thrown @@ -139,10 +152,14 @@ def test_is_luks_volume_false(self, mocked_subprocess): def 
test__get_luks_name_from_headers(self, mocked_subprocess): result = self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) - assert result is not None and result.split("-")[1] in _SAMPLE_LUKS_HEADER.decode("utf8") + assert result is not None and result.split("-")[ + 1 + ] in _SAMPLE_LUKS_HEADER.decode("utf8") - @mock.patch("subprocess.check_output", return_value=b"corrupted-or-invalid-header\n") - def test__get_luks_name_from_headers_error(self, mocked_subprocess): + @mock.patch( + "subprocess.check_output", return_value=b"corrupted-or-invalid-header\n" + ) + def test__get_luks_name_from_headers_error_invalid(self, mocked_subprocess): with pytest.raises(ExportException) as ex: self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) @@ -158,7 +175,9 @@ def test__get_luks_name_from_headers_error_no_header(self, mocked_subprocess): assert ex.value.sdstatus is Status.INVALID_DEVICE_DETECTED @mock.patch("subprocess.check_output", return_value=None) - def test__get_luks_name_from_headers_error_nothing_returned(self, mocked_subprocess): + def test__get_luks_name_from_headers_error_nothing_returned( + self, mocked_subprocess + ): with pytest.raises(ExportException) as ex: self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) @@ -166,7 +185,8 @@ def test__get_luks_name_from_headers_error_nothing_returned(self, mocked_subproc assert ex.value.sdstatus is Status.INVALID_DEVICE_DETECTED @mock.patch( - "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output") + "subprocess.check_output", + side_effect=subprocess.CalledProcessError(1, "check_output"), ) def test__get_luks_name_from_headers_error(self, mocked_subprocess): with pytest.raises(ExportException): @@ -188,7 +208,10 @@ def test_get_luks_volume_still_locked(self, mocked_subprocess, mocked_os_call): assert result.encryption is EncryptionScheme.LUKS assert not result.unlocked - @mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError("check_output", 1)) + 
@mock.patch( + "subprocess.check_output", + side_effect=subprocess.CalledProcessError("check_output", 1), + ) def test_get_luks_volume_error(self, mocked_subprocess): with pytest.raises(ExportException) as ex: self.cli.get_luks_volume(_DEFAULT_USB_DEVICE_ONE_PART) @@ -202,13 +225,19 @@ def test_unlock_luks_volume_success(self, mock_path, mocker): mock_popen.returncode = 0 mocker.patch("subprocess.Popen", return_value=mock_popen) - mocker.patch("subprocess.Popen.communicate", return_value=mock_popen_communicate) + mocker.patch( + "subprocess.Popen.communicate", return_value=mock_popen_communicate + ) mapped_name = "luks-id-123456" - vol = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=mapped_name, encryption=EncryptionScheme.LUKS) + vol = Volume( + device_name=_DEFAULT_USB_DEVICE, + mapped_name=mapped_name, + encryption=EncryptionScheme.LUKS, + ) key = "a_key&_!" result = self.cli.unlock_luks_volume(vol, key) - assert vol.unlocked + assert result.unlocked @mock.patch("os.path.exists", return_value=True) def test_unlock_luks_volume_not_luks(self, mocker): @@ -218,9 +247,12 @@ def test_unlock_luks_volume_not_luks(self, mocker): mocker.patch("subprocess.Popen", mock_popen) - vol = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.UNKNOWN) + vol = Volume( + device_name=_DEFAULT_USB_DEVICE, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.UNKNOWN, + ) key = "a key!" - mapped_name = "luks-id-123456" with pytest.raises(ExportException) as ex: self.cli.unlock_luks_volume(vol, key) @@ -234,18 +266,26 @@ def test_unlock_luks_volume_passphrase_failure(self, mocker): mocker.patch("subprocess.Popen", mock_popen) - vol = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS) + vol = Volume( + device_name=_DEFAULT_USB_DEVICE, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) key = "a key!" 
- mapped_name = "luks-id-123456" with pytest.raises(ExportException): self.cli.unlock_luks_volume(vol, key) - @mock.patch("subprocess.Popen", side_effect=subprocess.CalledProcessError("1", "Popen")) + @mock.patch( + "subprocess.Popen", side_effect=subprocess.CalledProcessError("1", "Popen") + ) def test_unlock_luks_volume_luksOpen_exception(self, mocked_subprocess): - pd = Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS) + pd = Volume( + device_name=_DEFAULT_USB_DEVICE, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) key = "a key!" - mapped_name = "luks-id-123456" with pytest.raises(ExportException) as ex: self.cli.unlock_luks_volume(pd, key) @@ -261,13 +301,17 @@ def test_mount_volume(self, mocked_call, mocked_output, mocked_path): mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - result = self.cli.mount_volume(vol) + self.cli.mount_volume(vol) assert vol.mountpoint is self.cli._DEFAULT_MOUNTPOINT @mock.patch("os.path.exists", return_value=True) - @mock.patch("subprocess.check_output", return_value=b"/dev/pretend/luks-id-123456\n") + @mock.patch( + "subprocess.check_output", return_value=b"/dev/pretend/luks-id-123456\n" + ) @mock.patch("subprocess.check_call", return_value=0) - def test_mount_volume_already_mounted(self, mocked_output, mocked_call, mocked_path): + def test_mount_volume_already_mounted( + self, mocked_output, mocked_call, mocked_path + ): md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, @@ -288,7 +332,10 @@ def test_mount_volume_mkdir(self, mocked_output, mocked_subprocess, mocked_path) assert self.cli.mount_volume(md).mapped_name == _PRETEND_LUKS_ID @mock.patch("subprocess.check_output", return_value=b"\n") - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def 
test_mount_volume_error(self, mocked_subprocess, mocked_output): md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -302,7 +349,10 @@ def test_mount_volume_error(self, mocked_subprocess, mocked_output): assert ex.value.sdstatus is Status.ERROR_MOUNT @mock.patch("os.path.exists", return_value=False) - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test_mount_at_mountpoint_mkdir_error(self, mocked_subprocess, mocked_path): md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -317,7 +367,10 @@ def test_mount_at_mountpoint_mkdir_error(self, mocked_subprocess, mocked_path): assert ex.value.sdstatus is Status.ERROR_MOUNT @mock.patch("os.path.exists", return_value=True) - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test_mount_at_mountpoint_mounting_error(self, mocked_subprocess, mocked_path): md = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -345,7 +398,10 @@ def test__unmount_volume(self, mocked_subprocess, mocked_mountpath): assert result.mountpoint is None @mock.patch("os.path.exists", return_value=True) - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test__unmount_volume_error(self, mocked_subprocess, mocked_mountpath): mounted = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -372,7 +428,10 @@ def test__close_luks_volume(self, mocked_subprocess, mocked_os_call): self.cli._close_luks_volume(mapped) @mock.patch("os.path.exists", return_value=True) - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + 
"subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test__close_luks_volume_error(self, mocked_subprocess, mocked_os_call): mapped = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, @@ -385,7 +444,10 @@ def test__close_luks_volume_error(self, mocked_subprocess, mocked_os_call): assert ex.value.sdstatus is Status.DEVICE_ERROR - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test__remove_temp_directory_error(self, mocked_subprocess): with pytest.raises(ExportException): self.cli._remove_temp_directory("tmp") @@ -412,7 +474,10 @@ def test_write_to_disk(self, mock_check_call): # Don't want to patch it indefinitely though, that will mess with the other tests patch.stop() - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test_write_to_disk_error_still_does_cleanup(self, mock_call): # see above - patch internal method only for this test patch = mock.patch.object(self.cli, "cleanup_drive_and_tmpdir") @@ -428,16 +493,21 @@ def test_write_to_disk_error_still_does_cleanup(self, mock_call): submission = Archive("testfile") with pytest.raises(ExportException): - self.cli.write_data_to_device(submission.tmpdir, submission.target_dirname, vol) + self.cli.write_data_to_device( + submission.tmpdir, submission.target_dirname, vol + ) self.cli.cleanup_drive_and_tmpdir.assert_called_once() patch.stop() - @mock.patch("subprocess.check_call", side_effect=subprocess.CalledProcessError(1, "check_call")) + @mock.patch( + "subprocess.check_call", + side_effect=subprocess.CalledProcessError(1, "check_call"), + ) def test_cleanup_drive_and_tmpdir_error(self, mocked_subprocess): submission = Archive("testfile") mock_volume = 
mock.MagicMock(Volume) - + with pytest.raises(ExportException) as ex: self.cli.cleanup_drive_and_tmpdir(mock_volume, submission.tmpdir) assert ex.value.sdstatus is Status.ERROR_EXPORT_CLEANUP @@ -469,10 +539,19 @@ def test_cleanup_drive_and_tmpdir(self, mock_subprocess, mocked_path): close_patch.stop() remove_tmpdir_patch.stop() - @mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output")) + @mock.patch( + "subprocess.check_output", + side_effect=subprocess.CalledProcessError(1, "check_output"), + ) def test_mountpoint_error(self, mock_subprocess): with pytest.raises(ExportException) as ex: - self.cli._get_mountpoint(Volume(device_name=_DEFAULT_USB_DEVICE, mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS)) + self.cli._get_mountpoint( + Volume( + device_name=_DEFAULT_USB_DEVICE, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + ) assert ex.value.sdstatus is Status.ERROR_MOUNT @@ -480,15 +559,15 @@ def test_mountpoint_error(self, mock_subprocess): def test_mount_mkdir_fails(self, mocked_path): mock_mountpoint = mock.patch.object(self.cli, "_get_mountpoint") mock_mountpoint.return_value = None - # mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output")) - mock_volume = mock.MagicMock() - mock_volume.device_name = _DEFAULT_USB_DEVICE_ONE_PART - mock_volume.mapped_name = _PRETEND_LUKS_ID - mock_volume.EncryptionScheme = EncryptionScheme.LUKS - mock_volume.unlocked = True + vol = Volume( + device_name=_DEFAULT_USB_DEVICE_ONE_PART, + mapped_name=_PRETEND_LUKS_ID, + encryption=EncryptionScheme.LUKS, + ) + mock.patch.object(vol, "unlocked", return_value=True) with pytest.raises(ExportException) as ex: - self.cli.mount_volume(mock_volume) + self.cli.mount_volume(vol) - assert ex.value.sdstatus is Status.ERROR_MOUNT \ No newline at end of file + assert ex.value.sdstatus is Status.ERROR_MOUNT diff --git a/tests/disk/test_service.py 
b/tests/disk/test_service.py index 6cda02716..800a4fd13 100644 --- a/tests/disk/test_service.py +++ b/tests/disk/test_service.py @@ -1,19 +1,12 @@ import pytest from unittest import mock - import os -import pytest -import sys import tempfile -import subprocess -from subprocess import CalledProcessError - from securedrop_export.exceptions import ExportException from securedrop_export.disk.status import Status from securedrop_export.disk.new_status import Status as NewStatus from securedrop_export.disk.volume import Volume, EncryptionScheme - from securedrop_export.archive import Archive, Metadata from securedrop_export.disk.service import Service from securedrop_export.disk.cli import CLI @@ -22,15 +15,24 @@ SAMPLE_OUTPUT_USB = "/dev/sda" # noqa SAMPLE_OUTPUT_USB_PARTITIONED = "/dev/sda1" -class TestExportService: +class TestExportService: @classmethod def setup_class(cls): cls.mock_cli = mock.MagicMock(CLI) cls.mock_submission = cls._setup_submission() - cls.mock_luks_volume_unmounted = Volume(device_name=SAMPLE_OUTPUT_USB, mapped_name="fake-luks-id-123456", encryption=EncryptionScheme.LUKS) - cls.mock_luks_volume_mounted = Volume(device_name=SAMPLE_OUTPUT_USB, mapped_name="fake-luks-id-123456", mountpoint="/media/usb", encryption=EncryptionScheme.LUKS) + cls.mock_luks_volume_unmounted = Volume( + device_name=SAMPLE_OUTPUT_USB, + mapped_name="fake-luks-id-123456", + encryption=EncryptionScheme.LUKS, + ) + cls.mock_luks_volume_mounted = Volume( + device_name=SAMPLE_OUTPUT_USB, + mapped_name="fake-luks-id-123456", + mountpoint="/media/usb", + encryption=EncryptionScheme.LUKS, + ) cls.service = Service(cls.mock_submission, cls.mock_cli) @@ -49,11 +51,12 @@ def _setup_submission(cls) -> Archive: temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: - f.write('{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}') - - submission.archive_metadata = 
Metadata.create_and_validate(temp_folder) + f.write( + '{"device": "disk", "encryption_method":' + ' "luks", "encryption_key": "hunter1"}' + ) - return submission + return submission.set_metadata(Metadata(temp_folder).validate()) def setup_method(self, method): """ @@ -63,7 +66,9 @@ def setup_method(self, method): test methods. """ self.mock_cli.get_connected_devices.return_value = [SAMPLE_OUTPUT_USB] - self.mock_cli.get_partitioned_device.return_value = SAMPLE_OUTPUT_USB_PARTITIONED + self.mock_cli.get_partitioned_device.return_value = ( + SAMPLE_OUTPUT_USB_PARTITIONED + ) self.mock_cli.get_luks_volume.return_value = self.mock_luks_volume_unmounted self.mock_cli.mount_volume.return_value = self.mock_luks_volume_mounted @@ -83,7 +88,10 @@ def test_no_devices_connected(self): assert ex.value.sdstatus is Status.LEGACY_USB_NOT_CONNECTED def test_too_many_devices_connected(self): - self.mock_cli.get_connected_devices.return_value = [SAMPLE_OUTPUT_USB, "/dev/sdb"] + self.mock_cli.get_connected_devices.return_value = [ + SAMPLE_OUTPUT_USB, + "/dev/sdb", + ] with pytest.raises(ExportException) as ex: self.service.check_connected_devices() @@ -100,7 +108,9 @@ def test_device_is_not_luks(self): assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED def test_check_usb_error(self): - self.mock_cli.get_connected_devices.side_effect = ExportException(sdstatus=Status.LEGACY_ERROR_USB_CHECK) + self.mock_cli.get_connected_devices.side_effect = ExportException( + sdstatus=Status.LEGACY_ERROR_USB_CHECK + ) with pytest.raises(ExportException) as ex: self.service.check_connected_devices() @@ -113,17 +123,21 @@ def test_check_disk_format(self): assert status is Status.LEGACY_USB_ENCRYPTED def test_check_disk_format_error(self): - self.mock_cli.get_partitioned_device.side_effect=ExportException(sdstatus=NewStatus.INVALID_DEVICE_DETECTED) + self.mock_cli.get_partitioned_device.side_effect = ExportException( + sdstatus=NewStatus.INVALID_DEVICE_DETECTED + ) with 
pytest.raises(ExportException) as ex: self.service.check_disk_format() - # We still return the legacy status for now + # We still return the legacy status for now assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED def test_export(self): - status = self.service.export() - assert status is Status.SUCCESS_EXPORT + # Currently, a successful export does not return a success status. + # When the client is updated, this will change to assert EXPORT_SUCCESS + # is returned. + self.service.export() def test_export_disk_not_supported(self): self.mock_cli.is_luks_volume.return_value = False @@ -134,8 +148,10 @@ def test_export_disk_not_supported(self): assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED def test_export_write_error(self): - self.mock_cli.is_luks_volume.return_value=True - self.mock_cli.write_data_to_device.side_effect = ExportException(sdstatus=Status.LEGACY_ERROR_USB_WRITE) + self.mock_cli.is_luks_volume.return_value = True + self.mock_cli.write_data_to_device.side_effect = ExportException( + sdstatus=Status.LEGACY_ERROR_USB_WRITE + ) with pytest.raises(ExportException) as ex: self.service.export() @@ -143,7 +159,9 @@ def test_export_write_error(self): assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_WRITE def test_export_throws_new_exception_return_legacy_status(self): - self.mock_cli.get_connected_devices.side_effect = ExportException(sdstatus=NewStatus.ERROR_MOUNT) + self.mock_cli.get_connected_devices.side_effect = ExportException( + sdstatus=NewStatus.ERROR_MOUNT + ) with pytest.raises(ExportException) as ex: self.service.export() @@ -152,8 +170,10 @@ def test_export_throws_new_exception_return_legacy_status(self): @mock.patch("os.path.exists", return_value=True) def test_write_error_returns_legacy_status(self, mock_path): - self.mock_cli.is_luks_volume.return_value=True - self.mock_cli.write_data_to_device.side_effect = ExportException(sdstatus=NewStatus.ERROR_EXPORT) + self.mock_cli.is_luks_volume.return_value 
= True + self.mock_cli.write_data_to_device.side_effect = ExportException( + sdstatus=NewStatus.ERROR_EXPORT + ) with pytest.raises(ExportException) as ex: self.service.export() @@ -162,7 +182,9 @@ def test_write_error_returns_legacy_status(self, mock_path): @mock.patch("os.path.exists", return_value=True) def test_unlock_error_returns_legacy_status(self, mock_path): - self.mock_cli.unlock_luks_volume.side_effect = ExportException(sdstatus=NewStatus.ERROR_UNLOCK_LUKS) + self.mock_cli.unlock_luks_volume.side_effect = ExportException( + sdstatus=NewStatus.ERROR_UNLOCK_LUKS + ) with pytest.raises(ExportException) as ex: self.service.export() @@ -171,9 +193,11 @@ def test_unlock_error_returns_legacy_status(self, mock_path): @mock.patch("os.path.exists", return_value=True) def test_unexpected_error_returns_legacy_status_generic(self, mock_path): - self.mock_cli.unlock_luks_volume.side_effect = ExportException(sdstatus=NewStatus.DEVICE_ERROR) + self.mock_cli.unlock_luks_volume.side_effect = ExportException( + sdstatus=NewStatus.DEVICE_ERROR + ) with pytest.raises(ExportException) as ex: self.service.export() - assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC \ No newline at end of file + assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC diff --git a/tests/disk/test_volume.py b/tests/disk/test_volume.py index 8651bdbef..f28e711c7 100644 --- a/tests/disk/test_volume.py +++ b/tests/disk/test_volume.py @@ -1,4 +1,3 @@ -import pytest from unittest import mock from securedrop_export.disk.volume import Volume, EncryptionScheme @@ -6,7 +5,11 @@ class TestVolume: def test_overwrite_valid_encryption_scheme(self): - volume = Volume(device_name="/dev/sda", mapped_name="pretend-luks-mapper-id", encryption=EncryptionScheme.LUKS) + volume = Volume( + device_name="/dev/sda", + mapped_name="pretend-luks-mapper-id", + encryption=EncryptionScheme.LUKS, + ) assert volume.encryption is EncryptionScheme.LUKS volume.encryption = None assert volume.encryption is 
EncryptionScheme.UNKNOWN @@ -14,7 +17,9 @@ def test_overwrite_valid_encryption_scheme(self): @mock.patch("os.path.exists", return_value=True) def test_is_unlocked_true(self, mock_os_path): volume = Volume( - device_name="/dev/sda1", mapped_name="pretend-luks-mapper-id", encryption=EncryptionScheme.LUKS + device_name="/dev/sda1", + mapped_name="pretend-luks-mapper-id", + encryption=EncryptionScheme.LUKS, ) assert volume.unlocked @@ -22,7 +27,9 @@ def test_is_unlocked_true(self, mock_os_path): @mock.patch("os.path.exists", return_value=False) def test_is_unlocked_false_no_path(self, mock_os_path): volume = Volume( - device_name="/dev/sda1", mapped_name="pretend-luks-mapper-id", encryption=EncryptionScheme.LUKS + device_name="/dev/sda1", + mapped_name="pretend-luks-mapper-id", + encryption=EncryptionScheme.LUKS, ) assert not volume.unlocked @@ -30,13 +37,15 @@ def test_is_unlocked_false_no_path(self, mock_os_path): @mock.patch("os.path.exists", return_value=True) def test_writable_false(self, mock_os_path): vol = Volume( - device_name="dev/sda1", mapped_name="pretend-luks-id", encryption=EncryptionScheme.LUKS + device_name="dev/sda1", + mapped_name="pretend-luks-id", + encryption=EncryptionScheme.LUKS, ) assert not vol.writable @mock.patch("os.path.exists", return_value=True) - def test_writable_false(self, mock_os_path): + def test_writable(self, mock_os_path): vol = Volume( device_name="dev/sda1", mapped_name="pretend-luks-id", diff --git a/tests/print/test_service.py b/tests/print/test_service.py index 317cc992d..dfff606d2 100644 --- a/tests/print/test_service.py +++ b/tests/print/test_service.py @@ -1,9 +1,11 @@ -from unittest import mock +import pytest +from unittest import mock import os -import pytest +import subprocess from subprocess import CalledProcessError -import sys + +from securedrop_export.directory_util import safe_mkdir from securedrop_export.exceptions import ExportException from securedrop_export.archive import Archive @@ -13,15 +15,24 @@ 
SAMPLE_OUTPUT_NO_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\nnetwork lpd" # noqa SAMPLE_OUTPUT_BROTHER_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Brother/HL-L2320D%20series?serial=A00000A000000\nnetwork lpd" # noqa SAMPLE_OUTPUT_LASERJET_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000\nnetwork lpd" # noqa -TEST_CONFIG = os.path.join(os.path.dirname(__file__), "sd-export-config.json") - +SAMPLE_OUTPUT_UNSUPPORTED_PRINTER = b"network beh\nnetwork https\nnetwork ipp\nnetwork ipps\nnetwork http\nnetwork\nnetwork ipp14\ndirect usb://Canon/QL-700%?serial=A00000A000000\nnetwork lpd" # noqa -class PrinterTest: +class TestPrint: @classmethod def setup_class(cls): - cls.submission = Archive("testfile", TEST_CONFIG) - cls.service = Service(submission) + cls.submission = Archive("testfile") + cls.service = Service(cls.submission) + + # Set up files as if extracted from tarball + fp = os.path.join(cls.submission.tmpdir, "export_data") + if not os.path.exists(fp): + safe_mkdir(fp) + + for i in ["file1", "file2", "file3"]: + with open(f"{cls.submission.tmpdir}/export_data/{i}.txt", "a+") as file: + file.write(f"It's a pretend file {i}") + file.write("\n") @classmethod def teardown_class(cls): @@ -29,16 +40,72 @@ def teardown_class(cls): cls.submission = None @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) - def test_get_good_printer_uri_laserjet(mocked_call): - assert self.service._get_printer_uri() == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" + def test_get_good_printer_uri_laserjet(self, mocked_call): + assert ( + self.service._get_printer_uri() + == "usb://Brother/HL-L2320D%20series?serial=A00000A000000" + ) + + def test_service_initialized_correctly(self): + assert 
self.service.printer_wait_timeout == 60 + assert self.service.printer_name == "sdw-printer" + + def test_print_all_methods_called(self): + patch_setup = mock.patch.object(self.service, "_check_printer_setup") + patch_print = mock.patch.object(self.service, "_print_all_files") + + mock_setup = patch_setup.start() + mock_print = patch_print.start() + + self.service.print() + + # When the client can accept new status values, we will assert that the + # above call results in Status.PRINT_SUCCESS + assert mock_setup.called_once() + assert mock_print.called_once() + + patch_setup.stop() + patch_print.stop() + + def test_printer_test_all_methods_called(self): + patch_setup = mock.patch.object(self.service, "_check_printer_setup") + + mock_setup = patch_setup.start() + + self.service.printer_preflight() + + # When the client can accept new status values, we will assert that the + # above call results in Status.PREFLIGHT_SUCCESS + assert mock_setup.called_once() + + patch_setup.stop() + + def test_print_all_checks_called(self): + patch_setup = mock.patch.object(self.service, "_check_printer_setup") + patch_print = mock.patch.object(self.service, "_print_test_page") + + mock_setup = patch_setup.start() + mock_print = patch_print.start() + self.service.printer_test() + # When the client can accept new status values, we will assert that the + # above call results in Status.TEST_SUCCESS + + assert mock_setup.called_once() + assert mock_print.called_once() + + patch_setup.stop() + patch_print.stop() @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_LASERJET_PRINTER) - def test_get_good_printer_uri_brother(mocked_call): - assert self.service._get_printer_uri() == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + def test_get_good_printer_uri_brother(self, mocked_call): + assert ( + self.service._get_printer_uri() + == "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + ) @mock.patch("subprocess.check_output", 
return_value=SAMPLE_OUTPUT_NO_PRINTER) - def test_get_bad_printer_uri(mocked_call, capsys, mocker): + def test_get_bad_printer_uri(self, mocked_call, capsys, mocker): with pytest.raises(ExportException) as ex: self.service._get_printer_uri() @@ -53,7 +120,7 @@ def test_get_bad_printer_uri(mocked_call, capsys, mocker): "/tmp/tmpJf83j9/secret.pptx" ], ) - def test_is_open_office_file(capsys, open_office_paths): + def test_is_open_office_file(self, capsys, open_office_paths): assert self.service._is_open_office_file(open_office_paths) @pytest.mark.parametrize( @@ -65,37 +132,44 @@ def test_is_open_office_file(capsys, open_office_paths): "/tmp/tmpJf83j9/secret.gpg" ], ) - def test_is_not_open_office_file(capsys, open_office_paths): + def test_is_not_open_office_file(self, capsys, open_office_paths): assert not self.service._is_open_office_file(open_office_paths) @mock.patch("subprocess.run") - def test_install_printer_ppd_laserjet(mocker): - ppd = self.service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000") + def test_install_printer_ppd_laserjet(self, mocker): + ppd = self.service._install_printer_ppd( + "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A00000" + ) assert ppd == "/usr/share/cups/model/hp-laserjet_6l.ppd" @mock.patch("subprocess.run") - def test_install_printer_ppd_brother(mocker): - ppd = self.service._install_printer_ppd("usb://Brother/HL-L2320D%20series?serial=A00000A000000") + def test_install_printer_ppd_brother(self, mocker): + ppd = self.service._install_printer_ppd( + "usb://Brother/HL-L2320D%20series?serial=A00000A000000" + ) assert ppd == "/usr/share/cups/model/br7030.ppd" - - def test_install_printer_ppd_error_no_driver(mocker): + def test_install_printer_ppd_error_no_driver(self, mocker): mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) with pytest.raises(ExportException) as ex: - self.service._install_printer_ppd("usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000") + 
self.service._install_printer_ppd( + "usb://HP/LaserJet%20Pro%20M404-M405?serial=A00000A000000" + ) assert ex.value.sdstatus is Status.ERROR_PRINTER_DRIVER_UNAVAILABLE - def test_install_printer_ppd_error_not_supported(mocker): + def test_install_printer_ppd_error_not_supported(self, mocker): mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) with pytest.raises(ExportException) as ex: - self.service._install_printer_ppd("usb://Not/Supported?serial=A00000A000000") + self.service._install_printer_ppd( + "usb://Not/Supported?serial=A00000A000000" + ) assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED - def test_setup_printer_error(mocker): + def test_setup_printer_error(self, mocker): mocker.patch("subprocess.run", side_effect=CalledProcessError(1, "run")) with pytest.raises(ExportException) as ex: @@ -106,7 +180,6 @@ def test_setup_printer_error(mocker): assert ex.value.sdstatus is Status.ERROR_PRINTER_INSTALL - def test_safe_check_call(self): # This works, since `ls` is a valid comand self.service.safe_check_call(["ls"], Status.TEST_SUCCESS) @@ -115,12 +188,12 @@ def test_safe_check_call_invalid_call(self): with pytest.raises(ExportException) as ex: self.service.safe_check_call(["ls", "kjdsfhkdjfh"], Status.ERROR_PRINT) - assert ex.value.sdstatus is FakeStatus.ERROR_PRINT + assert ex.value.sdstatus is Status.ERROR_PRINT def test_safe_check_call_write_to_stderr_and_ignore_error(self): self.service.safe_check_call( ["python3", "-c", "import sys;sys.stderr.write('hello')"], - Status.TEST_SUCCESS, + error_status=Status.TEST_SUCCESS, ignore_stderr_startswith=b"hello", ) @@ -129,8 +202,192 @@ def test_safe_check_call_write_to_stderr_wrong_ignore_param(self): with pytest.raises(ExportException) as ex: self.service.safe_check_call( ["python3", "-c", "import sys;sys.stderr.write('hello\n')"], - Status.ERROR_PRINT, + error_status=Status.ERROR_PRINT, ignore_stderr_startswith=b"world", ) - assert ex.value.sdstatus is Status.ERROR_PRINT \ No 
newline at end of file + assert ex.value.sdstatus is Status.ERROR_PRINT + + @mock.patch("time.sleep", return_value=None) + @mock.patch( + "subprocess.check_output", + side_effect=[ + b"printer sdw-printer is busy\n", + b"printer sdw-printer is idle\n", + ], + ) + def test__wait_for_print(self, mock_subprocess, mock_time): + assert self.service._wait_for_print() + + @mock.patch("time.sleep", return_value=None) + @mock.patch( + "subprocess.check_output", + side_effect=subprocess.CalledProcessError(1, "check_output"), + ) + def test__wait_for_print_print_exception(self, mock_subprocess, mock_time): + with pytest.raises(ExportException) as ex: + self.service._wait_for_print() + + assert ex.value.sdstatus is Status.ERROR_PRINT + + @mock.patch( + "subprocess.check_output", return_value=b"printer sdw-printer is busy\n" + ) + def test__wait_for_print_timeout_exception(self, mock_subprocess): + self.service.printer_wait_timeout = 1 + + with pytest.raises(ExportException) as ex: + self.service._wait_for_print() + + assert ex.value.sdstatus is Status.ERROR_PRINT + + self.service.printer_wait_timeout = self.service.PRINTER_WAIT_TIMEOUT + + @pytest.mark.parametrize( + "printers", [SAMPLE_OUTPUT_BROTHER_PRINTER, SAMPLE_OUTPUT_LASERJET_PRINTER] + ) + def test__check_printer_setup(self, printers, mocker): + mocker.patch("subprocess.check_output", return_value=printers) + p = mocker.patch.object(self.service, "_setup_printer") + p2 = mocker.patch.object(self.service, "_install_printer_ppd") + p.start() + p2.start() + + self.service._check_printer_setup() + p.assert_called_once() + p2.assert_called_once() + + p.stop() + p2.stop() + + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) + def test__check_printer_setup_error_no_printer(self, mock_output): + + with pytest.raises(ExportException) as ex: + self.service._check_printer_setup() + assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_FOUND + + @mock.patch( + "subprocess.check_output", + 
return_value=SAMPLE_OUTPUT_BROTHER_PRINTER + + b"\n" + + SAMPLE_OUTPUT_LASERJET_PRINTER, + ) + def test__check_printer_setup_error_too_many_printers(self, mock_output): + + with pytest.raises(ExportException) as ex: + self.service._check_printer_setup() + assert ex.value.sdstatus is Status.ERROR_MULTIPLE_PRINTERS_FOUND + + @mock.patch( + "subprocess.check_output", return_value=SAMPLE_OUTPUT_UNSUPPORTED_PRINTER + ) + def test__check_printer_setup_error_unsupported_printer(self, mock_output): + + with pytest.raises(ExportException) as ex: + self.service._check_printer_setup() + assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED + + @mock.patch( + "subprocess.check_output", + side_effect=subprocess.CalledProcessError(1, "check_output"), + ) + def test__check_printer_setup_error_checking_printer(self, mock_output): + + with pytest.raises(ExportException) as ex: + self.service._check_printer_setup() + assert ex.value.sdstatus is Status.ERROR_UNKNOWN + + @mock.patch( + "subprocess.check_output", + side_effect=subprocess.CalledProcessError(1, "check_output"), + ) + def test__get_printer_uri_error(self, mocked_subprocess): + with pytest.raises(ExportException) as ex: + self.service._get_printer_uri() + assert ex.value.sdstatus is Status.ERROR_PRINTER_URI + + @mock.patch( + "subprocess.check_output", return_value=SAMPLE_OUTPUT_UNSUPPORTED_PRINTER + ) + def test__get_printer_uri_error_unsupported(self, mocked_subprocess): + with pytest.raises(ExportException) as ex: + self.service._get_printer_uri() + assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED + + def test__install_printer_ppd_error_unsupported_uri(self): + with pytest.raises(ExportException) as ex: + self.service._install_printer_ppd( + "usb://YOURE_NOT_MY_REAL_PRINTER/A00000A000000" + ) + assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED + + def test__print_test_page_calls_method(self): + p = mock.patch.object(self.service, "_print_file") + mock_print = p.start() + + 
self.service._print_test_page() + mock_print.assert_called_once_with("/usr/share/cups/data/testprint") + p.stop() + + def test__print_all_files(self): + p = mock.patch.object(self.service, "_print_file") + mock_print = p.start() + + self.service._print_all_files() + mock_print.assert_has_calls( + [ + mock.call(f"{self.submission.tmpdir}/export_data/file1.txt"), + mock.call(f"{self.submission.tmpdir}/export_data/file2.txt"), + mock.call(f"{self.submission.tmpdir}/export_data/file3.txt"), + ], + any_order=True, + ) + p.stop() + + def test_open_office_file_convert_to_pdf(self): + file = "/tmp/definitely-an-office-file.odt" + + with mock.patch.object(self.service, "safe_check_call") as scc, mock.patch( + "securedrop_export.print.service.logger.info" + ) as log: + self.service._print_file(file) + + assert scc.call_count == 2 + scc.assert_has_calls( + [ + mock.call( + command=[ + "unoconv", + "-o", + "/tmp/definitely-an-office-file.odt.pdf", + "/tmp/definitely-an-office-file.odt", + ], + error_status=Status.ERROR_PRINT, + ), + mock.call( + command=[ + "xpp", + "-P", + "sdw-printer", + "/tmp/definitely-an-office-file.odt.pdf", + ], + error_status=Status.ERROR_PRINT, + ), + ] + ) + assert log.call_count == 2 + log.assert_has_calls( + [ + mock.call("Converting Office document to pdf"), + mock.call("Sending file to printer sdw-printer"), + ] + ) + + def test_safe_check_call_has_error_in_stderr(self): + mock.patch("subprocess.run") + + with mock.patch("subprocess.run"), pytest.raises(ExportException) as ex: + self.service.safe_check_call(command="ls", error_status=Status.TEST_SUCCESS) + + assert ex.value.sdstatus is Status.TEST_SUCCESS diff --git a/tests/test_archive.py b/tests/test_archive.py index 4e840abf3..57791a82e 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -12,6 +12,7 @@ from securedrop_export.exceptions import ExportException from securedrop_export.archive import Archive, Metadata, Status + def test_extract_tarball(): """ Check that we can 
successfully extract a valid tarball. @@ -42,20 +43,25 @@ def test_extract_tarball(): archive.close() - submission = Archive(archive_path) + submission = Archive(archive_path).extract_tarball() assert oct(os.stat(submission.tmpdir).st_mode) == "0o40700" - submission.extract_tarball() - - extracted_file_path = os.path.join(submission.tmpdir, "some", "dirs", "file.txt") + extracted_file_path = os.path.join( + submission.tmpdir, "some", "dirs", "file.txt" + ) assert os.path.exists(extracted_file_path) assert oct(os.stat(extracted_file_path).st_mode) == "0o100600" # Subdirectories that are added as members are extracted with 700 permissions - assert oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" + assert ( + oct(os.stat(os.path.join(submission.tmpdir, "some")).st_mode) == "0o40700" + ) # Subdirectories that are not added as members are extracted with 700 permissions # because os.umask(0o077) is set in the Archive constructor. - assert oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) == "0o40700" + assert ( + oct(os.stat(os.path.join(submission.tmpdir, "some", "dirs")).st_mode) + == "0o40700" + ) def test_extract_tarball_with_symlink(): @@ -84,7 +90,7 @@ def test_extract_tarball_with_symlink(): submission = Archive(archive_path) assert oct(os.stat(submission.tmpdir).st_mode) == "0o40700" - submission.extract_tarball() + submission = submission.extract_tarball() symlink_path = os.path.join(submission.tmpdir, "symlink") assert os.path.islink(symlink_path) @@ -111,7 +117,9 @@ def test_extract_tarball_raises_if_doing_path_traversal(): metadata_file_info.size = len(metadata_str) archive.addfile(metadata_file_info, metadata_bytes) content = b"test" - traversed_file_info = tarfile.TarInfo("../../../../../../../../../tmp/traversed") + traversed_file_info = tarfile.TarInfo( + "../../../../../../../../../tmp/traversed" + ) traversed_file_info.size = len(content) archive.addfile(traversed_file_info, BytesIO(content)) archive.close() 
@@ -288,7 +296,9 @@ def test_extract_tarball_raises_if_name_has_unsafe_absolute_path_with_symlink(): archive_path = os.path.join(temp_dir, "archive.sd-export") symlink_path = os.path.join(temp_dir, "symlink") - os.system(f"ln -s {tmp}/unsafe {symlink_path}") # create symlink to "/tmp/unsafe" + os.system( + f"ln -s {tmp}/unsafe {symlink_path}" + ) # create symlink to "/tmp/unsafe" with tarfile.open(archive_path, "w:gz") as archive: metadata = { @@ -402,7 +412,8 @@ def test_empty_config(capsys): f.write("{}") with pytest.raises(ExportException) as ex: - config = Metadata.create_and_validate(temp_folder) + Metadata(temp_folder).validate() + assert ex.value.sdstatus is Status.ERROR_ARCHIVE_METADATA def test_valid_printer_test_config(capsys): @@ -412,7 +423,7 @@ def test_valid_printer_test_config(capsys): with open(metadata, "w") as f: f.write('{"device": "printer-test"}') - config = Metadata.create_and_validate(temp_folder) + config = Metadata(temp_folder).validate() assert config.encryption_key is None assert config.encryption_method is None @@ -425,7 +436,7 @@ def test_valid_printer_config(capsys): with open(metadata, "w") as f: f.write('{"device": "printer"}') - config = Metadata.create_and_validate(temp_folder) + config = Metadata(temp_folder).validate() assert config.encryption_key is None assert config.encryption_method is None @@ -437,14 +448,17 @@ def test_invalid_encryption_config(capsys): temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: - f.write('{"device": "disk", "encryption_method": "base64", "encryption_key": "hunter1"}') + f.write( + '{"device": "disk", "encryption_method": "base64", "encryption_key": "hunter1"}' + ) with pytest.raises(ExportException) as ex: - config = Metadata.create_and_validate(temp_folder) + Metadata(temp_folder).validate() assert ex.value.sdstatus is Status.ERROR_ARCHIVE_METADATA -def test_malforned_config(capsys): + +def test_invalid_config(capsys): 
Archive("testfile") temp_folder = tempfile.mkdtemp() @@ -453,37 +467,46 @@ def test_malforned_config(capsys): f.write('{"device": "asdf", "encryption_method": "OHNO"}') with pytest.raises(ExportException) as ex: - config = Metadata.create_and_validate(temp_folder) + Metadata(temp_folder).validate() + + assert ex.value.sdstatus is Status.ERROR_ARCHIVE_METADATA + + +def test_malformed_config(capsys): + Archive("testfile") + + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write('{"device": "asdf", "encryption_method": {"OHNO", "MALFORMED"}') + + with pytest.raises(ExportException) as ex: + Metadata(temp_folder).validate() assert ex.value.sdstatus is Status.ERROR_METADATA_PARSING + def test_valid_encryption_config(capsys): Archive("testfile") temp_folder = tempfile.mkdtemp() metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) with open(metadata, "w") as f: - f.write('{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}') + f.write( + '{"device": "disk", "encryption_method": "luks", "encryption_key": "hunter1"}' + ) - config = Metadata.create_and_validate(temp_folder) + config = Metadata(temp_folder).validate() assert config.encryption_key == "hunter1" assert config.encryption_method == "luks" -def test_cannot_use_metadata_constructor(): - """ - Require the `create_and_validate()` method for returning a Metadata object - """ - with pytest.raises(ValueError): - Metadata(object(), tempfile.mkdtemp()) - - @mock.patch("json.loads", side_effect=json.decoder.JSONDecodeError("ugh", "badjson", 0)) def test_metadata_parsing_error(mock_json): """ Handle exception caused when loading metadata JSON """ with pytest.raises(ExportException) as ex: - Metadata.create_and_validate(tempfile.mkdtemp()) + Metadata(tempfile.mkdtemp()).validate() - assert ex.value.sdstatus is Status.ERROR_METADATA_PARSING \ No newline at end of file + assert ex.value.sdstatus is 
Status.ERROR_METADATA_PARSING diff --git a/tests/test_directory_util.py b/tests/test_directory_util.py index 18eb6dd2a..cc1f304a8 100644 --- a/tests/test_directory_util.py +++ b/tests/test_directory_util.py @@ -1,24 +1,34 @@ import pytest import os +import tempfile +import shutil from pathlib import Path from securedrop_export import directory_util -from securedrop_export.exceptions import ExportException -class TestUtil: - _TMPDIR_PATH = "/tmp/pretendium/" +class TestDirectoryUtil: + _REL_TRAVERSAL = "../../../whee" _SAFE_RELPATH = "./hi" _SAFE_RELPATH2 = "yay/a/path" _UNSAFE_RELPATH = "lgtm/../ohwait" + @classmethod + def setup_class(cls): + cls.homedir = tempfile.mkdtemp() + "/" + + @classmethod + def teardown_class(cls): + if os.path.exists(cls.homedir): + shutil.rmtree(cls.homedir) + def setup_method(self, method): pass def teadown_method(self, method): - if (os.path.exists(self._TMPDIR_PATH)): - os.remove(self._TMPDIR_PATH) + if os.path.exists(self.homedir): + os.remove(self.homedir) def test_safe_mkdir_error_base_relpath(self): with pytest.raises(ValueError): @@ -26,40 +36,49 @@ def test_safe_mkdir_error_base_relpath(self): def test_safe_mkdir_error_basepath_path_traversal(self): with pytest.raises(ValueError): - directory_util.safe_mkdir(f"{self._TMPDIR_PATH}{self._REL_TRAVERSAL}") + directory_util.safe_mkdir(f"{self.homedir}{self._REL_TRAVERSAL}") def test_safe_mkdir_error_relpath_path_traversal(self): with pytest.raises(ValueError): - directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._REL_TRAVERSAL}") + directory_util.safe_mkdir(f"{self.homedir}", f"{self._REL_TRAVERSAL}") def test_safe_mkdir_success(self): - directory_util.safe_mkdir(f"{self._TMPDIR_PATH}") + directory_util.safe_mkdir(f"{self.homedir}") def test_safe_mkdir_success_with_relpath(self): - directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._SAFE_RELPATH}") + directory_util.safe_mkdir(f"{self.homedir}", f"{self._SAFE_RELPATH}") - assert 
(os.path.exists(f"{self._TMPDIR_PATH}{self._SAFE_RELPATH}")) + assert os.path.exists(f"{self.homedir}{self._SAFE_RELPATH}") def test_safe_mkdir_success_another_relpath(self): - directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._SAFE_RELPATH2}") + directory_util.safe_mkdir(f"{self.homedir}", f"{self._SAFE_RELPATH2}") + + assert os.path.exists(f"{self.homedir}{self._SAFE_RELPATH2}") - assert (os.path.exists(f"{self._TMPDIR_PATH}{self._SAFE_RELPATH2}")) - def test_safe_mkdir_weird_path(self): with pytest.raises(ValueError): - directory_util.safe_mkdir(f"{self._TMPDIR_PATH}", f"{self._UNSAFE_RELPATH}") + directory_util.safe_mkdir(f"{self.homedir}", f"{self._UNSAFE_RELPATH}") def test__check_all_permissions_path_missing(self): with pytest.raises(ValueError): - directory_util._check_all_permissions(f"{self._TMPDIR_PATH}", f"{self._SAFE_RELPATH}") + directory_util._check_all_permissions( + f"{self.homedir}", f"{self._SAFE_RELPATH}" + ) def test_check_dir_perms_unsafe(self): - path = Path(f"{self._TMPDIR_PATH}{self._SAFE_RELPATH}") + path = Path(f"{self.homedir}{self._SAFE_RELPATH}") directory_util.safe_mkdir(path) # Not what we want, ever path.chmod(0o666) - + with pytest.raises(RuntimeError): - directory_util._check_dir_permissions(path) + directory_util._check_dir_permissions(path) + + def test_check_all_perms_invalid_full_path(self): + path = Path(f"{self.homedir}/idontexist") + base = Path(f"{self.homedir}") + + # Returns without error + assert directory_util._check_all_permissions(path, base) is None diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 577fae13a..71af41143 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -3,18 +3,18 @@ from securedrop_export.exceptions import handler, TimeoutException + def test_handler(): signal.signal(signal.SIGALRM, handler) signal.setitimer(signal.ITIMER_REAL, 0.001) - with pytest.raises(TimeoutException) as ex: + with pytest.raises(TimeoutException): _run_handler_routine() + def 
_run_handler_routine(): try: while True: continue except TimeoutException: raise - - \ No newline at end of file diff --git a/tests/test_main.py b/tests/test_main.py index e309ec305..b94109a88 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,17 +1,41 @@ import pytest -from unittest import mock +import tempfile import os +from unittest import mock +import shutil + +from securedrop_export.archive import Archive, Metadata, Status as ArchiveStatus +from securedrop_export.status import BaseStatus +from securedrop_export.command import Command +from securedrop_export.exceptions import ExportException + +from securedrop_export.main import ( + Status, + entrypoint, + _exit_gracefully, + _write_status, + _start_service, + _configure_logging, +) + +SUBMISSION_SAMPLE_ARCHIVE = "pretendfile.tar.gz" -from securedrop_export.main import Status, entrypoint, _extract_and_run, _exit_gracefully, _write_status # noqa: F401 -from securedrop_export.archive import Archive -class TestMain(): +class TestMain: + def setup_method(self, method): + # This can't be a class method, since we expect sysexit during this test suite, + # which + self.submission = Archive("pretendfile.tar.gz") + assert os.path.exists(self.submission.tmpdir) + + def teardown_method(self, method): + if os.path.exists(self.submission.tmpdir): + shutil.rmtree(self.submission.tmpdir) + self.submission = None def test_exit_gracefully_no_exception(self, capsys): - submission = Archive("testfile") - with pytest.raises(SystemExit) as sysexit: - _exit_gracefully(submission, Status.ERROR_GENERIC) + _exit_gracefully(self.submission, Status.ERROR_GENERIC) # A graceful exit means a return code of 0 assert sysexit.value.code == 0 @@ -20,23 +44,19 @@ def test_exit_gracefully_no_exception(self, capsys): assert captured.err == "{}\n".format(Status.ERROR_GENERIC.value) assert captured.out == "" - def test_exit_gracefully_exception(self, capsys): - submission = Archive("testfile") - with pytest.raises(SystemExit) as 
sysexit: exception = mock.MagicMock() exception.output = "BANG!" - _exit_gracefully(submission, Status.ERROR_GENERIC, e=exception) + _exit_gracefully(self.submission, Status.ERROR_GENERIC, e=exception) # A graceful exit means a return code of 0 assert sysexit.value.code == 0 captured = capsys.readouterr() - assert captured.err.rstrip() == Status.ERROR_GENERIC.value + assert captured.err.rstrip() == Status.ERROR_GENERIC.value # todo assert captured.out == "" - @pytest.mark.parametrize("status", [s for s in Status]) def test_write_status(self, status, capsys): _write_status(status) @@ -49,13 +69,121 @@ def test_write_status_error(self, invalid_status, capsys): with pytest.raises(ValueError): _write_status(Status(invalid_status)) + def _did_exit_gracefully(self, exit, capsys, status: BaseStatus) -> bool: + """ + Helper. True if exited with 0, writing supplied status to stderr. + """ + return exit.value.code == 0 and capsys.readouterr().err == status.value + "\n" + + @pytest.mark.parametrize("command", list(Command)) + @mock.patch("securedrop_export.main._configure_logging") + @mock.patch("os.path.exists", return_value=True) + def test_entrypoint_success_start_service(self, mock_log, mock_path, command): + metadata = os.path.join(self.submission.tmpdir, Metadata.METADATA_FILE) + + with open(metadata, "w") as f: + f.write(f'{{"device": "{command.value}", "encryption_method": "luks"}}') + + with mock.patch( + "sys.argv", ["qvm-send-to-usb", SUBMISSION_SAMPLE_ARCHIVE] + ), mock.patch( + "securedrop_export.main._start_service" + ) as mock_service, mock.patch( + "securedrop_export.main.Archive.extract_tarball", + return_value=self.submission, + ), pytest.raises( + SystemExit + ): + entrypoint() + + if command is not Command.START_VM: + assert self.submission.command == command + assert mock_service.call_args[0][0].archive == SUBMISSION_SAMPLE_ARCHIVE + mock_service.assert_called_once_with(self.submission) + + def test_valid_printer_test_config(self, capsys): + 
Archive("testfile") + temp_folder = tempfile.mkdtemp() + metadata = os.path.join(temp_folder, Metadata.METADATA_FILE) + with open(metadata, "w") as f: + f.write('{"device": "printer-test"}') + + config = Metadata(temp_folder).validate() + + assert config.encryption_key is None + assert config.encryption_method is None + + @mock.patch( + "securedrop_export.archive.safe_extractall", + side_effect=ValueError("A tarball problem!"), + ) + @mock.patch("securedrop_export.main.os.path.exists", return_value=True) + @mock.patch("securedrop_export.main.shutil.rmtree") + def test_entrypoint_failure_extraction( + self, mock_rm, mock_path, mock_extract, capsys + ): + with mock.patch( + "sys.argv", ["qvm-send-to-usb", SUBMISSION_SAMPLE_ARCHIVE] + ), pytest.raises(SystemExit) as sysexit: + entrypoint() + + assert self._did_exit_gracefully( + sysexit, capsys, ArchiveStatus.ERROR_EXTRACTION + ) + + @mock.patch( + "securedrop_export.main._configure_logging", + side_effect=ExportException( + sdstatus=Status.ERROR_LOGGING, + message="Zounds, an error setting up logging!", + ), + ) + def test_entrypoint_logging_fails(self, mock_mkdir, capsys): + with pytest.raises(SystemExit) as sysexit: + entrypoint() + + assert self._did_exit_gracefully(sysexit, capsys, Status.ERROR_LOGGING) + + @mock.patch( + "securedrop_export.main._configure_logging", + side_effect=RuntimeError("Zounds, an uncaught error!"), + ) + def test_entrypoint_fails_unexpected(self, mock_mkdir, capsys): + with pytest.raises(SystemExit) as sysexit: + entrypoint() + + assert self._did_exit_gracefully(sysexit, capsys, Status.ERROR_GENERIC) + + @mock.patch("os.path.exists", return_value=False) + def test_entrypoint_archive_path_fails(self, mock_path, capsys): + with pytest.raises(SystemExit) as sysexit: + entrypoint() + + assert self._did_exit_gracefully(sysexit, capsys, Status.ERROR_FILE_NOT_FOUND) + + @mock.patch( + "securedrop_export.main.safe_mkdir", + side_effect=ValueError(1, "No logs for you!"), + ) + def 
test__configure_logging_error(self, mock_mkdir, capsys): + with pytest.raises(ExportException) as ex: + _configure_logging() + + assert ex.value.sdstatus is Status.ERROR_LOGGING - def test__extract_and_run(self): - pass + @pytest.mark.parametrize("command", list(Command)) + def test__start_service_calls_correct_services(self, command): + if command is Command.START_VM: + pytest.skip("Command does not start a service") + self.submission.command = command - def test__extract_and_run_failure(self): - pass + with mock.patch("securedrop_export.main.PrintService") as ps, mock.patch( + "securedrop_export.main.ExportService" + ) as es: + _start_service(self.submission) - def test_entrypoint(self): - pass + if command in [Command.PRINT, Command.PRINTER_TEST, Command.PRINTER_PREFLIGHT]: + assert ps.call_args[0][0] is self.submission + else: + assert es.call_args[0][0] is self.submission From d488374d77a378e65eb3ebd5849244a0b1decea4 Mon Sep 17 00:00:00 2001 From: Ro Date: Mon, 28 Nov 2022 20:58:35 -0800 Subject: [PATCH 311/352] Cleanup: Use wait_for_print method to ensure jobs are correctly transferred. Log exceptions when they occur instead of during graceful exit. Rename old service and status files to legacy_*; address review feedback. 
--- .semgrep/custom-rules.yaml | 4 +- securedrop_export/archive.py | 2 +- .../{directory_util.py => directory.py} | 3 - securedrop_export/disk/__init__.py | 1 + securedrop_export/disk/cli.py | 6 +- securedrop_export/disk/legacy_service.py | 146 +++++++++++++ securedrop_export/disk/legacy_status.py | 26 +++ securedrop_export/disk/new_service.py | 120 ----------- securedrop_export/disk/new_status.py | 29 --- securedrop_export/disk/service.py | 194 ++++++++---------- securedrop_export/disk/status.py | 35 ++-- securedrop_export/exceptions.py | 5 - securedrop_export/main.py | 26 +-- securedrop_export/print/__init__.py | 1 + securedrop_export/print/service.py | 23 ++- tests/disk/test_cli.py | 2 +- tests/disk/test_service.py | 46 ++--- tests/disk/test_status.py | 0 tests/print/test_service.py | 70 +++++-- ...st_directory_util.py => test_directory.py} | 28 ++- tests/test_main.py | 30 ++- 21 files changed, 421 insertions(+), 376 deletions(-) rename securedrop_export/{directory_util.py => directory.py} (99%) create mode 100644 securedrop_export/disk/legacy_service.py create mode 100644 securedrop_export/disk/legacy_status.py delete mode 100644 securedrop_export/disk/new_service.py delete mode 100644 securedrop_export/disk/new_status.py delete mode 100644 tests/disk/test_status.py rename tests/{test_directory_util.py => test_directory.py} (67%) diff --git a/.semgrep/custom-rules.yaml b/.semgrep/custom-rules.yaml index 38f60f003..377e55f4e 100644 --- a/.semgrep/custom-rules.yaml +++ b/.semgrep/custom-rules.yaml @@ -47,7 +47,7 @@ rules: languages: - python severity: ERROR - message: Possible path traversal or insecure directory and file permissions through os.mkdir(). Use securedrop_export.directory_util.safe_mkdir instead. + message: Possible path traversal or insecure directory and file permissions through os.mkdir(). Use securedrop_export.directory.safe_mkdir instead. 
patterns: - pattern: "....mkdir(...)" - pattern-not-inside: | @@ -58,7 +58,7 @@ rules: languages: - python severity: ERROR - message: Possible path traversal or insecure directory and file permissions through os.makedirs(). Use securedrop_export.directory_util.safe_mkdir instead. + message: Possible path traversal or insecure directory and file permissions through os.makedirs(). Use securedrop_export.directory.safe_mkdir instead. patterns: - pattern: "....makedirs(...)" - pattern-not-inside: | diff --git a/securedrop_export/archive.py b/securedrop_export/archive.py index 2ec50b5c8..ed8108221 100755 --- a/securedrop_export/archive.py +++ b/securedrop_export/archive.py @@ -9,7 +9,7 @@ from securedrop_export.exceptions import ExportException from securedrop_export.status import BaseStatus from securedrop_export.command import Command -from securedrop_export.directory_util import safe_extractall +from securedrop_export.directory import safe_extractall logger = logging.getLogger(__name__) diff --git a/securedrop_export/directory_util.py b/securedrop_export/directory.py similarity index 99% rename from securedrop_export/directory_util.py rename to securedrop_export/directory.py index a2a866c7e..4f5edf546 100644 --- a/securedrop_export/directory_util.py +++ b/securedrop_export/directory.py @@ -2,9 +2,6 @@ import tarfile from pathlib import Path from typing import Optional, Union -import logging - -logger = logging.getLogger(__name__) def safe_mkdir( diff --git a/securedrop_export/disk/__init__.py b/securedrop_export/disk/__init__.py index e69de29bb..3fa6c3647 100644 --- a/securedrop_export/disk/__init__.py +++ b/securedrop_export/disk/__init__.py @@ -0,0 +1 @@ +from .service import Service # noqa: F401 diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py index 7abb33b02..5d07c9d53 100644 --- a/securedrop_export/disk/cli.py +++ b/securedrop_export/disk/cli.py @@ -7,7 +7,7 @@ from securedrop_export.exceptions import ExportException from .volume import 
EncryptionScheme, Volume -from .new_status import Status +from .status import Status logger = logging.getLogger(__name__) @@ -125,7 +125,7 @@ def _check_partitions(self, blkid: str) -> str: return device_and_partitions except subprocess.CalledProcessError as ex: - logger.error(f"Error checking block deivce {blkid}") + logger.error(f"Error checking block device {blkid}") raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex def is_luks_volume(self, device: str) -> bool: @@ -364,7 +364,7 @@ def cleanup_drive_and_tmpdir(self, volume: Volume, submission_tmpdir: str): directory. Currently called at end of `write_data_to_device()` to ensure device is always locked after export. - Raise ExportException if errors during cleanup are encoutered. + Raise ExportException if errors during cleanup are encountered. """ logger.debug("Syncing filesystems") try: diff --git a/securedrop_export/disk/legacy_service.py b/securedrop_export/disk/legacy_service.py new file mode 100644 index 000000000..279a84d9c --- /dev/null +++ b/securedrop_export/disk/legacy_service.py @@ -0,0 +1,146 @@ +import logging + +from securedrop_export.exceptions import ExportException + +from .cli import CLI +from .legacy_status import Status as LegacyStatus +from .status import Status as Status + +logger = logging.getLogger(__name__) + + +class Service: + def __init__(self, submission, cli=None): + self.submission = submission + self.cli = cli or CLI() + + def check_connected_devices(self) -> LegacyStatus: + """ + Check if single USB is inserted. 
+ """ + logger.info("Export archive is usb-test") + + try: + all_devices = self.cli.get_connected_devices() + num_devices = len(all_devices) + + except ExportException as ex: + logger.error(f"Error encountered during USB check: {ex.sdstatus.value}") + # Use legacy status instead of new status values + raise ExportException(sdstatus=LegacyStatus.LEGACY_ERROR_USB_CHECK) from ex + + if num_devices == 0: + raise ExportException(sdstatus=LegacyStatus.LEGACY_USB_NOT_CONNECTED) + elif num_devices == 1: + return LegacyStatus.LEGACY_USB_CONNECTED + elif num_devices > 1: + raise ExportException( + sdstatus=LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + ) + + def check_disk_format(self) -> LegacyStatus: + """ + Check if volume is correctly formatted for export. + """ + try: + all_devices = self.cli.get_connected_devices() + + if len(all_devices) == 1: + device = self.cli.get_partitioned_device(all_devices[0]) + logger.info("Check if LUKS") + if not self.cli.is_luks_volume(device): + raise ExportException( + sdstatus=LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + ) + # We can support checking if a drive is already unlocked, but for + # backwards compatibility, this is the only expected status + # at this stage + return LegacyStatus.LEGACY_USB_ENCRYPTED + + except ExportException as ex: + logger.error( + f"Error encountered during disk format check: {ex.sdstatus.value}" + ) + # Return legacy status values for now for ongoing client compatibility + if ex.sdstatus in [s for s in Status]: + status = self._legacy_status(ex.sdstatus) + raise ExportException(sdstatus=status) + elif ex.sdstatus: + raise + else: + raise ExportException(sdstatus=LegacyStatus.LEGACY_USB_DISK_ERROR) + + def export(self): + """ + Export all files to target device. 
+ """ + logger.info("Export archive is disk") + + try: + all_devices = self.cli.get_connected_devices() + + if len(all_devices) == 1: + device = self.cli.get_partitioned_device(all_devices[0]) + + # Decide what kind of volume it is + logger.info("Check if LUKS") + if self.cli.is_luks_volume(device): + volume = self.cli.get_luks_volume(device) + logger.info("Check if writable") + if not volume.writable: + logger.info("Not writable-will try unlocking") + volume = self.cli.unlock_luks_volume( + volume, self.submission.encryption_key + ) + volume = self.cli.mount_volume(volume) + + logger.info(f"Export submission to {volume.mountpoint}") + self.cli.write_data_to_device( + self.submission.tmpdir, self.submission.target_dirname, volume + ) + # This is SUCCESS_EXPORT, but the 0.7.0 client is not expecting + # a return status from a successful export operation. + # When the client is updated, we will return SUCCESS_EXPORT here. + + else: + # Another kind of drive: VeraCrypt/TC, or unsupported. + # For now this is an error--in future there will be support + # for additional encryption formats + logger.error(f"Export failed because {device} is not supported") + raise ExportException( + sdstatus=LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + ) + + except ExportException as ex: + logger.error( + f"Error encountered during disk format check: {ex.sdstatus.value}" + ) + # Return legacy status values for now for ongoing client compatibility + if ex.sdstatus in [s for s in Status]: + status = self._legacy_status(ex.sdstatus) + raise ExportException(sdstatus=status) + elif ex.sdstatus: + raise + else: + raise ExportException(sdstatus=LegacyStatus.LEGACY_ERROR_GENERIC) + + def _legacy_status(self, status: Status) -> LegacyStatus: + """ + Backwards-compatibility - status values that client (@0.7.0) is expecting. 
+ """ + logger.info(f"Convert to legacy: {status.value}") + if status is Status.ERROR_MOUNT: + return LegacyStatus.LEGACY_ERROR_USB_MOUNT + elif status in [Status.ERROR_EXPORT, Status.ERROR_EXPORT_CLEANUP]: + return LegacyStatus.LEGACY_ERROR_USB_WRITE + elif status in [Status.ERROR_UNLOCK_LUKS, Status.ERROR_UNLOCK_GENERIC]: + return LegacyStatus.LEGACY_USB_BAD_PASSPHRASE + elif status in [ + Status.INVALID_DEVICE_DETECTED, + Status.MULTI_DEVICE_DETECTED, + ]: + return LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + # The other status values, such as Status.NO_DEVICE_DETECTED, are not returned by the + # CLI, so we don't need to check for them here + else: + return LegacyStatus.LEGACY_ERROR_GENERIC diff --git a/securedrop_export/disk/legacy_status.py b/securedrop_export/disk/legacy_status.py new file mode 100644 index 000000000..fa0bdf869 --- /dev/null +++ b/securedrop_export/disk/legacy_status.py @@ -0,0 +1,26 @@ +from securedrop_export.status import BaseStatus + + +class Status(BaseStatus): + + LEGACY_ERROR_GENERIC = "ERROR_GENERIC" + + # Legacy USB preflight related + LEGACY_USB_CONNECTED = "USB_CONNECTED" # Success + LEGACY_USB_NOT_CONNECTED = "USB_NOT_CONNECTED" + LEGACY_ERROR_USB_CHECK = "ERROR_USB_CHECK" + + # Legacy USB Disk preflight related errors + LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" # Success + LEGACY_USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" + + # Can be raised during disk format check + LEGACY_USB_DISK_ERROR = "USB_DISK_ERROR" + + # Legacy Disk export errors + LEGACY_USB_BAD_PASSPHRASE = "USB_BAD_PASSPHRASE" + LEGACY_ERROR_USB_MOUNT = "ERROR_USB_MOUNT" + LEGACY_ERROR_USB_WRITE = "ERROR_USB_WRITE" + + # New + SUCCESS_EXPORT = "SUCCESS_EXPORT" diff --git a/securedrop_export/disk/new_service.py b/securedrop_export/disk/new_service.py deleted file mode 100644 index b5702a474..000000000 --- a/securedrop_export/disk/new_service.py +++ /dev/null @@ -1,120 +0,0 @@ -import logging - -from securedrop_export.archive import Archive - 
-from .cli import CLI -from .status import Status -from .volume import Volume -from securedrop_export.exceptions import ExportException - - -logger = logging.getLogger(__name__) - - -class Service: - """ - Checks that can be performed against the device(s). - This is the "API" portion of the export workflow. - """ - - def __init__(self, cli: CLI): - self.cli = cli - - def run(self, arg: str) -> Status: - """ - Run export actions. - """ - - def scan_all_devices(self) -> Status: - """ - Check all connected devices and return current device - status. - """ - try: - all_devices = self.cli.get_connected_devices() - number_devices = len(all_devices) - - if number_devices == 0: - return Status.NO_DEVICE_DETECTED - elif number_devices > 1: - return Status.MULTI_DEVICE_DETECTED - else: - return self.scan_single_device(all_devices[0]) - - except ExportException as ex: - logger.error(ex) - return Status.DEVICE_ERROR # Could not assess devices - - def scan_single_device(self, blkid: str) -> Status: - """ - Given a string representing a single block device, see if it - is a suitable export target and return information about its state. - """ - try: - target = self.cli.get_partitioned_device(blkid) - - # See if it's a LUKS drive - if self.cli.is_luks_volume(target): - - # Returns Volume or throws ExportException - self.volume = self.cli.get_luks_volume(target) - - # See if it's unlocked and mounted - if self.volume.writable: - logger.debug("LUKS device is already mounted") - return Status.DEVICE_WRITABLE - else: - # Prompt for passphrase - return Status.DEVICE_LOCKED - else: - # Might be VeraCrypt, might be madness - logger.info("LUKS drive not found") - - # Currently we don't support anything other than LUKS. 
- # In future, we will support TC/VC volumes as well - return Status.INVALID_DEVICE_DETECTED - - except ExportException as ex: - logger.error(ex) - if ex.sdstatus: - return ex.sdstatus - else: - return Status.DEVICE_ERROR - - def unlock_device(self, passphrase: str, volume: Volume) -> Status: - """ - Given provided passphrase, unlock target volume. Currently, - LUKS volumes are supported. - """ - if volume: - try: - self.volume = self.cli.unlock_luks_volume(volume, passphrase) - - if volume.writable: - return Status.DEVICE_WRITABLE - else: - return Status.ERROR_UNLOCK_LUKS - - except ExportException as ex: - logger.error(ex) - return Status.ERROR_UNLOCK_LUKS - else: - # Trying to unlock devices before having an active device - logger.warning("Tried to unlock_device but no current volume detected.") - return Status.NO_DEVICE_DETECTED - - def write_to_device(self, volume: Volume, data: Archive) -> Status: - """ - Export data to volume. CLI unmounts and locks volume on completion, even - if export was unsuccessful. 
- """ - try: - self.cli.write_data_to_device(data.tmpdir, data.target_dirname, volume) - return Status.SUCCESS_EXPORT - - except ExportException as ex: - logger.error(ex) - if ex.sdstatus: - return ex.sdstatus - else: - return Status.ERROR_EXPORT diff --git a/securedrop_export/disk/new_status.py b/securedrop_export/disk/new_status.py deleted file mode 100644 index 285d9f8b9..000000000 --- a/securedrop_export/disk/new_status.py +++ /dev/null @@ -1,29 +0,0 @@ -from securedrop_export.status import BaseStatus - - -class Status(BaseStatus): - - NO_DEVICE_DETECTED = "NO_DEVICE_DETECTED" - INVALID_DEVICE_DETECTED = ( - "INVALID_DEVICE_DETECTED" # Multi partitioned, not encrypted, etc - ) - MULTI_DEVICE_DETECTED = "MULTI_DEVICE_DETECTED" # Not currently supported - - DEVICE_LOCKED = "DEVICE_LOCKED" # One device detected, and it's locked - DEVICE_WRITABLE = ( - "DEVICE_WRITABLE" # One device detected, and it's unlocked (and mounted) - ) - - ERROR_UNLOCK_LUKS = "ERROR_UNLOCK_LUKS" - ERROR_UNLOCK_GENERIC = "ERROR_UNLOCK_GENERIC" - ERROR_MOUNT = "ERROR_MOUNT" # Unlocked but not mounted - - SUCCESS_EXPORT = "SUCCESS_EXPORT" - ERROR_EXPORT = "ERROR_EXPORT" # Could not write to disk - - # export succeeds but drives were not properly unmounted - ERROR_EXPORT_CLEANUP = "ERROR_EXPORT_CLEANUP" - - DEVICE_ERROR = ( - "DEVICE_ERROR" # Something went wrong while trying to check the device - ) diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index e87386a91..b5702a474 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -1,144 +1,120 @@ import logging -from securedrop_export.exceptions import ExportException +from securedrop_export.archive import Archive from .cli import CLI from .status import Status -from .new_status import Status as NewStatus +from .volume import Volume +from securedrop_export.exceptions import ExportException + logger = logging.getLogger(__name__) class Service: - def __init__(self, submission, 
cli=None): - self.submission = submission - self.cli = cli or CLI() + """ + Checks that can be performed against the device(s). + This is the "API" portion of the export workflow. + """ + + def __init__(self, cli: CLI): + self.cli = cli - def check_connected_devices(self) -> Status: + def run(self, arg: str) -> Status: """ - Check if single USB is inserted. + Run export actions. """ - logger.info("Export archive is usb-test") - - try: - all_devices = self.cli.get_connected_devices() - num_devices = len(all_devices) - except ExportException as ex: - logger.error(f"Error encountered during USB check: {ex.sdstatus.value}") - # Use legacy status instead of new status values - raise ExportException(sdstatus=Status.LEGACY_ERROR_USB_CHECK) from ex - - if num_devices == 0: - raise ExportException(sdstatus=Status.LEGACY_USB_NOT_CONNECTED) - elif num_devices == 1: - return Status.LEGACY_USB_CONNECTED - elif num_devices > 1: - raise ExportException(sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED) - - def check_disk_format(self) -> Status: + def scan_all_devices(self) -> Status: """ - Check if volume is correctly formatted for export. + Check all connected devices and return current device + status. 
""" try: all_devices = self.cli.get_connected_devices() + number_devices = len(all_devices) - if len(all_devices) == 1: - device = self.cli.get_partitioned_device(all_devices[0]) - logger.info("Check if LUKS") - if not self.cli.is_luks_volume(device): - raise ExportException( - sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED - ) - # We can support checking if a drive is already unlocked, but for - # backwards compatibility, this is the only expected status - # at this stage - return Status.LEGACY_USB_ENCRYPTED + if number_devices == 0: + return Status.NO_DEVICE_DETECTED + elif number_devices > 1: + return Status.MULTI_DEVICE_DETECTED + else: + return self.scan_single_device(all_devices[0]) except ExportException as ex: - logger.error( - f"Error encountered during disk format check: {ex.sdstatus.value}" - ) - # Return legacy status values for now for ongoing client compatibility - if ex.sdstatus in [s for s in NewStatus]: - status = self._legacy_status(ex.sdstatus) - raise ExportException(sdstatus=status) - elif ex.sdstatus: - raise - else: - raise ExportException(sdstatus=Status.LEGACY_USB_DISK_ERROR) + logger.error(ex) + return Status.DEVICE_ERROR # Could not assess devices - def export(self): + def scan_single_device(self, blkid: str) -> Status: """ - Export all files to target device. + Given a string representing a single block device, see if it + is a suitable export target and return information about its state. 
""" - logger.info("Export archive is disk") - try: - all_devices = self.cli.get_connected_devices() + target = self.cli.get_partitioned_device(blkid) + + # See if it's a LUKS drive + if self.cli.is_luks_volume(target): - if len(all_devices) == 1: - device = self.cli.get_partitioned_device(all_devices[0]) - - # Decide what kind of volume it is - logger.info("Check if LUKS") - if self.cli.is_luks_volume(device): - volume = self.cli.get_luks_volume(device) - logger.info("Check if writable") - if not volume.writable: - logger.info("Not writable-will try unlocking") - volume = self.cli.unlock_luks_volume( - volume, self.submission.encryption_key - ) - volume = self.cli.mount_volume(volume) - - logger.info(f"Export submission to {volume.mountpoint}") - self.cli.write_data_to_device( - self.submission.tmpdir, self.submission.target_dirname, volume - ) - # This is SUCCESS_EXPORT, but the 0.7.0 client is not expecting - # a return status from a successful export operation. - # When the client is updated, we will return SUCCESS_EXPORT here. + # Returns Volume or throws ExportException + self.volume = self.cli.get_luks_volume(target) + # See if it's unlocked and mounted + if self.volume.writable: + logger.debug("LUKS device is already mounted") + return Status.DEVICE_WRITABLE else: - # Another kind of drive: VeraCrypt/TC, or unsupported. - # For now this is an error--in future there will be support - # for additional encryption formats - logger.error(f"Export failed because {device} is not supported") - raise ExportException( - sdstatus=Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED - ) + # Prompt for passphrase + return Status.DEVICE_LOCKED + else: + # Might be VeraCrypt, might be madness + logger.info("LUKS drive not found") + + # Currently we don't support anything other than LUKS. 
+ # In future, we will support TC/VC volumes as well + return Status.INVALID_DEVICE_DETECTED except ExportException as ex: - logger.error( - f"Error encountered during disk format check: {ex.sdstatus.value}" - ) - # Return legacy status values for now for ongoing client compatibility - if ex.sdstatus in [s for s in NewStatus]: - status = self._legacy_status(ex.sdstatus) - raise ExportException(sdstatus=status) - elif ex.sdstatus: - raise + logger.error(ex) + if ex.sdstatus: + return ex.sdstatus else: - raise ExportException(sdstatus=Status.LEGACY_ERROR_GENERIC) + return Status.DEVICE_ERROR - def _legacy_status(self, status: NewStatus) -> Status: + def unlock_device(self, passphrase: str, volume: Volume) -> Status: """ - Backwards-compatibility - status values that client (@0.7.0) is expecting. + Given provided passphrase, unlock target volume. Currently, + LUKS volumes are supported. """ - logger.info(f"Convert to legacy: {status.value}") - if status is NewStatus.ERROR_MOUNT: - return Status.LEGACY_ERROR_USB_MOUNT - elif status in [NewStatus.ERROR_EXPORT, NewStatus.ERROR_EXPORT_CLEANUP]: - return Status.LEGACY_ERROR_USB_WRITE - elif status in [NewStatus.ERROR_UNLOCK_LUKS, NewStatus.ERROR_UNLOCK_GENERIC]: - return Status.LEGACY_USB_BAD_PASSPHRASE - elif status in [ - NewStatus.INVALID_DEVICE_DETECTED, - NewStatus.MULTI_DEVICE_DETECTED, - ]: - return Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED - # The other status values, such as Status.NO_DEVICE_DETECTED, are not returned by the - # CLI, so we don't need to check for them here + if volume: + try: + self.volume = self.cli.unlock_luks_volume(volume, passphrase) + + if volume.writable: + return Status.DEVICE_WRITABLE + else: + return Status.ERROR_UNLOCK_LUKS + + except ExportException as ex: + logger.error(ex) + return Status.ERROR_UNLOCK_LUKS else: - return Status.LEGACY_ERROR_GENERIC + # Trying to unlock devices before having an active device + logger.warning("Tried to unlock_device but no current volume detected.") + 
return Status.NO_DEVICE_DETECTED + + def write_to_device(self, volume: Volume, data: Archive) -> Status: + """ + Export data to volume. CLI unmounts and locks volume on completion, even + if export was unsuccessful. + """ + try: + self.cli.write_data_to_device(data.tmpdir, data.target_dirname, volume) + return Status.SUCCESS_EXPORT + + except ExportException as ex: + logger.error(ex) + if ex.sdstatus: + return ex.sdstatus + else: + return Status.ERROR_EXPORT diff --git a/securedrop_export/disk/status.py b/securedrop_export/disk/status.py index fa0bdf869..285d9f8b9 100644 --- a/securedrop_export/disk/status.py +++ b/securedrop_export/disk/status.py @@ -3,24 +3,27 @@ class Status(BaseStatus): - LEGACY_ERROR_GENERIC = "ERROR_GENERIC" + NO_DEVICE_DETECTED = "NO_DEVICE_DETECTED" + INVALID_DEVICE_DETECTED = ( + "INVALID_DEVICE_DETECTED" # Multi partitioned, not encrypted, etc + ) + MULTI_DEVICE_DETECTED = "MULTI_DEVICE_DETECTED" # Not currently supported - # Legacy USB preflight related - LEGACY_USB_CONNECTED = "USB_CONNECTED" # Success - LEGACY_USB_NOT_CONNECTED = "USB_NOT_CONNECTED" - LEGACY_ERROR_USB_CHECK = "ERROR_USB_CHECK" + DEVICE_LOCKED = "DEVICE_LOCKED" # One device detected, and it's locked + DEVICE_WRITABLE = ( + "DEVICE_WRITABLE" # One device detected, and it's unlocked (and mounted) + ) - # Legacy USB Disk preflight related errors - LEGACY_USB_ENCRYPTED = "USB_ENCRYPTED" # Success - LEGACY_USB_ENCRYPTION_NOT_SUPPORTED = "USB_ENCRYPTION_NOT_SUPPORTED" + ERROR_UNLOCK_LUKS = "ERROR_UNLOCK_LUKS" + ERROR_UNLOCK_GENERIC = "ERROR_UNLOCK_GENERIC" + ERROR_MOUNT = "ERROR_MOUNT" # Unlocked but not mounted - # Can be raised during disk format check - LEGACY_USB_DISK_ERROR = "USB_DISK_ERROR" + SUCCESS_EXPORT = "SUCCESS_EXPORT" + ERROR_EXPORT = "ERROR_EXPORT" # Could not write to disk - # Legacy Disk export errors - LEGACY_USB_BAD_PASSPHRASE = "USB_BAD_PASSPHRASE" - LEGACY_ERROR_USB_MOUNT = "ERROR_USB_MOUNT" - LEGACY_ERROR_USB_WRITE = "ERROR_USB_WRITE" + # export succeeds 
but drives were not properly unmounted + ERROR_EXPORT_CLEANUP = "ERROR_EXPORT_CLEANUP" - # New - SUCCESS_EXPORT = "SUCCESS_EXPORT" + DEVICE_ERROR = ( + "DEVICE_ERROR" # Something went wrong while trying to check the device + ) diff --git a/securedrop_export/exceptions.py b/securedrop_export/exceptions.py index 78c0519c8..c70fac6bd 100644 --- a/securedrop_export/exceptions.py +++ b/securedrop_export/exceptions.py @@ -1,8 +1,3 @@ -import logging - -logger = logging.getLogger(__name__) - - class ExportException(Exception): """ Base class for exceptions encountered during export. diff --git a/securedrop_export/main.py b/securedrop_export/main.py index dca8e3f9a..e2910d74a 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -7,11 +7,11 @@ from securedrop_export.archive import Archive, Metadata from securedrop_export.command import Command from securedrop_export.status import BaseStatus -from securedrop_export.directory_util import safe_mkdir +from securedrop_export.directory import safe_mkdir from securedrop_export.exceptions import ExportException -from securedrop_export.disk.service import Service as ExportService -from securedrop_export.print.service import Service as PrintService +from securedrop_export.disk import Service as ExportService +from securedrop_export.print import Service as PrintService from logging.handlers import TimedRotatingFileHandler, SysLogHandler from securedrop_export import __version__ @@ -38,8 +38,11 @@ def entrypoint(): Entrypoint method (Note: a method is required for setuptools). Configure logging, extract tarball, and run desired export service, exiting with return code 0. + + Non-zero exit values will cause the system to try alternative + solutions for mimetype handling, which we want to avoid. 
""" - status, stacktrace, submission = None, None, None + status, submission = None, None try: _configure_logging() @@ -68,16 +71,16 @@ def entrypoint(): except ExportException as ex: logger.error(f"Encountered exception {ex.sdstatus.value}, exiting") + logger.error(ex) status = ex.sdstatus - stacktrace = ex.output except Exception as exc: logger.error("Encountered exception during export, exiting") + logger.error(exc) status = Status.ERROR_GENERIC - stacktrace = exc.output finally: - _exit_gracefully(submission, status=status, e=stacktrace) + _exit_gracefully(submission, status) def _configure_logging(): @@ -141,19 +144,16 @@ def _start_service(submission: Archive) -> Status: return ExportService(submission).check_disk_format() -def _exit_gracefully(submission: Archive, status: BaseStatus = None, e: str = None): +def _exit_gracefully(submission: Archive, status: BaseStatus = None): """ - Utility to print error messages, mostly used during debugging, - then exits successfully despite the error. Always exits 0, - since non-zero exit values will cause system to try alternative + Write status code, ensure file cleanup, and exit with return code 0. + Non-zero exit values will cause the system to try alternative solutions for mimetype handling, which we want to avoid. 
""" if status: logger.info(f"Exit gracefully with status: {status.value}") else: logger.info("Exit gracefully (no status code supplied)") - if e: - logger.error("Captured exception output: {}".format(e.output)) try: # If the file archive was extracted, delete before returning if submission and os.path.isdir(submission.tmpdir): diff --git a/securedrop_export/print/__init__.py b/securedrop_export/print/__init__.py index e69de29bb..3fa6c3647 100644 --- a/securedrop_export/print/__init__.py +++ b/securedrop_export/print/__init__.py @@ -0,0 +1 @@ +from .service import Service # noqa: F401 diff --git a/securedrop_export/print/service.py b/securedrop_export/print/service.py index 4cfad872f..dbff034bf 100644 --- a/securedrop_export/print/service.py +++ b/securedrop_export/print/service.py @@ -22,10 +22,15 @@ class Service: LASERJET_DRIVER = "/usr/share/cups/drv/hpcups.drv" LASERJET_PPD = "/usr/share/cups/model/hp-laserjet_6l.ppd" - def __init__(self, submission): + BROTHER = "Brother" + LASERJET = "LaserJet" + + SUPPORTED_PRINTERS = [BROTHER, LASERJET] + + def __init__(self, submission, printer_timeout_seconds=PRINTER_WAIT_TIMEOUT): self.submission = submission self.printer_name = self.PRINTER_NAME - self.printer_wait_timeout = self.PRINTER_WAIT_TIMEOUT + self.printer_wait_timeout = printer_timeout_seconds # Override during testing def print(self): """ @@ -105,7 +110,7 @@ def _check_printer_setup(self) -> None: raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_FOUND) supported_printers = [ - p for p in printers if any(sub in p for sub in ("Brother", "LaserJet")) + p for p in printers if any(sub in p for sub in self.SUPPORTED_PRINTERS) ] if not supported_printers: logger.info("{} are unsupported printers".format(printers)) @@ -146,7 +151,7 @@ def _get_printer_uri(self) -> str: # No usb printer is connected logger.info("No usb printers connected") raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_FOUND) - elif not any(x in printer_uri for x in ("Brother", 
"LaserJet")): + elif not any(x in printer_uri for x in self.SUPPORTED_PRINTERS): # printer url is a make that is unsupported logger.info("Printer {} is unsupported".format(printer_uri)) raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_SUPPORTED) @@ -155,16 +160,16 @@ def _get_printer_uri(self) -> str: return printer_uri def _install_printer_ppd(self, uri): - if not any(x in uri for x in ("Brother", "LaserJet")): + if not any(x in uri for x in self.SUPPORTED_PRINTERS): logger.error( "Cannot install printer ppd for unsupported printer: {}".format(uri) ) raise ExportException(sdstatus=Status.ERROR_PRINTER_NOT_SUPPORTED) - if "Brother" in uri: + if self.BROTHER in uri: printer_driver = self.BRLASER_DRIVER printer_ppd = self.BRLASER_PPD - elif "LaserJet" in uri: + elif self.LASERJET in uri: printer_driver = self.LASERJET_DRIVER printer_ppd = self.LASERJET_PPD @@ -258,6 +263,10 @@ def _print_file(self, file_to_print): command=["xpp", "-P", self.printer_name, file_to_print], error_status=Status.ERROR_PRINT, ) + # This is an addition to ensure that the entire print job is transferred over. + # If the job is not fully transferred within the timeout window, the user + # will see an error message. 
+ self._wait_for_print() def safe_check_call( self, command: str, error_status: Status, ignore_stderr_startswith=None diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index d174dc445..310e4b2d2 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -6,7 +6,7 @@ from securedrop_export.disk.cli import CLI from securedrop_export.disk.volume import EncryptionScheme, Volume from securedrop_export.exceptions import ExportException -from securedrop_export.disk.new_status import Status +from securedrop_export.disk.status import Status from securedrop_export.archive import Archive diff --git a/tests/disk/test_service.py b/tests/disk/test_service.py index 800a4fd13..17ad3266b 100644 --- a/tests/disk/test_service.py +++ b/tests/disk/test_service.py @@ -4,11 +4,11 @@ import tempfile from securedrop_export.exceptions import ExportException -from securedrop_export.disk.status import Status -from securedrop_export.disk.new_status import Status as NewStatus +from securedrop_export.disk.legacy_status import Status as LegacyStatus +from securedrop_export.disk.status import Status as Status from securedrop_export.disk.volume import Volume, EncryptionScheme from securedrop_export.archive import Archive, Metadata -from securedrop_export.disk.service import Service +from securedrop_export.disk.legacy_service import Service from securedrop_export.disk.cli import CLI SAMPLE_OUTPUT_LSBLK_NO_PART = b"disk\ncrypt" # noqa @@ -78,14 +78,14 @@ def teardown_method(self, method): def test_check_usb(self): status = self.service.check_connected_devices() - assert status is Status.LEGACY_USB_CONNECTED + assert status is LegacyStatus.LEGACY_USB_CONNECTED def test_no_devices_connected(self): self.mock_cli.get_connected_devices.return_value = [] with pytest.raises(ExportException) as ex: self.service.check_connected_devices() - assert ex.value.sdstatus is Status.LEGACY_USB_NOT_CONNECTED + assert ex.value.sdstatus is LegacyStatus.LEGACY_USB_NOT_CONNECTED def 
test_too_many_devices_connected(self): self.mock_cli.get_connected_devices.return_value = [ @@ -95,7 +95,7 @@ def test_too_many_devices_connected(self): with pytest.raises(ExportException) as ex: self.service.check_connected_devices() - assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + assert ex.value.sdstatus is LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED def test_device_is_not_luks(self): self.mock_cli.is_luks_volume.return_value = False @@ -105,33 +105,33 @@ def test_device_is_not_luks(self): with pytest.raises(ExportException) as ex: self.service.check_disk_format() - assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + assert ex.value.sdstatus is LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED def test_check_usb_error(self): self.mock_cli.get_connected_devices.side_effect = ExportException( - sdstatus=Status.LEGACY_ERROR_USB_CHECK + sdstatus=LegacyStatus.LEGACY_ERROR_USB_CHECK ) with pytest.raises(ExportException) as ex: self.service.check_connected_devices() - assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_CHECK + assert ex.value.sdstatus is LegacyStatus.LEGACY_ERROR_USB_CHECK def test_check_disk_format(self): status = self.service.check_disk_format() - assert status is Status.LEGACY_USB_ENCRYPTED + assert status is LegacyStatus.LEGACY_USB_ENCRYPTED def test_check_disk_format_error(self): self.mock_cli.get_partitioned_device.side_effect = ExportException( - sdstatus=NewStatus.INVALID_DEVICE_DETECTED + sdstatus=Status.INVALID_DEVICE_DETECTED ) with pytest.raises(ExportException) as ex: self.service.check_disk_format() # We still return the legacy status for now - assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + assert ex.value.sdstatus is LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED def test_export(self): # Currently, a successful export does not return a success status. 
@@ -145,59 +145,59 @@ def test_export_disk_not_supported(self): with pytest.raises(ExportException) as ex: self.service.export() - assert ex.value.sdstatus is Status.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED + assert ex.value.sdstatus is LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED def test_export_write_error(self): self.mock_cli.is_luks_volume.return_value = True self.mock_cli.write_data_to_device.side_effect = ExportException( - sdstatus=Status.LEGACY_ERROR_USB_WRITE + sdstatus=LegacyStatus.LEGACY_ERROR_USB_WRITE ) with pytest.raises(ExportException) as ex: self.service.export() - assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_WRITE + assert ex.value.sdstatus is LegacyStatus.LEGACY_ERROR_USB_WRITE def test_export_throws_new_exception_return_legacy_status(self): self.mock_cli.get_connected_devices.side_effect = ExportException( - sdstatus=NewStatus.ERROR_MOUNT + sdstatus=Status.ERROR_MOUNT ) with pytest.raises(ExportException) as ex: self.service.export() - assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_MOUNT + assert ex.value.sdstatus is LegacyStatus.LEGACY_ERROR_USB_MOUNT @mock.patch("os.path.exists", return_value=True) def test_write_error_returns_legacy_status(self, mock_path): self.mock_cli.is_luks_volume.return_value = True self.mock_cli.write_data_to_device.side_effect = ExportException( - sdstatus=NewStatus.ERROR_EXPORT + sdstatus=Status.ERROR_EXPORT ) with pytest.raises(ExportException) as ex: self.service.export() - assert ex.value.sdstatus is Status.LEGACY_ERROR_USB_WRITE + assert ex.value.sdstatus is LegacyStatus.LEGACY_ERROR_USB_WRITE @mock.patch("os.path.exists", return_value=True) def test_unlock_error_returns_legacy_status(self, mock_path): self.mock_cli.unlock_luks_volume.side_effect = ExportException( - sdstatus=NewStatus.ERROR_UNLOCK_LUKS + sdstatus=Status.ERROR_UNLOCK_LUKS ) with pytest.raises(ExportException) as ex: self.service.export() - assert ex.value.sdstatus is Status.LEGACY_USB_BAD_PASSPHRASE + assert ex.value.sdstatus is 
LegacyStatus.LEGACY_USB_BAD_PASSPHRASE @mock.patch("os.path.exists", return_value=True) def test_unexpected_error_returns_legacy_status_generic(self, mock_path): self.mock_cli.unlock_luks_volume.side_effect = ExportException( - sdstatus=NewStatus.DEVICE_ERROR + sdstatus=Status.DEVICE_ERROR ) with pytest.raises(ExportException) as ex: self.service.export() - assert ex.value.sdstatus is Status.LEGACY_ERROR_GENERIC + assert ex.value.sdstatus is LegacyStatus.LEGACY_ERROR_GENERIC diff --git a/tests/disk/test_status.py b/tests/disk/test_status.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/print/test_service.py b/tests/print/test_service.py index dfff606d2..ffaee6861 100644 --- a/tests/print/test_service.py +++ b/tests/print/test_service.py @@ -5,7 +5,7 @@ import subprocess from subprocess import CalledProcessError -from securedrop_export.directory_util import safe_mkdir +from securedrop_export.directory import safe_mkdir from securedrop_export.exceptions import ExportException from securedrop_export.archive import Archive @@ -39,6 +39,9 @@ def teardown_class(cls): cls.service = None cls.submission = None + def setup_method(self): + self.service.printer_wait_timeout = self.service.PRINTER_WAIT_TIMEOUT + @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_BROTHER_PRINTER) def test_get_good_printer_uri_laserjet(self, mocked_call): assert ( @@ -67,7 +70,8 @@ def test_print_all_methods_called(self): patch_setup.stop() patch_print.stop() - def test_printer_test_all_methods_called(self): + @mock.patch("securedrop_export.print.service.Service._wait_for_print") + def test_printer_preflight_all_methods_called(self, mock_wait): patch_setup = mock.patch.object(self.service, "_check_printer_setup") mock_setup = patch_setup.start() @@ -80,7 +84,8 @@ def test_printer_test_all_methods_called(self): patch_setup.stop() - def test_print_all_checks_called(self): + @mock.patch("securedrop_export.print.service.Service._wait_for_print") + def 
test_print_testpage_all_checks_called(self, mock_wait): patch_setup = mock.patch.object(self.service, "_check_printer_setup") patch_print = mock.patch.object(self.service, "_print_test_page") @@ -208,7 +213,7 @@ def test_safe_check_call_write_to_stderr_wrong_ignore_param(self): assert ex.value.sdstatus is Status.ERROR_PRINT - @mock.patch("time.sleep", return_value=None) + @mock.patch("securedrop_export.print.service.time.sleep", return_value=None) @mock.patch( "subprocess.check_output", side_effect=[ @@ -219,12 +224,12 @@ def test_safe_check_call_write_to_stderr_wrong_ignore_param(self): def test__wait_for_print(self, mock_subprocess, mock_time): assert self.service._wait_for_print() - @mock.patch("time.sleep", return_value=None) @mock.patch( "subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "check_output"), ) - def test__wait_for_print_print_exception(self, mock_subprocess, mock_time): + @mock.patch("time.sleep", return_value=None) + def test__wait_for_print_print_exception(self, mock_time, mock_subprocess): with pytest.raises(ExportException) as ex: self.service._wait_for_print() @@ -233,7 +238,7 @@ def test__wait_for_print_print_exception(self, mock_subprocess, mock_time): @mock.patch( "subprocess.check_output", return_value=b"printer sdw-printer is busy\n" ) - def test__wait_for_print_timeout_exception(self, mock_subprocess): + def test__wait_for_print_timeout_exception(self, mock_output): self.service.printer_wait_timeout = 1 with pytest.raises(ExportException) as ex: @@ -241,8 +246,6 @@ def test__wait_for_print_timeout_exception(self, mock_subprocess): assert ex.value.sdstatus is Status.ERROR_PRINT - self.service.printer_wait_timeout = self.service.PRINTER_WAIT_TIMEOUT - @pytest.mark.parametrize( "printers", [SAMPLE_OUTPUT_BROTHER_PRINTER, SAMPLE_OUTPUT_LASERJET_PRINTER] ) @@ -322,7 +325,8 @@ def test__install_printer_ppd_error_unsupported_uri(self): ) assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED - def 
test__print_test_page_calls_method(self): + @mock.patch("securedrop_export.print.service.Service._wait_for_print") + def test__print_test_page_calls_method(self, mock_wait): p = mock.patch.object(self.service, "_print_file") mock_print = p.start() @@ -330,7 +334,8 @@ def test__print_test_page_calls_method(self): mock_print.assert_called_once_with("/usr/share/cups/data/testprint") p.stop() - def test__print_all_files(self): + @mock.patch("securedrop_export.print.service.Service._wait_for_print") + def test__print_all_files(self, mock_wait): p = mock.patch.object(self.service, "_print_file") mock_print = p.start() @@ -345,7 +350,8 @@ def test__print_all_files(self): ) p.stop() - def test_open_office_file_convert_to_pdf(self): + @mock.patch("securedrop_export.print.service.Service._wait_for_print") + def test_open_office_file_convert_to_pdf(self, mock_wait): file = "/tmp/definitely-an-office-file.odt" with mock.patch.object(self.service, "safe_check_call") as scc, mock.patch( @@ -391,3 +397,43 @@ def test_safe_check_call_has_error_in_stderr(self): self.service.safe_check_call(command="ls", error_status=Status.TEST_SUCCESS) assert ex.value.sdstatus is Status.TEST_SUCCESS + + @mock.patch("securedrop_export.print.service.time.sleep", return_value=None) + @mock.patch( + "subprocess.check_output", + side_effect=[ + b"printer sdw-printer is busy\n", + b"printer sdw-printer is idle\n", + ], + ) + def test__wait_for_print_waits_correctly(self, mock_subprocess, mock_time): + file = "/tmp/happy-to-print-you.pdf" + + with mock.patch.object(self.service, "safe_check_call") as scc, mock.patch( + "securedrop_export.print.service.logger.info" + ) as log: + self.service._print_file(file) + + assert scc.call_count == 1 + scc.assert_has_calls( + [ + mock.call( + command=[ + "xpp", + "-P", + "sdw-printer", + "/tmp/happy-to-print-you.pdf", + ], + error_status=Status.ERROR_PRINT, + ), + ] + ) + assert log.call_count == 4 + log.assert_has_calls( + [ + mock.call("Sending file to printer 
sdw-printer"), + mock.call("Running lpstat waiting for printer sdw-printer"), + mock.call("Running lpstat waiting for printer sdw-printer"), + mock.call("Print completed"), + ] + ) diff --git a/tests/test_directory_util.py b/tests/test_directory.py similarity index 67% rename from tests/test_directory_util.py rename to tests/test_directory.py index cc1f304a8..2f0a3a9ef 100644 --- a/tests/test_directory_util.py +++ b/tests/test_directory.py @@ -4,10 +4,10 @@ import shutil from pathlib import Path -from securedrop_export import directory_util +from securedrop_export import directory -class TestDirectoryUtil: +class TestDirectory: _REL_TRAVERSAL = "../../../whee" _SAFE_RELPATH = "./hi" @@ -32,53 +32,51 @@ def teadown_method(self, method): def test_safe_mkdir_error_base_relpath(self): with pytest.raises(ValueError): - directory_util.safe_mkdir(base_path=Path(".")) + directory.safe_mkdir(base_path=Path(".")) def test_safe_mkdir_error_basepath_path_traversal(self): with pytest.raises(ValueError): - directory_util.safe_mkdir(f"{self.homedir}{self._REL_TRAVERSAL}") + directory.safe_mkdir(f"{self.homedir}{self._REL_TRAVERSAL}") def test_safe_mkdir_error_relpath_path_traversal(self): with pytest.raises(ValueError): - directory_util.safe_mkdir(f"{self.homedir}", f"{self._REL_TRAVERSAL}") + directory.safe_mkdir(f"{self.homedir}", f"{self._REL_TRAVERSAL}") def test_safe_mkdir_success(self): - directory_util.safe_mkdir(f"{self.homedir}") + directory.safe_mkdir(f"{self.homedir}") def test_safe_mkdir_success_with_relpath(self): - directory_util.safe_mkdir(f"{self.homedir}", f"{self._SAFE_RELPATH}") + directory.safe_mkdir(f"{self.homedir}", f"{self._SAFE_RELPATH}") assert os.path.exists(f"{self.homedir}{self._SAFE_RELPATH}") def test_safe_mkdir_success_another_relpath(self): - directory_util.safe_mkdir(f"{self.homedir}", f"{self._SAFE_RELPATH2}") + directory.safe_mkdir(f"{self.homedir}", f"{self._SAFE_RELPATH2}") assert os.path.exists(f"{self.homedir}{self._SAFE_RELPATH2}") def 
test_safe_mkdir_weird_path(self): with pytest.raises(ValueError): - directory_util.safe_mkdir(f"{self.homedir}", f"{self._UNSAFE_RELPATH}") + directory.safe_mkdir(f"{self.homedir}", f"{self._UNSAFE_RELPATH}") def test__check_all_permissions_path_missing(self): with pytest.raises(ValueError): - directory_util._check_all_permissions( - f"{self.homedir}", f"{self._SAFE_RELPATH}" - ) + directory._check_all_permissions(f"{self.homedir}", f"{self._SAFE_RELPATH}") def test_check_dir_perms_unsafe(self): path = Path(f"{self.homedir}{self._SAFE_RELPATH}") - directory_util.safe_mkdir(path) + directory.safe_mkdir(path) # Not what we want, ever path.chmod(0o666) with pytest.raises(RuntimeError): - directory_util._check_dir_permissions(path) + directory._check_dir_permissions(path) def test_check_all_perms_invalid_full_path(self): path = Path(f"{self.homedir}/idontexist") base = Path(f"{self.homedir}") # Returns without error - assert directory_util._check_all_permissions(path, base) is None + assert directory._check_all_permissions(path, base) is None diff --git a/tests/test_main.py b/tests/test_main.py index b94109a88..41fce7f62 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -37,25 +37,14 @@ def test_exit_gracefully_no_exception(self, capsys): with pytest.raises(SystemExit) as sysexit: _exit_gracefully(self.submission, Status.ERROR_GENERIC) - # A graceful exit means a return code of 0 - assert sysexit.value.code == 0 - - captured = capsys.readouterr() - assert captured.err == "{}\n".format(Status.ERROR_GENERIC.value) - assert captured.out == "" + assert self._did_exit_gracefully(sysexit, capsys, Status.ERROR_GENERIC) def test_exit_gracefully_exception(self, capsys): with pytest.raises(SystemExit) as sysexit: - exception = mock.MagicMock() - exception.output = "BANG!" 
- _exit_gracefully(self.submission, Status.ERROR_GENERIC, e=exception) + _exit_gracefully(self.submission, Status.ERROR_GENERIC) # A graceful exit means a return code of 0 - assert sysexit.value.code == 0 - - captured = capsys.readouterr() - assert captured.err.rstrip() == Status.ERROR_GENERIC.value # todo - assert captured.out == "" + assert self._did_exit_gracefully(sysexit, capsys, Status.ERROR_GENERIC) @pytest.mark.parametrize("status", [s for s in Status]) def test_write_status(self, status, capsys): @@ -73,7 +62,13 @@ def _did_exit_gracefully(self, exit, capsys, status: BaseStatus) -> bool: """ Helper. True if exited with 0, writing supplied status to stderr. """ - return exit.value.code == 0 and capsys.readouterr().err == status.value + "\n" + captured = capsys.readouterr() + + return ( + exit.value.code == 0 + and captured.err.rstrip().endswith(status.value) + and captured.out == "" + ) @pytest.mark.parametrize("command", list(Command)) @mock.patch("securedrop_export.main._configure_logging") @@ -119,8 +114,9 @@ def test_valid_printer_test_config(self, capsys): ) @mock.patch("securedrop_export.main.os.path.exists", return_value=True) @mock.patch("securedrop_export.main.shutil.rmtree") + @mock.patch("securedrop_export.main._configure_logging") def test_entrypoint_failure_extraction( - self, mock_rm, mock_path, mock_extract, capsys + self, mock_log, mock_rm, mock_path, mock_extract, capsys ): with mock.patch( "sys.argv", ["qvm-send-to-usb", SUBMISSION_SAMPLE_ARCHIVE] @@ -135,7 +131,7 @@ def test_entrypoint_failure_extraction( "securedrop_export.main._configure_logging", side_effect=ExportException( sdstatus=Status.ERROR_LOGGING, - message="Zounds, an error setting up logging!", + sderror="Zounds, an error setting up logging!", ), ) def test_entrypoint_logging_fails(self, mock_mkdir, capsys): From b835101617edc03d681d2c79b6c9f8e368d89429 Mon Sep 17 00:00:00 2001 From: Alex Date: Wed, 18 Jan 2023 14:05:13 +0100 Subject: [PATCH 312/352] [update-safety-alerts] 
add project.json, silence 2 safety alerts --- project.json | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 project.json diff --git a/project.json b/project.json new file mode 100644 index 000000000..ffe297927 --- /dev/null +++ b/project.json @@ -0,0 +1,8 @@ +{ + "variables": { + "SAFETY_IGNORE_IDS": [ + "51499", + "52495" + ] + } +} From 6c2dec374adbe2b64966a1a4e4cd2fa6ea957563 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Tue, 31 Jan 2023 17:50:02 -0500 Subject: [PATCH 313/352] Update wheel checksums after rebuilding for Bookworm / 3.11 See . --- requirements/build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index af35aa35d..5548ed0c2 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -3,7 +3,7 @@ charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2 furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f -pyyaml==5.4.1 --hash=sha256:24ba69a7c05ba63fef9732bf26cc5d328b2089e525ee87fc9ec572c92f88dd46 --hash=sha256:be111e40b3e32707b373b90ef490fa0908bf7769c77f8cf940004f0c957954f6 --hash=sha256:645773490bf785cd110b4a5e47635990c46219b7c4f01b424f0409cf01d12f2b +pyyaml==5.4.1 --hash=sha256:9608c1b459ff310fe7fa78e8a9e12767a9a0ea9e3fa7cce116db58f95b61f56f --hash=sha256:f7190863a72d6eb89ed92e345e178a0803c439fd7126985b62c1c113cb01e534 requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.26.6 --hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 From d18bcc991a6018611d58fe74fe9119ddf8bfa6e3 
Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 7 Feb 2023 12:58:16 -0800 Subject: [PATCH 314/352] Fix: imports/namespacing --- securedrop_export/disk/__init__.py | 2 +- securedrop_export/main.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/securedrop_export/disk/__init__.py b/securedrop_export/disk/__init__.py index 3fa6c3647..6af1f1a09 100644 --- a/securedrop_export/disk/__init__.py +++ b/securedrop_export/disk/__init__.py @@ -1 +1 @@ -from .service import Service # noqa: F401 +from .legacy_service import Service as LegacyService # noqa: F401 diff --git a/securedrop_export/main.py b/securedrop_export/main.py index e2910d74a..b52d960c9 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -10,7 +10,7 @@ from securedrop_export.directory import safe_mkdir from securedrop_export.exceptions import ExportException -from securedrop_export.disk import Service as ExportService +from securedrop_export.disk import LegacyService as ExportService from securedrop_export.print import Service as PrintService from logging.handlers import TimedRotatingFileHandler, SysLogHandler From 24cfed80fb26c70df54dec59c60e42d124e915ba Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Tue, 10 Jan 2023 11:40:14 -0500 Subject: [PATCH 315/352] Stop using furl We needed to update furl for Python 3.10+ support, but after looking at it, we can drop it entirely. The valid_path() check to see if the path contains a hostname is straightforward to switch to urllib.parse.urlparse(). In prep_request(), we use furl for validation and path normalization. urlparse() is actually better for validation since it parses against the URL spec[1]. But one weird part is that port validation only applies when you actually access the `.port` member variable. For the path normalization, I just copied the 3 lines (now 8) of code out of furl. It should be very stable, it was never modified in furl once it was added 9 years ago. 
Finally, the test_proxy_500_misconfiguration test case had to be adjusted, because it relied on furl incorrectly interpreting an extra colon as an invalid port. The config now uses an explicitly invalid port ("-1") so it fails under the new code. [1] https://url.spec.whatwg.org/ Fixes #105. --- requirements/build-requirements.txt | 3 -- requirements/dev-requirements.txt | 13 --------- requirements/requirements.in | 3 -- requirements/requirements.txt | 17 ----------- securedrop_proxy/proxy.py | 44 +++++++++++++++++++++-------- tests/files/invalid-config.yaml | 4 +-- 6 files changed, 34 insertions(+), 50 deletions(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 5548ed0c2..13435c1eb 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -1,10 +1,7 @@ certifi==2022.12.7 --hash=sha256:7f205a1a4f02f4970fb5d0e16457964bb30d6b678a766515278bc56e6eeb645f charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2a536bf2c5b620b1f27d03d74 -furl==2.0.0 --hash=sha256:9f50360f6e4a0f1d0a35fb4997878e7186a73331f0fde5f6fc9b1bb9f006e6cc idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 -orderedmultidict==1.0 --hash=sha256:f6022beda2b3387c61e6eb7e0e1e3e2832fd9f55f3f64d4b4b226eea7487327f pyyaml==5.4.1 --hash=sha256:9608c1b459ff310fe7fa78e8a9e12767a9a0ea9e3fa7cce116db58f95b61f56f --hash=sha256:f7190863a72d6eb89ed92e345e178a0803c439fd7126985b62c1c113cb01e534 requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 -six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.26.6 --hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 werkzeug==2.0.2 --hash=sha256:55e8ebd03bf69dc51cd986ba7bf3e25f549bb27a22de9d6bdd15c855ba8f1f99 diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 6dc72691a..e8c50264c 100644 
--- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -96,10 +96,6 @@ flake8==6.0.0 \ --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 # via -r requirements/dev-requirements.in -furl==2.0.0 \ - --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ - --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec - # via -r requirements/requirements.in idna==3.2 \ --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 @@ -234,12 +230,6 @@ mypy-extensions==0.4.3 \ # -r requirements/dev-requirements.in # black # mypy -orderedmultidict==1.0 \ - --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ - --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 - # via - # -r requirements/requirements.in - # furl packaging==22.0 \ --hash=sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3 \ --hash=sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3 @@ -314,9 +304,6 @@ six==1.11.0 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb # via # -r requirements/dev-requirements.in - # -r requirements/requirements.in - # furl - # orderedmultidict # vcrpy tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ diff --git a/requirements/requirements.in b/requirements/requirements.in index 66c290ef2..d7cdcdc2a 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -1,10 +1,7 @@ certifi>=2022.12.07 charset-normalizer>=2.0.4 -furl==2.0.0 idna>=2.7 -orderedmultidict==1.0 pyyaml==5.4.1 requests>=2.26.0 -six==1.11.0 urllib3>=1.26.5 werkzeug>=0.16.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 
94b944bd8..71611f287 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -16,22 +16,12 @@ charset-normalizer==2.0.4 \ # via # -r requirements/requirements.in # requests -furl==2.0.0 \ - --hash=sha256:f7e90e9f85ef3f2e64485f04c2a80b50af6133942812fd87a44d45305b079018 \ - --hash=sha256:fdcaedc1fb19a63d7d875b0105b0a5b496dd0989330d454a42bcb401fa5454ec - # via -r requirements/requirements.in idna==3.2 \ --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 # via # -r requirements/requirements.in # requests -orderedmultidict==1.0 \ - --hash=sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b \ - --hash=sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5 - # via - # -r requirements/requirements.in - # furl pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ @@ -67,13 +57,6 @@ requests==2.26.0 \ --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 # via -r requirements/requirements.in -six==1.11.0 \ - --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ - --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb - # via - # -r requirements/requirements.in - # furl - # orderedmultidict urllib3==1.26.6 \ --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index e5a7c2da7..b21320516 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -1,13 +1,14 @@ import http import logging import os +import posixpath import subprocess import sys 
import tempfile import uuid from typing import IO, Dict, Optional +from urllib.parse import ParseResult, urlparse -import furl # type: ignore import requests import werkzeug import yaml @@ -57,11 +58,8 @@ def on_done(self) -> None: @staticmethod def valid_path(path: str) -> bool: - u = furl.furl(path) - - if u.host is not None: - return False - return True + """Check does not contain a hostname in the path""" + return urlparse(path).hostname is None def err_on_done(self): print(json.dumps(self.res.__dict__)) @@ -154,6 +152,25 @@ def simple_error(self, status: int, err: str) -> None: self.res = res + def normalize_path(self, parsed: ParseResult) -> ParseResult: + """ + This is copied from the furl library + SPDX-License-Identifier: Unlicense + + Example: '//a/./b/../c//' -> '/a/c/' + """ + path = parsed.path + is_dir = path[-1] == "/" + path = posixpath.normpath(path) + if is_dir: + # Re-add the trailing / + path += "/" + if path.startswith("//"): + # https://bugs.python.org/issue636648 + path = "/" + path.lstrip("/") + + return parsed._replace(path=path) + def prep_request(self) -> None: scheme = self.conf.scheme @@ -167,16 +184,19 @@ def prep_request(self) -> None: self.simple_error(400, "Path provided in request did not look valid") raise ValueError("Path provided was invalid") + parsed = urlparse("{}://{}:{}/{}".format(scheme, host, port, path)) + parsed = self.normalize_path(parsed) + + # urlparse only errors on an invalid port if you examine it + # manually. 
try: - url = furl.furl("{}://{}:{}/{}".format(scheme, host, port, path)) - except Exception as e: - logger.error(e) + _ = parsed.port + except ValueError as err: + logger.error(err) self.simple_error(500, "Proxy error while generating URL to request") raise ValueError("Error generating URL from provided values") - url.path.normalize() - - preq = requests.Request(method, url.url) + preq = requests.Request(method, parsed.geturl()) preq.headers = self.req.headers preq.data = self.req.body prep = preq.prepare() diff --git a/tests/files/invalid-config.yaml b/tests/files/invalid-config.yaml index 1338eefc3..29d7af94c 100644 --- a/tests/files/invalid-config.yaml +++ b/tests/files/invalid-config.yaml @@ -1,5 +1,5 @@ -host: jsonplaceholder.typicode.com -scheme: https://http +host: badport.local:-1 +scheme: https port: 443 target_vm: compost dev: False From afc33dcfe9f0878a7a65e4c7f49265946275563c Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 16 Feb 2023 13:20:06 -0500 Subject: [PATCH 316/352] Replace werkzeug dependency with basic string checks werkzeug is ~19k lines of Python, which is a very heavy dependency just to parse a content-type header. The format of the header is pretty simple, via MDN[1] the syntax roughly is: Content-Type: text/html; charset=utf-8 Content-Type: multipart/form-data; boundary=something All we want to know is whether the type is JSON or not, so we can just check if the mime part of the value is "application/json". Originally I implemented this as a regex (`^application/json($|;)`), but realized that wasn't even necessary. This code is already covered by tests, but I added a few to demonstrate basic functionality of the new function. [1] https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type Fixes #110. 
--- requirements/build-requirements.txt | 1 - requirements/dev-requirements.txt | 8 ++------ requirements/requirements.in | 1 - requirements/requirements.txt | 8 ++------ securedrop_proxy/proxy.py | 14 ++++++++++---- tests/test_proxy.py | 5 +++++ 6 files changed, 19 insertions(+), 18 deletions(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 5548ed0c2..2ded7f105 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -7,4 +7,3 @@ pyyaml==5.4.1 --hash=sha256:9608c1b459ff310fe7fa78e8a9e12767a9a0ea9e3fa7cce116db requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 six==1.11.0 --hash=sha256:eb52689b06ca7433c1cac3b91f320400bd3b358790b7ff4b6367cb1c81d37561 urllib3==1.26.6 --hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 -werkzeug==2.0.2 --hash=sha256:55e8ebd03bf69dc51cd986ba7bf3e25f549bb27a22de9d6bdd15c855ba8f1f99 diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 6dc72691a..624daf3db 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # @@ -353,10 +353,6 @@ vcrpy==4.2.1 \ --hash=sha256:7cd3e81a2c492e01c281f180bcc2a86b520b173d2b656cb5d89d99475423e013 \ --hash=sha256:efac3e2e0b2af7686f83a266518180af7a048619b2f696e7bad9520f5e2eac09 # via -r requirements/dev-requirements.in -werkzeug==2.0.2 \ - --hash=sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f \ - --hash=sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a - # via -r requirements/requirements.in wheel==0.38.4 
\ --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 diff --git a/requirements/requirements.in b/requirements/requirements.in index 66c290ef2..c01325cf6 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -7,4 +7,3 @@ pyyaml==5.4.1 requests>=2.26.0 six==1.11.0 urllib3>=1.26.5 -werkzeug>=0.16.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 94b944bd8..4d3344ea3 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in # @@ -80,7 +80,3 @@ urllib3==1.26.6 \ # via # -r requirements/requirements.in # requests -werkzeug==2.0.2 \ - --hash=sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f \ - --hash=sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a - # via -r requirements/requirements.in diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index e5a7c2da7..90ad530f0 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -9,7 +9,6 @@ import furl # type: ignore import requests -import werkzeug import yaml import securedrop_proxy.version as version @@ -18,6 +17,15 @@ logger = logging.getLogger(__name__) +def is_json_content_type(value: str) -> bool: + """ + Is the value of the content-type header JSON? 
+ + See + """ + return value == "application/json" or value.startswith("application/json;") + + class Conf: scheme = "" host = "" @@ -214,9 +222,7 @@ def handle_non_json_response(self) -> None: def handle_response(self) -> None: logger.debug("Handling response") - ctype = werkzeug.http.parse_options_header(self._presp.headers["content-type"]) - - if ctype[0] == "application/json": + if is_json_content_type(self._presp.headers["content-type"]): self.handle_json_response() else: self.handle_non_json_response() diff --git a/tests/test_proxy.py b/tests/test_proxy.py index e09509859..5d6a10f24 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -386,6 +386,11 @@ def test_production_on_save(self): p.on_save(fh, self.res) self.assertEqual(patched_run.call_args[0][0][0], "qvm-move-to-vm") + def test_is_json_content_type(self): + self.assertTrue(proxy.is_json_content_type("application/json")) + self.assertTrue(proxy.is_json_content_type("application/json; charset=utf-8")) + self.assertFalse(proxy.is_json_content_type("application/yaml")) + class TestConfig(unittest.TestCase): def setUp(self): From 849072e371f3d5045478905b7629a3a612acd113 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 16 Feb 2023 14:46:33 -0500 Subject: [PATCH 317/352] Drop buster jobs from CI Will add bookworm ones later as part of parameterizing and overhauling the CI config. 
--- .circleci/config.yml | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0540c1050..0e19e5c06 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -38,12 +38,6 @@ common-steps: version: 2 jobs: - test: - docker: - - image: debian:buster - steps: - - checkout - - *run_tests test-bullseye: docker: - image: debian:bullseye @@ -51,15 +45,6 @@ jobs: - checkout - *run_tests - build-buster: - docker: - - image: debian:buster - steps: - - checkout - - *install_packaging_dependencies - - *verify_requirements - - *build_debian_package - build-bullseye: docker: - image: debian:bullseye @@ -73,7 +58,5 @@ workflows: version: 2 per_pr: jobs: - - test - test-bullseye - - build-buster - build-bullseye From 4f9e422c340ed7227adfaba2fc0e0b100945a63f Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 16 Feb 2023 14:44:24 -0500 Subject: [PATCH 318/352] Configure flake8 and mypy This is slightly different than other repositories because most of the code is in individual scripts rather than a typical Python package (the same problem as securedrop-workstation). CI now calls `make check` instead of just running tests (copied from securedrop-proxy). Fixes #37. 
--- .circleci/config.yml | 8 +- .flake8 | 8 ++ Makefile | 13 +++ pyproject.toml | 9 ++ requirements/dev-requirements.in | 5 + requirements/dev-requirements.txt | 179 +++++++++++++++++++++++++++--- 6 files changed, 205 insertions(+), 17 deletions(-) create mode 100644 .flake8 create mode 100644 pyproject.toml diff --git a/.circleci/config.yml b/.circleci/config.yml index 0e19e5c06..da9d88844 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,10 +2,12 @@ common-steps: - &run_tests run: - name: Run tests + name: Install requirements and run tests command: | - apt-get update && apt-get install -y python3 - python3 -m unittest + apt-get update && apt-get install -y make python3-venv + make venv + source .venv/bin/activate + make check - &install_packaging_dependencies run: diff --git a/.flake8 b/.flake8 new file mode 100644 index 000000000..f53bdba4d --- /dev/null +++ b/.flake8 @@ -0,0 +1,8 @@ +[flake8] +max-line-length = 100 +extend-exclude = .venv +filename = + *.py + securedrop-log + securedrop-log-saver + securedrop-redis-log diff --git a/Makefile b/Makefile index 3b8e95fcb..c0b027a84 100644 --- a/Makefile +++ b/Makefile @@ -47,6 +47,19 @@ update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the late pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in; \ done < 'requirements/dev-requirements.in' +.PHONY: check +check: flake8 mypy test + +.PHONY: flake8 +flake8: ## Run flake8 to lint Python files + flake8 + +mypy: ## Type check Python files + mypy + +test: ## Run Python unit tests + python3 -m unittest + # Explaination of the below shell command should it ever break. # 1. Set the field separator to ": ##" and any make targets that might appear between : and ## # 2. 
Use sed-like syntax to remove the make targets diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..0db7522bb --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,9 @@ +[tool.mypy] +python_version = "3.9" +scripts_are_modules = true +files = [ + "*.py", + "securedrop-log", + "securedrop-log-saver", + "securedrop-redis-log", +] diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in index 2c2a9f384..28f7362de 100644 --- a/requirements/dev-requirements.in +++ b/requirements/dev-requirements.in @@ -1 +1,6 @@ +flake8 pip-tools + +mypy +types-redis +types-setuptools diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 51774d6bc..4deb73f0b 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -1,19 +1,146 @@ # -# This file is autogenerated by pip-compile with python 3.7 +# This file is autogenerated by pip-compile with python 3.9 # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + 
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + 
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + 
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography click==8.0.1 \ --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ --hash=sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6 # via pip-tools -importlib-metadata==4.6.4 \ - --hash=sha256:7b30a78db2922d78a6f47fb30683156a14f3c6aa5cc23f77cc8967e9ab2d002f \ - --hash=sha256:ed5157fef23a4bc4594615a0dd8eba94b2bb36bf2a343fa3d8bb2fa0a62a99d5 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885 \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 
\ + --hash=sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via - # click - # pep517 + # types-pyopenssl + # types-redis +flake8==6.0.0 \ + --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ + --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 + # via -r requirements/dev-requirements.in +mccabe==0.7.0 \ + --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ + --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e + # via flake8 +mypy==1.0.0 \ + --hash=sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2 \ + --hash=sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593 \ + --hash=sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52 \ + --hash=sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c \ + --hash=sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f \ + --hash=sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21 \ + --hash=sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af \ + --hash=sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36 \ + 
--hash=sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805 \ + --hash=sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb \ + --hash=sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88 \ + --hash=sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5 \ + --hash=sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072 \ + --hash=sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1 \ + --hash=sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964 \ + --hash=sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7 \ + --hash=sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a \ + --hash=sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd \ + --hash=sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74 \ + --hash=sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43 \ + --hash=sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d \ + --hash=sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457 \ + --hash=sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c \ + --hash=sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af \ + --hash=sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf \ + --hash=sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d + # via -r requirements/dev-requirements.in +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 + # via mypy pep517==0.11.0 \ --hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 @@ -22,6 +149,18 @@ pip-tools==6.2.0 \ 
--hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 # via -r requirements/dev-requirements.in +pycodestyle==2.10.0 \ + --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ + --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 + # via flake8 +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pyflakes==3.0.1 \ + --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ + --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd + # via flake8 redis==3.3.11 \ --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 @@ -29,22 +168,34 @@ redis==3.3.11 \ tomli==1.2.1 \ --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ --hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 - # via pep517 + # via + # mypy + # pep517 +types-docutils==0.19.1.4 \ + --hash=sha256:1b64b21b609ff1fc7791d3d930f14b56b36ad09029fd97e45e34cc889d671b5f \ + --hash=sha256:870d71f3663141f67a3c59d26d2c54a8c478c842208bb0b345fbf6036f49f561 + # via types-setuptools +types-pyopenssl==23.0.0.3 \ + --hash=sha256:6ca54d593f8b946f9570f9ed7457c41da3b518feff5e344851941a6209bea62b \ + --hash=sha256:847ab17a16475a882dc29898648a6a35ad0d3e11a5bba5aa8ab2f3435a8647cb + # via types-redis +types-redis==4.5.1.1 \ + --hash=sha256:081dfeec730df6e3f32ccbdafe3198873b7c02516c22d79cc2a40efdd69a3963 \ + --hash=sha256:c072e4824855f46d0a968509c3e0fa4789fc13b62d472064527bad3d1815aeed + # via -r requirements/dev-requirements.in +types-setuptools==67.3.0.1 \ + 
--hash=sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f \ + --hash=sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1 + # via -r requirements/dev-requirements.in typing-extensions==3.10.0.0 \ --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 - # via importlib-metadata + # via mypy wheel==0.37.0 \ --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad # via pip-tools -zipp==3.5.0 \ - --hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \ - --hash=sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4 - # via - # importlib-metadata - # pep517 # The following packages are considered to be unsafe in a requirements file: pip==21.2.4 \ From ced0c8e69ac1c7eda83ac63a6b2e4919fd0799c3 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 16 Feb 2023 14:45:09 -0500 Subject: [PATCH 319/352] Fix flake8 and mypy warnings Left one TODO for a confusing metaclass that I couldn't figure out. 
--- ex2.py | 1 - example.py | 1 - journal-example.py | 9 +++++---- securedrop_log/__init__.py | 4 +++- setup.py | 1 - tests/test_logger.py | 5 +++-- 6 files changed, 11 insertions(+), 10 deletions(-) diff --git a/ex2.py b/ex2.py index 9bb488e31..8813b4749 100644 --- a/ex2.py +++ b/ex2.py @@ -5,6 +5,5 @@ class Hello: def __init__(self, *args, **kwargs): self.logger = logging.getLogger(__name__) - def talk(self, msg): self.logger.debug(msg) diff --git a/example.py b/example.py index 1a582bbea..6b7a82876 100644 --- a/example.py +++ b/example.py @@ -10,7 +10,6 @@ def main(): logging.basicConfig(level=logging.DEBUG, handlers=[handler]) logger = logging.getLogger("example") - d = ex2.Hello() d.talk("This should be line 1") ex1.fire("Where are you in middle?") diff --git a/journal-example.py b/journal-example.py index 903412af9..3d53f4eba 100644 --- a/journal-example.py +++ b/journal-example.py @@ -1,6 +1,6 @@ import logging from securedrop_log import SecureDropLog -from systemd import journal +from systemd import journal # type: ignore[import] import select @@ -13,14 +13,15 @@ def main(): p = select.poll() p.register(j, j.get_events()) - while True: + while True: p.poll() if j.process() == journal.APPEND: for m in j: msg = "MSG: {}".format(m["MESSAGE"]) print(msg) - # TODO: Figure out why the log file in the logging VM is closing + # TODO: Figure out why the log file in the logging VM is closing logger.info(m["MESSAGE"]) + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/securedrop_log/__init__.py b/securedrop_log/__init__.py index a67d90f8e..4d67d5a8d 100644 --- a/securedrop_log/__init__.py +++ b/securedrop_log/__init__.py @@ -1,10 +1,12 @@ from logging import StreamHandler from subprocess import Popen, PIPE import threading +from typing import Dict class Singleton(type): - _ins = {} + # TODO: Add a better typehint here + _ins: Dict = {} _lock = threading.Lock() def __call__(cls, *args, **kwargs): diff --git a/setup.py b/setup.py 
index 0d3ccec97..520bac2c6 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,3 @@ -import pkgutil import setuptools with open("README.md", "r") as fh: diff --git a/tests/test_logger.py b/tests/test_logger.py index 174544f00..b7bed2966 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -12,7 +12,8 @@ def test_singleton_there_can_be_only_one(self, mock_popen): self.assertEqual(logger1.qubes_log, logger2.qubes_log) def test_singleton_raises_exception_for_dev(self, mock_popen): - logger1 = securedrop_log.SecureDropLog('name', 'logvmname') + # No exception raised + securedrop_log.SecureDropLog('name', 'logvmname') with self.assertRaises(Exception): - logger2 = securedrop_log.SecureDropLog('name2', 'logvmname2') + securedrop_log.SecureDropLog('name2', 'logvmname2') From 788aec82ebc23e494fb0884eb0a2ebb1384e6073 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 1 Sep 2022 20:46:10 -0400 Subject: [PATCH 320/352] Makefile: Use standard check-{black,isort} names This is what we use in our other repositories. 
--- Makefile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 7b5bcd5de..3f765e783 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,7 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities done .PHONY: lint -lint: isort-check black-check ## Run isort, black and flake8 +lint: check-isort check-black ## Run isort, black and flake8 @flake8 securedrop_proxy tests .PHONY: mypy @@ -37,16 +37,16 @@ mypy: ## Run mypy static type checker black: ## Run black for file formatting @black securedrop_proxy tests -.PHONY: black-check -black-check: ## Check Python source code formatting with black +.PHONY: check-black +check-black: ## Check Python source code formatting with black @black --check --diff securedrop_proxy tests .PHONY: isort isort: ## Run isort for file formatting @isort securedrop_proxy/*.py tests/*.py -.PHONY: isort-check -isort-check: ## Check isort for file formatting +.PHONY: check-isort +check-isort: ## Check isort for file formatting @isort --check-only --diff securedrop_proxy/*.py tests/*.py .PHONY: sync-requirements From a8c0f03bd516c81d6fe7600606f110271c53d29d Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 1 Sep 2022 21:04:47 -0400 Subject: [PATCH 321/352] Remove indirect dependencies from dev-requirements.in It seems like in 40635b6dab05 these were accidentally added to dev-requirements.in despite not being needed directly. 
--- requirements/dev-requirements.in | 8 ------ requirements/dev-requirements.txt | 47 ++++++++++--------------------- 2 files changed, 15 insertions(+), 40 deletions(-) diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in index 2fb106860..9f5c23c39 100644 --- a/requirements/dev-requirements.in +++ b/requirements/dev-requirements.in @@ -2,16 +2,8 @@ black coverage flake8 isort -mccabe -multidict mypy -mypy-extensions pip-tools -pycodestyle -pyflakes types-PyYAML types-requests -six vcrpy -wrapt -yarl diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 03fa746c2..507e21d32 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -110,9 +110,7 @@ isort==5.11.2 \ mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e - # via - # -r requirements/dev-requirements.in - # flake8 + # via flake8 multidict==6.0.3 \ --hash=sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce \ --hash=sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce \ @@ -188,9 +186,7 @@ multidict==6.0.3 \ --hash=sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000 \ --hash=sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253 \ --hash=sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544 - # via - # -r requirements/dev-requirements.in - # yarl + # via yarl mypy==0.991 \ --hash=sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d \ --hash=sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6 \ @@ -227,7 +223,6 @@ mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 # via - # -r requirements/dev-requirements.in # black # mypy 
packaging==22.0 \ @@ -242,6 +237,10 @@ pep517==0.13.0 \ --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 # via build +pip==22.3.1 \ + --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ + --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 + # via pip-tools pip-tools==6.12.0 \ --hash=sha256:8e22fbc84ede7ca522ba4b033c4fcf6a6419adabc75d24747be3d8262504489a \ --hash=sha256:f441603c63b16f4af0dd5026f7522a49eddec2bc8a4a4979af44e1f6b0a1c13e @@ -253,15 +252,11 @@ platformdirs==2.6.0 \ pycodestyle==2.10.0 \ --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 - # via - # -r requirements/dev-requirements.in - # flake8 + # via flake8 pyflakes==3.0.1 \ --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd - # via - # -r requirements/dev-requirements.in - # flake8 + # via flake8 pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ @@ -299,12 +294,14 @@ requests==2.26.0 \ --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 # via -r requirements/requirements.in +setuptools==65.6.3 \ + --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ + --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 + # via pip-tools six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb - # via - # -r requirements/dev-requirements.in 
- # vcrpy + # via vcrpy tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f @@ -409,9 +406,7 @@ wrapt==1.14.1 \ --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462 \ --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015 \ --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af - # via - # -r requirements/dev-requirements.in - # vcrpy + # via vcrpy yarl==1.8.2 \ --hash=sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87 \ --hash=sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89 \ @@ -487,16 +482,4 @@ yarl==1.8.2 \ --hash=sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b \ --hash=sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1 \ --hash=sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c - # via - # -r requirements/dev-requirements.in - # vcrpy - -# The following packages are considered to be unsafe in a requirements file: -pip==22.3.1 \ - --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ - --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 - # via pip-tools -setuptools==65.6.3 \ - --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ - --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 - # via pip-tools + # via vcrpy From 95a5c4b821670f11416c33054d81651d9de50482 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 1 Sep 2022 20:46:36 -0400 Subject: [PATCH 322/352] Update CI configuration, add bullseye and bookworm jobs This is mostly copy-pasted from securedrop-client, with steps that don't apply here removed. 
--- .circleci/config.yml | 140 +++++++++++++++++++++++++++++++++---------- Makefile | 2 +- 2 files changed, 110 insertions(+), 32 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index abb89ba3e..d151e370b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,22 +1,67 @@ --- common-steps: - - &run_tests + - &install_testing_dependencies run: - name: Install requirements and run tests + name: Install testing dependencies command: | - apt-get update && apt-get install -y make python3-venv + set -e + apt update && apt install -y git gnupg make python3-dev python3-venv + + - &install_build_dependencies + run: + name: Install build dependencies + command: | + set -e + apt update && apt install -y git make sudo + + - &run_unit_tests + run: + name: Install requirements and run unit tests + command: | + set -e + make venv + source .venv/bin/activate + export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata + make test + + - &run_lint + run: + name: Run lint, type checking, code formatting + command: | + set -e + make venv + source .venv/bin/activate + make lint + + - &check_security + run: + name: Run static analysis on source code to find security issues + command: | + set -e + make venv + source .venv/bin/activate + make bandit + + - &check_python_dependencies_for_vulnerabilities + run: + name: Check Python dependencies for known vulnerabilities + command: | + set -e make venv source .venv/bin/activate - make check + make safety - &install_packaging_dependencies run: - name: Install Debian packaging dependencies and download wheels + name: Install Debian packaging dependencies and download Python wheels command: | - apt-get update && apt-get install -y git git-lfs make sudo + set -x mkdir ~/packaging && cd ~/packaging + # local builds may not have an ssh url, so || true + git config --global --unset url.ssh://git@github.com.insteadof || true git clone https://github.com/freedomofpress/securedrop-debian-packaging.git cd 
securedrop-debian-packaging + apt-get update && apt-get install -y sudo make make install-deps PKG_DIR=~/project make requirements @@ -29,48 +74,83 @@ common-steps: # previous run step), else return 0. git diff --quiet - - &make_source_tarball - run: - name: Tag and make source tarball - command: | - cd ~/project - ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here - python3 setup.py sdist - - &build_debian_package run: name: Build debian package command: | cd ~/packaging/securedrop-debian-packaging export PKG_VERSION=1000.0 - export PKG_PATH=~/project + export PKG_PATH=~/project/ make securedrop-proxy -version: 2 +version: 2.1 + jobs: - build-bullseye: - docker: - - image: debian:bullseye + build: + parameters: ¶meters + image: + type: string + docker: &docker + - image: debian:<< parameters.image >> steps: + - *install_build_dependencies - checkout - *install_packaging_dependencies - *verify_requirements - - *make_source_tarball - *build_debian_package - test-bullseye: - docker: - - image: debian:bullseye + unit-test: + parameters: *parameters + docker: *docker + steps: + - *install_testing_dependencies + - checkout + - *run_unit_tests + - store_test_results: + path: test-results + + lint: + parameters: *parameters + docker: *docker + steps: + - *install_testing_dependencies + - checkout + - *run_lint + + check-security: + parameters: *parameters + docker: *docker steps: + - *install_testing_dependencies - checkout - - *run_tests + - *check_security + + check-python-security: + parameters: *parameters + docker: *docker + steps: + - *install_testing_dependencies + - checkout + - *check_python_dependencies_for_vulnerabilities + workflows: - version: 2 securedrop_proxy_ci: - jobs: - - test-bullseye - - build-bullseye + jobs: &jobs + - unit-test: + matrix: &matrix + parameters: + image: + - bullseye + - bookworm + - lint: + matrix: *matrix + - check-security: + matrix: *matrix + - check-python-security: + matrix: *matrix + - build: + 
matrix: *matrix nightly: triggers: @@ -80,6 +160,4 @@ workflows: branches: only: - main - jobs: - - test-bullseye - - build-bullseye + jobs: *jobs diff --git a/Makefile b/Makefile index 3f765e783..0ae4efa82 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,7 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities done .PHONY: lint -lint: check-isort check-black ## Run isort, black and flake8 +lint: check-isort check-black mypy ## Run isort, black and flake8 and mypy @flake8 securedrop_proxy tests .PHONY: mypy From f4c88555d693c7e03fcb62c71922f552c167b4af Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Wed, 15 Feb 2023 15:49:19 -0500 Subject: [PATCH 323/352] Configure mypy and add to CI mypy can be run with `make mypy` or as part of `make check` (used by CI). Dependency files updated with `make dev-requirements`. There's a bigger diff on bookworm because some of those dependencies aren't needed on Python 3.11+. Fixes #103. --- Makefile | 6 +- pyproject.toml | 2 + requirements/dev-bookworm-requirements.in | 5 +- requirements/dev-bookworm-requirements.txt | 96 ++++++++++------------ requirements/dev-bullseye-requirements.in | 5 +- requirements/dev-bullseye-requirements.txt | 44 +++++++++- 6 files changed, 102 insertions(+), 56 deletions(-) create mode 100644 pyproject.toml diff --git a/Makefile b/Makefile index aaa0624b8..aa2f18eed 100644 --- a/Makefile +++ b/Makefile @@ -51,7 +51,7 @@ update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the late done < 'requirements/dev-bookworm-requirements.in' .PHONY: check -check: lint semgrep test check-black ## Run linter and tests +check: lint mypy semgrep test check-black ## Run linter and tests .PHONY: check-black check-black: ## Check Python source code formatting with black @@ -66,6 +66,10 @@ test: ## Run tests lint: ## Run linter flake8 securedrop_export/ tests/ +.PHONY: mypy +mypy: ## Type check Python files + mypy . 
+ .PHONY: black black: ## Format Python source code with black @black ./ diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..5ec8cc186 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,2 @@ +[tool.mypy] +python_version = "3.9" diff --git a/requirements/dev-bookworm-requirements.in b/requirements/dev-bookworm-requirements.in index 5200c5299..1dd6e9bb7 100644 --- a/requirements/dev-bookworm-requirements.in +++ b/requirements/dev-bookworm-requirements.in @@ -4,4 +4,7 @@ pip-tools pytest pytest-cov pytest-mock -semgrep \ No newline at end of file +semgrep + +mypy +types-setuptools diff --git a/requirements/dev-bookworm-requirements.txt b/requirements/dev-bookworm-requirements.txt index 46cccac92..d89afb8c8 100644 --- a/requirements/dev-bookworm-requirements.txt +++ b/requirements/dev-bookworm-requirements.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in requirements/requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in # attrs==21.4.0 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ @@ -121,10 +121,6 @@ defusedxml==0.7.1 \ --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 # via semgrep -exceptiongroup==1.0.4 \ - --hash=sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828 \ - --hash=sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec - # via pytest face==22.0.0 \ --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ 
--hash=sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d @@ -153,10 +149,40 @@ mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via flake8 +mypy==1.0.0 \ + --hash=sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2 \ + --hash=sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593 \ + --hash=sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52 \ + --hash=sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c \ + --hash=sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f \ + --hash=sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21 \ + --hash=sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af \ + --hash=sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36 \ + --hash=sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805 \ + --hash=sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb \ + --hash=sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88 \ + --hash=sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5 \ + --hash=sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072 \ + --hash=sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1 \ + --hash=sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964 \ + --hash=sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7 \ + --hash=sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a \ + --hash=sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd \ + --hash=sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74 \ + --hash=sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43 \ + 
--hash=sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d \ + --hash=sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457 \ + --hash=sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c \ + --hash=sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af \ + --hash=sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf \ + --hash=sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d + # via -r requirements/dev-bookworm-requirements.in mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 - # via black + # via + # black + # mypy packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 @@ -254,42 +280,6 @@ ruamel-yaml==0.17.21 \ --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af # via semgrep -ruamel-yaml-clib==0.2.7 \ - --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ - --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ - --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ - --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ - --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ - --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ - --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ - --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ - --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ - 
--hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ - --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ - --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ - --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ - --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ - --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ - --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ - --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ - --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ - --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ - --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ - --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ - --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ - --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ - --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ - --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ - --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ - --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ - --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ - --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ - --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ - --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ - --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ - --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ - 
--hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 - # via ruamel-yaml semgrep==1.2.0 \ --hash=sha256:31f5f764ff114e2e56b3a93b09829f738cb9e287af7479e2c4714c77b10dc5c0 \ --hash=sha256:cba38f882c9fedd00462247474a991715d5c8faf169e38cfbf299c7c89ccad55 \ @@ -303,21 +293,25 @@ setuptools==65.6.3 \ tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via - # black - # build - # coverage - # pep517 - # pytest - # semgrep + # via semgrep tqdm==4.64.1 \ --hash=sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4 \ --hash=sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1 # via semgrep +types-docutils==0.19.1.4 \ + --hash=sha256:1b64b21b609ff1fc7791d3d930f14b56b36ad09029fd97e45e34cc889d671b5f \ + --hash=sha256:870d71f3663141f67a3c59d26d2c54a8c478c842208bb0b345fbf6036f49f561 + # via types-setuptools +types-setuptools==67.3.0.1 \ + --hash=sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f \ + --hash=sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1 + # via -r requirements/dev-bookworm-requirements.in typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e - # via semgrep + # via + # mypy + # semgrep ujson==5.6.0 \ --hash=sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b \ --hash=sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169 \ diff --git a/requirements/dev-bullseye-requirements.in b/requirements/dev-bullseye-requirements.in index 5200c5299..1dd6e9bb7 100644 --- a/requirements/dev-bullseye-requirements.in +++ b/requirements/dev-bullseye-requirements.in @@ -4,4 +4,7 @@ pip-tools pytest pytest-cov pytest-mock -semgrep \ No newline at end of file +semgrep + +mypy 
+types-setuptools diff --git a/requirements/dev-bullseye-requirements.txt b/requirements/dev-bullseye-requirements.txt index 36465c3fb..9feaaffa2 100644 --- a/requirements/dev-bullseye-requirements.txt +++ b/requirements/dev-bullseye-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in requirements/requirements.in +# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in # attrs==21.4.0 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ @@ -153,10 +153,40 @@ mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via flake8 +mypy==1.0.0 \ + --hash=sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2 \ + --hash=sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593 \ + --hash=sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52 \ + --hash=sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c \ + --hash=sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f \ + --hash=sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21 \ + --hash=sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af \ + --hash=sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36 \ + --hash=sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805 \ + --hash=sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb \ + --hash=sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88 \ + --hash=sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5 
\ + --hash=sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072 \ + --hash=sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1 \ + --hash=sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964 \ + --hash=sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7 \ + --hash=sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a \ + --hash=sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd \ + --hash=sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74 \ + --hash=sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43 \ + --hash=sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d \ + --hash=sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457 \ + --hash=sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c \ + --hash=sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af \ + --hash=sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf \ + --hash=sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d + # via -r requirements/dev-bullseye-requirements.in mypy-extensions==0.4.3 \ --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 - # via black + # via + # black + # mypy packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 @@ -307,6 +337,7 @@ tomli==2.0.1 \ # black # build # coverage + # mypy # pep517 # pytest # semgrep @@ -314,11 +345,20 @@ tqdm==4.64.1 \ --hash=sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4 \ --hash=sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1 # via semgrep +types-docutils==0.19.1.4 \ + 
--hash=sha256:1b64b21b609ff1fc7791d3d930f14b56b36ad09029fd97e45e34cc889d671b5f \ + --hash=sha256:870d71f3663141f67a3c59d26d2c54a8c478c842208bb0b345fbf6036f49f561 + # via types-setuptools +types-setuptools==67.3.0.1 \ + --hash=sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f \ + --hash=sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1 + # via -r requirements/dev-bullseye-requirements.in typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via # black + # mypy # semgrep ujson==5.6.0 \ --hash=sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b \ From 3291ba72f21b9c04a9e3d0e3d7ce54b45d500bf7 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Wed, 15 Feb 2023 15:58:10 -0500 Subject: [PATCH 324/352] Fix straightforward mypy errors disk/cli.py: * `grep.stdout` could be None if we passed e.g. stdout=subprocess.DEVNULL. Since mypy can't assert that we used PIPE, just ignore the error. * Always have `is_removable` be a bool. * `_check_partitions` actually returns bytes since we don't pass `text=True` to the subprocess calls. * Check that volume.mountpoint is not None before trying to unmount it. disk/legacy_service.py: * mypy doesn't know that `num_devices` is a non-negative integer so it thinks we're missing a return. Raise an exception as an unreachable logic error to satisfy it. disk/volume.py: * Newer mypy doesn't imply Optional so we need to explicitly state it. main.py: * mypy doesn't know we checked all the possible enum values (maybe it could using a 3.11+ match statement?), so raise an exception as an unreachable logic error to satisfy it. * Same explicit Optional needed for `_exit_gracefully`, and then pass it through to `_write_status`. setup.py: * pkgutil.get_data() can return None triggering mypy, but it's simpler for us to just read the file directly. 
(Probably we should do what client does and just stick the real version in setup.py, but that's a bigger change than I want to make here.) * `classifiers` is typed as List[str], not a tuple. tests/disk/test_cli.py: * The arguments to CalledProcessError were reversed. --- securedrop_export/disk/cli.py | 11 ++++++----- securedrop_export/disk/legacy_service.py | 4 ++++ securedrop_export/disk/volume.py | 3 ++- securedrop_export/main.py | 10 ++++++++-- setup.py | 9 ++++----- tests/disk/test_cli.py | 4 ++-- 6 files changed, 26 insertions(+), 15 deletions(-) diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py index 5d07c9d53..503d80b86 100644 --- a/securedrop_export/disk/cli.py +++ b/securedrop_export/disk/cli.py @@ -44,7 +44,7 @@ def get_connected_devices(self) -> List[str]: stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) - command_output = grep.stdout.readlines() + command_output = grep.stdout.readlines() # type: ignore[union-attr] # The first word in each element of the command_output list is the device name attached_devices = [x.decode("utf8").split()[0] for x in command_output] @@ -68,8 +68,9 @@ def _get_removable_devices(self, attached_devices: List[str]) -> List[str]: stderr=subprocess.PIPE, ) - # 0 for non-removable device, 1 for removable - is_removable = int(removable.decode("utf8").strip()) + # removable is "0" for non-removable device, "1" for removable, + # convert that into a Python boolean + is_removable = bool(int(removable.decode("utf8").strip())) except subprocess.CalledProcessError: # Not a removable device @@ -116,7 +117,7 @@ def get_partitioned_device(self, blkid: str) -> str: logger.error("Error checking device partitions") raise ExportException(sdstatus=Status.DEVICE_ERROR) - def _check_partitions(self, blkid: str) -> str: + def _check_partitions(self, blkid: str) -> bytes: try: logger.debug(f"Checking device partitions on {blkid}") device_and_partitions = subprocess.check_output( @@ -382,7 +383,7 @@ def 
_unmount_volume(self, volume: Volume) -> Volume: """ Helper. Unmount volume """ - if os.path.exists(volume.mountpoint): + if volume.mountpoint and os.path.exists(volume.mountpoint): logger.debug(f"Unmounting drive from {volume.mountpoint}") try: subprocess.check_call(["sudo", "umount", volume.mountpoint]) diff --git a/securedrop_export/disk/legacy_service.py b/securedrop_export/disk/legacy_service.py index 279a84d9c..555c1e697 100644 --- a/securedrop_export/disk/legacy_service.py +++ b/securedrop_export/disk/legacy_service.py @@ -37,6 +37,10 @@ def check_connected_devices(self) -> LegacyStatus: raise ExportException( sdstatus=LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED ) + else: + # Unreachable, num_devices is a non-negative integer, + # and we handled all possible cases already + raise ValueError(f"unreachable: num_devices is negative: {num_devices}") def check_disk_format(self) -> LegacyStatus: """ diff --git a/securedrop_export/disk/volume.py b/securedrop_export/disk/volume.py index c6bc2f8ae..195318c94 100644 --- a/securedrop_export/disk/volume.py +++ b/securedrop_export/disk/volume.py @@ -1,5 +1,6 @@ from enum import Enum import os +from typing import Optional class EncryptionScheme(Enum): @@ -26,7 +27,7 @@ def __init__( device_name: str, mapped_name: str, encryption: EncryptionScheme, - mountpoint: str = None, + mountpoint: Optional[str] = None, ): self.device_name = device_name self.mapped_name = mapped_name diff --git a/securedrop_export/main.py b/securedrop_export/main.py index b52d960c9..f0a8b80b6 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -3,6 +3,7 @@ import platform import logging import sys +from typing import Optional from securedrop_export.archive import Archive, Metadata from securedrop_export.command import Command @@ -143,8 +144,13 @@ def _start_service(submission: Archive) -> Status: elif submission.command is Command.CHECK_VOLUME: return ExportService(submission).check_disk_format() + # Unreachable + raise 
ValueError( + f"unreachable: unknown submission.command value: {submission.command}" + ) -def _exit_gracefully(submission: Archive, status: BaseStatus = None): + +def _exit_gracefully(submission: Archive, status: Optional[BaseStatus] = None): """ Write status code, ensure file cleanup, and exit with return code 0. Non-zero exit values will cause the system to try alternative @@ -170,7 +176,7 @@ def _exit_gracefully(submission: Archive, status: BaseStatus = None): sys.exit(0) -def _write_status(status: BaseStatus): +def _write_status(status: Optional[BaseStatus]): """ Write string to stderr. """ diff --git a/setup.py b/setup.py index b04979ddb..19256c2e6 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,10 @@ -import pkgutil import setuptools with open("README.md", "r") as fh: long_description = fh.read() -version = pkgutil.get_data("securedrop_export", "VERSION").decode("utf-8") -version = version.strip() +with open("securedrop_export/VERSION") as f: + version = f.read().strip() setuptools.setup( name="securedrop-export", @@ -23,14 +22,14 @@ package_data={ "securedrop_export": ["VERSION"], }, - classifiers=( + classifiers=[ "Development Status :: 3 - Alpha", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Intended Audience :: Developers", "Operating System :: OS Independent", - ), + ], entry_points={ "console_scripts": ["send-to-usb = securedrop_export.main:entrypoint"] }, diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index 310e4b2d2..4853666c7 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -210,7 +210,7 @@ def test_get_luks_volume_still_locked(self, mocked_subprocess, mocked_os_call): @mock.patch( "subprocess.check_output", - side_effect=subprocess.CalledProcessError("check_output", 1), + side_effect=subprocess.CalledProcessError(1, "check_output"), ) def test_get_luks_volume_error(self, 
mocked_subprocess): with pytest.raises(ExportException) as ex: @@ -277,7 +277,7 @@ def test_unlock_luks_volume_passphrase_failure(self, mocker): self.cli.unlock_luks_volume(vol, key) @mock.patch( - "subprocess.Popen", side_effect=subprocess.CalledProcessError("1", "Popen") + "subprocess.Popen", side_effect=subprocess.CalledProcessError(1, "Popen") ) def test_unlock_luks_volume_luksOpen_exception(self, mocked_subprocess): pd = Volume( From 87fd14cae6918b5899c464302889f507e7df3104 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Wed, 15 Feb 2023 15:58:43 -0500 Subject: [PATCH 325/352] Add FIXMEs and suppress mypy errors that weren't obvious --- securedrop_export/disk/cli.py | 5 ++++- securedrop_export/disk/legacy_service.py | 3 +++ securedrop_export/disk/service.py | 2 ++ securedrop_export/main.py | 6 ++++-- 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py index 503d80b86..b37084f37 100644 --- a/securedrop_export/disk/cli.py +++ b/securedrop_export/disk/cli.py @@ -342,7 +342,10 @@ def write_data_to_device( that cleanup happens even if export fails or only partially succeeds. """ try: - target_path = os.path.join(device.mountpoint, submission_target_dirname) + # TODO: is it possible for device.mountpoint to be None here? 
+ target_path = os.path.join( + device.mountpoint, submission_target_dirname # type: ignore[arg-type] + ) subprocess.check_call(["mkdir", target_path]) export_data = os.path.join(submission_tmpdir, "export_data/") diff --git a/securedrop_export/disk/legacy_service.py b/securedrop_export/disk/legacy_service.py index 555c1e697..ef6bda151 100644 --- a/securedrop_export/disk/legacy_service.py +++ b/securedrop_export/disk/legacy_service.py @@ -60,6 +60,9 @@ def check_disk_format(self) -> LegacyStatus: # backwards compatibility, this is the only expected status # at this stage return LegacyStatus.LEGACY_USB_ENCRYPTED + else: + # FIXME: What should be returned if there is more than 1 connected device? + raise NotImplementedError("???") except ExportException as ex: logger.error( diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index b5702a474..e2ddfa5f3 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -24,6 +24,8 @@ def run(self, arg: str) -> Status: """ Run export actions. """ + # FIXME: is this supposed to be empty or abstract? 
+ raise NotImplementedError("???") def scan_all_devices(self) -> Status: """ diff --git a/securedrop_export/main.py b/securedrop_export/main.py index f0a8b80b6..c753000f0 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -140,9 +140,11 @@ def _start_service(submission: Archive) -> Status: elif submission.command is Command.EXPORT: return ExportService(submission).export() elif submission.command is Command.CHECK_USBS: - return ExportService(submission).check_connected_devices() + # FIXME: this returns LegacyStatus, not Status + return ExportService(submission).check_connected_devices() # type: ignore[return-value] elif submission.command is Command.CHECK_VOLUME: - return ExportService(submission).check_disk_format() + # FIXME: this returns LegacyStatus, not Status + return ExportService(submission).check_disk_format() # type: ignore[return-value] # Unreachable raise ValueError( From c0288e48bda4edd24402abf555410ed84c34ec99 Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 28 Feb 2023 12:31:01 -0800 Subject: [PATCH 326/352] Remove unused methods, fix type errors --- securedrop_export/disk/__init__.py | 1 + securedrop_export/disk/legacy_service.py | 4 ++-- securedrop_export/disk/service.py | 7 ------- securedrop_export/main.py | 11 +++++------ 4 files changed, 8 insertions(+), 15 deletions(-) diff --git a/securedrop_export/disk/__init__.py b/securedrop_export/disk/__init__.py index 6af1f1a09..e61094546 100644 --- a/securedrop_export/disk/__init__.py +++ b/securedrop_export/disk/__init__.py @@ -1 +1,2 @@ from .legacy_service import Service as LegacyService # noqa: F401 +from .legacy_status import Status as LegacyStatus # noqa: F401 diff --git a/securedrop_export/disk/legacy_service.py b/securedrop_export/disk/legacy_service.py index ef6bda151..d6b883a9b 100644 --- a/securedrop_export/disk/legacy_service.py +++ b/securedrop_export/disk/legacy_service.py @@ -61,8 +61,8 @@ def check_disk_format(self) -> LegacyStatus: # at this stage return 
LegacyStatus.LEGACY_USB_ENCRYPTED else: - # FIXME: What should be returned if there is more than 1 connected device? - raise NotImplementedError("???") + logger.error("Multiple partitions not supported") + return LegacyStatus.LEGACY_USB_ENCRYPTION_NOT_SUPPORTED except ExportException as ex: logger.error( diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index e2ddfa5f3..22e524b59 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -20,13 +20,6 @@ class Service: def __init__(self, cli: CLI): self.cli = cli - def run(self, arg: str) -> Status: - """ - Run export actions. - """ - # FIXME: is this supposed to be empty or abstract? - raise NotImplementedError("???") - def scan_all_devices(self) -> Status: """ Check all connected devices and return current device diff --git a/securedrop_export/main.py b/securedrop_export/main.py index c753000f0..bc55ae159 100755 --- a/securedrop_export/main.py +++ b/securedrop_export/main.py @@ -12,6 +12,7 @@ from securedrop_export.exceptions import ExportException from securedrop_export.disk import LegacyService as ExportService +from securedrop_export.disk import LegacyStatus from securedrop_export.print import Service as PrintService from logging.handlers import TimedRotatingFileHandler, SysLogHandler @@ -124,7 +125,7 @@ def _configure_logging(): raise ExportException(sdstatus=Status.ERROR_LOGGING) from ex -def _start_service(submission: Archive) -> Status: +def _start_service(submission: Archive) -> LegacyStatus: """ Start print or export service. 
""" @@ -140,14 +141,12 @@ def _start_service(submission: Archive) -> Status: elif submission.command is Command.EXPORT: return ExportService(submission).export() elif submission.command is Command.CHECK_USBS: - # FIXME: this returns LegacyStatus, not Status - return ExportService(submission).check_connected_devices() # type: ignore[return-value] + return ExportService(submission).check_connected_devices() elif submission.command is Command.CHECK_VOLUME: - # FIXME: this returns LegacyStatus, not Status - return ExportService(submission).check_disk_format() # type: ignore[return-value] + return ExportService(submission).check_disk_format() # Unreachable - raise ValueError( + raise ExportException( f"unreachable: unknown submission.command value: {submission.command}" ) From d57c18239d1950c55a085896e78e610536a6106d Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 28 Feb 2023 12:15:10 -0800 Subject: [PATCH 327/352] Create MountedVolume object instead of using nullable mountpoint attribute for Volume. 
--- securedrop_export/disk/cli.py | 64 ++++++++++++------------ securedrop_export/disk/legacy_service.py | 11 ++-- securedrop_export/disk/service.py | 8 +-- securedrop_export/disk/volume.py | 34 ++++++++++--- tests/disk/test_cli.py | 34 +++++++------ tests/disk/test_service.py | 4 +- tests/disk/test_volume.py | 26 ++++------ 7 files changed, 101 insertions(+), 80 deletions(-) diff --git a/securedrop_export/disk/cli.py b/securedrop_export/disk/cli.py index b37084f37..abdc0c104 100644 --- a/securedrop_export/disk/cli.py +++ b/securedrop_export/disk/cli.py @@ -2,11 +2,11 @@ import os import subprocess -from typing import List, Optional +from typing import List, Optional, Union from securedrop_export.exceptions import ExportException -from .volume import EncryptionScheme, Volume +from .volume import EncryptionScheme, Volume, MountedVolume from .status import Status logger = logging.getLogger(__name__) @@ -178,17 +178,19 @@ def _get_luks_name_from_headers(self, device: str) -> str: logger.error("Failed to dump LUKS header") raise ExportException(sdstatus=Status.DEVICE_ERROR) from ex - def get_luks_volume(self, device: str) -> Volume: + def get_luks_volume(self, device: str) -> Union[Volume, MountedVolume]: """ Given a string corresponding to a LUKS-partitioned volume, return a corresponding Volume object. - If LUKS volume is already mounted, existing mountpoint will be preserved. - If LUKS volume is unlocked but not mounted, volume will be mounted at _DEFAULT_MOUNTPOINT. + If LUKS volume is already mounted, existing mountpoint will be preserved and a + MountedVolume object will be returned. + If LUKS volume is unlocked but not mounted, volume will be mounted at _DEFAULT_MOUNTPOINT, + and a MountedVolume object will be returned. - If device is still locked, mountpoint will not be set. Once the decrpytion passphrase is - available, call unlock_luks_volume(), passing the Volume object and passphrase, to - unlock the volume. 
+ If device is still locked, mountpoint will not be set, and a Volume object will be retuned. + Once the decrpytion passphrase is available, call unlock_luks_volume(), passing the Volume + object and passphrase to unlock the volume. Raise ExportException if errors are encountered. """ @@ -205,6 +207,7 @@ def get_luks_volume(self, device: str) -> Volume: # If the device has been unlocked, we can see if it's mounted and # use the existing mountpoint, or mount it ourselves. + # Either way, return a MountedVolume. if os.path.exists(os.path.join("/dev/mapper/", mapped_name)): return self.mount_volume(luks_volume) @@ -273,9 +276,9 @@ def _get_mountpoint(self, volume: Volume) -> Optional[str]: logger.error(ex) raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex - def mount_volume(self, volume: Volume) -> Volume: + def mount_volume(self, volume: Volume) -> MountedVolume: """ - Given an unlocked LUKS volume, return a mounted LUKS volume. + Given an unlocked LUKS volume, return MountedVolume object. If volume is already mounted, mountpoint is not changed. Otherwise, volume is mounted at _DEFAULT_MOUNTPOINT. @@ -289,23 +292,19 @@ def mount_volume(self, volume: Volume) -> Volume: mountpoint = self._get_mountpoint(volume) if mountpoint: - logger.info("The device is already mounted") - if volume.mountpoint is not mountpoint: - logger.warning("Mountpoint was inaccurate, updating") - - volume.mountpoint = mountpoint - return volume + logger.info("The device is already mounted--use existing mountpoint") + return MountedVolume.from_volume(volume, mountpoint) else: logger.info("Mount volume at default mountpoint") return self._mount_at_mountpoint(volume, self._DEFAULT_MOUNTPOINT) - def _mount_at_mountpoint(self, volume: Volume, mountpoint: str) -> Volume: + def _mount_at_mountpoint(self, volume: Volume, mountpoint: str) -> MountedVolume: """ Mount a volume at the supplied mountpoint, creating the mountpoint directory and adjusting permissions (user:user) if need be. 
`mountpoint` must be a full path. - Return Volume object. + Return MountedVolume object. Raise ExportException if unable to mount volume at target mountpoint. """ if not os.path.exists(mountpoint): @@ -325,27 +324,26 @@ def _mount_at_mountpoint(self, volume: Volume, mountpoint: str) -> Volume: subprocess.check_call(["sudo", "mount", mapped_device_path, mountpoint]) subprocess.check_call(["sudo", "chown", "-R", "user:user", mountpoint]) - volume.mountpoint = mountpoint - except subprocess.CalledProcessError as ex: logger.error(ex) raise ExportException(sdstatus=Status.ERROR_MOUNT) from ex - return volume + return MountedVolume.from_volume(volume, mountpoint) def write_data_to_device( - self, submission_tmpdir: str, submission_target_dirname: str, device: Volume + self, + submission_tmpdir: str, + submission_target_dirname: str, + device: MountedVolume, ): """ Move files to drive (overwrites files with same filename) and unmount drive. Drive is unmounted and files are cleaned up as part of the `finally` block to ensure that cleanup happens even if export fails or only partially succeeds. """ + try: - # TODO: is it possible for device.mountpoint to be None here? - target_path = os.path.join( - device.mountpoint, submission_target_dirname # type: ignore[arg-type] - ) + target_path = os.path.join(device.mountpoint, submission_target_dirname) subprocess.check_call(["mkdir", target_path]) export_data = os.path.join(submission_tmpdir, "export_data/") @@ -357,12 +355,13 @@ def write_data_to_device( ) except (subprocess.CalledProcessError, OSError) as ex: + logger.error(ex) raise ExportException(sdstatus=Status.ERROR_EXPORT) from ex finally: self.cleanup_drive_and_tmpdir(device, submission_tmpdir) - def cleanup_drive_and_tmpdir(self, volume: Volume, submission_tmpdir: str): + def cleanup_drive_and_tmpdir(self, volume: MountedVolume, submission_tmpdir: str): """ Post-export cleanup method. Unmount and lock drive and remove temporary directory. 
Currently called at end of `write_data_to_device()` to ensure @@ -382,15 +381,14 @@ def cleanup_drive_and_tmpdir(self, volume: Volume, submission_tmpdir: str): logger.error("Error syncing filesystem") raise ExportException(sdstatus=Status.ERROR_EXPORT_CLEANUP) from ex - def _unmount_volume(self, volume: Volume) -> Volume: + def _unmount_volume(self, volume: MountedVolume) -> Volume: """ Helper. Unmount volume """ - if volume.mountpoint and os.path.exists(volume.mountpoint): + if os.path.exists(volume.mountpoint): logger.debug(f"Unmounting drive from {volume.mountpoint}") try: subprocess.check_call(["sudo", "umount", volume.mountpoint]) - volume.mountpoint = None except subprocess.CalledProcessError as ex: logger.error("Error unmounting device") @@ -398,7 +396,11 @@ def _unmount_volume(self, volume: Volume) -> Volume: else: logger.info("Mountpoint does not exist; volume was already unmounted") - return volume + return Volume( + device_name=volume.device_name, + mapped_name=volume.mapped_name, + encryption=volume.encryption, + ) def _close_luks_volume(self, unlocked_device: Volume) -> None: """ diff --git a/securedrop_export/disk/legacy_service.py b/securedrop_export/disk/legacy_service.py index d6b883a9b..3dbe6acaa 100644 --- a/securedrop_export/disk/legacy_service.py +++ b/securedrop_export/disk/legacy_service.py @@ -5,6 +5,7 @@ from .cli import CLI from .legacy_status import Status as LegacyStatus from .status import Status as Status +from .volume import MountedVolume logger = logging.getLogger(__name__) @@ -94,16 +95,18 @@ def export(self): if self.cli.is_luks_volume(device): volume = self.cli.get_luks_volume(device) logger.info("Check if writable") - if not volume.writable: + if not isinstance(volume, MountedVolume): logger.info("Not writable-will try unlocking") volume = self.cli.unlock_luks_volume( volume, self.submission.encryption_key ) - volume = self.cli.mount_volume(volume) + mounted_volume = self.cli.mount_volume(volume) - logger.info(f"Export submission 
to {volume.mountpoint}") + logger.info(f"Export submission to {mounted_volume.mountpoint}") self.cli.write_data_to_device( - self.submission.tmpdir, self.submission.target_dirname, volume + self.submission.tmpdir, + self.submission.target_dirname, + mounted_volume, ) # This is SUCCESS_EXPORT, but the 0.7.0 client is not expecting # a return status from a successful export operation. diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index 22e524b59..0c983c516 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -4,7 +4,7 @@ from .cli import CLI from .status import Status -from .volume import Volume +from .volume import Volume, MountedVolume from securedrop_export.exceptions import ExportException @@ -55,7 +55,7 @@ def scan_single_device(self, blkid: str) -> Status: self.volume = self.cli.get_luks_volume(target) # See if it's unlocked and mounted - if self.volume.writable: + if isinstance(self.volume, MountedVolume): logger.debug("LUKS device is already mounted") return Status.DEVICE_WRITABLE else: @@ -85,7 +85,7 @@ def unlock_device(self, passphrase: str, volume: Volume) -> Status: try: self.volume = self.cli.unlock_luks_volume(volume, passphrase) - if volume.writable: + if isinstance(volume, MountedVolume): return Status.DEVICE_WRITABLE else: return Status.ERROR_UNLOCK_LUKS @@ -98,7 +98,7 @@ def unlock_device(self, passphrase: str, volume: Volume) -> Status: logger.warning("Tried to unlock_device but no current volume detected.") return Status.NO_DEVICE_DETECTED - def write_to_device(self, volume: Volume, data: Archive) -> Status: + def write_to_device(self, volume: MountedVolume, data: Archive) -> Status: """ Export data to volume. CLI unmounts and locks volume on completion, even if export was unsuccessful. 
diff --git a/securedrop_export/disk/volume.py b/securedrop_export/disk/volume.py index 195318c94..6d41bce47 100644 --- a/securedrop_export/disk/volume.py +++ b/securedrop_export/disk/volume.py @@ -1,6 +1,5 @@ from enum import Enum import os -from typing import Optional class EncryptionScheme(Enum): @@ -27,11 +26,9 @@ def __init__( device_name: str, mapped_name: str, encryption: EncryptionScheme, - mountpoint: Optional[str] = None, ): self.device_name = device_name self.mapped_name = mapped_name - self.mountpoint = mountpoint self.encryption = encryption @property @@ -45,10 +42,6 @@ def encryption(self, scheme: EncryptionScheme): else: self._encryption = EncryptionScheme.UNKNOWN - @property - def writable(self) -> bool: - return self.unlocked and self.mountpoint is not None - @property def unlocked(self) -> bool: return ( @@ -58,3 +51,30 @@ def unlocked(self) -> bool: os.path.join(self.MAPPED_VOLUME_PREFIX, self.mapped_name) ) ) + + +class MountedVolume(Volume): + """ + An unlocked and mounted Volume. 
+ """ + + def __init__( + self, + device_name: str, + mapped_name: str, + encryption: EncryptionScheme, + mountpoint: str, + ): + super().__init__( + device_name=device_name, mapped_name=mapped_name, encryption=encryption + ) + self.mountpoint = mountpoint + + @classmethod + def from_volume(cls, vol: Volume, mountpoint: str): + return cls( + device_name=vol.device_name, + mapped_name=vol.mapped_name, + encryption=vol.encryption, + mountpoint=mountpoint, + ) diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index 4853666c7..8326bb8e9 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -4,7 +4,7 @@ import subprocess from securedrop_export.disk.cli import CLI -from securedrop_export.disk.volume import EncryptionScheme, Volume +from securedrop_export.disk.volume import EncryptionScheme, Volume, MountedVolume from securedrop_export.exceptions import ExportException from securedrop_export.disk.status import Status @@ -301,8 +301,9 @@ def test_mount_volume(self, mocked_call, mocked_output, mocked_path): mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - self.cli.mount_volume(vol) - assert vol.mountpoint is self.cli._DEFAULT_MOUNTPOINT + mv = self.cli.mount_volume(vol) + assert isinstance(mv, MountedVolume) + assert mv.mountpoint is self.cli._DEFAULT_MOUNTPOINT @mock.patch("os.path.exists", return_value=True) @mock.patch( @@ -319,6 +320,7 @@ def test_mount_volume_already_mounted( ) result = self.cli.mount_volume(md) assert result.mountpoint == "/dev/pretend/luks-id-123456" + assert isinstance(result, MountedVolume) @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_output", return_value=b"\n") @@ -329,7 +331,9 @@ def test_mount_volume_mkdir(self, mocked_output, mocked_subprocess, mocked_path) mapped_name=_PRETEND_LUKS_ID, encryption=EncryptionScheme.LUKS, ) - assert self.cli.mount_volume(md).mapped_name == _PRETEND_LUKS_ID + mv = self.cli.mount_volume(md) + assert mv.mapped_name == _PRETEND_LUKS_ID + 
assert isinstance(mv, MountedVolume) @mock.patch("subprocess.check_output", return_value=b"\n") @mock.patch( @@ -361,8 +365,7 @@ def test_mount_at_mountpoint_mkdir_error(self, mocked_subprocess, mocked_path): ) with pytest.raises(ExportException) as ex: - volume = self.cli._mount_at_mountpoint(md, self.cli._DEFAULT_MOUNTPOINT) - assert not volume.writable + self.cli._mount_at_mountpoint(md, self.cli._DEFAULT_MOUNTPOINT) assert ex.value.sdstatus is Status.ERROR_MOUNT @@ -379,15 +382,14 @@ def test_mount_at_mountpoint_mounting_error(self, mocked_subprocess, mocked_path ) with pytest.raises(ExportException) as ex: - volume = self.cli._mount_at_mountpoint(md, self.cli._DEFAULT_MOUNTPOINT) - assert not volume.writable + self.cli._mount_at_mountpoint(md, self.cli._DEFAULT_MOUNTPOINT) assert ex.value.sdstatus is Status.ERROR_MOUNT @mock.patch("os.path.exists", return_value=True) @mock.patch("subprocess.check_call", return_value=0) def test__unmount_volume(self, mocked_subprocess, mocked_mountpath): - mounted = Volume( + mounted = MountedVolume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, mountpoint=self.cli._DEFAULT_MOUNTPOINT, @@ -395,7 +397,7 @@ def test__unmount_volume(self, mocked_subprocess, mocked_mountpath): ) result = self.cli._unmount_volume(mounted) - assert result.mountpoint is None + assert not isinstance(result, MountedVolume) @mock.patch("os.path.exists", return_value=True) @mock.patch( @@ -403,7 +405,7 @@ def test__unmount_volume(self, mocked_subprocess, mocked_mountpath): side_effect=subprocess.CalledProcessError(1, "check_call"), ) def test__unmount_volume_error(self, mocked_subprocess, mocked_mountpath): - mounted = Volume( + mounted = MountedVolume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, mountpoint=self.cli._DEFAULT_MOUNTPOINT, @@ -459,7 +461,7 @@ def test_write_to_disk(self, mock_check_call): patch.return_value = mock.MagicMock() patch.start() - vol = Volume( + vol = MountedVolume( 
device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, mountpoint=self.cli._DEFAULT_MOUNTPOINT, @@ -484,7 +486,7 @@ def test_write_to_disk_error_still_does_cleanup(self, mock_call): patch.return_value = mock.MagicMock() patch.start() - vol = Volume( + vol = MountedVolume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, mountpoint=self.cli._DEFAULT_MOUNTPOINT, @@ -519,9 +521,9 @@ def test_cleanup_drive_and_tmpdir(self, mock_subprocess, mocked_path): vol = Volume( device_name=_DEFAULT_USB_DEVICE_ONE_PART, mapped_name=_PRETEND_LUKS_ID, - mountpoint=self.cli._DEFAULT_MOUNTPOINT, encryption=EncryptionScheme.LUKS, ) + mv = MountedVolume.from_volume(vol, mountpoint=self.cli._DEFAULT_MOUNTPOINT) close_patch = mock.patch.object(self.cli, "_close_luks_volume") remove_tmpdir_patch = mock.patch.object(self.cli, "_remove_temp_directory") @@ -530,9 +532,9 @@ def test_cleanup_drive_and_tmpdir(self, mock_subprocess, mocked_path): rm_tpdir_mock = remove_tmpdir_patch.start() # That was all setup. 
Here's our test - self.cli.cleanup_drive_and_tmpdir(vol, submission.tmpdir) + self.cli.cleanup_drive_and_tmpdir(mv, submission.tmpdir) - close_mock.assert_called_once_with(vol) + close_mock.assert_called_once() rm_tpdir_mock.assert_called_once_with(submission.tmpdir) # Undo patch changes diff --git a/tests/disk/test_service.py b/tests/disk/test_service.py index 17ad3266b..d7053e1d0 100644 --- a/tests/disk/test_service.py +++ b/tests/disk/test_service.py @@ -6,7 +6,7 @@ from securedrop_export.exceptions import ExportException from securedrop_export.disk.legacy_status import Status as LegacyStatus from securedrop_export.disk.status import Status as Status -from securedrop_export.disk.volume import Volume, EncryptionScheme +from securedrop_export.disk.volume import Volume, MountedVolume, EncryptionScheme from securedrop_export.archive import Archive, Metadata from securedrop_export.disk.legacy_service import Service from securedrop_export.disk.cli import CLI @@ -27,7 +27,7 @@ def setup_class(cls): mapped_name="fake-luks-id-123456", encryption=EncryptionScheme.LUKS, ) - cls.mock_luks_volume_mounted = Volume( + cls.mock_luks_volume_mounted = MountedVolume( device_name=SAMPLE_OUTPUT_USB, mapped_name="fake-luks-id-123456", mountpoint="/media/usb", diff --git a/tests/disk/test_volume.py b/tests/disk/test_volume.py index f28e711c7..10d4c6894 100644 --- a/tests/disk/test_volume.py +++ b/tests/disk/test_volume.py @@ -1,6 +1,6 @@ from unittest import mock -from securedrop_export.disk.volume import Volume, EncryptionScheme +from securedrop_export.disk.volume import Volume, MountedVolume, EncryptionScheme class TestVolume: @@ -34,23 +34,17 @@ def test_is_unlocked_false_no_path(self, mock_os_path): assert not volume.unlocked - @mock.patch("os.path.exists", return_value=True) - def test_writable_false(self, mock_os_path): - vol = Volume( - device_name="dev/sda1", - mapped_name="pretend-luks-id", - encryption=EncryptionScheme.LUKS, - ) - - assert not vol.writable +class 
TestMountedVolume: @mock.patch("os.path.exists", return_value=True) - def test_writable(self, mock_os_path): - vol = Volume( - device_name="dev/sda1", - mapped_name="pretend-luks-id", + def test_is_unlocked_true(self, mock_os_path): + volume = Volume( + device_name="/dev/sda1", + mapped_name="pretend-luks-mapper-id", encryption=EncryptionScheme.LUKS, - mountpoint="/media/usb", ) - assert vol.writable + mounted_volume = MountedVolume.from_volume(volume, mountpoint="/media/usb") + + assert mounted_volume.unlocked + assert mounted_volume.mountpoint == "/media/usb" From c80f652ad8c1130b0822e44176cee4196748dcb7 Mon Sep 17 00:00:00 2001 From: Gonzalo Bulnes Guilpain Date: Tue, 7 Feb 2023 17:10:46 +1100 Subject: [PATCH 328/352] Fix testing requirement checks: scope it to Debian version The requirement files for Bullseye and Bookworm used ot only differ in comments. That's not the case anymore, so this commit scoped the check to the requirement file that matches the Debian version under test. I've verified that the check fails as expected when the file is out of date. (cherry-picked from 37292714b97b1147801de408d41889b9a5d63205) --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0aa6fb8a4..94cdb4b3d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -86,7 +86,7 @@ common-steps: make venv source .venv/bin/activate make requirements - git diff --ignore-matching-lines=# --exit-code + git diff --exit-code requirements/dev-${VERSION_CODENAME}-requirements.txt - &build_debian_package run: From 9398dbefbe77bc229cf895bcff252f2f3b95630b Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Tue, 2 May 2023 12:32:31 -0400 Subject: [PATCH 329/352] Bump version to 0.4.1, add changelog Refs #116. 
--- changelog.md | 5 +++++ securedrop_proxy/VERSION | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index 2453b0d66..32aa3ae84 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,10 @@ # Changelog +## 0.4.1 + * Updated certifi to 2022.12.7 (#107) + * Drop furl dependency (#105, #111) + * Replace werkzeug dependency with basic string checks (#110, 115) + ## 0.4.0 * Reject JSON with duplicate keys (TOB-SDW-014) (#98) diff --git a/securedrop_proxy/VERSION b/securedrop_proxy/VERSION index 1d0ba9ea1..267577d47 100644 --- a/securedrop_proxy/VERSION +++ b/securedrop_proxy/VERSION @@ -1 +1 @@ -0.4.0 +0.4.1 From fb62e37ed1de1e29b2411c99b2c48738b406c9b1 Mon Sep 17 00:00:00 2001 From: Ro Date: Wed, 21 Jun 2023 15:02:06 -0700 Subject: [PATCH 330/352] Update requests to 2.31.0 --- requirements/build-requirements.txt | 2 +- requirements/requirements.in | 2 +- requirements/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 0c7572ba7..36355f99d 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -2,5 +2,5 @@ certifi==2022.12.7 --hash=sha256:7f205a1a4f02f4970fb5d0e16457964bb30d6b678a76651 charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2a536bf2c5b620b1f27d03d74 idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 pyyaml==5.4.1 --hash=sha256:9608c1b459ff310fe7fa78e8a9e12767a9a0ea9e3fa7cce116db58f95b61f56f --hash=sha256:f7190863a72d6eb89ed92e345e178a0803c439fd7126985b62c1c113cb01e534 -requests==2.26.0 --hash=sha256:7cec5239ce6ec4f6bf3d1b8c7e4d34ebe1b86d3896fe9657a8465ee4d7282bc8 +requests==2.31.0 --hash=sha256:9f7f0f507fe7a9e10ea51e666da60aafcfcda94c71f0cac477dbadd8d25de49f urllib3==1.26.6 --hash=sha256:7a2814749409a681ab58babe6539b02a2f84f6649904211f90fb649811ae7b36 diff --git a/requirements/requirements.in 
b/requirements/requirements.in index 07e99dfca..8f11a868d 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -2,5 +2,5 @@ certifi>=2022.12.07 charset-normalizer>=2.0.4 idna>=2.7 pyyaml==5.4.1 -requests>=2.26.0 +requests>=2.31.0 urllib3>=1.26.5 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index fb334feb4..952bd0c2f 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -53,9 +53,9 @@ pyyaml==5.4.1 \ --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 # via -r requirements/requirements.in -requests==2.26.0 \ - --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ - --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via -r requirements/requirements.in urllib3==1.26.6 \ --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ From a8a73b524ae2eef3273302ff216e9f299fef0c4b Mon Sep 17 00:00:00 2001 From: Ro Date: Wed, 21 Jun 2023 15:55:05 -0700 Subject: [PATCH 331/352] Update dev dependencies --- requirements/dev-requirements.txt | 765 ++++++++++++++++-------------- 1 file changed, 399 insertions(+), 366 deletions(-) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 507e21d32..145a4b967 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -4,23 +4,36 @@ # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # -black==22.12.0 \ - --hash=sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320 \ - 
--hash=sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351 \ - --hash=sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350 \ - --hash=sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f \ - --hash=sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf \ - --hash=sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148 \ - --hash=sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4 \ - --hash=sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d \ - --hash=sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc \ - --hash=sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d \ - --hash=sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2 \ - --hash=sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f +black==23.3.0 \ + --hash=sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5 \ + --hash=sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915 \ + --hash=sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326 \ + --hash=sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940 \ + --hash=sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b \ + --hash=sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30 \ + --hash=sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c \ + --hash=sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c \ + --hash=sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab \ + --hash=sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27 \ + --hash=sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2 \ + --hash=sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961 \ + 
--hash=sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9 \ + --hash=sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb \ + --hash=sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70 \ + --hash=sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331 \ + --hash=sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2 \ + --hash=sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266 \ + --hash=sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d \ + --hash=sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6 \ + --hash=sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b \ + --hash=sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925 \ + --hash=sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8 \ + --hash=sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4 \ + --hash=sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3 # via -r requirements/dev-requirements.in -build==0.9.0 \ - --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ - --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 +build==0.10.0 \ + --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ + --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 # via pip-tools certifi==2022.12.7 \ --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ @@ -40,57 +53,67 @@ click==8.1.3 \ # via # black # pip-tools -coverage==6.5.0 \ - --hash=sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79 \ - --hash=sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a \ - --hash=sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f \ - 
--hash=sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a \ - --hash=sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa \ - --hash=sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398 \ - --hash=sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba \ - --hash=sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d \ - --hash=sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf \ - --hash=sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b \ - --hash=sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518 \ - --hash=sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d \ - --hash=sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795 \ - --hash=sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2 \ - --hash=sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e \ - --hash=sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32 \ - --hash=sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745 \ - --hash=sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b \ - --hash=sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e \ - --hash=sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d \ - --hash=sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f \ - --hash=sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660 \ - --hash=sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62 \ - --hash=sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6 \ - --hash=sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04 \ - --hash=sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c \ - --hash=sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5 \ - 
--hash=sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef \ - --hash=sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc \ - --hash=sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae \ - --hash=sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578 \ - --hash=sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466 \ - --hash=sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4 \ - --hash=sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91 \ - --hash=sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0 \ - --hash=sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4 \ - --hash=sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b \ - --hash=sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe \ - --hash=sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b \ - --hash=sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75 \ - --hash=sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b \ - --hash=sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c \ - --hash=sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72 \ - --hash=sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b \ - --hash=sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f \ - --hash=sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e \ - --hash=sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53 \ - --hash=sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3 \ - --hash=sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84 \ - --hash=sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987 +coverage==7.2.7 \ + 
--hash=sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f \ + --hash=sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2 \ + --hash=sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a \ + --hash=sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a \ + --hash=sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01 \ + --hash=sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6 \ + --hash=sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7 \ + --hash=sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f \ + --hash=sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02 \ + --hash=sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c \ + --hash=sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063 \ + --hash=sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a \ + --hash=sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5 \ + --hash=sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959 \ + --hash=sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97 \ + --hash=sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6 \ + --hash=sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f \ + --hash=sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9 \ + --hash=sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5 \ + --hash=sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f \ + --hash=sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562 \ + --hash=sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe \ + --hash=sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9 \ + --hash=sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f \ + 
--hash=sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb \ + --hash=sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb \ + --hash=sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1 \ + --hash=sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb \ + --hash=sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250 \ + --hash=sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e \ + --hash=sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511 \ + --hash=sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5 \ + --hash=sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59 \ + --hash=sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2 \ + --hash=sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d \ + --hash=sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3 \ + --hash=sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4 \ + --hash=sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de \ + --hash=sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9 \ + --hash=sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833 \ + --hash=sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0 \ + --hash=sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9 \ + --hash=sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d \ + --hash=sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050 \ + --hash=sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d \ + --hash=sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6 \ + --hash=sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353 \ + --hash=sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb \ + 
--hash=sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e \ + --hash=sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8 \ + --hash=sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495 \ + --hash=sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2 \ + --hash=sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd \ + --hash=sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27 \ + --hash=sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1 \ + --hash=sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818 \ + --hash=sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4 \ + --hash=sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e \ + --hash=sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850 \ + --hash=sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3 # via -r requirements/dev-requirements.in flake8==6.0.0 \ --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ @@ -103,151 +126,145 @@ idna==3.2 \ # -r requirements/requirements.in # requests # yarl -isort==5.11.2 \ - --hash=sha256:dd8bbc5c0990f2a095d754e50360915f73b4c26fc82733eb5bfc6b48396af4d2 \ - --hash=sha256:e486966fba83f25b8045f8dd7455b0a0d1e4de481e1d7ce4669902d9fb85e622 +isort==5.12.0 \ + --hash=sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504 \ + --hash=sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6 # via -r requirements/dev-requirements.in mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via flake8 -multidict==6.0.3 \ - --hash=sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce \ - --hash=sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce \ - 
--hash=sha256:114a4ab3e5cfbc56c4b6697686ecb92376c7e8c56893ef20547921552f8bdf57 \ - --hash=sha256:12e0d396faa6dc55ff5379eee54d1df3b508243ff15bfc8295a6ec7a4483a335 \ - --hash=sha256:190626ced82d4cc567a09e7346340d380154a493bac6905e0095d8158cdf1e38 \ - --hash=sha256:1f5d5129a937af4e3c4a1d6c139f4051b7d17d43276cefdd8d442a7031f7eef2 \ - --hash=sha256:21e1ce0b187c4e93112304dcde2aa18922fdbe8fb4f13d8aa72a5657bce0563a \ - --hash=sha256:24e8d513bfcaadc1f8b0ebece3ff50961951c54b07d5a775008a882966102418 \ - --hash=sha256:2523a29006c034687eccd3ee70093a697129a3ffe8732535d3b2df6a4ecc279d \ - --hash=sha256:26fbbe17f8a7211b623502d2bf41022a51da3025142401417c765bf9a56fed4c \ - --hash=sha256:2b66d61966b12e6bba500e5cbb2c721a35e119c30ee02495c5629bd0e91eea30 \ - --hash=sha256:2cf5d19e12eff855aa198259c0b02fd3f5d07e1291fbd20279c37b3b0e6c9852 \ - --hash=sha256:2cfda34b7cb99eacada2072e0f69c0ad3285cb6f8e480b11f2b6d6c1c6f92718 \ - --hash=sha256:3541882266247c7cd3dba78d6ef28dbe704774df60c9e4231edaa4493522e614 \ - --hash=sha256:36df958b15639e40472adaa4f0c2c7828fe680f894a6b48c4ce229f59a6a798b \ - --hash=sha256:38d394814b39be1c36ac709006d39d50d72a884f9551acd9c8cc1ffae3fc8c4e \ - --hash=sha256:4159fc1ec9ede8ab93382e0d6ba9b1b3d23c72da39a834db7a116986605c7ab4 \ - --hash=sha256:445c0851a1cbc1f2ec3b40bc22f9c4a235edb3c9a0906122a9df6ea8d51f886c \ - --hash=sha256:47defc0218682281a52fb1f6346ebb8b68b17538163a89ea24dfe4da37a8a9a3 \ - --hash=sha256:4cc5c8cd205a9810d16a5cd428cd81bac554ad1477cb87f4ad722b10992e794d \ - --hash=sha256:4ccf55f28066b4f08666764a957c2b7c241c7547b0921d69c7ceab5f74fe1a45 \ - --hash=sha256:4fb3fe591956d8841882c463f934c9f7485cfd5f763a08c0d467b513dc18ef89 \ - --hash=sha256:526f8397fc124674b8f39748680a0ff673bd6a715fecb4866716d36e380f015f \ - --hash=sha256:578bfcb16f4b8675ef71b960c00f174b0426e0eeb796bab6737389d8288eb827 \ - --hash=sha256:5b51969503709415a35754954c2763f536a70b8bf7360322b2edb0c0a44391f6 \ - --hash=sha256:5e58ec0375803526d395f6f7e730ecc45d06e15f68f7b9cdbf644a2918324e51 \ - 
--hash=sha256:62db44727d0befea68e8ad2881bb87a9cfb6b87d45dd78609009627167f37b69 \ - --hash=sha256:67090b17a0a5be5704fd109f231ee73cefb1b3802d41288d6378b5df46ae89ba \ - --hash=sha256:6cd14e61f0da2a2cfb9fe05bfced2a1ed7063ce46a7a8cd473be4973de9a7f91 \ - --hash=sha256:70740c2bc9ab1c99f7cdcb104f27d16c63860c56d51c5bf0ef82fc1d892a2131 \ - --hash=sha256:73009ea04205966d47e16d98686ac5c438af23a1bb30b48a2c5da3423ec9ce37 \ - --hash=sha256:791458a1f7d1b4ab3bd9e93e0dcd1d59ef7ee9aa051dcd1ea030e62e49b923fd \ - --hash=sha256:7f9511e48bde6b995825e8d35e434fc96296cf07a25f4aae24ff9162be7eaa46 \ - --hash=sha256:81c3d597591b0940e04949e4e4f79359b2d2e542a686ba0da5e25de33fec13e0 \ - --hash=sha256:8230a39bae6c2e8a09e4da6bace5064693b00590a4a213e38f9a9366da10e7dd \ - --hash=sha256:8b92a9f3ab904397a33b193000dc4de7318ea175c4c460a1e154c415f9008e3d \ - --hash=sha256:94cbe5535ef150546b8321aebea22862a3284da51e7b55f6f95b7d73e96d90ee \ - --hash=sha256:960ce1b790952916e682093788696ef7e33ac6a97482f9b983abdc293091b531 \ - --hash=sha256:99341ca1f1db9e7f47914cb2461305665a662383765ced6f843712564766956d \ - --hash=sha256:9aac6881454a750554ed4b280a839dcf9e2133a9d12ab4d417d673fb102289b7 \ - --hash=sha256:9d359b0a962e052b713647ac1f13eabf2263167b149ed1e27d5c579f5c8c7d2c \ - --hash=sha256:9dbab2a7e9c073bc9538824a01f5ed689194db7f55f2b8102766873e906a6c1a \ - --hash=sha256:a27b029caa3b555a4f3da54bc1e718eb55fcf1a11fda8bf0132147b476cf4c08 \ - --hash=sha256:a8b817d4ed68fd568ec5e45dd75ddf30cc72a47a6b41b74d5bb211374c296f5e \ - --hash=sha256:ad7d66422b9cc51125509229693d27e18c08f2dea3ac9de408d821932b1b3759 \ - --hash=sha256:b46e79a9f4db53897d17bc64a39d1c7c2be3e3d4f8dba6d6730a2b13ddf0f986 \ - --hash=sha256:baa96a3418e27d723064854143b2f414a422c84cc87285a71558722049bebc5a \ - --hash=sha256:beeca903e4270b4afcd114f371a9602240dc143f9e944edfea00f8d4ad56c40d \ - --hash=sha256:c2a1168e5aa7c72499fb03c850e0f03f624fa4a5c8d2e215c518d0a73872eb64 \ - --hash=sha256:c5790cc603456b6dcf8a9a4765f666895a6afddc88b3d3ba7b53dea2b6e23116 \ - 
--hash=sha256:cb4a08f0aaaa869f189ffea0e17b86ad0237b51116d494da15ef7991ee6ad2d7 \ - --hash=sha256:cd5771e8ea325f85cbb361ddbdeb9ae424a68e5dfb6eea786afdcd22e68a7d5d \ - --hash=sha256:ce8e51774eb03844588d3c279adb94efcd0edeccd2f97516623292445bcc01f9 \ - --hash=sha256:d09daf5c6ce7fc6ed444c9339bbde5ea84e2534d1ca1cd37b60f365c77f00dea \ - --hash=sha256:d0e798b072cf2aab9daceb43d97c9c527a0c7593e67a7846ad4cc6051de1e303 \ - --hash=sha256:d325d61cac602976a5d47b19eaa7d04e3daf4efce2164c630219885087234102 \ - --hash=sha256:d408172519049e36fb6d29672f060dc8461fc7174eba9883c7026041ef9bfb38 \ - --hash=sha256:d52442e7c951e4c9ee591d6047706e66923d248d83958bbf99b8b19515fffaef \ - --hash=sha256:dc4cfef5d899f5f1a15f3d2ac49f71107a01a5a2745b4dd53fa0cede1419385a \ - --hash=sha256:df7b4cee3ff31b3335aba602f8d70dbc641e5b7164b1e9565570c9d3c536a438 \ - --hash=sha256:e068dfeadbce63072b2d8096486713d04db4946aad0a0f849bd4fc300799d0d3 \ - --hash=sha256:e07c24018986fb00d6e7eafca8fcd6e05095649e17fcf0e33a592caaa62a78b9 \ - --hash=sha256:e0bce9f7c30e7e3a9e683f670314c0144e8d34be6b7019e40604763bd278d84f \ - --hash=sha256:e1925f78a543b94c3d46274c66a366fee8a263747060220ed0188e5f3eeea1c0 \ - --hash=sha256:e322c94596054352f5a02771eec71563c018b15699b961aba14d6dd943367022 \ - --hash=sha256:e4a095e18847c12ec20e55326ab8782d9c2d599400a3a2f174fab4796875d0e2 \ - --hash=sha256:e5a811aab1b4aea0b4be669363c19847a8c547510f0e18fb632956369fdbdf67 \ - --hash=sha256:eddf604a3de2ace3d9a4e4d491be7562a1ac095a0a1c95a9ec5781ef0273ef11 \ - --hash=sha256:ee9b1cae9a6c5d023e5a150f6f6b9dbb3c3bbc7887d6ee07d4c0ecb49a473734 \ - --hash=sha256:f1650ea41c408755da5eed52ac6ccbc8938ccc3e698d81e6f6a1be02ff2a0945 \ - --hash=sha256:f2c0957b3e8c66c10d27272709a5299ab3670a0f187c9428f3b90d267119aedb \ - --hash=sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000 \ - --hash=sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253 \ - --hash=sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544 
+multidict==6.0.4 \ + --hash=sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9 \ + --hash=sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8 \ + --hash=sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03 \ + --hash=sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710 \ + --hash=sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161 \ + --hash=sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664 \ + --hash=sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569 \ + --hash=sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067 \ + --hash=sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313 \ + --hash=sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706 \ + --hash=sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2 \ + --hash=sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636 \ + --hash=sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49 \ + --hash=sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93 \ + --hash=sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603 \ + --hash=sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0 \ + --hash=sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60 \ + --hash=sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4 \ + --hash=sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e \ + --hash=sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1 \ + --hash=sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60 \ + --hash=sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951 \ + --hash=sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc \ + 
--hash=sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe \ + --hash=sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95 \ + --hash=sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d \ + --hash=sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8 \ + --hash=sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed \ + --hash=sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2 \ + --hash=sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775 \ + --hash=sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87 \ + --hash=sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c \ + --hash=sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2 \ + --hash=sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98 \ + --hash=sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3 \ + --hash=sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe \ + --hash=sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78 \ + --hash=sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660 \ + --hash=sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176 \ + --hash=sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e \ + --hash=sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988 \ + --hash=sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c \ + --hash=sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c \ + --hash=sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0 \ + --hash=sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449 \ + --hash=sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f \ + --hash=sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde \ + 
--hash=sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5 \ + --hash=sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d \ + --hash=sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac \ + --hash=sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a \ + --hash=sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9 \ + --hash=sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca \ + --hash=sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11 \ + --hash=sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35 \ + --hash=sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063 \ + --hash=sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b \ + --hash=sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982 \ + --hash=sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258 \ + --hash=sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1 \ + --hash=sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52 \ + --hash=sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480 \ + --hash=sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7 \ + --hash=sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461 \ + --hash=sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d \ + --hash=sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc \ + --hash=sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779 \ + --hash=sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a \ + --hash=sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547 \ + --hash=sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0 \ + --hash=sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171 \ + 
--hash=sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf \ + --hash=sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d \ + --hash=sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba # via yarl -mypy==0.991 \ - --hash=sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d \ - --hash=sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6 \ - --hash=sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf \ - --hash=sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f \ - --hash=sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813 \ - --hash=sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33 \ - --hash=sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad \ - --hash=sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05 \ - --hash=sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297 \ - --hash=sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06 \ - --hash=sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd \ - --hash=sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243 \ - --hash=sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305 \ - --hash=sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476 \ - --hash=sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711 \ - --hash=sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70 \ - --hash=sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5 \ - --hash=sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461 \ - --hash=sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab \ - --hash=sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c \ - 
--hash=sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d \ - --hash=sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135 \ - --hash=sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93 \ - --hash=sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648 \ - --hash=sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a \ - --hash=sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb \ - --hash=sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3 \ - --hash=sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372 \ - --hash=sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb \ - --hash=sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef +mypy==1.4.0 \ + --hash=sha256:0cf0ca95e4b8adeaf07815a78b4096b65adf64ea7871b39a2116c19497fcd0dd \ + --hash=sha256:0f98973e39e4a98709546a9afd82e1ffcc50c6ec9ce6f7870f33ebbf0bd4f26d \ + --hash=sha256:19d42b08c7532d736a7e0fb29525855e355fa51fd6aef4f9bbc80749ff64b1a2 \ + --hash=sha256:210fe0f39ec5be45dd9d0de253cb79245f0a6f27631d62e0c9c7988be7152965 \ + --hash=sha256:3b1b5c875fcf3e7217a3de7f708166f641ca154b589664c44a6fd6d9f17d9e7e \ + --hash=sha256:3f2b353eebef669529d9bd5ae3566905a685ae98b3af3aad7476d0d519714758 \ + --hash=sha256:50f65f0e9985f1e50040e603baebab83efed9eb37e15a22a4246fa7cd660f981 \ + --hash=sha256:53c2a1fed81e05ded10a4557fe12bae05b9ecf9153f162c662a71d924d504135 \ + --hash=sha256:5a0ee54c2cb0f957f8a6f41794d68f1a7e32b9968675ade5846f538504856d42 \ + --hash=sha256:62bf18d97c6b089f77f0067b4e321db089d8520cdeefc6ae3ec0f873621c22e5 \ + --hash=sha256:653863c75f0dbb687d92eb0d4bd9fe7047d096987ecac93bb7b1bc336de48ebd \ + --hash=sha256:67242d5b28ed0fa88edd8f880aed24da481929467fdbca6487167cb5e3fd31ff \ + --hash=sha256:6ba9a69172abaa73910643744d3848877d6aac4a20c41742027dcfd8d78f05d9 \ + 
--hash=sha256:6c34d43e3d54ad05024576aef28081d9d0580f6fa7f131255f54020eb12f5352 \ + --hash=sha256:7461469e163f87a087a5e7aa224102a30f037c11a096a0ceeb721cb0dce274c8 \ + --hash=sha256:94a81b9354545123feb1a99b960faeff9e1fa204fce47e0042335b473d71530d \ + --hash=sha256:a0b2e0da7ff9dd8d2066d093d35a169305fc4e38db378281fce096768a3dbdbf \ + --hash=sha256:a34eed094c16cad0f6b0d889811592c7a9b7acf10d10a7356349e325d8704b4f \ + --hash=sha256:a3af348e0925a59213244f28c7c0c3a2c2088b4ba2fe9d6c8d4fbb0aba0b7d05 \ + --hash=sha256:b4c734d947e761c7ceb1f09a98359dd5666460acbc39f7d0a6b6beec373c5840 \ + --hash=sha256:bba57b4d2328740749f676807fcf3036e9de723530781405cc5a5e41fc6e20de \ + --hash=sha256:ca33ab70a4aaa75bb01086a0b04f0ba8441e51e06fc57e28585176b08cad533b \ + --hash=sha256:de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042 \ + --hash=sha256:f051ca656be0c179c735a4c3193f307d34c92fdc4908d44fd4516fbe8b10567d \ + --hash=sha256:f5984a8d13d35624e3b235a793c814433d810acba9eeefe665cdfed3d08bc3af \ + --hash=sha256:f7a5971490fd4a5a436e143105a1f78fa8b3fe95b30fff2a77542b4f3227a01f # via -r requirements/dev-requirements.in -mypy-extensions==0.4.3 \ - --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ - --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via # black # mypy -packaging==22.0 \ - --hash=sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3 \ - --hash=sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3 - # via build -pathspec==0.10.3 \ - --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6 \ - --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6 +packaging==23.1 \ + --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 
\ + --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f + # via + # black + # build +pathspec==0.11.1 \ + --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \ + --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293 # via black -pep517==0.13.0 \ - --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ - --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 - # via build -pip==22.3.1 \ - --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ - --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 +pip==23.1.2 \ + --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ + --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 # via pip-tools -pip-tools==6.12.0 \ - --hash=sha256:8e22fbc84ede7ca522ba4b033c4fcf6a6419adabc75d24747be3d8262504489a \ - --hash=sha256:f441603c63b16f4af0dd5026f7522a49eddec2bc8a4a4979af44e1f6b0a1c13e +pip-tools==6.13.0 \ + --hash=sha256:50943f151d87e752abddec8158622c34ad7f292e193836e90e30d87da60b19d9 \ + --hash=sha256:61d46bd2eb8016ed4a924e196e6e5b0a268cd3babd79e593048720db23522bb1 # via -r requirements/dev-requirements.in -platformdirs==2.6.0 \ - --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca \ - --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e +platformdirs==3.7.0 \ + --hash=sha256:87fbf6473e87c078d536980ba970a472422e94f17b752cfad17024c18876d481 \ + --hash=sha256:cfd065ba43133ff103ab3bd10aecb095c2a0035fcd1f07217c9376900d94ba07 # via black pycodestyle==2.10.0 \ --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ @@ -257,6 +274,10 @@ pyflakes==3.0.1 \ --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd # via flake8 
+pyproject-hooks==1.0.0 \ + --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ + --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 + # via build pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ @@ -290,17 +311,17 @@ pyyaml==5.4.1 \ # via # -r requirements/requirements.in # vcrpy -requests==2.26.0 \ - --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ - --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via -r requirements/requirements.in -setuptools==65.6.3 \ - --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ - --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 +setuptools==68.0.0 \ + --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ + --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 # via pip-tools -six==1.11.0 \ - --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ - --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via vcrpy tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ @@ -309,21 +330,21 @@ tomli==2.0.1 \ # black # build # mypy -types-pyyaml==6.0.12.2 \ - --hash=sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b \ - --hash=sha256:6840819871c92deebe6a2067fb800c11b8a063632eb4e3e755914e7ab3604e83 
+types-pyyaml==6.0.12.10 \ + --hash=sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f \ + --hash=sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97 # via -r requirements/dev-requirements.in -types-requests==2.28.11.5 \ - --hash=sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9 \ - --hash=sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a +types-requests==2.31.0.1 \ + --hash=sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac \ + --hash=sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3 # via -r requirements/dev-requirements.in -types-urllib3==1.26.25.4 \ - --hash=sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49 \ - --hash=sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee +types-urllib3==1.26.25.13 \ + --hash=sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5 \ + --hash=sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c # via types-requests -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.6.3 \ + --hash=sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26 \ + --hash=sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5 # via # black # mypy @@ -333,153 +354,165 @@ urllib3==1.26.6 \ # via # -r requirements/requirements.in # requests -vcrpy==4.2.1 \ - --hash=sha256:7cd3e81a2c492e01c281f180bcc2a86b520b173d2b656cb5d89d99475423e013 \ - --hash=sha256:efac3e2e0b2af7686f83a266518180af7a048619b2f696e7bad9520f5e2eac09 + # vcrpy +vcrpy==4.3.1 \ + --hash=sha256:24e2d450bf1c2f9f9b4246ee91beb7d58f862a9f2f030514b14783b83c5146ec \ + --hash=sha256:35398f1b373f32340f39d735ea45f40d679ace316f3dddf8cbcbc2f120e6d1d0 # via -r requirements/dev-requirements.in -wheel==0.38.4 \ - 
--hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.40.0 \ + --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ + --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 # via pip-tools -wrapt==1.14.1 \ - --hash=sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3 \ - --hash=sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b \ - --hash=sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4 \ - --hash=sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2 \ - --hash=sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656 \ - --hash=sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3 \ - --hash=sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff \ - --hash=sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310 \ - --hash=sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a \ - --hash=sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57 \ - --hash=sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069 \ - --hash=sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383 \ - --hash=sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe \ - --hash=sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87 \ - --hash=sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d \ - --hash=sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b \ - --hash=sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907 \ - --hash=sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f \ - --hash=sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0 \ - 
--hash=sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28 \ - --hash=sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1 \ - --hash=sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853 \ - --hash=sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc \ - --hash=sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3 \ - --hash=sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3 \ - --hash=sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164 \ - --hash=sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1 \ - --hash=sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c \ - --hash=sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1 \ - --hash=sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7 \ - --hash=sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1 \ - --hash=sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320 \ - --hash=sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed \ - --hash=sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1 \ - --hash=sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248 \ - --hash=sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c \ - --hash=sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456 \ - --hash=sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77 \ - --hash=sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef \ - --hash=sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1 \ - --hash=sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7 \ - --hash=sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86 \ - --hash=sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4 \ - 
--hash=sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d \ - --hash=sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d \ - --hash=sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8 \ - --hash=sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5 \ - --hash=sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471 \ - --hash=sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00 \ - --hash=sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68 \ - --hash=sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3 \ - --hash=sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d \ - --hash=sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735 \ - --hash=sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d \ - --hash=sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569 \ - --hash=sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7 \ - --hash=sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59 \ - --hash=sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5 \ - --hash=sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb \ - --hash=sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b \ - --hash=sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f \ - --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462 \ - --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015 \ - --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af +wrapt==1.15.0 \ + --hash=sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0 \ + --hash=sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420 \ + 
--hash=sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a \ + --hash=sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c \ + --hash=sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079 \ + --hash=sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923 \ + --hash=sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f \ + --hash=sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1 \ + --hash=sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8 \ + --hash=sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86 \ + --hash=sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0 \ + --hash=sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364 \ + --hash=sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e \ + --hash=sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c \ + --hash=sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e \ + --hash=sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c \ + --hash=sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727 \ + --hash=sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff \ + --hash=sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e \ + --hash=sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29 \ + --hash=sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7 \ + --hash=sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72 \ + --hash=sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475 \ + --hash=sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a \ + --hash=sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317 \ + --hash=sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2 \ + 
--hash=sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd \ + --hash=sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640 \ + --hash=sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98 \ + --hash=sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248 \ + --hash=sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e \ + --hash=sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d \ + --hash=sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec \ + --hash=sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1 \ + --hash=sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e \ + --hash=sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9 \ + --hash=sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92 \ + --hash=sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb \ + --hash=sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094 \ + --hash=sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46 \ + --hash=sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29 \ + --hash=sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd \ + --hash=sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705 \ + --hash=sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8 \ + --hash=sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975 \ + --hash=sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb \ + --hash=sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e \ + --hash=sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b \ + --hash=sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418 \ + --hash=sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019 \ + 
--hash=sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1 \ + --hash=sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba \ + --hash=sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6 \ + --hash=sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2 \ + --hash=sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3 \ + --hash=sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7 \ + --hash=sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752 \ + --hash=sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416 \ + --hash=sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f \ + --hash=sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1 \ + --hash=sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc \ + --hash=sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145 \ + --hash=sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee \ + --hash=sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a \ + --hash=sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7 \ + --hash=sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b \ + --hash=sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653 \ + --hash=sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0 \ + --hash=sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90 \ + --hash=sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29 \ + --hash=sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6 \ + --hash=sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034 \ + --hash=sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09 \ + --hash=sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559 \ + 
--hash=sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639 # via vcrpy -yarl==1.8.2 \ - --hash=sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87 \ - --hash=sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89 \ - --hash=sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a \ - --hash=sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08 \ - --hash=sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996 \ - --hash=sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077 \ - --hash=sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901 \ - --hash=sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e \ - --hash=sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee \ - --hash=sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574 \ - --hash=sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165 \ - --hash=sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634 \ - --hash=sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229 \ - --hash=sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b \ - --hash=sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f \ - --hash=sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7 \ - --hash=sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf \ - --hash=sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89 \ - --hash=sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0 \ - --hash=sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1 \ - --hash=sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe \ - --hash=sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf \ - 
--hash=sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76 \ - --hash=sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951 \ - --hash=sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863 \ - --hash=sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06 \ - --hash=sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562 \ - --hash=sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6 \ - --hash=sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c \ - --hash=sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e \ - --hash=sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1 \ - --hash=sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3 \ - --hash=sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3 \ - --hash=sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778 \ - --hash=sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8 \ - --hash=sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2 \ - --hash=sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b \ - --hash=sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d \ - --hash=sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f \ - --hash=sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c \ - --hash=sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581 \ - --hash=sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918 \ - --hash=sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c \ - --hash=sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e \ - --hash=sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220 \ - --hash=sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37 \ - 
--hash=sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739 \ - --hash=sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77 \ - --hash=sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6 \ - --hash=sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42 \ - --hash=sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946 \ - --hash=sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5 \ - --hash=sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d \ - --hash=sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146 \ - --hash=sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a \ - --hash=sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83 \ - --hash=sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef \ - --hash=sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80 \ - --hash=sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588 \ - --hash=sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5 \ - --hash=sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2 \ - --hash=sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef \ - --hash=sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826 \ - --hash=sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05 \ - --hash=sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516 \ - --hash=sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0 \ - --hash=sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4 \ - --hash=sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2 \ - --hash=sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0 \ - --hash=sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd \ - 
--hash=sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8 \ - --hash=sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b \ - --hash=sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1 \ - --hash=sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c +yarl==1.9.2 \ + --hash=sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571 \ + --hash=sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3 \ + --hash=sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3 \ + --hash=sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c \ + --hash=sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7 \ + --hash=sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04 \ + --hash=sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191 \ + --hash=sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea \ + --hash=sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4 \ + --hash=sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4 \ + --hash=sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095 \ + --hash=sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e \ + --hash=sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74 \ + --hash=sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef \ + --hash=sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33 \ + --hash=sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde \ + --hash=sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45 \ + --hash=sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf \ + --hash=sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b \ + 
--hash=sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac \ + --hash=sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0 \ + --hash=sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528 \ + --hash=sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716 \ + --hash=sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb \ + --hash=sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18 \ + --hash=sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72 \ + --hash=sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6 \ + --hash=sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582 \ + --hash=sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5 \ + --hash=sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368 \ + --hash=sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc \ + --hash=sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9 \ + --hash=sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be \ + --hash=sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a \ + --hash=sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80 \ + --hash=sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8 \ + --hash=sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6 \ + --hash=sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417 \ + --hash=sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574 \ + --hash=sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59 \ + --hash=sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608 \ + --hash=sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82 \ + --hash=sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1 \ + 
--hash=sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3 \ + --hash=sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d \ + --hash=sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8 \ + --hash=sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc \ + --hash=sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac \ + --hash=sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8 \ + --hash=sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955 \ + --hash=sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0 \ + --hash=sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367 \ + --hash=sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb \ + --hash=sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a \ + --hash=sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623 \ + --hash=sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2 \ + --hash=sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6 \ + --hash=sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7 \ + --hash=sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4 \ + --hash=sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051 \ + --hash=sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938 \ + --hash=sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8 \ + --hash=sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9 \ + --hash=sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3 \ + --hash=sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5 \ + --hash=sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9 \ + --hash=sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333 \ + 
--hash=sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185 \ + --hash=sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3 \ + --hash=sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560 \ + --hash=sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b \ + --hash=sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7 \ + --hash=sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78 \ + --hash=sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7 # via vcrpy From 6336153ef9a7b201e18a10be9657ea7a1a184865 Mon Sep 17 00:00:00 2001 From: Ro Date: Mon, 26 Jun 2023 11:54:37 -0700 Subject: [PATCH 332/352] Satisfy black --- securedrop_proxy/proxy.py | 6 ------ tests/test_main.py | 1 - 2 files changed, 7 deletions(-) diff --git a/securedrop_proxy/proxy.py b/securedrop_proxy/proxy.py index 11af7cdd9..b87d43c19 100644 --- a/securedrop_proxy/proxy.py +++ b/securedrop_proxy/proxy.py @@ -74,7 +74,6 @@ def err_on_done(self): sys.exit(1) def read_conf(self, conf_path: str) -> None: - if not os.path.isfile(conf_path): self.simple_error(500, "Configuration file does not exist at {}".format(conf_path)) self.err_on_done() @@ -180,7 +179,6 @@ def normalize_path(self, parsed: ParseResult) -> ParseResult: return parsed._replace(path=path) def prep_request(self) -> None: - scheme = self.conf.scheme host = self.conf.host port = self.conf.port @@ -212,7 +210,6 @@ def prep_request(self) -> None: self._prepared_request = prep def handle_json_response(self) -> None: - res = Response(self._presp.status_code) res.headers = dict(self._presp.headers) @@ -221,7 +218,6 @@ def handle_json_response(self) -> None: self.res = res def handle_non_json_response(self) -> None: - res = Response(self._presp.status_code) # Create a NamedTemporaryFile, we don't want @@ -255,9 +251,7 @@ def handle_response(self) -> None: self.res.headers = dict(self.res.headers) def proxy(self) -> None: - 
try: - self.prep_request() # To confirm that we have a prepared request before the proxy call assert self._prepared_request diff --git a/tests/test_main.py b/tests/test_main.py index 94a39a2cc..66289660c 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -102,7 +102,6 @@ def test_non_json_response(self): "path_query": "" }""" def on_save(self, fh, res): - subprocess.run(["cp", fh.name, "/tmp/{}".format(self.fn)]) res.headers["X-Origin-Content-Type"] = res.headers["Content-Type"] From 66f19176cc669d465c04187dd85946e32774c85c Mon Sep 17 00:00:00 2001 From: Ro Date: Mon, 10 Jul 2023 17:51:56 -0700 Subject: [PATCH 333/352] Add comments to Makefile targets. Remove requirements.in from update-dev-only-dependencies Makefile target, since this requirement is now specified in the .in file directly. --- Makefile | 6 ++++-- requirements/dev-bookworm-requirements.in | 3 +++ requirements/dev-bullseye-requirements.in | 3 +++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index aa2f18eed..6e9cf4965 100644 --- a/Makefile +++ b/Makefile @@ -19,12 +19,14 @@ safety: ## Runs `safety check` to check python dependencies for vulnerabilities || exit 1; \ done +# Helper, not to be used directly .PHONY: sync-requirements sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies if test -f "requirements/dev-bullseye-requirements.txt"; then rm -r requirements/dev-bullseye-requirements.txt; fi if test -f "requirements/dev-bookworm-requirements.txt"; then rm -r requirements/dev-bookworm-requirements.txt; fi $(MAKE) dev-requirements +# Helper, not to be used directly .PHONY: dev-requirements dev-requirements: ## Update dev-*requirements.txt files if pinned versions do not comply with the dependency specifications in dev-*requirements.in pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in @@ -44,10 +46,10 @@ 
update-dependency: ## Add or upgrade a package to the latest version that compl update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in $(MAKE) sync-requirements @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-bullseye-requirements.txt requirements/requirements.in requirements/dev-bullseye-requirements.in; \ + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in; \ done < 'requirements/dev-bullseye-requirements.in' @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-bookworm-requirements.txt requirements/requirements.in requirements/dev-bookworm-requirements.in; \ + pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in; \ done < 'requirements/dev-bookworm-requirements.in' .PHONY: check diff --git a/requirements/dev-bookworm-requirements.in b/requirements/dev-bookworm-requirements.in index 1dd6e9bb7..8bad67377 100644 --- a/requirements/dev-bookworm-requirements.in +++ b/requirements/dev-bookworm-requirements.in @@ -1,3 +1,6 @@ +# Include prod requirements +-r requirements.in + black flake8 pip-tools diff --git a/requirements/dev-bullseye-requirements.in b/requirements/dev-bullseye-requirements.in index 1dd6e9bb7..b75bf7f7c 100644 --- a/requirements/dev-bullseye-requirements.in +++ b/requirements/dev-bullseye-requirements.in @@ -1,3 +1,6 @@ +# include prod requirements +-r requirements.in + black flake8 pip-tools From 447cf771374c678ebd2a97bc6a1c59f90a77b91a Mon Sep 17 00:00:00 2001 From: Ro Date: Mon, 10 Jul 2023 19:45:14 -0700 Subject: [PATCH 334/352] Update 
dev-only dependencies --- requirements/dev-bookworm-requirements.txt | 788 +++++++++++++-------- requirements/dev-bullseye-requirements.txt | 765 ++++++++++++-------- 2 files changed, 981 insertions(+), 572 deletions(-) diff --git a/requirements/dev-bookworm-requirements.txt b/requirements/dev-bookworm-requirements.txt index d89afb8c8..380bb8c5c 100644 --- a/requirements/dev-bookworm-requirements.txt +++ b/requirements/dev-bookworm-requirements.txt @@ -2,29 +2,39 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in +# pip-compile --allow-unsafe --config=pyproject.toml --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in # -attrs==21.4.0 \ - --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ - --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via # glom # jsonschema - # pytest + # referencing # semgrep -black==22.12.0 \ - --hash=sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320 \ - --hash=sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351 \ - --hash=sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350 \ - --hash=sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f \ - --hash=sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf \ - --hash=sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148 \ - --hash=sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4 \ - --hash=sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d \ - 
--hash=sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc \ - --hash=sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d \ - --hash=sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2 \ - --hash=sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f +black==23.7.0 \ + --hash=sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3 \ + --hash=sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb \ + --hash=sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087 \ + --hash=sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320 \ + --hash=sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6 \ + --hash=sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3 \ + --hash=sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc \ + --hash=sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f \ + --hash=sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587 \ + --hash=sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91 \ + --hash=sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a \ + --hash=sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad \ + --hash=sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926 \ + --hash=sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9 \ + --hash=sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be \ + --hash=sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd \ + --hash=sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96 \ + --hash=sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491 \ + --hash=sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2 \ + 
--hash=sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a \ + --hash=sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f \ + --hash=sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995 # via -r requirements/dev-bookworm-requirements.in boltons==21.0.0 \ --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ @@ -37,85 +47,168 @@ bracex==2.3.post1 \ --hash=sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73 \ --hash=sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693 # via wcmatch -build==0.9.0 \ - --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ - --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 +build==0.10.0 \ + --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ + --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 # via pip-tools -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.5.7 \ + --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \ + --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716 # via requests -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f +charset-normalizer==3.2.0 \ + --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ + --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ + --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \ + --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \ + 
--hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \ + --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \ + --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \ + --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \ + --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \ + --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \ + --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \ + --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \ + --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \ + --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \ + --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \ + --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \ + --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \ + --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \ + --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \ + --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \ + --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \ + --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \ + --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \ + --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \ + --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \ + --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \ + --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \ + --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \ + 
--hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \ + --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \ + --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \ + --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \ + --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \ + --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \ + --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \ + --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \ + --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \ + --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \ + --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \ + --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \ + --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \ + --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \ + --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \ + --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \ + --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \ + --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \ + --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \ + --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \ + --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \ + --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \ + --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \ + --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \ + 
--hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \ + --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \ + --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \ + --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \ + --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \ + --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \ + --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \ + --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \ + --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \ + --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \ + --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \ + --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \ + --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \ + --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \ + --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \ + --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \ + --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \ + --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \ + --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \ + --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \ + --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \ + --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ + --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa # via requests -click==8.1.3 \ - 
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ - --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 +click==8.1.4 \ + --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \ + --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37 # via # black # click-option-group # pip-tools # semgrep -click-option-group==0.5.5 \ - --hash=sha256:0f8ca79bc9b1d6fcaafdbe194b17ba1a2dde44ddf19087235c3efed2ad288143 \ - --hash=sha256:78ee474f07a0ca0ef6c0317bb3ebe79387aafb0c4a1e03b1d8b2b0be1e42fc78 +click-option-group==0.5.6 \ + --hash=sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7 \ + --hash=sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777 # via semgrep colorama==0.4.6 \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 # via semgrep -coverage[toml]==6.5.0 \ - --hash=sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79 \ - --hash=sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a \ - --hash=sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f \ - --hash=sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a \ - --hash=sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa \ - --hash=sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398 \ - --hash=sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba \ - --hash=sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d \ - --hash=sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf \ - --hash=sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b \ - --hash=sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518 \ - 
--hash=sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d \ - --hash=sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795 \ - --hash=sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2 \ - --hash=sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e \ - --hash=sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32 \ - --hash=sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745 \ - --hash=sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b \ - --hash=sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e \ - --hash=sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d \ - --hash=sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f \ - --hash=sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660 \ - --hash=sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62 \ - --hash=sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6 \ - --hash=sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04 \ - --hash=sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c \ - --hash=sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5 \ - --hash=sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef \ - --hash=sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc \ - --hash=sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae \ - --hash=sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578 \ - --hash=sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466 \ - --hash=sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4 \ - --hash=sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91 \ - --hash=sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0 \ - 
--hash=sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4 \ - --hash=sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b \ - --hash=sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe \ - --hash=sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b \ - --hash=sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75 \ - --hash=sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b \ - --hash=sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c \ - --hash=sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72 \ - --hash=sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b \ - --hash=sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f \ - --hash=sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e \ - --hash=sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53 \ - --hash=sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3 \ - --hash=sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84 \ - --hash=sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987 +coverage[toml]==7.2.7 \ + --hash=sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f \ + --hash=sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2 \ + --hash=sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a \ + --hash=sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a \ + --hash=sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01 \ + --hash=sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6 \ + --hash=sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7 \ + --hash=sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f \ + 
--hash=sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02 \ + --hash=sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c \ + --hash=sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063 \ + --hash=sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a \ + --hash=sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5 \ + --hash=sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959 \ + --hash=sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97 \ + --hash=sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6 \ + --hash=sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f \ + --hash=sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9 \ + --hash=sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5 \ + --hash=sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f \ + --hash=sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562 \ + --hash=sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe \ + --hash=sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9 \ + --hash=sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f \ + --hash=sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb \ + --hash=sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb \ + --hash=sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1 \ + --hash=sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb \ + --hash=sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250 \ + --hash=sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e \ + --hash=sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511 \ + --hash=sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5 \ + 
--hash=sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59 \ + --hash=sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2 \ + --hash=sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d \ + --hash=sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3 \ + --hash=sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4 \ + --hash=sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de \ + --hash=sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9 \ + --hash=sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833 \ + --hash=sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0 \ + --hash=sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9 \ + --hash=sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d \ + --hash=sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050 \ + --hash=sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d \ + --hash=sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6 \ + --hash=sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353 \ + --hash=sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb \ + --hash=sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e \ + --hash=sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8 \ + --hash=sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495 \ + --hash=sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2 \ + --hash=sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd \ + --hash=sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27 \ + --hash=sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1 \ + --hash=sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818 \ + 
--hash=sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4 \ + --hash=sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e \ + --hash=sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850 \ + --hash=sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3 # via pytest-cov defusedxml==0.7.1 \ --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ @@ -137,85 +230,90 @@ idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -iniconfig==1.1.1 \ - --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ - --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 +iniconfig==2.0.0 \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -jsonschema==4.17.3 \ - --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ - --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 +jsonschema==4.18.0 \ + --hash=sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4 \ + --hash=sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60 # via semgrep +jsonschema-specifications==2023.6.1 \ + --hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \ + --hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28 + # via jsonschema +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ 
--hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via flake8 -mypy==1.0.0 \ - --hash=sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2 \ - --hash=sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593 \ - --hash=sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52 \ - --hash=sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c \ - --hash=sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f \ - --hash=sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21 \ - --hash=sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af \ - --hash=sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36 \ - --hash=sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805 \ - --hash=sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb \ - --hash=sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88 \ - --hash=sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5 \ - --hash=sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072 \ - --hash=sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1 \ - --hash=sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964 \ - --hash=sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7 \ - --hash=sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a \ - --hash=sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd \ - --hash=sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74 \ - --hash=sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43 \ - --hash=sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d \ - --hash=sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457 \ - 
--hash=sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c \ - --hash=sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af \ - --hash=sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf \ - --hash=sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +mypy==1.4.1 \ + --hash=sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042 \ + --hash=sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd \ + --hash=sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2 \ + --hash=sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01 \ + --hash=sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7 \ + --hash=sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3 \ + --hash=sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816 \ + --hash=sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3 \ + --hash=sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc \ + --hash=sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4 \ + --hash=sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b \ + --hash=sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8 \ + --hash=sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c \ + --hash=sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462 \ + --hash=sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7 \ + --hash=sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc \ + --hash=sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258 \ + 
--hash=sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b \ + --hash=sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9 \ + --hash=sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6 \ + --hash=sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f \ + --hash=sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1 \ + --hash=sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828 \ + --hash=sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878 \ + --hash=sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f \ + --hash=sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b # via -r requirements/dev-bookworm-requirements.in -mypy-extensions==0.4.3 \ - --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ - --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via # black # mypy -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.1 \ + --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ + --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f # via + # black # build # pytest # semgrep -pathspec==0.10.3 \ - --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6 \ - --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6 +pathspec==0.11.1 \ + --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \ + --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293 # via black -peewee==3.15.4 
\ - --hash=sha256:2581520c8dfbacd9d580c2719ae259f0637a9e46eda47dfc0ce01864c6366205 +peewee==3.16.2 \ + --hash=sha256:10769981198c7311f84a0ca8db892fa213303a8eb1305deb795a71e7bd606a91 # via semgrep -pep517==0.13.0 \ - --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ - --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 - # via build -pip==22.3.1 \ - --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ - --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 - # via pip-tools -pip-tools==6.12.0 \ - --hash=sha256:8e22fbc84ede7ca522ba4b033c4fcf6a6419adabc75d24747be3d8262504489a \ - --hash=sha256:f441603c63b16f4af0dd5026f7522a49eddec2bc8a4a4979af44e1f6b0a1c13e +pip-tools==6.14.0 \ + --hash=sha256:06366be0e08d86b416407333e998b4d305d5bd925151b08942ed149380ba3e47 \ + --hash=sha256:c5ad042cd27c0b343b10db1db7f77a7d087beafbec59ae6df1bba4d3368dfe8c # via -r requirements/dev-bookworm-requirements.in -platformdirs==2.6.0 \ - --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca \ - --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e +platformdirs==3.8.1 \ + --hash=sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c \ + --hash=sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528 # via black -pluggy==1.0.0 \ - --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ - --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 +pluggy==1.2.0 \ + --hash=sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849 \ + --hash=sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3 # via pytest pycodestyle==2.10.0 \ --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ @@ -225,163 +323,277 @@ pyflakes==3.0.1 \ --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ 
--hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd # via flake8 -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging -pyrsistent==0.19.2 \ - --hash=sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed \ - --hash=sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb \ - --hash=sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a \ - --hash=sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95 \ - --hash=sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712 \ - --hash=sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73 \ - --hash=sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41 \ - --hash=sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b \ - --hash=sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78 \ - --hash=sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab \ - --hash=sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308 \ - --hash=sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425 \ - --hash=sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2 \ - --hash=sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e \ - --hash=sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6 \ - --hash=sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2 \ - --hash=sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a \ - --hash=sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291 \ - --hash=sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584 \ - --hash=sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a \ - 
--hash=sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0 \ - --hash=sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770 - # via jsonschema -pytest==7.2.0 \ - --hash=sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71 \ - --hash=sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59 +pygments==2.15.1 \ + --hash=sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c \ + --hash=sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1 + # via rich +pyproject-hooks==1.0.0 \ + --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ + --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 + # via build +pytest==7.4.0 \ + --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \ + --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a # via # -r requirements/dev-bookworm-requirements.in # pytest-cov # pytest-mock -pytest-cov==4.0.0 \ - --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ - --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 +pytest-cov==4.1.0 \ + --hash=sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6 \ + --hash=sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a # via -r requirements/dev-bookworm-requirements.in -pytest-mock==3.10.0 \ - --hash=sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b \ - --hash=sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f +pytest-mock==3.11.1 \ + --hash=sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39 \ + --hash=sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f # via -r requirements/dev-bookworm-requirements.in python-lsp-jsonrpc==1.0.0 \ --hash=sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7 \ 
--hash=sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd # via semgrep -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +referencing==0.29.1 \ + --hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \ + --hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 + # via semgrep +rich==13.4.2 \ + --hash=sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec \ + --hash=sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898 # via semgrep -ruamel-yaml==0.17.21 \ - --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ - --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af +rpds-py==0.8.10 \ + --hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \ + --hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \ + --hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \ + --hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \ + --hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \ + --hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \ + --hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \ + --hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \ + --hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \ + --hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \ + 
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \ + --hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \ + --hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \ + --hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \ + --hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \ + --hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \ + --hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \ + --hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \ + --hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \ + --hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \ + --hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \ + --hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \ + --hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \ + --hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \ + --hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \ + --hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \ + --hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \ + --hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \ + --hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \ + --hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \ + --hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \ + --hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \ + --hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \ + --hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \ + 
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \ + --hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \ + --hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \ + --hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \ + --hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \ + --hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \ + --hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \ + --hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \ + --hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \ + --hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \ + --hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \ + --hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \ + --hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \ + --hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \ + --hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \ + --hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \ + --hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \ + --hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \ + --hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \ + --hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \ + --hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \ + --hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \ + --hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \ + --hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \ + 
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \ + --hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \ + --hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \ + --hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \ + --hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \ + --hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \ + --hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \ + --hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \ + --hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \ + --hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \ + --hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \ + --hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \ + --hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \ + --hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \ + --hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \ + --hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \ + --hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \ + --hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \ + --hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \ + --hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \ + --hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \ + --hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \ + --hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \ + --hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \ + 
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \ + --hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \ + --hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \ + --hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \ + --hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \ + --hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \ + --hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \ + --hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \ + --hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \ + --hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \ + --hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \ + --hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \ + --hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \ + --hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \ + --hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84 + # via + # jsonschema + # referencing +ruamel-yaml==0.17.32 \ + --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ + --hash=sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2 # via semgrep -semgrep==1.2.0 \ - --hash=sha256:31f5f764ff114e2e56b3a93b09829f738cb9e287af7479e2c4714c77b10dc5c0 \ - --hash=sha256:cba38f882c9fedd00462247474a991715d5c8faf169e38cfbf299c7c89ccad55 \ - --hash=sha256:d7b9ccffab1cbecb7870e6792dc274f6a63133910150f33b6ba07d28f5cf00d5 \ - --hash=sha256:e04dbc4a95ddfc9b07550b09b88f61c5c7d81817fac1c86683d8c2534514ac6c +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + 
--hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + 
--hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 \ + --hash=sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38 + # via ruamel-yaml +semgrep==1.31.2 \ + --hash=sha256:0fc463f8afcc649efaf61c00f17f7c124498c2e95cca9d805fd68d203362cdeb \ + --hash=sha256:30d0662a6ac8d7258af3b383cca1c93da646fc99b60e3247f6acf3dcf764e815 \ + --hash=sha256:c26ce223c60688e317299f97cac9889b3e879dc4ee28097555cad6215086dcf4 \ + --hash=sha256:cd707b74cd76ef5dff974df3fe653967faf1bd0248019f7b6777170cefa4fca5 # via -r requirements/dev-bookworm-requirements.in -setuptools==65.6.3 \ - --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ - --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 - # via pip-tools tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via semgrep -tqdm==4.64.1 \ - --hash=sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4 \ - --hash=sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1 - # via 
semgrep -types-docutils==0.19.1.4 \ - --hash=sha256:1b64b21b609ff1fc7791d3d930f14b56b36ad09029fd97e45e34cc889d671b5f \ - --hash=sha256:870d71f3663141f67a3c59d26d2c54a8c478c842208bb0b345fbf6036f49f561 - # via types-setuptools -types-setuptools==67.3.0.1 \ - --hash=sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f \ - --hash=sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1 +types-setuptools==68.0.0.1 \ + --hash=sha256:a0454ea7ad0711f63a602caa87929003a83cab89224ae1506ed44bb5be8fe7d7 \ + --hash=sha256:cc5acbc464b106104899e9b9eb4955dd47e854753c8d4ee2ce697eaf0f4d74e1 # via -r requirements/dev-bookworm-requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 # via # mypy # semgrep -ujson==5.6.0 \ - --hash=sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b \ - --hash=sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169 \ - --hash=sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f \ - --hash=sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555 \ - --hash=sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983 \ - --hash=sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc \ - --hash=sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e \ - --hash=sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085 \ - --hash=sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77 \ - --hash=sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6 \ - --hash=sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e \ 
- --hash=sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e \ - --hash=sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570 \ - --hash=sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275 \ - --hash=sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6 \ - --hash=sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b \ - --hash=sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99 \ - --hash=sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035 \ - --hash=sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522 \ - --hash=sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679 \ - --hash=sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847 \ - --hash=sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c \ - --hash=sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888 \ - --hash=sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851 \ - --hash=sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f \ - --hash=sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b \ - --hash=sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a \ - --hash=sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf \ - --hash=sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057 \ - --hash=sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e \ - --hash=sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6 \ - --hash=sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458 \ - --hash=sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765 \ - --hash=sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9 \ - --hash=sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf \ - 
--hash=sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804 \ - --hash=sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab \ - --hash=sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490 \ - --hash=sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5 \ - --hash=sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059 \ - --hash=sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d \ - --hash=sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3 \ - --hash=sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10 \ - --hash=sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b \ - --hash=sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e \ - --hash=sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74 \ - --hash=sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f \ - --hash=sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709 \ - --hash=sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56 \ - --hash=sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405 \ - --hash=sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6 \ - --hash=sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490 \ - --hash=sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2 \ - --hash=sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a \ - --hash=sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7 \ - --hash=sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c \ - --hash=sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a \ - --hash=sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7 \ - --hash=sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60 \ - 
--hash=sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d \ - --hash=sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7 \ - --hash=sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04 \ - --hash=sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a \ - --hash=sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e \ - --hash=sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82 +ujson==5.8.0 \ + --hash=sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c \ + --hash=sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7 \ + --hash=sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903 \ + --hash=sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c \ + --hash=sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5 \ + --hash=sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296 \ + --hash=sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f \ + --hash=sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3 \ + --hash=sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4 \ + --hash=sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207 \ + --hash=sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6 \ + --hash=sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb \ + --hash=sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e \ + --hash=sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130 \ + --hash=sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721 \ + --hash=sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0 \ + --hash=sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3 \ + 
--hash=sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc \ + --hash=sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a \ + --hash=sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916 \ + --hash=sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8 \ + --hash=sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6 \ + --hash=sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405 \ + --hash=sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c \ + --hash=sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042 \ + --hash=sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7 \ + --hash=sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425 \ + --hash=sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3 \ + --hash=sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30 \ + --hash=sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282 \ + --hash=sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a \ + --hash=sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a \ + --hash=sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75 \ + --hash=sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07 \ + --hash=sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08 \ + --hash=sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582 \ + --hash=sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9 \ + --hash=sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c \ + --hash=sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba \ + --hash=sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879 \ + --hash=sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94 \ + 
--hash=sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b \ + --hash=sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67 \ + --hash=sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f \ + --hash=sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2 \ + --hash=sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564 \ + --hash=sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76 \ + --hash=sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109 \ + --hash=sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7 \ + --hash=sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377 \ + --hash=sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5 \ + --hash=sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa \ + --hash=sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b \ + --hash=sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83 \ + --hash=sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec \ + --hash=sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf \ + --hash=sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae \ + --hash=sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95 \ + --hash=sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1 \ + --hash=sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f \ + --hash=sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c # via python-lsp-jsonrpc -urllib3==1.26.13 \ - --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ - --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 +urllib3==1.26.16 \ + --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \ + 
--hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14 # via # requests # semgrep @@ -389,7 +601,17 @@ wcmatch==8.4.1 \ --hash=sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7 \ --hash=sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943 # via semgrep -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.40.0 \ + --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ + --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 + # via pip-tools + +# The following packages are considered to be unsafe in a requirements file: +pip==23.1.2 \ + --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ + --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 + # via pip-tools +setuptools==68.0.0 \ + --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ + --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 # via pip-tools diff --git a/requirements/dev-bullseye-requirements.txt b/requirements/dev-bullseye-requirements.txt index 9feaaffa2..ef3de0c31 100644 --- a/requirements/dev-bullseye-requirements.txt +++ b/requirements/dev-bullseye-requirements.txt @@ -2,29 +2,39 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in +# pip-compile --allow-unsafe --config=pyproject.toml --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in # -attrs==21.4.0 \ - --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ - 
--hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via # glom # jsonschema - # pytest + # referencing # semgrep -black==22.12.0 \ - --hash=sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320 \ - --hash=sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351 \ - --hash=sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350 \ - --hash=sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f \ - --hash=sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf \ - --hash=sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148 \ - --hash=sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4 \ - --hash=sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d \ - --hash=sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc \ - --hash=sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d \ - --hash=sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2 \ - --hash=sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f +black==23.7.0 \ + --hash=sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3 \ + --hash=sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb \ + --hash=sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087 \ + --hash=sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320 \ + --hash=sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6 \ + --hash=sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3 \ + --hash=sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc \ + 
--hash=sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f \ + --hash=sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587 \ + --hash=sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91 \ + --hash=sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a \ + --hash=sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad \ + --hash=sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926 \ + --hash=sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9 \ + --hash=sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be \ + --hash=sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd \ + --hash=sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96 \ + --hash=sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491 \ + --hash=sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2 \ + --hash=sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a \ + --hash=sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f \ + --hash=sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995 # via -r requirements/dev-bullseye-requirements.in boltons==21.0.0 \ --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ @@ -37,93 +47,176 @@ bracex==2.3.post1 \ --hash=sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73 \ --hash=sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693 # via wcmatch -build==0.9.0 \ - --hash=sha256:1a07724e891cbd898923145eb7752ee7653674c511378eb9c7691aab1612bc3c \ - --hash=sha256:38a7a2b7a0bdc61a42a0a67509d88c71ecfc37b393baba770fae34e20929ff69 +build==0.10.0 \ + --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ + --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 # via pip-tools 
-certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.5.7 \ + --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \ + --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716 # via requests -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f +charset-normalizer==3.2.0 \ + --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ + --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ + --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \ + --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \ + --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \ + --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \ + --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \ + --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \ + --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \ + --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \ + --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \ + --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \ + --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \ + --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \ + --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \ + --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \ + 
--hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \ + --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \ + --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \ + --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \ + --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \ + --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \ + --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \ + --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \ + --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \ + --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \ + --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \ + --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \ + --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \ + --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \ + --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \ + --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \ + --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \ + --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \ + --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \ + --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \ + --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \ + --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \ + --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \ + --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \ + 
--hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \ + --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \ + --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \ + --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \ + --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \ + --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \ + --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \ + --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \ + --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \ + --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \ + --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \ + --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \ + --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \ + --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \ + --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \ + --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \ + --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \ + --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \ + --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \ + --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \ + --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \ + --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \ + --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \ + --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \ + 
--hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \ + --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \ + --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \ + --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \ + --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \ + --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \ + --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \ + --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \ + --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \ + --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ + --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa # via requests -click==8.1.3 \ - --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ - --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 +click==8.1.4 \ + --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \ + --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37 # via # black # click-option-group # pip-tools # semgrep -click-option-group==0.5.5 \ - --hash=sha256:0f8ca79bc9b1d6fcaafdbe194b17ba1a2dde44ddf19087235c3efed2ad288143 \ - --hash=sha256:78ee474f07a0ca0ef6c0317bb3ebe79387aafb0c4a1e03b1d8b2b0be1e42fc78 +click-option-group==0.5.6 \ + --hash=sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7 \ + --hash=sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777 # via semgrep colorama==0.4.6 \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 # via semgrep -coverage[toml]==6.5.0 \ - 
--hash=sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79 \ - --hash=sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a \ - --hash=sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f \ - --hash=sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a \ - --hash=sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa \ - --hash=sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398 \ - --hash=sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba \ - --hash=sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d \ - --hash=sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf \ - --hash=sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b \ - --hash=sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518 \ - --hash=sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d \ - --hash=sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795 \ - --hash=sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2 \ - --hash=sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e \ - --hash=sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32 \ - --hash=sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745 \ - --hash=sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b \ - --hash=sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e \ - --hash=sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d \ - --hash=sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f \ - --hash=sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660 \ - --hash=sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62 \ - --hash=sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6 \ - 
--hash=sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04 \ - --hash=sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c \ - --hash=sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5 \ - --hash=sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef \ - --hash=sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc \ - --hash=sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae \ - --hash=sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578 \ - --hash=sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466 \ - --hash=sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4 \ - --hash=sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91 \ - --hash=sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0 \ - --hash=sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4 \ - --hash=sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b \ - --hash=sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe \ - --hash=sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b \ - --hash=sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75 \ - --hash=sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b \ - --hash=sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c \ - --hash=sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72 \ - --hash=sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b \ - --hash=sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f \ - --hash=sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e \ - --hash=sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53 \ - --hash=sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3 \ - 
--hash=sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84 \ - --hash=sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987 +coverage[toml]==7.2.7 \ + --hash=sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f \ + --hash=sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2 \ + --hash=sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a \ + --hash=sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a \ + --hash=sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01 \ + --hash=sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6 \ + --hash=sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7 \ + --hash=sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f \ + --hash=sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02 \ + --hash=sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c \ + --hash=sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063 \ + --hash=sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a \ + --hash=sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5 \ + --hash=sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959 \ + --hash=sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97 \ + --hash=sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6 \ + --hash=sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f \ + --hash=sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9 \ + --hash=sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5 \ + --hash=sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f \ + --hash=sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562 \ + 
--hash=sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe \ + --hash=sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9 \ + --hash=sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f \ + --hash=sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb \ + --hash=sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb \ + --hash=sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1 \ + --hash=sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb \ + --hash=sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250 \ + --hash=sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e \ + --hash=sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511 \ + --hash=sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5 \ + --hash=sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59 \ + --hash=sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2 \ + --hash=sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d \ + --hash=sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3 \ + --hash=sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4 \ + --hash=sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de \ + --hash=sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9 \ + --hash=sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833 \ + --hash=sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0 \ + --hash=sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9 \ + --hash=sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d \ + --hash=sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050 \ + --hash=sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d \ + 
--hash=sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6 \ + --hash=sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353 \ + --hash=sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb \ + --hash=sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e \ + --hash=sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8 \ + --hash=sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495 \ + --hash=sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2 \ + --hash=sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd \ + --hash=sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27 \ + --hash=sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1 \ + --hash=sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818 \ + --hash=sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4 \ + --hash=sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e \ + --hash=sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850 \ + --hash=sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3 # via pytest-cov defusedxml==0.7.1 \ --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 # via semgrep -exceptiongroup==1.0.4 \ - --hash=sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828 \ - --hash=sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec +exceptiongroup==1.1.2 \ + --hash=sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5 \ + --hash=sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f # via pytest face==22.0.0 \ --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ @@ -141,85 +234,90 @@ idna==3.4 \ 
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -iniconfig==1.1.1 \ - --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ - --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 +iniconfig==2.0.0 \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -jsonschema==4.17.3 \ - --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ - --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 +jsonschema==4.18.0 \ + --hash=sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4 \ + --hash=sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60 # via semgrep +jsonschema-specifications==2023.6.1 \ + --hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \ + --hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28 + # via jsonschema +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via flake8 -mypy==1.0.0 \ - --hash=sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2 \ - --hash=sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593 \ - --hash=sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52 \ - --hash=sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c \ - --hash=sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f \ - 
--hash=sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21 \ - --hash=sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af \ - --hash=sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36 \ - --hash=sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805 \ - --hash=sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb \ - --hash=sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88 \ - --hash=sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5 \ - --hash=sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072 \ - --hash=sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1 \ - --hash=sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964 \ - --hash=sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7 \ - --hash=sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a \ - --hash=sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd \ - --hash=sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74 \ - --hash=sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43 \ - --hash=sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d \ - --hash=sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457 \ - --hash=sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c \ - --hash=sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af \ - --hash=sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf \ - --hash=sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +mypy==1.4.1 \ + 
--hash=sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042 \ + --hash=sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd \ + --hash=sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2 \ + --hash=sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01 \ + --hash=sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7 \ + --hash=sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3 \ + --hash=sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816 \ + --hash=sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3 \ + --hash=sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc \ + --hash=sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4 \ + --hash=sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b \ + --hash=sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8 \ + --hash=sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c \ + --hash=sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462 \ + --hash=sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7 \ + --hash=sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc \ + --hash=sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258 \ + --hash=sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b \ + --hash=sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9 \ + --hash=sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6 \ + --hash=sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f \ + --hash=sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1 \ + --hash=sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828 \ + --hash=sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878 \ + 
--hash=sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f \ + --hash=sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b # via -r requirements/dev-bullseye-requirements.in -mypy-extensions==0.4.3 \ - --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ - --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via # black # mypy -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.1 \ + --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ + --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f # via + # black # build # pytest # semgrep -pathspec==0.10.3 \ - --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6 \ - --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6 +pathspec==0.11.1 \ + --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \ + --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293 # via black -peewee==3.15.4 \ - --hash=sha256:2581520c8dfbacd9d580c2719ae259f0637a9e46eda47dfc0ce01864c6366205 +peewee==3.16.2 \ + --hash=sha256:10769981198c7311f84a0ca8db892fa213303a8eb1305deb795a71e7bd606a91 # via semgrep -pep517==0.13.0 \ - --hash=sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b \ - --hash=sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59 - # via build -pip==22.3.1 \ - --hash=sha256:65fd48317359f3af8e593943e6ae1506b66325085ea64b706a998c6e83eeaf38 \ - --hash=sha256:908c78e6bc29b676ede1c4d57981d490cb892eb45cd8c214ab6298125119e077 - # via 
pip-tools -pip-tools==6.12.0 \ - --hash=sha256:8e22fbc84ede7ca522ba4b033c4fcf6a6419adabc75d24747be3d8262504489a \ - --hash=sha256:f441603c63b16f4af0dd5026f7522a49eddec2bc8a4a4979af44e1f6b0a1c13e +pip-tools==6.14.0 \ + --hash=sha256:06366be0e08d86b416407333e998b4d305d5bd925151b08942ed149380ba3e47 \ + --hash=sha256:c5ad042cd27c0b343b10db1db7f77a7d087beafbec59ae6df1bba4d3368dfe8c # via -r requirements/dev-bullseye-requirements.in -platformdirs==2.6.0 \ - --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca \ - --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e +platformdirs==3.8.1 \ + --hash=sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c \ + --hash=sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528 # via black -pluggy==1.0.0 \ - --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ - --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 +pluggy==1.2.0 \ + --hash=sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849 \ + --hash=sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3 # via pytest pycodestyle==2.10.0 \ --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ @@ -229,65 +327,157 @@ pyflakes==3.0.1 \ --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd # via flake8 -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging -pyrsistent==0.19.2 \ - --hash=sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed \ - --hash=sha256:111156137b2e71f3a9936baf27cb322e8024dac3dc54ec7fb9f0bcf3249e68bb \ - --hash=sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a \ - 
--hash=sha256:21455e2b16000440e896ab99e8304617151981ed40c29e9507ef1c2e4314ee95 \ - --hash=sha256:2aede922a488861de0ad00c7630a6e2d57e8023e4be72d9d7147a9fcd2d30712 \ - --hash=sha256:3ba4134a3ff0fc7ad225b6b457d1309f4698108fb6b35532d015dca8f5abed73 \ - --hash=sha256:456cb30ca8bff00596519f2c53e42c245c09e1a4543945703acd4312949bfd41 \ - --hash=sha256:71d332b0320642b3261e9fee47ab9e65872c2bd90260e5d225dabeed93cbd42b \ - --hash=sha256:879b4c2f4d41585c42df4d7654ddffff1239dc4065bc88b745f0341828b83e78 \ - --hash=sha256:9cd3e9978d12b5d99cbdc727a3022da0430ad007dacf33d0bf554b96427f33ab \ - --hash=sha256:a178209e2df710e3f142cbd05313ba0c5ebed0a55d78d9945ac7a4e09d923308 \ - --hash=sha256:b39725209e06759217d1ac5fcdb510e98670af9e37223985f330b611f62e7425 \ - --hash=sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2 \ - --hash=sha256:bfd880614c6237243ff53a0539f1cb26987a6dc8ac6e66e0c5a40617296a045e \ - --hash=sha256:c43bec251bbd10e3cb58ced80609c5c1eb238da9ca78b964aea410fb820d00d6 \ - --hash=sha256:d690b18ac4b3e3cab73b0b7aa7dbe65978a172ff94970ff98d82f2031f8971c2 \ - --hash=sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a \ - --hash=sha256:dec3eac7549869365fe263831f576c8457f6c833937c68542d08fde73457d291 \ - --hash=sha256:e371b844cec09d8dc424d940e54bba8f67a03ebea20ff7b7b0d56f526c71d584 \ - --hash=sha256:e5d8f84d81e3729c3b506657dddfe46e8ba9c330bf1858ee33108f8bb2adb38a \ - --hash=sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0 \ - --hash=sha256:f1258f4e6c42ad0b20f9cfcc3ada5bd6b83374516cd01c0960e3cb75fdca6770 - # via jsonschema -pytest==7.2.0 \ - --hash=sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71 \ - --hash=sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59 +pygments==2.15.1 \ + --hash=sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c \ + --hash=sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1 + # via rich +pyproject-hooks==1.0.0 \ + 
--hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ + --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 + # via build +pytest==7.4.0 \ + --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \ + --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a # via # -r requirements/dev-bullseye-requirements.in # pytest-cov # pytest-mock -pytest-cov==4.0.0 \ - --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ - --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 +pytest-cov==4.1.0 \ + --hash=sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6 \ + --hash=sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a # via -r requirements/dev-bullseye-requirements.in -pytest-mock==3.10.0 \ - --hash=sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b \ - --hash=sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f +pytest-mock==3.11.1 \ + --hash=sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39 \ + --hash=sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f # via -r requirements/dev-bullseye-requirements.in python-lsp-jsonrpc==1.0.0 \ --hash=sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7 \ --hash=sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd # via semgrep -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +referencing==0.29.1 \ + --hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \ + --hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 \ + 
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via semgrep -ruamel-yaml==0.17.21 \ - --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ - --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af +rich==13.4.2 \ + --hash=sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec \ + --hash=sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898 + # via semgrep +rpds-py==0.8.10 \ + --hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \ + --hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \ + --hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \ + --hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \ + --hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \ + --hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \ + --hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \ + --hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \ + --hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \ + --hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \ + --hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \ + --hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \ + --hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \ + --hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \ + --hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \ + --hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \ + --hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \ + 
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \ + --hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \ + --hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \ + --hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \ + --hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \ + --hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \ + --hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \ + --hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \ + --hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \ + --hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \ + --hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \ + --hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \ + --hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \ + --hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \ + --hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \ + --hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \ + --hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \ + --hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \ + --hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \ + --hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \ + --hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \ + --hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \ + --hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \ + --hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \ + 
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \ + --hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \ + --hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \ + --hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \ + --hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \ + --hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \ + --hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \ + --hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \ + --hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \ + --hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \ + --hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \ + --hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \ + --hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \ + --hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \ + --hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \ + --hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \ + --hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \ + --hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \ + --hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \ + --hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \ + --hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \ + --hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \ + --hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \ + --hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \ + 
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \ + --hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \ + --hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \ + --hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \ + --hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \ + --hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \ + --hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \ + --hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \ + --hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \ + --hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \ + --hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \ + --hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \ + --hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \ + --hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \ + --hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \ + --hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \ + --hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \ + --hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \ + --hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \ + --hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \ + --hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \ + --hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \ + --hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \ + --hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \ + 
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \ + --hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \ + --hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \ + --hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \ + --hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \ + --hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \ + --hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \ + --hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84 + # via + # jsonschema + # referencing +ruamel-yaml==0.17.32 \ + --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ + --hash=sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2 # via semgrep ruamel-yaml-clib==0.2.7 \ --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81 \ --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ @@ -298,13 +488,13 @@ ruamel-yaml-clib==0.2.7 \ --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ - --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ 
--hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf \ --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ @@ -313,23 +503,21 @@ ruamel-yaml-clib==0.2.7 \ --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122 \ --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ - --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 \ + --hash=sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38 # via ruamel-yaml -semgrep==1.2.0 \ - --hash=sha256:31f5f764ff114e2e56b3a93b09829f738cb9e287af7479e2c4714c77b10dc5c0 \ - --hash=sha256:cba38f882c9fedd00462247474a991715d5c8faf169e38cfbf299c7c89ccad55 \ - 
--hash=sha256:d7b9ccffab1cbecb7870e6792dc274f6a63133910150f33b6ba07d28f5cf00d5 \ - --hash=sha256:e04dbc4a95ddfc9b07550b09b88f61c5c7d81817fac1c86683d8c2534514ac6c +semgrep==1.31.2 \ + --hash=sha256:0fc463f8afcc649efaf61c00f17f7c124498c2e95cca9d805fd68d203362cdeb \ + --hash=sha256:30d0662a6ac8d7258af3b383cca1c93da646fc99b60e3247f6acf3dcf764e815 \ + --hash=sha256:c26ce223c60688e317299f97cac9889b3e879dc4ee28097555cad6215086dcf4 \ + --hash=sha256:cd707b74cd76ef5dff974df3fe653967faf1bd0248019f7b6777170cefa4fca5 # via -r requirements/dev-bullseye-requirements.in -setuptools==65.6.3 \ - --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \ - --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75 - # via pip-tools tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f @@ -338,98 +526,87 @@ tomli==2.0.1 \ # build # coverage # mypy - # pep517 + # pip-tools + # pyproject-hooks # pytest # semgrep -tqdm==4.64.1 \ - --hash=sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4 \ - --hash=sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1 - # via semgrep -types-docutils==0.19.1.4 \ - --hash=sha256:1b64b21b609ff1fc7791d3d930f14b56b36ad09029fd97e45e34cc889d671b5f \ - --hash=sha256:870d71f3663141f67a3c59d26d2c54a8c478c842208bb0b345fbf6036f49f561 - # via types-setuptools -types-setuptools==67.3.0.1 \ - --hash=sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f \ - --hash=sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1 +types-setuptools==68.0.0.1 \ + --hash=sha256:a0454ea7ad0711f63a602caa87929003a83cab89224ae1506ed44bb5be8fe7d7 \ + --hash=sha256:cc5acbc464b106104899e9b9eb4955dd47e854753c8d4ee2ce697eaf0f4d74e1 # via -r requirements/dev-bullseye-requirements.in -typing-extensions==4.4.0 \ - 
--hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 # via # black # mypy # semgrep -ujson==5.6.0 \ - --hash=sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b \ - --hash=sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169 \ - --hash=sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f \ - --hash=sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555 \ - --hash=sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983 \ - --hash=sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc \ - --hash=sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e \ - --hash=sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085 \ - --hash=sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77 \ - --hash=sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6 \ - --hash=sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e \ - --hash=sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e \ - --hash=sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570 \ - --hash=sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275 \ - --hash=sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6 \ - --hash=sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b \ - --hash=sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99 \ - --hash=sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035 \ - --hash=sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522 \ - 
--hash=sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679 \ - --hash=sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847 \ - --hash=sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c \ - --hash=sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888 \ - --hash=sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851 \ - --hash=sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f \ - --hash=sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b \ - --hash=sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a \ - --hash=sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf \ - --hash=sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057 \ - --hash=sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e \ - --hash=sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6 \ - --hash=sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458 \ - --hash=sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765 \ - --hash=sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9 \ - --hash=sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf \ - --hash=sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804 \ - --hash=sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab \ - --hash=sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490 \ - --hash=sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5 \ - --hash=sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059 \ - --hash=sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d \ - --hash=sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3 \ - --hash=sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10 \ - 
--hash=sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b \ - --hash=sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e \ - --hash=sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74 \ - --hash=sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f \ - --hash=sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709 \ - --hash=sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56 \ - --hash=sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405 \ - --hash=sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6 \ - --hash=sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490 \ - --hash=sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2 \ - --hash=sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a \ - --hash=sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7 \ - --hash=sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c \ - --hash=sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a \ - --hash=sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7 \ - --hash=sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60 \ - --hash=sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d \ - --hash=sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7 \ - --hash=sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04 \ - --hash=sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a \ - --hash=sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e \ - --hash=sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82 +ujson==5.8.0 \ + --hash=sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c \ + 
--hash=sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7 \ + --hash=sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903 \ + --hash=sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c \ + --hash=sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5 \ + --hash=sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296 \ + --hash=sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f \ + --hash=sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3 \ + --hash=sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4 \ + --hash=sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207 \ + --hash=sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6 \ + --hash=sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb \ + --hash=sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e \ + --hash=sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130 \ + --hash=sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721 \ + --hash=sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0 \ + --hash=sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3 \ + --hash=sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc \ + --hash=sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a \ + --hash=sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916 \ + --hash=sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8 \ + --hash=sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6 \ + --hash=sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405 \ + --hash=sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c \ + --hash=sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042 \ + 
--hash=sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7 \ + --hash=sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425 \ + --hash=sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3 \ + --hash=sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30 \ + --hash=sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282 \ + --hash=sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a \ + --hash=sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a \ + --hash=sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75 \ + --hash=sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07 \ + --hash=sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08 \ + --hash=sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582 \ + --hash=sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9 \ + --hash=sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c \ + --hash=sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba \ + --hash=sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879 \ + --hash=sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94 \ + --hash=sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b \ + --hash=sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67 \ + --hash=sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f \ + --hash=sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2 \ + --hash=sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564 \ + --hash=sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76 \ + --hash=sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109 \ + --hash=sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7 \ + 
--hash=sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377 \ + --hash=sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5 \ + --hash=sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa \ + --hash=sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b \ + --hash=sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83 \ + --hash=sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec \ + --hash=sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf \ + --hash=sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae \ + --hash=sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95 \ + --hash=sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1 \ + --hash=sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f \ + --hash=sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c # via python-lsp-jsonrpc -urllib3==1.26.13 \ - --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ - --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 +urllib3==1.26.16 \ + --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \ + --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14 # via # requests # semgrep @@ -437,7 +614,17 @@ wcmatch==8.4.1 \ --hash=sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7 \ --hash=sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943 # via semgrep -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.40.0 \ + --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ + --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 + # via 
pip-tools + +# The following packages are considered to be unsafe in a requirements file: +pip==23.1.2 \ + --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ + --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 + # via pip-tools +setuptools==68.0.0 \ + --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ + --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 # via pip-tools From 2676b370fe72b9ecd6667e03ce5f31b19a334587 Mon Sep 17 00:00:00 2001 From: Ro Date: Tue, 11 Jul 2023 09:21:52 -0700 Subject: [PATCH 335/352] Satisfy linter --- securedrop_export/disk/legacy_status.py | 1 - securedrop_export/disk/service.py | 1 - securedrop_export/disk/status.py | 1 - securedrop_export/disk/volume.py | 1 - securedrop_export/print/status.py | 1 - tests/disk/test_cli.py | 8 -------- tests/print/test_service.py | 4 ---- tests/test_directory.py | 1 - tests/test_main.py | 1 - 9 files changed, 19 deletions(-) diff --git a/securedrop_export/disk/legacy_status.py b/securedrop_export/disk/legacy_status.py index fa0bdf869..77f0fa6ce 100644 --- a/securedrop_export/disk/legacy_status.py +++ b/securedrop_export/disk/legacy_status.py @@ -2,7 +2,6 @@ class Status(BaseStatus): - LEGACY_ERROR_GENERIC = "ERROR_GENERIC" # Legacy USB preflight related diff --git a/securedrop_export/disk/service.py b/securedrop_export/disk/service.py index 0c983c516..1db9a8338 100644 --- a/securedrop_export/disk/service.py +++ b/securedrop_export/disk/service.py @@ -50,7 +50,6 @@ def scan_single_device(self, blkid: str) -> Status: # See if it's a LUKS drive if self.cli.is_luks_volume(target): - # Returns Volume or throws ExportException self.volume = self.cli.get_luks_volume(target) diff --git a/securedrop_export/disk/status.py b/securedrop_export/disk/status.py index 285d9f8b9..7ce713913 100644 --- a/securedrop_export/disk/status.py +++ b/securedrop_export/disk/status.py @@ -2,7 +2,6 @@ class 
Status(BaseStatus): - NO_DEVICE_DETECTED = "NO_DEVICE_DETECTED" INVALID_DEVICE_DETECTED = ( "INVALID_DEVICE_DETECTED" # Multi partitioned, not encrypted, etc diff --git a/securedrop_export/disk/volume.py b/securedrop_export/disk/volume.py index 6d41bce47..aae7d9332 100644 --- a/securedrop_export/disk/volume.py +++ b/securedrop_export/disk/volume.py @@ -12,7 +12,6 @@ class EncryptionScheme(Enum): class Volume: - MAPPED_VOLUME_PREFIX = "/dev/mapper/" """ diff --git a/securedrop_export/print/status.py b/securedrop_export/print/status.py index 5ec81c8a3..116316a46 100644 --- a/securedrop_export/print/status.py +++ b/securedrop_export/print/status.py @@ -2,7 +2,6 @@ class Status(BaseStatus): - # Printer preflight related errors ERROR_MULTIPLE_PRINTERS_FOUND = "ERROR_MULTIPLE_PRINTERS_FOUND" ERROR_PRINTER_NOT_FOUND = "ERROR_PRINTER_NOT_FOUND" diff --git a/tests/disk/test_cli.py b/tests/disk/test_cli.py index 8326bb8e9..798980905 100644 --- a/tests/disk/test_cli.py +++ b/tests/disk/test_cli.py @@ -89,7 +89,6 @@ def test_get_removable_devices_none_removable(self, mocker): "subprocess.Popen", side_effect=subprocess.CalledProcessError(1, "Popen") ) def test_get_connected_devices_error(self, mocked_subprocess): - with pytest.raises(ExportException): self.cli.get_connected_devices() @@ -108,7 +107,6 @@ def test_get_partitioned_device_one_partition(self, mocked_call): @mock.patch("subprocess.check_output", return_value=_SAMPLE_OUTPUT_MULTI_PART) def test_get_partitioned_device_multi_partition(self, mocked_call): - with pytest.raises(ExportException) as ex: self.cli.get_partitioned_device(_SAMPLE_OUTPUT_MULTI_PART) @@ -126,7 +124,6 @@ def test_get_partitioned_device_lsblk_error(self, mocked_subprocess): side_effect=subprocess.CalledProcessError(1, "check_output"), ) def test_get_partitioned_device_multi_partition_error(self, mocked_call): - # Make sure we wrap CalledProcessError and throw our own exception with pytest.raises(ExportException) as ex: 
self.cli.get_partitioned_device(_DEFAULT_USB_DEVICE) @@ -135,7 +132,6 @@ def test_get_partitioned_device_multi_partition_error(self, mocked_call): @mock.patch("subprocess.check_call", return_value=0) def test_is_luks_volume_true(self, mocked_call): - # `sudo cryptsetup isLuks` returns 0 if true assert self.cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) @@ -144,7 +140,6 @@ def test_is_luks_volume_true(self, mocked_call): side_effect=subprocess.CalledProcessError(1, "check_call"), ) def test_is_luks_volume_false(self, mocked_subprocess): - # `sudo cryptsetup isLuks` returns 1 if false; CalledProcessError is thrown assert not self.cli.is_luks_volume(_SAMPLE_OUTPUT_ONE_PART) @@ -160,7 +155,6 @@ def test__get_luks_name_from_headers(self, mocked_subprocess): "subprocess.check_output", return_value=b"corrupted-or-invalid-header\n" ) def test__get_luks_name_from_headers_error_invalid(self, mocked_subprocess): - with pytest.raises(ExportException) as ex: self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) @@ -168,7 +162,6 @@ def test__get_luks_name_from_headers_error_invalid(self, mocked_subprocess): @mock.patch("subprocess.check_output", return_value=b"\n") def test__get_luks_name_from_headers_error_no_header(self, mocked_subprocess): - with pytest.raises(ExportException) as ex: self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) @@ -178,7 +171,6 @@ def test__get_luks_name_from_headers_error_no_header(self, mocked_subprocess): def test__get_luks_name_from_headers_error_nothing_returned( self, mocked_subprocess ): - with pytest.raises(ExportException) as ex: self.cli._get_luks_name_from_headers(_DEFAULT_USB_DEVICE) diff --git a/tests/print/test_service.py b/tests/print/test_service.py index ffaee6861..cf5c6ca1a 100644 --- a/tests/print/test_service.py +++ b/tests/print/test_service.py @@ -265,7 +265,6 @@ def test__check_printer_setup(self, printers, mocker): @mock.patch("subprocess.check_output", return_value=SAMPLE_OUTPUT_NO_PRINTER) def 
test__check_printer_setup_error_no_printer(self, mock_output): - with pytest.raises(ExportException) as ex: self.service._check_printer_setup() assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_FOUND @@ -277,7 +276,6 @@ def test__check_printer_setup_error_no_printer(self, mock_output): + SAMPLE_OUTPUT_LASERJET_PRINTER, ) def test__check_printer_setup_error_too_many_printers(self, mock_output): - with pytest.raises(ExportException) as ex: self.service._check_printer_setup() assert ex.value.sdstatus is Status.ERROR_MULTIPLE_PRINTERS_FOUND @@ -286,7 +284,6 @@ def test__check_printer_setup_error_too_many_printers(self, mock_output): "subprocess.check_output", return_value=SAMPLE_OUTPUT_UNSUPPORTED_PRINTER ) def test__check_printer_setup_error_unsupported_printer(self, mock_output): - with pytest.raises(ExportException) as ex: self.service._check_printer_setup() assert ex.value.sdstatus is Status.ERROR_PRINTER_NOT_SUPPORTED @@ -296,7 +293,6 @@ def test__check_printer_setup_error_unsupported_printer(self, mock_output): side_effect=subprocess.CalledProcessError(1, "check_output"), ) def test__check_printer_setup_error_checking_printer(self, mock_output): - with pytest.raises(ExportException) as ex: self.service._check_printer_setup() assert ex.value.sdstatus is Status.ERROR_UNKNOWN diff --git a/tests/test_directory.py b/tests/test_directory.py index 2f0a3a9ef..b0857f59c 100644 --- a/tests/test_directory.py +++ b/tests/test_directory.py @@ -8,7 +8,6 @@ class TestDirectory: - _REL_TRAVERSAL = "../../../whee" _SAFE_RELPATH = "./hi" _SAFE_RELPATH2 = "yay/a/path" diff --git a/tests/test_main.py b/tests/test_main.py index 41fce7f62..06da9ef66 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -54,7 +54,6 @@ def test_write_status(self, status, capsys): @pytest.mark.parametrize("invalid_status", ["foo", ";ls", "&& echo 0", None]) def test_write_status_error(self, invalid_status, capsys): - with pytest.raises(ValueError): _write_status(Status(invalid_status)) From 
2c2e0d4f08d751f5d24735f286f6bcb32176ebef Mon Sep 17 00:00:00 2001 From: Michael Z Date: Wed, 12 Jul 2023 10:25:29 -0400 Subject: [PATCH 336/352] Update dev requirements (with Python 3.8) --- requirements/dev-requirements.txt | 189 +++++++++++++++--------------- 1 file changed, 97 insertions(+), 92 deletions(-) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 4deb73f0b..6827b970b 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -1,9 +1,13 @@ # -# This file is autogenerated by pip-compile with python 3.9 +# This file is autogenerated by pip-compile with python 3.8 # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in # +build==0.10.0 \ + --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ + --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 + # via pip-tools cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ @@ -70,34 +74,34 @@ cffi==1.15.1 \ --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 # via cryptography -click==8.0.1 \ - --hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \ - --hash=sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6 +click==8.1.4 \ + --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \ + --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37 # via pip-tools -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - 
--hash=sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885 \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + 
--hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via # types-pyopenssl # types-redis @@ -109,45 +113,45 @@ mccabe==0.7.0 \ --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e # via flake8 
-mypy==1.0.0 \ - --hash=sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2 \ - --hash=sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593 \ - --hash=sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52 \ - --hash=sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c \ - --hash=sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f \ - --hash=sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21 \ - --hash=sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af \ - --hash=sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36 \ - --hash=sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805 \ - --hash=sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb \ - --hash=sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88 \ - --hash=sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5 \ - --hash=sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072 \ - --hash=sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1 \ - --hash=sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964 \ - --hash=sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7 \ - --hash=sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a \ - --hash=sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd \ - --hash=sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74 \ - --hash=sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43 \ - --hash=sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d \ - --hash=sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457 \ - --hash=sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c \ - 
--hash=sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af \ - --hash=sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf \ - --hash=sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d +mypy==1.4.1 \ + --hash=sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042 \ + --hash=sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd \ + --hash=sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2 \ + --hash=sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01 \ + --hash=sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7 \ + --hash=sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3 \ + --hash=sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816 \ + --hash=sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3 \ + --hash=sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc \ + --hash=sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4 \ + --hash=sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b \ + --hash=sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8 \ + --hash=sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c \ + --hash=sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462 \ + --hash=sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7 \ + --hash=sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc \ + --hash=sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258 \ + --hash=sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b \ + --hash=sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9 \ + --hash=sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6 \ + 
--hash=sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f \ + --hash=sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1 \ + --hash=sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828 \ + --hash=sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878 \ + --hash=sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f \ + --hash=sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b # via -r requirements/dev-requirements.in mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via mypy -pep517==0.11.0 \ - --hash=sha256:3fa6b85b9def7ba4de99fb7f96fe3f02e2d630df8aa2720a5cf3b183f087a738 \ - --hash=sha256:e1ba5dffa3a131387979a68ff3e391ac7d645be409216b961bc2efe6468ab0b2 - # via pip-tools -pip-tools==6.2.0 \ - --hash=sha256:77727ef7457d1865e61fe34c2b1439f9b971b570cc232616a22ce82ab89d357d \ - --hash=sha256:9ed38c73da4993e531694ea151f77048b4dbf2ba7b94c4a569daa39568cc6564 +packaging==23.1 \ + --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ + --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f + # via build +pip-tools==6.14.0 \ + --hash=sha256:06366be0e08d86b416407333e998b4d305d5bd925151b08942ed149380ba3e47 \ + --hash=sha256:c5ad042cd27c0b343b10db1db7f77a7d087beafbec59ae6df1bba4d3368dfe8c # via -r requirements/dev-requirements.in pycodestyle==2.10.0 \ --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ @@ -161,48 +165,49 @@ pyflakes==3.0.1 \ --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd # via flake8 +pyproject-hooks==1.0.0 \ + --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ + 
--hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 + # via build redis==3.3.11 \ --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 # via -r requirements/requirements.in -tomli==1.2.1 \ - --hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \ - --hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via + # build # mypy - # pep517 -types-docutils==0.19.1.4 \ - --hash=sha256:1b64b21b609ff1fc7791d3d930f14b56b36ad09029fd97e45e34cc889d671b5f \ - --hash=sha256:870d71f3663141f67a3c59d26d2c54a8c478c842208bb0b345fbf6036f49f561 - # via types-setuptools -types-pyopenssl==23.0.0.3 \ - --hash=sha256:6ca54d593f8b946f9570f9ed7457c41da3b518feff5e344851941a6209bea62b \ - --hash=sha256:847ab17a16475a882dc29898648a6a35ad0d3e11a5bba5aa8ab2f3435a8647cb + # pip-tools + # pyproject-hooks +types-pyopenssl==23.2.0.1 \ + --hash=sha256:0568553f104466f1b8e0db3360fbe6770137d02e21a1a45c209bf2b1b03d90d4 \ + --hash=sha256:beeb5d22704c625a1e4b6dc756355c5b4af0b980138b702a9d9f932acf020903 # via types-redis -types-redis==4.5.1.1 \ - --hash=sha256:081dfeec730df6e3f32ccbdafe3198873b7c02516c22d79cc2a40efdd69a3963 \ - --hash=sha256:c072e4824855f46d0a968509c3e0fa4789fc13b62d472064527bad3d1815aeed +types-redis==4.6.0.2 \ + --hash=sha256:a98f3386f44d045057696f3efc8869c53dda0060610e0fe3d8a4d391e2a8916a \ + --hash=sha256:d0efcd96f65fd2036437c29d8c12566cfdc549345d73eddacb0488b81aff9f9e # via -r requirements/dev-requirements.in -types-setuptools==67.3.0.1 \ - --hash=sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f \ - --hash=sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1 +types-setuptools==68.0.0.1 \ + 
--hash=sha256:a0454ea7ad0711f63a602caa87929003a83cab89224ae1506ed44bb5be8fe7d7 \ + --hash=sha256:cc5acbc464b106104899e9b9eb4955dd47e854753c8d4ee2ce697eaf0f4d74e1 # via -r requirements/dev-requirements.in -typing-extensions==3.10.0.0 \ - --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ - --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ - --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 # via mypy -wheel==0.37.0 \ - --hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \ - --hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad +wheel==0.40.0 \ + --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ + --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 # via pip-tools # The following packages are considered to be unsafe in a requirements file: -pip==21.2.4 \ - --hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \ - --hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323 +pip==23.1.2 \ + --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ + --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 # via pip-tools -setuptools==57.4.0 \ - --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ - --hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6 +setuptools==68.0.0 \ + --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ + --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 # via pip-tools From 98d104e9a876673f97e13c0a1df3706ffc94a5da Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: 
Wed, 26 Jul 2023 16:42:42 -0700 Subject: [PATCH 337/352] Remove CodeQL In other repositories we've decided that this analysis adds limited value for the time being. --- .github/workflows/codeql-analysis.yml | 71 --------------------------- 1 file changed, 71 deletions(-) delete mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 0e907d316..000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,71 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -name: "CodeQL" - -on: - push: - branches: [main] - pull_request: - # The branches below must be a subset of the branches above - branches: [main] - schedule: - - cron: '0 3 * * 6' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - # Override automatic language detection by changing the below list - # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] - language: ['python'] - # Learn more... - # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - with: - # We must fetch at least the immediate parents so that if this is - # a pull request then we can checkout the head. - fetch-depth: 2 - - # If this run was triggered by a pull request event, then checkout - # the head of the pull request instead of the merge commit. - - run: git checkout HEAD^2 - if: ${{ github.event_name == 'pull_request' }} - - # Initializes the CodeQL tools for scanning. 
- - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v1 - - # ℹ️ Command-line programs to run using the OS shell. - # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 From e1cd07d1151caf7a49c0cbda3a867c570de05c5d Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 31 Jul 2023 13:28:35 -0400 Subject: [PATCH 338/352] Update certifi to 2023.7.22 Fixes CVE-2023-37920. 
--- requirements/dev-requirements.txt | 6 +++--- requirements/requirements.in | 2 +- requirements/requirements.txt | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 145a4b967..e14f0a563 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -35,9 +35,9 @@ build==0.10.0 \ --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 # via pip-tools -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via # -r requirements/requirements.in # requests diff --git a/requirements/requirements.in b/requirements/requirements.in index 8f11a868d..00b5b4ca6 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -1,4 +1,4 @@ -certifi>=2022.12.07 +certifi>=2023.7.22 charset-normalizer>=2.0.4 idna>=2.7 pyyaml==5.4.1 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 952bd0c2f..f5095c605 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in # -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via # -r 
requirements/requirements.in # requests From 2420769045297371a938f8cd4fe2503b70b0fd1f Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 31 Jul 2023 14:39:05 -0400 Subject: [PATCH 339/352] Update build-requirements with certifi localwheel hash Added in . --- requirements/build-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/build-requirements.txt b/requirements/build-requirements.txt index 36355f99d..abbd5984e 100644 --- a/requirements/build-requirements.txt +++ b/requirements/build-requirements.txt @@ -1,4 +1,4 @@ -certifi==2022.12.7 --hash=sha256:7f205a1a4f02f4970fb5d0e16457964bb30d6b678a766515278bc56e6eeb645f +certifi==2023.7.22 --hash=sha256:920fc822272317b0d13e60eef0405ddd7f921e08dd21150a9eb613c863e31c2a charset-normalizer==2.0.4 --hash=sha256:cd9a4492eef4e5276c07f9c0dc1338e7be3e95f2a536bf2c5b620b1f27d03d74 idna==3.2 --hash=sha256:691d9fc304505c65ea9ceb8eb7385d63988e344c065cacbbd2156ff9bdfcf0c1 pyyaml==5.4.1 --hash=sha256:9608c1b459ff310fe7fa78e8a9e12767a9a0ea9e3fa7cce116db58f95b61f56f --hash=sha256:f7190863a72d6eb89ed92e345e178a0803c439fd7126985b62c1c113cb01e534 From cf8bf957f8324b5d4d748d2dde8e74b08348652e Mon Sep 17 00:00:00 2001 From: Michael Z Date: Wed, 26 Jul 2023 09:48:01 -0400 Subject: [PATCH 340/352] Bump certifi dev dependency due to removal of root certificate --- requirements/dev-bookworm-requirements.in | 1 + requirements/dev-bookworm-requirements.txt | 10 ++++++---- requirements/dev-bullseye-requirements.in | 1 + requirements/dev-bullseye-requirements.txt | 10 ++++++---- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/requirements/dev-bookworm-requirements.in b/requirements/dev-bookworm-requirements.in index 8bad67377..4e3eb791b 100644 --- a/requirements/dev-bookworm-requirements.in +++ b/requirements/dev-bookworm-requirements.in @@ -2,6 +2,7 @@ -r requirements.in black +certifi>=2023.07.22 flake8 pip-tools pytest diff --git a/requirements/dev-bookworm-requirements.txt 
b/requirements/dev-bookworm-requirements.txt index 380bb8c5c..c89930a71 100644 --- a/requirements/dev-bookworm-requirements.txt +++ b/requirements/dev-bookworm-requirements.txt @@ -51,10 +51,12 @@ build==0.10.0 \ --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 # via pip-tools -certifi==2023.5.7 \ - --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \ - --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716 - # via requests +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 + # via + # -r requirements/dev-bookworm-requirements.in + # requests charset-normalizer==3.2.0 \ --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ diff --git a/requirements/dev-bullseye-requirements.in b/requirements/dev-bullseye-requirements.in index b75bf7f7c..43216f80c 100644 --- a/requirements/dev-bullseye-requirements.in +++ b/requirements/dev-bullseye-requirements.in @@ -2,6 +2,7 @@ -r requirements.in black +certifi>=2023.07.22 flake8 pip-tools pytest diff --git a/requirements/dev-bullseye-requirements.txt b/requirements/dev-bullseye-requirements.txt index ef3de0c31..8ec35fd7a 100644 --- a/requirements/dev-bullseye-requirements.txt +++ b/requirements/dev-bullseye-requirements.txt @@ -51,10 +51,12 @@ build==0.10.0 \ --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 # via pip-tools -certifi==2023.5.7 \ - --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \ - --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716 - # via requests 
+certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 + # via + # -r requirements/dev-bullseye-requirements.in + # requests charset-normalizer==3.2.0 \ --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ From bcc250b90e538334ce58b6c94609f450cea8f1fa Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Thu, 10 Aug 2023 16:24:41 -0400 Subject: [PATCH 341/352] Use types-redis<4, drop cryptography and other dev dependencies We're still using redis==3.3.11, so we should be pulling in those type stubs and not the ones for v4. Coincidentally, those stubs also happen to not have dependencies on cryptography and types-pyOpenSSL, which is very nice. Refs . --- requirements/dev-requirements.in | 2 +- requirements/dev-requirements.txt | 108 +----------------------------- 2 files changed, 4 insertions(+), 106 deletions(-) diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in index 28f7362de..3c10a63db 100644 --- a/requirements/dev-requirements.in +++ b/requirements/dev-requirements.in @@ -2,5 +2,5 @@ flake8 pip-tools mypy -types-redis +types-redis<4 types-setuptools diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 6827b970b..d58b33fb0 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -8,103 +8,10 @@ build==0.10.0 \ --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 # via pip-tools -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - 
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - 
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - 
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 - # via cryptography click==8.1.4 \ --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \ --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37 # via pip-tools -cryptography==41.0.2 \ - --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ - --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ - --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ - --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ - --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ - --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ - --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ - 
--hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ - --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ - --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ - --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ - --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ - --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ - --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ - --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ - --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ - --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ - --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ - --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ - --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ - --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ - --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ - --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 - # via - # types-pyopenssl - # types-redis flake8==6.0.0 \ --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 @@ -157,10 +64,6 @@ pycodestyle==2.10.0 \ --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 # via flake8 -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 - # via cffi pyflakes==3.0.1 \ 
--hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd @@ -180,14 +83,9 @@ tomli==2.0.1 \ # build # mypy # pip-tools - # pyproject-hooks -types-pyopenssl==23.2.0.1 \ - --hash=sha256:0568553f104466f1b8e0db3360fbe6770137d02e21a1a45c209bf2b1b03d90d4 \ - --hash=sha256:beeb5d22704c625a1e4b6dc756355c5b4af0b980138b702a9d9f932acf020903 - # via types-redis -types-redis==4.6.0.2 \ - --hash=sha256:a98f3386f44d045057696f3efc8869c53dda0060610e0fe3d8a4d391e2a8916a \ - --hash=sha256:d0efcd96f65fd2036437c29d8c12566cfdc549345d73eddacb0488b81aff9f9e +types-redis==3.5.18 \ + --hash=sha256:15482304e8848c63b383b938ffaba7ebe0b7f8f33381ecc450ee03935213e166 \ + --hash=sha256:5c55c4b9e8ebdc6d57d4e47900b77d99f19ca0a563264af3f701246ed0926335 # via -r requirements/dev-requirements.in types-setuptools==68.0.0.1 \ --hash=sha256:a0454ea7ad0711f63a602caa87929003a83cab89224ae1506ed44bb5be8fe7d7 \ From ed54ff40115e8db7ab1d0d23f76769442f04151c Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Thu, 24 Aug 2023 19:07:25 -0400 Subject: [PATCH 342/352] Switch dependency management to Poetry --- MANIFEST.in | 3 +- Makefile | 62 +- README.md | 43 +- ...requirements.txt => build-requirements.txt | 0 poetry.lock | 1020 +++++++++++++++++ pyproject.toml | 25 + requirements/dev-requirements.in | 9 - requirements/dev-requirements.txt | 518 --------- requirements/requirements.in | 6 - requirements/requirements.txt | 65 -- 10 files changed, 1090 insertions(+), 661 deletions(-) rename requirements/build-requirements.txt => build-requirements.txt (100%) create mode 100644 poetry.lock delete mode 100644 requirements/dev-requirements.in delete mode 100644 requirements/dev-requirements.txt delete mode 100644 requirements/requirements.in delete mode 100644 requirements/requirements.txt diff --git a/MANIFEST.in b/MANIFEST.in index 1c69ebe30..adb7bc804 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,7 +4,6 @@ include 
securedrop_proxy/VERSION include changelog.md include config-example.yaml include qubes/securedrop.Proxy -include requirements/build-requirements.txt -include requirements/requirements.txt +include build-requirements.txt include securedrop_proxy/*.py include setup.py diff --git a/Makefile b/Makefile index 0ae4efa82..b1590ef97 100644 --- a/Makefile +++ b/Makefile @@ -3,83 +3,53 @@ .PHONY: all all: help -.PHONY: venv -venv: - python3 -m venv .venv ## Provision a Python 3 virtualenv for **development** - .venv/bin/pip install --upgrade pip wheel - .venv/bin/pip install --require-hashes -r "requirements/dev-requirements.txt" - .PHONY: bandit bandit: ## Run bandit with medium level excluding test-related folders - pip install --upgrade pip && \ - pip install --upgrade bandit!=1.6.0 && \ - bandit -ll --recursive securedrop_proxy + @echo "Running bandit security checks…" + @poetry run bandit -ll --recursive securedrop_proxy .PHONY: safety safety: ## Runs `safety check` to check python dependencies for vulnerabilities - pip install --upgrade safety && \ - for req_file in `find . 
-type f -name '*requirements.txt'`; do \ - echo "Checking file $$req_file" \ - && safety check --full-report -r $$req_file \ - && echo -e '\n' \ - || exit 1; \ - done + @echo "Running safety against build requirements…" + @poetry run safety check --full-report -r build-requirements.txt .PHONY: lint lint: check-isort check-black mypy ## Run isort, black and flake8 and mypy - @flake8 securedrop_proxy tests + @poetry run flake8 securedrop_proxy tests .PHONY: mypy mypy: ## Run mypy static type checker - @mypy --ignore-missing-imports securedrop_proxy + @poetry run mypy --ignore-missing-imports securedrop_proxy .PHONY: black black: ## Run black for file formatting - @black securedrop_proxy tests + @echo "Running black (may result in changes in your working directory)…" + @poetry run black securedrop_proxy tests .PHONY: check-black check-black: ## Check Python source code formatting with black - @black --check --diff securedrop_proxy tests + @echo "Running black formatting check…" + @poetry run black --check --diff securedrop_proxy tests .PHONY: isort isort: ## Run isort for file formatting - @isort securedrop_proxy/*.py tests/*.py + @echo "Running isort (may result in changes in your working directory)…" + @poetry run isort securedrop_proxy/*.py tests/*.py .PHONY: check-isort check-isort: ## Check isort for file formatting - @isort --check-only --diff securedrop_proxy/*.py tests/*.py - -.PHONY: sync-requirements -sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies - rm -r requirements/dev-requirements.txt && cp requirements/requirements.txt requirements/dev-requirements.txt - pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in - -.PHONY: requirements -requirements: ## Update *requirements.txt files if pinned versions do not comply with the dependency specifications in *requirements.in - pip-compile --generate-hashes 
--output-file requirements/requirements.txt requirements/requirements.in - $(MAKE) sync-requirements - -.PHONY: update-dependency -update-dependency: ## Add or upgrade a package to the latest version that complies with the dependency specifications in requirements.in - pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements/requirements.txt requirements/requirements.in - $(MAKE) sync-requirements - -.PHONY: update-dev-only-dependencies -update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in - $(MAKE) sync-requirements - @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in; \ - done < 'requirements/dev-requirements.in' + @echo "Running isort module ordering check…" + @poetry run isort --check-only --diff securedrop_proxy/*.py tests/*.py .PHONY: test test: clean .coverage ## Runs tests with coverage .coverage: - @coverage run --source securedrop_proxy -m unittest + @poetry run coverage run --source securedrop_proxy -m unittest .PHONY: browse-coverage browse-coverage: .coverage ## Generates and opens HTML coverage report - @coverage html + @poetry run coverage html @xdg-open htmlcov/index.html 2>/dev/null || open htmlcov/index.html 2>/dev/null .PHONY: check diff --git a/README.md b/README.md index 6d80117f1..2204be5bc 100644 --- a/README.md +++ b/README.md @@ -29,27 +29,40 @@ https://github.com/freedomofpress/securedrop-workstation/issues/107. #### Quick Start -To try the proxy script, create a virtual environment and install the -requirements. In the root of the project directory, run +1. [Install Poetry](https://python-poetry.org/docs/#installing-with-the-official-installer) +2. 
Run `make test` to verify the installation -``` -make venv -source .venv/bin/activate -make test -``` +#### Managing Dependencies -#### Update Dependencies +We use Poetry to manage dependencies for this project. -If you're adding or updating a dependency, you need to: +### Development dependencies -1. Modify either `dev-requirements.in` and `requirements.in` (depending on whether it is prod or dev only) and then run `make update-pip-dependencies`. This will generate `dev-requirements.txt` and `requirements.txt`. +You can add development dependencies via `poetry add <dependency> --group dev`. +Make sure you commit changes to the lockfile along with changes to `pyproject.toml`. -2. For building a debian package from this project, we use the requirements in -`build-requirements.txt` which uses our pip mirror, i.e. the hashes in that file point to -wheels on our pip mirror. A maintainer will need to add -the updated dependency to our pip mirror (you can request this in the PR). +To update the dependency to the latest version within the specified +version constraints, simply run `poetry update <dependency>` and commit the resulting +changes. -3. Once the pip mirror is updated, you should checkout the [securedrop-debian-packaging repo](https://github.com/freedomofpress/securedrop-debian-packaging) and run `make requirements`. Commit the `build-requirements.txt` that results and add it to your PR. +To update to a new major version (e.g., from 1.0.0 to 2.0.0), you will typically have to +update `pyproject.toml`. + +### Production dependencies + +To add a production dependency, use `poetry add <dependency>`, and to update it, +use `poetry update <dependency>`. + +For our production Debian packages, we use locally built wheels instead of +downloading wheels from PyPI. + +This means that whenever you add or update a production dependency, you also +have to build and commit a new wheel according to the process described in the +[securedrop-builder](https://github.com/freedomofpress/securedrop-builder) +repository. 
+ +This will result in an updated `build-requirements.txt` file you can add to your +PR in this repository. ## Making a Release diff --git a/requirements/build-requirements.txt b/build-requirements.txt similarity index 100% rename from requirements/build-requirements.txt rename to build-requirements.txt diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..784188f39 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1020 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "bandit" +version = "1.7.5" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.7" +files = [ + {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, + {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + +[[package]] +name = "black" +version = "23.7.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = 
"black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "2.0.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.5.0" +files = [ + {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, + {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, +] + +[package.extras] +unicode-backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.3.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, + {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, + {file = 
"coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, + {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, + {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, + {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, + {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, + {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, + {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, + 
{file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, + {file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, + {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, + {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, + {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dparse" +version = "0.6.3" +description = "A parser for Python dependency files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "dparse-0.6.3-py3-none-any.whl", hash = "sha256:0d8fe18714056ca632d98b24fbfc4e9791d4e47065285ab486182288813a5318"}, + {file = "dparse-0.6.3.tar.gz", hash = "sha256:27bb8b4bcaefec3997697ba3f6e06b2447200ba273c0b085c3d012a04571b528"}, +] + +[package.dependencies] +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +conda = ["pyyaml"] +pipenv = ["pipenv (<=2022.12.19)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + 
+[[package]] +name = "gitdb" +version = "4.0.10" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.37" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.37-py3-none-any.whl", hash = "sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33"}, + {file = "GitPython-3.1.37.tar.gz", hash = "sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"] + +[[package]] +name = "idna" +version = "3.2" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, + {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, +] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + 
+[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = 
"multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + 
{file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "mypy" +version = "1.5.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = 
"mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "pbr" +version = "5.11.1" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, +] + +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pycodestyle" +version = "2.11.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, + {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions 
= ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyyaml" +version = "5.4.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, + {file = 
"PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.5.3" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.5.3-py3-none-any.whl", hash = "sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9"}, + {file = "rich-13.5.3.tar.gz", hash = "sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruamel-yaml" +version = "0.17.32" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3" +files = [ + {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, + {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.7" +description = "C version of reader, parser 
and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.5" +files = [ + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = 
"sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, + {file = 
"ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, + {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, +] + 
+[[package]] +name = "safety" +version = "2.3.4" +description = "Checks installed dependencies for known vulnerabilities and licenses." +optional = false +python-versions = "*" +files = [ + {file = "safety-2.3.4-py3-none-any.whl", hash = "sha256:6224dcd9b20986a2b2c5e7acfdfba6bca42bb11b2783b24ed04f32317e5167ea"}, + {file = "safety-2.3.4.tar.gz", hash = "sha256:b9e74e794e82f54d11f4091c5d820c4d2d81de9f953bf0b4f33ac8bc402ae72c"}, +] + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" +setuptools = ">=19.3" + +[package.extras] +github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock 
(>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, + {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.11" +description = "Typing stubs for PyYAML" +optional = false +python-versions = "*" +files = [ + {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"}, + {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"}, +] + +[[package]] +name = "types-requests" +version = "2.31.0.2" +description = "Typing stubs for requests" 
+optional = false +python-versions = "*" +files = [ + {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, + {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, +] + +[package.dependencies] +types-urllib3 = "*" + +[[package]] +name = "types-urllib3" +version = "1.26.25.14" +description = "Typing stubs for urllib3" +optional = false +python-versions = "*" +files = [ + {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, + {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "urllib3" +version = "1.26.6" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +files = [ + {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, + {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, +] + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "vcrpy" +version = "5.1.0" +description = "Automatically mock your HTTP interactions to simplify and speed up testing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "vcrpy-5.1.0-py2.py3-none-any.whl", hash = "sha256:605e7b7a63dcd940db1df3ab2697ca7faf0e835c0852882142bafb19649d599e"}, + {file = "vcrpy-5.1.0.tar.gz", hash = "sha256:bbf1532f2618a04f11bce2a99af3a9647a32c880957293ff91e0a5f187b6b3d2"}, +] + +[package.dependencies] +PyYAML = "*" +urllib3 = {version = "<2", markers = "python_version < \"3.10\""} +wrapt = "*" +yarl = "*" + +[[package]] +name = "wrapt" +version = "1.15.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + 
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = 
"wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +] + +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = 
"sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "8fe8dfaaea2b81e6262ab59080e35c12c680ac67cd1c3fe130b061f7ee4fa8ce" diff --git a/pyproject.toml b/pyproject.toml index 7eaeaea01..c65f11a97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,3 +3,28 @@ line-length = 100 [tool.isort] line_length = 100 + +[tool.poetry] +name = "securedrop-proxy" +version = "0.1.0" +description = "Qubes RPC proxy service for the SecureDrop Client" +authors = ["SecureDrop Team"] +license = "GPLv3+" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.9" +pyyaml = "^5.4.1" +requests = "^2.31.0" + +[tool.poetry.group.dev.dependencies] +black = "^23.7.0" +coverage = "^7.3.0" +flake8 = "^6.1.0" +isort = "^5.12.0" +mypy = "^1.5.1" +types-PyYAML = 
"^6.0.12.11" +types-requests = "^2.31.0.2" +vcrpy = "^5.1.0" +bandit = "*" +safety = "*" diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in deleted file mode 100644 index 9f5c23c39..000000000 --- a/requirements/dev-requirements.in +++ /dev/null @@ -1,9 +0,0 @@ -black -coverage -flake8 -isort -mypy -pip-tools -types-PyYAML -types-requests -vcrpy diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt deleted file mode 100644 index e14f0a563..000000000 --- a/requirements/dev-requirements.txt +++ /dev/null @@ -1,518 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in requirements/requirements.in -# -black==23.3.0 \ - --hash=sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5 \ - --hash=sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915 \ - --hash=sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326 \ - --hash=sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940 \ - --hash=sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b \ - --hash=sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30 \ - --hash=sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c \ - --hash=sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c \ - --hash=sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab \ - --hash=sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27 \ - --hash=sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2 \ - --hash=sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961 \ - --hash=sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9 \ - 
--hash=sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb \ - --hash=sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70 \ - --hash=sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331 \ - --hash=sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2 \ - --hash=sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266 \ - --hash=sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d \ - --hash=sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6 \ - --hash=sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b \ - --hash=sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925 \ - --hash=sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8 \ - --hash=sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4 \ - --hash=sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3 - # via -r requirements/dev-requirements.in -build==0.10.0 \ - --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ - --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 - # via pip-tools -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 - # via - # -r requirements/requirements.in - # requests -charset-normalizer==2.0.4 \ - --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ - --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 - # via - # -r requirements/requirements.in - # requests -click==8.1.3 \ - --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ - --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 - # via - # black - # pip-tools -coverage==7.2.7 \ - 
--hash=sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f \ - --hash=sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2 \ - --hash=sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a \ - --hash=sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a \ - --hash=sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01 \ - --hash=sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6 \ - --hash=sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7 \ - --hash=sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f \ - --hash=sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02 \ - --hash=sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c \ - --hash=sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063 \ - --hash=sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a \ - --hash=sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5 \ - --hash=sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959 \ - --hash=sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97 \ - --hash=sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6 \ - --hash=sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f \ - --hash=sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9 \ - --hash=sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5 \ - --hash=sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f \ - --hash=sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562 \ - --hash=sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe \ - --hash=sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9 \ - --hash=sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f \ - 
--hash=sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb \ - --hash=sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb \ - --hash=sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1 \ - --hash=sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb \ - --hash=sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250 \ - --hash=sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e \ - --hash=sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511 \ - --hash=sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5 \ - --hash=sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59 \ - --hash=sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2 \ - --hash=sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d \ - --hash=sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3 \ - --hash=sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4 \ - --hash=sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de \ - --hash=sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9 \ - --hash=sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833 \ - --hash=sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0 \ - --hash=sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9 \ - --hash=sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d \ - --hash=sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050 \ - --hash=sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d \ - --hash=sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6 \ - --hash=sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353 \ - --hash=sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb \ - 
--hash=sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e \ - --hash=sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8 \ - --hash=sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495 \ - --hash=sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2 \ - --hash=sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd \ - --hash=sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27 \ - --hash=sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1 \ - --hash=sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818 \ - --hash=sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4 \ - --hash=sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e \ - --hash=sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850 \ - --hash=sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3 - # via -r requirements/dev-requirements.in -flake8==6.0.0 \ - --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ - --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 - # via -r requirements/dev-requirements.in -idna==3.2 \ - --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ - --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 - # via - # -r requirements/requirements.in - # requests - # yarl -isort==5.12.0 \ - --hash=sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504 \ - --hash=sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6 - # via -r requirements/dev-requirements.in -mccabe==0.7.0 \ - --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ - --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e - # via flake8 -multidict==6.0.4 \ - 
--hash=sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9 \ - --hash=sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8 \ - --hash=sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03 \ - --hash=sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710 \ - --hash=sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161 \ - --hash=sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664 \ - --hash=sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569 \ - --hash=sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067 \ - --hash=sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313 \ - --hash=sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706 \ - --hash=sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2 \ - --hash=sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636 \ - --hash=sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49 \ - --hash=sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93 \ - --hash=sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603 \ - --hash=sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0 \ - --hash=sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60 \ - --hash=sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4 \ - --hash=sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e \ - --hash=sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1 \ - --hash=sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60 \ - --hash=sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951 \ - --hash=sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc \ - --hash=sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe \ - 
--hash=sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95 \ - --hash=sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d \ - --hash=sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8 \ - --hash=sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed \ - --hash=sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2 \ - --hash=sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775 \ - --hash=sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87 \ - --hash=sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c \ - --hash=sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2 \ - --hash=sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98 \ - --hash=sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3 \ - --hash=sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe \ - --hash=sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78 \ - --hash=sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660 \ - --hash=sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176 \ - --hash=sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e \ - --hash=sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988 \ - --hash=sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c \ - --hash=sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c \ - --hash=sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0 \ - --hash=sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449 \ - --hash=sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f \ - --hash=sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde \ - --hash=sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5 \ - 
--hash=sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d \ - --hash=sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac \ - --hash=sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a \ - --hash=sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9 \ - --hash=sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca \ - --hash=sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11 \ - --hash=sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35 \ - --hash=sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063 \ - --hash=sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b \ - --hash=sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982 \ - --hash=sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258 \ - --hash=sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1 \ - --hash=sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52 \ - --hash=sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480 \ - --hash=sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7 \ - --hash=sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461 \ - --hash=sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d \ - --hash=sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc \ - --hash=sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779 \ - --hash=sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a \ - --hash=sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547 \ - --hash=sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0 \ - --hash=sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171 \ - --hash=sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf \ - 
--hash=sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d \ - --hash=sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba - # via yarl -mypy==1.4.0 \ - --hash=sha256:0cf0ca95e4b8adeaf07815a78b4096b65adf64ea7871b39a2116c19497fcd0dd \ - --hash=sha256:0f98973e39e4a98709546a9afd82e1ffcc50c6ec9ce6f7870f33ebbf0bd4f26d \ - --hash=sha256:19d42b08c7532d736a7e0fb29525855e355fa51fd6aef4f9bbc80749ff64b1a2 \ - --hash=sha256:210fe0f39ec5be45dd9d0de253cb79245f0a6f27631d62e0c9c7988be7152965 \ - --hash=sha256:3b1b5c875fcf3e7217a3de7f708166f641ca154b589664c44a6fd6d9f17d9e7e \ - --hash=sha256:3f2b353eebef669529d9bd5ae3566905a685ae98b3af3aad7476d0d519714758 \ - --hash=sha256:50f65f0e9985f1e50040e603baebab83efed9eb37e15a22a4246fa7cd660f981 \ - --hash=sha256:53c2a1fed81e05ded10a4557fe12bae05b9ecf9153f162c662a71d924d504135 \ - --hash=sha256:5a0ee54c2cb0f957f8a6f41794d68f1a7e32b9968675ade5846f538504856d42 \ - --hash=sha256:62bf18d97c6b089f77f0067b4e321db089d8520cdeefc6ae3ec0f873621c22e5 \ - --hash=sha256:653863c75f0dbb687d92eb0d4bd9fe7047d096987ecac93bb7b1bc336de48ebd \ - --hash=sha256:67242d5b28ed0fa88edd8f880aed24da481929467fdbca6487167cb5e3fd31ff \ - --hash=sha256:6ba9a69172abaa73910643744d3848877d6aac4a20c41742027dcfd8d78f05d9 \ - --hash=sha256:6c34d43e3d54ad05024576aef28081d9d0580f6fa7f131255f54020eb12f5352 \ - --hash=sha256:7461469e163f87a087a5e7aa224102a30f037c11a096a0ceeb721cb0dce274c8 \ - --hash=sha256:94a81b9354545123feb1a99b960faeff9e1fa204fce47e0042335b473d71530d \ - --hash=sha256:a0b2e0da7ff9dd8d2066d093d35a169305fc4e38db378281fce096768a3dbdbf \ - --hash=sha256:a34eed094c16cad0f6b0d889811592c7a9b7acf10d10a7356349e325d8704b4f \ - --hash=sha256:a3af348e0925a59213244f28c7c0c3a2c2088b4ba2fe9d6c8d4fbb0aba0b7d05 \ - --hash=sha256:b4c734d947e761c7ceb1f09a98359dd5666460acbc39f7d0a6b6beec373c5840 \ - --hash=sha256:bba57b4d2328740749f676807fcf3036e9de723530781405cc5a5e41fc6e20de \ - 
--hash=sha256:ca33ab70a4aaa75bb01086a0b04f0ba8441e51e06fc57e28585176b08cad533b \ - --hash=sha256:de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042 \ - --hash=sha256:f051ca656be0c179c735a4c3193f307d34c92fdc4908d44fd4516fbe8b10567d \ - --hash=sha256:f5984a8d13d35624e3b235a793c814433d810acba9eeefe665cdfed3d08bc3af \ - --hash=sha256:f7a5971490fd4a5a436e143105a1f78fa8b3fe95b30fff2a77542b4f3227a01f - # via -r requirements/dev-requirements.in -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via - # black - # mypy -packaging==23.1 \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f - # via - # black - # build -pathspec==0.11.1 \ - --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \ - --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293 - # via black -pip==23.1.2 \ - --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ - --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 - # via pip-tools -pip-tools==6.13.0 \ - --hash=sha256:50943f151d87e752abddec8158622c34ad7f292e193836e90e30d87da60b19d9 \ - --hash=sha256:61d46bd2eb8016ed4a924e196e6e5b0a268cd3babd79e593048720db23522bb1 - # via -r requirements/dev-requirements.in -platformdirs==3.7.0 \ - --hash=sha256:87fbf6473e87c078d536980ba970a472422e94f17b752cfad17024c18876d481 \ - --hash=sha256:cfd065ba43133ff103ab3bd10aecb095c2a0035fcd1f07217c9376900d94ba07 - # via black -pycodestyle==2.10.0 \ - --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ - --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 - # via flake8 -pyflakes==3.0.1 \ - 
--hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ - --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd - # via flake8 -pyproject-hooks==1.0.0 \ - --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ - --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 - # via build -pyyaml==5.4.1 \ - --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ - --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ - --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ - --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ - --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ - --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ - --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ - --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ - --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ - --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ - --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ - --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ - --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ - --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ - --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ - --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ - --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ - --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ - --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ - 
--hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ - --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ - --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ - --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ - --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ - --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ - --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ - --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ - --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 - # via - # -r requirements/requirements.in - # vcrpy -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via -r requirements/requirements.in -setuptools==68.0.0 \ - --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ - --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 - # via pip-tools -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via vcrpy -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via - # black - # build - # mypy -types-pyyaml==6.0.12.10 \ - --hash=sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f \ - --hash=sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97 - # via -r requirements/dev-requirements.in -types-requests==2.31.0.1 \ - 
--hash=sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac \ - --hash=sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3 - # via -r requirements/dev-requirements.in -types-urllib3==1.26.25.13 \ - --hash=sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5 \ - --hash=sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c - # via types-requests -typing-extensions==4.6.3 \ - --hash=sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26 \ - --hash=sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5 - # via - # black - # mypy -urllib3==1.26.6 \ - --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ - --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f - # via - # -r requirements/requirements.in - # requests - # vcrpy -vcrpy==4.3.1 \ - --hash=sha256:24e2d450bf1c2f9f9b4246ee91beb7d58f862a9f2f030514b14783b83c5146ec \ - --hash=sha256:35398f1b373f32340f39d735ea45f40d679ace316f3dddf8cbcbc2f120e6d1d0 - # via -r requirements/dev-requirements.in -wheel==0.40.0 \ - --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ - --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 - # via pip-tools -wrapt==1.15.0 \ - --hash=sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0 \ - --hash=sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420 \ - --hash=sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a \ - --hash=sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c \ - --hash=sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079 \ - --hash=sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923 \ - --hash=sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f \ - --hash=sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1 \ - 
--hash=sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8 \ - --hash=sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86 \ - --hash=sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0 \ - --hash=sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364 \ - --hash=sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e \ - --hash=sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c \ - --hash=sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e \ - --hash=sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c \ - --hash=sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727 \ - --hash=sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff \ - --hash=sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e \ - --hash=sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29 \ - --hash=sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7 \ - --hash=sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72 \ - --hash=sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475 \ - --hash=sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a \ - --hash=sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317 \ - --hash=sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2 \ - --hash=sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd \ - --hash=sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640 \ - --hash=sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98 \ - --hash=sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248 \ - --hash=sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e \ - --hash=sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d \ - 
--hash=sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec \ - --hash=sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1 \ - --hash=sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e \ - --hash=sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9 \ - --hash=sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92 \ - --hash=sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb \ - --hash=sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094 \ - --hash=sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46 \ - --hash=sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29 \ - --hash=sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd \ - --hash=sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705 \ - --hash=sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8 \ - --hash=sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975 \ - --hash=sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb \ - --hash=sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e \ - --hash=sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b \ - --hash=sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418 \ - --hash=sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019 \ - --hash=sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1 \ - --hash=sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba \ - --hash=sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6 \ - --hash=sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2 \ - --hash=sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3 \ - --hash=sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7 \ - 
--hash=sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752 \ - --hash=sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416 \ - --hash=sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f \ - --hash=sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1 \ - --hash=sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc \ - --hash=sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145 \ - --hash=sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee \ - --hash=sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a \ - --hash=sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7 \ - --hash=sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b \ - --hash=sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653 \ - --hash=sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0 \ - --hash=sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90 \ - --hash=sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29 \ - --hash=sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6 \ - --hash=sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034 \ - --hash=sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09 \ - --hash=sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559 \ - --hash=sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639 - # via vcrpy -yarl==1.9.2 \ - --hash=sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571 \ - --hash=sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3 \ - --hash=sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3 \ - --hash=sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c \ - 
--hash=sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7 \ - --hash=sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04 \ - --hash=sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191 \ - --hash=sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea \ - --hash=sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4 \ - --hash=sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4 \ - --hash=sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095 \ - --hash=sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e \ - --hash=sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74 \ - --hash=sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef \ - --hash=sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33 \ - --hash=sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde \ - --hash=sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45 \ - --hash=sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf \ - --hash=sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b \ - --hash=sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac \ - --hash=sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0 \ - --hash=sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528 \ - --hash=sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716 \ - --hash=sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb \ - --hash=sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18 \ - --hash=sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72 \ - --hash=sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6 \ - --hash=sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582 \ - 
--hash=sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5 \ - --hash=sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368 \ - --hash=sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc \ - --hash=sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9 \ - --hash=sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be \ - --hash=sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a \ - --hash=sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80 \ - --hash=sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8 \ - --hash=sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6 \ - --hash=sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417 \ - --hash=sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574 \ - --hash=sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59 \ - --hash=sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608 \ - --hash=sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82 \ - --hash=sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1 \ - --hash=sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3 \ - --hash=sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d \ - --hash=sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8 \ - --hash=sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc \ - --hash=sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac \ - --hash=sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8 \ - --hash=sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955 \ - --hash=sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0 \ - --hash=sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367 \ - 
--hash=sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb \ - --hash=sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a \ - --hash=sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623 \ - --hash=sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2 \ - --hash=sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6 \ - --hash=sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7 \ - --hash=sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4 \ - --hash=sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051 \ - --hash=sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938 \ - --hash=sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8 \ - --hash=sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9 \ - --hash=sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3 \ - --hash=sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5 \ - --hash=sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9 \ - --hash=sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333 \ - --hash=sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185 \ - --hash=sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3 \ - --hash=sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560 \ - --hash=sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b \ - --hash=sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7 \ - --hash=sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78 \ - --hash=sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7 - # via vcrpy diff --git a/requirements/requirements.in b/requirements/requirements.in deleted file mode 100644 index 00b5b4ca6..000000000 --- a/requirements/requirements.in +++ 
/dev/null @@ -1,6 +0,0 @@ -certifi>=2023.7.22 -charset-normalizer>=2.0.4 -idna>=2.7 -pyyaml==5.4.1 -requests>=2.31.0 -urllib3>=1.26.5 diff --git a/requirements/requirements.txt b/requirements/requirements.txt deleted file mode 100644 index f5095c605..000000000 --- a/requirements/requirements.txt +++ /dev/null @@ -1,65 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in -# -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 - # via - # -r requirements/requirements.in - # requests -charset-normalizer==2.0.4 \ - --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ - --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 - # via - # -r requirements/requirements.in - # requests -idna==3.2 \ - --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ - --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 - # via - # -r requirements/requirements.in - # requests -pyyaml==5.4.1 \ - --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ - --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ - --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ - --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ - --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ - --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ - --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ - --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ - 
--hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ - --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ - --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ - --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ - --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ - --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ - --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ - --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ - --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ - --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ - --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ - --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ - --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ - --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ - --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ - --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ - --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ - --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ - --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ - --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 - # via -r requirements/requirements.in -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via -r 
requirements/requirements.in -urllib3==1.26.6 \ - --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ - --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f - # via - # -r requirements/requirements.in - # requests From 2891a69ba5dde9ef516a0beaaf729365ade412e9 Mon Sep 17 00:00:00 2001 From: Erik Moeller Date: Tue, 26 Sep 2023 20:51:52 -0400 Subject: [PATCH 343/352] Update CircleCI config to use Poetry We're installing the Poetry system package on Debian Bookworm -- that's generally a preferable strategy going forward, and in fact, pip will error out if you attempt to install it from PyPI. This necessitates some conditional logic we can drop once we move fully to Bookworm. --- .circleci/config.yml | 58 ++++++++++++++++++++++++++++---------------- 1 file changed, 37 insertions(+), 21 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d151e370b..a39a4c5b3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,26 +1,41 @@ --- common-steps: + - &install_poetry + run: + name: Install Poetry + command: | + set -e + source /etc/os-release + if [[ "$VERSION_CODENAME" == "bullseye" ]]; then + # Install Poetry via PyPI + apt-get update && apt-get install --yes --no-install-recommends python3-pip + pip install poetry==1.6.1 + elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then + # Install Poetry via system package + apt-get update && apt-get install --yes --no-install-recommends python3-poetry + else + echo "Unsupported Debian version: $VERSION_CODENAME" + exit 1 + fi + - &install_testing_dependencies run: name: Install testing dependencies command: | - set -e - apt update && apt install -y git gnupg make python3-dev python3-venv + apt-get install --yes --no-install-recommends git gnupg make + poetry install --no-ansi - &install_build_dependencies run: name: Install build dependencies command: | set -e - apt update && apt install -y git make sudo + apt-get update && apt-get install --yes 
git make sudo - &run_unit_tests run: name: Install requirements and run unit tests command: | - set -e - make venv - source .venv/bin/activate export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata make test @@ -28,9 +43,6 @@ common-steps: run: name: Run lint, type checking, code formatting command: | - set -e - make venv - source .venv/bin/activate make lint - &check_security @@ -38,8 +50,7 @@ common-steps: name: Run static analysis on source code to find security issues command: | set -e - make venv - source .venv/bin/activate + poetry update bandit make bandit - &check_python_dependencies_for_vulnerabilities @@ -47,8 +58,7 @@ common-steps: name: Check Python dependencies for known vulnerabilities command: | set -e - make venv - source .venv/bin/activate + poetry update safety make safety - &install_packaging_dependencies @@ -59,10 +69,11 @@ common-steps: mkdir ~/packaging && cd ~/packaging # local builds may not have an ssh url, so || true git config --global --unset url.ssh://git@github.com.insteadof || true - git clone https://github.com/freedomofpress/securedrop-debian-packaging.git - cd securedrop-debian-packaging + git clone https://github.com/freedomofpress/securedrop-builder.git + cd securedrop-builder apt-get update && apt-get install -y sudo make make install-deps + source .venv/bin/activate PKG_DIR=~/project make requirements - &verify_requirements @@ -78,9 +89,10 @@ common-steps: run: name: Build debian package command: | - cd ~/packaging/securedrop-debian-packaging + cd ~/packaging/securedrop-builder export PKG_VERSION=1000.0 export PKG_PATH=~/project/ + source .venv/bin/activate make securedrop-proxy version: 2.1 @@ -93,8 +105,8 @@ jobs: docker: &docker - image: debian:<< parameters.image >> steps: - - *install_build_dependencies - checkout + - *install_build_dependencies - *install_packaging_dependencies - *verify_requirements - *build_debian_package @@ -103,8 +115,9 @@ jobs: parameters: *parameters docker: *docker steps: - - 
*install_testing_dependencies - checkout + - *install_poetry + - *install_testing_dependencies - *run_unit_tests - store_test_results: path: test-results @@ -113,24 +126,27 @@ jobs: parameters: *parameters docker: *docker steps: - - *install_testing_dependencies - checkout + - *install_poetry + - *install_testing_dependencies - *run_lint check-security: parameters: *parameters docker: *docker steps: - - *install_testing_dependencies - checkout + - *install_poetry + - *install_testing_dependencies - *check_security check-python-security: parameters: *parameters docker: *docker steps: - - *install_testing_dependencies - checkout + - *install_poetry + - *install_testing_dependencies - *check_python_dependencies_for_vulnerabilities From 21545a0e7d14aeff9fc2ba37c72efd5e02b0edb3 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Fri, 3 Nov 2023 13:50:12 -0400 Subject: [PATCH 344/352] Migrate dependency management to poetry Switch dependency management to use poetry, which is much nicer than pip-tools. This is largely based off of and applies the same changes to the Makefile and CI. 
--- .circleci/config.yml | 38 +- MANIFEST.in | 5 +- Makefile | 50 +- README.md | 12 +- ...requirements.txt => build-requirements.txt | 0 poetry.lock | 733 ++++++++++++++++++ pyproject.toml | 20 + requirements/dev-requirements.in | 6 - requirements/dev-requirements.txt | 111 --- requirements/requirements.in | 1 - requirements/requirements.txt | 10 - 11 files changed, 801 insertions(+), 185 deletions(-) rename requirements/build-requirements.txt => build-requirements.txt (100%) create mode 100644 poetry.lock delete mode 100644 requirements/dev-requirements.in delete mode 100644 requirements/dev-requirements.txt delete mode 100644 requirements/requirements.in delete mode 100644 requirements/requirements.txt diff --git a/.circleci/config.yml b/.circleci/config.yml index da9d88844..2391acf1b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,12 +1,34 @@ --- common-steps: + - &install_poetry + run: + name: Install Poetry + command: | + set -e + source /etc/os-release + if [[ "$VERSION_CODENAME" == "bullseye" ]]; then + # Install Poetry via PyPI + apt-get update && apt-get install --yes --no-install-recommends python3-pip + pip install poetry==1.6.1 + elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then + # Install Poetry via system package + apt-get update && apt-get install --yes --no-install-recommends python3-poetry + else + echo "Unsupported Debian version: $VERSION_CODENAME" + exit 1 + fi + + - &install_testing_dependencies + run: + name: Install testing dependencies + command: | + apt-get install --yes --no-install-recommends git gnupg make + poetry install --no-ansi + - &run_tests run: name: Install requirements and run tests command: | - apt-get update && apt-get install -y make python3-venv - make venv - source .venv/bin/activate make check - &install_packaging_dependencies @@ -15,9 +37,10 @@ common-steps: command: | apt-get update && apt-get install -y git git-lfs make sudo mkdir ~/packaging && cd ~/packaging - git clone 
https://github.com/freedomofpress/securedrop-debian-packaging.git - cd securedrop-debian-packaging + git clone https://github.com/freedomofpress/securedrop-builder.git + cd securedrop-builder make install-deps + source .venv/bin/activate PKG_DIR=~/project make requirements - &verify_requirements @@ -33,9 +56,10 @@ common-steps: run: name: Build debian package command: | - cd ~/packaging/securedrop-debian-packaging + cd ~/packaging/securedrop-builder export PKG_VERSION=1000.0 export PKG_PATH=~/project/ + source .venv/bin/activate make securedrop-log version: 2 @@ -45,6 +69,8 @@ jobs: - image: debian:bullseye steps: - checkout + - *install_poetry + - *install_testing_dependencies - *run_tests build-bullseye: diff --git a/MANIFEST.in b/MANIFEST.in index 17086b81b..b1e9a7fa9 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,11 +1,10 @@ include LICENSE include README.md include changelog.md -include requirements/build-requirements.txt -include requirements/requirements.txt +include build-requirements.txt include securedrop-log* include securedrop-redis-log include securedrop.Log include sd-rsyslog* include sdlog.conf -include VERSION \ No newline at end of file +include VERSION diff --git a/Makefile b/Makefile index c0b027a84..dbf828e4a 100644 --- a/Makefile +++ b/Makefile @@ -1,66 +1,32 @@ DEFAULT_GOAL: help SHELL := /bin/bash -.PHONY: venv -venv: ## Provision a Python 3 virtualenv for **development** - python3 -m venv .venv - .venv/bin/pip install --upgrade pip wheel - .venv/bin/pip install --require-hashes -r requirements/dev-requirements.txt - # Bandit is a static code analysis tool to detect security vulnerabilities in Python applications # https://wiki.openstack.org/wiki/Security/Projects/Bandit .PHONY: bandit bandit: ## Run bandit with medium level excluding test-related folders - pip install --upgrade pip && \ - pip install --upgrade bandit!=1.6.0 && \ - bandit -ll --recursive . 
--exclude tests,.venv + @echo "Running bandit security checks…" + @poetry run bandit -ll --recursive . --exclude tests,.venv .PHONY: safety safety: ## Runs `safety check` to check python dependencies for vulnerabilities - pip install --upgrade safety && \ - for req_file in `find . -type f -name '*requirements.txt'`; do \ - echo "Checking file $$req_file" \ - && safety check --full-report -r $$req_file \ - && echo -e '\n' \ - || exit 1; \ - done - -.PHONY: sync-requirements -sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies - rm -r requirements/dev-requirements.txt && cp requirements/requirements.txt requirements/dev-requirements.txt - pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in - -.PHONY: requirements -requirements: ## Update *requirements.txt files if pinned versions do not comply with the dependency specifications in *requirements.in - pip-compile --generate-hashes --output-file requirements/requirements.txt requirements/requirements.in - $(MAKE) sync-requirements - -.PHONY: update-dependency -update-dependency: ## Add or upgrade a package to the latest version that complies with the dependency specifications in requirements.in - pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements/requirements.txt requirements/requirements.in - $(MAKE) sync-requirements - -.PHONY: update-dev-only-dependencies -update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in - $(MAKE) sync-requirements - @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-requirements.txt requirements/requirements.in requirements/dev-requirements.in; \ - done < 'requirements/dev-requirements.in' + @echo "Running 
safety against build requirements…" + @poetry run safety check --full-report -r build-requirements.txt .PHONY: check check: flake8 mypy test .PHONY: flake8 flake8: ## Run flake8 to lint Python files - flake8 + @poetry run flake8 mypy: ## Type check Python files - mypy + @poetry run mypy test: ## Run Python unit tests - python3 -m unittest + @poetry run python3 -m unittest -# Explaination of the below shell command should it ever break. +# Explanation of the below shell command should it ever break. # 1. Set the field separator to ": ##" and any make targets that might appear between : and ## # 2. Use sed-like syntax to remove the make targets # 3. Format the split fields into $$1) the target name (in blue) and $$2) the target descrption diff --git a/README.md b/README.md index 9bce5f6ba..88e0398db 100644 --- a/README.md +++ b/README.md @@ -2,15 +2,15 @@ # securedrop-log -This is a Python module and qrexec service for logging in Qubes for [SecureDrop](https://securedrop.org). +`securedrop-log` is part of the [SecureDrop +Workstation](https://github.com/freedomofpress/securedrop-workstation) project. -## How to upgrade the dependencies? +This is a Python module and qrexec service for logging in Qubes. -To upgrade one single Python dependency, say `redis`, run the following: +#### Quick Start -```bash -PACKAGE=redis make upgrade-pip -``` +1. [Install Poetry](https://python-poetry.org/docs/#installing-with-the-official-installer) +2. Run `make test` to verify the installation ## How to use/try this? diff --git a/requirements/build-requirements.txt b/build-requirements.txt similarity index 100% rename from requirements/build-requirements.txt rename to build-requirements.txt diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..85f1d56c4 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,733 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
+ +[[package]] +name = "bandit" +version = "1.7.5" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.7" +files = [ + {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, + {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dparse" +version = "0.6.3" +description = "A parser for Python dependency files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "dparse-0.6.3-py3-none-any.whl", hash = "sha256:0d8fe18714056ca632d98b24fbfc4e9791d4e47065285ab486182288813a5318"}, + {file = "dparse-0.6.3.tar.gz", hash = "sha256:27bb8b4bcaefec3997697ba3f6e06b2447200ba273c0b085c3d012a04571b528"}, +] + +[package.dependencies] +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +conda = ["pyyaml"] +pipenv = ["pipenv (<=2022.12.19)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.40" 
+description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, + {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.6.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = 
"mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] 
+install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.6" +files = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pbr" +version = "5.11.1" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, +] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyparsing" +version = "3.1.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "redis" +version = "3.3.11" +description = "Python client for Redis key-value store" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "redis-3.3.11-py2.py3-none-any.whl", hash = "sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62"}, + {file = "redis-3.3.11.tar.gz", hash = "sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2"}, +] + +[package.extras] +hiredis = ["hiredis (>=0.1.3)"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruamel-yaml" +version = "0.18.5" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, + {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C 
version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = 
"sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + +[[package]] +name = "safety" +version = "2.3.5" +description = "Checks installed dependencies for known vulnerabilities and licenses." +optional = false +python-versions = "*" +files = [ + {file = "safety-2.3.5-py3-none-any.whl", hash = "sha256:2227fcac1b22b53c1615af78872b48348661691450aa25d6704a5504dbd1f7e2"}, + {file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"}, +] + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0,<22.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" +setuptools = ">=19.3" + +[package.extras] +github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv 
(>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, + {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "types-redis" +version = "3.5.18" +description = "Typing stubs for redis" +optional = false +python-versions = "*" +files = [ + {file = "types-redis-3.5.18.tar.gz", hash = "sha256:15482304e8848c63b383b938ffaba7ebe0b7f8f33381ecc450ee03935213e166"}, + {file = "types_redis-3.5.18-py3-none-any.whl", hash = "sha256:5c55c4b9e8ebdc6d57d4e47900b77d99f19ca0a563264af3f701246ed0926335"}, +] + +[[package]] +name = 
"types-setuptools" +version = "68.2.0.0" +description = "Typing stubs for setuptools" +optional = false +python-versions = "*" +files = [ + {file = "types-setuptools-68.2.0.0.tar.gz", hash = "sha256:a4216f1e2ef29d089877b3af3ab2acf489eb869ccaf905125c69d2dc3932fd85"}, + {file = "types_setuptools-68.2.0.0-py3-none-any.whl", hash = "sha256:77edcc843e53f8fc83bb1a840684841f3dc804ec94562623bfa2ea70d5a2ba1b"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "47eeeed0a2f5016dc51f59e43b2b823e8137076ca7e1720b1db5d836839aeec1" diff --git a/pyproject.toml b/pyproject.toml index 0db7522bb..cbca5340e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,23 @@ +[tool.poetry] +name = "securedrop-log" +version = "0.1.0" +description = "SecureDrop Qubes logging scripts" +authors = ["SecureDrop Team"] +license = "GPLv3+" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.9" +redis = "=3.3.11" + +[tool.poetry.group.dev.dependencies] +flake8 = "^6.0.0" +mypy = "^1.4.1" +types-redis = "<4" +types-setuptools = "^68.0.0" +bandit = "*" +safety = "*" + [tool.mypy] python_version = "3.9" scripts_are_modules = true diff --git a/requirements/dev-requirements.in b/requirements/dev-requirements.in deleted file mode 100644 index 3c10a63db..000000000 --- a/requirements/dev-requirements.in +++ /dev/null @@ -1,6 +0,0 @@ -flake8 -pip-tools - -mypy -types-redis<4 -types-setuptools diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt deleted file mode 100644 index d58b33fb0..000000000 --- a/requirements/dev-requirements.txt +++ /dev/null @@ -1,111 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.8 -# To update, run: -# -# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in 
requirements/requirements.in -# -build==0.10.0 \ - --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ - --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 - # via pip-tools -click==8.1.4 \ - --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \ - --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37 - # via pip-tools -flake8==6.0.0 \ - --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ - --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 - # via -r requirements/dev-requirements.in -mccabe==0.7.0 \ - --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ - --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e - # via flake8 -mypy==1.4.1 \ - --hash=sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042 \ - --hash=sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd \ - --hash=sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2 \ - --hash=sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01 \ - --hash=sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7 \ - --hash=sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3 \ - --hash=sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816 \ - --hash=sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3 \ - --hash=sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc \ - --hash=sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4 \ - --hash=sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b \ - --hash=sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8 \ - --hash=sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c \ - 
--hash=sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462 \ - --hash=sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7 \ - --hash=sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc \ - --hash=sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258 \ - --hash=sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b \ - --hash=sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9 \ - --hash=sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6 \ - --hash=sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f \ - --hash=sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1 \ - --hash=sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828 \ - --hash=sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878 \ - --hash=sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f \ - --hash=sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b - # via -r requirements/dev-requirements.in -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via mypy -packaging==23.1 \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f - # via build -pip-tools==6.14.0 \ - --hash=sha256:06366be0e08d86b416407333e998b4d305d5bd925151b08942ed149380ba3e47 \ - --hash=sha256:c5ad042cd27c0b343b10db1db7f77a7d087beafbec59ae6df1bba4d3368dfe8c - # via -r requirements/dev-requirements.in -pycodestyle==2.10.0 \ - --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ - --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 - # via flake8 -pyflakes==3.0.1 \ - 
--hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ - --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd - # via flake8 -pyproject-hooks==1.0.0 \ - --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ - --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 - # via build -redis==3.3.11 \ - --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ - --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 - # via -r requirements/requirements.in -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via - # build - # mypy - # pip-tools -types-redis==3.5.18 \ - --hash=sha256:15482304e8848c63b383b938ffaba7ebe0b7f8f33381ecc450ee03935213e166 \ - --hash=sha256:5c55c4b9e8ebdc6d57d4e47900b77d99f19ca0a563264af3f701246ed0926335 - # via -r requirements/dev-requirements.in -types-setuptools==68.0.0.1 \ - --hash=sha256:a0454ea7ad0711f63a602caa87929003a83cab89224ae1506ed44bb5be8fe7d7 \ - --hash=sha256:cc5acbc464b106104899e9b9eb4955dd47e854753c8d4ee2ce697eaf0f4d74e1 - # via -r requirements/dev-requirements.in -typing-extensions==4.7.1 \ - --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ - --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 - # via mypy -wheel==0.40.0 \ - --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ - --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 - # via pip-tools - -# The following packages are considered to be unsafe in a requirements file: -pip==23.1.2 \ - --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ - --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 - # via pip-tools -setuptools==68.0.0 \ - 
--hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ - --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 - # via pip-tools diff --git a/requirements/requirements.in b/requirements/requirements.in deleted file mode 100644 index 767bdac05..000000000 --- a/requirements/requirements.in +++ /dev/null @@ -1 +0,0 @@ -redis==3.3.11 \ No newline at end of file diff --git a/requirements/requirements.txt b/requirements/requirements.txt deleted file mode 100644 index 5bf812bb7..000000000 --- a/requirements/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.7 -# To update, run: -# -# pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in -# -redis==3.3.11 \ - --hash=sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62 \ - --hash=sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2 - # via -r requirements/requirements.in From 5b2f65342ce3d96d62b73c289b334de53138ddf2 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 6 Nov 2023 12:46:17 -0500 Subject: [PATCH 345/352] Migrate dependency management to poetry Switch dependency management to use poetry, which is much nicer than pip-tools. This is largely based off of and applies the same changes to the Makefile and CI. 
--- .circleci/config.yml | 79 +- MANIFEST.in | 3 +- Makefile | 66 +- README.md | 17 +- ...requirements.txt => build-requirements.txt | 0 poetry.lock | 1094 +++++++++++++++++ pyproject.toml | 22 + requirements/dev-bookworm-requirements.in | 14 - requirements/dev-bookworm-requirements.txt | 619 ---------- requirements/dev-bullseye-requirements.in | 14 - requirements/dev-bullseye-requirements.txt | 632 ---------- requirements/requirements.in | 0 requirements/requirements.txt | 6 - scripts/codename | 15 - 14 files changed, 1162 insertions(+), 1419 deletions(-) rename requirements/build-requirements.txt => build-requirements.txt (100%) create mode 100644 poetry.lock delete mode 100644 requirements/dev-bookworm-requirements.in delete mode 100644 requirements/dev-bookworm-requirements.txt delete mode 100644 requirements/dev-bullseye-requirements.in delete mode 100644 requirements/dev-bullseye-requirements.txt delete mode 100644 requirements/requirements.in delete mode 100644 requirements/requirements.txt delete mode 100755 scripts/codename diff --git a/.circleci/config.yml b/.circleci/config.yml index 94cdb4b3d..f1549fb15 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,27 +1,40 @@ --- common-steps: + - &install_poetry + run: + name: Install Poetry + command: | + set -e + source /etc/os-release + if [[ "$VERSION_CODENAME" == "bullseye" ]]; then + # Install Poetry via PyPI + apt-get update && apt-get install --yes --no-install-recommends python3-pip + pip install poetry==1.6.1 + elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then + # Install Poetry via system package + apt-get update && apt-get install --yes --no-install-recommends python3-poetry + else + echo "Unsupported Debian version: $VERSION_CODENAME" + exit 1 + fi + - &install_testing_dependencies run: name: Install testing dependencies command: | - set -e apt update && apt install -y git gnupg make python3-dev gnupg python3-venv libnotify-bin + poetry install --no-ansi - &install_build_dependencies 
run: name: Install build dependencies command: | - set -e apt update && apt install -y git make sudo - &run_unit_tests run: name: Install requirements and run unit tests command: | - set -e - export VERSION_CODENAME=$(~/project/scripts/codename) - make venv - source .venv/bin/activate export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata make test @@ -29,30 +42,18 @@ common-steps: run: name: Run lint, type checking, code formatting command: | - set -e - export VERSION_CODENAME=$(~/project/scripts/codename) - make venv - source .venv/bin/activate make check-black lint - &check_security run: name: Run static analysis on source code to find security issues command: | - set -e - export VERSION_CODENAME=$(~/project/scripts/codename) - make venv - source .venv/bin/activate make semgrep - &check_python_dependencies_for_vulnerabilities run: name: Check Python dependencies for known vulnerabilities command: | - set -e - export VERSION_CODENAME=$(~/project/scripts/codename) - make venv - source .venv/bin/activate make safety - &install_packaging_dependencies @@ -63,10 +64,10 @@ common-steps: mkdir ~/packaging && cd ~/packaging # local builds may not have an ssh url, so || true git config --global --unset url.ssh://git@github.com.insteadof || true - git clone https://github.com/freedomofpress/securedrop-debian-packaging.git - cd securedrop-debian-packaging - apt-get update && apt-get install -y sudo make + git clone https://github.com/freedomofpress/securedrop-builder.git + cd securedrop-builder make install-deps + source .venv/bin/activate PKG_DIR=~/project make requirements - &check_packaging_requirements @@ -77,24 +78,14 @@ common-steps: # Fail if unstaged changes exist (after `make requirements` in the previous run step). git diff --ignore-matching-lines=# --exit-code - - &check_testing_requirements - run: - name: Ensure that the same Python requirements are used for development/testing and production. 
- command: | - set -e - export VERSION_CODENAME=$(~/project/scripts/codename) - make venv - source .venv/bin/activate - make requirements - git diff --exit-code requirements/dev-${VERSION_CODENAME}-requirements.txt - - &build_debian_package run: name: Build debian package command: | - cd ~/packaging/securedrop-debian-packaging + cd ~/packaging/securedrop-builder export PKG_VERSION=1000.0 export PKG_PATH=~/project/ + source .venv/bin/activate make securedrop-export version: 2.1 @@ -117,8 +108,9 @@ jobs: parameters: *parameters docker: *docker steps: - - *install_testing_dependencies + - *install_poetry - checkout + - *install_testing_dependencies - *run_unit_tests - store_test_results: path: test-results @@ -127,33 +119,28 @@ jobs: parameters: *parameters docker: *docker steps: - - *install_testing_dependencies + - *install_poetry - checkout + - *install_testing_dependencies - *run_lint check-security: parameters: *parameters docker: *docker steps: - - *install_testing_dependencies + - *install_poetry - checkout + - *install_testing_dependencies - *check_security check-python-security: parameters: *parameters docker: *docker steps: - - *install_testing_dependencies + - *install_poetry - checkout - - *check_python_dependencies_for_vulnerabilities - - check-testing-requirements: - parameters: *parameters - docker: *docker - steps: - *install_testing_dependencies - - checkout - - *check_testing_requirements + - *check_python_dependencies_for_vulnerabilities workflows: securedrop_export_ci: @@ -166,8 +153,6 @@ workflows: - bookworm - lint: matrix: *matrix - - check-testing-requirements: - matrix: *matrix - check-security: matrix: *matrix - check-python-security: diff --git a/MANIFEST.in b/MANIFEST.in index 4322c8cb2..87fb2b127 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,8 +2,7 @@ include LICENSE include README.md include securedrop_export/VERSION include changelog.md -include requirements/build-requirements.txt -include requirements/requirements.txt +include 
build-requirements.txt include securedrop_export/*.py include setup.py include files/send-to-usb.desktop diff --git a/Makefile b/Makefile index 6e9cf4965..b650fcb89 100644 --- a/Makefile +++ b/Makefile @@ -1,80 +1,34 @@ .PHONY: all all: help -VERSION_CODENAME ?= bullseye - -.PHONY: venv -venv: ## Provision a Python 3 virtualenv for **development** - python3 -m venv .venv - .venv/bin/pip install --upgrade pip wheel - .venv/bin/pip install --require-hashes -r requirements/dev-${VERSION_CODENAME}-requirements.txt - .PHONY: safety safety: ## Runs `safety check` to check python dependencies for vulnerabilities - pip install --upgrade safety && \ - for req_file in `find . -type f -name '*requirements.txt'`; do \ - echo "Checking file $$req_file" \ - && safety check --full-report -r $$req_file \ - && echo -e '\n' \ - || exit 1; \ - done - -# Helper, not to be used directly -.PHONY: sync-requirements -sync-requirements: ## Update dev-requirements.txt to pin to the same versions of prod dependencies - if test -f "requirements/dev-bullseye-requirements.txt"; then rm -r requirements/dev-bullseye-requirements.txt; fi - if test -f "requirements/dev-bookworm-requirements.txt"; then rm -r requirements/dev-bookworm-requirements.txt; fi - $(MAKE) dev-requirements - -# Helper, not to be used directly -.PHONY: dev-requirements -dev-requirements: ## Update dev-*requirements.txt files if pinned versions do not comply with the dependency specifications in dev-*requirements.in - pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in - pip-compile --allow-unsafe --generate-hashes --output-file requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in - -.PHONY: requirements -requirements: ## Update *requirements.txt files if pinned versions do not comply with the dependency specifications in *requirements.in - pip-compile --generate-hashes --output-file 
requirements/requirements.txt requirements/requirements.in - $(MAKE) dev-requirements - -.PHONY: update-dependency -update-dependency: ## Add or upgrade a package to the latest version that complies with the dependency specifications in requirements.in - pip-compile --generate-hashes --upgrade-package $(PACKAGE) --output-file requirements/requirements.txt requirements/requirements.in - $(MAKE) sync-requirements - -.PHONY: update-dev-only-dependencies -update-dev-only-dependencies: ## Update dev-requirements.txt to pin to the latest versions of dev-only dependencies that comply with the dependency specifications in dev-requirements.in - $(MAKE) sync-requirements - @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in; \ - done < 'requirements/dev-bullseye-requirements.in' - @while read line; do \ - pip-compile --allow-unsafe --generate-hashes --upgrade-package $file --output-file requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in; \ - done < 'requirements/dev-bookworm-requirements.in' + @echo "Running safety against build requirements…" + @poetry run safety check --full-report -r build-requirements.txt .PHONY: check check: lint mypy semgrep test check-black ## Run linter and tests .PHONY: check-black check-black: ## Check Python source code formatting with black - @black --check --diff ./ + @poetry run black --check --diff ./ TESTS ?= tests .PHONY: test test: ## Run tests - pytest -v --cov-report html --cov-report term-missing --cov=securedrop_export $$TESTS + poetry run pytest -v --cov-report html --cov-report term-missing --cov=securedrop_export $$TESTS .PHONY: lint lint: ## Run linter - flake8 securedrop_export/ tests/ + poetry run flake8 securedrop_export/ tests/ .PHONY: mypy mypy: ## Type check Python files - mypy . + poetry run mypy . 
.PHONY: black black: ## Format Python source code with black - @black ./ + @poetry run black ./ SEMGREP_FLAGS := --exclude "tests/" --error --strict --verbose @@ -84,14 +38,14 @@ semgrep:semgrep-community semgrep-local .PHONY: semgrep-community semgrep-community: @echo "Running semgrep with semgrep.dev community rules..." - @semgrep $(SEMGREP_FLAGS) --config "p/r2c-security-audit" --config "p/r2c-ci" + @poetry run semgrep $(SEMGREP_FLAGS) --config "p/r2c-security-audit" --config "p/r2c-ci" .PHONY: semgrep-local semgrep-local: @echo "Running semgrep with local rules..." - @semgrep $(SEMGREP_FLAGS) --config ".semgrep" + @poetry run semgrep $(SEMGREP_FLAGS) --config ".semgrep" -# Explaination of the below shell command should it ever break. +# Explanation of the below shell command should it ever break. # 1. Set the field separator to ": ##" and any make targets that might appear between : and ## # 2. Use sed-like syntax to remove the make targets # 3. Format the split fields into $$1) the target name (in blue) and $$2) the target descrption diff --git a/README.md b/README.md index 7572ee30c..241f97b19 100644 --- a/README.md +++ b/README.md @@ -6,21 +6,10 @@ Code for exporting and printing files from the SecureDrop Qubes Workstation. -## Getting Started +## Quick Start -Python 3 support is required. To get started: - -``` -virtualenv --python=python3.7 .venv -source .venv/bin/activate -pip install -r test-requirements.txt -``` - -To run the linter and tests: - -``` -make check -``` +1. [Install Poetry](https://python-poetry.org/docs/#installing-with-the-official-installer) +2. 
Run `make check` to verify the installation ## Supported Printers diff --git a/requirements/build-requirements.txt b/build-requirements.txt similarity index 100% rename from requirements/build-requirements.txt rename to build-requirements.txt diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..e385fd54a --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1094 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "black" +version = "23.10.1" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, + {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, + {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, + {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, + {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, + {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, + {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, + {file = 
"black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, + {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, + {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, + {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, + {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "boltons" +version = "21.0.0" +description = "When they're not builtins, they're boltons." +optional = false +python-versions = "*" +files = [ + {file = "boltons-21.0.0-py2.py3-none-any.whl", hash = "sha256:b9bb7b58b2b420bbe11a6025fdef6d3e5edc9f76a42fb467afe7ca212ef9948b"}, + {file = "boltons-21.0.0.tar.gz", hash = "sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = 
"sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + 
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = 
"charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-option-group" +version = "0.5.6" +description = "Option groups missing in Click" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "click-option-group-0.5.6.tar.gz", hash = "sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777"}, + {file = "click_option_group-0.5.6-py3-none-any.whl", hash = "sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7"}, +] + +[package.dependencies] +Click = ">=7.0,<9" + +[package.extras] +docs = ["Pallets-Sphinx-Themes", "m2r2", "sphinx"] +tests = ["pytest"] +tests-cov = ["coverage", "coveralls", "pytest", "pytest-cov"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = 
"coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + 
{file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + +[[package]] +name = "dparse" +version = "0.6.3" +description = "A parser for Python dependency files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "dparse-0.6.3-py3-none-any.whl", hash = "sha256:0d8fe18714056ca632d98b24fbfc4e9791d4e47065285ab486182288813a5318"}, + {file = "dparse-0.6.3.tar.gz", hash = "sha256:27bb8b4bcaefec3997697ba3f6e06b2447200ba273c0b085c3d012a04571b528"}, +] + +[package.dependencies] +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + 
+[package.extras] +conda = ["pyyaml"] +pipenv = ["pipenv (<=2022.12.19)"] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "face" +version = "22.0.0" +description = "A command-line application framework (and CLI parser). Friendly for users, full-featured for developers." +optional = false +python-versions = "*" +files = [ + {file = "face-22.0.0-py3-none-any.whl", hash = "sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35"}, + {file = "face-22.0.0.tar.gz", hash = "sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d"}, +] + +[package.dependencies] +boltons = ">=20.0.0" + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "glom" +version = "22.1.0" +description = "A declarative object transformer and formatter, for conglomerating nested data." 
+optional = false +python-versions = "*" +files = [ + {file = "glom-22.1.0-py2.py3-none-any.whl", hash = "sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772"}, + {file = "glom-22.1.0.tar.gz", hash = "sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5"}, +] + +[package.dependencies] +attrs = "*" +boltons = ">=19.3.0" +face = ">=20.1.0" + +[package.extras] +yaml = ["PyYAML"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jsonschema" +version = "4.19.2" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.19.2-py3-none-any.whl", hash = "sha256:eee9e502c788e89cb166d4d37f43084e3b64ab405c795c03d343a4dbc2c810fc"}, + {file = "jsonschema-4.19.2.tar.gz", hash = "sha256:c9ff4d7447eed9592c23a12ccee508baf0dd0d59650615e847feb6cdca74f392"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", 
"idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.7.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] + +[package.dependencies] +referencing = ">=0.28.0" + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.6.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = 
"mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "peewee" +version = "3.17.0" +description = "a little orm" +optional = false +python-versions = "*" +files = [ + {file = "peewee-3.17.0.tar.gz", hash = "sha256:3a56967f28a43ca7a4287f4803752aeeb1a57a08dee2e839b99868181dfb5df8"}, +] + +[[package]] +name = "platformdirs" +version = "3.11.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments 
(>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "referencing" +version = "0.30.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rpds-py" +version = "0.12.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.12.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c694bee70ece3b232df4678448fdda245fd3b1bb4ba481fb6cd20e13bb784c46"}, + {file = "rpds_py-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30e5ce9f501fb1f970e4a59098028cf20676dee64fc496d55c33e04bbbee097d"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d72a4315514e5a0b9837a086cb433b004eea630afb0cc129de76d77654a9606f"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eebaf8c76c39604d52852366249ab807fe6f7a3ffb0dd5484b9944917244cdbe"}, + {file = 
"rpds_py-0.12.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a239303acb0315091d54c7ff36712dba24554993b9a93941cf301391d8a997ee"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced40cdbb6dd47a032725a038896cceae9ce267d340f59508b23537f05455431"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c8c0226c71bd0ce9892eaf6afa77ae8f43a3d9313124a03df0b389c01f832de"}, + {file = "rpds_py-0.12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8e11715178f3608874508f08e990d3771e0b8c66c73eb4e183038d600a9b274"}, + {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5210a0018c7e09c75fa788648617ebba861ae242944111d3079034e14498223f"}, + {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:171d9a159f1b2f42a42a64a985e4ba46fc7268c78299272ceba970743a67ee50"}, + {file = "rpds_py-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:57ec6baec231bb19bb5fd5fc7bae21231860a1605174b11585660236627e390e"}, + {file = "rpds_py-0.12.0-cp310-none-win32.whl", hash = "sha256:7188ddc1a8887194f984fa4110d5a3d5b9b5cd35f6bafdff1b649049cbc0ce29"}, + {file = "rpds_py-0.12.0-cp310-none-win_amd64.whl", hash = "sha256:1e04581c6117ad9479b6cfae313e212fe0dfa226ac727755f0d539cd54792963"}, + {file = "rpds_py-0.12.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:0a38612d07a36138507d69646c470aedbfe2b75b43a4643f7bd8e51e52779624"}, + {file = "rpds_py-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f12d69d568f5647ec503b64932874dade5a20255736c89936bf690951a5e79f5"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8a1d990dc198a6c68ec3d9a637ba1ce489b38cbfb65440a27901afbc5df575"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:8c567c664fc2f44130a20edac73e0a867f8e012bf7370276f15c6adc3586c37c"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e9e976e0dbed4f51c56db10831c9623d0fd67aac02853fe5476262e5a22acb7"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efddca2d02254a52078c35cadad34762adbae3ff01c6b0c7787b59d038b63e0d"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9e7f29c00577aff6b318681e730a519b235af292732a149337f6aaa4d1c5e31"}, + {file = "rpds_py-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:389c0e38358fdc4e38e9995e7291269a3aead7acfcf8942010ee7bc5baee091c"}, + {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33ab498f9ac30598b6406e2be1b45fd231195b83d948ebd4bd77f337cb6a2bff"}, + {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d56b1cd606ba4cedd64bb43479d56580e147c6ef3f5d1c5e64203a1adab784a2"}, + {file = "rpds_py-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fa73ed22c40a1bec98d7c93b5659cd35abcfa5a0a95ce876b91adbda170537c"}, + {file = "rpds_py-0.12.0-cp311-none-win32.whl", hash = "sha256:dbc25baa6abb205766fb8606f8263b02c3503a55957fcb4576a6bb0a59d37d10"}, + {file = "rpds_py-0.12.0-cp311-none-win_amd64.whl", hash = "sha256:c6b52b7028b547866c2413f614ee306c2d4eafdd444b1ff656bf3295bf1484aa"}, + {file = "rpds_py-0.12.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:9620650c364c01ed5b497dcae7c3d4b948daeae6e1883ae185fef1c927b6b534"}, + {file = "rpds_py-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2124f9e645a94ab7c853bc0a3644e0ca8ffbe5bb2d72db49aef8f9ec1c285733"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281c8b219d4f4b3581b918b816764098d04964915b2f272d1476654143801aa2"}, + {file = 
"rpds_py-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27ccc93c7457ef890b0dd31564d2a05e1aca330623c942b7e818e9e7c2669ee4"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1c562a9bb72244fa767d1c1ab55ca1d92dd5f7c4d77878fee5483a22ffac808"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e57919c32ee295a2fca458bb73e4b20b05c115627f96f95a10f9f5acbd61172d"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa35ad36440aaf1ac8332b4a4a433d4acd28f1613f0d480995f5cfd3580e90b7"}, + {file = "rpds_py-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e6aea5c0eb5b0faf52c7b5c4a47c8bb64437173be97227c819ffa31801fa4e34"}, + {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:81cf9d306c04df1b45971c13167dc3bad625808aa01281d55f3cf852dde0e206"}, + {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:08e6e7ff286254016b945e1ab632ee843e43d45e40683b66dd12b73791366dd1"}, + {file = "rpds_py-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d0a675a7acbbc16179188d8c6d0afb8628604fc1241faf41007255957335a0b"}, + {file = "rpds_py-0.12.0-cp312-none-win32.whl", hash = "sha256:b2287c09482949e0ca0c0eb68b2aca6cf57f8af8c6dfd29dcd3bc45f17b57978"}, + {file = "rpds_py-0.12.0-cp312-none-win_amd64.whl", hash = "sha256:8015835494b21aa7abd3b43fdea0614ee35ef6b03db7ecba9beb58eadf01c24f"}, + {file = "rpds_py-0.12.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6174d6ad6b58a6bcf67afbbf1723420a53d06c4b89f4c50763d6fa0a6ac9afd2"}, + {file = "rpds_py-0.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a689e1ded7137552bea36305a7a16ad2b40be511740b80748d3140614993db98"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45321224144c25a62052035ce96cbcf264667bcb0d81823b1bbc22c4addd194"}, 
+ {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa32205358a76bf578854bf31698a86dc8b2cb591fd1d79a833283f4a403f04b"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91bd2b7cf0f4d252eec8b7046fa6a43cee17e8acdfc00eaa8b3dbf2f9a59d061"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3acadbab8b59f63b87b518e09c4c64b142e7286b9ca7a208107d6f9f4c393c5c"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:429349a510da82c85431f0f3e66212d83efe9fd2850f50f339341b6532c62fe4"}, + {file = "rpds_py-0.12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05942656cb2cb4989cd50ced52df16be94d344eae5097e8583966a1d27da73a5"}, + {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0c5441b7626c29dbd54a3f6f3713ec8e956b009f419ffdaaa3c80eaf98ddb523"}, + {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b6b0e17d39d21698185097652c611f9cf30f7c56ccec189789920e3e7f1cee56"}, + {file = "rpds_py-0.12.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3b7a64d43e2a1fa2dd46b678e00cabd9a49ebb123b339ce799204c44a593ae1c"}, + {file = "rpds_py-0.12.0-cp38-none-win32.whl", hash = "sha256:e5bbe011a2cea9060fef1bb3d668a2fd8432b8888e6d92e74c9c794d3c101595"}, + {file = "rpds_py-0.12.0-cp38-none-win_amd64.whl", hash = "sha256:bec29b801b4adbf388314c0d050e851d53762ab424af22657021ce4b6eb41543"}, + {file = "rpds_py-0.12.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:1096ca0bf2d3426cbe79d4ccc91dc5aaa73629b08ea2d8467375fad8447ce11a"}, + {file = "rpds_py-0.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48aa98987d54a46e13e6954880056c204700c65616af4395d1f0639eba11764b"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7979d90ee2190d000129598c2b0c82f13053dba432b94e45e68253b09bb1f0f6"}, + {file 
= "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88857060b690a57d2ea8569bca58758143c8faa4639fb17d745ce60ff84c867e"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4eb74d44776b0fb0782560ea84d986dffec8ddd94947f383eba2284b0f32e35e"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f62581d7e884dd01ee1707b7c21148f61f2febb7de092ae2f108743fcbef5985"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f5dcb658d597410bb7c967c1d24eaf9377b0d621358cbe9d2ff804e5dd12e81"}, + {file = "rpds_py-0.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9bf9acce44e967a5103fcd820fc7580c7b0ab8583eec4e2051aec560f7b31a63"}, + {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:240687b5be0f91fbde4936a329c9b7589d9259742766f74de575e1b2046575e4"}, + {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25740fb56e8bd37692ed380e15ec734be44d7c71974d8993f452b4527814601e"}, + {file = "rpds_py-0.12.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a54917b7e9cd3a67e429a630e237a90b096e0ba18897bfb99ee8bd1068a5fea0"}, + {file = "rpds_py-0.12.0-cp39-none-win32.whl", hash = "sha256:b92aafcfab3d41580d54aca35a8057341f1cfc7c9af9e8bdfc652f83a20ced31"}, + {file = "rpds_py-0.12.0-cp39-none-win_amd64.whl", hash = "sha256:cd316dbcc74c76266ba94eb021b0cc090b97cca122f50bd7a845f587ff4bf03f"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0853da3d5e9bc6a07b2486054a410b7b03f34046c123c6561b535bb48cc509e1"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cb41ad20064e18a900dd427d7cf41cfaec83bcd1184001f3d91a1f76b3fcea4e"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b710bf7e7ae61957d5c4026b486be593ed3ec3dca3e5be15e0f6d8cf5d0a4990"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a952ae3eb460c6712388ac2ec706d24b0e651b9396d90c9a9e0a69eb27737fdc"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bedd91ae1dd142a4dc15970ed2c729ff6c73f33a40fa84ed0cdbf55de87c777"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:761531076df51309075133a6bc1db02d98ec7f66e22b064b1d513bc909f29743"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2baa6be130e8a00b6cbb9f18a33611ec150b4537f8563bddadb54c1b74b8193"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f05450fa1cd7c525c0b9d1a7916e595d3041ac0afbed2ff6926e5afb6a781b7f"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:81c4d1a3a564775c44732b94135d06e33417e829ff25226c164664f4a1046213"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e888be685fa42d8b8a3d3911d5604d14db87538aa7d0b29b1a7ea80d354c732d"}, + {file = "rpds_py-0.12.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6f8d7fe73d1816eeb5378409adc658f9525ecbfaf9e1ede1e2d67a338b0c7348"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0831d3ecdea22e4559cc1793f22e77067c9d8c451d55ae6a75bf1d116a8e7f42"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:513ccbf7420c30e283c25c82d5a8f439d625a838d3ba69e79a110c260c46813f"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:301bd744a1adaa2f6a5e06c98f1ac2b6f8dc31a5c23b838f862d65e32fca0d4b"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:f8832a4f83d4782a8f5a7b831c47e8ffe164e43c2c148c8160ed9a6d630bc02a"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2416ed743ec5debcf61e1242e012652a4348de14ecc7df3512da072b074440"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35585a8cb5917161f42c2104567bb83a1d96194095fc54a543113ed5df9fa436"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d389ff1e95b6e46ebedccf7fd1fadd10559add595ac6a7c2ea730268325f832c"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b007c2444705a2dc4a525964fd4dd28c3320b19b3410da6517cab28716f27d3"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:188912b22b6c8225f4c4ffa020a2baa6ad8fabb3c141a12dbe6edbb34e7f1425"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b4cf9ab9a0ae0cb122685209806d3f1dcb63b9fccdf1424fb42a129dc8c2faa"}, + {file = "rpds_py-0.12.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2d34a5450a402b00d20aeb7632489ffa2556ca7b26f4a63c35f6fccae1977427"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:466030a42724780794dea71eb32db83cc51214d66ab3fb3156edd88b9c8f0d78"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:68172622a5a57deb079a2c78511c40f91193548e8ab342c31e8cb0764d362459"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54cdfcda59251b9c2f87a05d038c2ae02121219a04d4a1e6fc345794295bdc07"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b75b912a0baa033350367a8a07a8b2d44fd5b90c890bfbd063a8a5f945f644b"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:47aeceb4363851d17f63069318ba5721ae695d9da55d599b4d6fb31508595278"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0525847f83f506aa1e28eb2057b696fe38217e12931c8b1b02198cfe6975e142"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efbe0b5e0fd078ed7b005faa0170da4f72666360f66f0bb2d7f73526ecfd99f9"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0fadfdda275c838cba5102c7f90a20f2abd7727bf8f4a2b654a5b617529c5c18"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:56dd500411d03c5e9927a1eb55621e906837a83b02350a9dc401247d0353717c"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:6915fc9fa6b3ec3569566832e1bb03bd801c12cea030200e68663b9a87974e76"}, + {file = "rpds_py-0.12.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5f1519b080d8ce0a814f17ad9fb49fb3a1d4d7ce5891f5c85fc38631ca3a8dc4"}, + {file = "rpds_py-0.12.0.tar.gz", hash = "sha256:7036316cc26b93e401cedd781a579be606dad174829e6ad9e9c5a0da6e036f80"}, +] + +[[package]] +name = "ruamel-yaml" +version = "0.17.40" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3" +files = [ + {file = "ruamel.yaml-0.17.40-py3-none-any.whl", hash = "sha256:b16b6c3816dff0a93dca12acf5e70afd089fa5acb80604afd1ffa8b465b7722c"}, + {file = "ruamel.yaml-0.17.40.tar.gz", hash = "sha256:6024b986f06765d482b5b07e086cc4b4cd05dd22ddcbc758fa23d54873cf313d"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" 
+description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", 
hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = 
"sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + +[[package]] +name = "safety" +version = "2.3.4" +description = "Checks installed dependencies for known vulnerabilities and licenses." +optional = false +python-versions = "*" +files = [ + {file = "safety-2.3.4-py3-none-any.whl", hash = "sha256:6224dcd9b20986a2b2c5e7acfdfba6bca42bb11b2783b24ed04f32317e5167ea"}, + {file = "safety-2.3.4.tar.gz", hash = "sha256:b9e74e794e82f54d11f4091c5d820c4d2d81de9f953bf0b4f33ac8bc402ae72c"}, +] + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" +setuptools = ">=19.3" + +[package.extras] +github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "semgrep" +version = "1.48.0" +description = "Lightweight static analysis for many languages. Find bug variants with patterns that look like source code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "semgrep-1.48.0-cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-any.whl", hash = "sha256:35999402ada6cc9ae03642eb2fb1011fcaf46f23a85d56335d2a8de8a04db38f"}, + {file = "semgrep-1.48.0-cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-macosx_10_14_x86_64.whl", hash = "sha256:111a0a9675735f7ba0b4a47f5c17c989c9243f2cf4a35e92cbfb0cd8ca24c497"}, + {file = "semgrep-1.48.0-cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-macosx_11_0_arm64.whl", hash = "sha256:7a669e8b4beb977f7fe76cce887c15c5af9522f0bd8b9bc60929d3135a23b90e"}, + {file = "semgrep-1.48.0-cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-musllinux_1_0_aarch64.manylinux2014_aarch64.whl", hash = "sha256:516706163a1a6d128dbcb5fec5a0d175371bf0a25a2c0afe70517f1081aa463e"}, + {file = "semgrep-1.48.0.tar.gz", hash = "sha256:9db8547dadd3f65e62c96a74d756be4469559a8a9e11f39549316cbe6930e5a6"}, +] + +[package.dependencies] +attrs = ">=21.3" +boltons = ">=21.0,<22.0" +click = ">=8.1,<9.0" 
+click-option-group = ">=0.5,<1.0" +colorama = ">=0.4.0,<0.5.0" +defusedxml = ">=0.7.1,<0.8.0" +glom = ">=22.1,<23.0" +jsonschema = ">=4.6,<5.0" +packaging = ">=21.0" +peewee = ">=3.14,<4.0" +requests = ">=2.22,<3.0" +rich = ">=12.6.0" +"ruamel.yaml" = ">=0.16.0,<0.18" +tomli = ">=2.0.1,<2.1.0" +typing-extensions = ">=4.2,<5.0" +urllib3 = ">=1.26,<2.0" +wcmatch = ">=8.3,<9.0" + +[package.extras] +experiments = ["jsonnet (>=0.18,<1.0)"] + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = 
false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "types-setuptools" +version = "68.2.0.0" +description = "Typing stubs for setuptools" +optional = false +python-versions = "*" +files = [ + {file = "types-setuptools-68.2.0.0.tar.gz", hash = "sha256:a4216f1e2ef29d089877b3af3ab2acf489eb869ccaf905125c69d2dc3932fd85"}, + {file = "types_setuptools-68.2.0.0-py3-none-any.whl", hash = "sha256:77edcc843e53f8fc83bb1a840684841f3dc804ec94562623bfa2ea70d5a2ba1b"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wcmatch" +version = "8.5" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.8" +files = [ + {file = "wcmatch-8.5-py3-none-any.whl", hash = "sha256:14554e409b142edeefab901dc68ad570b30a72a8ab9a79106c5d5e9a6d241bd5"}, + {file = "wcmatch-8.5.tar.gz", hash = "sha256:86c17572d0f75cbf3bcb1a18f3bf2f9e72b39a9c08c9b4a74e991e1882a8efb3"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "6e18d52942bf002b4a5fe2f237bfe9f2a355a411e6b9443d3f1abf9a652d0d0b" diff --git a/pyproject.toml b/pyproject.toml index 5ec8cc186..3afb5b8ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,24 @@ +[tool.poetry] +name = "securedrop-export" +version = "0.1.0" +description = "SecureDrop Qubes export scripts" +authors = ["SecureDrop Team"] +license = "GPLv3+" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.9" + +[tool.poetry.group.dev.dependencies] +black = "^23.7.0" +flake8 = "^6.0.0" +mypy = "^1.4.1" +types-setuptools = "^68.0.0" +pytest = "^7.4.0" +pytest-cov = "^4.1.0" +pytest-mock = "^3.11.1" +semgrep = "^1.31.2" +safety = "*" + [tool.mypy] python_version = "3.9" diff --git a/requirements/dev-bookworm-requirements.in b/requirements/dev-bookworm-requirements.in deleted file mode 100644 
index 4e3eb791b..000000000 --- a/requirements/dev-bookworm-requirements.in +++ /dev/null @@ -1,14 +0,0 @@ -# Include prod requirements --r requirements.in - -black -certifi>=2023.07.22 -flake8 -pip-tools -pytest -pytest-cov -pytest-mock -semgrep - -mypy -types-setuptools diff --git a/requirements/dev-bookworm-requirements.txt b/requirements/dev-bookworm-requirements.txt deleted file mode 100644 index c89930a71..000000000 --- a/requirements/dev-bookworm-requirements.txt +++ /dev/null @@ -1,619 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --config=pyproject.toml --generate-hashes --output-file=requirements/dev-bookworm-requirements.txt requirements/dev-bookworm-requirements.in -# -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 - # via - # glom - # jsonschema - # referencing - # semgrep -black==23.7.0 \ - --hash=sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3 \ - --hash=sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb \ - --hash=sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087 \ - --hash=sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320 \ - --hash=sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6 \ - --hash=sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3 \ - --hash=sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc \ - --hash=sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f \ - --hash=sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587 \ - --hash=sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91 \ - --hash=sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a \ - 
--hash=sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad \ - --hash=sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926 \ - --hash=sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9 \ - --hash=sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be \ - --hash=sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd \ - --hash=sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96 \ - --hash=sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491 \ - --hash=sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2 \ - --hash=sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a \ - --hash=sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f \ - --hash=sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995 - # via -r requirements/dev-bookworm-requirements.in -boltons==21.0.0 \ - --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ - --hash=sha256:b9bb7b58b2b420bbe11a6025fdef6d3e5edc9f76a42fb467afe7ca212ef9948b - # via - # face - # glom - # semgrep -bracex==2.3.post1 \ - --hash=sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73 \ - --hash=sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693 - # via wcmatch -build==0.10.0 \ - --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ - --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 - # via pip-tools -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 - # via - # -r requirements/dev-bookworm-requirements.in - # requests -charset-normalizer==3.2.0 \ - --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ - 
--hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ - --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \ - --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \ - --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \ - --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \ - --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \ - --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \ - --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \ - --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \ - --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \ - --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \ - --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \ - --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \ - --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \ - --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \ - --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \ - --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \ - --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \ - --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \ - --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \ - --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \ - --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \ - --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \ - --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \ - 
--hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \ - --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \ - --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \ - --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \ - --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \ - --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \ - --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \ - --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \ - --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \ - --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \ - --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \ - --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \ - --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \ - --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \ - --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \ - --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \ - --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \ - --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \ - --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \ - --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \ - --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \ - --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \ - --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \ - --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \ - 
--hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \ - --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \ - --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \ - --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \ - --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \ - --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \ - --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \ - --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \ - --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \ - --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \ - --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \ - --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \ - --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \ - --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \ - --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \ - --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \ - --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \ - --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \ - --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \ - --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \ - --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \ - --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \ - --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \ - --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \ - 
--hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ - --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa - # via requests -click==8.1.4 \ - --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \ - --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37 - # via - # black - # click-option-group - # pip-tools - # semgrep -click-option-group==0.5.6 \ - --hash=sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7 \ - --hash=sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777 - # via semgrep -colorama==0.4.6 \ - --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ - --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via semgrep -coverage[toml]==7.2.7 \ - --hash=sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f \ - --hash=sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2 \ - --hash=sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a \ - --hash=sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a \ - --hash=sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01 \ - --hash=sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6 \ - --hash=sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7 \ - --hash=sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f \ - --hash=sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02 \ - --hash=sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c \ - --hash=sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063 \ - --hash=sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a \ - --hash=sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5 \ - 
--hash=sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959 \ - --hash=sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97 \ - --hash=sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6 \ - --hash=sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f \ - --hash=sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9 \ - --hash=sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5 \ - --hash=sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f \ - --hash=sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562 \ - --hash=sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe \ - --hash=sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9 \ - --hash=sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f \ - --hash=sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb \ - --hash=sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb \ - --hash=sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1 \ - --hash=sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb \ - --hash=sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250 \ - --hash=sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e \ - --hash=sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511 \ - --hash=sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5 \ - --hash=sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59 \ - --hash=sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2 \ - --hash=sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d \ - --hash=sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3 \ - --hash=sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4 \ - 
--hash=sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de \ - --hash=sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9 \ - --hash=sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833 \ - --hash=sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0 \ - --hash=sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9 \ - --hash=sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d \ - --hash=sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050 \ - --hash=sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d \ - --hash=sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6 \ - --hash=sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353 \ - --hash=sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb \ - --hash=sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e \ - --hash=sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8 \ - --hash=sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495 \ - --hash=sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2 \ - --hash=sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd \ - --hash=sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27 \ - --hash=sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1 \ - --hash=sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818 \ - --hash=sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4 \ - --hash=sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e \ - --hash=sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850 \ - --hash=sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3 - # via pytest-cov -defusedxml==0.7.1 \ - 
--hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ - --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 - # via semgrep -face==22.0.0 \ - --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ - --hash=sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d - # via glom -flake8==6.0.0 \ - --hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ - --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 - # via -r requirements/dev-bookworm-requirements.in -glom==22.1.0 \ - --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ - --hash=sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772 - # via semgrep -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 - # via requests -iniconfig==2.0.0 \ - --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ - --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 - # via pytest -jsonschema==4.18.0 \ - --hash=sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4 \ - --hash=sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60 - # via semgrep -jsonschema-specifications==2023.6.1 \ - --hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \ - --hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28 - # via jsonschema -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -mccabe==0.7.0 \ - --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ - 
--hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e - # via flake8 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -mypy==1.4.1 \ - --hash=sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042 \ - --hash=sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd \ - --hash=sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2 \ - --hash=sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01 \ - --hash=sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7 \ - --hash=sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3 \ - --hash=sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816 \ - --hash=sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3 \ - --hash=sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc \ - --hash=sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4 \ - --hash=sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b \ - --hash=sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8 \ - --hash=sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c \ - --hash=sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462 \ - --hash=sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7 \ - --hash=sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc \ - --hash=sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258 \ - --hash=sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b \ - --hash=sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9 \ - --hash=sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6 \ - 
--hash=sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f \ - --hash=sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1 \ - --hash=sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828 \ - --hash=sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878 \ - --hash=sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f \ - --hash=sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b - # via -r requirements/dev-bookworm-requirements.in -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via - # black - # mypy -packaging==23.1 \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f - # via - # black - # build - # pytest - # semgrep -pathspec==0.11.1 \ - --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \ - --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293 - # via black -peewee==3.16.2 \ - --hash=sha256:10769981198c7311f84a0ca8db892fa213303a8eb1305deb795a71e7bd606a91 - # via semgrep -pip-tools==6.14.0 \ - --hash=sha256:06366be0e08d86b416407333e998b4d305d5bd925151b08942ed149380ba3e47 \ - --hash=sha256:c5ad042cd27c0b343b10db1db7f77a7d087beafbec59ae6df1bba4d3368dfe8c - # via -r requirements/dev-bookworm-requirements.in -platformdirs==3.8.1 \ - --hash=sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c \ - --hash=sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528 - # via black -pluggy==1.2.0 \ - --hash=sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849 \ - --hash=sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3 - # via pytest -pycodestyle==2.10.0 \ - 
--hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ - --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 - # via flake8 -pyflakes==3.0.1 \ - --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ - --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd - # via flake8 -pygments==2.15.1 \ - --hash=sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c \ - --hash=sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1 - # via rich -pyproject-hooks==1.0.0 \ - --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ - --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 - # via build -pytest==7.4.0 \ - --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \ - --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a - # via - # -r requirements/dev-bookworm-requirements.in - # pytest-cov - # pytest-mock -pytest-cov==4.1.0 \ - --hash=sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6 \ - --hash=sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a - # via -r requirements/dev-bookworm-requirements.in -pytest-mock==3.11.1 \ - --hash=sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39 \ - --hash=sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f - # via -r requirements/dev-bookworm-requirements.in -python-lsp-jsonrpc==1.0.0 \ - --hash=sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7 \ - --hash=sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd - # via semgrep -referencing==0.29.1 \ - --hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \ - --hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f - # via - # jsonschema - # jsonschema-specifications -requests==2.31.0 \ - 
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via semgrep -rich==13.4.2 \ - --hash=sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec \ - --hash=sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898 - # via semgrep -rpds-py==0.8.10 \ - --hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \ - --hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \ - --hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \ - --hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \ - --hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \ - --hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \ - --hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \ - --hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \ - --hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \ - --hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \ - --hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \ - --hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \ - --hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \ - --hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \ - --hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \ - --hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \ - --hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \ - --hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \ - --hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \ - 
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \ - --hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \ - --hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \ - --hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \ - --hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \ - --hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \ - --hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \ - --hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \ - --hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \ - --hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \ - --hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \ - --hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \ - --hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \ - --hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \ - --hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \ - --hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \ - --hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \ - --hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \ - --hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \ - --hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \ - --hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \ - --hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \ - --hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \ - --hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \ - 
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \ - --hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \ - --hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \ - --hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \ - --hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \ - --hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \ - --hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \ - --hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \ - --hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \ - --hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \ - --hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \ - --hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \ - --hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \ - --hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \ - --hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \ - --hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \ - --hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \ - --hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \ - --hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \ - --hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \ - --hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \ - --hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \ - --hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \ - --hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \ - 
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \ - --hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \ - --hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \ - --hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \ - --hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \ - --hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \ - --hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \ - --hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \ - --hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \ - --hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \ - --hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \ - --hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \ - --hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \ - --hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \ - --hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \ - --hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \ - --hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \ - --hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \ - --hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \ - --hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \ - --hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \ - --hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \ - --hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \ - --hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \ - 
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \ - --hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \ - --hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \ - --hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \ - --hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \ - --hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.32 \ - --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ - --hash=sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2 - # via semgrep -ruamel-yaml-clib==0.2.7 \ - --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ - --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ - --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ - --hash=sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81 \ - --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ - --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ - --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ - --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ - --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ - --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ - --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ - --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ - --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ - --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ - 
--hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ - --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ - --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ - --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ - --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ - --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ - --hash=sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf \ - --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ - --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ - --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ - --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ - --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ - --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ - --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ - --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ - --hash=sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122 \ - --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ - --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ - --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ - --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ - --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ - --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 \ - --hash=sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38 - # via ruamel-yaml -semgrep==1.31.2 \ - 
--hash=sha256:0fc463f8afcc649efaf61c00f17f7c124498c2e95cca9d805fd68d203362cdeb \ - --hash=sha256:30d0662a6ac8d7258af3b383cca1c93da646fc99b60e3247f6acf3dcf764e815 \ - --hash=sha256:c26ce223c60688e317299f97cac9889b3e879dc4ee28097555cad6215086dcf4 \ - --hash=sha256:cd707b74cd76ef5dff974df3fe653967faf1bd0248019f7b6777170cefa4fca5 - # via -r requirements/dev-bookworm-requirements.in -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via semgrep -types-setuptools==68.0.0.1 \ - --hash=sha256:a0454ea7ad0711f63a602caa87929003a83cab89224ae1506ed44bb5be8fe7d7 \ - --hash=sha256:cc5acbc464b106104899e9b9eb4955dd47e854753c8d4ee2ce697eaf0f4d74e1 - # via -r requirements/dev-bookworm-requirements.in -typing-extensions==4.7.1 \ - --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ - --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 - # via - # mypy - # semgrep -ujson==5.8.0 \ - --hash=sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c \ - --hash=sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7 \ - --hash=sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903 \ - --hash=sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c \ - --hash=sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5 \ - --hash=sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296 \ - --hash=sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f \ - --hash=sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3 \ - --hash=sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4 \ - --hash=sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207 \ - --hash=sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6 \ - 
--hash=sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb \ - --hash=sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e \ - --hash=sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130 \ - --hash=sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721 \ - --hash=sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0 \ - --hash=sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3 \ - --hash=sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc \ - --hash=sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a \ - --hash=sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916 \ - --hash=sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8 \ - --hash=sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6 \ - --hash=sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405 \ - --hash=sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c \ - --hash=sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042 \ - --hash=sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7 \ - --hash=sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425 \ - --hash=sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3 \ - --hash=sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30 \ - --hash=sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282 \ - --hash=sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a \ - --hash=sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a \ - --hash=sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75 \ - --hash=sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07 \ - --hash=sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08 \ - 
--hash=sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582 \ - --hash=sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9 \ - --hash=sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c \ - --hash=sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba \ - --hash=sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879 \ - --hash=sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94 \ - --hash=sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b \ - --hash=sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67 \ - --hash=sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f \ - --hash=sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2 \ - --hash=sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564 \ - --hash=sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76 \ - --hash=sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109 \ - --hash=sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7 \ - --hash=sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377 \ - --hash=sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5 \ - --hash=sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa \ - --hash=sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b \ - --hash=sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83 \ - --hash=sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec \ - --hash=sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf \ - --hash=sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae \ - --hash=sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95 \ - --hash=sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1 \ - 
--hash=sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f \ - --hash=sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c - # via python-lsp-jsonrpc -urllib3==1.26.16 \ - --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \ - --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14 - # via - # requests - # semgrep -wcmatch==8.4.1 \ - --hash=sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7 \ - --hash=sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943 - # via semgrep -wheel==0.40.0 \ - --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ - --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 - # via pip-tools - -# The following packages are considered to be unsafe in a requirements file: -pip==23.1.2 \ - --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ - --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 - # via pip-tools -setuptools==68.0.0 \ - --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ - --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 - # via pip-tools diff --git a/requirements/dev-bullseye-requirements.in b/requirements/dev-bullseye-requirements.in deleted file mode 100644 index 43216f80c..000000000 --- a/requirements/dev-bullseye-requirements.in +++ /dev/null @@ -1,14 +0,0 @@ -# include prod requirements --r requirements.in - -black -certifi>=2023.07.22 -flake8 -pip-tools -pytest -pytest-cov -pytest-mock -semgrep - -mypy -types-setuptools diff --git a/requirements/dev-bullseye-requirements.txt b/requirements/dev-bullseye-requirements.txt deleted file mode 100644 index 8ec35fd7a..000000000 --- a/requirements/dev-bullseye-requirements.txt +++ /dev/null @@ -1,632 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following 
command: -# -# pip-compile --allow-unsafe --config=pyproject.toml --generate-hashes --output-file=requirements/dev-bullseye-requirements.txt requirements/dev-bullseye-requirements.in -# -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 - # via - # glom - # jsonschema - # referencing - # semgrep -black==23.7.0 \ - --hash=sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3 \ - --hash=sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb \ - --hash=sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087 \ - --hash=sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320 \ - --hash=sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6 \ - --hash=sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3 \ - --hash=sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc \ - --hash=sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f \ - --hash=sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587 \ - --hash=sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91 \ - --hash=sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a \ - --hash=sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad \ - --hash=sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926 \ - --hash=sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9 \ - --hash=sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be \ - --hash=sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd \ - --hash=sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96 \ - --hash=sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491 \ - 
--hash=sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2 \ - --hash=sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a \ - --hash=sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f \ - --hash=sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995 - # via -r requirements/dev-bullseye-requirements.in -boltons==21.0.0 \ - --hash=sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13 \ - --hash=sha256:b9bb7b58b2b420bbe11a6025fdef6d3e5edc9f76a42fb467afe7ca212ef9948b - # via - # face - # glom - # semgrep -bracex==2.3.post1 \ - --hash=sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73 \ - --hash=sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693 - # via wcmatch -build==0.10.0 \ - --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ - --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 - # via pip-tools -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 - # via - # -r requirements/dev-bullseye-requirements.in - # requests -charset-normalizer==3.2.0 \ - --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ - --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ - --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \ - --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \ - --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \ - --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \ - --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \ - --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \ - 
--hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \ - --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \ - --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \ - --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \ - --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \ - --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \ - --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \ - --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \ - --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \ - --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \ - --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \ - --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \ - --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \ - --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \ - --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \ - --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \ - --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \ - --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \ - --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \ - --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \ - --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \ - --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \ - --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \ - --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \ - 
--hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \ - --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \ - --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \ - --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \ - --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \ - --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \ - --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \ - --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \ - --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \ - --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \ - --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \ - --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \ - --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \ - --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \ - --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \ - --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \ - --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \ - --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \ - --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \ - --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \ - --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \ - --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \ - --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \ - --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \ - 
--hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \ - --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \ - --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \ - --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \ - --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \ - --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \ - --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \ - --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \ - --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \ - --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \ - --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \ - --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \ - --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \ - --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \ - --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \ - --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \ - --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \ - --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ - --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa - # via requests -click==8.1.4 \ - --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \ - --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37 - # via - # black - # click-option-group - # pip-tools - # semgrep -click-option-group==0.5.6 \ - --hash=sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7 \ - 
--hash=sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777 - # via semgrep -colorama==0.4.6 \ - --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ - --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via semgrep -coverage[toml]==7.2.7 \ - --hash=sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f \ - --hash=sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2 \ - --hash=sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a \ - --hash=sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a \ - --hash=sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01 \ - --hash=sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6 \ - --hash=sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7 \ - --hash=sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f \ - --hash=sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02 \ - --hash=sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c \ - --hash=sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063 \ - --hash=sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a \ - --hash=sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5 \ - --hash=sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959 \ - --hash=sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97 \ - --hash=sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6 \ - --hash=sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f \ - --hash=sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9 \ - --hash=sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5 \ - --hash=sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f \ - 
--hash=sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562 \ - --hash=sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe \ - --hash=sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9 \ - --hash=sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f \ - --hash=sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb \ - --hash=sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb \ - --hash=sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1 \ - --hash=sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb \ - --hash=sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250 \ - --hash=sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e \ - --hash=sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511 \ - --hash=sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5 \ - --hash=sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59 \ - --hash=sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2 \ - --hash=sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d \ - --hash=sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3 \ - --hash=sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4 \ - --hash=sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de \ - --hash=sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9 \ - --hash=sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833 \ - --hash=sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0 \ - --hash=sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9 \ - --hash=sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d \ - --hash=sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050 \ - 
--hash=sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d \ - --hash=sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6 \ - --hash=sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353 \ - --hash=sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb \ - --hash=sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e \ - --hash=sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8 \ - --hash=sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495 \ - --hash=sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2 \ - --hash=sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd \ - --hash=sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27 \ - --hash=sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1 \ - --hash=sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818 \ - --hash=sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4 \ - --hash=sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e \ - --hash=sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850 \ - --hash=sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3 - # via pytest-cov -defusedxml==0.7.1 \ - --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ - --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 - # via semgrep -exceptiongroup==1.1.2 \ - --hash=sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5 \ - --hash=sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f - # via pytest -face==22.0.0 \ - --hash=sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35 \ - --hash=sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d - # via glom -flake8==6.0.0 \ - 
--hash=sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7 \ - --hash=sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181 - # via -r requirements/dev-bullseye-requirements.in -glom==22.1.0 \ - --hash=sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5 \ - --hash=sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772 - # via semgrep -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 - # via requests -iniconfig==2.0.0 \ - --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ - --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 - # via pytest -jsonschema==4.18.0 \ - --hash=sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4 \ - --hash=sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60 - # via semgrep -jsonschema-specifications==2023.6.1 \ - --hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \ - --hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28 - # via jsonschema -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -mccabe==0.7.0 \ - --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ - --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e - # via flake8 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -mypy==1.4.1 \ - --hash=sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042 \ - 
--hash=sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd \ - --hash=sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2 \ - --hash=sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01 \ - --hash=sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7 \ - --hash=sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3 \ - --hash=sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816 \ - --hash=sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3 \ - --hash=sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc \ - --hash=sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4 \ - --hash=sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b \ - --hash=sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8 \ - --hash=sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c \ - --hash=sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462 \ - --hash=sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7 \ - --hash=sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc \ - --hash=sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258 \ - --hash=sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b \ - --hash=sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9 \ - --hash=sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6 \ - --hash=sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f \ - --hash=sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1 \ - --hash=sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828 \ - --hash=sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878 \ - --hash=sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f \ - 
--hash=sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b - # via -r requirements/dev-bullseye-requirements.in -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via - # black - # mypy -packaging==23.1 \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f - # via - # black - # build - # pytest - # semgrep -pathspec==0.11.1 \ - --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \ - --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293 - # via black -peewee==3.16.2 \ - --hash=sha256:10769981198c7311f84a0ca8db892fa213303a8eb1305deb795a71e7bd606a91 - # via semgrep -pip-tools==6.14.0 \ - --hash=sha256:06366be0e08d86b416407333e998b4d305d5bd925151b08942ed149380ba3e47 \ - --hash=sha256:c5ad042cd27c0b343b10db1db7f77a7d087beafbec59ae6df1bba4d3368dfe8c - # via -r requirements/dev-bullseye-requirements.in -platformdirs==3.8.1 \ - --hash=sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c \ - --hash=sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528 - # via black -pluggy==1.2.0 \ - --hash=sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849 \ - --hash=sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3 - # via pytest -pycodestyle==2.10.0 \ - --hash=sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053 \ - --hash=sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610 - # via flake8 -pyflakes==3.0.1 \ - --hash=sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf \ - --hash=sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd - # via flake8 -pygments==2.15.1 \ - 
--hash=sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c \ - --hash=sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1 - # via rich -pyproject-hooks==1.0.0 \ - --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ - --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 - # via build -pytest==7.4.0 \ - --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \ - --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a - # via - # -r requirements/dev-bullseye-requirements.in - # pytest-cov - # pytest-mock -pytest-cov==4.1.0 \ - --hash=sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6 \ - --hash=sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a - # via -r requirements/dev-bullseye-requirements.in -pytest-mock==3.11.1 \ - --hash=sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39 \ - --hash=sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f - # via -r requirements/dev-bullseye-requirements.in -python-lsp-jsonrpc==1.0.0 \ - --hash=sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7 \ - --hash=sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd - # via semgrep -referencing==0.29.1 \ - --hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \ - --hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f - # via - # jsonschema - # jsonschema-specifications -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via semgrep -rich==13.4.2 \ - --hash=sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec \ - --hash=sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898 - # via semgrep -rpds-py==0.8.10 \ - 
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \ - --hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \ - --hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \ - --hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \ - --hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \ - --hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \ - --hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \ - --hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \ - --hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \ - --hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \ - --hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \ - --hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \ - --hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \ - --hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \ - --hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \ - --hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \ - --hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \ - --hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \ - --hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \ - --hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \ - --hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \ - --hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \ - --hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \ - --hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \ - 
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \ - --hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \ - --hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \ - --hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \ - --hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \ - --hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \ - --hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \ - --hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \ - --hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \ - --hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \ - --hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \ - --hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \ - --hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \ - --hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \ - --hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \ - --hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \ - --hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \ - --hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \ - --hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \ - --hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \ - --hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \ - --hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \ - --hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \ - --hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \ - 
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \ - --hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \ - --hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \ - --hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \ - --hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \ - --hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \ - --hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \ - --hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \ - --hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \ - --hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \ - --hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \ - --hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \ - --hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \ - --hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \ - --hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \ - --hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \ - --hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \ - --hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \ - --hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \ - --hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \ - --hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \ - --hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \ - --hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \ - --hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \ - 
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \ - --hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \ - --hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \ - --hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \ - --hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \ - --hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \ - --hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \ - --hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \ - --hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \ - --hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \ - --hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \ - --hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \ - --hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \ - --hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \ - --hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \ - --hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \ - --hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \ - --hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \ - --hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \ - --hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \ - --hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \ - --hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \ - --hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \ - --hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \ - 
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.32 \ - --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ - --hash=sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2 - # via semgrep -ruamel-yaml-clib==0.2.7 \ - --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ - --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ - --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ - --hash=sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81 \ - --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ - --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ - --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ - --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ - --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ - --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ - --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ - --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ - --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ - --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ - --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ - --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ - --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ - --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ - --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ - 
--hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ - --hash=sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf \ - --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ - --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ - --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ - --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ - --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ - --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ - --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ - --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ - --hash=sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122 \ - --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ - --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ - --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ - --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ - --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ - --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 \ - --hash=sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38 - # via ruamel-yaml -semgrep==1.31.2 \ - --hash=sha256:0fc463f8afcc649efaf61c00f17f7c124498c2e95cca9d805fd68d203362cdeb \ - --hash=sha256:30d0662a6ac8d7258af3b383cca1c93da646fc99b60e3247f6acf3dcf764e815 \ - --hash=sha256:c26ce223c60688e317299f97cac9889b3e879dc4ee28097555cad6215086dcf4 \ - --hash=sha256:cd707b74cd76ef5dff974df3fe653967faf1bd0248019f7b6777170cefa4fca5 - # via -r requirements/dev-bullseye-requirements.in -tomli==2.0.1 \ - 
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via - # black - # build - # coverage - # mypy - # pip-tools - # pyproject-hooks - # pytest - # semgrep -types-setuptools==68.0.0.1 \ - --hash=sha256:a0454ea7ad0711f63a602caa87929003a83cab89224ae1506ed44bb5be8fe7d7 \ - --hash=sha256:cc5acbc464b106104899e9b9eb4955dd47e854753c8d4ee2ce697eaf0f4d74e1 - # via -r requirements/dev-bullseye-requirements.in -typing-extensions==4.7.1 \ - --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ - --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 - # via - # black - # mypy - # semgrep -ujson==5.8.0 \ - --hash=sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c \ - --hash=sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7 \ - --hash=sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903 \ - --hash=sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c \ - --hash=sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5 \ - --hash=sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296 \ - --hash=sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f \ - --hash=sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3 \ - --hash=sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4 \ - --hash=sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207 \ - --hash=sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6 \ - --hash=sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb \ - --hash=sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e \ - --hash=sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130 \ - 
--hash=sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721 \ - --hash=sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0 \ - --hash=sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3 \ - --hash=sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc \ - --hash=sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a \ - --hash=sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916 \ - --hash=sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8 \ - --hash=sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6 \ - --hash=sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405 \ - --hash=sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c \ - --hash=sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042 \ - --hash=sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7 \ - --hash=sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425 \ - --hash=sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3 \ - --hash=sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30 \ - --hash=sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282 \ - --hash=sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a \ - --hash=sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a \ - --hash=sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75 \ - --hash=sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07 \ - --hash=sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08 \ - --hash=sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582 \ - --hash=sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9 \ - --hash=sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c \ - 
--hash=sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba \ - --hash=sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879 \ - --hash=sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94 \ - --hash=sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b \ - --hash=sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67 \ - --hash=sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f \ - --hash=sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2 \ - --hash=sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564 \ - --hash=sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76 \ - --hash=sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109 \ - --hash=sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7 \ - --hash=sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377 \ - --hash=sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5 \ - --hash=sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa \ - --hash=sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b \ - --hash=sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83 \ - --hash=sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec \ - --hash=sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf \ - --hash=sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae \ - --hash=sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95 \ - --hash=sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1 \ - --hash=sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f \ - --hash=sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c - # via python-lsp-jsonrpc -urllib3==1.26.16 \ - 
--hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \ - --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14 - # via - # requests - # semgrep -wcmatch==8.4.1 \ - --hash=sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7 \ - --hash=sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943 - # via semgrep -wheel==0.40.0 \ - --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ - --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 - # via pip-tools - -# The following packages are considered to be unsafe in a requirements file: -pip==23.1.2 \ - --hash=sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba \ - --hash=sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18 - # via pip-tools -setuptools==68.0.0 \ - --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ - --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 - # via pip-tools diff --git a/requirements/requirements.in b/requirements/requirements.in deleted file mode 100644 index e69de29bb..000000000 diff --git a/requirements/requirements.txt b/requirements/requirements.txt deleted file mode 100644 index b7f4b2d8b..000000000 --- a/requirements/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: -# -# pip-compile --generate-hashes --output-file=requirements/requirements.txt requirements/requirements.in -# diff --git a/scripts/codename b/scripts/codename deleted file mode 100755 index 261793bc5..000000000 --- a/scripts/codename +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# Returns the Debian version's codename (e.g. "bullseye") in a way that should -# work across both released versions and unreleased ones. 
-# See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1008735 - -source /etc/os-release - -if [[ "$VERSION_CODENAME" != "" ]]; then - echo $VERSION_CODENAME -else - # PRETTY_NAME="Debian GNU/Linux bookworm/sid" - # Use awk to split on spaces and / - echo $PRETTY_NAME | awk '{split($0, a, "[ /]"); print a[4]}' -fi - From 9973f9e43b1b58dd4910f8b956639b0c6837e6b9 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 11 Dec 2023 16:38:56 -0500 Subject: [PATCH 346/352] Move client files into client/ folder --- {.circleci => client/.circleci}/config.yml | 0 .coveragerc => client/.coveragerc | 0 .../.git-blame-ignore-revs | 0 {.githooks => client/.githooks}/pre-commit | 0 {.github => client/.github}/CODEOWNERS | 0 .../.github}/ISSUE_TEMPLATE/bug_report.md | 0 .../.github}/ISSUE_TEMPLATE/feature_request.md | 0 .../.github}/ISSUE_TEMPLATE/release.md | 0 .../.github}/pull_request_template.md | 0 .gitignore => client/.gitignore | 0 {.semgrep => client/.semgrep}/custom-rules.yml | 0 CONTRIBUTING.md => client/CONTRIBUTING.md | 0 Doxyfile => client/Doxyfile | 0 LICENSE => client/LICENSE | 0 MANIFEST.in => client/MANIFEST.in | 0 Makefile => client/Makefile | 0 README.md => client/README.md | 0 SECURITY.md => client/SECURITY.md | 0 alembic.ini => client/alembic.ini | 0 {alembic => client/alembic}/README | 0 {alembic => client/alembic}/env.py | 0 {alembic => client/alembic}/script.py.mako | 0 ...7c04463_draftreply_add_column_for_sending_pid.py | 0 .../alembic}/versions/d7c8af95bc8e_initial.py | 0 babel.cfg => client/babel.cfg | 0 .../build-requirements.txt | 0 changelog.md => client/changelog.md | 0 create_dev_data.py => client/create_dev_data.py | 0 {files => client/files}/alembic.ini | 0 .../files}/press.freedom.SecureDropClient.desktop | 0 {files => client/files}/sd-app-qubes-gpg-domain.sh | 0 {files => client/files}/securedrop-client | 0 {files => client/files}/securedrop-client.desktop | 0 {files => client/files}/usr.bin.securedrop-client | 0 poetry.lock => 
client/poetry.lock | 0 project.json => client/project.json | 0 pyproject.toml => client/pyproject.toml | 0 pytest.ini => client/pytest.ini | 0 run.sh => client/run.sh | 0 .../scripts}/setup-tmp-directories.sh | 0 {scripts => client/scripts}/verify-mo.py | 0 .../securedrop_client}/__init__.py | 0 .../securedrop_client}/__main__.py | 0 .../securedrop_client}/api_jobs/__init__.py | 0 .../securedrop_client}/api_jobs/base.py | 0 .../securedrop_client}/api_jobs/downloads.py | 0 .../securedrop_client}/api_jobs/seen.py | 0 .../securedrop_client}/api_jobs/sources.py | 0 .../securedrop_client}/api_jobs/sync.py | 0 .../securedrop_client}/api_jobs/updatestar.py | 0 .../securedrop_client}/api_jobs/uploads.py | 0 .../securedrop_client}/app.py | 0 .../securedrop_client}/config.py | 0 .../securedrop_client}/conversation/__init__.py | 0 .../conversation/transcript/__init__.py | 0 .../conversation/transcript/items/__init__.py | 0 .../conversation/transcript/items/factory.py | 0 .../conversation/transcript/items/file.py | 0 .../conversation/transcript/items/item.py | 0 .../conversation/transcript/items/message.py | 0 .../transcript/templates/transcript.txt.jinja | 0 .../conversation/transcript/transcript.py | 0 .../securedrop_client}/crypto.py | 0 .../securedrop_client}/database.py | 0 .../securedrop_client}/db.py | 0 .../securedrop_client}/export.py | 0 .../securedrop_client}/gui/__init__.py | 0 .../securedrop_client}/gui/actions.py | 0 .../securedrop_client}/gui/auth/__init__.py | 0 .../securedrop_client}/gui/auth/dialog.css | 0 .../securedrop_client}/gui/auth/dialog.py | 0 .../securedrop_client}/gui/auth/sign_in/__init__.py | 0 .../securedrop_client}/gui/auth/sign_in/button.css | 0 .../securedrop_client}/gui/auth/sign_in/button.py | 0 .../gui/auth/sign_in/error_bar.css | 0 .../gui/auth/sign_in/error_bar.py | 0 .../gui/auth/use_offline/__init__.py | 0 .../gui/auth/use_offline/button.py | 0 .../securedrop_client}/gui/base/__init__.py | 0 .../securedrop_client}/gui/base/buttons.py | 
0 .../securedrop_client}/gui/base/checkbox.css | 0 .../securedrop_client}/gui/base/checkbox.py | 0 .../securedrop_client}/gui/base/dialog_button.css | 0 .../securedrop_client}/gui/base/dialog_message.css | 0 .../securedrop_client}/gui/base/dialogs.css | 0 .../securedrop_client}/gui/base/dialogs.py | 0 .../securedrop_client}/gui/base/inputs.py | 0 .../securedrop_client}/gui/base/misc.py | 0 .../securedrop_client}/gui/conversation/__init__.py | 0 .../gui/conversation/delete/__init__.py | 0 .../gui/conversation/delete/dialog.py | 0 .../gui/conversation/export/__init__.py | 0 .../gui/conversation/export/device.py | 0 .../gui/conversation/export/dialog.css | 0 .../gui/conversation/export/dialog.py | 0 .../gui/conversation/export/file_dialog.py | 0 .../gui/conversation/export/print_dialog.py | 0 .../conversation/export/print_transcript_dialog.py | 0 .../gui/conversation/export/transcript_dialog.py | 0 .../securedrop_client}/gui/datetime_helpers.py | 0 .../securedrop_client}/gui/main.py | 0 .../securedrop_client}/gui/source/__init__.py | 0 .../gui/source/delete/__init__.py | 0 .../securedrop_client}/gui/source/delete/dialog.py | 0 .../securedrop_client}/gui/widgets.py | 0 .../locale/ca/LC_MESSAGES/messages.mo | Bin .../locale/ca/LC_MESSAGES/messages.po | 0 .../locale/de/LC_MESSAGES/messages.mo | Bin .../locale/de/LC_MESSAGES/messages.po | 0 .../locale/es/LC_MESSAGES/messages.mo | Bin .../locale/es/LC_MESSAGES/messages.po | 0 .../locale/hr/LC_MESSAGES/messages.mo | Bin .../locale/hr/LC_MESSAGES/messages.po | 0 .../locale/is/LC_MESSAGES/messages.mo | Bin .../locale/is/LC_MESSAGES/messages.po | 0 .../locale/it/LC_MESSAGES/messages.mo | Bin .../locale/it/LC_MESSAGES/messages.po | 0 .../securedrop_client}/locale/messages.pot | 0 .../locale/pt_BR/LC_MESSAGES/messages.mo | Bin .../locale/pt_BR/LC_MESSAGES/messages.po | 0 .../locale/pt_PT/LC_MESSAGES/messages.mo | Bin .../locale/pt_PT/LC_MESSAGES/messages.po | 0 .../locale/ru/LC_MESSAGES/messages.mo | Bin 
.../locale/ru/LC_MESSAGES/messages.po | 0 .../locale/sk/LC_MESSAGES/messages.mo | Bin .../locale/sk/LC_MESSAGES/messages.po | 0 .../locale/sv/LC_MESSAGES/messages.mo | Bin .../locale/sv/LC_MESSAGES/messages.po | 0 .../locale/tr/LC_MESSAGES/messages.mo | Bin .../locale/tr/LC_MESSAGES/messages.po | 0 .../locale/zh_Hans/LC_MESSAGES/messages.mo | Bin .../locale/zh_Hans/LC_MESSAGES/messages.po | 0 .../securedrop_client}/logic.py | 0 .../securedrop_client}/queue.py | 0 .../securedrop_client}/resources/__init__.py | 0 .../securedrop_client}/resources/css/button.css | 0 .../resources/css/checker_tooltip.css | 0 .../resources/css/file_download_button.css | 0 .../securedrop_client}/resources/css/sdclient.css | 0 .../resources/css/sender_icon.css | 0 .../resources/css/source_menu.css | 0 .../resources/css/source_name.css | 0 .../resources/css/source_preview.css | 0 .../resources/css/source_timestamp.css | 0 .../resources/css/speech_bubble_message.css | 0 .../resources/css/speech_bubble_status_bar.css | 0 .../resources/fonts/Montserrat/Montserrat-Black.ttf | Bin .../fonts/Montserrat/Montserrat-BlackItalic.ttf | Bin .../resources/fonts/Montserrat/Montserrat-Bold.ttf | Bin .../fonts/Montserrat/Montserrat-BoldItalic.ttf | Bin .../fonts/Montserrat/Montserrat-ExtraBold.ttf | Bin .../fonts/Montserrat/Montserrat-ExtraBoldItalic.ttf | Bin .../fonts/Montserrat/Montserrat-ExtraLight.ttf | Bin .../Montserrat/Montserrat-ExtraLightItalic.ttf | Bin .../resources/fonts/Montserrat/Montserrat-Light.ttf | Bin .../fonts/Montserrat/Montserrat-LightItalic.ttf | Bin .../fonts/Montserrat/Montserrat-Medium.ttf | Bin .../fonts/Montserrat/Montserrat-MediumItalic.ttf | Bin .../fonts/Montserrat/Montserrat-Regular.ttf | Bin .../fonts/Montserrat/Montserrat-RegularItalic.ttf | Bin .../fonts/Montserrat/Montserrat-SemiBold.ttf | Bin .../fonts/Montserrat/Montserrat-SemiBoldItalic.ttf | Bin .../resources/fonts/Montserrat/Montserrat-Thin.ttf | Bin .../fonts/Montserrat/Montserrat-ThinItalic.ttf | Bin 
.../resources/fonts/Montserrat/OFL.txt | 0 .../resources/fonts/Source_Sans_Pro/OFL.txt | 0 .../fonts/Source_Sans_Pro/SourceSansPro-Black.ttf | Bin .../Source_Sans_Pro/SourceSansPro-BlackItalic.ttf | Bin .../fonts/Source_Sans_Pro/SourceSansPro-Bold.ttf | Bin .../Source_Sans_Pro/SourceSansPro-BoldItalic.ttf | Bin .../Source_Sans_Pro/SourceSansPro-ExtraLight.ttf | Bin .../SourceSansPro-ExtraLightItalic.ttf | Bin .../fonts/Source_Sans_Pro/SourceSansPro-Light.ttf | Bin .../Source_Sans_Pro/SourceSansPro-LightItalic.ttf | Bin .../fonts/Source_Sans_Pro/SourceSansPro-Regular.ttf | Bin .../Source_Sans_Pro/SourceSansPro-RegularItalic.ttf | Bin .../Source_Sans_Pro/SourceSansPro-SemiBold.ttf | Bin .../SourceSansPro-SemiBoldItalic.ttf | Bin .../resources/images/activestate-wide.gif | Bin .../securedrop_client}/resources/images/blank.svg | 0 .../resources/images/checkmark.svg | 0 .../resources/images/checkmark_hover.svg | 0 .../securedrop_client}/resources/images/cross.svg | 0 .../securedrop_client}/resources/images/delete.png | Bin .../resources/images/delete_close.svg | 0 .../resources/images/deleted-user.svg | 0 .../resources/images/download_active.svg | 0 .../resources/images/download_file.gif | Bin .../resources/images/download_file.svg | 0 .../resources/images/download_file_hover.svg | 0 .../resources/images/dropdown_arrow.svg | 0 .../resources/images/ellipsis.svg | 0 .../resources/images/error_icon.svg | 0 .../resources/images/error_icon_white.svg | 0 .../resources/images/eye_hidden.svg | 0 .../resources/images/eye_visible.svg | 0 .../resources/images/header_animation.gif | Bin .../resources/images/header_logo.png | Bin .../securedrop_client}/resources/images/hexes.svg | 0 .../securedrop_client}/resources/images/icon.png | Bin .../resources/images/left_pane.svg | 0 .../resources/images/left_pane_offline.svg | 0 .../resources/images/loading-bar.gif | Bin .../resources/images/loading-cubes.gif | Bin .../resources/images/login_bg.svg | 0 
.../securedrop_client}/resources/images/logo.png | Bin .../resources/images/paperclip-disabled.svg | 0 .../resources/images/paperclip.svg | 0 .../securedrop_client}/resources/images/printer.svg | 0 .../securedrop_client}/resources/images/refresh.svg | 0 .../resources/images/refresh_active.svg | 0 .../resources/images/refresh_offline.svg | 0 .../resources/images/savetodisk.svg | 0 .../resources/images/send-disabled.svg | 0 .../securedrop_client}/resources/images/send.svg | 0 .../resources/images/star_hover.svg | 0 .../resources/images/star_off.svg | 0 .../securedrop_client}/resources/images/star_on.svg | 0 .../securedrop_client}/resources/images/sync.gif | Bin .../securedrop_client}/resources/images/sync.svg | 0 .../resources/images/sync_active.gif | Bin .../resources/images/sync_disabled.gif | Bin .../resources/images/tear-big.svg | 0 .../resources/images/tear-left.svg | 0 .../resources/images/tear-right.svg | 0 .../securedrop_client}/resources/images/trash.png | Bin .../securedrop_client}/state/__init__.py | 0 .../securedrop_client}/state/domain.py | 0 .../securedrop_client}/state/state.py | 0 .../securedrop_client}/storage.py | 0 .../securedrop_client}/sync.py | 0 .../securedrop_client}/utils.py | 0 setup.cfg => client/setup.cfg | 0 setup.py => client/setup.py | 0 test-functional.sh => client/test-functional.sh | 0 {tests => client/tests}/__init__.py | 0 {tests => client/tests}/api_jobs/__init__.py | 0 {tests => client/tests}/api_jobs/test_base.py | 0 {tests => client/tests}/api_jobs/test_downloads.py | 0 {tests => client/tests}/api_jobs/test_seen.py | 0 {tests => client/tests}/api_jobs/test_sources.py | 0 {tests => client/tests}/api_jobs/test_sync.py | 0 {tests => client/tests}/api_jobs/test_updatestar.py | 0 {tests => client/tests}/api_jobs/test_uploads.py | 0 {tests => client/tests}/conftest.py | 0 {tests => client/tests}/factory.py | 0 {tests => client/tests}/files/securedrop.gpg.asc | 0 .../tests}/files/securedrop.gpg.pub.asc | 0 {tests => 
client/tests}/files/test-doc.gz.gpg | 0 {tests => client/tests}/files/test-key.gpg.asc | 0 {tests => client/tests}/files/test-key.gpg.pub.asc | 0 {tests => client/tests}/functional/__init__.py | 0 .../functional/cassettes/test_delete_source.yaml | 0 .../functional/cassettes/test_download_file.yaml | 0 .../cassettes/test_export_file_dialog.yaml | 0 .../cassettes/test_login_as_journalist.yaml | 0 .../cassettes/test_login_from_offline.yaml | 0 .../cassettes/test_logout_as_journalist.yaml | 0 .../test_offline_delete_source_attempt.yaml | 0 .../cassettes/test_offline_read_conversation.yaml | 0 .../test_offline_send_reply_to_source.yaml | 0 .../cassettes/test_offline_star_source.yaml | 0 .../cassettes/test_receive_message_from_source.yaml | 0 .../functional/cassettes/test_seen_and_unseen.yaml | 0 .../cassettes/test_send_reply_to_source.yaml | 0 .../functional/cassettes/test_star_source.yaml | 0 .../test_unseen_source_becomes_seen_on_click.yaml | 0 .../functional/cassettes/test_user_icon_click.yaml | 0 .../tests}/functional/test_delete_source.py | 0 .../tests}/functional/test_download_file.py | 0 .../tests}/functional/test_export_file_dialog.py | 0 {tests => client/tests}/functional/test_login.py | 0 {tests => client/tests}/functional/test_logout.py | 0 .../tests}/functional/test_offline_delete_source.py | 0 .../tests}/functional/test_offline_login.py | 0 .../functional/test_offline_read_conversation.py | 0 .../tests}/functional/test_offline_send_reply.py | 0 .../tests}/functional/test_offline_star_source.py | 0 .../tests}/functional/test_receive_message.py | 0 {tests => client/tests}/functional/test_seen.py | 0 .../tests}/functional/test_send_reply.py | 0 .../tests}/functional/test_star_source.py | 0 .../tests}/functional/test_user_profile_menu.py | 0 {tests => client/tests}/gui/__init__.py | 0 .../tests}/gui/auth/sign_in/test_error_bar.py | 0 {tests => client/tests}/gui/auth/test_dialog.py | 0 {tests => client/tests}/gui/base/test_dialogs.py | 0 {tests => 
client/tests}/gui/base/test_inputs.py | 0 {tests => client/tests}/gui/base/test_misc.py | 0 {tests => client/tests}/gui/base/test_sdcheckbox.py | 0 .../tests}/gui/conversation/delete/__init__.py | 0 .../tests}/gui/conversation/delete/test_dialog.py | 0 .../tests}/gui/conversation/export/__init__.py | 0 .../tests}/gui/conversation/export/test_device.py | 0 .../tests}/gui/conversation/export/test_dialog.py | 0 .../gui/conversation/export/test_file_dialog.py | 0 .../gui/conversation/export/test_print_dialog.py | 0 .../export/test_print_transcript_dialog.py | 0 .../conversation/export/test_transcript_dialog.py | 0 {tests => client/tests}/gui/source/__init__.py | 0 .../tests}/gui/source/delete/__init__.py | 0 .../tests}/gui/source/delete/test_dialog.py | 0 {tests => client/tests}/gui/test_actions.py | 0 .../tests}/gui/test_datetime_helpers.py | 0 {tests => client/tests}/gui/test_main.py | 0 {tests => client/tests}/gui/test_widgets.py | 0 {tests => client/tests}/helper.py | 0 {tests => client/tests}/integration/conftest.py | 0 .../tests}/integration/test_placeholder.py | 0 .../integration/test_styles_file_download_button.py | 0 .../integration/test_styles_modal_dialog_button.py | 0 .../test_styles_modal_dialog_error_details.py | 0 .../integration/test_styles_reply_status_bar.py | 0 .../tests}/integration/test_styles_sdclient.py | 0 .../test_styles_speech_bubble_message.py | 0 .../test_styles_speech_bubble_status_bar.py | 0 {tests => client/tests}/migrations/__init__.py | 0 .../tests}/migrations/test_d7c8af95bc8e.py | 0 {tests => client/tests}/state/__init__.py | 0 {tests => client/tests}/state/test_domain.py | 0 {tests => client/tests}/state/test_state.py | 0 {tests => client/tests}/test_alembic.py | 0 {tests => client/tests}/test_app.py | 0 {tests => client/tests}/test_config.py | 0 {tests => client/tests}/test_conversation.py | 0 {tests => client/tests}/test_crypto.py | 0 {tests => client/tests}/test_export.py | 0 {tests => client/tests}/test_logic.py | 0 {tests => 
client/tests}/test_models.py | 0 {tests => client/tests}/test_queue.py | 0 {tests => client/tests}/test_resources.py | 0 {tests => client/tests}/test_storage.py | 0 {tests => client/tests}/test_sync.py | 0 {tests => client/tests}/test_utils.py | 0 update_version.sh => client/update_version.sh | 0 335 files changed, 0 insertions(+), 0 deletions(-) rename {.circleci => client/.circleci}/config.yml (100%) rename .coveragerc => client/.coveragerc (100%) rename .git-blame-ignore-revs => client/.git-blame-ignore-revs (100%) rename {.githooks => client/.githooks}/pre-commit (100%) rename {.github => client/.github}/CODEOWNERS (100%) rename {.github => client/.github}/ISSUE_TEMPLATE/bug_report.md (100%) rename {.github => client/.github}/ISSUE_TEMPLATE/feature_request.md (100%) rename {.github => client/.github}/ISSUE_TEMPLATE/release.md (100%) rename {.github => client/.github}/pull_request_template.md (100%) rename .gitignore => client/.gitignore (100%) rename {.semgrep => client/.semgrep}/custom-rules.yml (100%) rename CONTRIBUTING.md => client/CONTRIBUTING.md (100%) rename Doxyfile => client/Doxyfile (100%) rename LICENSE => client/LICENSE (100%) rename MANIFEST.in => client/MANIFEST.in (100%) rename Makefile => client/Makefile (100%) rename README.md => client/README.md (100%) rename SECURITY.md => client/SECURITY.md (100%) rename alembic.ini => client/alembic.ini (100%) rename {alembic => client/alembic}/README (100%) rename {alembic => client/alembic}/env.py (100%) rename {alembic => client/alembic}/script.py.mako (100%) rename {alembic => client/alembic}/versions/414627c04463_draftreply_add_column_for_sending_pid.py (100%) rename {alembic => client/alembic}/versions/d7c8af95bc8e_initial.py (100%) rename babel.cfg => client/babel.cfg (100%) rename build-requirements.txt => client/build-requirements.txt (100%) rename changelog.md => client/changelog.md (100%) rename create_dev_data.py => client/create_dev_data.py (100%) rename {files => client/files}/alembic.ini 
(100%) rename {files => client/files}/press.freedom.SecureDropClient.desktop (100%) rename {files => client/files}/sd-app-qubes-gpg-domain.sh (100%) rename {files => client/files}/securedrop-client (100%) rename {files => client/files}/securedrop-client.desktop (100%) rename {files => client/files}/usr.bin.securedrop-client (100%) rename poetry.lock => client/poetry.lock (100%) rename project.json => client/project.json (100%) rename pyproject.toml => client/pyproject.toml (100%) rename pytest.ini => client/pytest.ini (100%) rename run.sh => client/run.sh (100%) rename {scripts => client/scripts}/setup-tmp-directories.sh (100%) rename {scripts => client/scripts}/verify-mo.py (100%) rename {securedrop_client => client/securedrop_client}/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/__main__.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/base.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/downloads.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/seen.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/sources.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/sync.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/updatestar.py (100%) rename {securedrop_client => client/securedrop_client}/api_jobs/uploads.py (100%) rename {securedrop_client => client/securedrop_client}/app.py (100%) rename {securedrop_client => client/securedrop_client}/config.py (100%) rename {securedrop_client => client/securedrop_client}/conversation/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/conversation/transcript/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/conversation/transcript/items/__init__.py (100%) rename {securedrop_client => 
client/securedrop_client}/conversation/transcript/items/factory.py (100%) rename {securedrop_client => client/securedrop_client}/conversation/transcript/items/file.py (100%) rename {securedrop_client => client/securedrop_client}/conversation/transcript/items/item.py (100%) rename {securedrop_client => client/securedrop_client}/conversation/transcript/items/message.py (100%) rename {securedrop_client => client/securedrop_client}/conversation/transcript/templates/transcript.txt.jinja (100%) rename {securedrop_client => client/securedrop_client}/conversation/transcript/transcript.py (100%) rename {securedrop_client => client/securedrop_client}/crypto.py (100%) rename {securedrop_client => client/securedrop_client}/database.py (100%) rename {securedrop_client => client/securedrop_client}/db.py (100%) rename {securedrop_client => client/securedrop_client}/export.py (100%) rename {securedrop_client => client/securedrop_client}/gui/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/actions.py (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/dialog.css (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/sign_in/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/sign_in/button.css (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/sign_in/button.py (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/sign_in/error_bar.css (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/sign_in/error_bar.py (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/use_offline/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/auth/use_offline/button.py (100%) rename {securedrop_client => client/securedrop_client}/gui/base/__init__.py 
(100%) rename {securedrop_client => client/securedrop_client}/gui/base/buttons.py (100%) rename {securedrop_client => client/securedrop_client}/gui/base/checkbox.css (100%) rename {securedrop_client => client/securedrop_client}/gui/base/checkbox.py (100%) rename {securedrop_client => client/securedrop_client}/gui/base/dialog_button.css (100%) rename {securedrop_client => client/securedrop_client}/gui/base/dialog_message.css (100%) rename {securedrop_client => client/securedrop_client}/gui/base/dialogs.css (100%) rename {securedrop_client => client/securedrop_client}/gui/base/dialogs.py (100%) rename {securedrop_client => client/securedrop_client}/gui/base/inputs.py (100%) rename {securedrop_client => client/securedrop_client}/gui/base/misc.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/delete/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/delete/dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/device.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/dialog.css (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/file_dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/print_dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/print_transcript_dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/conversation/export/transcript_dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/datetime_helpers.py (100%) rename {securedrop_client => 
client/securedrop_client}/gui/main.py (100%) rename {securedrop_client => client/securedrop_client}/gui/source/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/source/delete/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/gui/source/delete/dialog.py (100%) rename {securedrop_client => client/securedrop_client}/gui/widgets.py (100%) rename {securedrop_client => client/securedrop_client}/locale/ca/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/ca/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/de/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/de/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/es/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/es/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/hr/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/hr/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/is/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/is/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/it/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/it/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/messages.pot (100%) rename {securedrop_client => client/securedrop_client}/locale/pt_BR/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/pt_BR/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/pt_PT/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => 
client/securedrop_client}/locale/pt_PT/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/ru/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/ru/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/sk/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/sk/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/sv/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/sv/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/tr/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/tr/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/locale/zh_Hans/LC_MESSAGES/messages.mo (100%) rename {securedrop_client => client/securedrop_client}/locale/zh_Hans/LC_MESSAGES/messages.po (100%) rename {securedrop_client => client/securedrop_client}/logic.py (100%) rename {securedrop_client => client/securedrop_client}/queue.py (100%) rename {securedrop_client => client/securedrop_client}/resources/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/resources/css/button.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/checker_tooltip.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/file_download_button.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/sdclient.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/sender_icon.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/source_menu.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/source_name.css (100%) rename {securedrop_client => 
client/securedrop_client}/resources/css/source_preview.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/source_timestamp.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/speech_bubble_message.css (100%) rename {securedrop_client => client/securedrop_client}/resources/css/speech_bubble_status_bar.css (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-Black.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-BlackItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-Bold.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-BoldItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-ExtraBold.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-ExtraBoldItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-ExtraLight.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-ExtraLightItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-Light.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-LightItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-Medium.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-MediumItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-Regular.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-RegularItalic.ttf (100%) rename {securedrop_client 
=> client/securedrop_client}/resources/fonts/Montserrat/Montserrat-SemiBold.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-SemiBoldItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-Thin.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/Montserrat-ThinItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Montserrat/OFL.txt (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/OFL.txt (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-Black.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-BlackItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-Bold.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-BoldItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLight.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLightItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-Light.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-LightItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-Regular.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-RegularItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBold.ttf (100%) rename {securedrop_client 
=> client/securedrop_client}/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBoldItalic.ttf (100%) rename {securedrop_client => client/securedrop_client}/resources/images/activestate-wide.gif (100%) rename {securedrop_client => client/securedrop_client}/resources/images/blank.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/checkmark.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/checkmark_hover.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/cross.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/delete.png (100%) rename {securedrop_client => client/securedrop_client}/resources/images/delete_close.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/deleted-user.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/download_active.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/download_file.gif (100%) rename {securedrop_client => client/securedrop_client}/resources/images/download_file.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/download_file_hover.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/dropdown_arrow.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/ellipsis.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/error_icon.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/error_icon_white.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/eye_hidden.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/eye_visible.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/header_animation.gif (100%) rename {securedrop_client => 
client/securedrop_client}/resources/images/header_logo.png (100%) rename {securedrop_client => client/securedrop_client}/resources/images/hexes.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/icon.png (100%) rename {securedrop_client => client/securedrop_client}/resources/images/left_pane.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/left_pane_offline.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/loading-bar.gif (100%) rename {securedrop_client => client/securedrop_client}/resources/images/loading-cubes.gif (100%) rename {securedrop_client => client/securedrop_client}/resources/images/login_bg.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/logo.png (100%) rename {securedrop_client => client/securedrop_client}/resources/images/paperclip-disabled.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/paperclip.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/printer.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/refresh.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/refresh_active.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/refresh_offline.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/savetodisk.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/send-disabled.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/send.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/star_hover.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/star_off.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/star_on.svg (100%) rename {securedrop_client => 
client/securedrop_client}/resources/images/sync.gif (100%) rename {securedrop_client => client/securedrop_client}/resources/images/sync.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/sync_active.gif (100%) rename {securedrop_client => client/securedrop_client}/resources/images/sync_disabled.gif (100%) rename {securedrop_client => client/securedrop_client}/resources/images/tear-big.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/tear-left.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/tear-right.svg (100%) rename {securedrop_client => client/securedrop_client}/resources/images/trash.png (100%) rename {securedrop_client => client/securedrop_client}/state/__init__.py (100%) rename {securedrop_client => client/securedrop_client}/state/domain.py (100%) rename {securedrop_client => client/securedrop_client}/state/state.py (100%) rename {securedrop_client => client/securedrop_client}/storage.py (100%) rename {securedrop_client => client/securedrop_client}/sync.py (100%) rename {securedrop_client => client/securedrop_client}/utils.py (100%) rename setup.cfg => client/setup.cfg (100%) rename setup.py => client/setup.py (100%) rename test-functional.sh => client/test-functional.sh (100%) rename {tests => client/tests}/__init__.py (100%) rename {tests => client/tests}/api_jobs/__init__.py (100%) rename {tests => client/tests}/api_jobs/test_base.py (100%) rename {tests => client/tests}/api_jobs/test_downloads.py (100%) rename {tests => client/tests}/api_jobs/test_seen.py (100%) rename {tests => client/tests}/api_jobs/test_sources.py (100%) rename {tests => client/tests}/api_jobs/test_sync.py (100%) rename {tests => client/tests}/api_jobs/test_updatestar.py (100%) rename {tests => client/tests}/api_jobs/test_uploads.py (100%) rename {tests => client/tests}/conftest.py (100%) rename {tests => client/tests}/factory.py (100%) rename {tests => 
client/tests}/files/securedrop.gpg.asc (100%) rename {tests => client/tests}/files/securedrop.gpg.pub.asc (100%) rename {tests => client/tests}/files/test-doc.gz.gpg (100%) rename {tests => client/tests}/files/test-key.gpg.asc (100%) rename {tests => client/tests}/files/test-key.gpg.pub.asc (100%) rename {tests => client/tests}/functional/__init__.py (100%) rename {tests => client/tests}/functional/cassettes/test_delete_source.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_download_file.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_export_file_dialog.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_login_as_journalist.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_login_from_offline.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_logout_as_journalist.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_offline_delete_source_attempt.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_offline_read_conversation.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_offline_send_reply_to_source.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_offline_star_source.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_receive_message_from_source.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_seen_and_unseen.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_send_reply_to_source.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_star_source.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_unseen_source_becomes_seen_on_click.yaml (100%) rename {tests => client/tests}/functional/cassettes/test_user_icon_click.yaml (100%) rename {tests => client/tests}/functional/test_delete_source.py (100%) rename {tests => client/tests}/functional/test_download_file.py (100%) rename {tests => 
client/tests}/functional/test_export_file_dialog.py (100%) rename {tests => client/tests}/functional/test_login.py (100%) rename {tests => client/tests}/functional/test_logout.py (100%) rename {tests => client/tests}/functional/test_offline_delete_source.py (100%) rename {tests => client/tests}/functional/test_offline_login.py (100%) rename {tests => client/tests}/functional/test_offline_read_conversation.py (100%) rename {tests => client/tests}/functional/test_offline_send_reply.py (100%) rename {tests => client/tests}/functional/test_offline_star_source.py (100%) rename {tests => client/tests}/functional/test_receive_message.py (100%) rename {tests => client/tests}/functional/test_seen.py (100%) rename {tests => client/tests}/functional/test_send_reply.py (100%) rename {tests => client/tests}/functional/test_star_source.py (100%) rename {tests => client/tests}/functional/test_user_profile_menu.py (100%) rename {tests => client/tests}/gui/__init__.py (100%) rename {tests => client/tests}/gui/auth/sign_in/test_error_bar.py (100%) rename {tests => client/tests}/gui/auth/test_dialog.py (100%) rename {tests => client/tests}/gui/base/test_dialogs.py (100%) rename {tests => client/tests}/gui/base/test_inputs.py (100%) rename {tests => client/tests}/gui/base/test_misc.py (100%) rename {tests => client/tests}/gui/base/test_sdcheckbox.py (100%) rename {tests => client/tests}/gui/conversation/delete/__init__.py (100%) rename {tests => client/tests}/gui/conversation/delete/test_dialog.py (100%) rename {tests => client/tests}/gui/conversation/export/__init__.py (100%) rename {tests => client/tests}/gui/conversation/export/test_device.py (100%) rename {tests => client/tests}/gui/conversation/export/test_dialog.py (100%) rename {tests => client/tests}/gui/conversation/export/test_file_dialog.py (100%) rename {tests => client/tests}/gui/conversation/export/test_print_dialog.py (100%) rename {tests => client/tests}/gui/conversation/export/test_print_transcript_dialog.py (100%) 
rename {tests => client/tests}/gui/conversation/export/test_transcript_dialog.py (100%) rename {tests => client/tests}/gui/source/__init__.py (100%) rename {tests => client/tests}/gui/source/delete/__init__.py (100%) rename {tests => client/tests}/gui/source/delete/test_dialog.py (100%) rename {tests => client/tests}/gui/test_actions.py (100%) rename {tests => client/tests}/gui/test_datetime_helpers.py (100%) rename {tests => client/tests}/gui/test_main.py (100%) rename {tests => client/tests}/gui/test_widgets.py (100%) rename {tests => client/tests}/helper.py (100%) rename {tests => client/tests}/integration/conftest.py (100%) rename {tests => client/tests}/integration/test_placeholder.py (100%) rename {tests => client/tests}/integration/test_styles_file_download_button.py (100%) rename {tests => client/tests}/integration/test_styles_modal_dialog_button.py (100%) rename {tests => client/tests}/integration/test_styles_modal_dialog_error_details.py (100%) rename {tests => client/tests}/integration/test_styles_reply_status_bar.py (100%) rename {tests => client/tests}/integration/test_styles_sdclient.py (100%) rename {tests => client/tests}/integration/test_styles_speech_bubble_message.py (100%) rename {tests => client/tests}/integration/test_styles_speech_bubble_status_bar.py (100%) rename {tests => client/tests}/migrations/__init__.py (100%) rename {tests => client/tests}/migrations/test_d7c8af95bc8e.py (100%) rename {tests => client/tests}/state/__init__.py (100%) rename {tests => client/tests}/state/test_domain.py (100%) rename {tests => client/tests}/state/test_state.py (100%) rename {tests => client/tests}/test_alembic.py (100%) rename {tests => client/tests}/test_app.py (100%) rename {tests => client/tests}/test_config.py (100%) rename {tests => client/tests}/test_conversation.py (100%) rename {tests => client/tests}/test_crypto.py (100%) rename {tests => client/tests}/test_export.py (100%) rename {tests => client/tests}/test_logic.py (100%) rename {tests => 
client/tests}/test_models.py (100%) rename {tests => client/tests}/test_queue.py (100%) rename {tests => client/tests}/test_resources.py (100%) rename {tests => client/tests}/test_storage.py (100%) rename {tests => client/tests}/test_sync.py (100%) rename {tests => client/tests}/test_utils.py (100%) rename update_version.sh => client/update_version.sh (100%) diff --git a/.circleci/config.yml b/client/.circleci/config.yml similarity index 100% rename from .circleci/config.yml rename to client/.circleci/config.yml diff --git a/.coveragerc b/client/.coveragerc similarity index 100% rename from .coveragerc rename to client/.coveragerc diff --git a/.git-blame-ignore-revs b/client/.git-blame-ignore-revs similarity index 100% rename from .git-blame-ignore-revs rename to client/.git-blame-ignore-revs diff --git a/.githooks/pre-commit b/client/.githooks/pre-commit similarity index 100% rename from .githooks/pre-commit rename to client/.githooks/pre-commit diff --git a/.github/CODEOWNERS b/client/.github/CODEOWNERS similarity index 100% rename from .github/CODEOWNERS rename to client/.github/CODEOWNERS diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/client/.github/ISSUE_TEMPLATE/bug_report.md similarity index 100% rename from .github/ISSUE_TEMPLATE/bug_report.md rename to client/.github/ISSUE_TEMPLATE/bug_report.md diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/client/.github/ISSUE_TEMPLATE/feature_request.md similarity index 100% rename from .github/ISSUE_TEMPLATE/feature_request.md rename to client/.github/ISSUE_TEMPLATE/feature_request.md diff --git a/.github/ISSUE_TEMPLATE/release.md b/client/.github/ISSUE_TEMPLATE/release.md similarity index 100% rename from .github/ISSUE_TEMPLATE/release.md rename to client/.github/ISSUE_TEMPLATE/release.md diff --git a/.github/pull_request_template.md b/client/.github/pull_request_template.md similarity index 100% rename from .github/pull_request_template.md rename to client/.github/pull_request_template.md diff --git 
a/.gitignore b/client/.gitignore similarity index 100% rename from .gitignore rename to client/.gitignore diff --git a/.semgrep/custom-rules.yml b/client/.semgrep/custom-rules.yml similarity index 100% rename from .semgrep/custom-rules.yml rename to client/.semgrep/custom-rules.yml diff --git a/CONTRIBUTING.md b/client/CONTRIBUTING.md similarity index 100% rename from CONTRIBUTING.md rename to client/CONTRIBUTING.md diff --git a/Doxyfile b/client/Doxyfile similarity index 100% rename from Doxyfile rename to client/Doxyfile diff --git a/LICENSE b/client/LICENSE similarity index 100% rename from LICENSE rename to client/LICENSE diff --git a/MANIFEST.in b/client/MANIFEST.in similarity index 100% rename from MANIFEST.in rename to client/MANIFEST.in diff --git a/Makefile b/client/Makefile similarity index 100% rename from Makefile rename to client/Makefile diff --git a/README.md b/client/README.md similarity index 100% rename from README.md rename to client/README.md diff --git a/SECURITY.md b/client/SECURITY.md similarity index 100% rename from SECURITY.md rename to client/SECURITY.md diff --git a/alembic.ini b/client/alembic.ini similarity index 100% rename from alembic.ini rename to client/alembic.ini diff --git a/alembic/README b/client/alembic/README similarity index 100% rename from alembic/README rename to client/alembic/README diff --git a/alembic/env.py b/client/alembic/env.py similarity index 100% rename from alembic/env.py rename to client/alembic/env.py diff --git a/alembic/script.py.mako b/client/alembic/script.py.mako similarity index 100% rename from alembic/script.py.mako rename to client/alembic/script.py.mako diff --git a/alembic/versions/414627c04463_draftreply_add_column_for_sending_pid.py b/client/alembic/versions/414627c04463_draftreply_add_column_for_sending_pid.py similarity index 100% rename from alembic/versions/414627c04463_draftreply_add_column_for_sending_pid.py rename to 
client/alembic/versions/414627c04463_draftreply_add_column_for_sending_pid.py diff --git a/alembic/versions/d7c8af95bc8e_initial.py b/client/alembic/versions/d7c8af95bc8e_initial.py similarity index 100% rename from alembic/versions/d7c8af95bc8e_initial.py rename to client/alembic/versions/d7c8af95bc8e_initial.py diff --git a/babel.cfg b/client/babel.cfg similarity index 100% rename from babel.cfg rename to client/babel.cfg diff --git a/build-requirements.txt b/client/build-requirements.txt similarity index 100% rename from build-requirements.txt rename to client/build-requirements.txt diff --git a/changelog.md b/client/changelog.md similarity index 100% rename from changelog.md rename to client/changelog.md diff --git a/create_dev_data.py b/client/create_dev_data.py similarity index 100% rename from create_dev_data.py rename to client/create_dev_data.py diff --git a/files/alembic.ini b/client/files/alembic.ini similarity index 100% rename from files/alembic.ini rename to client/files/alembic.ini diff --git a/files/press.freedom.SecureDropClient.desktop b/client/files/press.freedom.SecureDropClient.desktop similarity index 100% rename from files/press.freedom.SecureDropClient.desktop rename to client/files/press.freedom.SecureDropClient.desktop diff --git a/files/sd-app-qubes-gpg-domain.sh b/client/files/sd-app-qubes-gpg-domain.sh similarity index 100% rename from files/sd-app-qubes-gpg-domain.sh rename to client/files/sd-app-qubes-gpg-domain.sh diff --git a/files/securedrop-client b/client/files/securedrop-client similarity index 100% rename from files/securedrop-client rename to client/files/securedrop-client diff --git a/files/securedrop-client.desktop b/client/files/securedrop-client.desktop similarity index 100% rename from files/securedrop-client.desktop rename to client/files/securedrop-client.desktop diff --git a/files/usr.bin.securedrop-client b/client/files/usr.bin.securedrop-client similarity index 100% rename from files/usr.bin.securedrop-client rename 
to client/files/usr.bin.securedrop-client diff --git a/poetry.lock b/client/poetry.lock similarity index 100% rename from poetry.lock rename to client/poetry.lock diff --git a/project.json b/client/project.json similarity index 100% rename from project.json rename to client/project.json diff --git a/pyproject.toml b/client/pyproject.toml similarity index 100% rename from pyproject.toml rename to client/pyproject.toml diff --git a/pytest.ini b/client/pytest.ini similarity index 100% rename from pytest.ini rename to client/pytest.ini diff --git a/run.sh b/client/run.sh similarity index 100% rename from run.sh rename to client/run.sh diff --git a/scripts/setup-tmp-directories.sh b/client/scripts/setup-tmp-directories.sh similarity index 100% rename from scripts/setup-tmp-directories.sh rename to client/scripts/setup-tmp-directories.sh diff --git a/scripts/verify-mo.py b/client/scripts/verify-mo.py similarity index 100% rename from scripts/verify-mo.py rename to client/scripts/verify-mo.py diff --git a/securedrop_client/__init__.py b/client/securedrop_client/__init__.py similarity index 100% rename from securedrop_client/__init__.py rename to client/securedrop_client/__init__.py diff --git a/securedrop_client/__main__.py b/client/securedrop_client/__main__.py similarity index 100% rename from securedrop_client/__main__.py rename to client/securedrop_client/__main__.py diff --git a/securedrop_client/api_jobs/__init__.py b/client/securedrop_client/api_jobs/__init__.py similarity index 100% rename from securedrop_client/api_jobs/__init__.py rename to client/securedrop_client/api_jobs/__init__.py diff --git a/securedrop_client/api_jobs/base.py b/client/securedrop_client/api_jobs/base.py similarity index 100% rename from securedrop_client/api_jobs/base.py rename to client/securedrop_client/api_jobs/base.py diff --git a/securedrop_client/api_jobs/downloads.py b/client/securedrop_client/api_jobs/downloads.py similarity index 100% rename from 
securedrop_client/api_jobs/downloads.py rename to client/securedrop_client/api_jobs/downloads.py diff --git a/securedrop_client/api_jobs/seen.py b/client/securedrop_client/api_jobs/seen.py similarity index 100% rename from securedrop_client/api_jobs/seen.py rename to client/securedrop_client/api_jobs/seen.py diff --git a/securedrop_client/api_jobs/sources.py b/client/securedrop_client/api_jobs/sources.py similarity index 100% rename from securedrop_client/api_jobs/sources.py rename to client/securedrop_client/api_jobs/sources.py diff --git a/securedrop_client/api_jobs/sync.py b/client/securedrop_client/api_jobs/sync.py similarity index 100% rename from securedrop_client/api_jobs/sync.py rename to client/securedrop_client/api_jobs/sync.py diff --git a/securedrop_client/api_jobs/updatestar.py b/client/securedrop_client/api_jobs/updatestar.py similarity index 100% rename from securedrop_client/api_jobs/updatestar.py rename to client/securedrop_client/api_jobs/updatestar.py diff --git a/securedrop_client/api_jobs/uploads.py b/client/securedrop_client/api_jobs/uploads.py similarity index 100% rename from securedrop_client/api_jobs/uploads.py rename to client/securedrop_client/api_jobs/uploads.py diff --git a/securedrop_client/app.py b/client/securedrop_client/app.py similarity index 100% rename from securedrop_client/app.py rename to client/securedrop_client/app.py diff --git a/securedrop_client/config.py b/client/securedrop_client/config.py similarity index 100% rename from securedrop_client/config.py rename to client/securedrop_client/config.py diff --git a/securedrop_client/conversation/__init__.py b/client/securedrop_client/conversation/__init__.py similarity index 100% rename from securedrop_client/conversation/__init__.py rename to client/securedrop_client/conversation/__init__.py diff --git a/securedrop_client/conversation/transcript/__init__.py b/client/securedrop_client/conversation/transcript/__init__.py similarity index 100% rename from 
securedrop_client/conversation/transcript/__init__.py rename to client/securedrop_client/conversation/transcript/__init__.py diff --git a/securedrop_client/conversation/transcript/items/__init__.py b/client/securedrop_client/conversation/transcript/items/__init__.py similarity index 100% rename from securedrop_client/conversation/transcript/items/__init__.py rename to client/securedrop_client/conversation/transcript/items/__init__.py diff --git a/securedrop_client/conversation/transcript/items/factory.py b/client/securedrop_client/conversation/transcript/items/factory.py similarity index 100% rename from securedrop_client/conversation/transcript/items/factory.py rename to client/securedrop_client/conversation/transcript/items/factory.py diff --git a/securedrop_client/conversation/transcript/items/file.py b/client/securedrop_client/conversation/transcript/items/file.py similarity index 100% rename from securedrop_client/conversation/transcript/items/file.py rename to client/securedrop_client/conversation/transcript/items/file.py diff --git a/securedrop_client/conversation/transcript/items/item.py b/client/securedrop_client/conversation/transcript/items/item.py similarity index 100% rename from securedrop_client/conversation/transcript/items/item.py rename to client/securedrop_client/conversation/transcript/items/item.py diff --git a/securedrop_client/conversation/transcript/items/message.py b/client/securedrop_client/conversation/transcript/items/message.py similarity index 100% rename from securedrop_client/conversation/transcript/items/message.py rename to client/securedrop_client/conversation/transcript/items/message.py diff --git a/securedrop_client/conversation/transcript/templates/transcript.txt.jinja b/client/securedrop_client/conversation/transcript/templates/transcript.txt.jinja similarity index 100% rename from securedrop_client/conversation/transcript/templates/transcript.txt.jinja rename to 
client/securedrop_client/conversation/transcript/templates/transcript.txt.jinja diff --git a/securedrop_client/conversation/transcript/transcript.py b/client/securedrop_client/conversation/transcript/transcript.py similarity index 100% rename from securedrop_client/conversation/transcript/transcript.py rename to client/securedrop_client/conversation/transcript/transcript.py diff --git a/securedrop_client/crypto.py b/client/securedrop_client/crypto.py similarity index 100% rename from securedrop_client/crypto.py rename to client/securedrop_client/crypto.py diff --git a/securedrop_client/database.py b/client/securedrop_client/database.py similarity index 100% rename from securedrop_client/database.py rename to client/securedrop_client/database.py diff --git a/securedrop_client/db.py b/client/securedrop_client/db.py similarity index 100% rename from securedrop_client/db.py rename to client/securedrop_client/db.py diff --git a/securedrop_client/export.py b/client/securedrop_client/export.py similarity index 100% rename from securedrop_client/export.py rename to client/securedrop_client/export.py diff --git a/securedrop_client/gui/__init__.py b/client/securedrop_client/gui/__init__.py similarity index 100% rename from securedrop_client/gui/__init__.py rename to client/securedrop_client/gui/__init__.py diff --git a/securedrop_client/gui/actions.py b/client/securedrop_client/gui/actions.py similarity index 100% rename from securedrop_client/gui/actions.py rename to client/securedrop_client/gui/actions.py diff --git a/securedrop_client/gui/auth/__init__.py b/client/securedrop_client/gui/auth/__init__.py similarity index 100% rename from securedrop_client/gui/auth/__init__.py rename to client/securedrop_client/gui/auth/__init__.py diff --git a/securedrop_client/gui/auth/dialog.css b/client/securedrop_client/gui/auth/dialog.css similarity index 100% rename from securedrop_client/gui/auth/dialog.css rename to client/securedrop_client/gui/auth/dialog.css diff --git 
a/securedrop_client/gui/auth/dialog.py b/client/securedrop_client/gui/auth/dialog.py similarity index 100% rename from securedrop_client/gui/auth/dialog.py rename to client/securedrop_client/gui/auth/dialog.py diff --git a/securedrop_client/gui/auth/sign_in/__init__.py b/client/securedrop_client/gui/auth/sign_in/__init__.py similarity index 100% rename from securedrop_client/gui/auth/sign_in/__init__.py rename to client/securedrop_client/gui/auth/sign_in/__init__.py diff --git a/securedrop_client/gui/auth/sign_in/button.css b/client/securedrop_client/gui/auth/sign_in/button.css similarity index 100% rename from securedrop_client/gui/auth/sign_in/button.css rename to client/securedrop_client/gui/auth/sign_in/button.css diff --git a/securedrop_client/gui/auth/sign_in/button.py b/client/securedrop_client/gui/auth/sign_in/button.py similarity index 100% rename from securedrop_client/gui/auth/sign_in/button.py rename to client/securedrop_client/gui/auth/sign_in/button.py diff --git a/securedrop_client/gui/auth/sign_in/error_bar.css b/client/securedrop_client/gui/auth/sign_in/error_bar.css similarity index 100% rename from securedrop_client/gui/auth/sign_in/error_bar.css rename to client/securedrop_client/gui/auth/sign_in/error_bar.css diff --git a/securedrop_client/gui/auth/sign_in/error_bar.py b/client/securedrop_client/gui/auth/sign_in/error_bar.py similarity index 100% rename from securedrop_client/gui/auth/sign_in/error_bar.py rename to client/securedrop_client/gui/auth/sign_in/error_bar.py diff --git a/securedrop_client/gui/auth/use_offline/__init__.py b/client/securedrop_client/gui/auth/use_offline/__init__.py similarity index 100% rename from securedrop_client/gui/auth/use_offline/__init__.py rename to client/securedrop_client/gui/auth/use_offline/__init__.py diff --git a/securedrop_client/gui/auth/use_offline/button.py b/client/securedrop_client/gui/auth/use_offline/button.py similarity index 100% rename from securedrop_client/gui/auth/use_offline/button.py 
rename to client/securedrop_client/gui/auth/use_offline/button.py diff --git a/securedrop_client/gui/base/__init__.py b/client/securedrop_client/gui/base/__init__.py similarity index 100% rename from securedrop_client/gui/base/__init__.py rename to client/securedrop_client/gui/base/__init__.py diff --git a/securedrop_client/gui/base/buttons.py b/client/securedrop_client/gui/base/buttons.py similarity index 100% rename from securedrop_client/gui/base/buttons.py rename to client/securedrop_client/gui/base/buttons.py diff --git a/securedrop_client/gui/base/checkbox.css b/client/securedrop_client/gui/base/checkbox.css similarity index 100% rename from securedrop_client/gui/base/checkbox.css rename to client/securedrop_client/gui/base/checkbox.css diff --git a/securedrop_client/gui/base/checkbox.py b/client/securedrop_client/gui/base/checkbox.py similarity index 100% rename from securedrop_client/gui/base/checkbox.py rename to client/securedrop_client/gui/base/checkbox.py diff --git a/securedrop_client/gui/base/dialog_button.css b/client/securedrop_client/gui/base/dialog_button.css similarity index 100% rename from securedrop_client/gui/base/dialog_button.css rename to client/securedrop_client/gui/base/dialog_button.css diff --git a/securedrop_client/gui/base/dialog_message.css b/client/securedrop_client/gui/base/dialog_message.css similarity index 100% rename from securedrop_client/gui/base/dialog_message.css rename to client/securedrop_client/gui/base/dialog_message.css diff --git a/securedrop_client/gui/base/dialogs.css b/client/securedrop_client/gui/base/dialogs.css similarity index 100% rename from securedrop_client/gui/base/dialogs.css rename to client/securedrop_client/gui/base/dialogs.css diff --git a/securedrop_client/gui/base/dialogs.py b/client/securedrop_client/gui/base/dialogs.py similarity index 100% rename from securedrop_client/gui/base/dialogs.py rename to client/securedrop_client/gui/base/dialogs.py diff --git a/securedrop_client/gui/base/inputs.py 
b/client/securedrop_client/gui/base/inputs.py similarity index 100% rename from securedrop_client/gui/base/inputs.py rename to client/securedrop_client/gui/base/inputs.py diff --git a/securedrop_client/gui/base/misc.py b/client/securedrop_client/gui/base/misc.py similarity index 100% rename from securedrop_client/gui/base/misc.py rename to client/securedrop_client/gui/base/misc.py diff --git a/securedrop_client/gui/conversation/__init__.py b/client/securedrop_client/gui/conversation/__init__.py similarity index 100% rename from securedrop_client/gui/conversation/__init__.py rename to client/securedrop_client/gui/conversation/__init__.py diff --git a/securedrop_client/gui/conversation/delete/__init__.py b/client/securedrop_client/gui/conversation/delete/__init__.py similarity index 100% rename from securedrop_client/gui/conversation/delete/__init__.py rename to client/securedrop_client/gui/conversation/delete/__init__.py diff --git a/securedrop_client/gui/conversation/delete/dialog.py b/client/securedrop_client/gui/conversation/delete/dialog.py similarity index 100% rename from securedrop_client/gui/conversation/delete/dialog.py rename to client/securedrop_client/gui/conversation/delete/dialog.py diff --git a/securedrop_client/gui/conversation/export/__init__.py b/client/securedrop_client/gui/conversation/export/__init__.py similarity index 100% rename from securedrop_client/gui/conversation/export/__init__.py rename to client/securedrop_client/gui/conversation/export/__init__.py diff --git a/securedrop_client/gui/conversation/export/device.py b/client/securedrop_client/gui/conversation/export/device.py similarity index 100% rename from securedrop_client/gui/conversation/export/device.py rename to client/securedrop_client/gui/conversation/export/device.py diff --git a/securedrop_client/gui/conversation/export/dialog.css b/client/securedrop_client/gui/conversation/export/dialog.css similarity index 100% rename from securedrop_client/gui/conversation/export/dialog.css 
rename to client/securedrop_client/gui/conversation/export/dialog.css diff --git a/securedrop_client/gui/conversation/export/dialog.py b/client/securedrop_client/gui/conversation/export/dialog.py similarity index 100% rename from securedrop_client/gui/conversation/export/dialog.py rename to client/securedrop_client/gui/conversation/export/dialog.py diff --git a/securedrop_client/gui/conversation/export/file_dialog.py b/client/securedrop_client/gui/conversation/export/file_dialog.py similarity index 100% rename from securedrop_client/gui/conversation/export/file_dialog.py rename to client/securedrop_client/gui/conversation/export/file_dialog.py diff --git a/securedrop_client/gui/conversation/export/print_dialog.py b/client/securedrop_client/gui/conversation/export/print_dialog.py similarity index 100% rename from securedrop_client/gui/conversation/export/print_dialog.py rename to client/securedrop_client/gui/conversation/export/print_dialog.py diff --git a/securedrop_client/gui/conversation/export/print_transcript_dialog.py b/client/securedrop_client/gui/conversation/export/print_transcript_dialog.py similarity index 100% rename from securedrop_client/gui/conversation/export/print_transcript_dialog.py rename to client/securedrop_client/gui/conversation/export/print_transcript_dialog.py diff --git a/securedrop_client/gui/conversation/export/transcript_dialog.py b/client/securedrop_client/gui/conversation/export/transcript_dialog.py similarity index 100% rename from securedrop_client/gui/conversation/export/transcript_dialog.py rename to client/securedrop_client/gui/conversation/export/transcript_dialog.py diff --git a/securedrop_client/gui/datetime_helpers.py b/client/securedrop_client/gui/datetime_helpers.py similarity index 100% rename from securedrop_client/gui/datetime_helpers.py rename to client/securedrop_client/gui/datetime_helpers.py diff --git a/securedrop_client/gui/main.py b/client/securedrop_client/gui/main.py similarity index 100% rename from 
securedrop_client/gui/main.py rename to client/securedrop_client/gui/main.py diff --git a/securedrop_client/gui/source/__init__.py b/client/securedrop_client/gui/source/__init__.py similarity index 100% rename from securedrop_client/gui/source/__init__.py rename to client/securedrop_client/gui/source/__init__.py diff --git a/securedrop_client/gui/source/delete/__init__.py b/client/securedrop_client/gui/source/delete/__init__.py similarity index 100% rename from securedrop_client/gui/source/delete/__init__.py rename to client/securedrop_client/gui/source/delete/__init__.py diff --git a/securedrop_client/gui/source/delete/dialog.py b/client/securedrop_client/gui/source/delete/dialog.py similarity index 100% rename from securedrop_client/gui/source/delete/dialog.py rename to client/securedrop_client/gui/source/delete/dialog.py diff --git a/securedrop_client/gui/widgets.py b/client/securedrop_client/gui/widgets.py similarity index 100% rename from securedrop_client/gui/widgets.py rename to client/securedrop_client/gui/widgets.py diff --git a/securedrop_client/locale/ca/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/ca/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/ca/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/ca/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/ca/LC_MESSAGES/messages.po b/client/securedrop_client/locale/ca/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/ca/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/ca/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/de/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/de/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/de/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/de/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/de/LC_MESSAGES/messages.po 
b/client/securedrop_client/locale/de/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/de/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/de/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/es/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/es/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/es/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/es/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/es/LC_MESSAGES/messages.po b/client/securedrop_client/locale/es/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/es/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/es/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/hr/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/hr/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/hr/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/hr/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/hr/LC_MESSAGES/messages.po b/client/securedrop_client/locale/hr/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/hr/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/hr/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/is/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/is/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/is/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/is/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/is/LC_MESSAGES/messages.po b/client/securedrop_client/locale/is/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/is/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/is/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/it/LC_MESSAGES/messages.mo 
b/client/securedrop_client/locale/it/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/it/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/it/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/it/LC_MESSAGES/messages.po b/client/securedrop_client/locale/it/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/it/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/it/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/messages.pot b/client/securedrop_client/locale/messages.pot similarity index 100% rename from securedrop_client/locale/messages.pot rename to client/securedrop_client/locale/messages.pot diff --git a/securedrop_client/locale/pt_BR/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/pt_BR/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/pt_BR/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/pt_BR/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/pt_BR/LC_MESSAGES/messages.po b/client/securedrop_client/locale/pt_BR/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/pt_BR/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/pt_BR/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/pt_PT/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/pt_PT/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/pt_PT/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/pt_PT/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/pt_PT/LC_MESSAGES/messages.po b/client/securedrop_client/locale/pt_PT/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/pt_PT/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/pt_PT/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/ru/LC_MESSAGES/messages.mo 
b/client/securedrop_client/locale/ru/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/ru/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/ru/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/ru/LC_MESSAGES/messages.po b/client/securedrop_client/locale/ru/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/ru/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/ru/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/sk/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/sk/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/sk/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/sk/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/sk/LC_MESSAGES/messages.po b/client/securedrop_client/locale/sk/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/sk/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/sk/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/sv/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/sv/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/sv/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/sv/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/sv/LC_MESSAGES/messages.po b/client/securedrop_client/locale/sv/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/sv/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/sv/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/tr/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/tr/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/tr/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/tr/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/tr/LC_MESSAGES/messages.po 
b/client/securedrop_client/locale/tr/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/tr/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/tr/LC_MESSAGES/messages.po diff --git a/securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.mo b/client/securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.mo similarity index 100% rename from securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.mo rename to client/securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.mo diff --git a/securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.po b/client/securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.po similarity index 100% rename from securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.po rename to client/securedrop_client/locale/zh_Hans/LC_MESSAGES/messages.po diff --git a/securedrop_client/logic.py b/client/securedrop_client/logic.py similarity index 100% rename from securedrop_client/logic.py rename to client/securedrop_client/logic.py diff --git a/securedrop_client/queue.py b/client/securedrop_client/queue.py similarity index 100% rename from securedrop_client/queue.py rename to client/securedrop_client/queue.py diff --git a/securedrop_client/resources/__init__.py b/client/securedrop_client/resources/__init__.py similarity index 100% rename from securedrop_client/resources/__init__.py rename to client/securedrop_client/resources/__init__.py diff --git a/securedrop_client/resources/css/button.css b/client/securedrop_client/resources/css/button.css similarity index 100% rename from securedrop_client/resources/css/button.css rename to client/securedrop_client/resources/css/button.css diff --git a/securedrop_client/resources/css/checker_tooltip.css b/client/securedrop_client/resources/css/checker_tooltip.css similarity index 100% rename from securedrop_client/resources/css/checker_tooltip.css rename to client/securedrop_client/resources/css/checker_tooltip.css diff --git 
a/securedrop_client/resources/css/file_download_button.css b/client/securedrop_client/resources/css/file_download_button.css similarity index 100% rename from securedrop_client/resources/css/file_download_button.css rename to client/securedrop_client/resources/css/file_download_button.css diff --git a/securedrop_client/resources/css/sdclient.css b/client/securedrop_client/resources/css/sdclient.css similarity index 100% rename from securedrop_client/resources/css/sdclient.css rename to client/securedrop_client/resources/css/sdclient.css diff --git a/securedrop_client/resources/css/sender_icon.css b/client/securedrop_client/resources/css/sender_icon.css similarity index 100% rename from securedrop_client/resources/css/sender_icon.css rename to client/securedrop_client/resources/css/sender_icon.css diff --git a/securedrop_client/resources/css/source_menu.css b/client/securedrop_client/resources/css/source_menu.css similarity index 100% rename from securedrop_client/resources/css/source_menu.css rename to client/securedrop_client/resources/css/source_menu.css diff --git a/securedrop_client/resources/css/source_name.css b/client/securedrop_client/resources/css/source_name.css similarity index 100% rename from securedrop_client/resources/css/source_name.css rename to client/securedrop_client/resources/css/source_name.css diff --git a/securedrop_client/resources/css/source_preview.css b/client/securedrop_client/resources/css/source_preview.css similarity index 100% rename from securedrop_client/resources/css/source_preview.css rename to client/securedrop_client/resources/css/source_preview.css diff --git a/securedrop_client/resources/css/source_timestamp.css b/client/securedrop_client/resources/css/source_timestamp.css similarity index 100% rename from securedrop_client/resources/css/source_timestamp.css rename to client/securedrop_client/resources/css/source_timestamp.css diff --git a/securedrop_client/resources/css/speech_bubble_message.css 
b/client/securedrop_client/resources/css/speech_bubble_message.css similarity index 100% rename from securedrop_client/resources/css/speech_bubble_message.css rename to client/securedrop_client/resources/css/speech_bubble_message.css diff --git a/securedrop_client/resources/css/speech_bubble_status_bar.css b/client/securedrop_client/resources/css/speech_bubble_status_bar.css similarity index 100% rename from securedrop_client/resources/css/speech_bubble_status_bar.css rename to client/securedrop_client/resources/css/speech_bubble_status_bar.css diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-Black.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-Black.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-Black.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-Black.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-BlackItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-BlackItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-BlackItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-BlackItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-Bold.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-Bold.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-Bold.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-Bold.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-BoldItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-BoldItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-BoldItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-BoldItalic.ttf diff --git 
a/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBold.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBold.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBold.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBold.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBoldItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBoldItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBoldItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraBoldItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLight.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLight.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLight.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLight.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLightItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLightItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLightItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-ExtraLightItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-Light.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-Light.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-Light.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-Light.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-LightItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-LightItalic.ttf similarity index 100% rename from 
securedrop_client/resources/fonts/Montserrat/Montserrat-LightItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-LightItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-Medium.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-Medium.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-Medium.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-Medium.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-MediumItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-MediumItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-MediumItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-MediumItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-Regular.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-Regular.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-Regular.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-Regular.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-RegularItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-RegularItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-RegularItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-RegularItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBold.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBold.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBold.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBold.ttf diff --git 
a/securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBoldItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBoldItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBoldItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-SemiBoldItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-Thin.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-Thin.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-Thin.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-Thin.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/Montserrat-ThinItalic.ttf b/client/securedrop_client/resources/fonts/Montserrat/Montserrat-ThinItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/Montserrat-ThinItalic.ttf rename to client/securedrop_client/resources/fonts/Montserrat/Montserrat-ThinItalic.ttf diff --git a/securedrop_client/resources/fonts/Montserrat/OFL.txt b/client/securedrop_client/resources/fonts/Montserrat/OFL.txt similarity index 100% rename from securedrop_client/resources/fonts/Montserrat/OFL.txt rename to client/securedrop_client/resources/fonts/Montserrat/OFL.txt diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/OFL.txt b/client/securedrop_client/resources/fonts/Source_Sans_Pro/OFL.txt similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/OFL.txt rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/OFL.txt diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Black.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Black.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Black.ttf rename to 
client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Black.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BlackItalic.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BlackItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BlackItalic.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BlackItalic.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Bold.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Bold.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Bold.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Bold.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BoldItalic.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BoldItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BoldItalic.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-BoldItalic.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLight.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLight.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLight.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLight.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLightItalic.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLightItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLightItalic.ttf rename to 
client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-ExtraLightItalic.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Light.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Light.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Light.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Light.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-LightItalic.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-LightItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-LightItalic.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-LightItalic.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Regular.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Regular.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Regular.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-Regular.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-RegularItalic.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-RegularItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-RegularItalic.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-RegularItalic.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBold.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBold.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBold.ttf rename to 
client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBold.ttf diff --git a/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBoldItalic.ttf b/client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBoldItalic.ttf similarity index 100% rename from securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBoldItalic.ttf rename to client/securedrop_client/resources/fonts/Source_Sans_Pro/SourceSansPro-SemiBoldItalic.ttf diff --git a/securedrop_client/resources/images/activestate-wide.gif b/client/securedrop_client/resources/images/activestate-wide.gif similarity index 100% rename from securedrop_client/resources/images/activestate-wide.gif rename to client/securedrop_client/resources/images/activestate-wide.gif diff --git a/securedrop_client/resources/images/blank.svg b/client/securedrop_client/resources/images/blank.svg similarity index 100% rename from securedrop_client/resources/images/blank.svg rename to client/securedrop_client/resources/images/blank.svg diff --git a/securedrop_client/resources/images/checkmark.svg b/client/securedrop_client/resources/images/checkmark.svg similarity index 100% rename from securedrop_client/resources/images/checkmark.svg rename to client/securedrop_client/resources/images/checkmark.svg diff --git a/securedrop_client/resources/images/checkmark_hover.svg b/client/securedrop_client/resources/images/checkmark_hover.svg similarity index 100% rename from securedrop_client/resources/images/checkmark_hover.svg rename to client/securedrop_client/resources/images/checkmark_hover.svg diff --git a/securedrop_client/resources/images/cross.svg b/client/securedrop_client/resources/images/cross.svg similarity index 100% rename from securedrop_client/resources/images/cross.svg rename to client/securedrop_client/resources/images/cross.svg diff --git a/securedrop_client/resources/images/delete.png b/client/securedrop_client/resources/images/delete.png similarity index 100% rename 
from securedrop_client/resources/images/delete.png rename to client/securedrop_client/resources/images/delete.png diff --git a/securedrop_client/resources/images/delete_close.svg b/client/securedrop_client/resources/images/delete_close.svg similarity index 100% rename from securedrop_client/resources/images/delete_close.svg rename to client/securedrop_client/resources/images/delete_close.svg diff --git a/securedrop_client/resources/images/deleted-user.svg b/client/securedrop_client/resources/images/deleted-user.svg similarity index 100% rename from securedrop_client/resources/images/deleted-user.svg rename to client/securedrop_client/resources/images/deleted-user.svg diff --git a/securedrop_client/resources/images/download_active.svg b/client/securedrop_client/resources/images/download_active.svg similarity index 100% rename from securedrop_client/resources/images/download_active.svg rename to client/securedrop_client/resources/images/download_active.svg diff --git a/securedrop_client/resources/images/download_file.gif b/client/securedrop_client/resources/images/download_file.gif similarity index 100% rename from securedrop_client/resources/images/download_file.gif rename to client/securedrop_client/resources/images/download_file.gif diff --git a/securedrop_client/resources/images/download_file.svg b/client/securedrop_client/resources/images/download_file.svg similarity index 100% rename from securedrop_client/resources/images/download_file.svg rename to client/securedrop_client/resources/images/download_file.svg diff --git a/securedrop_client/resources/images/download_file_hover.svg b/client/securedrop_client/resources/images/download_file_hover.svg similarity index 100% rename from securedrop_client/resources/images/download_file_hover.svg rename to client/securedrop_client/resources/images/download_file_hover.svg diff --git a/securedrop_client/resources/images/dropdown_arrow.svg b/client/securedrop_client/resources/images/dropdown_arrow.svg similarity index 100% 
rename from securedrop_client/resources/images/dropdown_arrow.svg rename to client/securedrop_client/resources/images/dropdown_arrow.svg diff --git a/securedrop_client/resources/images/ellipsis.svg b/client/securedrop_client/resources/images/ellipsis.svg similarity index 100% rename from securedrop_client/resources/images/ellipsis.svg rename to client/securedrop_client/resources/images/ellipsis.svg diff --git a/securedrop_client/resources/images/error_icon.svg b/client/securedrop_client/resources/images/error_icon.svg similarity index 100% rename from securedrop_client/resources/images/error_icon.svg rename to client/securedrop_client/resources/images/error_icon.svg diff --git a/securedrop_client/resources/images/error_icon_white.svg b/client/securedrop_client/resources/images/error_icon_white.svg similarity index 100% rename from securedrop_client/resources/images/error_icon_white.svg rename to client/securedrop_client/resources/images/error_icon_white.svg diff --git a/securedrop_client/resources/images/eye_hidden.svg b/client/securedrop_client/resources/images/eye_hidden.svg similarity index 100% rename from securedrop_client/resources/images/eye_hidden.svg rename to client/securedrop_client/resources/images/eye_hidden.svg diff --git a/securedrop_client/resources/images/eye_visible.svg b/client/securedrop_client/resources/images/eye_visible.svg similarity index 100% rename from securedrop_client/resources/images/eye_visible.svg rename to client/securedrop_client/resources/images/eye_visible.svg diff --git a/securedrop_client/resources/images/header_animation.gif b/client/securedrop_client/resources/images/header_animation.gif similarity index 100% rename from securedrop_client/resources/images/header_animation.gif rename to client/securedrop_client/resources/images/header_animation.gif diff --git a/securedrop_client/resources/images/header_logo.png b/client/securedrop_client/resources/images/header_logo.png similarity index 100% rename from 
securedrop_client/resources/images/header_logo.png rename to client/securedrop_client/resources/images/header_logo.png diff --git a/securedrop_client/resources/images/hexes.svg b/client/securedrop_client/resources/images/hexes.svg similarity index 100% rename from securedrop_client/resources/images/hexes.svg rename to client/securedrop_client/resources/images/hexes.svg diff --git a/securedrop_client/resources/images/icon.png b/client/securedrop_client/resources/images/icon.png similarity index 100% rename from securedrop_client/resources/images/icon.png rename to client/securedrop_client/resources/images/icon.png diff --git a/securedrop_client/resources/images/left_pane.svg b/client/securedrop_client/resources/images/left_pane.svg similarity index 100% rename from securedrop_client/resources/images/left_pane.svg rename to client/securedrop_client/resources/images/left_pane.svg diff --git a/securedrop_client/resources/images/left_pane_offline.svg b/client/securedrop_client/resources/images/left_pane_offline.svg similarity index 100% rename from securedrop_client/resources/images/left_pane_offline.svg rename to client/securedrop_client/resources/images/left_pane_offline.svg diff --git a/securedrop_client/resources/images/loading-bar.gif b/client/securedrop_client/resources/images/loading-bar.gif similarity index 100% rename from securedrop_client/resources/images/loading-bar.gif rename to client/securedrop_client/resources/images/loading-bar.gif diff --git a/securedrop_client/resources/images/loading-cubes.gif b/client/securedrop_client/resources/images/loading-cubes.gif similarity index 100% rename from securedrop_client/resources/images/loading-cubes.gif rename to client/securedrop_client/resources/images/loading-cubes.gif diff --git a/securedrop_client/resources/images/login_bg.svg b/client/securedrop_client/resources/images/login_bg.svg similarity index 100% rename from securedrop_client/resources/images/login_bg.svg rename to 
client/securedrop_client/resources/images/login_bg.svg diff --git a/securedrop_client/resources/images/logo.png b/client/securedrop_client/resources/images/logo.png similarity index 100% rename from securedrop_client/resources/images/logo.png rename to client/securedrop_client/resources/images/logo.png diff --git a/securedrop_client/resources/images/paperclip-disabled.svg b/client/securedrop_client/resources/images/paperclip-disabled.svg similarity index 100% rename from securedrop_client/resources/images/paperclip-disabled.svg rename to client/securedrop_client/resources/images/paperclip-disabled.svg diff --git a/securedrop_client/resources/images/paperclip.svg b/client/securedrop_client/resources/images/paperclip.svg similarity index 100% rename from securedrop_client/resources/images/paperclip.svg rename to client/securedrop_client/resources/images/paperclip.svg diff --git a/securedrop_client/resources/images/printer.svg b/client/securedrop_client/resources/images/printer.svg similarity index 100% rename from securedrop_client/resources/images/printer.svg rename to client/securedrop_client/resources/images/printer.svg diff --git a/securedrop_client/resources/images/refresh.svg b/client/securedrop_client/resources/images/refresh.svg similarity index 100% rename from securedrop_client/resources/images/refresh.svg rename to client/securedrop_client/resources/images/refresh.svg diff --git a/securedrop_client/resources/images/refresh_active.svg b/client/securedrop_client/resources/images/refresh_active.svg similarity index 100% rename from securedrop_client/resources/images/refresh_active.svg rename to client/securedrop_client/resources/images/refresh_active.svg diff --git a/securedrop_client/resources/images/refresh_offline.svg b/client/securedrop_client/resources/images/refresh_offline.svg similarity index 100% rename from securedrop_client/resources/images/refresh_offline.svg rename to client/securedrop_client/resources/images/refresh_offline.svg diff --git 
a/securedrop_client/resources/images/savetodisk.svg b/client/securedrop_client/resources/images/savetodisk.svg similarity index 100% rename from securedrop_client/resources/images/savetodisk.svg rename to client/securedrop_client/resources/images/savetodisk.svg diff --git a/securedrop_client/resources/images/send-disabled.svg b/client/securedrop_client/resources/images/send-disabled.svg similarity index 100% rename from securedrop_client/resources/images/send-disabled.svg rename to client/securedrop_client/resources/images/send-disabled.svg diff --git a/securedrop_client/resources/images/send.svg b/client/securedrop_client/resources/images/send.svg similarity index 100% rename from securedrop_client/resources/images/send.svg rename to client/securedrop_client/resources/images/send.svg diff --git a/securedrop_client/resources/images/star_hover.svg b/client/securedrop_client/resources/images/star_hover.svg similarity index 100% rename from securedrop_client/resources/images/star_hover.svg rename to client/securedrop_client/resources/images/star_hover.svg diff --git a/securedrop_client/resources/images/star_off.svg b/client/securedrop_client/resources/images/star_off.svg similarity index 100% rename from securedrop_client/resources/images/star_off.svg rename to client/securedrop_client/resources/images/star_off.svg diff --git a/securedrop_client/resources/images/star_on.svg b/client/securedrop_client/resources/images/star_on.svg similarity index 100% rename from securedrop_client/resources/images/star_on.svg rename to client/securedrop_client/resources/images/star_on.svg diff --git a/securedrop_client/resources/images/sync.gif b/client/securedrop_client/resources/images/sync.gif similarity index 100% rename from securedrop_client/resources/images/sync.gif rename to client/securedrop_client/resources/images/sync.gif diff --git a/securedrop_client/resources/images/sync.svg b/client/securedrop_client/resources/images/sync.svg similarity index 100% rename from 
securedrop_client/resources/images/sync.svg rename to client/securedrop_client/resources/images/sync.svg diff --git a/securedrop_client/resources/images/sync_active.gif b/client/securedrop_client/resources/images/sync_active.gif similarity index 100% rename from securedrop_client/resources/images/sync_active.gif rename to client/securedrop_client/resources/images/sync_active.gif diff --git a/securedrop_client/resources/images/sync_disabled.gif b/client/securedrop_client/resources/images/sync_disabled.gif similarity index 100% rename from securedrop_client/resources/images/sync_disabled.gif rename to client/securedrop_client/resources/images/sync_disabled.gif diff --git a/securedrop_client/resources/images/tear-big.svg b/client/securedrop_client/resources/images/tear-big.svg similarity index 100% rename from securedrop_client/resources/images/tear-big.svg rename to client/securedrop_client/resources/images/tear-big.svg diff --git a/securedrop_client/resources/images/tear-left.svg b/client/securedrop_client/resources/images/tear-left.svg similarity index 100% rename from securedrop_client/resources/images/tear-left.svg rename to client/securedrop_client/resources/images/tear-left.svg diff --git a/securedrop_client/resources/images/tear-right.svg b/client/securedrop_client/resources/images/tear-right.svg similarity index 100% rename from securedrop_client/resources/images/tear-right.svg rename to client/securedrop_client/resources/images/tear-right.svg diff --git a/securedrop_client/resources/images/trash.png b/client/securedrop_client/resources/images/trash.png similarity index 100% rename from securedrop_client/resources/images/trash.png rename to client/securedrop_client/resources/images/trash.png diff --git a/securedrop_client/state/__init__.py b/client/securedrop_client/state/__init__.py similarity index 100% rename from securedrop_client/state/__init__.py rename to client/securedrop_client/state/__init__.py diff --git a/securedrop_client/state/domain.py 
b/client/securedrop_client/state/domain.py similarity index 100% rename from securedrop_client/state/domain.py rename to client/securedrop_client/state/domain.py diff --git a/securedrop_client/state/state.py b/client/securedrop_client/state/state.py similarity index 100% rename from securedrop_client/state/state.py rename to client/securedrop_client/state/state.py diff --git a/securedrop_client/storage.py b/client/securedrop_client/storage.py similarity index 100% rename from securedrop_client/storage.py rename to client/securedrop_client/storage.py diff --git a/securedrop_client/sync.py b/client/securedrop_client/sync.py similarity index 100% rename from securedrop_client/sync.py rename to client/securedrop_client/sync.py diff --git a/securedrop_client/utils.py b/client/securedrop_client/utils.py similarity index 100% rename from securedrop_client/utils.py rename to client/securedrop_client/utils.py diff --git a/setup.cfg b/client/setup.cfg similarity index 100% rename from setup.cfg rename to client/setup.cfg diff --git a/setup.py b/client/setup.py similarity index 100% rename from setup.py rename to client/setup.py diff --git a/test-functional.sh b/client/test-functional.sh similarity index 100% rename from test-functional.sh rename to client/test-functional.sh diff --git a/tests/__init__.py b/client/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to client/tests/__init__.py diff --git a/tests/api_jobs/__init__.py b/client/tests/api_jobs/__init__.py similarity index 100% rename from tests/api_jobs/__init__.py rename to client/tests/api_jobs/__init__.py diff --git a/tests/api_jobs/test_base.py b/client/tests/api_jobs/test_base.py similarity index 100% rename from tests/api_jobs/test_base.py rename to client/tests/api_jobs/test_base.py diff --git a/tests/api_jobs/test_downloads.py b/client/tests/api_jobs/test_downloads.py similarity index 100% rename from tests/api_jobs/test_downloads.py rename to 
client/tests/api_jobs/test_downloads.py diff --git a/tests/api_jobs/test_seen.py b/client/tests/api_jobs/test_seen.py similarity index 100% rename from tests/api_jobs/test_seen.py rename to client/tests/api_jobs/test_seen.py diff --git a/tests/api_jobs/test_sources.py b/client/tests/api_jobs/test_sources.py similarity index 100% rename from tests/api_jobs/test_sources.py rename to client/tests/api_jobs/test_sources.py diff --git a/tests/api_jobs/test_sync.py b/client/tests/api_jobs/test_sync.py similarity index 100% rename from tests/api_jobs/test_sync.py rename to client/tests/api_jobs/test_sync.py diff --git a/tests/api_jobs/test_updatestar.py b/client/tests/api_jobs/test_updatestar.py similarity index 100% rename from tests/api_jobs/test_updatestar.py rename to client/tests/api_jobs/test_updatestar.py diff --git a/tests/api_jobs/test_uploads.py b/client/tests/api_jobs/test_uploads.py similarity index 100% rename from tests/api_jobs/test_uploads.py rename to client/tests/api_jobs/test_uploads.py diff --git a/tests/conftest.py b/client/tests/conftest.py similarity index 100% rename from tests/conftest.py rename to client/tests/conftest.py diff --git a/tests/factory.py b/client/tests/factory.py similarity index 100% rename from tests/factory.py rename to client/tests/factory.py diff --git a/tests/files/securedrop.gpg.asc b/client/tests/files/securedrop.gpg.asc similarity index 100% rename from tests/files/securedrop.gpg.asc rename to client/tests/files/securedrop.gpg.asc diff --git a/tests/files/securedrop.gpg.pub.asc b/client/tests/files/securedrop.gpg.pub.asc similarity index 100% rename from tests/files/securedrop.gpg.pub.asc rename to client/tests/files/securedrop.gpg.pub.asc diff --git a/tests/files/test-doc.gz.gpg b/client/tests/files/test-doc.gz.gpg similarity index 100% rename from tests/files/test-doc.gz.gpg rename to client/tests/files/test-doc.gz.gpg diff --git a/tests/files/test-key.gpg.asc b/client/tests/files/test-key.gpg.asc similarity index 100% 
rename from tests/files/test-key.gpg.asc rename to client/tests/files/test-key.gpg.asc diff --git a/tests/files/test-key.gpg.pub.asc b/client/tests/files/test-key.gpg.pub.asc similarity index 100% rename from tests/files/test-key.gpg.pub.asc rename to client/tests/files/test-key.gpg.pub.asc diff --git a/tests/functional/__init__.py b/client/tests/functional/__init__.py similarity index 100% rename from tests/functional/__init__.py rename to client/tests/functional/__init__.py diff --git a/tests/functional/cassettes/test_delete_source.yaml b/client/tests/functional/cassettes/test_delete_source.yaml similarity index 100% rename from tests/functional/cassettes/test_delete_source.yaml rename to client/tests/functional/cassettes/test_delete_source.yaml diff --git a/tests/functional/cassettes/test_download_file.yaml b/client/tests/functional/cassettes/test_download_file.yaml similarity index 100% rename from tests/functional/cassettes/test_download_file.yaml rename to client/tests/functional/cassettes/test_download_file.yaml diff --git a/tests/functional/cassettes/test_export_file_dialog.yaml b/client/tests/functional/cassettes/test_export_file_dialog.yaml similarity index 100% rename from tests/functional/cassettes/test_export_file_dialog.yaml rename to client/tests/functional/cassettes/test_export_file_dialog.yaml diff --git a/tests/functional/cassettes/test_login_as_journalist.yaml b/client/tests/functional/cassettes/test_login_as_journalist.yaml similarity index 100% rename from tests/functional/cassettes/test_login_as_journalist.yaml rename to client/tests/functional/cassettes/test_login_as_journalist.yaml diff --git a/tests/functional/cassettes/test_login_from_offline.yaml b/client/tests/functional/cassettes/test_login_from_offline.yaml similarity index 100% rename from tests/functional/cassettes/test_login_from_offline.yaml rename to client/tests/functional/cassettes/test_login_from_offline.yaml diff --git 
a/tests/functional/cassettes/test_logout_as_journalist.yaml b/client/tests/functional/cassettes/test_logout_as_journalist.yaml similarity index 100% rename from tests/functional/cassettes/test_logout_as_journalist.yaml rename to client/tests/functional/cassettes/test_logout_as_journalist.yaml diff --git a/tests/functional/cassettes/test_offline_delete_source_attempt.yaml b/client/tests/functional/cassettes/test_offline_delete_source_attempt.yaml similarity index 100% rename from tests/functional/cassettes/test_offline_delete_source_attempt.yaml rename to client/tests/functional/cassettes/test_offline_delete_source_attempt.yaml diff --git a/tests/functional/cassettes/test_offline_read_conversation.yaml b/client/tests/functional/cassettes/test_offline_read_conversation.yaml similarity index 100% rename from tests/functional/cassettes/test_offline_read_conversation.yaml rename to client/tests/functional/cassettes/test_offline_read_conversation.yaml diff --git a/tests/functional/cassettes/test_offline_send_reply_to_source.yaml b/client/tests/functional/cassettes/test_offline_send_reply_to_source.yaml similarity index 100% rename from tests/functional/cassettes/test_offline_send_reply_to_source.yaml rename to client/tests/functional/cassettes/test_offline_send_reply_to_source.yaml diff --git a/tests/functional/cassettes/test_offline_star_source.yaml b/client/tests/functional/cassettes/test_offline_star_source.yaml similarity index 100% rename from tests/functional/cassettes/test_offline_star_source.yaml rename to client/tests/functional/cassettes/test_offline_star_source.yaml diff --git a/tests/functional/cassettes/test_receive_message_from_source.yaml b/client/tests/functional/cassettes/test_receive_message_from_source.yaml similarity index 100% rename from tests/functional/cassettes/test_receive_message_from_source.yaml rename to client/tests/functional/cassettes/test_receive_message_from_source.yaml diff --git a/tests/functional/cassettes/test_seen_and_unseen.yaml 
b/client/tests/functional/cassettes/test_seen_and_unseen.yaml similarity index 100% rename from tests/functional/cassettes/test_seen_and_unseen.yaml rename to client/tests/functional/cassettes/test_seen_and_unseen.yaml diff --git a/tests/functional/cassettes/test_send_reply_to_source.yaml b/client/tests/functional/cassettes/test_send_reply_to_source.yaml similarity index 100% rename from tests/functional/cassettes/test_send_reply_to_source.yaml rename to client/tests/functional/cassettes/test_send_reply_to_source.yaml diff --git a/tests/functional/cassettes/test_star_source.yaml b/client/tests/functional/cassettes/test_star_source.yaml similarity index 100% rename from tests/functional/cassettes/test_star_source.yaml rename to client/tests/functional/cassettes/test_star_source.yaml diff --git a/tests/functional/cassettes/test_unseen_source_becomes_seen_on_click.yaml b/client/tests/functional/cassettes/test_unseen_source_becomes_seen_on_click.yaml similarity index 100% rename from tests/functional/cassettes/test_unseen_source_becomes_seen_on_click.yaml rename to client/tests/functional/cassettes/test_unseen_source_becomes_seen_on_click.yaml diff --git a/tests/functional/cassettes/test_user_icon_click.yaml b/client/tests/functional/cassettes/test_user_icon_click.yaml similarity index 100% rename from tests/functional/cassettes/test_user_icon_click.yaml rename to client/tests/functional/cassettes/test_user_icon_click.yaml diff --git a/tests/functional/test_delete_source.py b/client/tests/functional/test_delete_source.py similarity index 100% rename from tests/functional/test_delete_source.py rename to client/tests/functional/test_delete_source.py diff --git a/tests/functional/test_download_file.py b/client/tests/functional/test_download_file.py similarity index 100% rename from tests/functional/test_download_file.py rename to client/tests/functional/test_download_file.py diff --git a/tests/functional/test_export_file_dialog.py 
b/client/tests/functional/test_export_file_dialog.py similarity index 100% rename from tests/functional/test_export_file_dialog.py rename to client/tests/functional/test_export_file_dialog.py diff --git a/tests/functional/test_login.py b/client/tests/functional/test_login.py similarity index 100% rename from tests/functional/test_login.py rename to client/tests/functional/test_login.py diff --git a/tests/functional/test_logout.py b/client/tests/functional/test_logout.py similarity index 100% rename from tests/functional/test_logout.py rename to client/tests/functional/test_logout.py diff --git a/tests/functional/test_offline_delete_source.py b/client/tests/functional/test_offline_delete_source.py similarity index 100% rename from tests/functional/test_offline_delete_source.py rename to client/tests/functional/test_offline_delete_source.py diff --git a/tests/functional/test_offline_login.py b/client/tests/functional/test_offline_login.py similarity index 100% rename from tests/functional/test_offline_login.py rename to client/tests/functional/test_offline_login.py diff --git a/tests/functional/test_offline_read_conversation.py b/client/tests/functional/test_offline_read_conversation.py similarity index 100% rename from tests/functional/test_offline_read_conversation.py rename to client/tests/functional/test_offline_read_conversation.py diff --git a/tests/functional/test_offline_send_reply.py b/client/tests/functional/test_offline_send_reply.py similarity index 100% rename from tests/functional/test_offline_send_reply.py rename to client/tests/functional/test_offline_send_reply.py diff --git a/tests/functional/test_offline_star_source.py b/client/tests/functional/test_offline_star_source.py similarity index 100% rename from tests/functional/test_offline_star_source.py rename to client/tests/functional/test_offline_star_source.py diff --git a/tests/functional/test_receive_message.py b/client/tests/functional/test_receive_message.py similarity index 100% rename from 
tests/functional/test_receive_message.py rename to client/tests/functional/test_receive_message.py diff --git a/tests/functional/test_seen.py b/client/tests/functional/test_seen.py similarity index 100% rename from tests/functional/test_seen.py rename to client/tests/functional/test_seen.py diff --git a/tests/functional/test_send_reply.py b/client/tests/functional/test_send_reply.py similarity index 100% rename from tests/functional/test_send_reply.py rename to client/tests/functional/test_send_reply.py diff --git a/tests/functional/test_star_source.py b/client/tests/functional/test_star_source.py similarity index 100% rename from tests/functional/test_star_source.py rename to client/tests/functional/test_star_source.py diff --git a/tests/functional/test_user_profile_menu.py b/client/tests/functional/test_user_profile_menu.py similarity index 100% rename from tests/functional/test_user_profile_menu.py rename to client/tests/functional/test_user_profile_menu.py diff --git a/tests/gui/__init__.py b/client/tests/gui/__init__.py similarity index 100% rename from tests/gui/__init__.py rename to client/tests/gui/__init__.py diff --git a/tests/gui/auth/sign_in/test_error_bar.py b/client/tests/gui/auth/sign_in/test_error_bar.py similarity index 100% rename from tests/gui/auth/sign_in/test_error_bar.py rename to client/tests/gui/auth/sign_in/test_error_bar.py diff --git a/tests/gui/auth/test_dialog.py b/client/tests/gui/auth/test_dialog.py similarity index 100% rename from tests/gui/auth/test_dialog.py rename to client/tests/gui/auth/test_dialog.py diff --git a/tests/gui/base/test_dialogs.py b/client/tests/gui/base/test_dialogs.py similarity index 100% rename from tests/gui/base/test_dialogs.py rename to client/tests/gui/base/test_dialogs.py diff --git a/tests/gui/base/test_inputs.py b/client/tests/gui/base/test_inputs.py similarity index 100% rename from tests/gui/base/test_inputs.py rename to client/tests/gui/base/test_inputs.py diff --git a/tests/gui/base/test_misc.py 
b/client/tests/gui/base/test_misc.py similarity index 100% rename from tests/gui/base/test_misc.py rename to client/tests/gui/base/test_misc.py diff --git a/tests/gui/base/test_sdcheckbox.py b/client/tests/gui/base/test_sdcheckbox.py similarity index 100% rename from tests/gui/base/test_sdcheckbox.py rename to client/tests/gui/base/test_sdcheckbox.py diff --git a/tests/gui/conversation/delete/__init__.py b/client/tests/gui/conversation/delete/__init__.py similarity index 100% rename from tests/gui/conversation/delete/__init__.py rename to client/tests/gui/conversation/delete/__init__.py diff --git a/tests/gui/conversation/delete/test_dialog.py b/client/tests/gui/conversation/delete/test_dialog.py similarity index 100% rename from tests/gui/conversation/delete/test_dialog.py rename to client/tests/gui/conversation/delete/test_dialog.py diff --git a/tests/gui/conversation/export/__init__.py b/client/tests/gui/conversation/export/__init__.py similarity index 100% rename from tests/gui/conversation/export/__init__.py rename to client/tests/gui/conversation/export/__init__.py diff --git a/tests/gui/conversation/export/test_device.py b/client/tests/gui/conversation/export/test_device.py similarity index 100% rename from tests/gui/conversation/export/test_device.py rename to client/tests/gui/conversation/export/test_device.py diff --git a/tests/gui/conversation/export/test_dialog.py b/client/tests/gui/conversation/export/test_dialog.py similarity index 100% rename from tests/gui/conversation/export/test_dialog.py rename to client/tests/gui/conversation/export/test_dialog.py diff --git a/tests/gui/conversation/export/test_file_dialog.py b/client/tests/gui/conversation/export/test_file_dialog.py similarity index 100% rename from tests/gui/conversation/export/test_file_dialog.py rename to client/tests/gui/conversation/export/test_file_dialog.py diff --git a/tests/gui/conversation/export/test_print_dialog.py b/client/tests/gui/conversation/export/test_print_dialog.py 
similarity index 100% rename from tests/gui/conversation/export/test_print_dialog.py rename to client/tests/gui/conversation/export/test_print_dialog.py diff --git a/tests/gui/conversation/export/test_print_transcript_dialog.py b/client/tests/gui/conversation/export/test_print_transcript_dialog.py similarity index 100% rename from tests/gui/conversation/export/test_print_transcript_dialog.py rename to client/tests/gui/conversation/export/test_print_transcript_dialog.py diff --git a/tests/gui/conversation/export/test_transcript_dialog.py b/client/tests/gui/conversation/export/test_transcript_dialog.py similarity index 100% rename from tests/gui/conversation/export/test_transcript_dialog.py rename to client/tests/gui/conversation/export/test_transcript_dialog.py diff --git a/tests/gui/source/__init__.py b/client/tests/gui/source/__init__.py similarity index 100% rename from tests/gui/source/__init__.py rename to client/tests/gui/source/__init__.py diff --git a/tests/gui/source/delete/__init__.py b/client/tests/gui/source/delete/__init__.py similarity index 100% rename from tests/gui/source/delete/__init__.py rename to client/tests/gui/source/delete/__init__.py diff --git a/tests/gui/source/delete/test_dialog.py b/client/tests/gui/source/delete/test_dialog.py similarity index 100% rename from tests/gui/source/delete/test_dialog.py rename to client/tests/gui/source/delete/test_dialog.py diff --git a/tests/gui/test_actions.py b/client/tests/gui/test_actions.py similarity index 100% rename from tests/gui/test_actions.py rename to client/tests/gui/test_actions.py diff --git a/tests/gui/test_datetime_helpers.py b/client/tests/gui/test_datetime_helpers.py similarity index 100% rename from tests/gui/test_datetime_helpers.py rename to client/tests/gui/test_datetime_helpers.py diff --git a/tests/gui/test_main.py b/client/tests/gui/test_main.py similarity index 100% rename from tests/gui/test_main.py rename to client/tests/gui/test_main.py diff --git a/tests/gui/test_widgets.py 
b/client/tests/gui/test_widgets.py similarity index 100% rename from tests/gui/test_widgets.py rename to client/tests/gui/test_widgets.py diff --git a/tests/helper.py b/client/tests/helper.py similarity index 100% rename from tests/helper.py rename to client/tests/helper.py diff --git a/tests/integration/conftest.py b/client/tests/integration/conftest.py similarity index 100% rename from tests/integration/conftest.py rename to client/tests/integration/conftest.py diff --git a/tests/integration/test_placeholder.py b/client/tests/integration/test_placeholder.py similarity index 100% rename from tests/integration/test_placeholder.py rename to client/tests/integration/test_placeholder.py diff --git a/tests/integration/test_styles_file_download_button.py b/client/tests/integration/test_styles_file_download_button.py similarity index 100% rename from tests/integration/test_styles_file_download_button.py rename to client/tests/integration/test_styles_file_download_button.py diff --git a/tests/integration/test_styles_modal_dialog_button.py b/client/tests/integration/test_styles_modal_dialog_button.py similarity index 100% rename from tests/integration/test_styles_modal_dialog_button.py rename to client/tests/integration/test_styles_modal_dialog_button.py diff --git a/tests/integration/test_styles_modal_dialog_error_details.py b/client/tests/integration/test_styles_modal_dialog_error_details.py similarity index 100% rename from tests/integration/test_styles_modal_dialog_error_details.py rename to client/tests/integration/test_styles_modal_dialog_error_details.py diff --git a/tests/integration/test_styles_reply_status_bar.py b/client/tests/integration/test_styles_reply_status_bar.py similarity index 100% rename from tests/integration/test_styles_reply_status_bar.py rename to client/tests/integration/test_styles_reply_status_bar.py diff --git a/tests/integration/test_styles_sdclient.py b/client/tests/integration/test_styles_sdclient.py similarity index 100% rename from 
tests/integration/test_styles_sdclient.py rename to client/tests/integration/test_styles_sdclient.py diff --git a/tests/integration/test_styles_speech_bubble_message.py b/client/tests/integration/test_styles_speech_bubble_message.py similarity index 100% rename from tests/integration/test_styles_speech_bubble_message.py rename to client/tests/integration/test_styles_speech_bubble_message.py diff --git a/tests/integration/test_styles_speech_bubble_status_bar.py b/client/tests/integration/test_styles_speech_bubble_status_bar.py similarity index 100% rename from tests/integration/test_styles_speech_bubble_status_bar.py rename to client/tests/integration/test_styles_speech_bubble_status_bar.py diff --git a/tests/migrations/__init__.py b/client/tests/migrations/__init__.py similarity index 100% rename from tests/migrations/__init__.py rename to client/tests/migrations/__init__.py diff --git a/tests/migrations/test_d7c8af95bc8e.py b/client/tests/migrations/test_d7c8af95bc8e.py similarity index 100% rename from tests/migrations/test_d7c8af95bc8e.py rename to client/tests/migrations/test_d7c8af95bc8e.py diff --git a/tests/state/__init__.py b/client/tests/state/__init__.py similarity index 100% rename from tests/state/__init__.py rename to client/tests/state/__init__.py diff --git a/tests/state/test_domain.py b/client/tests/state/test_domain.py similarity index 100% rename from tests/state/test_domain.py rename to client/tests/state/test_domain.py diff --git a/tests/state/test_state.py b/client/tests/state/test_state.py similarity index 100% rename from tests/state/test_state.py rename to client/tests/state/test_state.py diff --git a/tests/test_alembic.py b/client/tests/test_alembic.py similarity index 100% rename from tests/test_alembic.py rename to client/tests/test_alembic.py diff --git a/tests/test_app.py b/client/tests/test_app.py similarity index 100% rename from tests/test_app.py rename to client/tests/test_app.py diff --git a/tests/test_config.py 
b/client/tests/test_config.py similarity index 100% rename from tests/test_config.py rename to client/tests/test_config.py diff --git a/tests/test_conversation.py b/client/tests/test_conversation.py similarity index 100% rename from tests/test_conversation.py rename to client/tests/test_conversation.py diff --git a/tests/test_crypto.py b/client/tests/test_crypto.py similarity index 100% rename from tests/test_crypto.py rename to client/tests/test_crypto.py diff --git a/tests/test_export.py b/client/tests/test_export.py similarity index 100% rename from tests/test_export.py rename to client/tests/test_export.py diff --git a/tests/test_logic.py b/client/tests/test_logic.py similarity index 100% rename from tests/test_logic.py rename to client/tests/test_logic.py diff --git a/tests/test_models.py b/client/tests/test_models.py similarity index 100% rename from tests/test_models.py rename to client/tests/test_models.py diff --git a/tests/test_queue.py b/client/tests/test_queue.py similarity index 100% rename from tests/test_queue.py rename to client/tests/test_queue.py diff --git a/tests/test_resources.py b/client/tests/test_resources.py similarity index 100% rename from tests/test_resources.py rename to client/tests/test_resources.py diff --git a/tests/test_storage.py b/client/tests/test_storage.py similarity index 100% rename from tests/test_storage.py rename to client/tests/test_storage.py diff --git a/tests/test_sync.py b/client/tests/test_sync.py similarity index 100% rename from tests/test_sync.py rename to client/tests/test_sync.py diff --git a/tests/test_utils.py b/client/tests/test_utils.py similarity index 100% rename from tests/test_utils.py rename to client/tests/test_utils.py diff --git a/update_version.sh b/client/update_version.sh similarity index 100% rename from update_version.sh rename to client/update_version.sh From c519b082d81833fddf8c79a1eb5d9fcd6ddb8a12 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 11 Dec 2023 16:46:09 -0500 Subject: 
[PATCH 347/352] Move export files into export/ folder --- {.circleci => export/.circleci}/config.yml | 0 .flake8 => export/.flake8 | 0 .gitignore => export/.gitignore | 0 {.semgrep => export/.semgrep}/custom-rules.yaml | 0 LICENSE => export/LICENSE | 0 MANIFEST.in => export/MANIFEST.in | 0 Makefile => export/Makefile | 0 README.md => export/README.md | 0 SECURITY.md => export/SECURITY.md | 0 .../build-requirements.txt | 0 changelog.md => export/changelog.md | 0 {files => export/files}/application-x-sd-export.xml | 0 {files => export/files}/sd-logo.png | Bin {files => export/files}/send-to-usb.desktop | 0 poetry.lock => export/poetry.lock | 0 pyproject.toml => export/pyproject.toml | 0 .../securedrop_export}/VERSION | 0 .../securedrop_export}/__init__.py | 0 .../securedrop_export}/archive.py | 0 .../securedrop_export}/command.py | 0 .../securedrop_export}/directory.py | 0 .../securedrop_export}/disk/__init__.py | 0 .../securedrop_export}/disk/cli.py | 0 .../securedrop_export}/disk/legacy_service.py | 0 .../securedrop_export}/disk/legacy_status.py | 0 .../securedrop_export}/disk/service.py | 0 .../securedrop_export}/disk/status.py | 0 .../securedrop_export}/disk/volume.py | 0 .../securedrop_export}/exceptions.py | 0 .../securedrop_export}/main.py | 0 .../securedrop_export}/print/__init__.py | 0 .../securedrop_export}/print/service.py | 0 .../securedrop_export}/print/status.py | 0 .../securedrop_export}/status.py | 0 setup.py => export/setup.py | 0 {tests => export/tests}/__init__.py | 0 {tests => export/tests}/disk/__init__.py | 0 {tests => export/tests}/disk/test_cli.py | 0 {tests => export/tests}/disk/test_service.py | 0 {tests => export/tests}/disk/test_volume.py | 0 {tests => export/tests}/print/__init__.py | 0 {tests => export/tests}/print/test_service.py | 0 {tests => export/tests}/test_archive.py | 0 {tests => export/tests}/test_directory.py | 0 {tests => export/tests}/test_exceptions.py | 0 {tests => export/tests}/test_main.py | 0 update_version.sh => 
export/update_version.sh | 0 47 files changed, 0 insertions(+), 0 deletions(-) rename {.circleci => export/.circleci}/config.yml (100%) rename .flake8 => export/.flake8 (100%) rename .gitignore => export/.gitignore (100%) rename {.semgrep => export/.semgrep}/custom-rules.yaml (100%) rename LICENSE => export/LICENSE (100%) rename MANIFEST.in => export/MANIFEST.in (100%) rename Makefile => export/Makefile (100%) rename README.md => export/README.md (100%) rename SECURITY.md => export/SECURITY.md (100%) rename build-requirements.txt => export/build-requirements.txt (100%) rename changelog.md => export/changelog.md (100%) rename {files => export/files}/application-x-sd-export.xml (100%) rename {files => export/files}/sd-logo.png (100%) rename {files => export/files}/send-to-usb.desktop (100%) rename poetry.lock => export/poetry.lock (100%) rename pyproject.toml => export/pyproject.toml (100%) rename {securedrop_export => export/securedrop_export}/VERSION (100%) rename {securedrop_export => export/securedrop_export}/__init__.py (100%) rename {securedrop_export => export/securedrop_export}/archive.py (100%) rename {securedrop_export => export/securedrop_export}/command.py (100%) rename {securedrop_export => export/securedrop_export}/directory.py (100%) rename {securedrop_export => export/securedrop_export}/disk/__init__.py (100%) rename {securedrop_export => export/securedrop_export}/disk/cli.py (100%) rename {securedrop_export => export/securedrop_export}/disk/legacy_service.py (100%) rename {securedrop_export => export/securedrop_export}/disk/legacy_status.py (100%) rename {securedrop_export => export/securedrop_export}/disk/service.py (100%) rename {securedrop_export => export/securedrop_export}/disk/status.py (100%) rename {securedrop_export => export/securedrop_export}/disk/volume.py (100%) rename {securedrop_export => export/securedrop_export}/exceptions.py (100%) rename {securedrop_export => export/securedrop_export}/main.py (100%) rename {securedrop_export => 
export/securedrop_export}/print/__init__.py (100%) rename {securedrop_export => export/securedrop_export}/print/service.py (100%) rename {securedrop_export => export/securedrop_export}/print/status.py (100%) rename {securedrop_export => export/securedrop_export}/status.py (100%) rename setup.py => export/setup.py (100%) rename {tests => export/tests}/__init__.py (100%) rename {tests => export/tests}/disk/__init__.py (100%) rename {tests => export/tests}/disk/test_cli.py (100%) rename {tests => export/tests}/disk/test_service.py (100%) rename {tests => export/tests}/disk/test_volume.py (100%) rename {tests => export/tests}/print/__init__.py (100%) rename {tests => export/tests}/print/test_service.py (100%) rename {tests => export/tests}/test_archive.py (100%) rename {tests => export/tests}/test_directory.py (100%) rename {tests => export/tests}/test_exceptions.py (100%) rename {tests => export/tests}/test_main.py (100%) rename update_version.sh => export/update_version.sh (100%) diff --git a/.circleci/config.yml b/export/.circleci/config.yml similarity index 100% rename from .circleci/config.yml rename to export/.circleci/config.yml diff --git a/.flake8 b/export/.flake8 similarity index 100% rename from .flake8 rename to export/.flake8 diff --git a/.gitignore b/export/.gitignore similarity index 100% rename from .gitignore rename to export/.gitignore diff --git a/.semgrep/custom-rules.yaml b/export/.semgrep/custom-rules.yaml similarity index 100% rename from .semgrep/custom-rules.yaml rename to export/.semgrep/custom-rules.yaml diff --git a/LICENSE b/export/LICENSE similarity index 100% rename from LICENSE rename to export/LICENSE diff --git a/MANIFEST.in b/export/MANIFEST.in similarity index 100% rename from MANIFEST.in rename to export/MANIFEST.in diff --git a/Makefile b/export/Makefile similarity index 100% rename from Makefile rename to export/Makefile diff --git a/README.md b/export/README.md similarity index 100% rename from README.md rename to 
export/README.md diff --git a/SECURITY.md b/export/SECURITY.md similarity index 100% rename from SECURITY.md rename to export/SECURITY.md diff --git a/build-requirements.txt b/export/build-requirements.txt similarity index 100% rename from build-requirements.txt rename to export/build-requirements.txt diff --git a/changelog.md b/export/changelog.md similarity index 100% rename from changelog.md rename to export/changelog.md diff --git a/files/application-x-sd-export.xml b/export/files/application-x-sd-export.xml similarity index 100% rename from files/application-x-sd-export.xml rename to export/files/application-x-sd-export.xml diff --git a/files/sd-logo.png b/export/files/sd-logo.png similarity index 100% rename from files/sd-logo.png rename to export/files/sd-logo.png diff --git a/files/send-to-usb.desktop b/export/files/send-to-usb.desktop similarity index 100% rename from files/send-to-usb.desktop rename to export/files/send-to-usb.desktop diff --git a/poetry.lock b/export/poetry.lock similarity index 100% rename from poetry.lock rename to export/poetry.lock diff --git a/pyproject.toml b/export/pyproject.toml similarity index 100% rename from pyproject.toml rename to export/pyproject.toml diff --git a/securedrop_export/VERSION b/export/securedrop_export/VERSION similarity index 100% rename from securedrop_export/VERSION rename to export/securedrop_export/VERSION diff --git a/securedrop_export/__init__.py b/export/securedrop_export/__init__.py similarity index 100% rename from securedrop_export/__init__.py rename to export/securedrop_export/__init__.py diff --git a/securedrop_export/archive.py b/export/securedrop_export/archive.py similarity index 100% rename from securedrop_export/archive.py rename to export/securedrop_export/archive.py diff --git a/securedrop_export/command.py b/export/securedrop_export/command.py similarity index 100% rename from securedrop_export/command.py rename to export/securedrop_export/command.py diff --git 
a/securedrop_export/directory.py b/export/securedrop_export/directory.py similarity index 100% rename from securedrop_export/directory.py rename to export/securedrop_export/directory.py diff --git a/securedrop_export/disk/__init__.py b/export/securedrop_export/disk/__init__.py similarity index 100% rename from securedrop_export/disk/__init__.py rename to export/securedrop_export/disk/__init__.py diff --git a/securedrop_export/disk/cli.py b/export/securedrop_export/disk/cli.py similarity index 100% rename from securedrop_export/disk/cli.py rename to export/securedrop_export/disk/cli.py diff --git a/securedrop_export/disk/legacy_service.py b/export/securedrop_export/disk/legacy_service.py similarity index 100% rename from securedrop_export/disk/legacy_service.py rename to export/securedrop_export/disk/legacy_service.py diff --git a/securedrop_export/disk/legacy_status.py b/export/securedrop_export/disk/legacy_status.py similarity index 100% rename from securedrop_export/disk/legacy_status.py rename to export/securedrop_export/disk/legacy_status.py diff --git a/securedrop_export/disk/service.py b/export/securedrop_export/disk/service.py similarity index 100% rename from securedrop_export/disk/service.py rename to export/securedrop_export/disk/service.py diff --git a/securedrop_export/disk/status.py b/export/securedrop_export/disk/status.py similarity index 100% rename from securedrop_export/disk/status.py rename to export/securedrop_export/disk/status.py diff --git a/securedrop_export/disk/volume.py b/export/securedrop_export/disk/volume.py similarity index 100% rename from securedrop_export/disk/volume.py rename to export/securedrop_export/disk/volume.py diff --git a/securedrop_export/exceptions.py b/export/securedrop_export/exceptions.py similarity index 100% rename from securedrop_export/exceptions.py rename to export/securedrop_export/exceptions.py diff --git a/securedrop_export/main.py b/export/securedrop_export/main.py similarity index 100% rename from 
securedrop_export/main.py rename to export/securedrop_export/main.py diff --git a/securedrop_export/print/__init__.py b/export/securedrop_export/print/__init__.py similarity index 100% rename from securedrop_export/print/__init__.py rename to export/securedrop_export/print/__init__.py diff --git a/securedrop_export/print/service.py b/export/securedrop_export/print/service.py similarity index 100% rename from securedrop_export/print/service.py rename to export/securedrop_export/print/service.py diff --git a/securedrop_export/print/status.py b/export/securedrop_export/print/status.py similarity index 100% rename from securedrop_export/print/status.py rename to export/securedrop_export/print/status.py diff --git a/securedrop_export/status.py b/export/securedrop_export/status.py similarity index 100% rename from securedrop_export/status.py rename to export/securedrop_export/status.py diff --git a/setup.py b/export/setup.py similarity index 100% rename from setup.py rename to export/setup.py diff --git a/tests/__init__.py b/export/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to export/tests/__init__.py diff --git a/tests/disk/__init__.py b/export/tests/disk/__init__.py similarity index 100% rename from tests/disk/__init__.py rename to export/tests/disk/__init__.py diff --git a/tests/disk/test_cli.py b/export/tests/disk/test_cli.py similarity index 100% rename from tests/disk/test_cli.py rename to export/tests/disk/test_cli.py diff --git a/tests/disk/test_service.py b/export/tests/disk/test_service.py similarity index 100% rename from tests/disk/test_service.py rename to export/tests/disk/test_service.py diff --git a/tests/disk/test_volume.py b/export/tests/disk/test_volume.py similarity index 100% rename from tests/disk/test_volume.py rename to export/tests/disk/test_volume.py diff --git a/tests/print/__init__.py b/export/tests/print/__init__.py similarity index 100% rename from tests/print/__init__.py rename to 
export/tests/print/__init__.py diff --git a/tests/print/test_service.py b/export/tests/print/test_service.py similarity index 100% rename from tests/print/test_service.py rename to export/tests/print/test_service.py diff --git a/tests/test_archive.py b/export/tests/test_archive.py similarity index 100% rename from tests/test_archive.py rename to export/tests/test_archive.py diff --git a/tests/test_directory.py b/export/tests/test_directory.py similarity index 100% rename from tests/test_directory.py rename to export/tests/test_directory.py diff --git a/tests/test_exceptions.py b/export/tests/test_exceptions.py similarity index 100% rename from tests/test_exceptions.py rename to export/tests/test_exceptions.py diff --git a/tests/test_main.py b/export/tests/test_main.py similarity index 100% rename from tests/test_main.py rename to export/tests/test_main.py diff --git a/update_version.sh b/export/update_version.sh similarity index 100% rename from update_version.sh rename to export/update_version.sh From 5a01a429204e56786a37f1cd5b140c10ea63df8a Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 11 Dec 2023 16:50:10 -0500 Subject: [PATCH 348/352] Move log files into log/ folder --- {.circleci => log/.circleci}/config.yml | 0 .flake8 => log/.flake8 | 0 .gitignore => log/.gitignore | 0 LICENSE => log/LICENSE | 0 MANIFEST.in => log/MANIFEST.in | 0 Makefile => log/Makefile | 0 README.md => log/README.md | 0 VERSION => log/VERSION | 0 build-requirements.txt => log/build-requirements.txt | 0 changelog.md => log/changelog.md | 0 ex1.py => log/ex1.py | 0 ex2.py => log/ex2.py | 0 example.py => log/example.py | 0 journal-example.py => log/journal-example.py | 0 poetry.lock => log/poetry.lock | 0 project.json => log/project.json | 0 pyproject.toml => log/pyproject.toml | 0 sd-rsyslog => log/sd-rsyslog | 0 sd-rsyslog-example.conf => log/sd-rsyslog-example.conf | 0 sdlog.conf => log/sdlog.conf | 0 securedrop-log => log/securedrop-log | 0 securedrop-log-saver => 
log/securedrop-log-saver | 0 securedrop-log.service => log/securedrop-log.service | 0 securedrop-redis-log => log/securedrop-redis-log | 0 securedrop.Log => log/securedrop.Log | 0 {securedrop_log => log/securedrop_log}/__init__.py | 0 setup.py => log/setup.py | 0 {tests => log/tests}/__init__.py | 0 {tests => log/tests}/test_logger.py | 0 update_version.sh => log/update_version.sh | 0 30 files changed, 0 insertions(+), 0 deletions(-) rename {.circleci => log/.circleci}/config.yml (100%) rename .flake8 => log/.flake8 (100%) rename .gitignore => log/.gitignore (100%) rename LICENSE => log/LICENSE (100%) rename MANIFEST.in => log/MANIFEST.in (100%) rename Makefile => log/Makefile (100%) rename README.md => log/README.md (100%) rename VERSION => log/VERSION (100%) rename build-requirements.txt => log/build-requirements.txt (100%) rename changelog.md => log/changelog.md (100%) rename ex1.py => log/ex1.py (100%) rename ex2.py => log/ex2.py (100%) rename example.py => log/example.py (100%) rename journal-example.py => log/journal-example.py (100%) rename poetry.lock => log/poetry.lock (100%) rename project.json => log/project.json (100%) rename pyproject.toml => log/pyproject.toml (100%) rename sd-rsyslog => log/sd-rsyslog (100%) rename sd-rsyslog-example.conf => log/sd-rsyslog-example.conf (100%) rename sdlog.conf => log/sdlog.conf (100%) rename securedrop-log => log/securedrop-log (100%) rename securedrop-log-saver => log/securedrop-log-saver (100%) rename securedrop-log.service => log/securedrop-log.service (100%) rename securedrop-redis-log => log/securedrop-redis-log (100%) rename securedrop.Log => log/securedrop.Log (100%) rename {securedrop_log => log/securedrop_log}/__init__.py (100%) rename setup.py => log/setup.py (100%) rename {tests => log/tests}/__init__.py (100%) rename {tests => log/tests}/test_logger.py (100%) rename update_version.sh => log/update_version.sh (100%) diff --git a/.circleci/config.yml b/log/.circleci/config.yml similarity index 100% rename 
from .circleci/config.yml rename to log/.circleci/config.yml diff --git a/.flake8 b/log/.flake8 similarity index 100% rename from .flake8 rename to log/.flake8 diff --git a/.gitignore b/log/.gitignore similarity index 100% rename from .gitignore rename to log/.gitignore diff --git a/LICENSE b/log/LICENSE similarity index 100% rename from LICENSE rename to log/LICENSE diff --git a/MANIFEST.in b/log/MANIFEST.in similarity index 100% rename from MANIFEST.in rename to log/MANIFEST.in diff --git a/Makefile b/log/Makefile similarity index 100% rename from Makefile rename to log/Makefile diff --git a/README.md b/log/README.md similarity index 100% rename from README.md rename to log/README.md diff --git a/VERSION b/log/VERSION similarity index 100% rename from VERSION rename to log/VERSION diff --git a/build-requirements.txt b/log/build-requirements.txt similarity index 100% rename from build-requirements.txt rename to log/build-requirements.txt diff --git a/changelog.md b/log/changelog.md similarity index 100% rename from changelog.md rename to log/changelog.md diff --git a/ex1.py b/log/ex1.py similarity index 100% rename from ex1.py rename to log/ex1.py diff --git a/ex2.py b/log/ex2.py similarity index 100% rename from ex2.py rename to log/ex2.py diff --git a/example.py b/log/example.py similarity index 100% rename from example.py rename to log/example.py diff --git a/journal-example.py b/log/journal-example.py similarity index 100% rename from journal-example.py rename to log/journal-example.py diff --git a/poetry.lock b/log/poetry.lock similarity index 100% rename from poetry.lock rename to log/poetry.lock diff --git a/project.json b/log/project.json similarity index 100% rename from project.json rename to log/project.json diff --git a/pyproject.toml b/log/pyproject.toml similarity index 100% rename from pyproject.toml rename to log/pyproject.toml diff --git a/sd-rsyslog b/log/sd-rsyslog similarity index 100% rename from sd-rsyslog rename to log/sd-rsyslog diff --git 
a/sd-rsyslog-example.conf b/log/sd-rsyslog-example.conf similarity index 100% rename from sd-rsyslog-example.conf rename to log/sd-rsyslog-example.conf diff --git a/sdlog.conf b/log/sdlog.conf similarity index 100% rename from sdlog.conf rename to log/sdlog.conf diff --git a/securedrop-log b/log/securedrop-log similarity index 100% rename from securedrop-log rename to log/securedrop-log diff --git a/securedrop-log-saver b/log/securedrop-log-saver similarity index 100% rename from securedrop-log-saver rename to log/securedrop-log-saver diff --git a/securedrop-log.service b/log/securedrop-log.service similarity index 100% rename from securedrop-log.service rename to log/securedrop-log.service diff --git a/securedrop-redis-log b/log/securedrop-redis-log similarity index 100% rename from securedrop-redis-log rename to log/securedrop-redis-log diff --git a/securedrop.Log b/log/securedrop.Log similarity index 100% rename from securedrop.Log rename to log/securedrop.Log diff --git a/securedrop_log/__init__.py b/log/securedrop_log/__init__.py similarity index 100% rename from securedrop_log/__init__.py rename to log/securedrop_log/__init__.py diff --git a/setup.py b/log/setup.py similarity index 100% rename from setup.py rename to log/setup.py diff --git a/tests/__init__.py b/log/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to log/tests/__init__.py diff --git a/tests/test_logger.py b/log/tests/test_logger.py similarity index 100% rename from tests/test_logger.py rename to log/tests/test_logger.py diff --git a/update_version.sh b/log/update_version.sh similarity index 100% rename from update_version.sh rename to log/update_version.sh From 1666d1e67a68ff9cb67ae12cb6e586d58de26d80 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 11 Dec 2023 17:03:35 -0500 Subject: [PATCH 349/352] Move proxy files into proxy/ folder --- {.circleci => proxy/.circleci}/config.yml | 0 .git-blame-ignore-revs => proxy/.git-blame-ignore-revs | 0 .gitignore => 
proxy/.gitignore | 0 LICENSE => proxy/LICENSE | 0 MANIFEST.in => proxy/MANIFEST.in | 0 Makefile => proxy/Makefile | 0 README.md => proxy/README.md | 0 build-requirements.txt => proxy/build-requirements.txt | 0 changelog.md => proxy/changelog.md | 0 config-example.yaml => proxy/config-example.yaml | 0 entrypoint.sh => proxy/entrypoint.sh | 0 {examples => proxy/examples}/bad.json | 0 {examples => proxy/examples}/html.json | 0 {examples => proxy/examples}/posts.json | 0 {fixtures => proxy/fixtures}/basic_proxy_functionality.yaml | 0 {fixtures => proxy/fixtures}/main_error_response.yaml | 0 {fixtures => proxy/fixtures}/main_input_body.yaml | 0 {fixtures => proxy/fixtures}/main_input_headers.yaml | 0 {fixtures => proxy/fixtures}/main_json_response.yaml | 0 {fixtures => proxy/fixtures}/main_json_response_with_timeout.yaml | 0 {fixtures => proxy/fixtures}/main_non_json_response.yaml | 0 {fixtures => proxy/fixtures}/proxy_200_valid_path.yaml | 0 {fixtures => proxy/fixtures}/proxy_404.yaml | 0 {fixtures => proxy/fixtures}/proxy_bad_request.yaml | 0 {fixtures => proxy/fixtures}/proxy_callbacks.yaml | 0 {fixtures => proxy/fixtures}/proxy_cannot_connect.yaml | 0 {fixtures => proxy/fixtures}/proxy_internal_error.yaml | 0 {fixtures => proxy/fixtures}/proxy_internal_server_error.yaml | 0 {fixtures => proxy/fixtures}/proxy_parameters.yaml | 0 {fixtures => proxy/fixtures}/proxy_unofficial_status.yaml | 0 poetry.lock => proxy/poetry.lock | 0 pyproject.toml => proxy/pyproject.toml | 0 {qubes => proxy/qubes}/securedrop.Proxy | 0 {securedrop_proxy => proxy/securedrop_proxy}/VERSION | 0 {securedrop_proxy => proxy/securedrop_proxy}/__init__.py | 0 {securedrop_proxy => proxy/securedrop_proxy}/entrypoint.py | 0 {securedrop_proxy => proxy/securedrop_proxy}/json.py | 0 {securedrop_proxy => proxy/securedrop_proxy}/main.py | 0 {securedrop_proxy => proxy/securedrop_proxy}/proxy.py | 0 {securedrop_proxy => proxy/securedrop_proxy}/version.py | 0 setup.cfg => proxy/setup.cfg | 0 setup.py => 
proxy/setup.py | 0 {tests => proxy/tests}/__init__.py | 0 {tests => proxy/tests}/files/badgateway-config.yaml | 0 {tests => proxy/tests}/files/dev-config.yaml | 0 {tests => proxy/tests}/files/invalid-config.yaml | 0 {tests => proxy/tests}/files/invalid_yaml.yaml | 0 {tests => proxy/tests}/files/local-config.yaml | 0 {tests => proxy/tests}/files/missing-key.yaml | 0 {tests => proxy/tests}/files/missing-target-vm.yaml | 0 {tests => proxy/tests}/files/valid-config.yaml | 0 {tests => proxy/tests}/test_entrypoint.py | 0 {tests => proxy/tests}/test_json.py | 0 {tests => proxy/tests}/test_main.py | 0 {tests => proxy/tests}/test_proxy.py | 0 update_version.sh => proxy/update_version.sh | 0 56 files changed, 0 insertions(+), 0 deletions(-) rename {.circleci => proxy/.circleci}/config.yml (100%) rename .git-blame-ignore-revs => proxy/.git-blame-ignore-revs (100%) rename .gitignore => proxy/.gitignore (100%) rename LICENSE => proxy/LICENSE (100%) rename MANIFEST.in => proxy/MANIFEST.in (100%) rename Makefile => proxy/Makefile (100%) rename README.md => proxy/README.md (100%) rename build-requirements.txt => proxy/build-requirements.txt (100%) rename changelog.md => proxy/changelog.md (100%) rename config-example.yaml => proxy/config-example.yaml (100%) rename entrypoint.sh => proxy/entrypoint.sh (100%) rename {examples => proxy/examples}/bad.json (100%) rename {examples => proxy/examples}/html.json (100%) rename {examples => proxy/examples}/posts.json (100%) rename {fixtures => proxy/fixtures}/basic_proxy_functionality.yaml (100%) rename {fixtures => proxy/fixtures}/main_error_response.yaml (100%) rename {fixtures => proxy/fixtures}/main_input_body.yaml (100%) rename {fixtures => proxy/fixtures}/main_input_headers.yaml (100%) rename {fixtures => proxy/fixtures}/main_json_response.yaml (100%) rename {fixtures => proxy/fixtures}/main_json_response_with_timeout.yaml (100%) rename {fixtures => proxy/fixtures}/main_non_json_response.yaml (100%) rename {fixtures => 
proxy/fixtures}/proxy_200_valid_path.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_404.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_bad_request.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_callbacks.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_cannot_connect.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_internal_error.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_internal_server_error.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_parameters.yaml (100%) rename {fixtures => proxy/fixtures}/proxy_unofficial_status.yaml (100%) rename poetry.lock => proxy/poetry.lock (100%) rename pyproject.toml => proxy/pyproject.toml (100%) rename {qubes => proxy/qubes}/securedrop.Proxy (100%) rename {securedrop_proxy => proxy/securedrop_proxy}/VERSION (100%) rename {securedrop_proxy => proxy/securedrop_proxy}/__init__.py (100%) rename {securedrop_proxy => proxy/securedrop_proxy}/entrypoint.py (100%) rename {securedrop_proxy => proxy/securedrop_proxy}/json.py (100%) rename {securedrop_proxy => proxy/securedrop_proxy}/main.py (100%) rename {securedrop_proxy => proxy/securedrop_proxy}/proxy.py (100%) rename {securedrop_proxy => proxy/securedrop_proxy}/version.py (100%) rename setup.cfg => proxy/setup.cfg (100%) rename setup.py => proxy/setup.py (100%) rename {tests => proxy/tests}/__init__.py (100%) rename {tests => proxy/tests}/files/badgateway-config.yaml (100%) rename {tests => proxy/tests}/files/dev-config.yaml (100%) rename {tests => proxy/tests}/files/invalid-config.yaml (100%) rename {tests => proxy/tests}/files/invalid_yaml.yaml (100%) rename {tests => proxy/tests}/files/local-config.yaml (100%) rename {tests => proxy/tests}/files/missing-key.yaml (100%) rename {tests => proxy/tests}/files/missing-target-vm.yaml (100%) rename {tests => proxy/tests}/files/valid-config.yaml (100%) rename {tests => proxy/tests}/test_entrypoint.py (100%) rename {tests => proxy/tests}/test_json.py (100%) rename {tests => 
proxy/tests}/test_main.py (100%) rename {tests => proxy/tests}/test_proxy.py (100%) rename update_version.sh => proxy/update_version.sh (100%) diff --git a/.circleci/config.yml b/proxy/.circleci/config.yml similarity index 100% rename from .circleci/config.yml rename to proxy/.circleci/config.yml diff --git a/.git-blame-ignore-revs b/proxy/.git-blame-ignore-revs similarity index 100% rename from .git-blame-ignore-revs rename to proxy/.git-blame-ignore-revs diff --git a/.gitignore b/proxy/.gitignore similarity index 100% rename from .gitignore rename to proxy/.gitignore diff --git a/LICENSE b/proxy/LICENSE similarity index 100% rename from LICENSE rename to proxy/LICENSE diff --git a/MANIFEST.in b/proxy/MANIFEST.in similarity index 100% rename from MANIFEST.in rename to proxy/MANIFEST.in diff --git a/Makefile b/proxy/Makefile similarity index 100% rename from Makefile rename to proxy/Makefile diff --git a/README.md b/proxy/README.md similarity index 100% rename from README.md rename to proxy/README.md diff --git a/build-requirements.txt b/proxy/build-requirements.txt similarity index 100% rename from build-requirements.txt rename to proxy/build-requirements.txt diff --git a/changelog.md b/proxy/changelog.md similarity index 100% rename from changelog.md rename to proxy/changelog.md diff --git a/config-example.yaml b/proxy/config-example.yaml similarity index 100% rename from config-example.yaml rename to proxy/config-example.yaml diff --git a/entrypoint.sh b/proxy/entrypoint.sh similarity index 100% rename from entrypoint.sh rename to proxy/entrypoint.sh diff --git a/examples/bad.json b/proxy/examples/bad.json similarity index 100% rename from examples/bad.json rename to proxy/examples/bad.json diff --git a/examples/html.json b/proxy/examples/html.json similarity index 100% rename from examples/html.json rename to proxy/examples/html.json diff --git a/examples/posts.json b/proxy/examples/posts.json similarity index 100% rename from examples/posts.json rename to 
proxy/examples/posts.json diff --git a/fixtures/basic_proxy_functionality.yaml b/proxy/fixtures/basic_proxy_functionality.yaml similarity index 100% rename from fixtures/basic_proxy_functionality.yaml rename to proxy/fixtures/basic_proxy_functionality.yaml diff --git a/fixtures/main_error_response.yaml b/proxy/fixtures/main_error_response.yaml similarity index 100% rename from fixtures/main_error_response.yaml rename to proxy/fixtures/main_error_response.yaml diff --git a/fixtures/main_input_body.yaml b/proxy/fixtures/main_input_body.yaml similarity index 100% rename from fixtures/main_input_body.yaml rename to proxy/fixtures/main_input_body.yaml diff --git a/fixtures/main_input_headers.yaml b/proxy/fixtures/main_input_headers.yaml similarity index 100% rename from fixtures/main_input_headers.yaml rename to proxy/fixtures/main_input_headers.yaml diff --git a/fixtures/main_json_response.yaml b/proxy/fixtures/main_json_response.yaml similarity index 100% rename from fixtures/main_json_response.yaml rename to proxy/fixtures/main_json_response.yaml diff --git a/fixtures/main_json_response_with_timeout.yaml b/proxy/fixtures/main_json_response_with_timeout.yaml similarity index 100% rename from fixtures/main_json_response_with_timeout.yaml rename to proxy/fixtures/main_json_response_with_timeout.yaml diff --git a/fixtures/main_non_json_response.yaml b/proxy/fixtures/main_non_json_response.yaml similarity index 100% rename from fixtures/main_non_json_response.yaml rename to proxy/fixtures/main_non_json_response.yaml diff --git a/fixtures/proxy_200_valid_path.yaml b/proxy/fixtures/proxy_200_valid_path.yaml similarity index 100% rename from fixtures/proxy_200_valid_path.yaml rename to proxy/fixtures/proxy_200_valid_path.yaml diff --git a/fixtures/proxy_404.yaml b/proxy/fixtures/proxy_404.yaml similarity index 100% rename from fixtures/proxy_404.yaml rename to proxy/fixtures/proxy_404.yaml diff --git a/fixtures/proxy_bad_request.yaml b/proxy/fixtures/proxy_bad_request.yaml 
similarity index 100% rename from fixtures/proxy_bad_request.yaml rename to proxy/fixtures/proxy_bad_request.yaml diff --git a/fixtures/proxy_callbacks.yaml b/proxy/fixtures/proxy_callbacks.yaml similarity index 100% rename from fixtures/proxy_callbacks.yaml rename to proxy/fixtures/proxy_callbacks.yaml diff --git a/fixtures/proxy_cannot_connect.yaml b/proxy/fixtures/proxy_cannot_connect.yaml similarity index 100% rename from fixtures/proxy_cannot_connect.yaml rename to proxy/fixtures/proxy_cannot_connect.yaml diff --git a/fixtures/proxy_internal_error.yaml b/proxy/fixtures/proxy_internal_error.yaml similarity index 100% rename from fixtures/proxy_internal_error.yaml rename to proxy/fixtures/proxy_internal_error.yaml diff --git a/fixtures/proxy_internal_server_error.yaml b/proxy/fixtures/proxy_internal_server_error.yaml similarity index 100% rename from fixtures/proxy_internal_server_error.yaml rename to proxy/fixtures/proxy_internal_server_error.yaml diff --git a/fixtures/proxy_parameters.yaml b/proxy/fixtures/proxy_parameters.yaml similarity index 100% rename from fixtures/proxy_parameters.yaml rename to proxy/fixtures/proxy_parameters.yaml diff --git a/fixtures/proxy_unofficial_status.yaml b/proxy/fixtures/proxy_unofficial_status.yaml similarity index 100% rename from fixtures/proxy_unofficial_status.yaml rename to proxy/fixtures/proxy_unofficial_status.yaml diff --git a/poetry.lock b/proxy/poetry.lock similarity index 100% rename from poetry.lock rename to proxy/poetry.lock diff --git a/pyproject.toml b/proxy/pyproject.toml similarity index 100% rename from pyproject.toml rename to proxy/pyproject.toml diff --git a/qubes/securedrop.Proxy b/proxy/qubes/securedrop.Proxy similarity index 100% rename from qubes/securedrop.Proxy rename to proxy/qubes/securedrop.Proxy diff --git a/securedrop_proxy/VERSION b/proxy/securedrop_proxy/VERSION similarity index 100% rename from securedrop_proxy/VERSION rename to proxy/securedrop_proxy/VERSION diff --git 
a/securedrop_proxy/__init__.py b/proxy/securedrop_proxy/__init__.py similarity index 100% rename from securedrop_proxy/__init__.py rename to proxy/securedrop_proxy/__init__.py diff --git a/securedrop_proxy/entrypoint.py b/proxy/securedrop_proxy/entrypoint.py similarity index 100% rename from securedrop_proxy/entrypoint.py rename to proxy/securedrop_proxy/entrypoint.py diff --git a/securedrop_proxy/json.py b/proxy/securedrop_proxy/json.py similarity index 100% rename from securedrop_proxy/json.py rename to proxy/securedrop_proxy/json.py diff --git a/securedrop_proxy/main.py b/proxy/securedrop_proxy/main.py similarity index 100% rename from securedrop_proxy/main.py rename to proxy/securedrop_proxy/main.py diff --git a/securedrop_proxy/proxy.py b/proxy/securedrop_proxy/proxy.py similarity index 100% rename from securedrop_proxy/proxy.py rename to proxy/securedrop_proxy/proxy.py diff --git a/securedrop_proxy/version.py b/proxy/securedrop_proxy/version.py similarity index 100% rename from securedrop_proxy/version.py rename to proxy/securedrop_proxy/version.py diff --git a/setup.cfg b/proxy/setup.cfg similarity index 100% rename from setup.cfg rename to proxy/setup.cfg diff --git a/setup.py b/proxy/setup.py similarity index 100% rename from setup.py rename to proxy/setup.py diff --git a/tests/__init__.py b/proxy/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to proxy/tests/__init__.py diff --git a/tests/files/badgateway-config.yaml b/proxy/tests/files/badgateway-config.yaml similarity index 100% rename from tests/files/badgateway-config.yaml rename to proxy/tests/files/badgateway-config.yaml diff --git a/tests/files/dev-config.yaml b/proxy/tests/files/dev-config.yaml similarity index 100% rename from tests/files/dev-config.yaml rename to proxy/tests/files/dev-config.yaml diff --git a/tests/files/invalid-config.yaml b/proxy/tests/files/invalid-config.yaml similarity index 100% rename from tests/files/invalid-config.yaml rename to 
proxy/tests/files/invalid-config.yaml diff --git a/tests/files/invalid_yaml.yaml b/proxy/tests/files/invalid_yaml.yaml similarity index 100% rename from tests/files/invalid_yaml.yaml rename to proxy/tests/files/invalid_yaml.yaml diff --git a/tests/files/local-config.yaml b/proxy/tests/files/local-config.yaml similarity index 100% rename from tests/files/local-config.yaml rename to proxy/tests/files/local-config.yaml diff --git a/tests/files/missing-key.yaml b/proxy/tests/files/missing-key.yaml similarity index 100% rename from tests/files/missing-key.yaml rename to proxy/tests/files/missing-key.yaml diff --git a/tests/files/missing-target-vm.yaml b/proxy/tests/files/missing-target-vm.yaml similarity index 100% rename from tests/files/missing-target-vm.yaml rename to proxy/tests/files/missing-target-vm.yaml diff --git a/tests/files/valid-config.yaml b/proxy/tests/files/valid-config.yaml similarity index 100% rename from tests/files/valid-config.yaml rename to proxy/tests/files/valid-config.yaml diff --git a/tests/test_entrypoint.py b/proxy/tests/test_entrypoint.py similarity index 100% rename from tests/test_entrypoint.py rename to proxy/tests/test_entrypoint.py diff --git a/tests/test_json.py b/proxy/tests/test_json.py similarity index 100% rename from tests/test_json.py rename to proxy/tests/test_json.py diff --git a/tests/test_main.py b/proxy/tests/test_main.py similarity index 100% rename from tests/test_main.py rename to proxy/tests/test_main.py diff --git a/tests/test_proxy.py b/proxy/tests/test_proxy.py similarity index 100% rename from tests/test_proxy.py rename to proxy/tests/test_proxy.py diff --git a/update_version.sh b/proxy/update_version.sh similarity index 100% rename from update_version.sh rename to proxy/update_version.sh From aed4155a266dc8a1fc5bf1f6f5b27231e842adb3 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 11 Dec 2023 17:29:21 -0500 Subject: [PATCH 350/352] Merge CircleCI manifests into one This is a very naive complete merge by 
prefixing anchors and job names with component names. De-duplication and consolidation will happen in future commits. --- .circleci/config.yml | 679 ++++++++++++++++++++++++++++++++++++ client/.circleci/config.yml | 247 ------------- export/.circleci/config.yml | 171 --------- log/.circleci/config.yml | 90 ----- proxy/.circleci/config.yml | 179 ---------- 5 files changed, 679 insertions(+), 687 deletions(-) create mode 100644 .circleci/config.yml delete mode 100644 client/.circleci/config.yml delete mode 100644 export/.circleci/config.yml delete mode 100644 log/.circleci/config.yml delete mode 100644 proxy/.circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000..b9f6468d4 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,679 @@ +--- +common-steps: + - &client_install_poetry + run: + name: Install Poetry + command: | + set -e + source /etc/os-release + if [[ "$VERSION_CODENAME" == "bullseye" ]]; then + # Install Poetry via PyPI + apt-get update && apt-get install --yes --no-install-recommends python3-pip + pip install poetry==1.6.1 + elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then + # Install Poetry via system package + apt-get update && apt-get install --yes --no-install-recommends python3-poetry + else + echo "Unsupported Debian version: $VERSION_CODENAME" + exit 1 + fi + + - &client_install_testing_dependencies + run: + name: Install testing dependencies + command: | + set -e + apt update && apt install -y git gnupg libarchive13 libmagic1 libqt5x11extras5 make python3-tk python3-dev gnupg python3-venv sqlite3 xvfb + cd client + poetry install --no-ansi + + - &client_configure_locales + run: + name: Configure locales + command: | + set -e + apt update && apt install -y locales + echo "en_US ISO-8859-1" >> /etc/locale.gen + echo "en_US UTF-8" >> /etc/locale.gen + locale-gen + + - &client_install_build_dependencies + run: + name: Install build dependencies + command: | + set -e + apt update && apt 
install -y git make sudo + + - &client_run_unit_tests + run: + name: Install requirements and run unit tests + command: | + set -e + cd client + export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata + make test-random + + - &client_run_integration_tests + run: + name: Install requirements and run integration tests + command: | + set -e + cd client + export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata + make test-integration + + - &client_run_functional_tests + run: + name: Install requirements and run functional tests + command: | + set -e + cd client + export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata + make test-functional + + - &client_run_lint + run: + name: Run lint, type checking, code formatting + command: | + set -e + make -C client check-black check-isort lint mypy + + - &client_check_security + run: + name: Run static analysis on source code to find security issues + command: | + set -e + make -C client semgrep bandit + + - &client_check_source_strings + run: + name: Check that source strings are updated + command: | + set -e + make -C client check-strings + + - &client_check_mo_repro + run: + name: Check that translation machine objects are reproducible + command: | + set -e + make -C client verify-mo + + - &client_check_python_dependencies_for_vulnerabilities + run: + name: Check Python dependencies for known vulnerabilities + command: | + set -e + make -C client safety + + - &client_install_packaging_dependencies + run: + name: Install Debian packaging dependencies and download Python wheels + command: | + set -x + mkdir ~/packaging && cd ~/packaging + # local builds may not have an ssh url, so || true + git config --global --unset url.ssh://git@github.com.insteadof || true + git clone https://github.com/freedomofpress/securedrop-builder.git + cd securedrop-builder + apt-get update && apt-get install -y sudo make + make install-deps + source .venv/bin/activate + PKG_DIR=~/project/client make 
requirements + + - &client_check_packaging_requirements + run: + name: Ensure that the same Python requirements are used for packaging and production. + command: | + cd ~/project/client + # Fail if unstaged changes exist that are not comments (after `make requirements` in the previous run step). + git diff --ignore-matching-lines=# --exit-code + + - &client_build_debian_package + run: + name: Build debian package + command: | + cd ~/project/client + ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here + cd ~/packaging/securedrop-builder + export PKG_VERSION=1000.0 + export PKG_PATH=~/project/client + source .venv/bin/activate + make securedrop-client + + - &export_install_poetry + run: + name: Install Poetry + command: | + set -e + source /etc/os-release + if [[ "$VERSION_CODENAME" == "bullseye" ]]; then + # Install Poetry via PyPI + apt-get update && apt-get install --yes --no-install-recommends python3-pip + pip install poetry==1.6.1 + elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then + # Install Poetry via system package + apt-get update && apt-get install --yes --no-install-recommends python3-poetry + else + echo "Unsupported Debian version: $VERSION_CODENAME" + exit 1 + fi + + - &export_install_testing_dependencies + run: + name: Install testing dependencies + command: | + apt update && apt install -y git gnupg make python3-dev gnupg python3-venv libnotify-bin + cd export + poetry install --no-ansi + + - &export_install_build_dependencies + run: + name: Install build dependencies + command: | + apt update && apt install -y git make sudo + + - &export_run_unit_tests + run: + name: Install requirements and run unit tests + command: | + cd export + export PYTHONPATH=$PYTHONPATH:. 
# so alembic can get to Base metadata + make test + + - &export_run_lint + run: + name: Run lint, type checking, code formatting + command: | + make -C export check-black lint + + - &export_check_security + run: + name: Run static analysis on source code to find security issues + command: | + make -C export semgrep + + - &export_check_python_dependencies_for_vulnerabilities + run: + name: Check Python dependencies for known vulnerabilities + command: | + make -C export safety + + - &export_install_packaging_dependencies + run: + name: Install Debian packaging dependencies and download Python wheels + command: | + set -x + mkdir ~/packaging && cd ~/packaging + # local builds may not have an ssh url, so || true + git config --global --unset url.ssh://git@github.com.insteadof || true + git clone https://github.com/freedomofpress/securedrop-builder.git + cd securedrop-builder + make install-deps + source .venv/bin/activate + PKG_DIR=~/project/export make requirements + + - &export_check_packaging_requirements + run: + name: Ensure that the same Python requirements are used for packaging and production. + command: | + cd ~/project/export + # Fail if unstaged changes exist (after `make requirements` in the previous run step). 
+ git diff --ignore-matching-lines=# --exit-code + + - &export_build_debian_package + run: + name: Build debian package + command: | + cd ~/packaging/securedrop-builder + export PKG_VERSION=1000.0 + export PKG_PATH=~/project/export + source .venv/bin/activate + make securedrop-export + + - &log_install_poetry + run: + name: Install Poetry + command: | + set -e + source /etc/os-release + if [[ "$VERSION_CODENAME" == "bullseye" ]]; then + # Install Poetry via PyPI + apt-get update && apt-get install --yes --no-install-recommends python3-pip + pip install poetry==1.6.1 + elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then + # Install Poetry via system package + apt-get update && apt-get install --yes --no-install-recommends python3-poetry + else + echo "Unsupported Debian version: $VERSION_CODENAME" + exit 1 + fi + + - &log_install_testing_dependencies + run: + name: Install testing dependencies + command: | + apt-get install --yes --no-install-recommends git gnupg make + cd log + poetry install --no-ansi + + - &log_run_tests + run: + name: Install requirements and run tests + command: | + make -C log check + + - &log_install_packaging_dependencies + run: + name: Install Debian packaging dependencies and download wheels + command: | + apt-get update && apt-get install -y git git-lfs make sudo + mkdir ~/packaging && cd ~/packaging + git clone https://github.com/freedomofpress/securedrop-builder.git + cd securedrop-builder + make install-deps + source .venv/bin/activate + PKG_DIR=~/project/log make requirements + + - &log_verify_requirements + run: + name: Ensure that build-requirements.txt and requirements.txt are in sync. + command: | + cd ~/project/log + # Return 1 if unstaged changes exist (after `make requirements` in the + # previous run step), else return 0. 
+ git diff --quiet + + - &log_build_debian_package + run: + name: Build debian package + command: | + cd ~/packaging/securedrop-builder + export PKG_VERSION=1000.0 + export PKG_PATH=~/project/log + source .venv/bin/activate + make securedrop-log + + - &proxy_install_poetry + run: + name: Install Poetry + command: | + set -e + source /etc/os-release + if [[ "$VERSION_CODENAME" == "bullseye" ]]; then + # Install Poetry via PyPI + apt-get update && apt-get install --yes --no-install-recommends python3-pip + pip install poetry==1.6.1 + elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then + # Install Poetry via system package + apt-get update && apt-get install --yes --no-install-recommends python3-poetry + else + echo "Unsupported Debian version: $VERSION_CODENAME" + exit 1 + fi + + - &proxy_install_testing_dependencies + run: + name: Install testing dependencies + command: | + apt-get install --yes --no-install-recommends git gnupg make + cd proxy + poetry install --no-ansi + + - &proxy_install_build_dependencies + run: + name: Install build dependencies + command: | + set -e + apt-get update && apt-get install --yes git make sudo + + - &proxy_run_unit_tests + run: + name: Install requirements and run unit tests + command: | + cd proxy + export PYTHONPATH=$PYTHONPATH:. 
# so alembic can get to Base metadata + make test + + - &proxy_run_lint + run: + name: Run lint, type checking, code formatting + command: | + make -C proxy lint + + - &proxy_check_security + run: + name: Run static analysis on source code to find security issues + command: | + set -e + cd proxy + poetry update bandit + make bandit + + - &proxy_check_python_dependencies_for_vulnerabilities + run: + name: Check Python dependencies for known vulnerabilities + command: | + set -e + cd proxy + poetry update safety + make safety + + - &proxy_install_packaging_dependencies + run: + name: Install Debian packaging dependencies and download Python wheels + command: | + set -x + mkdir ~/packaging && cd ~/packaging + # local builds may not have an ssh url, so || true + git config --global --unset url.ssh://git@github.com.insteadof || true + git clone https://github.com/freedomofpress/securedrop-builder.git + cd securedrop-builder + apt-get update && apt-get install -y sudo make + make install-deps + source .venv/bin/activate + PKG_DIR=~/project/proxy make requirements + + - &proxy_verify_requirements + run: + name: Ensure that build-requirements.txt and requirements.txt are in sync. + command: | + cd ~/project/proxy + # Return 1 if unstaged changes exist (after `make requirements` in the + # previous run step), else return 0. 
+ git diff --quiet + + - &proxy_build_debian_package + run: + name: Build debian package + command: | + cd ~/packaging/securedrop-builder + export PKG_VERSION=1000.0 + export PKG_PATH=~/project/proxy + source .venv/bin/activate + make securedrop-proxy + + +version: 2.1 + +jobs: + client_build: + parameters: ¶meters + image: + type: string + docker: &docker + - image: debian:<< parameters.image >> + steps: + - *client_install_build_dependencies + - checkout + - *client_install_packaging_dependencies + - *client_check_packaging_requirements + - *client_build_debian_package + + client_unit-test: + parameters: *parameters + docker: *docker + steps: + - *client_install_poetry + - checkout + - *client_install_testing_dependencies + - *client_configure_locales + - *client_run_unit_tests + - store_test_results: + path: test-results + + client_integration-test: + parameters: *parameters + docker: *docker + steps: + - *client_install_poetry + - checkout + - *client_install_testing_dependencies + - *client_run_integration_tests + + client_functional-test: + parameters: *parameters + docker: *docker + steps: + - *client_install_poetry + - checkout + - *client_install_testing_dependencies + - *client_run_functional_tests + + client_lint: + parameters: *parameters + docker: *docker + steps: + - *client_install_poetry + - checkout + - *client_install_testing_dependencies + - *client_run_lint + + client_check-security: + parameters: *parameters + docker: *docker + steps: + - *client_install_poetry + - checkout + - *client_install_testing_dependencies + - *client_check_security + + client_check-python-security: + parameters: *parameters + docker: *docker + steps: + - *client_install_poetry + - checkout + - *client_install_testing_dependencies + - *client_check_python_dependencies_for_vulnerabilities + + + client_check-internationalization: + parameters: *parameters + docker: *docker + steps: + - *client_install_poetry + - checkout + - *client_install_testing_dependencies + - 
*client_check_source_strings + - *client_check_mo_repro + + export_build: + parameters: *parameters + docker: *docker + steps: + - *export_install_build_dependencies + - checkout + - *export_install_packaging_dependencies + - *export_check_packaging_requirements + - *export_build_debian_package + + export_unit-test: + parameters: *parameters + docker: *docker + steps: + - *export_install_poetry + - checkout + - *export_install_testing_dependencies + - *export_run_unit_tests + - store_test_results: + path: test-results + + export_lint: + parameters: *parameters + docker: *docker + steps: + - *export_install_poetry + - checkout + - *export_install_testing_dependencies + - *export_run_lint + + export_check-security: + parameters: *parameters + docker: *docker + steps: + - *export_install_poetry + - checkout + - *export_install_testing_dependencies + - *export_check_security + + export_check-python-security: + parameters: *parameters + docker: *docker + steps: + - *export_install_poetry + - checkout + - *export_install_testing_dependencies + - *export_check_python_dependencies_for_vulnerabilities + + log_test-bullseye: + docker: + - image: debian:bullseye + steps: + - checkout + - *log_install_poetry + - *log_install_testing_dependencies + - *log_run_tests + + log_build-bullseye: + docker: + - image: debian:bullseye + steps: + - checkout + - *log_install_packaging_dependencies + - *log_verify_requirements + - *log_build_debian_package + + proxy_build: + parameters: *parameters + docker: *docker + steps: + - checkout + - *proxy_install_build_dependencies + - *proxy_install_packaging_dependencies + - *proxy_verify_requirements + - *proxy_build_debian_package + + proxy_unit-test: + parameters: *parameters + docker: *docker + steps: + - checkout + - *proxy_install_poetry + - *proxy_install_testing_dependencies + - *proxy_run_unit_tests + - store_test_results: + path: test-results + + proxy_lint: + parameters: *parameters + docker: *docker + steps: + - checkout + - 
*proxy_install_poetry + - *proxy_install_testing_dependencies + - *proxy_run_lint + + proxy_check-security: + parameters: *parameters + docker: *docker + steps: + - checkout + - *proxy_install_poetry + - *proxy_install_testing_dependencies + - *proxy_check_security + + proxy_check-python-security: + parameters: *parameters + docker: *docker + steps: + - checkout + - *proxy_install_poetry + - *proxy_install_testing_dependencies + - *proxy_check_python_dependencies_for_vulnerabilities + + +workflows: + securedrop_client_ci: + jobs: &client_jobs + - client_unit-test: + matrix: &matrix + parameters: + image: + - bullseye + - bookworm + - client_integration-test: + matrix: *matrix + - client_functional-test: + matrix: *matrix + - client_lint: + matrix: *matrix + - client_check-security: + matrix: *matrix + - client_check-python-security: + matrix: *matrix + - client_check-internationalization: + matrix: *matrix + - client_build: + matrix: *matrix + + securedrop_export_ci: + jobs: &export_jobs + - export_unit-test: + matrix: *matrix + - export_lint: + matrix: *matrix + - export_check-security: + matrix: *matrix + - export_check-python-security: + matrix: *matrix + - export_build: + matrix: *matrix + + securedrop_log_ci: + jobs: + - log_test-bullseye + - log_build-bullseye + + securedrop_proxy_ci: + jobs: &proxy_jobs + - proxy_unit-test: + matrix: *matrix + - proxy_lint: + matrix: *matrix + - proxy_check-security: + matrix: *matrix + - proxy_check-python-security: + matrix: *matrix + - proxy_build: + matrix: *matrix + + client_nightly: + triggers: + - schedule: + cron: "0 6 * * *" + filters: + branches: + only: + - main + jobs: *client_jobs + + export_nightly: + triggers: + - schedule: + cron: "0 6 * * *" + filters: + branches: + only: + - main + jobs: *export_jobs + + proxy_nightly: + triggers: + - schedule: + cron: "0 6 * * *" + filters: + branches: + only: + - main + jobs: *proxy_jobs diff --git a/client/.circleci/config.yml b/client/.circleci/config.yml deleted file 
mode 100644 index c7ee1e2c3..000000000 --- a/client/.circleci/config.yml +++ /dev/null @@ -1,247 +0,0 @@ ---- -common-steps: - - &install_poetry - run: - name: Install Poetry - command: | - set -e - source /etc/os-release - if [[ "$VERSION_CODENAME" == "bullseye" ]]; then - # Install Poetry via PyPI - apt-get update && apt-get install --yes --no-install-recommends python3-pip - pip install poetry==1.6.1 - elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then - # Install Poetry via system package - apt-get update && apt-get install --yes --no-install-recommends python3-poetry - else - echo "Unsupported Debian version: $VERSION_CODENAME" - exit 1 - fi - - - &install_testing_dependencies - run: - name: Install testing dependencies - command: | - set -e - apt update && apt install -y git gnupg libarchive13 libmagic1 libqt5x11extras5 make python3-tk python3-dev gnupg python3-venv sqlite3 xvfb - poetry install --no-ansi - - - &configure_locales - run: - name: Configure locales - command: | - set -e - apt update && apt install -y locales - echo "en_US ISO-8859-1" >> /etc/locale.gen - echo "en_US UTF-8" >> /etc/locale.gen - locale-gen - - - &install_build_dependencies - run: - name: Install build dependencies - command: | - apt update && apt install -y git make sudo - - - &run_unit_tests - run: - name: Install requirements and run unit tests - command: | - export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata - make test-random - - - &run_integration_tests - run: - name: Install requirements and run integration tests - command: | - export PYTHONPATH=$PYTHONPATH:. # so alembic can get to Base metadata - make test-integration - - - &run_functional_tests - run: - name: Install requirements and run functional tests - command: | - export PYTHONPATH=$PYTHONPATH:. 
# so alembic can get to Base metadata - make test-functional - - - &run_lint - run: - name: Run lint, type checking, code formatting - command: | - make check-black check-isort lint mypy - - - &check_security - run: - name: Run static analysis on source code to find security issues - command: | - make semgrep bandit - - - &check_source_strings - run: - name: Check that source strings are updated - command: | - make check-strings - - - &check_mo_repro - run: - name: Check that translation machine objects are reproducible - command: | - make verify-mo - - - &check_python_dependencies_for_vulnerabilities - run: - name: Check Python dependencies for known vulnerabilities - command: | - make safety - - - &install_packaging_dependencies - run: - name: Install Debian packaging dependencies and download Python wheels - command: | - set -x - mkdir ~/packaging && cd ~/packaging - # local builds may not have an ssh url, so || true - git config --global --unset url.ssh://git@github.com.insteadof || true - git clone https://github.com/freedomofpress/securedrop-builder.git - cd securedrop-builder - apt-get update && apt-get install -y sudo make - make install-deps - source .venv/bin/activate - PKG_DIR=~/project make requirements - - - &check_packaging_requirements - run: - name: Ensure that the same Python requirements are used for packaging and production. - command: | - cd ~/project - # Fail if unstaged changes exist that are not comments (after `make requirements` in the previous run step). 
- git diff --ignore-matching-lines=# --exit-code - - - &build_debian_package - run: - name: Build debian package - command: | - cd ~/project - ./update_version.sh 1000.0 # Dummy version number, doesn't matter what we put here - cd ~/packaging/securedrop-builder - export PKG_VERSION=1000.0 - export PKG_PATH=~/project/ - source .venv/bin/activate - make securedrop-client - -version: 2.1 - -jobs: - build: - parameters: ¶meters - image: - type: string - docker: &docker - - image: debian:<< parameters.image >> - steps: - - *install_build_dependencies - - checkout - - *install_packaging_dependencies - - *check_packaging_requirements - - *build_debian_package - - unit-test: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *configure_locales - - *run_unit_tests - - store_test_results: - path: test-results - - integration-test: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *run_integration_tests - - functional-test: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *run_functional_tests - - lint: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *run_lint - - check-security: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *check_security - - check-python-security: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *check_python_dependencies_for_vulnerabilities - - - check-internationalization: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *check_source_strings - - *check_mo_repro - -workflows: - securedrop_client_ci: - jobs: &jobs - - unit-test: - matrix: 
&matrix - parameters: - image: - - bullseye - - bookworm - - integration-test: - matrix: *matrix - - functional-test: - matrix: *matrix - - lint: - matrix: *matrix - - check-security: - matrix: *matrix - - check-python-security: - matrix: *matrix - - check-internationalization: - matrix: *matrix - - build: - matrix: *matrix - - nightly: - triggers: - - schedule: - cron: "0 6 * * *" - filters: - branches: - only: - - main - jobs: *jobs diff --git a/export/.circleci/config.yml b/export/.circleci/config.yml deleted file mode 100644 index f1549fb15..000000000 --- a/export/.circleci/config.yml +++ /dev/null @@ -1,171 +0,0 @@ ---- -common-steps: - - &install_poetry - run: - name: Install Poetry - command: | - set -e - source /etc/os-release - if [[ "$VERSION_CODENAME" == "bullseye" ]]; then - # Install Poetry via PyPI - apt-get update && apt-get install --yes --no-install-recommends python3-pip - pip install poetry==1.6.1 - elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then - # Install Poetry via system package - apt-get update && apt-get install --yes --no-install-recommends python3-poetry - else - echo "Unsupported Debian version: $VERSION_CODENAME" - exit 1 - fi - - - &install_testing_dependencies - run: - name: Install testing dependencies - command: | - apt update && apt install -y git gnupg make python3-dev gnupg python3-venv libnotify-bin - poetry install --no-ansi - - - &install_build_dependencies - run: - name: Install build dependencies - command: | - apt update && apt install -y git make sudo - - - &run_unit_tests - run: - name: Install requirements and run unit tests - command: | - export PYTHONPATH=$PYTHONPATH:. 
# so alembic can get to Base metadata - make test - - - &run_lint - run: - name: Run lint, type checking, code formatting - command: | - make check-black lint - - - &check_security - run: - name: Run static analysis on source code to find security issues - command: | - make semgrep - - - &check_python_dependencies_for_vulnerabilities - run: - name: Check Python dependencies for known vulnerabilities - command: | - make safety - - - &install_packaging_dependencies - run: - name: Install Debian packaging dependencies and download Python wheels - command: | - set -x - mkdir ~/packaging && cd ~/packaging - # local builds may not have an ssh url, so || true - git config --global --unset url.ssh://git@github.com.insteadof || true - git clone https://github.com/freedomofpress/securedrop-builder.git - cd securedrop-builder - make install-deps - source .venv/bin/activate - PKG_DIR=~/project make requirements - - - &check_packaging_requirements - run: - name: Ensure that the same Python requirements are used for packaging and production. - command: | - cd ~/project - # Fail if unstaged changes exist (after `make requirements` in the previous run step). 
- git diff --ignore-matching-lines=# --exit-code - - - &build_debian_package - run: - name: Build debian package - command: | - cd ~/packaging/securedrop-builder - export PKG_VERSION=1000.0 - export PKG_PATH=~/project/ - source .venv/bin/activate - make securedrop-export - -version: 2.1 - -jobs: - build: - parameters: ¶meters - image: - type: string - docker: &docker - - image: debian:<< parameters.image >> - steps: - - *install_build_dependencies - - checkout - - *install_packaging_dependencies - - *check_packaging_requirements - - *build_debian_package - - unit-test: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *run_unit_tests - - store_test_results: - path: test-results - - lint: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *run_lint - - check-security: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *check_security - - check-python-security: - parameters: *parameters - docker: *docker - steps: - - *install_poetry - - checkout - - *install_testing_dependencies - - *check_python_dependencies_for_vulnerabilities - -workflows: - securedrop_export_ci: - jobs: &jobs - - unit-test: - matrix: &matrix - parameters: - image: - - bullseye - - bookworm - - lint: - matrix: *matrix - - check-security: - matrix: *matrix - - check-python-security: - matrix: *matrix - - build: - matrix: *matrix - - nightly: - triggers: - - schedule: - cron: "0 6 * * *" - filters: - branches: - only: - - main - jobs: *jobs diff --git a/log/.circleci/config.yml b/log/.circleci/config.yml deleted file mode 100644 index 2391acf1b..000000000 --- a/log/.circleci/config.yml +++ /dev/null @@ -1,90 +0,0 @@ ---- -common-steps: - - &install_poetry - run: - name: Install Poetry - command: | - set -e - source /etc/os-release - if [[ "$VERSION_CODENAME" == "bullseye" ]]; then - 
# Install Poetry via PyPI - apt-get update && apt-get install --yes --no-install-recommends python3-pip - pip install poetry==1.6.1 - elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then - # Install Poetry via system package - apt-get update && apt-get install --yes --no-install-recommends python3-poetry - else - echo "Unsupported Debian version: $VERSION_CODENAME" - exit 1 - fi - - - &install_testing_dependencies - run: - name: Install testing dependencies - command: | - apt-get install --yes --no-install-recommends git gnupg make - poetry install --no-ansi - - - &run_tests - run: - name: Install requirements and run tests - command: | - make check - - - &install_packaging_dependencies - run: - name: Install Debian packaging dependencies and download wheels - command: | - apt-get update && apt-get install -y git git-lfs make sudo - mkdir ~/packaging && cd ~/packaging - git clone https://github.com/freedomofpress/securedrop-builder.git - cd securedrop-builder - make install-deps - source .venv/bin/activate - PKG_DIR=~/project make requirements - - - &verify_requirements - run: - name: Ensure that build-requirements.txt and requirements.txt are in sync. - command: | - cd ~/project - # Return 1 if unstaged changes exist (after `make requirements` in the - # previous run step), else return 0. 
- git diff --quiet - - - &build_debian_package - run: - name: Build debian package - command: | - cd ~/packaging/securedrop-builder - export PKG_VERSION=1000.0 - export PKG_PATH=~/project/ - source .venv/bin/activate - make securedrop-log - -version: 2 -jobs: - test-bullseye: - docker: - - image: debian:bullseye - steps: - - checkout - - *install_poetry - - *install_testing_dependencies - - *run_tests - - build-bullseye: - docker: - - image: debian:bullseye - steps: - - checkout - - *install_packaging_dependencies - - *verify_requirements - - *build_debian_package - -workflows: - version: 2 - per_pr: - jobs: - - test-bullseye - - build-bullseye diff --git a/proxy/.circleci/config.yml b/proxy/.circleci/config.yml deleted file mode 100644 index a39a4c5b3..000000000 --- a/proxy/.circleci/config.yml +++ /dev/null @@ -1,179 +0,0 @@ ---- -common-steps: - - &install_poetry - run: - name: Install Poetry - command: | - set -e - source /etc/os-release - if [[ "$VERSION_CODENAME" == "bullseye" ]]; then - # Install Poetry via PyPI - apt-get update && apt-get install --yes --no-install-recommends python3-pip - pip install poetry==1.6.1 - elif [[ "$VERSION_CODENAME" == "bookworm" ]]; then - # Install Poetry via system package - apt-get update && apt-get install --yes --no-install-recommends python3-poetry - else - echo "Unsupported Debian version: $VERSION_CODENAME" - exit 1 - fi - - - &install_testing_dependencies - run: - name: Install testing dependencies - command: | - apt-get install --yes --no-install-recommends git gnupg make - poetry install --no-ansi - - - &install_build_dependencies - run: - name: Install build dependencies - command: | - set -e - apt-get update && apt-get install --yes git make sudo - - - &run_unit_tests - run: - name: Install requirements and run unit tests - command: | - export PYTHONPATH=$PYTHONPATH:. 
# so alembic can get to Base metadata - make test - - - &run_lint - run: - name: Run lint, type checking, code formatting - command: | - make lint - - - &check_security - run: - name: Run static analysis on source code to find security issues - command: | - set -e - poetry update bandit - make bandit - - - &check_python_dependencies_for_vulnerabilities - run: - name: Check Python dependencies for known vulnerabilities - command: | - set -e - poetry update safety - make safety - - - &install_packaging_dependencies - run: - name: Install Debian packaging dependencies and download Python wheels - command: | - set -x - mkdir ~/packaging && cd ~/packaging - # local builds may not have an ssh url, so || true - git config --global --unset url.ssh://git@github.com.insteadof || true - git clone https://github.com/freedomofpress/securedrop-builder.git - cd securedrop-builder - apt-get update && apt-get install -y sudo make - make install-deps - source .venv/bin/activate - PKG_DIR=~/project make requirements - - - &verify_requirements - run: - name: Ensure that build-requirements.txt and requirements.txt are in sync. - command: | - cd ~/project - # Return 1 if unstaged changes exist (after `make requirements` in the - # previous run step), else return 0. 
- git diff --quiet - - - &build_debian_package - run: - name: Build debian package - command: | - cd ~/packaging/securedrop-builder - export PKG_VERSION=1000.0 - export PKG_PATH=~/project/ - source .venv/bin/activate - make securedrop-proxy - -version: 2.1 - -jobs: - build: - parameters: ¶meters - image: - type: string - docker: &docker - - image: debian:<< parameters.image >> - steps: - - checkout - - *install_build_dependencies - - *install_packaging_dependencies - - *verify_requirements - - *build_debian_package - - unit-test: - parameters: *parameters - docker: *docker - steps: - - checkout - - *install_poetry - - *install_testing_dependencies - - *run_unit_tests - - store_test_results: - path: test-results - - lint: - parameters: *parameters - docker: *docker - steps: - - checkout - - *install_poetry - - *install_testing_dependencies - - *run_lint - - check-security: - parameters: *parameters - docker: *docker - steps: - - checkout - - *install_poetry - - *install_testing_dependencies - - *check_security - - check-python-security: - parameters: *parameters - docker: *docker - steps: - - checkout - - *install_poetry - - *install_testing_dependencies - - *check_python_dependencies_for_vulnerabilities - - -workflows: - securedrop_proxy_ci: - jobs: &jobs - - unit-test: - matrix: &matrix - parameters: - image: - - bullseye - - bookworm - - lint: - matrix: *matrix - - check-security: - matrix: *matrix - - check-python-security: - matrix: *matrix - - build: - matrix: *matrix - - nightly: - triggers: - - schedule: - cron: "0 6 * * *" - filters: - branches: - only: - - main - jobs: *jobs From d313d0d850609ed4e7a05f3be52f4a2df9a31e1e Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 11 Dec 2023 17:48:46 -0500 Subject: [PATCH 351/352] Centralize some files back into the repository root --- .git-blame-ignore-revs | 3 + client/.gitignore => .gitignore | 0 client/CONTRIBUTING.md => CONTRIBUTING.md | 0 client/SECURITY.md => SECURITY.md | 0 client/.git-blame-ignore-revs | 
2 - export/.gitignore | 106 ------------------ export/SECURITY.md | 8 -- log/.gitignore | 130 ---------------------- proxy/.git-blame-ignore-revs | 1 - proxy/.gitignore | 106 ------------------ 10 files changed, 3 insertions(+), 353 deletions(-) create mode 100644 .git-blame-ignore-revs rename client/.gitignore => .gitignore (100%) rename client/CONTRIBUTING.md => CONTRIBUTING.md (100%) rename client/SECURITY.md => SECURITY.md (100%) delete mode 100644 client/.git-blame-ignore-revs delete mode 100644 export/.gitignore delete mode 100644 export/SECURITY.md delete mode 100644 log/.gitignore delete mode 100644 proxy/.git-blame-ignore-revs delete mode 100644 proxy/.gitignore diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..07cafc0a1 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,3 @@ +8859979ca1ce036c1bf9ad4a96334326af2e1b4c +361ba8821de684b077dcad34bea3d16df246a8c4 +787844dc339e090aa0e9ac2241895365522c4119 diff --git a/client/.gitignore b/.gitignore similarity index 100% rename from client/.gitignore rename to .gitignore diff --git a/client/CONTRIBUTING.md b/CONTRIBUTING.md similarity index 100% rename from client/CONTRIBUTING.md rename to CONTRIBUTING.md diff --git a/client/SECURITY.md b/SECURITY.md similarity index 100% rename from client/SECURITY.md rename to SECURITY.md diff --git a/client/.git-blame-ignore-revs b/client/.git-blame-ignore-revs deleted file mode 100644 index 40638b97f..000000000 --- a/client/.git-blame-ignore-revs +++ /dev/null @@ -1,2 +0,0 @@ -8859979ca1ce036c1bf9ad4a96334326af2e1b4c -361ba8821de684b077dcad34bea3d16df246a8c4 \ No newline at end of file diff --git a/export/.gitignore b/export/.gitignore deleted file mode 100644 index 5c46f9c06..000000000 --- a/export/.gitignore +++ /dev/null @@ -1,106 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ 
-eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ -.venv39 -.venv310 - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ diff --git a/export/SECURITY.md b/export/SECURITY.md deleted file mode 100644 index 1e84430bb..000000000 --- a/export/SECURITY.md +++ /dev/null @@ -1,8 +0,0 @@ -# Security Policy - -## Reporting a Vulnerability - -If you have found a vulnerability, please **DO NOT** file a public issue. 
Please send us your report privately either via: - -- SecureDrop's public bug bounty program managed by [Bugcrowd](https://bugcrowd.com/freedomofpress) -- Email to security@freedom.press (Optionally GPG-encrypted to [734F6E707434ECA6C007E1AE82BD6C9616DABB79](https://securedrop.org/documents/6/fpf-email.asc) diff --git a/log/.gitignore b/log/.gitignore deleted file mode 100644 index aa44ee2ad..000000000 --- a/log/.gitignore +++ /dev/null @@ -1,130 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -pip-wheel-metadata/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -.python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - diff --git a/proxy/.git-blame-ignore-revs b/proxy/.git-blame-ignore-revs deleted file mode 100644 index dbeda0ff0..000000000 --- a/proxy/.git-blame-ignore-revs +++ /dev/null @@ -1 +0,0 @@ -787844dc339e090aa0e9ac2241895365522c4119 diff --git a/proxy/.gitignore b/proxy/.gitignore deleted file mode 100644 index dea2de221..000000000 --- a/proxy/.gitignore +++ /dev/null @@ -1,106 +0,0 @@ -*.sqlite - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ From 24dd559e834d0dff5f46dd48a3f309c91607aee3 Mon Sep 17 00:00:00 2001 From: Kunal Mehta Date: Mon, 11 Dec 2023 17:50:12 -0500 Subject: [PATCH 352/352] Add a short README that covers the monorepo The language is roughly taken from the client README. --- README.md | 23 +++++++++++++++++++++++ client/README.md | 5 ----- export/README.md | 4 ---- log/README.md | 2 -- proxy/README.md | 5 ----- 5 files changed, 23 insertions(+), 16 deletions(-) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 000000000..44d36c1ae --- /dev/null +++ b/README.md @@ -0,0 +1,23 @@ +> [There are many ways to contribute, and we welcome your help!](CONTRIBUTING.md) By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). 
+ +[![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-client.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-client) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/freedomofpress/securedrop) + +# securedrop-client + +The SecureDrop Client is a desktop application for journalists to communicate with sources and work with submissions on the +[SecureDrop Workstation](https://github.com/freedomofpress/securedrop-workstation). It runs within a [Qubes OS](https://www.qubes-os.org/intro/) +virtual machine that has no direct network access and opens files within individual, non-networked, disposable VMs. + +This repository contains multiple components, including: +* `client`: desktop GUI application +* `export`: logic for exporting submissions +* `log`: centralized logging +* `proxy`: restricted HTTP proxy + +Each component's folder has a README with more detail. + +To learn more about architecture and our rationale behind our Qubes OS approach, see the +[SecureDrop Workstation readme](https://github.com/freedomofpress/securedrop-workstation/blob/main/README.md). + +**IMPORTANT:** This project is currently undergoing a pilot study and should not be used in production environments. diff --git a/client/README.md b/client/README.md index d5c3b07db..0e3730190 100644 --- a/client/README.md +++ b/client/README.md @@ -1,8 +1,3 @@ -> [There are many ways to contribute, and we welcome your help!](CONTRIBUTING.md) By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). 
- -[![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-client.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-client) -[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/freedomofpress/securedrop?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) - # securedrop-client The SecureDrop Client is a desktop application for journalists to communicate with sources and work with submissions on the [SecureDrop Workstation](https://github.com/freedomofpress/securedrop-workstation). diff --git a/export/README.md b/export/README.md index 241f97b19..852a7f04f 100644 --- a/export/README.md +++ b/export/README.md @@ -1,7 +1,3 @@ -> By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). - -[![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-export.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-export) - # securedrop-export Code for exporting and printing files from the SecureDrop Qubes Workstation. diff --git a/log/README.md b/log/README.md index 88e0398db..55a645f40 100644 --- a/log/README.md +++ b/log/README.md @@ -1,5 +1,3 @@ -> By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). - # securedrop-log `securedrop-log` is part of the [SecureDrop diff --git a/proxy/README.md b/proxy/README.md index 2204be5bc..afec296b6 100644 --- a/proxy/README.md +++ b/proxy/README.md @@ -1,9 +1,4 @@ -> By contributing to this project, you agree to abide by our [Code of Conduct](https://github.com/freedomofpress/.github/blob/main/CODE_OF_CONDUCT.md). 
- ## securedrop workstation proxy - -[![CircleCI](https://circleci.com/gh/freedomofpress/securedrop-proxy.svg?style=svg)](https://circleci.com/gh/freedomofpress/securedrop-proxy) - `securedrop-proxy` is part of the [SecureDrop Workstation](https://github.com/freedomofpress/securedrop-workstation) project.