diff --git a/.circleci/config.yml b/.circleci/config.yml index 90373ec41c..c63fce7812 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -43,7 +43,9 @@ commands: steps: - run: name: Core Python Checks - command: flake8 src/tokenserver/verify.py + command: | + flake8 src/tokenserver/verify.py + flake8 tools/integration_tests rust-clippy: steps: - run: @@ -150,6 +152,8 @@ jobs: - setup-gcp-grpc - rust-check - rust-clippy + - setup-python + - python-check build-and-test: docker: diff --git a/tools/integration_tests/run.py b/tools/integration_tests/run.py index 4f6c8ab2c3..0c506a6dbf 100644 --- a/tools/integration_tests/run.py +++ b/tools/integration_tests/run.py @@ -9,8 +9,8 @@ import time -DEBUG_BUILD = 'target/debug/syncstorage' -RELEASE_BUILD = '/app/bin/syncstorage' +DEBUG_BUILD = "target/debug/syncstorage" +RELEASE_BUILD = "/app/bin/syncstorage" if __name__ == "__main__": # When run as a script, this file will execute the @@ -21,9 +21,15 @@ elif os.path.exists(RELEASE_BUILD): target_binary = RELEASE_BUILD else: - raise RuntimeError("Neither target/debug/syncstorage nor /app/bin/syncstorage were found.") - the_server_subprocess = subprocess.Popen('SYNC_MASTER_SECRET=secret0 ' + target_binary, shell=True) - ## TODO we should change this to watch for a log message on startup to know when to continue instead of sleeping for a fixed amount + raise RuntimeError( + "Neither target/debug/syncstorage " "nor /app/bin/syncstorage were found."
+ ) + the_server_subprocess = subprocess.Popen( + "SYNC_MASTER_SECRET=secret0 " + target_binary, shell=True + ) + # TODO we should change this to watch for a log message on startup + # to know when to continue instead of sleeping for a fixed amount time.sleep(20) def stop_subprocess(): diff --git a/tools/integration_tests/test_storage.py b/tools/integration_tests/test_storage.py index b0e28b2202..bf429fcddc 100644 --- a/tools/integration_tests/test_storage.py +++ b/tools/integration_tests/test_storage.py @@ -24,6 +24,7 @@ import urllib import webtest import contextlib + # import math import simplejson @@ -41,8 +42,8 @@ class BackendError(Exception): pass -WEAVE_INVALID_WBO = 8 # Invalid Weave Basic Object -WEAVE_SIZE_LIMIT_EXCEEDED = 17 # Size limit exceeded +WEAVE_INVALID_WBO = 8 # Invalid Weave Basic Object +WEAVE_SIZE_LIMIT_EXCEEDED = 17 # Size limit exceeded BATCH_MAX_IDS = 100 @@ -62,12 +63,12 @@ def json_loads(value): return simplejson.loads(value, use_decimal=True) -_PLD = '*' * 500 +_PLD = "*" * 500 _ASCII = string.ascii_letters + string.digits def randtext(size=10): - return ''.join([random.choice(_ASCII) for i in range(size)]) + return "".join([random.choice(_ASCII) for i in range(size)]) class TestStorage(StorageFunctionalTestCase): @@ -79,7 +80,7 @@ class TestStorage(StorageFunctionalTestCase): def setUp(self): super(TestStorage, self).setUp() - self.root = '/1.5/%d' % (self.user_id,) + self.root = "/1.5/%d" % (self.user_id,) # Reset the storage to a known state, aka "empty". self.retry_delete(self.root) @@ -88,7 +89,7 @@ def _switch_user(self): orig_root = self.root try: with super(TestStorage, self)._switch_user(): - self.root = '/1.5/%d' % (self.user_id,) + self.root = "/1.5/%d" % (self.user_id,) yield finally: self.root = orig_root @@ -120,7 +121,7 @@ def test_get_info_collections(self): resp = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) ts2 = resp.json["modified"] # only those collections should appear in the query. 
- resp = self.app.get(self.root + '/info/collections') + resp = self.app.get(self.root + "/info/collections") res = resp.json keys = sorted(list(res.keys())) self.assertEquals(keys, ["xxx_col1", "xxx_col2"]) @@ -131,7 +132,7 @@ def test_get_info_collections(self): resp = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) self.assertTrue(ts2 < resp.json["modified"]) ts2 = resp.json["modified"] - resp = self.app.get(self.root + '/info/collections') + resp = self.app.get(self.root + "/info/collections") res = resp.json keys = sorted(list(res.keys())) self.assertEquals(keys, ["xxx_col1", "xxx_col2"]) @@ -145,7 +146,7 @@ def test_get_collection_count(self): bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(5)] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # those counts should be reflected back in query. - resp = self.app.get(self.root + '/info/collection_counts') + resp = self.app.get(self.root + "/info/collection_counts") res = resp.json self.assertEquals(len(res), 2) self.assertEquals(res["xxx_col1"], 3) @@ -158,15 +159,15 @@ def test_bad_cache(self): # but should get purged when new collections are added # 1. get collection info - resp = self.app.get(self.root + '/info/collections') + resp = self.app.get(self.root + "/info/collections") numcols = len(resp.json) # 2. add a new collection + stuff - bso = {'id': '125', 'payload': _PLD} - self.retry_put_json(self.root + '/storage/xxxx/125', bso) + bso = {"id": "125", "payload": _PLD} + self.retry_put_json(self.root + "/storage/xxxx/125", bso) # 3. 
get collection info again, should find the new ones - resp = self.app.get(self.root + '/info/collections') + resp = self.app.get(self.root + "/info/collections") self.assertEquals(len(resp.json), numcols + 1) def test_get_collection_only(self): @@ -174,246 +175,253 @@ def test_get_collection_only(self): self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # non-existent collections appear as empty - resp = self.app.get(self.root + '/storage/nonexistent') + resp = self.app.get(self.root + "/storage/nonexistent") res = resp.json self.assertEquals(res, []) # try just getting all items at once. - resp = self.app.get(self.root + '/storage/xxx_col2') + resp = self.app.get(self.root + "/storage/xxx_col2") res = resp.json res.sort() - self.assertEquals(res, ['00', '01', '02', '03', '04']) - self.assertEquals(int(resp.headers['X-Weave-Records']), 5) + self.assertEquals(res, ["00", "01", "02", "03", "04"]) + self.assertEquals(int(resp.headers["X-Weave-Records"]), 5) # trying various filters # "ids" # Returns the ids for objects in the collection that are in the # provided comma-separated list. - res = self.app.get(self.root + '/storage/xxx_col2?ids=01,03,17') + res = self.app.get(self.root + "/storage/xxx_col2?ids=01,03,17") res = res.json res.sort() - self.assertEquals(res, ['01', '03']) + self.assertEquals(res, ["01", "03"]) # "newer" # Returns only ids for objects in the collection that have been last # modified after the timestamp given. 
- self.retry_delete(self.root + '/storage/xxx_col2') + self.retry_delete(self.root + "/storage/xxx_col2") - bso = {'id': '128', 'payload': 'x'} - res = self.retry_put_json(self.root + '/storage/xxx_col2/128', bso) + bso = {"id": "128", "payload": "x"} + res = self.retry_put_json(self.root + "/storage/xxx_col2/128", bso) ts1 = float(res.headers["X-Last-Modified"]) - bso = {'id': '129', 'payload': 'x'} - res = self.retry_put_json(self.root + '/storage/xxx_col2/129', bso) + bso = {"id": "129", "payload": "x"} + res = self.retry_put_json(self.root + "/storage/xxx_col2/129", bso) ts2 = float(res.headers["X-Last-Modified"]) self.assertTrue(ts1 < ts2) - res = self.app.get(self.root + '/storage/xxx_col2?newer=%s' % ts1) - self.assertEquals(res.json, ['129']) + res = self.app.get(self.root + "/storage/xxx_col2?newer=%s" % ts1) + self.assertEquals(res.json, ["129"]) - res = self.app.get(self.root + '/storage/xxx_col2?newer=%s' % ts2) + res = self.app.get(self.root + "/storage/xxx_col2?newer=%s" % ts2) self.assertEquals(res.json, []) res = self.app.get( - self.root + '/storage/xxx_col2?newer=%s' % (ts1 - 1)) - self.assertEquals(sorted(res.json), ['128', '129']) + self.root + "/storage/xxx_col2?newer=%s" % (ts1 - 1) + ) + self.assertEquals(sorted(res.json), ["128", "129"]) # "older" # Returns only ids for objects in the collection that have been last # modified before the timestamp given. 
- self.retry_delete(self.root + '/storage/xxx_col2') + self.retry_delete(self.root + "/storage/xxx_col2") - bso = {'id': '128', 'payload': 'x'} - res = self.retry_put_json(self.root + '/storage/xxx_col2/128', bso) + bso = {"id": "128", "payload": "x"} + res = self.retry_put_json(self.root + "/storage/xxx_col2/128", bso) ts1 = float(res.headers["X-Last-Modified"]) - bso = {'id': '129', 'payload': 'x'} - res = self.retry_put_json(self.root + '/storage/xxx_col2/129', bso) + bso = {"id": "129", "payload": "x"} + res = self.retry_put_json(self.root + "/storage/xxx_col2/129", bso) ts2 = float(res.headers["X-Last-Modified"]) self.assertTrue(ts1 < ts2) - res = self.app.get(self.root + '/storage/xxx_col2?older=%s' % ts1) + res = self.app.get(self.root + "/storage/xxx_col2?older=%s" % ts1) self.assertEquals(res.json, []) - res = self.app.get(self.root + '/storage/xxx_col2?older=%s' % ts2) - self.assertEquals(res.json, ['128']) + res = self.app.get(self.root + "/storage/xxx_col2?older=%s" % ts2) + self.assertEquals(res.json, ["128"]) res = self.app.get( - self.root + '/storage/xxx_col2?older=%s' % (ts2 + 1)) - self.assertEquals(sorted(res.json), ['128', '129']) + self.root + "/storage/xxx_col2?older=%s" % (ts2 + 1) + ) + self.assertEquals(sorted(res.json), ["128", "129"]) - qs = '?older=%s&newer=%s' % (ts2 + 1, ts1) - res = self.app.get(self.root + '/storage/xxx_col2' + qs) - self.assertEquals(sorted(res.json), ['129']) + qs = "?older=%s&newer=%s" % (ts2 + 1, ts1) + res = self.app.get(self.root + "/storage/xxx_col2" + qs) + self.assertEquals(sorted(res.json), ["129"]) # "full" # If defined, returns the full BSO, rather than just the id. 
- res = self.app.get(self.root + '/storage/xxx_col2?full=1') + res = self.app.get(self.root + "/storage/xxx_col2?full=1") keys = list(res.json[0].keys()) keys.sort() - wanted = ['id', 'modified', 'payload'] + wanted = ["id", "modified", "payload"] self.assertEquals(keys, wanted) - res = self.app.get(self.root + '/storage/xxx_col2') + res = self.app.get(self.root + "/storage/xxx_col2") self.assertTrue(isinstance(res.json, list)) # "limit" # Sets the maximum number of ids that will be returned - self.retry_delete(self.root + '/storage/xxx_col2') + self.retry_delete(self.root + "/storage/xxx_col2") bsos = [] for i in range(10): - bso = {'id': str(i).zfill(2), 'payload': 'x', 'sortindex': i} + bso = {"id": str(i).zfill(2), "payload": "x", "sortindex": i} bsos.append(bso) - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) - query_url = self.root + '/storage/xxx_col2?sort=index' + query_url = self.root + "/storage/xxx_col2?sort=index" res = self.app.get(query_url) all_items = res.json self.assertEquals(len(all_items), 10) - res = self.app.get(query_url + '&limit=2') + res = self.app.get(query_url + "&limit=2") self.assertEquals(res.json, all_items[:2]) # "offset" # Skips over items that have already been returned. 
next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&limit=3&offset=' + next_offset) + res = self.app.get(query_url + "&limit=3&offset=" + next_offset) self.assertEquals(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&offset=' + next_offset) + res = self.app.get(query_url + "&offset=" + next_offset) self.assertEquals(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) - res = self.app.get( - query_url + '&limit=10000&offset=' + next_offset) + res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) self.assertEquals(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) # "offset" again, this time ordering by descending timestamp. - query_url = self.root + '/storage/xxx_col2?sort=newest' + query_url = self.root + "/storage/xxx_col2?sort=newest" res = self.app.get(query_url) all_items = res.json self.assertEquals(len(all_items), 10) - res = self.app.get(query_url + '&limit=2') + res = self.app.get(query_url + "&limit=2") self.assertEquals(res.json, all_items[:2]) next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&limit=3&offset=' + next_offset) + res = self.app.get(query_url + "&limit=3&offset=" + next_offset) self.assertEquals(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&offset=' + next_offset) + res = self.app.get(query_url + "&offset=" + next_offset) self.assertEquals(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) - res = self.app.get( - query_url + '&limit=10000&offset=' + next_offset) + res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) self.assertEquals(res.json, all_items[5:]) # "offset" again, this time ordering by ascending timestamp. 
- query_url = self.root + '/storage/xxx_col2?sort=oldest' + query_url = self.root + "/storage/xxx_col2?sort=oldest" res = self.app.get(query_url) all_items = res.json self.assertEquals(len(all_items), 10) - res = self.app.get(query_url + '&limit=2') + res = self.app.get(query_url + "&limit=2") self.assertEquals(res.json, all_items[:2]) next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&limit=3&offset=' + next_offset) + res = self.app.get(query_url + "&limit=3&offset=" + next_offset) self.assertEquals(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&offset=' + next_offset) + res = self.app.get(query_url + "&offset=" + next_offset) self.assertEquals(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) - res = self.app.get( - query_url + '&limit=10000&offset=' + next_offset) + res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) self.assertEquals(res.json, all_items[5:]) # "offset" once more, this time with no explicit ordering - query_url = self.root + '/storage/xxx_col2?' + query_url = self.root + "/storage/xxx_col2?" 
res = self.app.get(query_url) all_items = res.json self.assertEquals(len(all_items), 10) - res = self.app.get(query_url + '&limit=2') + res = self.app.get(query_url + "&limit=2") self.assertEquals(res.json, all_items[:2]) next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&limit=3&offset=' + next_offset) + res = self.app.get(query_url + "&limit=3&offset=" + next_offset) self.assertEquals(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] - res = self.app.get(query_url + '&offset=' + next_offset) + res = self.app.get(query_url + "&offset=" + next_offset) self.assertEquals(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) - res = self.app.get( - query_url + '&limit=10000&offset=' + next_offset) + res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) # "sort" # 'newest': Orders by timestamp number (newest first) # 'oldest': Orders by timestamp number (oldest first) # 'index': Orders by the sortindex descending (highest weight first) - self.retry_delete(self.root + '/storage/xxx_col2') + self.retry_delete(self.root + "/storage/xxx_col2") - for index, sortindex in (('00', -1), ('01', 34), ('02', 12)): - bso = {'id': index, 'payload': 'x', 'sortindex': sortindex} - self.retry_post_json(self.root + '/storage/xxx_col2', [bso]) + for index, sortindex in (("00", -1), ("01", 34), ("02", 12)): + bso = {"id": index, "payload": "x", "sortindex": sortindex} + self.retry_post_json(self.root + "/storage/xxx_col2", [bso]) - res = self.app.get(self.root + '/storage/xxx_col2?sort=newest') + res = self.app.get(self.root + "/storage/xxx_col2?sort=newest") res = res.json - self.assertEquals(res, ['02', '01', '00']) + self.assertEquals(res, ["02", "01", "00"]) - res = self.app.get(self.root + '/storage/xxx_col2?sort=oldest') + res = self.app.get(self.root + "/storage/xxx_col2?sort=oldest") res = res.json - self.assertEquals(res, ['00', '01', '02']) + self.assertEquals(res, ["00", "01", 
"02"]) - res = self.app.get(self.root + '/storage/xxx_col2?sort=index') + res = self.app.get(self.root + "/storage/xxx_col2?sort=index") res = res.json - self.assertEquals(res, ['01', '02', '00']) + self.assertEquals(res, ["01", "02", "00"]) def test_alternative_formats(self): bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(5)] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # application/json - res = self.app.get(self.root + '/storage/xxx_col2', - headers=[('Accept', 'application/json')]) - self.assertEquals(res.content_type.split(";")[0], 'application/json') + res = self.app.get( + self.root + "/storage/xxx_col2", + headers=[("Accept", "application/json")], + ) + self.assertEquals(res.content_type.split(";")[0], "application/json") res = res.json res.sort() - self.assertEquals(res, ['00', '01', '02', '03', '04']) + self.assertEquals(res, ["00", "01", "02", "03", "04"]) # application/newlines - res = self.app.get(self.root + '/storage/xxx_col2', - headers=[('Accept', 'application/newlines')]) - self.assertEquals(res.content_type, 'application/newlines') + res = self.app.get( + self.root + "/storage/xxx_col2", + headers=[("Accept", "application/newlines")], + ) + self.assertEquals(res.content_type, "application/newlines") - self.assertTrue(res.body.endswith(b'\n')) - res = [json_loads(line) for line in res.body.decode( - 'utf-8').strip().split('\n')] + self.assertTrue(res.body.endswith(b"\n")) + res = [ + json_loads(line) + for line in res.body.decode("utf-8").strip().split("\n") + ] res.sort() - self.assertEquals(res, ['00', '01', '02', '03', '04']) + self.assertEquals(res, ["00", "01", "02", "03", "04"]) # unspecified format defaults to json - res = self.app.get(self.root + '/storage/xxx_col2') - self.assertEquals(res.content_type.split(";")[0], 'application/json') + res = self.app.get(self.root + "/storage/xxx_col2") + self.assertEquals(res.content_type.split(";")[0], "application/json") # unkown format gets a 406 - 
self.app.get(self.root + '/storage/xxx_col2', - headers=[('Accept', 'x/yy')], status=406) + self.app.get( + self.root + "/storage/xxx_col2", + headers=[("Accept", "x/yy")], + status=406, + ) def test_set_collection_with_if_modified_since(self): # Create five items with different timestamps. @@ -421,148 +429,170 @@ def test_set_collection_with_if_modified_since(self): bsos = [{"id": str(i).zfill(2), "payload": "xxx"}] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # Get them all, along with their timestamps. - res = self.app.get(self.root + '/storage/xxx_col2?full=true').json + res = self.app.get(self.root + "/storage/xxx_col2?full=true").json self.assertEquals(len(res), 5) timestamps = sorted([r["modified"] for r in res]) # The timestamp of the collection should be the max of all those. - self.app.get(self.root + "/storage/xxx_col2", headers={ - "X-If-Modified-Since": str(timestamps[0]) - }, status=200) - res = self.app.get(self.root + "/storage/xxx_col2", headers={ - "X-If-Modified-Since": str(timestamps[-1]) - }, status=304) + self.app.get( + self.root + "/storage/xxx_col2", + headers={"X-If-Modified-Since": str(timestamps[0])}, + status=200, + ) + res = self.app.get( + self.root + "/storage/xxx_col2", + headers={"X-If-Modified-Since": str(timestamps[-1])}, + status=304, + ) self.assertTrue("X-Last-Modified" in res.headers) def test_get_item(self): bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(5)] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # grabbing object 1 from xxx_col2 - res = self.app.get(self.root + '/storage/xxx_col2/01') + res = self.app.get(self.root + "/storage/xxx_col2/01") res = res.json keys = list(res.keys()) keys.sort() - self.assertEquals(keys, ['id', 'modified', 'payload']) - self.assertEquals(res['id'], '01') + self.assertEquals(keys, ["id", "modified", "payload"]) + self.assertEquals(res["id"], "01") # unexisting object - self.app.get(self.root + '/storage/xxx_col2/99', status=404) + 
self.app.get(self.root + "/storage/xxx_col2/99", status=404) # using x-if-modified-since header. - self.app.get(self.root + '/storage/xxx_col2/01', headers={ - "X-If-Modified-Since": str(res["modified"]) - }, status=304) - self.app.get(self.root + '/storage/xxx_col2/01', headers={ - "X-If-Modified-Since": str(res["modified"] + 1) - }, status=304) - res = self.app.get(self.root + '/storage/xxx_col2/01', headers={ - "X-If-Modified-Since": str(res["modified"] - 1) - }) - self.assertEquals(res.json['id'], '01') + self.app.get( + self.root + "/storage/xxx_col2/01", + headers={"X-If-Modified-Since": str(res["modified"])}, + status=304, + ) + self.app.get( + self.root + "/storage/xxx_col2/01", + headers={"X-If-Modified-Since": str(res["modified"] + 1)}, + status=304, + ) + res = self.app.get( + self.root + "/storage/xxx_col2/01", + headers={"X-If-Modified-Since": str(res["modified"] - 1)}, + ) + self.assertEquals(res.json["id"], "01") def test_set_item(self): # let's create an object - bso = {'payload': _PLD} - self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso) - res = self.app.get(self.root + '/storage/xxx_col2/12345') + bso = {"payload": _PLD} + self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) + res = self.app.get(self.root + "/storage/xxx_col2/12345") res = res.json - self.assertEquals(res['payload'], _PLD) + self.assertEquals(res["payload"], _PLD) # now let's update it - bso = {'payload': 'YYY'} - self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso) - res = self.app.get(self.root + '/storage/xxx_col2/12345') + bso = {"payload": "YYY"} + self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) + res = self.app.get(self.root + "/storage/xxx_col2/12345") res = res.json - self.assertEquals(res['payload'], 'YYY') + self.assertEquals(res["payload"], "YYY") def test_set_collection(self): # sending two bsos - bso1 = {'id': '12', 'payload': _PLD} - bso2 = {'id': '13', 'payload': _PLD} + bso1 = {"id": "12", "payload": _PLD} + bso2 = 
{"id": "13", "payload": _PLD} bsos = [bso1, bso2] - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # checking what we did - res = self.app.get(self.root + '/storage/xxx_col2/12') + res = self.app.get(self.root + "/storage/xxx_col2/12") res = res.json - self.assertEquals(res['payload'], _PLD) - res = self.app.get(self.root + '/storage/xxx_col2/13') + self.assertEquals(res["payload"], _PLD) + res = self.app.get(self.root + "/storage/xxx_col2/13") res = res.json - self.assertEquals(res['payload'], _PLD) + self.assertEquals(res["payload"], _PLD) # one more time, with changes - bso1 = {'id': '13', 'payload': 'XyX'} - bso2 = {'id': '14', 'payload': _PLD} + bso1 = {"id": "13", "payload": "XyX"} + bso2 = {"id": "14", "payload": _PLD} bsos = [bso1, bso2] - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # checking what we did - res = self.app.get(self.root + '/storage/xxx_col2/14') + res = self.app.get(self.root + "/storage/xxx_col2/14") res = res.json - self.assertEquals(res['payload'], _PLD) - res = self.app.get(self.root + '/storage/xxx_col2/13') + self.assertEquals(res["payload"], _PLD) + res = self.app.get(self.root + "/storage/xxx_col2/13") res = res.json - self.assertEquals(res['payload'], 'XyX') + self.assertEquals(res["payload"], "XyX") # sending two bsos with one bad sortindex - bso1 = {'id': 'one', 'payload': _PLD} - bso2 = {'id': 'two', 'payload': _PLD, - 'sortindex': 'FAIL'} + bso1 = {"id": "one", "payload": _PLD} + bso2 = {"id": "two", "payload": _PLD, "sortindex": "FAIL"} bsos = [bso1, bso2] - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) - self.app.get(self.root + '/storage/xxx_col2/two', status=404) + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) + self.app.get(self.root + "/storage/xxx_col2/two", status=404) def test_set_collection_input_formats(self): # If we send with 
application/newlines it should work. - bso1 = {'id': '12', 'payload': _PLD} - bso2 = {'id': '13', 'payload': _PLD} + bso1 = {"id": "12", "payload": _PLD} + bso2 = {"id": "13", "payload": _PLD} bsos = [bso1, bso2] body = "\n".join(json_dumps(bso) for bso in bsos) - self.app.post(self.root + '/storage/xxx_col2', body, headers={ - "Content-Type": "application/newlines" - }) + self.app.post( + self.root + "/storage/xxx_col2", + body, + headers={"Content-Type": "application/newlines"}, + ) items = self.app.get(self.root + "/storage/xxx_col2").json self.assertEquals(len(items), 2) # If we send an unknown content type, we get an error. self.retry_delete(self.root + "/storage/xxx_col2") body = json_dumps(bsos) - self.app.post(self.root + '/storage/xxx_col2', body, headers={ - "Content-Type": "application/octet-stream" - }, status=415) + self.app.post( + self.root + "/storage/xxx_col2", + body, + headers={"Content-Type": "application/octet-stream"}, + status=415, + ) items = self.app.get(self.root + "/storage/xxx_col2").json self.assertEquals(len(items), 0) def test_set_item_input_formats(self): # If we send with application/json it should work. 
- body = json_dumps({'payload': _PLD}) - self.app.put(self.root + '/storage/xxx_col2/TEST', body, headers={ - "Content-Type": "application/json" - }) + body = json_dumps({"payload": _PLD}) + self.app.put( + self.root + "/storage/xxx_col2/TEST", + body, + headers={"Content-Type": "application/json"}, + ) item = self.app.get(self.root + "/storage/xxx_col2/TEST").json self.assertEquals(item["payload"], _PLD) # If we send json with some other content type, it should fail self.retry_delete(self.root + "/storage/xxx_col2") - self.app.put(self.root + '/storage/xxx_col2/TEST', body, headers={ - "Content-Type": "application/octet-stream" - }, status=415) + self.app.put( + self.root + "/storage/xxx_col2/TEST", + body, + headers={"Content-Type": "application/octet-stream"}, + status=415, + ) self.app.get(self.root + "/storage/xxx_col2/TEST", status=404) # Unless we use text/plain, which is a special bw-compat case. - self.app.put(self.root + '/storage/xxx_col2/TEST', body, headers={ - "Content-Type": "text/plain" - }) + self.app.put( + self.root + "/storage/xxx_col2/TEST", + body, + headers={"Content-Type": "text/plain"}, + ) item = self.app.get(self.root + "/storage/xxx_col2/TEST").json self.assertEquals(item["payload"], _PLD) def test_app_newlines_when_payloads_contain_newlines(self): # Send some application/newlines with embedded newline chars. bsos = [ - {'id': '01', 'payload': 'hello\nworld'}, - {'id': '02', 'payload': '\nmarco\npolo\n'}, + {"id": "01", "payload": "hello\nworld"}, + {"id": "02", "payload": "\nmarco\npolo\n"}, ] body = "\n".join(json_dumps(bso) for bso in bsos) self.assertEquals(len(body.split("\n")), 2) - self.app.post(self.root + '/storage/xxx_col2', body, headers={ - "Content-Type": "application/newlines" - }) + self.app.post( + self.root + "/storage/xxx_col2", + body, + headers={"Content-Type": "application/newlines"}, + ) # Read them back as JSON list, check payloads. 
items = self.app.get(self.root + "/storage/xxx_col2?full=1").json self.assertEquals(len(items), 2) @@ -570,11 +600,16 @@ def test_app_newlines_when_payloads_contain_newlines(self): self.assertEquals(items[0]["payload"], bsos[0]["payload"]) self.assertEquals(items[1]["payload"], bsos[1]["payload"]) # Read them back as application/newlines, check payloads. - res = self.app.get(self.root + "/storage/xxx_col2?full=1", headers={ - "Accept": "application/newlines", - }) - items = [json_loads(line) for line in res.body.decode( - 'utf-8').strip().split('\n')] + res = self.app.get( + self.root + "/storage/xxx_col2?full=1", + headers={ + "Accept": "application/newlines", + }, + ) + items = [ + json_loads(line) + for line in res.body.decode("utf-8").strip().split("\n") + ] self.assertEquals(len(items), 2) items.sort(key=lambda bso: bso["id"]) self.assertEquals(items[0]["payload"], bsos[0]["payload"]) @@ -583,82 +618,82 @@ def test_app_newlines_when_payloads_contain_newlines(self): def test_collection_usage(self): self.retry_delete(self.root + "/storage") - bso1 = {'id': '13', 'payload': 'XyX'} - bso2 = {'id': '14', 'payload': _PLD} + bso1 = {"id": "13", "payload": "XyX"} + bso2 = {"id": "14", "payload": _PLD} bsos = [bso1, bso2] - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) - res = self.app.get(self.root + '/info/collection_usage') + res = self.app.get(self.root + "/info/collection_usage") usage = res.json - xxx_col2_size = usage['xxx_col2'] - wanted = (len(bso1['payload']) + len(bso2['payload'])) / 1024.0 + xxx_col2_size = usage["xxx_col2"] + wanted = (len(bso1["payload"]) + len(bso2["payload"])) / 1024.0 self.assertEqual(round(xxx_col2_size, 2), round(wanted, 2)) def test_delete_collection_items(self): # creating a collection of three - bso1 = {'id': '12', 'payload': _PLD} - bso2 = {'id': '13', 'payload': _PLD} - bso3 = {'id': '14', 'payload': _PLD} + bso1 = {"id": "12", "payload": _PLD} + bso2 = 
{"id": "13", "payload": _PLD} + bso3 = {"id": "14", "payload": _PLD} bsos = [bso1, bso2, bso3] - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) - res = self.app.get(self.root + '/storage/xxx_col2') + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 3) # deleting all items - self.retry_delete(self.root + '/storage/xxx_col2') - items = self.app.get(self.root + '/storage/xxx_col2').json + self.retry_delete(self.root + "/storage/xxx_col2") + items = self.app.get(self.root + "/storage/xxx_col2").json self.assertEquals(len(items), 0) # Deletes the ids for objects in the collection that are in the # provided comma-separated list. - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) - res = self.app.get(self.root + '/storage/xxx_col2') + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 3) - self.retry_delete(self.root + '/storage/xxx_col2?ids=12,14') - res = self.app.get(self.root + '/storage/xxx_col2') + self.retry_delete(self.root + "/storage/xxx_col2?ids=12,14") + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 1) - self.retry_delete(self.root + '/storage/xxx_col2?ids=13') - res = self.app.get(self.root + '/storage/xxx_col2') + self.retry_delete(self.root + "/storage/xxx_col2?ids=13") + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 0) def test_delete_item(self): # creating a collection of three - bso1 = {'id': '12', 'payload': _PLD} - bso2 = {'id': '13', 'payload': _PLD} - bso3 = {'id': '14', 'payload': _PLD} + bso1 = {"id": "12", "payload": _PLD} + bso2 = {"id": "13", "payload": _PLD} + bso3 = {"id": "14", "payload": _PLD} bsos = [bso1, bso2, bso3] - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) - res = self.app.get(self.root + '/storage/xxx_col2') + 
self.retry_post_json(self.root + "/storage/xxx_col2", bsos) + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 3) - ts = float(res.headers['X-Last-Modified']) + ts = float(res.headers["X-Last-Modified"]) # deleting item 13 - self.retry_delete(self.root + '/storage/xxx_col2/13') - res = self.app.get(self.root + '/storage/xxx_col2') + self.retry_delete(self.root + "/storage/xxx_col2/13") + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 2) # unexisting item should return a 404 - self.retry_delete(self.root + '/storage/xxx_col2/12982', status=404) + self.retry_delete(self.root + "/storage/xxx_col2/12982", status=404) # The collection should get an updated timestsamp. - res = self.app.get(self.root + '/info/collections') - self.assertTrue(ts < float(res.headers['X-Last-Modified'])) + res = self.app.get(self.root + "/info/collections") + self.assertTrue(ts < float(res.headers["X-Last-Modified"])) def test_delete_storage(self): # creating a collection of three - bso1 = {'id': '12', 'payload': _PLD} - bso2 = {'id': '13', 'payload': _PLD} - bso3 = {'id': '14', 'payload': _PLD} + bso1 = {"id": "12", "payload": _PLD} + bso2 = {"id": "13", "payload": _PLD} + bso3 = {"id": "14", "payload": _PLD} bsos = [bso1, bso2, bso3] - self.retry_post_json(self.root + '/storage/xxx_col2', bsos) - res = self.app.get(self.root + '/storage/xxx_col2') + self.retry_post_json(self.root + "/storage/xxx_col2", bsos) + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 3) # deleting all - self.retry_delete(self.root + '/storage') - items = self.app.get(self.root + '/storage/xxx_col2').json + self.retry_delete(self.root + "/storage") + items = self.app.get(self.root + "/storage/xxx_col2").json self.assertEquals(len(items), 0) - self.retry_delete(self.root + '/storage/xxx_col2', status=200) + self.retry_delete(self.root + "/storage/xxx_col2", status=200) self.assertEquals(len(items), 0) def 
test_x_timestamp_header(self): @@ -671,110 +706,132 @@ def test_x_timestamp_header(self): now = round(time.time(), 2) time.sleep(0.01) - res = self.app.get(self.root + '/storage/xxx_col2') - self.assertTrue(now <= float(res.headers['X-Weave-Timestamp'])) + res = self.app.get(self.root + "/storage/xxx_col2") + self.assertTrue(now <= float(res.headers["X-Weave-Timestamp"])) # getting the timestamp with a PUT now = round(time.time(), 2) time.sleep(0.01) - bso = {'payload': _PLD} - res = self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso) - self.assertTrue(now <= float(res.headers['X-Weave-Timestamp'])) - self.assertTrue(abs(now - - float(res.headers['X-Weave-Timestamp'])) <= 200) + bso = {"payload": _PLD} + res = self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) + self.assertTrue(now <= float(res.headers["X-Weave-Timestamp"])) + self.assertTrue( + abs(now - float(res.headers["X-Weave-Timestamp"])) <= 200 + ) # getting the timestamp with a POST now = round(time.time(), 2) time.sleep(0.01) - bso1 = {'id': '12', 'payload': _PLD} - bso2 = {'id': '13', 'payload': _PLD} + bso1 = {"id": "12", "payload": _PLD} + bso2 = {"id": "13", "payload": _PLD} bsos = [bso1, bso2] - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos) - self.assertTrue(now <= float(res.headers['X-Weave-Timestamp'])) + res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) + self.assertTrue(now <= float(res.headers["X-Weave-Timestamp"])) def test_ifunmodifiedsince(self): - bso = {'id': '12345', 'payload': _PLD} - res = self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso) + bso = {"id": "12345", "payload": _PLD} + res = self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) # Using an X-If-Unmodified-Since in the past should cause 412s. 
- ts = str(float(res.headers['X-Last-Modified']) - 1) - bso = {'id': '12345', 'payload': _PLD + "XXX"} + ts = str(float(res.headers["X-Last-Modified"]) - 1) + bso = {"id": "12345", "payload": _PLD + "XXX"} res = self.retry_put_json( - self.root + '/storage/xxx_col2/12345', bso, - headers=[('X-If-Unmodified-Since', ts)], - status=412) + self.root + "/storage/xxx_col2/12345", + bso, + headers=[("X-If-Unmodified-Since", ts)], + status=412, + ) self.assertTrue("X-Last-Modified" in res.headers) res = self.retry_delete( - self.root + '/storage/xxx_col2/12345', - headers=[('X-If-Unmodified-Since', ts)], - status=412) + self.root + "/storage/xxx_col2/12345", + headers=[("X-If-Unmodified-Since", ts)], + status=412, + ) self.assertTrue("X-Last-Modified" in res.headers) self.retry_post_json( - self.root + '/storage/xxx_col2', [bso], - headers=[('X-If-Unmodified-Since', ts)], - status=412) + self.root + "/storage/xxx_col2", + [bso], + headers=[("X-If-Unmodified-Since", ts)], + status=412, + ) self.retry_delete( - self.root + '/storage/xxx_col2?ids=12345', - headers=[('X-If-Unmodified-Since', ts)], - status=412) + self.root + "/storage/xxx_col2?ids=12345", + headers=[("X-If-Unmodified-Since", ts)], + status=412, + ) self.app.get( - self.root + '/storage/xxx_col2/12345', - headers=[('X-If-Unmodified-Since', ts)], - status=412) + self.root + "/storage/xxx_col2/12345", + headers=[("X-If-Unmodified-Since", ts)], + status=412, + ) self.app.get( - self.root + '/storage/xxx_col2', - headers=[('X-If-Unmodified-Since', ts)], - status=412) + self.root + "/storage/xxx_col2", + headers=[("X-If-Unmodified-Since", ts)], + status=412, + ) # Deleting items from a collection should give 412 even if some # other, unrelated item in the collection has been modified. 
- ts = res.headers['X-Last-Modified'] - res2 = self.retry_put_json(self.root + '/storage/xxx_col2/54321', { - 'payload': _PLD, - }) + ts = res.headers["X-Last-Modified"] + res2 = self.retry_put_json( + self.root + "/storage/xxx_col2/54321", + { + "payload": _PLD, + }, + ) self.retry_delete( - self.root + '/storage/xxx_col2?ids=12345', - headers=[('X-If-Unmodified-Since', ts)], - status=412) - ts = res2.headers['X-Last-Modified'] + self.root + "/storage/xxx_col2?ids=12345", + headers=[("X-If-Unmodified-Since", ts)], + status=412, + ) + ts = res2.headers["X-Last-Modified"] # All of those should have left the BSO unchanged - res2 = self.app.get(self.root + '/storage/xxx_col2/12345') - self.assertEquals(res2.json['payload'], _PLD) - self.assertEquals(res2.headers['X-Last-Modified'], - res.headers['X-Last-Modified']) + res2 = self.app.get(self.root + "/storage/xxx_col2/12345") + self.assertEquals(res2.json["payload"], _PLD) + self.assertEquals( + res2.headers["X-Last-Modified"], res.headers["X-Last-Modified"] + ) # Using an X-If-Unmodified-Since equal to # X-Last-Modified should allow the request to succeed. 
res = self.retry_post_json( - self.root + '/storage/xxx_col2', [bso], - headers=[('X-If-Unmodified-Since', ts)], - status=200) - ts = res.headers['X-Last-Modified'] + self.root + "/storage/xxx_col2", + [bso], + headers=[("X-If-Unmodified-Since", ts)], + status=200, + ) + ts = res.headers["X-Last-Modified"] self.app.get( - self.root + '/storage/xxx_col2/12345', - headers=[('X-If-Unmodified-Since', ts)], - status=200) + self.root + "/storage/xxx_col2/12345", + headers=[("X-If-Unmodified-Since", ts)], + status=200, + ) self.retry_delete( - self.root + '/storage/xxx_col2/12345', - headers=[('X-If-Unmodified-Since', ts)], - status=200) + self.root + "/storage/xxx_col2/12345", + headers=[("X-If-Unmodified-Since", ts)], + status=200, + ) res = self.retry_put_json( - self.root + '/storage/xxx_col2/12345', bso, - headers=[('X-If-Unmodified-Since', '0')], - status=200) - ts = res.headers['X-Last-Modified'] + self.root + "/storage/xxx_col2/12345", + bso, + headers=[("X-If-Unmodified-Since", "0")], + status=200, + ) + ts = res.headers["X-Last-Modified"] self.app.get( - self.root + '/storage/xxx_col2', - headers=[('X-If-Unmodified-Since', ts)], - status=200) + self.root + "/storage/xxx_col2", + headers=[("X-If-Unmodified-Since", ts)], + status=200, + ) self.retry_delete( - self.root + '/storage/xxx_col2?ids=12345', - headers=[('X-If-Unmodified-Since', ts)], - status=200) + self.root + "/storage/xxx_col2?ids=12345", + headers=[("X-If-Unmodified-Since", ts)], + status=200, + ) def test_quota(self): - res = self.app.get(self.root + '/info/quota') + res = self.app.get(self.root + "/info/quota") old_used = res.json[0] - bso = {'payload': _PLD} - self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso) - res = self.app.get(self.root + '/info/quota') + bso = {"payload": _PLD} + self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) + res = self.app.get(self.root + "/info/quota") used = res.json[0] self.assertEquals(used - old_used, len(_PLD) / 1024.0) @@ -791,148 
+848,162 @@ def test_overquota(self): self.config.registry.settings["storage.quota_size"] = 700 # Check the the remaining quota is correctly reported. - bso = {'payload': _PLD} - res = self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso) + bso = {"payload": _PLD} + res = self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) wanted = str(round(200 / 1024.0, 2)) - self.assertEquals(res.headers['X-Weave-Quota-Remaining'], wanted) + self.assertEquals(res.headers["X-Weave-Quota-Remaining"], wanted) # Set the quota so that they're over their limit. self.config.registry.settings["storage.quota_size"] = 10 - bso = {'payload': _PLD} - res = self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso, - status=403) - self.assertEquals(res.content_type.split(";")[0], 'application/json') + bso = {"payload": _PLD} + res = self.retry_put_json( + self.root + "/storage/xxx_col2/12345", bso, status=403 + ) + self.assertEquals(res.content_type.split(";")[0], "application/json") self.assertEquals(res.json["status"], "quota-exceeded") def test_get_collection_ttl(self): - bso = {'payload': _PLD, 'ttl': 0} - res = self.retry_put_json(self.root + '/storage/xxx_col2/12345', bso) + bso = {"payload": _PLD, "ttl": 0} + res = self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) time.sleep(1.1) - res = self.app.get(self.root + '/storage/xxx_col2') + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(res.json, []) - bso = {'payload': _PLD, 'ttl': 2} - res = self.retry_put_json(self.root + '/storage/xxx_col2/123456', bso) + bso = {"payload": _PLD, "ttl": 2} + res = self.retry_put_json(self.root + "/storage/xxx_col2/123456", bso) # it should exists now - res = self.app.get(self.root + '/storage/xxx_col2') + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 1) # trying a second put again - self.retry_put_json(self.root + '/storage/xxx_col2/123456', bso) + self.retry_put_json(self.root + 
"/storage/xxx_col2/123456", bso) - res = self.app.get(self.root + '/storage/xxx_col2') + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 1) time.sleep(2.1) - res = self.app.get(self.root + '/storage/xxx_col2') + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEquals(len(res.json), 0) def test_multi_item_post_limits(self): - res = self.app.get(self.root + '/info/configuration') + res = self.app.get(self.root + "/info/configuration") try: - max_bytes = res.json['max_post_bytes'] - max_count = res.json['max_post_records'] - max_req_bytes = res.json['max_request_bytes'] + max_bytes = res.json["max_post_bytes"] + max_count = res.json["max_post_records"] + max_req_bytes = res.json["max_request_bytes"] except KeyError: # Can't run against live server if it doesn't # report the right config options. if self.distant: raise unittest2.SkipTest - max_bytes = get_limit_config(self.config, 'max_post_bytes') - max_count = get_limit_config(self.config, 'max_post_records') - max_req_bytes = get_limit_config(self.config, 'max_request_bytes') + max_bytes = get_limit_config(self.config, "max_post_bytes") + max_count = get_limit_config(self.config, "max_post_records") + max_req_bytes = get_limit_config(self.config, "max_request_bytes") # Uploading max_count-5 small objects should succeed. - bsos = [{'id': str(i).zfill(2), - 'payload': 'X'} for i in range(max_count - 5)] - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + bsos = [ + {"id": str(i).zfill(2), "payload": "X"} + for i in range(max_count - 5) + ] + res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res['success']), max_count - 5) - self.assertEquals(len(res['failed']), 0) + self.assertEquals(len(res["success"]), max_count - 5) + self.assertEquals(len(res["failed"]), 0) # Uploading max_count+5 items should produce five failures. 
- bsos = [{'id': str(i).zfill(2), - 'payload': 'X'} for i in range(max_count + 5)] - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + bsos = [ + {"id": str(i).zfill(2), "payload": "X"} + for i in range(max_count + 5) + ] + res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res['success']), max_count) - self.assertEquals(len(res['failed']), 5) + self.assertEquals(len(res["success"]), max_count) + self.assertEquals(len(res["failed"]), 5) # Uploading items such that the last item puts us over the # cumulative limit on payload size, should produce 1 failure. # The item_size here is arbitrary, so I made it a prime in kB. - item_size = (227 * 1024) + item_size = 227 * 1024 max_items, leftover = divmod(max_bytes, item_size) - bsos = [{'id': str(i).zfill(2), 'payload': 'X' * item_size} - for i in range(max_items)] - bsos.append({'id': str(max_items), 'payload': 'X' * (leftover + 1)}) + bsos = [ + {"id": str(i).zfill(2), "payload": "X" * item_size} + for i in range(max_items) + ] + bsos.append({"id": str(max_items), "payload": "X" * (leftover + 1)}) # Check that we don't go over the limit on raw request bytes, # which would get us rejected in production with a 413. 
self.assertTrue(len(json.dumps(bsos)) < max_req_bytes) - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res['success']), max_items) - self.assertEquals(len(res['failed']), 1) + self.assertEquals(len(res["success"]), max_items) + self.assertEquals(len(res["failed"]), 1) def test_weird_args(self): # pushing some data in xxx_col2 - bsos = [{'id': str(i).zfill(2), 'payload': _PLD} for i in range(10)] - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + bsos = [{"id": str(i).zfill(2), "payload": _PLD} for i in range(10)] + res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json # trying weird args and make sure the server returns 400s # Note: "Offset" is a string since the bsoid could be anything. # skipping that for now. - args = ('newer', 'older', 'limit') + args = ("newer", "older", "limit") for arg in args: value = randtext() - self.app.get(self.root + '/storage/xxx_col2?%s=%s' % (arg, value), - status=400) + self.app.get( + self.root + "/storage/xxx_col2?%s=%s" % (arg, value), + status=400, + ) # what about a crazy ids= string ? 
- ids = ','.join([randtext(10) for i in range(100)]) - res = self.app.get(self.root + '/storage/xxx_col2?ids=%s' % ids) + ids = ",".join([randtext(10) for i in range(100)]) + res = self.app.get(self.root + "/storage/xxx_col2?ids=%s" % ids) self.assertEquals(res.json, []) # trying unexpected args - they should not break - self.app.get(self.root + '/storage/xxx_col2?blabla=1', - status=200) + self.app.get(self.root + "/storage/xxx_col2?blabla=1", status=200) def test_guid_deletion(self): # pushing some data in xxx_col2 - bsos = [{'id': '6820f3ca-6e8a-4ff4-8af7-8b3625d7d65%d' % i, - 'payload': _PLD} for i in range(5)] - res = self.retry_post_json(self.root + '/storage/passwords', bsos) + bsos = [ + { + "id": "6820f3ca-6e8a-4ff4-8af7-8b3625d7d65%d" % i, + "payload": _PLD, + } + for i in range(5) + ] + res = self.retry_post_json(self.root + "/storage/passwords", bsos) res = res.json self.assertEquals(len(res["success"]), 5) # now deleting some of them - ids = ','.join(['6820f3ca-6e8a-4ff4-8af7-8b3625d7d65%d' % i - for i in range(2)]) + ids = ",".join( + ["6820f3ca-6e8a-4ff4-8af7-8b3625d7d65%d" % i for i in range(2)] + ) - self.retry_delete(self.root + '/storage/passwords?ids=%s' % ids) + self.retry_delete(self.root + "/storage/passwords?ids=%s" % ids) - res = self.app.get(self.root + '/storage/passwords?ids=%s' % ids) + res = self.app.get(self.root + "/storage/passwords?ids=%s" % ids) self.assertEqual(len(res.json), 0) - res = self.app.get(self.root + '/storage/passwords') + res = self.app.get(self.root + "/storage/passwords") self.assertEqual(len(res.json), 3) def test_specifying_ids_with_percent_encoded_query_string(self): # create some items - bsos = [{'id': 'test-%d' % i, 'payload': _PLD} for i in range(5)] - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + bsos = [{"id": "test-%d" % i, "payload": _PLD} for i in range(5)] + res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json self.assertEquals(len(res["success"]), 5) # 
now delete some of them - ids = ','.join(['test-%d' % i for i in range(2)]) + ids = ",".join(["test-%d" % i for i in range(2)]) ids = urllib.request.quote(ids) - self.retry_delete(self.root + '/storage/xxx_col2?ids=%s' % ids) + self.retry_delete(self.root + "/storage/xxx_col2?ids=%s" % ids) # check that the correct items were deleted - res = self.app.get(self.root + '/storage/xxx_col2?ids=%s' % ids) + res = self.app.get(self.root + "/storage/xxx_col2?ids=%s" % ids) self.assertEqual(len(res.json), 0) - res = self.app.get(self.root + '/storage/xxx_col2') + res = self.app.get(self.root + "/storage/xxx_col2") self.assertEqual(len(res.json), 3) def test_timestamp_numbers_are_decimals(self): @@ -942,11 +1013,11 @@ def test_timestamp_numbers_are_decimals(self): self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # make sure the server returns only proper precision timestamps. - resp = self.app.get(self.root + '/storage/xxx_col2?full=1') + resp = self.app.get(self.root + "/storage/xxx_col2?full=1") bsos = json_loads(resp.body) timestamps = [] for bso in bsos: - ts = bso['modified'] + ts = bso["modified"] # timestamps could be on the hundred seconds (.10) or on the # second (.0) and the zero could be dropped. We just don't want # anything beyond milisecond. @@ -960,100 +1031,107 @@ def test_timestamp_numbers_are_decimals(self): # Returns only ids for objects in the collection that have been # last modified since the timestamp given. 
- res = self.app.get(self.root + '/storage/xxx_col2?newer=%s' % ts) + res = self.app.get(self.root + "/storage/xxx_col2?newer=%s" % ts) res = res.json try: - self.assertEquals(sorted(res), ['03', '04']) + self.assertEquals(sorted(res), ["03", "04"]) except AssertionError: # need to display the whole collection to understand the issue - msg = 'Timestamp used: %s' % ts - msg += ' ' + self.app.get(self.root + - '/storage/xxx_col2?full=1').body - msg += ' Timestamps received: %s' % str(timestamps) - msg += ' Result of newer query: %s' % res + msg = "Timestamp used: %s" % ts + msg += ( + " " + self.app.get(self.root + "/storage/xxx_col2?full=1").body + ) + msg += " Timestamps received: %s" % str(timestamps) + msg += " Result of newer query: %s" % res raise AssertionError(msg) def test_strict_newer(self): # send two bsos in the 'meh' collection - bso1 = {'id': '01', 'payload': _PLD} - bso2 = {'id': '02', 'payload': _PLD} + bso1 = {"id": "01", "payload": _PLD} + bso2 = {"id": "02", "payload": _PLD} bsos = [bso1, bso2] - res = self.retry_post_json(self.root + '/storage/xxx_meh', bsos) + res = self.retry_post_json(self.root + "/storage/xxx_meh", bsos) ts = float(res.headers["X-Last-Modified"]) # send two more bsos - bso3 = {'id': '03', 'payload': _PLD} - bso4 = {'id': '04', 'payload': _PLD} + bso3 = {"id": "03", "payload": _PLD} + bso4 = {"id": "04", "payload": _PLD} bsos = [bso3, bso4] - res = self.retry_post_json(self.root + '/storage/xxx_meh', bsos) + res = self.retry_post_json(self.root + "/storage/xxx_meh", bsos) # asking for bsos using newer=ts where newer is the timestamp # of bso 1 and 2, should not return them - res = self.app.get(self.root + '/storage/xxx_meh?newer=%s' % ts) + res = self.app.get(self.root + "/storage/xxx_meh?newer=%s" % ts) res = res.json - self.assertEquals(sorted(res), ['03', '04']) + self.assertEquals(sorted(res), ["03", "04"]) def test_strict_older(self): # send two bsos in the 'xxx_meh' collection - bso1 = {'id': '01', 'payload': _PLD} - bso2 
= {'id': '02', 'payload': _PLD} + bso1 = {"id": "01", "payload": _PLD} + bso2 = {"id": "02", "payload": _PLD} bsos = [bso1, bso2] - res = self.retry_post_json(self.root + '/storage/xxx_meh', bsos) + res = self.retry_post_json(self.root + "/storage/xxx_meh", bsos) # send two more bsos - bso3 = {'id': '03', 'payload': _PLD} - bso4 = {'id': '04', 'payload': _PLD} + bso3 = {"id": "03", "payload": _PLD} + bso4 = {"id": "04", "payload": _PLD} bsos = [bso3, bso4] - res = self.retry_post_json(self.root + '/storage/xxx_meh', bsos) + res = self.retry_post_json(self.root + "/storage/xxx_meh", bsos) ts = float(res.headers["X-Last-Modified"]) # asking for bsos using older=ts where older is the timestamp # of bso 3 and 4, should not return them - res = self.app.get(self.root + '/storage/xxx_meh?older=%s' % ts) + res = self.app.get(self.root + "/storage/xxx_meh?older=%s" % ts) res = res.json - self.assertEquals(sorted(res), ['01', '02']) + self.assertEquals(sorted(res), ["01", "02"]) def test_handling_of_invalid_json_in_bso_uploads(self): # Single upload with JSON that's not a BSO. 
bso = "notabso" - res = self.retry_put_json(self.root + '/storage/xxx_col2/invalid', bso, - status=400) + res = self.retry_put_json( + self.root + "/storage/xxx_col2/invalid", bso, status=400 + ) self.assertEquals(res.json, WEAVE_INVALID_WBO) bso = 42 - res = self.retry_put_json(self.root + '/storage/xxx_col2/invalid', bso, - status=400) + res = self.retry_put_json( + self.root + "/storage/xxx_col2/invalid", bso, status=400 + ) self.assertEquals(res.json, WEAVE_INVALID_WBO) - bso = {'id': ["01", "02"], 'payload': {'3': '4'}} - res = self.retry_put_json(self.root + '/storage/xxx_col2/invalid', bso, - status=400) + bso = {"id": ["01", "02"], "payload": {"3": "4"}} + res = self.retry_put_json( + self.root + "/storage/xxx_col2/invalid", bso, status=400 + ) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Batch upload with JSON that's not a list of BSOs bsos = "notalist" - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos, - status=400) + res = self.retry_post_json( + self.root + "/storage/xxx_col2", bsos, status=400 + ) self.assertEquals(res.json, WEAVE_INVALID_WBO) bsos = 42 - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos, - status=400) + res = self.retry_post_json( + self.root + "/storage/xxx_col2", bsos, status=400 + ) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Batch upload a list with something that's not a valid data dict. # It should fail out entirely, as the input is seriously broken. - bsos = [{'id': '01', 'payload': 'GOOD'}, "BAD"] - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos, - status=400) + bsos = [{"id": "01", "payload": "GOOD"}, "BAD"] + res = self.retry_post_json( + self.root + "/storage/xxx_col2", bsos, status=400 + ) # Batch upload a list with something that's an invalid BSO. # It should process the good entry and fail for the bad. 
- bsos = [{'id': '01', 'payload': 'GOOD'}, {'id': '02', 'invalid': 'ya'}] - res = self.retry_post_json(self.root + '/storage/xxx_col2', bsos) + bsos = [{"id": "01", "payload": "GOOD"}, {"id": "02", "invalid": "ya"}] + res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res['success']), 1) - self.assertEquals(len(res['failed']), 1) + self.assertEquals(len(res["success"]), 1) + self.assertEquals(len(res["failed"]), 1) def test_handling_of_invalid_bso_fields(self): coll_url = self.root + "/storage/xxx_col2" @@ -1089,43 +1167,37 @@ def test_handling_of_invalid_bso_fields(self): bso = {"id": "TEST", "payload": "testing", "sortindex": "xxx_meh"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) - res = self.retry_put_json(coll_url + "/" + bso["id"], bso, - status=400) + res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Invalid sortindex - not an integer bso = {"id": "TEST", "payload": "testing", "sortindex": "2.6"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) - res = self.retry_put_json(coll_url + "/" + bso["id"], bso, - status=400) + res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Invalid sortindex - larger than max value bso = {"id": "TEST", "payload": "testing", "sortindex": "1" + "0" * 9} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) - res = self.retry_put_json(coll_url + "/" + bso["id"], bso, - status=400) + res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Invalid payload - not a string bso = {"id": "TEST", "payload": 42} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not 
res.json["success"]) - res = self.retry_put_json(coll_url + "/" + bso["id"], bso, - status=400) + res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Invalid ttl - not an integer bso = {"id": "TEST", "payload": "testing", "ttl": "eh?"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) - res = self.retry_put_json(coll_url + "/" + bso["id"], bso, - status=400) + res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Invalid ttl - not an integer bso = {"id": "TEST", "payload": "testing", "ttl": "4.2"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) - res = self.retry_put_json(coll_url + "/" + bso["id"], bso, - status=400) + res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) self.assertEquals(res.json, WEAVE_INVALID_WBO) # Invalid BSO - unknown field bso = {"id": "TEST", "unexpected": "spanish-inquisition"} @@ -1168,8 +1240,9 @@ def test_that_batch_deletes_are_limited_to_max_number_of_ids(self): # Deleting with more than the limit fails. self.retry_put_json(self.root + "/storage/xxx_col2/01", bso) ids = ",".join(str(i).zfill(2) for i in range(BATCH_MAX_IDS + 1)) - self.retry_delete(self.root + "/storage/xxx_col2?ids=" + ids, - status=400) + self.retry_delete( + self.root + "/storage/xxx_col2?ids=" + ids, status=400 + ) def test_that_expired_items_can_be_overwritten_via_PUT(self): # Upload something with a small ttl. @@ -1186,8 +1259,12 @@ def test_if_modified_since_on_info_views(self): # Store something, so the views have a modified time > 0. 
bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(3)] self.retry_post_json(self.root + "/storage/xxx_col1", bsos) - INFO_VIEWS = ("/info/collections", "/info/quota", - "/info/collection_usage", "/info/collection_counts") + INFO_VIEWS = ( + "/info/collections", + "/info/quota", + "/info/collection_usage", + "/info/collection_counts", + ) # Get the initial last-modified version. r = self.app.get(self.root + "/info/collections") ts1 = float(r.headers["X-Last-Modified"]) @@ -1266,8 +1343,9 @@ def test_update_of_ttl_without_sending_data(self): def test_bulk_update_of_ttls_without_sending_data(self): # Create 5 BSOs with a ttl of 1 second. - bsos = [{"id": str(i).zfill(2), - "payload": "x", "ttl": 1} for i in range(5)] + bsos = [ + {"id": str(i).zfill(2), "payload": "x", "ttl": 1} for i in range(5) + ] r = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) ts1 = float(r.headers["X-Last-Modified"]) # Before they expire, bulk-update the ttl to something longer. @@ -1301,76 +1379,99 @@ def test_bulk_update_of_ttls_without_sending_data(self): def test_that_negative_integer_fields_are_not_accepted(self): # ttls cannot be negative - self.retry_put_json(self.root + "/storage/xxx_col2/TEST", { - "payload": "TEST", - "ttl": -1, - }, status=400) + self.retry_put_json( + self.root + "/storage/xxx_col2/TEST", + { + "payload": "TEST", + "ttl": -1, + }, + status=400, + ) # limit cannot be negative - self.retry_put_json(self.root + "/storage/xxx_col2/TEST", - {"payload": "X"}) + self.retry_put_json( + self.root + "/storage/xxx_col2/TEST", {"payload": "X"} + ) self.app.get(self.root + "/storage/xxx_col2?limit=-1", status=400) # X-If-Modified-Since cannot be negative - self.app.get(self.root + "/storage/xxx_col2", headers={ - "X-If-Modified-Since": "-3", - }, status=400) + self.app.get( + self.root + "/storage/xxx_col2", + headers={ + "X-If-Modified-Since": "-3", + }, + status=400, + ) # X-If-Unmodified-Since cannot be negative - self.retry_put_json(self.root + 
"/storage/xxx_col2/TEST", { - "payload": "TEST", - }, headers={ - "X-If-Unmodified-Since": "-3", - }, status=400) + self.retry_put_json( + self.root + "/storage/xxx_col2/TEST", + { + "payload": "TEST", + }, + headers={ + "X-If-Unmodified-Since": "-3", + }, + status=400, + ) # sortindex actually *can* be negative - self.retry_put_json(self.root + "/storage/xxx_col2/TEST", { - "payload": "TEST", - "sortindex": -42, - }, status=200) + self.retry_put_json( + self.root + "/storage/xxx_col2/TEST", + { + "payload": "TEST", + "sortindex": -42, + }, + status=200, + ) def test_meta_global_sanity(self): # Memcache backend is configured to store 'meta' in write-through # cache, so we want to check it explicitly. We might as well put it # in the base tests because there's nothing memcached-specific here. - self.app.get(self.root + '/storage/meta/global', status=404) - res = self.app.get(self.root + '/storage/meta') + self.app.get(self.root + "/storage/meta/global", status=404) + res = self.app.get(self.root + "/storage/meta") self.assertEquals(res.json, []) - self.retry_put_json(self.root + '/storage/meta/global', - {'payload': 'blob'}) - res = self.app.get(self.root + '/storage/meta') - self.assertEquals(res.json, ['global']) - res = self.app.get(self.root + '/storage/meta/global') - self.assertEquals(res.json['payload'], 'blob') + self.retry_put_json( + self.root + "/storage/meta/global", {"payload": "blob"} + ) + res = self.app.get(self.root + "/storage/meta") + self.assertEquals(res.json, ["global"]) + res = self.app.get(self.root + "/storage/meta/global") + self.assertEquals(res.json["payload"], "blob") # It should not have extra keys. keys = list(res.json.keys()) keys.sort() - self.assertEquals(keys, ['id', 'modified', 'payload']) + self.assertEquals(keys, ["id", "modified", "payload"]) # It should have a properly-formatted "modified" field. 
modified_re = r"['\"]modified['\"]:\s*[0-9]+\.[0-9][0-9]\s*[,}]" - self.assertTrue(re.search(modified_re, res.body.decode('utf-8'))) + self.assertTrue(re.search(modified_re, res.body.decode("utf-8"))) # Any client-specified "modified" field should be ignored - res = self.retry_put_json(self.root + '/storage/meta/global', - {'payload': 'blob', 'modified': 12}) - ts = float(res.headers['X-Weave-Timestamp']) - res = self.app.get(self.root + '/storage/meta/global') - self.assertEquals(res.json['modified'], ts) + res = self.retry_put_json( + self.root + "/storage/meta/global", + {"payload": "blob", "modified": 12}, + ) + ts = float(res.headers["X-Weave-Timestamp"]) + res = self.app.get(self.root + "/storage/meta/global") + self.assertEquals(res.json["modified"], ts) def test_that_404_responses_have_a_json_body(self): - res = self.app.get(self.root + '/nonexistent/url', status=404) + res = self.app.get(self.root + "/nonexistent/url", status=404) self.assertEquals(res.content_type, "application/json") self.assertEquals(res.json, 0) def test_that_internal_server_fields_are_not_echoed(self): - self.retry_post_json(self.root + '/storage/xxx_col1', - [{'id': 'one', 'payload': 'blob'}]) - self.retry_put_json(self.root + '/storage/xxx_col1/two', - {'payload': 'blub'}) - res = self.app.get(self.root + '/storage/xxx_col1?full=1') + self.retry_post_json( + self.root + "/storage/xxx_col1", [{"id": "one", "payload": "blob"}] + ) + self.retry_put_json( + self.root + "/storage/xxx_col1/two", {"payload": "blub"} + ) + res = self.app.get(self.root + "/storage/xxx_col1?full=1") self.assertEquals(len(res.json), 2) for item in res.json: self.assertTrue("id" in item) self.assertTrue("payload" in item) self.assertFalse("payload_size" in item) self.assertFalse("ttl" in item) - for id in ('one', 'two'): - res = self.app.get(self.root + '/storage/xxx_col1/' + id) + for id in ("one", "two"): + res = self.app.get(self.root + "/storage/xxx_col1/" + id) self.assertTrue("id" in res.json) 
self.assertTrue("payload" in res.json) self.assertFalse("payload_size" in res.json) @@ -1388,7 +1489,7 @@ def test_accessing_info_collections_with_an_expired_token(self): ts = float(resp.headers["X-Last-Modified"]) # Check that we can read the info correctly. - resp = self.app.get(self.root + '/info/collections') + resp = self.app.get(self.root + "/info/collections") self.assertEquals(list(resp.json.keys()), ["xxx_col1"]) self.assertEquals(resp.json["xxx_col1"], ts) @@ -1403,7 +1504,7 @@ def test_accessing_info_collections_with_an_expired_token(self): "expires": exp, "hashed_fxa_uid": self.hashed_fxa_uid, "fxa_uid": self.fxa_uid, - "fxa_kid": self.fxa_kid + "fxa_kid": self.fxa_kid, } self.auth_token = tm.make_token(data) self.auth_secret = tm.get_derived_secret(self.auth_token) @@ -1414,7 +1515,7 @@ def test_accessing_info_collections_with_an_expired_token(self): self.app.get(self.root + "/storage/xxx_col1", status=401) # But it still allows access to /info/collections. - resp = self.app.get(self.root + '/info/collections') + resp = self.app.get(self.root + "/info/collections") self.assertEquals(list(resp.json.keys()), ["xxx_col1"]) self.assertEquals(resp.json["xxx_col1"], ts) @@ -1424,11 +1525,12 @@ def test_pagination_with_newer_and_sort_by_oldest(self): bsos = [] timestamps = [] for i in range(NUM_ITEMS): - bso = {'id': str(i).zfill(2), 'payload': 'x'} + bso = {"id": str(i).zfill(2), "payload": "x"} bsos.append(bso) if i % 4 == 3: - res = self.retry_post_json(self.root + '/storage/xxx_col2', - bsos) + res = self.retry_post_json( + self.root + "/storage/xxx_col2", bsos + ) ts = float(res.headers["X-Last-Modified"]) timestamps.append((i, ts)) bsos = [] @@ -1437,29 +1539,32 @@ def test_pagination_with_newer_and_sort_by_oldest(self): # to hit various boundary conditions. 
for limit in (2, 3, 4, 5, 6): for (start, ts) in timestamps: - query_url = self.root + \ - '/storage/xxx_col2?full=true&sort=oldest' - query_url += '&newer=%s&limit=%s' % (ts, limit) + query_url = ( + self.root + "/storage/xxx_col2?full=true&sort=oldest" + ) + query_url += "&newer=%s&limit=%s" % (ts, limit) # Paginated-ly fetch all items. items = [] res = self.app.get(query_url) for item in res.json: if items: - assert items[-1]['modified'] <= item['modified'] + assert items[-1]["modified"] <= item["modified"] items.append(item) - next_offset = res.headers.get('X-Weave-Next-Offset') + next_offset = res.headers.get("X-Weave-Next-Offset") while next_offset is not None: res = self.app.get(query_url + "&offset=" + next_offset) for item in res.json: - assert items[-1]['modified'] <= item['modified'] + assert items[-1]["modified"] <= item["modified"] items.append(item) - next_offset = res.headers.get('X-Weave-Next-Offset') + next_offset = res.headers.get("X-Weave-Next-Offset") # They should all be in order, starting from the item # *after* the one that was used for the newer= timestamp. - self.assertEquals(sorted(int(item['id']) for item in items), - list(range(start + 1, NUM_ITEMS))) + self.assertEquals( + sorted(int(item["id"]) for item in items), + list(range(start + 1, NUM_ITEMS)), + ) def test_pagination_with_older_and_sort_by_newest(self): # Twelve bsos with three different modification times. 
@@ -1467,11 +1572,12 @@ def test_pagination_with_older_and_sort_by_newest(self): bsos = [] timestamps = [] for i in range(NUM_ITEMS): - bso = {'id': str(i).zfill(2), 'payload': 'x'} + bso = {"id": str(i).zfill(2), "payload": "x"} bsos.append(bso) if i % 4 == 3: - res = self.retry_post_json(self.root + '/storage/xxx_col2', - bsos) + res = self.retry_post_json( + self.root + "/storage/xxx_col2", bsos + ) ts = float(res.headers["X-Last-Modified"]) timestamps.append((i - 3, ts)) bsos = [] @@ -1480,115 +1586,120 @@ def test_pagination_with_older_and_sort_by_newest(self): # to hit various boundary conditions. for limit in (2, 3, 4, 5, 6): for (start, ts) in timestamps: - query_url = self.root + \ - '/storage/xxx_col2?full=true&sort=newest' - query_url += '&older=%s&limit=%s' % (ts, limit) + query_url = ( + self.root + "/storage/xxx_col2?full=true&sort=newest" + ) + query_url += "&older=%s&limit=%s" % (ts, limit) # Paginated-ly fetch all items. items = [] res = self.app.get(query_url) for item in res.json: if items: - assert items[-1]['modified'] >= item['modified'] + assert items[-1]["modified"] >= item["modified"] items.append(item) - next_offset = res.headers.get('X-Weave-Next-Offset') + next_offset = res.headers.get("X-Weave-Next-Offset") while next_offset is not None: res = self.app.get(query_url + "&offset=" + next_offset) for item in res.json: - assert items[-1]['modified'] >= item['modified'] + assert items[-1]["modified"] >= item["modified"] items.append(item) - next_offset = res.headers.get('X-Weave-Next-Offset') + next_offset = res.headers.get("X-Weave-Next-Offset") # They should all be in order, up to the item *before* # the one that was used for the older= timestamp. 
- self.assertEquals(sorted(int(item['id']) for item in items), - list(range(0, start))) + self.assertEquals( + sorted(int(item["id"]) for item in items), + list(range(0, start)), + ) def assertCloseEnough(self, val1, val2, delta=0.05): if abs(val1 - val2) < delta: return True - raise AssertionError("abs(%.2f - %.2f) = %.2f > %.2f" - % (val1, val2, abs(val1 - val2), delta)) + raise AssertionError( + "abs(%.2f - %.2f) = %.2f > %.2f" + % (val1, val2, abs(val1 - val2), delta) + ) def test_batches(self): - endpoint = self.root + '/storage/xxx_col2' + endpoint = self.root + "/storage/xxx_col2" - bso1 = {'id': '12', 'payload': 'elegance'} - bso2 = {'id': '13', 'payload': 'slovenly'} + bso1 = {"id": "12", "payload": "elegance"} + bso2 = {"id": "13", "payload": "slovenly"} bsos = [bso1, bso2] self.retry_post_json(endpoint, bsos) - resp = self.app.get(endpoint + '/12') - orig_modified = resp.headers['X-Last-Modified'] + resp = self.app.get(endpoint + "/12") + orig_modified = resp.headers["X-Last-Modified"] - bso3 = {'id': 'a', 'payload': 'internal'} - bso4 = {'id': 'b', 'payload': 'pancreas'} - resp = self.retry_post_json(endpoint + '?batch=true', [bso3, bso4]) + bso3 = {"id": "a", "payload": "internal"} + bso4 = {"id": "b", "payload": "pancreas"} + resp = self.retry_post_json(endpoint + "?batch=true", [bso3, bso4]) batch = resp.json["batch"] # The collection should not be reported as modified. - self.assertEquals(orig_modified, resp.headers['X-Last-Modified']) + self.assertEquals(orig_modified, resp.headers["X-Last-Modified"]) # And reading from it shouldn't show the new records yet. 
resp = self.app.get(endpoint) res = resp.json res.sort() - self.assertEquals(res, ['12', '13']) - self.assertEquals(int(resp.headers['X-Weave-Records']), 2) - self.assertEquals(orig_modified, resp.headers['X-Last-Modified']) - - bso5 = {'id': 'c', 'payload': 'tinsel'} - bso6 = {'id': '13', 'payload': 'portnoy'} - bso0 = {'id': '14', 'payload': 'itsybitsy'} - commit = '?batch={0}&commit=true'.format(batch) + self.assertEquals(res, ["12", "13"]) + self.assertEquals(int(resp.headers["X-Weave-Records"]), 2) + self.assertEquals(orig_modified, resp.headers["X-Last-Modified"]) + + bso5 = {"id": "c", "payload": "tinsel"} + bso6 = {"id": "13", "payload": "portnoy"} + bso0 = {"id": "14", "payload": "itsybitsy"} + commit = "?batch={0}&commit=true".format(batch) resp = self.retry_post_json(endpoint + commit, [bso5, bso6, bso0]) - committed = resp.json['modified'] - self.assertEquals(resp.json['modified'], - float(resp.headers['X-Last-Modified'])) + committed = resp.json["modified"] + self.assertEquals( + resp.json["modified"], float(resp.headers["X-Last-Modified"]) + ) # make sure /info/collections got updated - resp = self.app.get(self.root + '/info/collections') - self.assertEquals(float(resp.headers['X-Last-Modified']), committed) - self.assertEquals(resp.json['xxx_col2'], committed) + resp = self.app.get(self.root + "/info/collections") + self.assertEquals(float(resp.headers["X-Last-Modified"]), committed) + self.assertEquals(resp.json["xxx_col2"], committed) # make sure the changes applied resp = self.app.get(endpoint) res = resp.json res.sort() - self.assertEquals(res, ['12', '13', '14', 'a', 'b', 'c']) - self.assertEquals(int(resp.headers['X-Weave-Records']), 6) - resp = self.app.get(endpoint + '/13') - self.assertEquals(resp.json['payload'], 'portnoy') - self.assertEquals(committed, float(resp.headers['X-Last-Modified'])) - self.assertEquals(committed, resp.json['modified']) - resp = self.app.get(endpoint + '/c') - self.assertEquals(resp.json['payload'], 'tinsel') - 
self.assertEquals(committed, resp.json['modified']) - resp = self.app.get(endpoint + '/14') - self.assertEquals(resp.json['payload'], 'itsybitsy') - self.assertEquals(committed, resp.json['modified']) + self.assertEquals(res, ["12", "13", "14", "a", "b", "c"]) + self.assertEquals(int(resp.headers["X-Weave-Records"]), 6) + resp = self.app.get(endpoint + "/13") + self.assertEquals(resp.json["payload"], "portnoy") + self.assertEquals(committed, float(resp.headers["X-Last-Modified"])) + self.assertEquals(committed, resp.json["modified"]) + resp = self.app.get(endpoint + "/c") + self.assertEquals(resp.json["payload"], "tinsel") + self.assertEquals(committed, resp.json["modified"]) + resp = self.app.get(endpoint + "/14") + self.assertEquals(resp.json["payload"], "itsybitsy") + self.assertEquals(committed, resp.json["modified"]) # empty commit POST - bso7 = {'id': 'a', 'payload': 'burrito'} - bso8 = {'id': 'e', 'payload': 'chocolate'} - resp = self.retry_post_json(endpoint + '?batch=true', [bso7, bso8]) + bso7 = {"id": "a", "payload": "burrito"} + bso8 = {"id": "e", "payload": "chocolate"} + resp = self.retry_post_json(endpoint + "?batch=true", [bso7, bso8]) batch = resp.json["batch"] time.sleep(1) - commit = '?batch={0}&commit=true'.format(batch) + commit = "?batch={0}&commit=true".format(batch) resp1 = self.retry_post_json(endpoint + commit, []) - committed = resp1.json['modified'] - self.assertEquals(committed, float(resp1.headers['X-Last-Modified'])) + committed = resp1.json["modified"] + self.assertEquals(committed, float(resp1.headers["X-Last-Modified"])) - resp2 = self.app.get(endpoint + '/a') - self.assertEquals(committed, float(resp2.headers['X-Last-Modified'])) - self.assertEquals(committed, resp2.json['modified']) - self.assertEquals(resp2.json['payload'], 'burrito') - - resp3 = self.app.get(endpoint + '/e') - self.assertEquals(committed, resp3.json['modified']) + resp2 = self.app.get(endpoint + "/a") + self.assertEquals(committed, 
float(resp2.headers["X-Last-Modified"])) + self.assertEquals(committed, resp2.json["modified"]) + self.assertEquals(resp2.json["payload"], "burrito") + resp3 = self.app.get(endpoint + "/e") + self.assertEquals(committed, resp3.json["modified"]) def test_aaa_batch_commit_collision(self): # It's possible that a batch contain a BSO inside a batch as well @@ -1596,48 +1707,45 @@ def test_aaa_batch_commit_collision(self): # for spanner because of conflicting ways that the data is written # to the database and the discoverability of IDs in previously # submitted batches. - endpoint = self.root + '/storage/xxx_col2' + endpoint = self.root + "/storage/xxx_col2" orig = "Letting the days go by" repl = "Same as it ever was" batch_num = self.retry_post_json( - endpoint + "?batch=true", - [{"id":"b0", "payload": orig}] + endpoint + "?batch=true", [{"id": "b0", "payload": orig}] ).json["batch"] resp = self.retry_post_json( endpoint + "?batch={}&commit=true".format(batch_num), - [{"id":"b0", "payload": repl}] + [{"id": "b0", "payload": repl}], ) # this should succeed, using the newerer payload value. 
assert resp.json["failed"] == {}, "batch commit failed" assert resp.json["success"] == ["b0"], "batch commit id incorrect" - resp = self.app.get(endpoint+"?full=1") - assert resp.json[0].get( - "payload") == repl, "wrong payload returned" - + resp = self.app.get(endpoint + "?full=1") + assert resp.json[0].get("payload") == repl, "wrong payload returned" def test_we_dont_need_no_stinkin_batches(self): - endpoint = self.root + '/storage/xxx_col2' + endpoint = self.root + "/storage/xxx_col2" # invalid batch ID - bso1 = {'id': 'f', 'payload': 'pantomime'} - self.retry_post_json(endpoint + '?batch=sammich', [bso1], status=400) + bso1 = {"id": "f", "payload": "pantomime"} + self.retry_post_json(endpoint + "?batch=sammich", [bso1], status=400) # commit with no batch ID - self.retry_post_json(endpoint + '?commit=true', [], status=400) + self.retry_post_json(endpoint + "?commit=true", [], status=400) def test_batch_size_limits(self): - limits = self.app.get(self.root + '/info/configuration').json - self.assertTrue('max_post_records' in limits) - self.assertTrue('max_post_bytes' in limits) - self.assertTrue('max_total_records' in limits) - self.assertTrue('max_total_bytes' in limits) - self.assertTrue('max_record_payload_bytes' in limits) - self.assertTrue('max_request_bytes' in limits) - - endpoint = self.root + '/storage/xxx_col2?batch=true' + limits = self.app.get(self.root + "/info/configuration").json + self.assertTrue("max_post_records" in limits) + self.assertTrue("max_post_bytes" in limits) + self.assertTrue("max_total_records" in limits) + self.assertTrue("max_total_bytes" in limits) + self.assertTrue("max_record_payload_bytes" in limits) + self.assertTrue("max_request_bytes" in limits) + + endpoint = self.root + "/storage/xxx_col2?batch=true" # There are certain obvious constraints on these limits, # violations of which would be very confusing for clients. 
# @@ -1697,130 +1805,138 @@ def test_batch_size_limits(self): # }, status=400) # self.assertEquals(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) bsos = [ - {'id': 'little', 'payload': 'XXX'}, - {'id': 'big', 'payload': 'X' * (limits['max_post_bytes'] - 3)} + {"id": "little", "payload": "XXX"}, + {"id": "big", "payload": "X" * (limits["max_post_bytes"] - 3)}, ] res = self.retry_post_json(endpoint, bsos) - self.assertFalse(res.json['failed']) - bsos[1]['payload'] += 'X' + self.assertFalse(res.json["failed"]) + bsos[1]["payload"] += "X" res = self.retry_post_json(endpoint, bsos) - self.assertEqual(res.json['success'], ['little']) - self.assertEqual(res.json['failed']['big'], 'retry bytes') + self.assertEqual(res.json["success"], ["little"]) + self.assertEqual(res.json["failed"]["big"], "retry bytes") # `max_total_bytes` is an (inclusive) limit on the # total size of all payloads in a batch. We can only enforce # it if the client tells us this via header. - self.retry_post_json(endpoint, [], headers={ - 'X-Weave-Total-Bytes': str(limits['max_total_bytes']) - }) - res = self.retry_post_json(endpoint, [], headers={ - 'X-Weave-Total-Bytes': str(limits['max_total_bytes'] + 1) - }, status=400) + self.retry_post_json( + endpoint, + [], + headers={"X-Weave-Total-Bytes": str(limits["max_total_bytes"])}, + ) + res = self.retry_post_json( + endpoint, + [], + headers={ + "X-Weave-Total-Bytes": str(limits["max_total_bytes"] + 1) + }, + status=400, + ) self.assertEquals(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) def test_batch_partial_update(self): - collection = self.root + '/storage/xxx_col2' + collection = self.root + "/storage/xxx_col2" bsos = [ - {'id': 'a', 'payload': 'aai'}, - {'id': 'b', 'payload': 'bee', 'sortindex': 17} + {"id": "a", "payload": "aai"}, + {"id": "b", "payload": "bee", "sortindex": 17}, ] resp = self.retry_post_json(collection, bsos) - orig_ts = float(resp.headers['X-Last-Modified']) + orig_ts = float(resp.headers["X-Last-Modified"]) # Update one, and add a new one. 
bsos = [ - {'id': 'b', 'payload': 'bii'}, - {'id': 'c', 'payload': 'sea'}, + {"id": "b", "payload": "bii"}, + {"id": "c", "payload": "sea"}, ] - resp = self.retry_post_json(collection + '?batch=true', bsos) + resp = self.retry_post_json(collection + "?batch=true", bsos) batch = resp.json["batch"] - self.assertEquals(orig_ts, float(resp.headers['X-Last-Modified'])) + self.assertEquals(orig_ts, float(resp.headers["X-Last-Modified"])) # The updated item hasn't been written yet. - resp = self.app.get(collection + '?full=1') + resp = self.app.get(collection + "?full=1") res = resp.json - res.sort(key=lambda bso: bso['id']) + res.sort(key=lambda bso: bso["id"]) self.assertEquals(len(res), 2) - self.assertEquals(res[0]['payload'], 'aai') - self.assertEquals(res[1]['payload'], 'bee') - self.assertEquals(res[0]['modified'], orig_ts) - self.assertEquals(res[1]['modified'], orig_ts) - self.assertEquals(res[1]['sortindex'], 17) + self.assertEquals(res[0]["payload"], "aai") + self.assertEquals(res[1]["payload"], "bee") + self.assertEquals(res[0]["modified"], orig_ts) + self.assertEquals(res[1]["modified"], orig_ts) + self.assertEquals(res[1]["sortindex"], 17) - endpoint = collection + '?batch={0}&commit=true'.format(batch) + endpoint = collection + "?batch={0}&commit=true".format(batch) resp = self.retry_post_json(endpoint, []) - commit_ts = float(resp.headers['X-Last-Modified']) + commit_ts = float(resp.headers["X-Last-Modified"]) # The changes have now been applied. 
- resp = self.app.get(collection + '?full=1') + resp = self.app.get(collection + "?full=1") res = resp.json - res.sort(key=lambda bso: bso['id']) + res.sort(key=lambda bso: bso["id"]) self.assertEquals(len(res), 3) - self.assertEquals(res[0]['payload'], 'aai') - self.assertEquals(res[1]['payload'], 'bii') - self.assertEquals(res[2]['payload'], 'sea') - self.assertEquals(res[0]['modified'], orig_ts) - self.assertEquals(res[1]['modified'], commit_ts) - self.assertEquals(res[2]['modified'], commit_ts) + self.assertEquals(res[0]["payload"], "aai") + self.assertEquals(res[1]["payload"], "bii") + self.assertEquals(res[2]["payload"], "sea") + self.assertEquals(res[0]["modified"], orig_ts) + self.assertEquals(res[1]["modified"], commit_ts) + self.assertEquals(res[2]["modified"], commit_ts) # Fields not touched by the batch, should have been preserved. - self.assertEquals(res[1]['sortindex'], 17) + self.assertEquals(res[1]["sortindex"], 17) def test_batch_ttl_update(self): - collection = self.root + '/storage/xxx_col2' + collection = self.root + "/storage/xxx_col2" bsos = [ - {'id': 'a', 'payload': 'ayy'}, - {'id': 'b', 'payload': 'bea'}, - {'id': 'c', 'payload': 'see'} + {"id": "a", "payload": "ayy"}, + {"id": "b", "payload": "bea"}, + {"id": "c", "payload": "see"}, ] resp = self.retry_post_json(collection, bsos) # Bump ttls as a series of individual batch operations. 
- resp = self.retry_post_json(collection + '?batch=true', [], - status=202) - orig_ts = float(resp.headers['X-Last-Modified']) + resp = self.retry_post_json(collection + "?batch=true", [], status=202) + orig_ts = float(resp.headers["X-Last-Modified"]) batch = resp.json["batch"] - endpoint = collection + '?batch={0}'.format(batch) - resp = self.retry_post_json(endpoint, [{'id': 'a', 'ttl': 2}], - status=202) - self.assertEquals(orig_ts, float(resp.headers['X-Last-Modified'])) - resp = self.retry_post_json(endpoint, [{'id': 'b', 'ttl': 2}], - status=202) - self.assertEquals(orig_ts, float(resp.headers['X-Last-Modified'])) - resp = self.retry_post_json(endpoint + '&commit=true', [], status=200) + endpoint = collection + "?batch={0}".format(batch) + resp = self.retry_post_json( + endpoint, [{"id": "a", "ttl": 2}], status=202 + ) + self.assertEquals(orig_ts, float(resp.headers["X-Last-Modified"])) + resp = self.retry_post_json( + endpoint, [{"id": "b", "ttl": 2}], status=202 + ) + self.assertEquals(orig_ts, float(resp.headers["X-Last-Modified"])) + resp = self.retry_post_json(endpoint + "&commit=true", [], status=200) # The payloads should be unchanged - resp = self.app.get(collection + '?full=1') + resp = self.app.get(collection + "?full=1") res = resp.json - res.sort(key=lambda bso: bso['id']) + res.sort(key=lambda bso: bso["id"]) self.assertEquals(len(res), 3) - self.assertEquals(res[0]['payload'], 'ayy') - self.assertEquals(res[1]['payload'], 'bea') - self.assertEquals(res[2]['payload'], 'see') + self.assertEquals(res[0]["payload"], "ayy") + self.assertEquals(res[1]["payload"], "bea") + self.assertEquals(res[2]["payload"], "see") # If we wait, the ttls should kick in time.sleep(2.1) - resp = self.app.get(collection + '?full=1') + resp = self.app.get(collection + "?full=1") res = resp.json self.assertEquals(len(res), 1) - self.assertEquals(res[0]['payload'], 'see') + self.assertEquals(res[0]["payload"], "see") def test_batch_ttl_is_based_on_commit_timestamp(self): - 
collection = self.root + '/storage/xxx_col2' + collection = self.root + "/storage/xxx_col2" - resp = self.retry_post_json(collection + '?batch=true', [], status=202) + resp = self.retry_post_json(collection + "?batch=true", [], status=202) batch = resp.json["batch"] - endpoint = collection + '?batch={0}'.format(batch) - resp = self.retry_post_json(endpoint, [{'id': 'a', 'ttl': 3}], - status=202) + endpoint = collection + "?batch={0}".format(batch) + resp = self.retry_post_json( + endpoint, [{"id": "a", "ttl": 3}], status=202 + ) # Put some time between upload timestamp and commit timestamp. time.sleep(1.5) - resp = self.retry_post_json(endpoint + '&commit=true', [], - status=200) + resp = self.retry_post_json(endpoint + "&commit=true", [], status=200) # Wait a little; if ttl is taken from the time of the commit # then it should not kick in just yet. @@ -1828,7 +1944,7 @@ def test_batch_ttl_is_based_on_commit_timestamp(self): resp = self.app.get(collection) res = resp.json self.assertEquals(len(res), 1) - self.assertEquals(res[0], 'a') + self.assertEquals(res[0], "a") # Wait some more, and the ttl should kick in. 
time.sleep(1.6) @@ -1837,172 +1953,174 @@ def test_batch_ttl_is_based_on_commit_timestamp(self): self.assertEquals(len(res), 0) def test_batch_with_immediate_commit(self): - collection = self.root + '/storage/xxx_col2' + collection = self.root + "/storage/xxx_col2" bsos = [ - {'id': 'a', 'payload': 'aih'}, - {'id': 'b', 'payload': 'bie'}, - {'id': 'c', 'payload': 'cee'} + {"id": "a", "payload": "aih"}, + {"id": "b", "payload": "bie"}, + {"id": "c", "payload": "cee"}, ] - resp = self.retry_post_json(collection + '?batch=true&commit=true', - bsos, status=200) - self.assertTrue('batch' not in resp.json) - self.assertTrue('modified' in resp.json) - committed = resp.json['modified'] + resp = self.retry_post_json( + collection + "?batch=true&commit=true", bsos, status=200 + ) + self.assertTrue("batch" not in resp.json) + self.assertTrue("modified" in resp.json) + committed = resp.json["modified"] - resp = self.app.get(self.root + '/info/collections') - self.assertEquals(float(resp.headers['X-Last-Modified']), committed) - self.assertEquals(resp.json['xxx_col2'], committed) + resp = self.app.get(self.root + "/info/collections") + self.assertEquals(float(resp.headers["X-Last-Modified"]), committed) + self.assertEquals(resp.json["xxx_col2"], committed) - resp = self.app.get(collection + '?full=1') - self.assertEquals(float(resp.headers['X-Last-Modified']), committed) + resp = self.app.get(collection + "?full=1") + self.assertEquals(float(resp.headers["X-Last-Modified"]), committed) res = resp.json - res.sort(key=lambda bso: bso['id']) + res.sort(key=lambda bso: bso["id"]) self.assertEquals(len(res), 3) - self.assertEquals(res[0]['payload'], 'aih') - self.assertEquals(res[1]['payload'], 'bie') - self.assertEquals(res[2]['payload'], 'cee') + self.assertEquals(res[0]["payload"], "aih") + self.assertEquals(res[1]["payload"], "bie") + self.assertEquals(res[2]["payload"], "cee") def test_batch_uploads_properly_update_info_collections(self): - collection1 = self.root + 
'/storage/xxx_col1' - collection2 = self.root + '/storage/xxx_col2' + collection1 = self.root + "/storage/xxx_col1" + collection2 = self.root + "/storage/xxx_col2" bsos = [ - {'id': 'a', 'payload': 'aih'}, - {'id': 'b', 'payload': 'bie'}, - {'id': 'c', 'payload': 'cee'} + {"id": "a", "payload": "aih"}, + {"id": "b", "payload": "bie"}, + {"id": "c", "payload": "cee"}, ] resp = self.retry_post_json(collection1, bsos) - ts1 = resp.json['modified'] + ts1 = resp.json["modified"] resp = self.retry_post_json(collection2, bsos) - ts2 = resp.json['modified'] + ts2 = resp.json["modified"] - resp = self.app.get(self.root + '/info/collections') - self.assertEquals(float(resp.headers['X-Last-Modified']), ts2) - self.assertEquals(resp.json['xxx_col1'], ts1) - self.assertEquals(resp.json['xxx_col2'], ts2) + resp = self.app.get(self.root + "/info/collections") + self.assertEquals(float(resp.headers["X-Last-Modified"]), ts2) + self.assertEquals(resp.json["xxx_col1"], ts1) + self.assertEquals(resp.json["xxx_col2"], ts2) # Overwrite in place, timestamp should change. 
- resp = self.retry_post_json(collection2 + '?batch=true&commit=true', - bsos[:2]) - self.assertTrue(resp.json['modified'] > ts2) - ts2 = resp.json['modified'] + resp = self.retry_post_json( + collection2 + "?batch=true&commit=true", bsos[:2] + ) + self.assertTrue(resp.json["modified"] > ts2) + ts2 = resp.json["modified"] - resp = self.app.get(self.root + '/info/collections') - self.assertEquals(float(resp.headers['X-Last-Modified']), ts2) - self.assertEquals(resp.json['xxx_col1'], ts1) - self.assertEquals(resp.json['xxx_col2'], ts2) + resp = self.app.get(self.root + "/info/collections") + self.assertEquals(float(resp.headers["X-Last-Modified"]), ts2) + self.assertEquals(resp.json["xxx_col1"], ts1) + self.assertEquals(resp.json["xxx_col2"], ts2) # Add new items, timestamp should change - resp = self.retry_post_json(collection1 + '?batch=true&commit=true', - [{'id': 'd', 'payload': 'dee'}]) - self.assertTrue(resp.json['modified'] > ts1) - self.assertTrue(resp.json['modified'] >= ts2) - ts1 = resp.json['modified'] + resp = self.retry_post_json( + collection1 + "?batch=true&commit=true", + [{"id": "d", "payload": "dee"}], + ) + self.assertTrue(resp.json["modified"] > ts1) + self.assertTrue(resp.json["modified"] >= ts2) + ts1 = resp.json["modified"] - resp = self.app.get(self.root + '/info/collections') - self.assertEquals(float(resp.headers['X-Last-Modified']), ts1) - self.assertEquals(resp.json['xxx_col1'], ts1) - self.assertEquals(resp.json['xxx_col2'], ts2) + resp = self.app.get(self.root + "/info/collections") + self.assertEquals(float(resp.headers["X-Last-Modified"]), ts1) + self.assertEquals(resp.json["xxx_col1"], ts1) + self.assertEquals(resp.json["xxx_col2"], ts2) def test_batch_with_failing_bsos(self): - collection = self.root + '/storage/xxx_col2' + collection = self.root + "/storage/xxx_col2" bsos = [ - {'id': 'a', 'payload': 'aai'}, - {'id': 'b\n', 'payload': 'i am invalid', 'sortindex': 17} + {"id": "a", "payload": "aai"}, + {"id": "b\n", "payload": "i am 
invalid", "sortindex": 17}, ] - resp = self.retry_post_json(collection + '?batch=true', bsos) - self.assertEqual(len(resp.json['failed']), 1) - self.assertEqual(len(resp.json['success']), 1) + resp = self.retry_post_json(collection + "?batch=true", bsos) + self.assertEqual(len(resp.json["failed"]), 1) + self.assertEqual(len(resp.json["success"]), 1) batch = resp.json["batch"] bsos = [ - {'id': 'c', 'payload': 'sea'}, - {'id': 'd', 'payload': 'dii', 'ttl': -12}, + {"id": "c", "payload": "sea"}, + {"id": "d", "payload": "dii", "ttl": -12}, ] - endpoint = collection + '?batch={0}&commit=true'.format(batch) + endpoint = collection + "?batch={0}&commit=true".format(batch) resp = self.retry_post_json(endpoint, bsos) - self.assertEqual(len(resp.json['failed']), 1) - self.assertEqual(len(resp.json['success']), 1) + self.assertEqual(len(resp.json["failed"]), 1) + self.assertEqual(len(resp.json["success"]), 1) # To correctly match semantics of batchless POST, the batch # should be committed including only the successful items. # It is the client's responsibility to detect that some items # failed, and decide whether to commit the batch. 
- resp = self.app.get(collection + '?full=1') + resp = self.app.get(collection + "?full=1") res = resp.json - res.sort(key=lambda bso: bso['id']) + res.sort(key=lambda bso: bso["id"]) self.assertEquals(len(res), 2) - self.assertEquals(res[0]['payload'], 'aai') - self.assertEquals(res[1]['payload'], 'sea') + self.assertEquals(res[0]["payload"], "aai") + self.assertEquals(res[1]["payload"], "sea") def test_batch_id_is_correctly_scoped_to_a_collection(self): - collection1 = self.root + '/storage/xxx_col1' + collection1 = self.root + "/storage/xxx_col1" bsos = [ - {'id': 'a', 'payload': 'aih'}, - {'id': 'b', 'payload': 'bie'}, - {'id': 'c', 'payload': 'cee'} + {"id": "a", "payload": "aih"}, + {"id": "b", "payload": "bie"}, + {"id": "c", "payload": "cee"}, ] - resp = self.retry_post_json(collection1 + '?batch=true', bsos) - batch = resp.json['batch'] + resp = self.retry_post_json(collection1 + "?batch=true", bsos) + batch = resp.json["batch"] # I should not be able to add to that batch in a different collection. - endpoint2 = self.root + '/storage/xxx_col2?batch={0}'.format(batch) + endpoint2 = self.root + "/storage/xxx_col2?batch={0}".format(batch) resp = self.retry_post_json( - endpoint2, - [{'id': 'd', 'payload': 'dii'}], - status=400) + endpoint2, [{"id": "d", "payload": "dii"}], status=400 + ) # I should not be able to commit that batch in a different collection. - resp = self.retry_post_json(endpoint2 + '&commit=true', [], status=400) + resp = self.retry_post_json(endpoint2 + "&commit=true", [], status=400) # I should still be able to use the batch in the correct collection. 
- endpoint1 = collection1 + '?batch={0}'.format(batch) - resp = self.retry_post_json(endpoint1, - [{'id': 'd', 'payload': 'dii'}]) - resp = self.retry_post_json(endpoint1 + '&commit=true', []) + endpoint1 = collection1 + "?batch={0}".format(batch) + resp = self.retry_post_json(endpoint1, [{"id": "d", "payload": "dii"}]) + resp = self.retry_post_json(endpoint1 + "&commit=true", []) - resp = self.app.get(collection1 + '?full=1') + resp = self.app.get(collection1 + "?full=1") res = resp.json - res.sort(key=lambda bso: bso['id']) + res.sort(key=lambda bso: bso["id"]) self.assertEquals(len(res), 4) - self.assertEquals(res[0]['payload'], 'aih') - self.assertEquals(res[1]['payload'], 'bie') - self.assertEquals(res[2]['payload'], 'cee') - self.assertEquals(res[3]['payload'], 'dii') + self.assertEquals(res[0]["payload"], "aih") + self.assertEquals(res[1]["payload"], "bie") + self.assertEquals(res[2]["payload"], "cee") + self.assertEquals(res[3]["payload"], "dii") def test_users_with_the_same_batch_id_get_separate_data(self): # Try to generate two users with the same batch-id. # It might take a couple of attempts... for _ in range(100): - bsos = [{'id': 'a', 'payload': 'aih'}] - req = '/storage/xxx_col1?batch=true' + bsos = [{"id": "a", "payload": "aih"}] + req = "/storage/xxx_col1?batch=true" resp = self.retry_post_json(self.root + req, bsos) - batch1 = resp.json['batch'] + batch1 = resp.json["batch"] with self._switch_user(): - bsos = [{'id': 'b', 'payload': 'bee'}] - req = '/storage/xxx_col1?batch=true' + bsos = [{"id": "b", "payload": "bee"}] + req = "/storage/xxx_col1?batch=true" resp = self.retry_post_json(self.root + req, bsos) - batch2 = resp.json['batch'] + batch2 = resp.json["batch"] # Let the second user commit their batch. - req = '/storage/xxx_col1?batch={0}&commit=true'.format(batch2) + req = "/storage/xxx_col1?batch={0}&commit=true".format(batch2) self.retry_post_json(self.root + req, []) # It should only have a single item. 
- resp = self.app.get(self.root + '/storage/xxx_col1') - self.assertEquals(resp.json, ['b']) + resp = self.app.get(self.root + "/storage/xxx_col1") + self.assertEquals(resp.json, ["b"]) # The first user's collection should still be empty. # Now have the first user commit their batch. - req = '/storage/xxx_col1?batch={0}&commit=true'.format(batch1) + req = "/storage/xxx_col1?batch={0}&commit=true".format(batch1) self.retry_post_json(self.root + req, []) # It should only have a single item. - resp = self.app.get(self.root + '/storage/xxx_col1') - self.assertEquals(resp.json, ['a']) + resp = self.app.get(self.root + "/storage/xxx_col1") + self.assertEquals(resp.json, ["a"]) # If we didn't make a conflict, try again. if batch1 == batch2: break else: - raise unittest2.SkipTest('failed to generate conflicting batchid') + raise unittest2.SkipTest("failed to generate conflicting batchid") def test_that_we_dont_resurrect_committed_batches(self): # This retry loop tries to trigger a situation where we: @@ -2010,63 +2128,63 @@ def test_that_we_dont_resurrect_committed_batches(self): # * successfully commit that batch # * create a new batch tht re-uses the same batchid for _ in range(100): - bsos = [{'id': 'i', 'payload': 'aye'}] - req = '/storage/xxx_col1?batch=true' + bsos = [{"id": "i", "payload": "aye"}] + req = "/storage/xxx_col1?batch=true" resp = self.retry_post_json(self.root + req, bsos) - batch1 = resp.json['batch'] - req = '/storage/xxx_col1?batch={0}&commit=true'.format(batch1) + batch1 = resp.json["batch"] + req = "/storage/xxx_col1?batch={0}&commit=true".format(batch1) self.retry_post_json(self.root + req, []) - req = '/storage/xxx_col2?batch=true' + req = "/storage/xxx_col2?batch=true" resp = self.retry_post_json(self.root + req, []) - batch2 = resp.json['batch'] - bsos = [{'id': 'j', 'payload': 'jay'}] - req = '/storage/xxx_col2?batch={0}&commit=true'.format(batch2) + batch2 = resp.json["batch"] + bsos = [{"id": "j", "payload": "jay"}] + req = 
"/storage/xxx_col2?batch={0}&commit=true".format(batch2) self.retry_post_json(self.root + req, bsos) # Retry if we failed to trigger re-use of the batchid. if batch1 == batch2: break else: - raise unittest2.SkipTest('failed to trigger re-use of batchid') + raise unittest2.SkipTest("failed to trigger re-use of batchid") # Despite having the same batchid, the second batch should # be completely independent of the first. - resp = self.app.get(self.root + '/storage/xxx_col2') - self.assertEquals(resp.json, ['j']) + resp = self.app.get(self.root + "/storage/xxx_col2") + self.assertEquals(resp.json, ["j"]) def test_batch_id_is_correctly_scoped_to_a_user(self): - collection = self.root + '/storage/xxx_col1' + collection = self.root + "/storage/xxx_col1" bsos = [ - {'id': 'a', 'payload': 'aih'}, - {'id': 'b', 'payload': 'bie'}, - {'id': 'c', 'payload': 'cee'} + {"id": "a", "payload": "aih"}, + {"id": "b", "payload": "bie"}, + {"id": "c", "payload": "cee"}, ] - resp = self.retry_post_json(collection + '?batch=true', bsos) - batch = resp.json['batch'] + resp = self.retry_post_json(collection + "?batch=true", bsos) + batch = resp.json["batch"] with self._switch_user(): # I should not be able to add to that batch as a different user. - endpoint = self.root + '/storage/xxx_col1?batch={0}'.format(batch) + endpoint = self.root + "/storage/xxx_col1?batch={0}".format(batch) resp = self.retry_post_json( - endpoint, - [{'id': 'd', 'payload': 'di'}], - status=400) + endpoint, [{"id": "d", "payload": "di"}], status=400 + ) # I should not be able to commit that batch as a different user. - resp = self.retry_post_json(endpoint + '&commit=true', [], - status=400) + resp = self.retry_post_json( + endpoint + "&commit=true", [], status=400 + ) # I should still be able to use the batch in the original user. 
- endpoint = collection + '?batch={0}'.format(batch) - resp = self.retry_post_json(endpoint, [{'id': 'd', 'payload': 'di'}]) - resp = self.retry_post_json(endpoint + '&commit=true', []) + endpoint = collection + "?batch={0}".format(batch) + resp = self.retry_post_json(endpoint, [{"id": "d", "payload": "di"}]) + resp = self.retry_post_json(endpoint + "&commit=true", []) - resp = self.app.get(collection + '?full=1') + resp = self.app.get(collection + "?full=1") res = resp.json - res.sort(key=lambda bso: bso['id']) + res.sort(key=lambda bso: bso["id"]) self.assertEquals(len(res), 4) - self.assertEquals(res[0]['payload'], 'aih') - self.assertEquals(res[1]['payload'], 'bie') - self.assertEquals(res[2]['payload'], 'cee') - self.assertEquals(res[3]['payload'], 'di') + self.assertEquals(res[0]["payload"], "aih") + self.assertEquals(res[1]["payload"], "bie") + self.assertEquals(res[2]["payload"], "cee") + self.assertEquals(res[3]["payload"], "di") # bug 1332552 make sure ttl:null use the default ttl def test_create_bso_with_null_ttl(self): @@ -2079,36 +2197,44 @@ def test_create_bso_with_null_ttl(self): def test_rejection_of_known_bad_payloads(self): bso = { "id": "keys", - "payload": json_dumps({ - "ciphertext": "IDontKnowWhatImDoing", - "IV": "AAAAAAAAAAAAAAAAAAAAAA==", - }) + "payload": json_dumps( + { + "ciphertext": "IDontKnowWhatImDoing", + "IV": "AAAAAAAAAAAAAAAAAAAAAA==", + } + ), } # Fishy IVs are rejected on the "crypto" collection. - self.retry_put_json(self.root + "/storage/crypto/keys", bso, - status=400) - self.retry_put_json(self.root + "/storage/crypto/blerg", bso, - status=400) + self.retry_put_json( + self.root + "/storage/crypto/keys", bso, status=400 + ) + self.retry_put_json( + self.root + "/storage/crypto/blerg", bso, status=400 + ) self.retry_post_json(self.root + "/storage/crypto", [bso], status=400) # But are allowed on other collections. 
- self.retry_put_json(self.root + "/storage/xxx_col2/keys", bso, - status=200) - self.retry_post_json(self.root + "/storage/xxx_col2", [bso], - status=200) + self.retry_put_json( + self.root + "/storage/xxx_col2/keys", bso, status=200 + ) + self.retry_post_json( + self.root + "/storage/xxx_col2", [bso], status=200 + ) # bug 1397357 def test_batch_empty_commit(self): def testEmptyCommit(contentType, body, status=200): - bsos = [{'id': str(i).zfill(2), 'payload': 'X'} for i in range(5)] - res = self.retry_post_json(self.root+'/storage/xxx_col?batch=true', - bsos) - self.assertEquals(len(res.json['success']), 5) - self.assertEquals(len(res.json['failed']), 0) + bsos = [{"id": str(i).zfill(2), "payload": "X"} for i in range(5)] + res = self.retry_post_json( + self.root + "/storage/xxx_col?batch=true", bsos + ) + self.assertEquals(len(res.json["success"]), 5) + self.assertEquals(len(res.json["failed"]), 0) batch = res.json["batch"] self.app.post( - self.root+'/storage/xxx_col?commit=true&batch='+batch, - body, headers={"Content-Type": contentType}, - status=status + self.root + "/storage/xxx_col?commit=true&batch=" + batch, + body, + headers={"Content-Type": contentType}, + status=status, ) testEmptyCommit("application/json", "[]") diff --git a/tools/integration_tests/test_support.py b/tools/integration_tests/test_support.py index 5a1c6dd3f0..d27af9b04f 100644 --- a/tools/integration_tests/test_support.py +++ b/tools/integration_tests/test_support.py @@ -43,6 +43,7 @@ class Secrets(object): Options: - **filename**: a list of file paths, or a single path. 
""" + def __init__(self, filename=None): self._secrets = defaultdict(list) if filename is not None: @@ -56,9 +57,9 @@ def load(self, filename): filename = [filename] for name in filename: - with open(name, 'rb') as f: + with open(name, "rb") as f: - reader = csv.reader(f, delimiter=',') + reader = csv.reader(f, delimiter=",") for line, row in enumerate(reader): if len(row) < 2: continue @@ -67,7 +68,7 @@ def load(self, filename): raise ValueError("Duplicate node line %d" % line) secrets = [] for secret in row[1:]: - secret = secret.split(':') + secret = secret.split(":") if len(secret) != 2: raise ValueError("Invalid secret line %d" % line) secrets.append(tuple(secret)) @@ -75,11 +76,13 @@ def load(self, filename): self._secrets[node] = secrets def save(self, filename): - with open(filename, 'wb') as f: - writer = csv.writer(f, delimiter=',') + with open(filename, "wb") as f: + writer = csv.writer(f, delimiter=",") for node, secrets in self._secrets.items(): - secrets = ['%s:%s' % (timestamp, secret) - for timestamp, secret in secrets] + secrets = [ + "%s:%s" % (timestamp, secret) + for timestamp, secret in secrets + ] secrets.insert(0, node) writer.writerow(secrets) @@ -107,6 +110,7 @@ class FixedSecrets(object): Options: - **secrets**: a list of hex-encoded secrets to use for all nodes. """ + def __init__(self, secrets): if isinstance(secrets, str): secrets = secrets.split() @@ -149,7 +153,7 @@ def main(global_config, **settings): # Konfig keywords are added to every section when present, we have to # filter them out, otherwise plugin.load_from_config and # plugin.load_from_settings are unable to create instances. - konfig_keywords = ['extends', 'overrides'] + konfig_keywords = ["extends", "overrides"] # Put values from the config file into the pyramid settings dict. for section in config.sections(): @@ -159,7 +163,7 @@ def main(global_config, **settings): settings[setting_prefix + "." 
+ name] = value # Store a reference to the Config object itself for later retrieval. - settings['config'] = config + settings["config"] = config return config @@ -178,7 +182,8 @@ def get_test_configurator(root, ini_file="tests.ini"): authz_policy = ACLAuthorizationPolicy() config.set_authorization_policy(authz_policy) authn_policy = TokenServerAuthenticationPolicy.from_settings( - config.get_settings()) + config.get_settings() + ) config.set_authentication_policy(authn_policy) return config @@ -193,7 +198,7 @@ def get_configurator(global_config, **settings): """ # Populate a SettingsDict with settings from the deployment file. settings = SettingsDict(settings) - config_file = global_config.get('__file__') + config_file = global_config.get("__file__") if config_file is not None: load_into_settings(config_file, settings) # Update with default pyramid settings, and then insert for all to use. @@ -210,6 +215,7 @@ def restore_env(*keys): current values of those environment variables at the start of the call and restore them to those values at the end. """ + def decorator(func): @functools.wraps(func) def wrapper(*args, **kwds): @@ -222,7 +228,9 @@ def wrapper(*args, **kwds): os.environ.pop(key, None) else: os.environ[key] = value + return wrapper + return decorator @@ -246,8 +254,9 @@ def get_configurator(self): self.ini_file = self.TEST_INI_FILE else: # The file to use may be specified in the environment. - self.ini_file = os.environ.get("MOZSVC_TEST_INI_FILE", - "tests.ini") + self.ini_file = os.environ.get( + "MOZSVC_TEST_INI_FILE", "tests.ini" + ) __file__ = sys.modules[self.__class__.__module__].__file__ config = get_test_configurator(__file__, self.ini_file) config.begin() @@ -308,18 +317,18 @@ def _cleanup_test_databases(self): # For server-based dbs, drop the tables to clear them. 
if storage.dbconnector.driver in ("mysql", "postgres"): with storage.dbconnector.connect() as c: - c.execute('DROP TABLE bso') - c.execute('DROP TABLE user_collections') - c.execute('DROP TABLE collections') - c.execute('DROP TABLE batch_uploads') - c.execute('DROP TABLE batch_upload_items') + c.execute("DROP TABLE bso") + c.execute("DROP TABLE user_collections") + c.execute("DROP TABLE collections") + c.execute("DROP TABLE batch_uploads") + c.execute("DROP TABLE batch_upload_items") # Explicitly free any pooled connections. storage.dbconnector.engine.dispose() # Find any sqlite database files and delete them. for key, value in self.config.registry.settings.items(): if key.endswith(".sqluri"): sqluri = urlparse.urlparse(value) - if sqluri.scheme == 'sqlite' and ":memory:" not in value: + if sqluri.scheme == "sqlite" and ":memory:" not in value: if os.path.isfile(sqluri.path): os.remove(sqluri.path) @@ -349,13 +358,16 @@ def setUp(self): self.config.make_wsgi_app() host_url = urlparse.urlparse(self.host_url) - self.app = TestApp(self.host_url, extra_environ={ - "HTTP_HOST": host_url.netloc, - "wsgi.url_scheme": host_url.scheme or "http", - "SERVER_NAME": host_url.hostname, - "REMOTE_ADDR": "127.0.0.1", - "SCRIPT_NAME": host_url.path, - }) + self.app = TestApp( + self.host_url, + extra_environ={ + "HTTP_HOST": host_url.netloc, + "wsgi.url_scheme": host_url.scheme or "http", + "SERVER_NAME": host_url.hostname, + "REMOTE_ADDR": "127.0.0.1", + "SCRIPT_NAME": host_url.path, + }, + ) class StorageFunctionalTestCase(FunctionalTestCase, StorageTestCase): @@ -373,6 +385,7 @@ def setUp(self): def new_do_request(req, *args, **kwds): hawkauthlib.sign_request(req, self.auth_token, self.auth_secret) return orig_do_request(req, *args, **kwds) + orig_do_request = self.app.do_request self.app.do_request = new_do_request @@ -388,17 +401,18 @@ def _authenticate(self): self.user_id = random.randint(1, 100000) self.fxa_uid = "DECAFBAD" + str(uuid.uuid4().hex)[8:] self.hashed_fxa_uid = 
str(uuid.uuid4().hex) - self.fxa_kid = "0000000000000-DECAFBAD" + str( - uuid.uuid4().hex)[8:] + self.fxa_kid = "0000000000000-DECAFBAD" + str(uuid.uuid4().hex)[8:] auth_policy = self.config.registry.getUtility(IAuthenticationPolicy) req = Request.blank(self.host_url) creds = auth_policy.encode_hawk_id( - req, self.user_id, extra={ + req, + self.user_id, + extra={ # Include a hashed_fxa_uid to trigger uid/kid extraction "hashed_fxa_uid": self.hashed_fxa_uid, "fxa_uid": self.fxa_uid, "fxa_kid": self.fxa_kid, - } + }, ) self.auth_token, self.auth_secret = creds @@ -438,18 +452,18 @@ def _cleanup_test_databases(self): "algorithm": "DS", "x": "385cb3509f086e110c5e24bdd395a84b335a09ae", "y": "738ec929b559b604a232a9b55a5295afc368063bb9c20fac4e53a74970a4db795" - "6d48e4c7ed523405f629b4cc83062f13029c4d615bbacb8b97f5e56f0c7ac9bc1" - "d4e23809889fa061425c984061fca1826040c399715ce7ed385c4dd0d40225691" - "2451e03452d3c961614eb458f188e3e8d2782916c43dbe2e571251ce38262", + "6d48e4c7ed523405f629b4cc83062f13029c4d615bbacb8b97f5e56f0c7ac9bc1" + "d4e23809889fa061425c984061fca1826040c399715ce7ed385c4dd0d40225691" + "2451e03452d3c961614eb458f188e3e8d2782916c43dbe2e571251ce38262", "p": "ff600483db6abfc5b45eab78594b3533d550d9f1bf2a992a7a8daa6dc34f8045a" - "d4e6e0c429d334eeeaaefd7e23d4810be00e4cc1492cba325ba81ff2d5a5b305a" - "8d17eb3bf4a06a349d392e00d329744a5179380344e82a18c47933438f891e22a" - "eef812d69c8f75e326cb70ea000c3f776dfdbd604638c2ef717fc26d02e17", + "d4e6e0c429d334eeeaaefd7e23d4810be00e4cc1492cba325ba81ff2d5a5b305a" + "8d17eb3bf4a06a349d392e00d329744a5179380344e82a18c47933438f891e22a" + "eef812d69c8f75e326cb70ea000c3f776dfdbd604638c2ef717fc26d02e17", "q": "e21e04f911d1ed7991008ecaab3bf775984309c3", "g": "c52a4a0ff3b7e61fdf1867ce84138369a6154f4afa92966e3c827e25cfa6cf508b" - "90e5de419e1337e07a2e9e2a3cd5dea704d175f8ebf6af397d69e110b96afb17c7" - "a03259329e4829b0d03bbc7896b15b4ade53e130858cc34d96269aa89041f40913" - "6c7242a38895c9d5bccad4f389af1d7a4bd1398bd072dffa896233397a", + 
"90e5de419e1337e07a2e9e2a3cd5dea704d175f8ebf6af397d69e110b96afb17c7" + "a03259329e4829b0d03bbc7896b15b4ade53e130858cc34d96269aa89041f40913" + "6c7242a38895c9d5bccad4f389af1d7a4bd1398bd072dffa896233397a", } @@ -496,9 +510,11 @@ def __init__(self, secrets=None, **kwds): # twiddle any configuration files, but probably not what anyone # wants to use long-term. secrets = None - msgs = ["WARNING: using a randomly-generated token secret.", - "You probably want to set 'secret' or 'secrets_file' in " - "the [hawkauth] section of your configuration"] + msgs = [ + "WARNING: using a randomly-generated token secret.", + "You probably want to set 'secret' or 'secrets_file' in " + "the [hawkauth] section of your configuration", + ] for msg in msgs: print("warn:", msg) elif isinstance(secrets, (str, list)): @@ -530,7 +546,7 @@ def _parse_settings(cls, settings): for name in settings.keys(): if name.startswith(secrets_prefix): secrets[name[len(secrets_prefix):]] = settings.pop(name) - kwds['secrets'] = secrets + kwds["secrets"] = secrets return kwds def decode_hawk_id(self, request, tokenid): @@ -795,16 +811,31 @@ def run_live_functional_tests(TestCaseClass, argv=None): usage = "Usage: %prog [options] " parser = optparse.OptionParser(usage=usage) - parser.add_option("-x", "--failfast", action="store_true", - help="stop after the first failed test") - parser.add_option("", "--config-file", - help="name of the config file in use by the server") - parser.add_option("", "--use-token-server", action="store_true", - help="the given URL is a tokenserver, not an endpoint") - parser.add_option("", "--email", - help="email address to use for tokenserver tests") - parser.add_option("", "--audience", - help="assertion audience to use for tokenserver tests") + parser.add_option( + "-x", + "--failfast", + action="store_true", + help="stop after the first failed test", + ) + parser.add_option( + "", + "--config-file", + help="name of the config file in use by the server", + ) + parser.add_option( 
+ "", + "--use-token-server", + action="store_true", + help="the given URL is a tokenserver, not an endpoint", + ) + parser.add_option( + "", "--email", help="email address to use for tokenserver tests" + ) + parser.add_option( + "", + "--audience", + help="assertion audience to use for tokenserver tests", + ) try: opts, args = parser.parse_args(argv) @@ -832,11 +863,12 @@ def run_live_functional_tests(TestCaseClass, argv=None): global global_secret global_secret = host_url.fragment host_url = host_url._replace(fragment="") - os.environ["MOZSVC_TEST_REMOTE"] = 'localhost' + os.environ["MOZSVC_TEST_REMOTE"] = "localhost" # Now use the unittest2 runner to execute them. suite = unittest2.TestSuite() import test_storage + test_prefix = os.environ.get("SYNC_TEST_PREFIX", "test") suite.addTest(unittest2.findTestCases(test_storage, test_prefix)) # suite.addTest(unittest2.makeSuite(LiveTestCases, prefix=test_prefix))