diff --git a/.travis.yml b/.travis.yml
index b3a4a8dfe..77b3938d5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,14 +1,13 @@
dist: trusty
language: python
-python: 2.7
+python: 3.5
cache: pip
services: redis-server
addons:
postgresql: "9.5"
env:
- - TOX_ENV=py27
- - TOX_ENV=py34
- - TOX_ENV=py27-raw
+ - TOX_ENV=py35
+ - TOX_ENV=py35-raw
- TOX_ENV=flake8
- TOX_ENV=docs
install:
@@ -31,9 +30,3 @@ matrix:
- python: 3.6
env:
- TOX_ENV=py36
- - env: ACTION=loadtest_tutorial
- before_script: echo 'Tutorial'
- script: make loadtest-check-tutorial
- - env: ACTION=loadtest_simulation
- before_script: echo 'Simulation'
- script: make loadtest-check-simulation
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 0f933dc7a..884500a20 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -8,11 +8,12 @@ This document describes changes between each past release.
**Breaking changes**
-- Upgraded to PostgreSQL 9.5 (#1056)
+- Remove Python 2.7 support and upgrade to Python 3.5. (#1050)
+- Upgraded the minimum supported PostgreSQL version to 9.5 (#1056)
**Bug fixes**
-- Prevent injections in the PostgreSQL permission backend. (#1061)
+- Prevent injections in the PostgreSQL permission backend (#1061)
- Fix crash on ``If-Match: *`` (#1064)
- Handle Integer overflow in querystring parameters. (#1076)
@@ -20,9 +21,6 @@ This document describes changes between each past release.
- Update the upsert query to use an INSERT or UPDATE on CONFLICT behavior (fixes #1055)
- Remove pypy supports. (#1049)
-
-**Internal changes**
-
- Permission schema children fields are now set during initialization instead of on
deserialization (#1046).
- Request schemas (including validation and deserialization) are now isolated by method
diff --git a/Makefile b/Makefile
index 9e07b8a06..0f30d2117 100644
--- a/Makefile
+++ b/Makefile
@@ -1,9 +1,9 @@
SERVER_CONFIG = config/kinto.ini
-VIRTUALENV = virtualenv
+VIRTUALENV = virtualenv --python=python3
SPHINX_BUILDDIR = docs/_build
VENV := $(shell echo $${VIRTUAL_ENV-.venv})
-PYTHON = $(VENV)/bin/python
+PYTHON = $(VENV)/bin/python3
DEV_STAMP = $(VENV)/.dev_env_installed.stamp
DOC_STAMP = $(VENV)/.doc_env_installed.stamp
INSTALL_STAMP = $(VENV)/.install.stamp
diff --git a/README.rst b/README.rst
index d281628c2..253162966 100644
--- a/README.rst
+++ b/README.rst
@@ -36,5 +36,5 @@ Kinto is a minimalist JSON storage service with synchronisation and sharing abil
Requirements
------------
-* **Python**: 2.7, 3.4+
+* **Python**: 3.5+
* **Backends**: In-memory (development), Postgresql 9.5+ (production)
diff --git a/docs/api/1.x/collections.rst b/docs/api/1.x/collections.rst
index d97015373..5e2f73bda 100644
--- a/docs/api/1.x/collections.rst
+++ b/docs/api/1.x/collections.rst
@@ -598,14 +598,14 @@ Once a schema has been defined, the posted records must match it:
"code": 400,
"details": [
{
- "description": "u'title' is a required property",
+ "description": "'title' is a required property",
"location": "body",
"name": "title"
}
],
"errno": 107,
"error": "Invalid parameters",
- "message": "u'title' is a required property"
+ "message": "'title' is a required property"
}
diff --git a/docs/conf.py b/docs/conf.py
index 15eb80cf3..2452fda30 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# Kinto documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 2 15:08:06 2015.
@@ -64,8 +63,8 @@
master_doc = 'index'
# General information about the project.
-project = u'Kinto'
-copyright = u'2015-2017 — Mozilla Services'
+project = 'Kinto'
+copyright = '2015-2017 — Mozilla Services'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -175,8 +174,8 @@ def setup(app):
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- ('index', 'Kinto.tex', u'Kinto Documentation',
- u'Mozilla Services — Da French Team', 'manual'),
+ ('index', 'Kinto.tex', 'Kinto Documentation',
+ 'Mozilla Services — Da French Team', 'manual'),
]
@@ -185,8 +184,8 @@ def setup(app):
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- ('index', 'kinto', u'Kinto Documentation',
- [u'Mozilla Services — Da French Team'], 1)
+ ('index', 'kinto', 'Kinto Documentation',
+ ['Mozilla Services — Da French Team'], 1)
]
@@ -196,8 +195,8 @@ def setup(app):
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- ('index', 'Kinto', u'Kinto Documentation',
- u'Mozilla Services — Da French Team', 'Kinto',
+ ('index', 'Kinto', 'Kinto Documentation',
+ 'Mozilla Services — Da French Team', 'Kinto',
'A remote storage service with syncing and sharing abilities.',
'Miscellaneous'),
]
diff --git a/docs/core/notifications.rst b/docs/core/notifications.rst
index 5dc6eb791..1209f959b 100644
--- a/docs/core/notifications.rst
+++ b/docs/core/notifications.rst
@@ -125,8 +125,8 @@ Example, when deleting a collection with two records:
::
>>> event.impacted_records
- [{'old': {'deleted': True, 'last_modified': 1447240896769, 'id': u'a1f4af60-ddf5-4c49-933f-4cfeff18ad07'}},
- {'old': {'deleted': True, 'last_modified': 1447240896770, 'id': u'7a6916aa-0ea1-42a7-9741-c24fe13cb70b'}}]
+ [{'old': {'deleted': True, 'last_modified': 1447240896769, 'id': 'a1f4af60-ddf5-4c49-933f-4cfeff18ad07'}},
+ {'old': {'deleted': True, 'last_modified': 1447240896770, 'id': '7a6916aa-0ea1-42a7-9741-c24fe13cb70b'}}]
Event listeners
diff --git a/docs/core/permission.rst b/docs/core/permission.rst
index c52cda53f..fc7ac4df8 100644
--- a/docs/core/permission.rst
+++ b/docs/core/permission.rst
@@ -294,7 +294,7 @@ on the resource during registration.
class MyViewSet(resource.ViewSet):
def get_view_arguments(self, endpoint_type, resource_cls, method):
- args = super(MyViewSet, self).get_view_arguments(endpoint_type,
+ args = super().get_view_arguments(endpoint_type,
resource_cls,
method)
if method.lower() not in ('get', 'head'):
@@ -302,7 +302,7 @@ on the resource during registration.
return args
def get_service_arguments(self):
- args = super(MyViewSet, self).get_service_arguments()
+ args = super().get_service_arguments()
args['factory'] = myapp.MyRootFactory
return args
@@ -323,7 +323,7 @@ For example, a simplistic example with the previous resource viewset:
from pyramid.security import IAuthorizationPolicy
- class MyRootFactory(object):
+ class MyRootFactory:
def __init__(self, request):
self.current_resource = None
service = request.current_service
@@ -332,7 +332,7 @@ For example, a simplistic example with the previous resource viewset:
@implementer(IAuthorizationPolicy)
- class AuthorizationPolicy(object):
+ class AuthorizationPolicy:
def permits(self, context, principals, permission):
if context.current_resource == BlogArticle:
if permission == 'publish':
diff --git a/docs/core/resource.rst b/docs/core/resource.rst
index 88277c013..515c836a4 100644
--- a/docs/core/resource.rst
+++ b/docs/core/resource.rst
@@ -34,7 +34,7 @@ Full example
schema = BookmarkSchema
def process_record(self, new, old=None):
- new = super(Bookmark, self).process_record(new, old)
+ new = super().process_record(new, old)
if new['device'] != old['device']:
new['device'] = self.request.headers.get('User-Agent')
@@ -136,7 +136,7 @@ a custom model can be plugged-in:
class TrackedModel(resource.Model):
def create_record(self, record, parent_id=None):
- record = super(TrackedModel, self).create_record(record, parent_id)
+ record = super().create_record(record, parent_id)
trackid = index.track(record)
record['trackid'] = trackid
return record
@@ -241,7 +241,7 @@ or at the resource level:
@resource.register()
class Mushroom(resource.UserResource):
def __init__(request):
- super(Mushroom, self).__init__(request)
+ super().__init__(request)
self.model.id_generator = MsecId()
diff --git a/docs/requirements.txt b/docs/requirements.txt
index c6cf992d8..66173d2e3 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -6,5 +6,4 @@ kinto-redis
mock
webtest
cornice
-enum
pyramid
diff --git a/docs/tutorials/notifications-custom.rst b/docs/tutorials/notifications-custom.rst
index 1029224d4..2a0291dcd 100644
--- a/docs/tutorials/notifications-custom.rst
+++ b/docs/tutorials/notifications-custom.rst
@@ -321,6 +321,6 @@ But 2 seconds later, look at the worker output:
::
- {u'resource_name': u'record', u'user_id': u'basicauth:fea1e21d339299506d89e60f048cefd5b424ea641ba48267c35a4ce921439fa4', u'timestamp': 1453459942672, u'uri': u'/buckets/c8c94a74-5bf6-9fb0-5b72-b0777da6718e/collections/assets/records', u'bucket_id': u'c8c94a74-5bf6-9fb0-5b72-b0777da6718e', u'action': u'create', u'collection_id': u'assets'}
+ {'resource_name': 'record', 'user_id': 'basicauth:fea1e21d339299506d89e60f048cefd5b424ea641ba48267c35a4ce921439fa4', 'timestamp': 1453459942672, 'uri': '/buckets/c8c94a74-5bf6-9fb0-5b72-b0777da6718e/collections/assets/records', 'bucket_id': 'c8c94a74-5bf6-9fb0-5b72-b0777da6718e', 'action': 'create', 'collection_id': 'assets'}
It worked!
diff --git a/docs/tutorials/write-plugin.rst b/docs/tutorials/write-plugin.rst
index 7611e8ef7..887ecca9d 100644
--- a/docs/tutorials/write-plugin.rst
+++ b/docs/tutorials/write-plugin.rst
@@ -85,7 +85,7 @@ It is a wrapper basically, and the code is kept simple for the simplicity of thi
import elasticsearch
- class Indexer(object):
+ class Indexer:
def __init__(self, hosts):
self.client = elasticsearch.Elasticsearch(hosts)
diff --git a/kinto/__main__.py b/kinto/__main__.py
index 3b02cf5d1..3930afda0 100644
--- a/kinto/__main__.py
+++ b/kinto/__main__.py
@@ -4,7 +4,6 @@
import sys
import logging
import logging.config
-from six.moves import input
from kinto.core import scripts
from pyramid.scripts import pserve
@@ -103,7 +102,7 @@ def main(args=None):
if which_command == 'init':
if os.path.exists(config_file):
- print("%s already exists." % config_file, file=sys.stderr)
+ print("{} already exists.".format(config_file), file=sys.stderr)
return 1
backend = parsed_args['backend']
@@ -150,7 +149,7 @@ def main(args=None):
pserve_argv = ['pserve', config_file]
if parsed_args['reload']:
pserve_argv.append('--reload')
- pserve_argv.append('http_port=%s' % parsed_args['port'])
+ pserve_argv.append('http_port={}'.format(parsed_args['port']))
pserve.main(pserve_argv)
elif which_command == 'version':
diff --git a/kinto/authorization.py b/kinto/authorization.py
index 4e67edba7..ceef8bea2 100644
--- a/kinto/authorization.py
+++ b/kinto/authorization.py
@@ -119,8 +119,8 @@ def _relative_object_uri(resource_name, object_uri):
if resource_name == parent_resource_name:
return parent_uri
- error_msg = 'Cannot get URL of resource %r from parent %r.'
- raise ValueError(error_msg % (resource_name, object_uri))
+ error_msg = "Cannot get URL of resource '{}' from parent '{}'."
+ raise ValueError(error_msg.format(resource_name, object_uri))
def _inherited_permissions(object_uri, permission):
@@ -142,7 +142,7 @@ def _inherited_permissions(object_uri, permission):
# When requesting permissions for a single object, we check if they are any
# specific inherited permissions for the attributes.
- attributes_permission = '%s:attributes' % permission if not plural else permission
+ attributes_permission = '{}:attributes'.format(permission) if not plural else permission
inherited_perms = object_perms_tree.get(attributes_permission, object_perms_tree[permission])
granters = set()
diff --git a/kinto/config/__init__.py b/kinto/config/__init__.py
index 1406a911d..cb2b38b18 100644
--- a/kinto/config/__init__.py
+++ b/kinto/config/__init__.py
@@ -21,7 +21,7 @@ def render_template(template, destination, **kwargs):
with codecs.open(template, 'r', encoding='utf-8') as f:
raw_template = f.read()
- rendered = raw_template.format(**kwargs)
+ rendered = raw_template.format_map(kwargs)
with codecs.open(destination, 'w+', encoding='utf-8') as output:
output.write(rendered)
@@ -33,12 +33,11 @@ def init(config_file, backend, host='127.0.0.1'):
values['secret'] = core_utils.random_bytes_hex(32)
values['kinto_version'] = __version__
- values['config_file_timestamp'] = core_utils._encoded(
- strftime('%a, %d %b %Y %H:%M:%S %z'))
+ values['config_file_timestamp'] = str(strftime('%a, %d %b %Y %H:%M:%S %z'))
- values['storage_backend'] = "kinto.core.storage.%s" % backend
- values['cache_backend'] = "kinto.core.cache.%s" % backend
- values['permission_backend'] = "kinto.core.permission.%s" % backend
+ values['storage_backend'] = "kinto.core.storage.{}".format(backend)
+ values['cache_backend'] = "kinto.core.cache.{}".format(backend)
+ values['permission_backend'] = "kinto.core.permission.{}".format(backend)
if backend == 'postgresql':
postgresql_url = "postgres://postgres:postgres@localhost/postgres"
diff --git a/kinto/core/__init__.py b/kinto/core/__init__.py
index d6fb0b602..c64ac7481 100644
--- a/kinto/core/__init__.py
+++ b/kinto/core/__init__.py
@@ -126,8 +126,8 @@ def includeme(config):
def add_api_capability(config, identifier, description="", url="", **kw):
existing = config.registry.api_capabilities.get(identifier)
if existing:
- error_msg = "The '%s' API capability was already registered (%s)."
- raise ValueError(error_msg % (identifier, existing))
+ error_msg = "The '{}' API capability was already registered ({})."
+ raise ValueError(error_msg.format(identifier, existing))
capability = dict(description=description, url=url, **kw)
config.registry.api_capabilities[identifier] = capability
@@ -174,11 +174,11 @@ def add_api_capability(config, identifier, description="", url="", **kw):
# # Show settings to output.
# for key, value in settings.items():
- # logger.info('Using %s = %s' % (key, value))
+ # logger.info('Using {} = {}'.format(key, value))
# Scan views.
config.scan("kinto.core.views")
# Give sign of life.
- msg = "Running %(project_name)s %(project_version)s."
- logger.info(msg % settings)
+ msg = "Running {project_name} {project_version}."
+ logger.info(msg.format_map(settings))
diff --git a/kinto/core/authentication.py b/kinto/core/authentication.py
index af8911955..76bfdbb72 100644
--- a/kinto/core/authentication.py
+++ b/kinto/core/authentication.py
@@ -13,9 +13,7 @@ class BasicAuthAuthenticationPolicy(base_auth.BasicAuthAuthenticationPolicy):
def __init__(self, *args, **kwargs):
def noop_check(*a):
return []
- super(BasicAuthAuthenticationPolicy, self).__init__(noop_check,
- *args,
- **kwargs)
+ super().__init__(noop_check, *args, **kwargs)
def effective_principals(self, request):
# Bypass default Pyramid construction of principals because
@@ -33,7 +31,7 @@ def unauthenticated_userid(self, request):
return
hmac_secret = settings['userid_hmac_secret']
- credentials = '%s:%s' % credentials
+ credentials = '{}:{}'.format(*credentials)
userid = utils.hmac_digest(hmac_secret, credentials)
return userid
diff --git a/kinto/core/authorization.py b/kinto/core/authorization.py
index 1b6f3690d..c58a8b89a 100644
--- a/kinto/core/authorization.py
+++ b/kinto/core/authorization.py
@@ -1,6 +1,5 @@
import functools
-import six
from pyramid.settings import aslist
from pyramid.security import IAuthorizationPolicy, Authenticated
from zope.interface import implementer
@@ -40,7 +39,7 @@ def groupfinder(userid, request):
@implementer(IAuthorizationPolicy)
-class AuthorizationPolicy(object):
+class AuthorizationPolicy:
"""Default authorization class, that leverages the permission backend
for shareable resources.
"""
@@ -59,7 +58,7 @@ def permits(self, context, principals, permission):
if permission == DYNAMIC:
permission = context.required_permission
- create_permission = '%s:create' % context.resource_name
+ create_permission = '{}:create'.format(context.resource_name)
if permission == 'create':
permission = create_permission
@@ -107,7 +106,7 @@ def principals_allowed_by_permission(self, context, permission):
raise NotImplementedError() # PRAGMA NOCOVER
-class RouteFactory(object):
+class RouteFactory:
resource_name = None
on_collection = False
required_permission = None
@@ -164,7 +163,7 @@ def check_permission(self, principals, bound_perms):
if not bound_perms:
bound_perms = [(self.resource_name, self.required_permission)]
for (_, permission) in bound_perms:
- setting = '%s_%s_principals' % (self.resource_name, permission)
+ setting = '{}_{}_principals'.format(self.resource_name, permission)
allowed_principals = aslist(self._settings.get(setting, ''))
if allowed_principals:
if bool(set(allowed_principals) & set(principals)):
@@ -209,8 +208,7 @@ def get_permission_object_id(self, request, object_id=None):
# With the current request on a collection, the record URI must
# be found out by inspecting the collection service and its sibling
# record service.
- matchdict = request.matchdict.copy()
- matchdict['id'] = object_id
+ matchdict = {**request.matchdict, 'id': object_id}
try:
object_uri = utils.instance_uri(request,
self.resource_name,
@@ -220,7 +218,7 @@ def get_permission_object_id(self, request, object_id=None):
# Maybe the resource has no single record endpoint.
# We consider that object URIs in permissions backend will
# be stored naively:
- object_uri = object_uri + '/' + object_id
+ object_uri = '{}/{}'.format(object_uri, object_id)
return object_uri
@@ -242,8 +240,8 @@ def _find_required_permission(self, request, service):
required_permission = self.method_permissions.get(method)
# For create permission, the object id is the plural endpoint.
- collection_path = six.text_type(service.collection_path)
- collection_path = collection_path.format(**request.matchdict)
+ collection_path = str(service.collection_path)
+ collection_path = collection_path.format_map(request.matchdict)
# In the case of a "PUT", check if the targetted record already
# exists, return "write" if it does, "create" otherwise.
diff --git a/kinto/core/cache/__init__.py b/kinto/core/cache/__init__.py
index 13b08f8ef..23ecdfed2 100644
--- a/kinto/core/cache/__init__.py
+++ b/kinto/core/cache/__init__.py
@@ -8,7 +8,7 @@
_HEARTBEAT_TTL_SECONDS = 3600
-class CacheBase(object):
+class CacheBase:
def __init__(self, *args, **kwargs):
self.prefix = kwargs['cache_prefix']
diff --git a/kinto/core/cache/memory.py b/kinto/core/cache/memory.py
index 5508d0ae0..66bf88786 100644
--- a/kinto/core/cache/memory.py
+++ b/kinto/core/cache/memory.py
@@ -15,7 +15,7 @@ class Cache(CacheBase):
"""
def __init__(self, *args, **kwargs):
- super(Cache, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.flush()
def initialize_schema(self, dry_run=False):
@@ -61,7 +61,7 @@ def set(self, key, value, ttl=None):
if ttl is not None:
self.expire(key, ttl)
else:
- logger.warning("No TTL for cache key %r" % key)
+ logger.warning("No TTL for cache key '{}'".format(key))
item_key = self.prefix + key
self._store[item_key] = value
self._created_at[item_key] = msec_time()
diff --git a/kinto/core/cache/postgresql/__init__.py b/kinto/core/cache/postgresql/__init__.py
index 93f025076..60279d689 100644
--- a/kinto/core/cache/postgresql/__init__.py
+++ b/kinto/core/cache/postgresql/__init__.py
@@ -62,7 +62,7 @@ class Cache(CacheBase):
:noindex:
""" # NOQA
def __init__(self, client, *args, **kwargs):
- super(Cache, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.client = client
def initialize_schema(self, dry_run=False):
@@ -83,7 +83,7 @@ def initialize_schema(self, dry_run=False):
sql_file = os.path.join(here, 'schema.sql')
if dry_run:
- logger.info("Create cache schema from %s" % sql_file)
+ logger.info("Create cache schema from '{}'".format(sql_file))
return
# Since called outside request, force commit.
@@ -123,7 +123,7 @@ def expire(self, key, ttl):
def set(self, key, value, ttl=None):
if ttl is None:
- logger.warning("No TTL for cache key %r" % key)
+ logger.warning("No TTL for cache key '{}'".format(key))
query = """
INSERT INTO cache (key, value, ttl)
VALUES (:key, :value, sec2ttl(:ttl))
diff --git a/kinto/core/cache/testing.py b/kinto/core/cache/testing.py
index 81065bec9..993643b8b 100644
--- a/kinto/core/cache/testing.py
+++ b/kinto/core/cache/testing.py
@@ -7,12 +7,12 @@
from kinto.core.cache import heartbeat
-class CacheTest(object):
+class CacheTest:
backend = None
settings = {}
def setUp(self):
- super(CacheTest, self).setUp()
+ super().setUp()
self.cache = self.backend.load_from_config(self._get_config())
self.cache.initialize_schema()
self.request = None
@@ -29,11 +29,11 @@ def _get_config(self, settings=None):
def tearDown(self):
mock.patch.stopall()
- super(CacheTest, self).tearDown()
+ super().tearDown()
self.cache.flush()
def get_backend_prefix(self, prefix):
- settings_prefix = self.settings.copy()
+ settings_prefix = {**self.settings}
settings_prefix['cache_prefix'] = prefix
config_prefix = self._get_config(settings=settings_prefix)
diff --git a/kinto/core/decorators.py b/kinto/core/decorators.py
index d1d0a4395..144f12352 100644
--- a/kinto/core/decorators.py
+++ b/kinto/core/decorators.py
@@ -3,7 +3,7 @@
from pyramid.response import Response
-class cache_forever(object):
+class cache_forever:
def __init__(self, wrapped):
self.wrapped = wrapped
self.saved = None
diff --git a/kinto/core/errors.py b/kinto/core/errors.py
index 27d0237dc..00458f384 100644
--- a/kinto/core/errors.py
+++ b/kinto/core/errors.py
@@ -1,9 +1,8 @@
-import six
from pyramid import httpexceptions
from enum import Enum
from kinto.core.logs import logger
-from kinto.core.utils import json, reapply_cors, encode_header
+from kinto.core.utils import json, reapply_cors
class ERRORS(Enum):
@@ -132,22 +131,22 @@ def json_error_handler(request):
(c.f. HTTP API).
"""
errors = request.errors
- sorted_errors = sorted(errors, key=lambda x: six.text_type(x['name']))
+ sorted_errors = sorted(errors, key=lambda x: str(x['name']))
# In Cornice, we call error handler if at least one error was set.
error = sorted_errors[0]
name = error['name']
description = error['description']
- if isinstance(description, six.binary_type):
+ if isinstance(description, bytes):
description = error['description'].decode('utf-8')
if name is not None:
if name in description:
message = description
else:
- message = '%(name)s in %(location)s: %(description)s' % error
+ message = '{name} in {location}: {description}'.format_map(error)
else:
- message = '%(location)s: %(description)s' % error
+ message = '{location}: {description}'.format_map(error)
response = http_error(httpexceptions.HTTPBadRequest(),
code=errors.status,
@@ -185,8 +184,8 @@ def send_alert(request, message=None, url=None, code='soft-eol'):
if url is None:
url = request.registry.settings['project_docs']
- request.response.headers['Alert'] = encode_header(json.dumps({
+ request.response.headers['Alert'] = json.dumps({
'code': code,
'message': message,
'url': url
- }))
+ })
diff --git a/kinto/core/events.py b/kinto/core/events.py
index 5d255af7b..015537ae8 100644
--- a/kinto/core/events.py
+++ b/kinto/core/events.py
@@ -19,7 +19,7 @@ def from_string_list(elements):
return tuple(ACTIONS(el) for el in elements)
-class _ResourceEvent(object):
+class _ResourceEvent:
def __init__(self, payload, request):
self.payload = payload
self.request = request
@@ -34,7 +34,7 @@ class ResourceRead(_ResourceEvent):
"""Triggered when a resource is being read.
"""
def __init__(self, payload, read_records, request):
- super(ResourceRead, self).__init__(payload, request)
+ super().__init__(payload, request)
self.read_records = read_records
@@ -42,7 +42,7 @@ class ResourceChanged(_ResourceEvent):
"""Triggered when a resource is being changed.
"""
def __init__(self, payload, impacted_records, request):
- super(ResourceChanged, self).__init__(payload, request)
+ super().__init__(payload, request)
self.impacted_records = impacted_records
@@ -50,7 +50,7 @@ class AfterResourceRead(_ResourceEvent):
"""Triggered after a resource was successfully read.
"""
def __init__(self, payload, read_records, request):
- super(AfterResourceRead, self).__init__(payload, request)
+ super().__init__(payload, request)
self.read_records = read_records
@@ -58,7 +58,7 @@ class AfterResourceChanged(_ResourceEvent):
"""Triggered after a resource was successfully changed.
"""
def __init__(self, payload, impacted_records, request):
- super(AfterResourceChanged, self).__init__(payload, request)
+ super().__init__(payload, request)
self.impacted_records = impacted_records
diff --git a/kinto/core/initialization.py b/kinto/core/initialization.py
index b288402dd..267a8c3d7 100644
--- a/kinto/core/initialization.py
+++ b/kinto/core/initialization.py
@@ -80,7 +80,7 @@ def _redirect_to_version_view(request):
querystring = request.url[(request.url.rindex(request.path) +
len(request.path)):]
- redirect = '/%s%s%s' % (route_prefix, request.path, querystring)
+ redirect = '/{}{}{}'.format(route_prefix, request.path, querystring)
raise HTTPTemporaryRedirect(redirect)
# Disable the route prefix passed by the app.
@@ -128,8 +128,7 @@ def on_new_response(event):
# Add backoff in response headers.
backoff = config.registry.settings['backoff']
if backoff is not None:
- backoff = utils.encode_header('%s' % backoff)
- event.response.headers['Backoff'] = backoff
+ event.response.headers['Backoff'] = str(backoff)
config.add_subscriber(on_new_response, NewResponse)
@@ -204,7 +203,7 @@ def setup_storage(config):
storage_mod = config.maybe_dotted(storage_mod)
backend = storage_mod.load_from_config(config)
if not isinstance(backend, storage.StorageBase):
- raise ConfigurationError("Invalid storage backend: %s" % backend)
+ raise ConfigurationError("Invalid storage backend: {}".format(backend))
config.registry.storage = backend
heartbeat = storage.heartbeat(backend)
@@ -220,7 +219,7 @@ def setup_permission(config):
permission_mod = config.maybe_dotted(permission_mod)
backend = permission_mod.load_from_config(config)
if not isinstance(backend, permission.PermissionBase):
- raise ConfigurationError("Invalid permission backend: %s" % backend)
+ raise ConfigurationError("Invalid permission backend: {}".format(backend))
config.registry.permission = backend
heartbeat = permission.heartbeat(backend)
@@ -236,7 +235,7 @@ def setup_cache(config):
cache_mod = config.maybe_dotted(cache_mod)
backend = cache_mod.load_from_config(config)
if not isinstance(backend, cache.CacheBase):
- raise ConfigurationError("Invalid cache backend: %s" % backend)
+ raise ConfigurationError("Invalid cache backend: {}".format(backend))
config.registry.cache = backend
heartbeat = cache.heartbeat(backend)
@@ -279,12 +278,12 @@ def on_new_response(event):
# Count authentication verifications.
if hasattr(request, 'authn_type'):
- client.count('authn_type.%s' % request.authn_type)
+ client.count('authn_type.{}'.format(request.authn_type))
# Count view calls.
service = request.current_service
if service:
- client.count('view.%s.%s' % (service.name, request.method))
+ client.count('view.{}.{}'.format(service.name, request.method))
config.add_subscriber(on_new_response, NewResponse)
@@ -381,25 +380,25 @@ def on_new_response(event):
config.add_subscriber(on_new_response, NewResponse)
-class EventActionFilter(object):
+class EventActionFilter:
def __init__(self, actions, config):
actions = ACTIONS.from_string_list(actions)
self.actions = [action.value for action in actions]
def phash(self):
- return 'for_actions = %s' % (','.join(self.actions))
+ return 'for_actions = {}'.format(','.join(self.actions))
def __call__(self, event):
action = event.payload.get('action')
return not action or action in self.actions
-class EventResourceFilter(object):
+class EventResourceFilter:
def __init__(self, resources, config):
self.resources = resources
def phash(self):
- return 'for_resources = %s' % (','.join(self.resources))
+ return 'for_resources = {}'.format(','.join(self.resources))
def __call__(self, event):
resource = event.payload.get('resource_name')
@@ -417,17 +416,17 @@ def setup_listeners(config):
listeners = aslist(settings['event_listeners'])
for name in listeners:
- logger.info('Setting up %r listener' % name)
- prefix = 'event_listeners.%s.' % name
+ logger.info("Setting up '{}' listener".format(name))
+ prefix = 'event_listeners.{}.'.format(name)
try:
listener_mod = config.maybe_dotted(name)
- prefix = 'event_listeners.%s.' % name.split('.')[-1]
+ prefix = 'event_listeners.{}.'.format(name.split('.')[-1])
listener = listener_mod.load_from_config(config, prefix)
except (ImportError, AttributeError):
module_setting = prefix + "use"
# Read from ENV or settings.
- module_value = utils.read_env(project_name + "." + module_setting,
+ module_value = utils.read_env('{}.{}'.format(project_name, module_setting),
settings.get(module_setting))
listener_mod = config.maybe_dotted(module_value)
listener = listener_mod.load_from_config(config, prefix)
@@ -435,13 +434,13 @@ def setup_listeners(config):
# If StatsD is enabled, monitor execution time of listeners.
if getattr(config.registry, "statsd", None):
statsd_client = config.registry.statsd
- key = 'listeners.%s' % name
+ key = 'listeners.{}'.format(name)
listener = statsd_client.timer(key)(listener.__call__)
# Optional filter by event action.
actions_setting = prefix + "actions"
# Read from ENV or settings.
- actions_value = utils.read_env(project_name + "." + actions_setting,
+ actions_value = utils.read_env('{}.{}'.format(project_name, actions_setting),
settings.get(actions_setting, ""))
actions = aslist(actions_value)
if len(actions) > 0:
@@ -452,7 +451,7 @@ def setup_listeners(config):
# Optional filter by event resource name.
resource_setting = prefix + "resources"
# Read from ENV or settings.
- resource_value = utils.read_env(project_name + "." + resource_setting,
+ resource_value = utils.read_env('{}.{}'.format(project_name, resource_setting),
settings.get(resource_setting, ""))
resource_names = aslist(resource_value)
@@ -479,8 +478,8 @@ def _prefixed_keys(key):
unprefixed = key
if key.startswith('kinto.') or key.startswith(project_name + '.'):
unprefixed = key.split('.', 1)[1]
- project_prefix = project_name + '.' + unprefixed
- kinto_prefix = 'kinto.' + unprefixed
+ project_prefix = '{}.{}'.format(project_name, unprefixed)
+ kinto_prefix = 'kinto.{}'.format(unprefixed)
return unprefixed, project_prefix, kinto_prefix
# Fill settings with default values if not defined.
@@ -499,7 +498,7 @@ def _prefixed_keys(key):
if len(defined) > 1 and len(distinct_values) > 1:
names = "', '".join(defined)
- raise ValueError("Settings '%s' are in conflict." % names)
+ raise ValueError("Settings '{}' are in conflict.".format(names))
# Maintain backwards compatibility with old settings files that
# have backend settings like cliquet.foo (which is now
@@ -556,7 +555,7 @@ def initialize(config, version=None, project_name='', default_settings=None):
if not project_name:
warnings.warn('No value specified for `project_name`')
- kinto_core_defaults = DEFAULT_SETTINGS.copy()
+ kinto_core_defaults = {**DEFAULT_SETTINGS}
if default_settings:
kinto_core_defaults.update(default_settings)
@@ -570,7 +569,7 @@ def initialize(config, version=None, project_name='', default_settings=None):
# Override project version from settings.
project_version = settings.get('project_version') or version
if not project_version:
- error_msg = "Invalid project version: %s" % project_version
+ error_msg = "Invalid project version: {}".format(project_version)
raise ConfigurationError(error_msg)
settings['project_version'] = project_version = str(project_version)
@@ -580,7 +579,7 @@ def initialize(config, version=None, project_name='', default_settings=None):
# The API version is derivated from the module version if not provided.
http_api_version = '.'.join(project_version.split('.')[0:2])
settings['http_api_version'] = http_api_version = str(http_api_version)
- api_version = 'v%s' % http_api_version.split('.')[0]
+ api_version = 'v{}'.format(http_api_version.split('.')[0])
# Include kinto.core views with the correct api version prefix.
config.include("kinto.core", route_prefix=api_version)
diff --git a/kinto/core/listeners/__init__.py b/kinto/core/listeners/__init__.py
index ae116c82d..4ba5e3e18 100644
--- a/kinto/core/listeners/__init__.py
+++ b/kinto/core/listeners/__init__.py
@@ -1,4 +1,4 @@
-class ListenerBase(object):
+class ListenerBase:
def __init__(self, *args, **kwargs):
pass
diff --git a/kinto/core/logs.py b/kinto/core/logs.py
index 19b976a84..a5771f09f 100644
--- a/kinto/core/logs.py
+++ b/kinto/core/logs.py
@@ -1,7 +1,6 @@
import os
import colorama
-import six
import structlog
from kinto.core import utils
@@ -12,12 +11,12 @@
def decode_value(value):
try:
- return six.text_type(value)
+ return str(value)
except UnicodeDecodeError: # pragma: no cover
- return six.binary_type(value).decode('utf-8')
+ return bytes(value).decode('utf-8')
-class ClassicLogRenderer(object):
+class ClassicLogRenderer:
"""Classic log output for structlog.
::
@@ -37,21 +36,21 @@ def __call__(self, logger, name, event_dict):
if 'path' in event_dict:
pattern = (BRIGHT +
- u'"{method: <5} {path}{querystring}"' +
+ '"{method: <5} {path}{querystring}"' +
RESET_ALL +
- YELLOW + u' {code} ({t} ms)' +
+ YELLOW + ' {code} ({t} ms)' +
RESET_ALL +
- u' {event} {context}')
+ ' {event} {context}')
else:
- pattern = u'{event} {context}'
+ pattern = '{event} {context}'
output = {}
for field in ['method', 'path', 'code', 't', 'event']:
output[field] = decode_value(event_dict.pop(field, '?'))
querystring = event_dict.pop('querystring', {})
- params = [decode_value('%s=%s' % qs) for qs in querystring.items()]
- output['querystring'] = '?%s' % '&'.join(params) if params else ''
+ params = [decode_value('{}={}'.format(*qs)) for qs in querystring.items()]
+        output['querystring'] = '?{}'.format('&'.join(params)) if params else ''
output['context'] = " ".join(
CYAN + key + RESET_ALL +
@@ -61,11 +60,11 @@ def __call__(self, logger, name, event_dict):
for key in sorted(event_dict.keys())
)
- log_msg = pattern.format(**output)
+ log_msg = pattern.format_map(output)
return log_msg
-class MozillaHekaRenderer(object):
+class MozillaHekaRenderer:
"""Build structured log entries as expected by Mozilla Services standard:
* https://mana.mozilla.org/wiki/display/CLOUDSERVICES/Logging+Standard
@@ -74,7 +73,7 @@ class MozillaHekaRenderer(object):
ENV_VERSION = '2.0'
def __init__(self, settings):
- super(MozillaHekaRenderer, self).__init__()
+ super().__init__()
self.appname = settings['project_name']
self.hostname = utils.read_env('HOSTNAME', os.uname()[1])
self.pid = os.getpid()
@@ -118,7 +117,7 @@ def __call__(self, logger, name, event_dict):
if isinstance(value, dict):
value = utils.json.dumps(value)
elif isinstance(value, (list, tuple)):
- if not all([isinstance(i, six.string_types) for i in value]):
+ if not all([isinstance(i, str) for i in value]):
value = utils.json.dumps(value)
event_dict['Fields'][f] = value
diff --git a/kinto/core/permission/__init__.py b/kinto/core/permission/__init__.py
index 00d3192ff..33630a6d9 100644
--- a/kinto/core/permission/__init__.py
+++ b/kinto/core/permission/__init__.py
@@ -6,7 +6,7 @@
__HEARTBEAT_KEY__ = '__heartbeat__'
-class PermissionBase(object):
+class PermissionBase:
def __init__(self, *args, **kwargs):
pass
diff --git a/kinto/core/permission/memory.py b/kinto/core/permission/memory.py
index 5238fd36c..611e824bd 100644
--- a/kinto/core/permission/memory.py
+++ b/kinto/core/permission/memory.py
@@ -15,7 +15,7 @@ class Permission(PermissionBase):
"""
def __init__(self, *args, **kwargs):
- super(Permission, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.flush()
def initialize_schema(self, dry_run=False):
@@ -27,14 +27,14 @@ def flush(self):
@synchronized
def add_user_principal(self, user_id, principal):
- user_key = 'user:%s' % user_id
+ user_key = 'user:{}'.format(user_id)
user_principals = self._store.get(user_key, set())
user_principals.add(principal)
self._store[user_key] = user_principals
@synchronized
def remove_user_principal(self, user_id, principal):
- user_key = 'user:%s' % user_id
+ user_key = 'user:{}'.format(user_id)
user_principals = self._store.get(user_key, set())
try:
user_principals.remove(principal)
@@ -57,7 +57,7 @@ def remove_principal(self, principal):
@synchronized
def get_user_principals(self, user_id):
# Fetch the groups the user is in.
- user_key = 'user:%s' % user_id
+ user_key = 'user:{}'.format(user_id)
members = self._store.get(user_key, set())
# Fetch the groups system.Authenticated is in.
group_authenticated = self._store.get('user:system.Authenticated', set())
@@ -65,14 +65,14 @@ def get_user_principals(self, user_id):
@synchronized
def add_principal_to_ace(self, object_id, permission, principal):
- permission_key = 'permission:%s:%s' % (object_id, permission)
+ permission_key = 'permission:{}:{}'.format(object_id, permission)
object_permission_principals = self._store.get(permission_key, set())
object_permission_principals.add(principal)
self._store[permission_key] = object_permission_principals
@synchronized
def remove_principal_from_ace(self, object_id, permission, principal):
- permission_key = 'permission:%s:%s' % (object_id, permission)
+ permission_key = 'permission:{}:{}'.format(object_id, permission)
object_permission_principals = self._store.get(permission_key, set())
try:
object_permission_principals.remove(principal)
@@ -86,7 +86,7 @@ def remove_principal_from_ace(self, object_id, permission, principal):
@synchronized
def get_object_permission_principals(self, object_id, permission):
- permission_key = 'permission:%s:%s' % (object_id, permission)
+ permission_key = 'permission:{}:{}'.format(object_id, permission)
members = self._store.get(permission_key, set())
return members
@@ -101,7 +101,7 @@ def get_accessible_objects(self, principals, bound_permissions=None, with_childr
else:
for pattern, perm in bound_permissions:
id_match = '.*' if with_children else '[^/]+'
- regexp = re.compile('^%s$' % pattern.replace('*', id_match))
+ regexp = re.compile('^{}$'.format(pattern.replace('*', id_match)))
for key, value in self._store.items():
if key.endswith(perm):
object_id = key.split(':')[1]
@@ -127,9 +127,9 @@ def get_objects_permissions(self, objects_ids, permissions=None):
for object_id in objects_ids:
if permissions is None:
aces = [k for k in self._store.keys()
- if k.startswith('permission:%s:' % object_id)]
+ if k.startswith('permission:{}:'.format(object_id))]
else:
- aces = ['permission:%s:%s' % (object_id, permission)
+ aces = ['permission:{}:{}'.format(object_id, permission)
for permission in permissions]
perms = {}
for ace in aces:
@@ -142,7 +142,7 @@ def get_objects_permissions(self, objects_ids, permissions=None):
@synchronized
def replace_object_permissions(self, object_id, permissions):
for permission, principals in permissions.items():
- permission_key = 'permission:%s:%s' % (object_id, permission)
+ permission_key = 'permission:{}:{}'.format(object_id, permission)
if permission_key in self._store and len(principals) == 0:
del self._store[permission_key]
else:
@@ -155,7 +155,7 @@ def delete_object_permissions(self, *object_id_list):
for key in self._store.keys():
object_id = key.split(':')[1]
for pattern in object_id_list:
- regexp = re.compile('^%s$' % pattern.replace('*', '.*'))
+ regexp = re.compile('^{}$'.format(pattern.replace('*', '.*')))
if regexp.match(object_id):
to_delete.append(key)
for k in to_delete:
diff --git a/kinto/core/permission/postgresql/__init__.py b/kinto/core/permission/postgresql/__init__.py
index 4aa7d1526..e9055ffb6 100644
--- a/kinto/core/permission/postgresql/__init__.py
+++ b/kinto/core/permission/postgresql/__init__.py
@@ -63,7 +63,7 @@ class Permission(PermissionBase):
:noindex:
""" # NOQA
def __init__(self, client, *args, **kwargs):
- super(Permission, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.client = client
def initialize_schema(self, dry_run=False):
@@ -84,7 +84,7 @@ def initialize_schema(self, dry_run=False):
sql_file = os.path.join(here, 'schema.sql')
if dry_run:
- logger.info("Create permission schema from %s" % sql_file)
+ logger.info("Create permission schema from '{}'".format(sql_file))
return
# Since called outside request, force commit.
@@ -195,12 +195,12 @@ def get_authorized_principals(self, bound_permissions):
query = """
WITH required_perms AS (
- VALUES %s
+ VALUES {}
)
SELECT principal
FROM required_perms JOIN access_control_entries
ON (object_id = column1 AND permission = column2);
- """ % ','.join(perm_values)
+ """.format(','.join(perm_values))
with self.client.connect(readonly=True) as conn:
result = conn.execute(query, placeholders)
results = result.fetchall()
@@ -244,10 +244,10 @@ def get_accessible_objects(self, principals, bound_permissions=None, with_childr
"AND object_id NOT LIKE pattern || '/%'")
query = """
WITH required_perms AS (
- VALUES %(perms)s
+ VALUES {perms}
),
user_principals AS (
- VALUES %(principals)s
+ VALUES {principals}
),
potential_objects AS (
SELECT object_id, permission, required_perms.column1 AS pattern
@@ -259,8 +259,8 @@ def get_accessible_objects(self, principals, bound_permissions=None, with_childr
)
SELECT object_id, permission
FROM potential_objects
- WHERE %(object_id_condition)s;
- """ % dict(perms=','.join(perm_values),
+ WHERE {object_id_condition};
+ """.format(perms=','.join(perm_values),
principals=','.join(principals_values),
object_id_condition=object_id_condition)
@@ -291,7 +291,7 @@ def check_permission(self, principals, bound_permissions):
query = """
WITH required_perms AS (
- VALUES %(perms)s
+ VALUES {perms}
),
allowed_principals AS (
SELECT principal
@@ -299,12 +299,12 @@ def check_permission(self, principals, bound_permissions):
ON (object_id = column1 AND permission = column2)
),
required_principals AS (
- VALUES %(principals)s
+ VALUES {principals}
)
SELECT COUNT(*) AS matched
FROM required_principals JOIN allowed_principals
ON (required_principals.column1 = principal);
- """ % dict(perms=','.join(perms_values),
+ """.format(perms=','.join(perms_values),
principals=','.join(principals_values))
with self.client.connect(readonly=True) as conn:
@@ -321,12 +321,12 @@ def get_objects_permissions(self, objects_ids, permissions=None):
query = """
WITH required_object_ids AS (
- VALUES %(objects_ids)s
+ VALUES {objects_ids}
)
SELECT object_id, permission, principal
FROM required_object_ids JOIN access_control_entries
ON (object_id = column2)
- %(permissions_condition)s
+ {permissions_condition}
ORDER BY column1 ASC;
"""
safeholders = {
@@ -339,7 +339,7 @@ def get_objects_permissions(self, objects_ids, permissions=None):
placeholders["permissions"] = tuple(permissions)
with self.client.connect(readonly=True) as conn:
- result = conn.execute(query % safeholders, placeholders)
+ result = conn.execute(query.format_map(safeholders), placeholders)
rows = result.fetchall()
groupby_id = OrderedDict()
@@ -363,30 +363,30 @@ def replace_object_permissions(self, object_id, permissions):
new_perms = []
specified_perms = []
for i, (perm, principals) in enumerate(permissions.items()):
- placeholders['perm_%s' % i] = perm
- specified_perms.append("(:perm_%s)" % i)
+ placeholders['perm_{}'.format(i)] = perm
+ specified_perms.append("(:perm_{})".format(i))
for principal in set(principals):
j = len(new_perms)
- placeholders['principal_%s' % j] = principal
- new_perms.append("(:perm_%s, :principal_%s)" % (i, j))
+ placeholders['principal_{}'.format(j)] = principal
+ new_perms.append("(:perm_{}, :principal_{})".format(i, j))
delete_query = """
WITH specified_perms AS (
- VALUES %(specified_perms)s
+ VALUES {specified_perms}
)
DELETE FROM access_control_entries
USING specified_perms
WHERE object_id = :object_id AND permission = column1
- """ % dict(specified_perms=','.join(specified_perms))
+ """.format(specified_perms=','.join(specified_perms))
insert_query = """
WITH new_aces AS (
- VALUES %(new_perms)s
+ VALUES {new_perms}
)
INSERT INTO access_control_entries(object_id, permission, principal)
SELECT :object_id, column1, column2
FROM new_aces;
- """ % dict(new_perms=','.join(new_perms))
+ """.format(new_perms=','.join(new_perms))
with self.client.connect() as conn:
conn.execute(delete_query, placeholders)
@@ -405,7 +405,7 @@ def delete_object_permissions(self, *object_id_list):
query = """
WITH object_ids AS (
- VALUES %(object_ids_values)s
+ VALUES {object_ids_values}
)
DELETE FROM access_control_entries
USING object_ids
@@ -414,7 +414,7 @@ def delete_object_permissions(self, *object_id_list):
'object_ids_values': ','.join(object_ids_values)
}
with self.client.connect() as conn:
- conn.execute(query % safeholders, placeholders)
+ conn.execute(query.format_map(safeholders), placeholders)
def load_from_config(config):
diff --git a/kinto/core/permission/testing.py b/kinto/core/permission/testing.py
index b7cc076ba..5ed4821b5 100644
--- a/kinto/core/permission/testing.py
+++ b/kinto/core/permission/testing.py
@@ -7,12 +7,12 @@
from kinto.core.testing import DummyRequest
-class PermissionTest(object):
+class PermissionTest:
backend = None
settings = {}
def setUp(self):
- super(PermissionTest, self).setUp()
+ super().setUp()
self.permission = self.backend.load_from_config(self._get_config())
self.permission.initialize_schema()
self.request = DummyRequest()
@@ -29,7 +29,7 @@ def _get_config(self, settings=None):
def tearDown(self):
mock.patch.stopall()
- super(PermissionTest, self).tearDown()
+ super().tearDown()
self.permission.flush()
def test_backend_error_is_raised_anywhere(self):
diff --git a/kinto/core/resource/__init__.py b/kinto/core/resource/__init__.py
index 136ae9483..9ec209ee6 100644
--- a/kinto/core/resource/__init__.py
+++ b/kinto/core/resource/__init__.py
@@ -3,7 +3,7 @@
import colander
import venusian
-import six
+
from pyramid import exceptions as pyramid_exceptions
from pyramid.decorator import reify
from pyramid.security import Everyone
@@ -17,8 +17,7 @@
from kinto.core.storage import exceptions as storage_exceptions, Filter, Sort
from kinto.core.utils import (
COMPARISON, classname, decode64, encode64, json,
- encode_header, dict_subset, recursive_update_dict,
- apply_json_patch
+ dict_subset, recursive_update_dict, apply_json_patch
)
from .model import Model, ShareableModel
@@ -68,9 +67,9 @@ def register_resource(resource_cls, settings=None, viewset=None, depth=1,
def register_service(endpoint_type, settings):
"""Registers a service in cornice, for the given type.
"""
- path_pattern = getattr(viewset, '%s_path' % endpoint_type)
+ path_pattern = getattr(viewset, '{}_path'.format(endpoint_type))
path_values = {'resource_name': resource_name}
- path = path_pattern.format(**path_values)
+ path = path_pattern.format_map(path_values)
name = viewset.get_service_name(endpoint_type, resource_cls)
@@ -82,17 +81,17 @@ def register_service(endpoint_type, settings):
service.resource = resource_cls
service.type = endpoint_type
# Attach collection and record paths.
- service.collection_path = viewset.collection_path.format(**path_values)
- service.record_path = (viewset.record_path.format(**path_values)
+ service.collection_path = viewset.collection_path.format_map(path_values)
+ service.record_path = (viewset.record_path.format_map(path_values)
if viewset.record_path is not None else None)
- methods = getattr(viewset, '%s_methods' % endpoint_type)
+ methods = getattr(viewset, '{}_methods'.format(endpoint_type))
for method in methods:
if not viewset.is_endpoint_enabled(
endpoint_type, resource_name, method.lower(), settings):
continue
- argument_getter = getattr(viewset, '%s_arguments' % endpoint_type)
+ argument_getter = getattr(viewset, '{}_arguments'.format(endpoint_type))
view_args = argument_getter(resource_cls, method)
view = viewset.get_view(endpoint_type, method.lower())
@@ -133,7 +132,7 @@ def callback(context, name, ob):
return callback
-class UserResource(object):
+class UserResource:
"""Base resource class providing every endpoint."""
default_viewset = ViewSet
@@ -287,7 +286,7 @@ def collection_get(self):
if limit and len(records) == limit and offset < total_records:
lastrecord = records[-1]
next_page = self._next_page_url(sorting, limit, lastrecord, offset)
- headers['Next-Page'] = encode_header(next_page)
+ headers['Next-Page'] = next_page
if partial_fields:
records = [
@@ -297,7 +296,7 @@ def collection_get(self):
# Bind metric about response size.
logger.bind(nb_records=len(records), limit=limit)
- headers['Total-Records'] = encode_header('%s' % total_records)
+ headers['Total-Records'] = str(total_records)
return self.postprocess(records)
@@ -381,12 +380,12 @@ def collection_delete(self):
offset = offset + len(deleted)
if limit and len(deleted) == limit and offset < total_records:
next_page = self._next_page_url(sorting, limit, lastrecord, offset)
- self.request.response.headers['Next-Page'] = encode_header(next_page)
+ self.request.response.headers['Next-Page'] = next_page
else:
self._add_timestamp_header(self.request.response)
headers = self.request.response.headers
- headers['Total-Records'] = encode_header('%s' % total_records)
+ headers['Total-Records'] = str(total_records)
action = len(deleted) > 0 and ACTIONS.DELETE or ACTIONS.READ
return self.postprocess(deleted, action=action, old=records)
@@ -599,7 +598,7 @@ def process_record(self, new, old=None):
.. code-block:: python
def process_record(self, new, old=None):
- new = super(MyResource, self).process_record(new, old)
+ new = super().process_record(new, old)
version = old['version'] if old else 0
new['version'] = version + 1
return new
@@ -611,7 +610,7 @@ def process_record(self, new, old=None):
from kinto.core.errors import raise_invalid
def process_record(self, new, old=None):
- new = super(MyResource, self).process_record(new, old)
+ new = super().process_record(new, old)
if new['browser'] not in request.headers['User-Agent']:
raise_invalid(self.request, name='browser', error='Wrong')
return new
@@ -655,7 +654,7 @@ def apply_changes(self, record, requested_changes):
# Ignore value change if inferior
if record['position'] > changes.get('position', -1):
changes.pop('position', None)
- return super(MyResource, self).apply_changes(record, requested_changes)
+ return super().apply_changes(record, requested_changes)
:raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
if result does not comply with resource schema.
@@ -666,17 +665,17 @@ def apply_changes(self, record, requested_changes):
if self._is_json_patch:
try:
applied_changes = apply_json_patch(record, requested_changes)['data']
- updated = applied_changes.copy()
+ updated = {**applied_changes}
except ValueError as e:
error_details = {
'location': 'body',
- 'description': 'JSON Patch operation failed: %s' % e
+ 'description': 'JSON Patch operation failed: {}'.format(e)
}
raise_invalid(self.request, **error_details)
else:
- applied_changes = requested_changes.copy()
- updated = record.copy()
+ applied_changes = {**requested_changes}
+ updated = {**record}
content_type = str(self.request.headers.get('Content-Type')).lower()
# recursive patch and remove field if null attribute is passed (RFC 7396)
@@ -690,7 +689,7 @@ def apply_changes(self, record, requested_changes):
if self.schema.is_readonly(field) and has_changed:
error_details = {
'name': field,
- 'description': 'Cannot modify {0}'.format(field)
+ 'description': 'Cannot modify {}'.format(field)
}
raise_invalid(self.request, **error_details)
@@ -753,7 +752,7 @@ def _add_timestamp_header(self, response, timestamp=None):
# Pyramid takes care of converting.
response.last_modified = timestamp / 1000.0
# Return timestamp as ETag.
- response.headers['ETag'] = encode_header('"%s"' % timestamp)
+ response.headers['ETag'] = '"{}"'.format(timestamp)
def _add_cache_header(self, response):
"""Add Cache-Control and Expire headers, based a on a setting for the
@@ -770,7 +769,7 @@ def _add_cache_header(self, response):
``304 Not modified`` is returned before serving content from cache.
"""
resource_name = self.context.resource_name if self.context else ''
- setting_key = '%s_cache_expires_seconds' % resource_name
+ setting_key = '{}_cache_expires_seconds'.format(resource_name)
collection_expires = self.request.registry.settings.get(setting_key)
is_anonymous = self.request.prefixed_userid is None
if collection_expires and is_anonymous:
@@ -788,7 +787,7 @@ def _raise_400_if_invalid_id(self, record_id):
:raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
"""
- is_string = isinstance(record_id, six.string_types)
+ is_string = isinstance(record_id, str)
if not is_string or not self.model.id_generator.match(record_id):
error_details = {
'location': 'path',
@@ -883,7 +882,7 @@ def _extract_partial_fields(self):
invalid_fields = set(root_fields) - set(known_fields)
preserve_unknown = self.schema.get_option('preserve_unknown')
if not preserve_unknown and invalid_fields:
- error_msg = "Fields %s do not exist" % ','.join(invalid_fields)
+ error_msg = "Fields {} do not exist".format(','.join(invalid_fields))
error_details = {
'name': "Invalid _fields parameter",
'description': error_msg
@@ -920,7 +919,7 @@ def _extract_filters(self, queryparams=None):
error_details = {
'name': param,
'location': 'querystring',
- 'description': 'Invalid value for %s' % param
+ 'description': 'Invalid value for {}'.format(param)
}
# Ignore specific fields
@@ -957,14 +956,14 @@ def _extract_filters(self, queryparams=None):
operator, field = COMPARISON.EQ, param
if not self.is_known_field(field):
- error_msg = "Unknown filter field '{0}'".format(param)
+ error_msg = "Unknown filter field '{}'".format(param)
error_details['description'] = error_msg
raise_invalid(self.request, **error_details)
if operator in (COMPARISON.IN, COMPARISON.EXCLUDE):
- all_integers = all([isinstance(v, six.integer_types)
+ all_integers = all([isinstance(v, int)
for v in value])
- all_strings = all([isinstance(v, six.text_type)
+ all_strings = all([isinstance(v, str)
for v in value])
has_invalid_value = (
(field == self.model.id_field and not all_strings) or
@@ -991,7 +990,7 @@ def _extract_sorting(self, limit):
if not self.is_known_field(field):
error_details = {
'location': 'querystring',
- 'description': "Unknown sort field '{0}'".format(field)
+ 'description': "Unknown sort field '{}'".format(field)
}
raise_invalid(self.request, **error_details)
@@ -1059,9 +1058,7 @@ def _next_page_url(self, sorting, limit, last_record, offset):
"""Build the Next-Page header from where we stopped."""
token = self._build_pagination_token(sorting, last_record, offset)
- params = self.request.GET.copy()
- params['_limit'] = limit
- params['_token'] = token
+ params = {**self.request.GET, '_limit': limit, '_token': token}
service = self.request.current_service
next_page_url = self.request.route_url(service.name, _query=params,
@@ -1096,7 +1093,7 @@ class ShareableResource(UserResource):
"""List of allowed permissions names."""
def __init__(self, *args, **kwargs):
- super(ShareableResource, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# In base resource, PATCH only hit storage if no data has changed.
# Here, we force update because we add the current principal to
# the ``write`` ACE.
@@ -1132,7 +1129,7 @@ def _extract_filters(self, queryparams=None):
XXX: find more elegant approach to add custom filters.
"""
- filters = super(ShareableResource, self)._extract_filters(queryparams)
+ filters = super()._extract_filters(queryparams)
ids = self.context.shared_ids
if ids is not None:
@@ -1146,15 +1143,15 @@ def _raise_412_if_modified(self, record=None):
Ref: https://github.com/Kinto/kinto/issues/224
"""
if record:
- record = record.copy()
+ record = {**record}
record.pop(self.model.permissions_field, None)
- return super(ShareableResource, self)._raise_412_if_modified(record)
+ return super()._raise_412_if_modified(record)
def process_record(self, new, old=None):
"""Read permissions from request body, and in the case of ``PUT`` every
existing ACE is removed (using empty list).
"""
- new = super(ShareableResource, self).process_record(new, old)
+ new = super().process_record(new, old)
# patch is specified as a list of of operations (RFC 6902)
if self._is_json_patch:
@@ -1163,7 +1160,7 @@ def process_record(self, new, old=None):
else:
permissions = self.request.validated['body'].get('permissions', {})
- annotated = new.copy()
+ annotated = {**new}
if permissions:
is_put = (self.request.method.lower() == 'put')
@@ -1193,6 +1190,6 @@ def postprocess(self, result, action=ACTIONS.READ, old=None):
# Remove permissions from event payload.
old.pop(self.model.permissions_field, None)
- data = super(ShareableResource, self).postprocess(result, action, old)
+ data = super().postprocess(result, action, old)
body.update(data)
return body
diff --git a/kinto/core/resource/model.py b/kinto/core/resource/model.py
index ed5370c76..59c1ee9ee 100644
--- a/kinto/core/resource/model.py
+++ b/kinto/core/resource/model.py
@@ -1,4 +1,4 @@
-class Model(object):
+class Model:
"""A collection stores and manipulate records in its attached storage.
It is not aware of HTTP environment nor HTTP API.
@@ -170,7 +170,7 @@ def create_record(self, record, parent_id=None):
.. code-block:: python
def create_record(self, record):
- record = super(MyModel, self).create_record(record)
+ record = super().create_record(record)
idx = index.store(record)
record['index'] = idx
return record
@@ -199,7 +199,7 @@ def update_record(self, record, parent_id=None):
.. code-block:: python
def update_record(self, record, parent_id=None):
- record = super(MyModel, self).update_record(record, parent_id)
+ record = super().update_record(record, parent_id)
subject = 'Record {} was changed'.format(record[self.id_field])
send_email(subject)
return record
@@ -228,7 +228,7 @@ def delete_record(self, record, parent_id=None, last_modified=None):
.. code-block:: python
def delete_record(self, record):
- deleted = super(MyModel, self).delete_record(record)
+ deleted = super().delete_record(record)
erase_media(record)
deleted['media'] = 0
return deleted
@@ -257,7 +257,7 @@ class ShareableModel(Model):
permissions_field = '__permissions__'
def __init__(self, *args, **kwargs):
- super(ShareableModel, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# Permission backend.
self.permission = None
# Object permission id.
@@ -281,19 +281,14 @@ def _annotate(self, record, perm_object_id):
if len(set(writers) & set(principals)) == 0:
permissions = {}
# Insert the permissions values in the response.
- annotated = record.copy()
- annotated[self.permissions_field] = permissions
+ annotated = {**record, self.permissions_field: permissions}
return annotated
def delete_records(self, filters=None, sorting=None, pagination_rules=None,
limit=None, parent_id=None):
"""Delete permissions when collection records are deleted in bulk.
"""
- deleted = super(ShareableModel, self).delete_records(filters,
- sorting,
- pagination_rules,
- limit,
- parent_id)
+ deleted = super().delete_records(filters, sorting, pagination_rules, limit, parent_id)
# Take a huge shortcut in case we want to delete everything.
if not filters:
perm_ids = [self.get_permission_object_id(object_id='*')]
@@ -306,7 +301,7 @@ def delete_records(self, filters=None, sorting=None, pagination_rules=None,
def get_record(self, record_id, parent_id=None):
"""Fetch current permissions and add them to returned record.
"""
- record = super(ShareableModel, self).get_record(record_id, parent_id)
+ record = super().get_record(record_id, parent_id)
perm_object_id = self.get_permission_object_id(record_id)
return self._annotate(record, perm_object_id)
@@ -317,7 +312,7 @@ def create_record(self, record, parent_id=None):
The current principal is added to the owner (``write`` permission).
"""
permissions = record.pop(self.permissions_field, {})
- record = super(ShareableModel, self).create_record(record, parent_id)
+ record = super().create_record(record, parent_id)
record_id = record[self.id_field]
perm_object_id = self.get_permission_object_id(record_id)
self.permission.replace_object_permissions(perm_object_id, permissions)
@@ -334,7 +329,7 @@ def update_record(self, record, parent_id=None):
The current principal is added to the owner (``write`` permission).
"""
permissions = record.pop(self.permissions_field, {})
- record = super(ShareableModel, self).update_record(record, parent_id)
+ record = super().update_record(record, parent_id)
record_id = record[self.id_field]
perm_object_id = self.get_permission_object_id(record_id)
self.permission.replace_object_permissions(perm_object_id, permissions)
@@ -345,7 +340,7 @@ def update_record(self, record, parent_id=None):
def delete_record(self, record_id, parent_id=None, last_modified=None):
"""Delete record and its associated permissions.
"""
- record = super(ShareableModel, self).delete_record(
+ record = super().delete_record(
record_id, parent_id, last_modified=last_modified)
perm_object_id = self.get_permission_object_id(record_id)
self.permission.delete_object_permissions(perm_object_id)
diff --git a/kinto/core/resource/schema.py b/kinto/core/resource/schema.py
index f21345c9f..43ce5a8a7 100644
--- a/kinto/core/resource/schema.py
+++ b/kinto/core/resource/schema.py
@@ -19,7 +19,7 @@ def __init__(self, *args, **kwargs):
message = ("`kinto.core.resource.schema.TimeStamp` is deprecated, "
"use `kinto.core.schema.TimeStamp` instead.")
warnings.warn(message, DeprecationWarning)
- super(TimeStamp, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class URL(URL):
@@ -29,7 +29,7 @@ def __init__(self, *args, **kwargs):
message = ("`kinto.core.resource.schema.URL` is deprecated, "
"use `kinto.core.schema.URL` instead.")
warnings.warn(message, DeprecationWarning)
- super(URL, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# Resource related schemas
@@ -107,7 +107,7 @@ class PermissionsSchema(colander.SchemaNode):
def __init__(self, *args, **kwargs):
self.known_perms = kwargs.pop('permissions', tuple())
- super(PermissionsSchema, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
for perm in self.known_perms:
self[perm] = self._get_node_principals(perm)
@@ -122,13 +122,13 @@ def deserialize(self, cstruct=colander.null):
# If permissions are not a mapping (e.g null or invalid), try deserializing
if not isinstance(cstruct, dict):
- return super(PermissionsSchema, self).deserialize(cstruct)
+ return super().deserialize(cstruct)
# If permissions are listed, check fields and produce fancy error messages
if self.known_perms:
for perm in cstruct:
colander.OneOf(choices=self.known_perms)(self, perm)
- return super(PermissionsSchema, self).deserialize(cstruct)
+ return super().deserialize(cstruct)
# Else deserialize the fields that are not on the schema
permissions = {}
@@ -194,7 +194,7 @@ def deserialize(self, cstruct=colander.null):
"""
values = {}
- schema_values = super(QuerySchema, self).deserialize(cstruct)
+ schema_values = super().deserialize(cstruct)
if schema_values is colander.drop:
return schema_values
diff --git a/kinto/core/resource/viewset.py b/kinto/core/resource/viewset.py
index b7277b771..9ad39669f 100644
--- a/kinto/core/resource/viewset.py
+++ b/kinto/core/resource/viewset.py
@@ -35,7 +35,7 @@ def schema_type():
return colander.Mapping(unknown='preserve')
-class ViewSet(object):
+class ViewSet:
"""The default ViewSet object.
A viewset contains all the information needed to register
@@ -117,16 +117,16 @@ def get_view_arguments(self, endpoint_type, resource_cls, method):
:param resource_cls: the resource class.
:param str method: the HTTP method.
"""
- args = self.default_arguments.copy()
+ args = {**self.default_arguments}
default_arguments = getattr(self,
- 'default_%s_arguments' % endpoint_type)
+ 'default_{}_arguments'.format(endpoint_type))
args.update(**default_arguments)
- by_http_verb = 'default_%s_arguments' % method.lower()
+ by_http_verb = 'default_{}_arguments'.format(method.lower())
method_args = getattr(self, by_http_verb, {})
args.update(**method_args)
- by_method = '%s_%s_arguments' % (endpoint_type, method.lower())
+ by_method = '{}_{}_arguments'.format(endpoint_type, method.lower())
endpoint_args = getattr(self, by_method, {})
args.update(**endpoint_args)
@@ -167,7 +167,7 @@ def get_view(self, endpoint_type, method):
"""
if endpoint_type == 'record':
return method.lower()
- return '%s_%s' % (endpoint_type, method.lower())
+ return '{}_{}'.format(endpoint_type, method.lower())
def get_name(self, resource_cls):
"""Returns the name of the resource.
@@ -194,7 +194,7 @@ def get_service_name(self, endpoint_type, resource_cls):
endpoint_type=endpoint_type)
def get_service_arguments(self):
- return self.service_arguments.copy()
+ return {**self.service_arguments}
def is_endpoint_enabled(self, endpoint_type, resource_name, method,
settings):
@@ -208,7 +208,7 @@ def is_endpoint_enabled(self, endpoint_type, resource_name, method,
if readonly_enabled and not readonly_method:
return False
- setting_enabled = '%s_%s_%s_enabled' % (
+ setting_enabled = '{}_{}_{}_enabled'.format(
endpoint_type, resource_name, method.lower())
return asbool(settings.get(setting_enabled, True))
@@ -231,13 +231,11 @@ def get_record_schema(self, resource_cls, method):
return record_schema
def get_view_arguments(self, endpoint_type, resource_cls, method):
- args = super(ShareableViewSet, self).get_view_arguments(endpoint_type,
- resource_cls,
- method)
+ args = super().get_view_arguments(endpoint_type, resource_cls, method)
args['permission'] = authorization.DYNAMIC
return args
def get_service_arguments(self):
- args = super(ShareableViewSet, self).get_service_arguments()
+ args = super().get_service_arguments()
args['factory'] = self.factory
return args
diff --git a/kinto/core/schema.py b/kinto/core/schema.py
index 8051fa8e2..d9db96f40 100644
--- a/kinto/core/schema.py
+++ b/kinto/core/schema.py
@@ -7,10 +7,9 @@
- If a schema is view specific, you should declare it on the respective view.
"""
-import six
import colander
-from kinto.core.utils import strip_whitespace, msec_time, decode_header, native_value
+from kinto.core.utils import strip_whitespace, msec_time, native_value
class TimeStamp(colander.SchemaNode):
@@ -70,9 +69,9 @@ class HeaderField(colander.SchemaNode):
missing = colander.drop
def deserialize(self, cstruct=colander.null):
- if isinstance(cstruct, six.binary_type):
+ if isinstance(cstruct, bytes):
try:
- cstruct = decode_header(cstruct)
+ cstruct = cstruct.decode('utf-8')
except UnicodeDecodeError:
raise colander.Invalid(self, msg='Headers should be UTF-8 encoded')
return super(HeaderField, self).deserialize(cstruct)
@@ -84,7 +83,7 @@ class QueryField(colander.SchemaNode):
missing = colander.drop
def deserialize(self, cstruct=colander.null):
- if isinstance(cstruct, six.string_types):
+ if isinstance(cstruct, str):
cstruct = native_value(cstruct)
return super(QueryField, self).deserialize(cstruct)
@@ -98,7 +97,7 @@ class FieldList(QueryField):
fields = colander.SchemaNode(colander.String(), missing=colander.drop)
def deserialize(self, cstruct=colander.null):
- if isinstance(cstruct, six.string_types):
+ if isinstance(cstruct, str):
cstruct = cstruct.split(',')
return super(FieldList, self).deserialize(cstruct)
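
With ``six`` gone, ``HeaderField.deserialize`` decodes raw bytes itself instead of calling the
removed ``decode_header`` helper. A small sketch of the same guard, using an illustrative
``to_text`` helper that is not part of the patch:

.. code-block:: python

    def to_text(value):
        """Return the header value as text, decoding bytes as UTF-8."""
        if isinstance(value, bytes):
            try:
                return value.decode('utf-8')
            except UnicodeDecodeError:
                raise ValueError('Headers should be UTF-8 encoded')
        return value

    assert to_text(b'application/json') == 'application/json'
    assert to_text('already text') == 'already text'
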
diff --git a/kinto/core/scripts.py b/kinto/core/scripts.py
index 12e21cbcc..7317cbc0b 100644
--- a/kinto/core/scripts.py
+++ b/kinto/core/scripts.py
@@ -21,8 +21,8 @@ def migrate(env, dry_run=False):
for backend in ('cache', 'storage', 'permission'):
if hasattr(registry, backend):
if readonly_mode and backend in readonly_backends:
- message = ('Cannot migrate the %s backend while '
- 'in readonly mode.' % backend)
+ message = ('Cannot migrate the {} backend while '
+ 'in readonly mode.'.format(backend))
logger.error(message)
else:
getattr(registry, backend).initialize_schema(dry_run=dry_run)
@@ -38,15 +38,15 @@ def delete_collection(env, bucket_id, collection_id):
logger.error(message)
return 31
- bucket = '/buckets/%s' % bucket_id
- collection = '/buckets/%s/collections/%s' % (bucket_id, collection_id)
+ bucket = '/buckets/{}'.format(bucket_id)
+ collection = '/buckets/{}/collections/{}'.format(bucket_id, collection_id)
try:
registry.storage.get(collection_id='bucket',
parent_id='',
object_id=bucket_id)
except storage_exceptions.RecordNotFoundError:
- logger.error("Bucket %r does not exist." % bucket)
+ logger.error("Bucket '{}' does not exist.".format(bucket))
return 32
try:
@@ -54,22 +54,22 @@ def delete_collection(env, bucket_id, collection_id):
parent_id=bucket,
object_id=collection_id)
except storage_exceptions.RecordNotFoundError:
- logger.error("Collection %r does not exist." % collection)
+ logger.error("Collection '{}' does not exist.".format(collection))
return 33
deleted = registry.storage.delete_all(collection_id='record',
parent_id=collection,
with_deleted=False)
if len(deleted) == 0:
- logger.info('No records found for %r.' % collection)
+ logger.info("No records found for '{}'.".format(collection))
else:
- logger.info('%d record(s) were deleted.' % len(deleted))
+ logger.info('{} record(s) were deleted.'.format(len(deleted)))
registry.storage.delete(collection_id='collection',
parent_id=bucket,
object_id=collection_id,
with_deleted=False)
- logger.info("%r collection object was deleted." % collection)
+ logger.info("'{}' collection object was deleted.".format(collection))
record = ('/buckets/{bucket_id}'
'/collections/{collection_id}'
diff --git a/kinto/core/statsd.py b/kinto/core/statsd.py
index 9e2fc3ede..f122180d4 100644
--- a/kinto/core/statsd.py
+++ b/kinto/core/statsd.py
@@ -7,12 +7,12 @@
statsd_module = None
from pyramid.exceptions import ConfigurationError
-from six.moves.urllib import parse as urlparse
+from urllib.parse import urlparse
from kinto.core import utils
-class Client(object):
+class Client:
def __init__(self, host, port, prefix):
self._client = statsd_module.StatsClient(host, port, prefix=prefix)
@@ -23,7 +23,7 @@ def watch_execution_time(self, obj, prefix='', classname=None):
value = getattr(obj, name)
is_method = isinstance(value, types.MethodType)
if not name.startswith('_') and is_method:
- statsd_key = "%s.%s.%s" % (prefix, classname, name)
+ statsd_key = "{}.{}.{}".format(prefix, classname, name)
decorated_method = self.timer(statsd_key)(value)
setattr(obj, name, decorated_method)
@@ -53,7 +53,7 @@ def load_from_config(config):
settings = config.get_settings()
uri = settings['statsd_url']
- uri = urlparse.urlparse(uri)
+ uri = urlparse(uri)
if settings['project_name'] != '':
prefix = settings['project_name']
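
``six.moves.urllib`` is replaced by the standard-library ``urllib.parse``, so the statsd URL is
parsed with ``urlparse`` directly. For example (the URL below is made up):

.. code-block:: python

    from urllib.parse import urlparse

    uri = urlparse('udp://localhost:8125')
    assert uri.scheme == 'udp'
    assert uri.hostname == 'localhost'
    assert uri.port == 8125
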
diff --git a/kinto/core/storage/__init__.py b/kinto/core/storage/__init__.py
index 7a1aad06b..fe65cc9e0 100644
--- a/kinto/core/storage/__init__.py
+++ b/kinto/core/storage/__init__.py
@@ -22,7 +22,7 @@
_HEARTBEAT_RECORD = {'__heartbeat__': True}
-class StorageBase(object):
+class StorageBase:
"""Storage abstraction used by resource views.
It is meant to be instantiated at application startup.
diff --git a/kinto/core/storage/exceptions.py b/kinto/core/storage/exceptions.py
index bf666733d..50d7ddf21 100644
--- a/kinto/core/storage/exceptions.py
+++ b/kinto/core/storage/exceptions.py
@@ -11,9 +11,9 @@ class BackendError(Exception):
def __init__(self, original=None, message=None, *args, **kwargs):
self.original = original
if message is None:
- message = "%s: %s" % (original.__class__.__name__,
- original)
- super(BackendError, self).__init__(message, *args, **kwargs)
+ message = "{}: {}".format(original.__class__.__name__,
+ original)
+ super().__init__(message, *args, **kwargs)
class RecordNotFoundError(Exception):
@@ -37,5 +37,5 @@ class UnicityError(IntegrityError):
def __init__(self, field, record, *args, **kwargs):
self.field = field
self.record = record
- self.msg = "{0} is not unique: {1}".format(field, record)
- super(UnicityError, self).__init__(*args, **kwargs)
+ self.msg = "{} is not unique: {}".format(field, record)
+ super().__init__(*args, **kwargs)
diff --git a/kinto/core/storage/generators.py b/kinto/core/storage/generators.py
index 7ef60c359..9de4f1e97 100644
--- a/kinto/core/storage/generators.py
+++ b/kinto/core/storage/generators.py
@@ -1,10 +1,8 @@
import re
from uuid import uuid4
-import six
-
-class Generator(object):
+class Generator:
"""Base generator for records ids.
Id generators are used by storage backend during record creation, and at
@@ -57,4 +55,4 @@ class UUID4(Generator):
"""UUID4 accurate pattern."""
def __call__(self):
- return six.text_type(uuid4())
+ return str(uuid4())
diff --git a/kinto/core/storage/memory.py b/kinto/core/storage/memory.py
index 57971593e..67dbe6534 100644
--- a/kinto/core/storage/memory.py
+++ b/kinto/core/storage/memory.py
@@ -74,7 +74,7 @@ class Storage(MemoryBasedStorage):
kinto.storage_backend = kinto.core.storage.memory
"""
def __init__(self, *args, **kwargs):
- super(Storage, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.flush()
def flush(self, auth=None):
@@ -134,7 +134,7 @@ def create(self, collection_id, parent_id, record, id_generator=None,
id_field=DEFAULT_ID_FIELD,
modified_field=DEFAULT_MODIFIED_FIELD, auth=None):
id_generator = id_generator or self.id_generator
- record = record.copy()
+ record = {**record}
if id_field in record:
# Raise unicity error if record with same id already exists.
try:
@@ -160,14 +160,14 @@ def get(self, collection_id, parent_id, object_id,
collection = self._store[parent_id][collection_id]
if object_id not in collection:
raise exceptions.RecordNotFoundError(object_id)
- return collection[object_id].copy()
+ return {**collection[object_id]}
@synchronized
def update(self, collection_id, parent_id, object_id, record,
id_field=DEFAULT_ID_FIELD,
modified_field=DEFAULT_MODIFIED_FIELD,
auth=None):
- record = record.copy()
+ record = {**record}
record[id_field] = object_id
self.set_record_timestamp(collection_id, parent_id, record,
@@ -195,7 +195,7 @@ def delete(self, collection_id, parent_id, object_id,
# Add to deleted items, remove from store.
if with_deleted:
- deleted = existing.copy()
+ deleted = {**existing}
self._cemetery[parent_id][collection_id][object_id] = deleted
self._store[parent_id][collection_id].pop(object_id)
return existing
@@ -381,7 +381,7 @@ def column(first, record, name):
def _get_objects_by_parent_id(store, parent_id, collection_id, with_meta=False):
if parent_id is not None:
- parent_id_match = re.compile("^%s$" % parent_id.replace('*', '.*'))
+ parent_id_match = re.compile("^{}$".format(parent_id.replace('*', '.*')))
by_parent_id = {pid: collections
for pid, collections in store.items()
if parent_id_match.match(pid)}
diff --git a/kinto/core/storage/postgresql/__init__.py b/kinto/core/storage/postgresql/__init__.py
index 6585ad048..364ac560b 100644
--- a/kinto/core/storage/postgresql/__init__.py
+++ b/kinto/core/storage/postgresql/__init__.py
@@ -2,8 +2,6 @@
import warnings
from collections import defaultdict
-import six
-
from kinto.core import logger
from kinto.core.storage import (
StorageBase, exceptions,
@@ -70,7 +68,7 @@ class Storage(StorageBase):
schema_version = 14
def __init__(self, client, max_fetch_size, *args, **kwargs):
- super(Storage, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.client = client
self._max_fetch_size = max_fetch_size
@@ -93,18 +91,18 @@ def initialize_schema(self, dry_run=False):
if not version:
filepath = os.path.join(here, 'schema.sql')
logger.info("Create PostgreSQL storage schema at version "
- "%s from %s" % (self.schema_version, filepath))
+ "{} from {}".format(self.schema_version, filepath))
# Create full schema.
self._check_database_encoding()
self._check_database_timezone()
# Create full schema.
if not dry_run:
self._execute_sql_file(filepath)
- logger.info('Created PostgreSQL storage schema '
- '(version %s).' % self.schema_version)
+ logger.info('Created PostgreSQL storage schema (version {}).'.format(
+ self.schema_version))
return
- logger.info('Detected PostgreSQL storage schema version %s.' % version)
+ logger.info('Detected PostgreSQL storage schema version {}.'.format(version))
migrations = [(v, v + 1) for v in range(version, self.schema_version)]
if not migrations:
logger.info('PostgreSQL storage schema is up-to-date.')
@@ -114,20 +112,19 @@ def initialize_schema(self, dry_run=False):
# Check order of migrations.
expected = migration[0]
current = self._get_installed_version()
- error_msg = "Expected version %s. Found version %s."
+ error_msg = "Expected version {}. Found version {}."
if not dry_run and expected != current:
- raise AssertionError(error_msg % (expected, current))
+ raise AssertionError(error_msg.format(expected, current))
logger.info('Migrate PostgreSQL storage schema from'
- ' version %s to %s.' % migration)
- filename = 'migration_%03d_%03d.sql' % migration
+ ' version {} to {}.'.format(*migration))
+ filename = 'migration_{0:03d}_{1:03d}.sql'.format(*migration)
filepath = os.path.join(here, 'migrations', filename)
- logger.info("Execute PostgreSQL storage migration"
- " from %s" % filepath)
+ logger.info("Execute PostgreSQL storage migration from {}".format(filepath))
if not dry_run:
self._execute_sql_file(filepath)
- logger.info("PostgreSQL storage schema migration " +
- ("simulated." if dry_run else "done."))
+ logger.info("PostgreSQL storage schema migration {}".format(
+ "simulated." if dry_run else "done."))
def _check_database_timezone(self):
# Make sure database has UTC timezone.
@@ -137,7 +134,7 @@ def _check_database_timezone(self):
record = result.fetchone()
timezone = record['timezone'].upper()
if timezone != 'UTC': # pragma: no cover
- msg = 'Database timezone is not UTC (%s)' % timezone
+ msg = 'Database timezone is not UTC ({})'.format(timezone)
warnings.warn(msg)
logger.warning(msg)
@@ -153,7 +150,7 @@ def _check_database_encoding(self):
record = result.fetchone()
encoding = record['encoding'].lower()
if encoding != 'utf8': # pragma: no cover
- raise AssertionError('Unexpected database encoding %s' % encoding)
+ raise AssertionError('Unexpected database encoding {}'.format(encoding))
def _get_installed_version(self):
"""Return current version of schema or None if not any found.
@@ -182,8 +179,8 @@ def _get_installed_version(self):
result = conn.execute(query)
was_flushed = int(result.fetchone()[0]) == 0
if was_flushed:
- error_msg = 'Missing schema history: consider version %s.'
- logger.warning(error_msg % self.schema_version)
+ error_msg = 'Missing schema history: consider version {}.'
+ logger.warning(error_msg.format(self.schema_version))
return self.schema_version
# In the first versions of Cliquet, there was no migration.
@@ -219,7 +216,7 @@ def create(self, collection_id, parent_id, record, id_generator=None,
modified_field=DEFAULT_MODIFIED_FIELD,
auth=None):
id_generator = id_generator or self.id_generator
- record = record.copy()
+ record = {**record}
if id_field in record:
# Raise unicity error if record with same id already exists.
try:
@@ -231,7 +228,7 @@ def create(self, collection_id, parent_id, record, id_generator=None,
record[id_field] = id_generator()
# Remove redundancy in data field
- query_record = record.copy()
+ query_record = {**record}
query_record.pop(id_field, None)
query_record.pop(modified_field, None)
@@ -292,7 +289,7 @@ def update(self, collection_id, parent_id, object_id, record,
auth=None):
# Remove redundancy in data field
- query_record = record.copy()
+ query_record = {**record}
query_record.pop(id_field, None)
query_record.pop(modified_field, None)
@@ -324,8 +321,7 @@ def update(self, collection_id, parent_id, object_id, record,
last_modified=record.get(modified_field),
data=json.dumps(query_record))
- record = record.copy()
- record[id_field] = object_id
+ record = {**record, id_field: object_id}
with self.client.connect() as conn:
# Create or update ?
@@ -404,12 +400,12 @@ def delete_all(self, collection_id, parent_id, filters=None,
FROM records
WHERE id IN (SELECT id
FROM records
- WHERE %(parent_id_filter)s
- %(collection_id_filter)s
- %(conditions_filter)s
- %(pagination_rules)s
- %(sorting)s
- %(pagination_limit)s)
+ WHERE {parent_id_filter}
+ {collection_id_filter}
+ {conditions_filter}
+ {pagination_rules}
+ {sorting}
+ {pagination_limit})
RETURNING id, parent_id, collection_id
)
INSERT INTO deleted (id, parent_id, collection_id)
@@ -423,12 +419,12 @@ def delete_all(self, collection_id, parent_id, filters=None,
FROM records
WHERE id IN (SELECT id
FROM records
- WHERE %(parent_id_filter)s
- %(collection_id_filter)s
- %(conditions_filter)s
- %(pagination_rules)s
- %(sorting)s
- %(pagination_limit)s)
+ WHERE {parent_id_filter}
+ {collection_id_filter}
+ {conditions_filter}
+ {pagination_rules}
+ {sorting}
+ {pagination_limit})
RETURNING id, as_epoch(last_modified) AS last_modified;
"""
@@ -437,7 +433,7 @@ def delete_all(self, collection_id, parent_id, filters=None,
placeholders = dict(parent_id=parent_id,
collection_id=collection_id)
# Safe strings
- safeholders = defaultdict(six.text_type)
+ safeholders = defaultdict(str)
# Handle parent_id as a regex only if it contains *
if '*' in parent_id:
safeholders['parent_id_filter'] = 'parent_id LIKE :parent_id'
@@ -454,7 +450,7 @@ def delete_all(self, collection_id, parent_id, filters=None,
safe_sql, holders = self._format_conditions(filters,
id_field,
modified_field)
- safeholders['conditions_filter'] = 'AND %s' % safe_sql
+ safeholders['conditions_filter'] = 'AND {}'.format(safe_sql)
placeholders.update(**holders)
if sorting:
@@ -466,15 +462,15 @@ def delete_all(self, collection_id, parent_id, filters=None,
if pagination_rules:
sql, holders = self._format_pagination(pagination_rules, id_field,
modified_field)
- safeholders['pagination_rules'] = 'AND %s' % sql
+ safeholders['pagination_rules'] = 'AND {}'.format(sql)
placeholders.update(**holders)
if limit:
# We validate the limit value in the resource class as integer.
- safeholders['pagination_limit'] = 'LIMIT %s' % limit
+ safeholders['pagination_limit'] = 'LIMIT {}'.format(limit)
with self.client.connect() as conn:
- result = conn.execute(query % safeholders, placeholders)
+ result = conn.execute(query.format_map(safeholders), placeholders)
deleted = result.fetchmany(self._max_fetch_size)
records = []
@@ -494,16 +490,16 @@ def purge_deleted(self, collection_id, parent_id, before=None,
query = """
DELETE
FROM deleted
- WHERE %(parent_id_filter)s
- %(collection_id_filter)s
- %(conditions_filter)s;
+ WHERE {parent_id_filter}
+ {collection_id_filter}
+ {conditions_filter};
"""
id_field = id_field or self.id_field
modified_field = modified_field or self.modified_field
placeholders = dict(parent_id=parent_id,
collection_id=collection_id)
# Safe strings
- safeholders = defaultdict(six.text_type)
+ safeholders = defaultdict(str)
# Handle parent_id as a regex only if it contains *
if '*' in parent_id:
safeholders['parent_id_filter'] = 'parent_id LIKE :parent_id'
@@ -522,7 +518,7 @@ def purge_deleted(self, collection_id, parent_id, before=None,
placeholders['before'] = before
with self.client.connect() as conn:
- result = conn.execute(query % safeholders, placeholders)
+ result = conn.execute(query.format_map(safeholders), placeholders)
return result.rowcount
@@ -536,17 +532,17 @@ def get_all(self, collection_id, parent_id, filters=None, sorting=None,
WITH total_filtered AS (
SELECT COUNT(id) AS count
FROM records
- WHERE %(parent_id_filter)s
+ WHERE {parent_id_filter}
AND collection_id = :collection_id
- %(conditions_filter)s
+ {conditions_filter}
),
collection_filtered AS (
SELECT id, last_modified, data
FROM records
- WHERE %(parent_id_filter)s
+ WHERE {parent_id_filter}
AND collection_id = :collection_id
- %(conditions_filter)s
- LIMIT %(max_fetch_size)s
+ {conditions_filter}
+ LIMIT {max_fetch_size}
),
fake_deleted AS (
SELECT (:deleted_field)::JSONB AS data
@@ -554,10 +550,10 @@ def get_all(self, collection_id, parent_id, filters=None, sorting=None,
filtered_deleted AS (
SELECT id, last_modified, fake_deleted.data AS data
FROM deleted, fake_deleted
- WHERE %(parent_id_filter)s
+ WHERE {parent_id_filter}
AND collection_id = :collection_id
- %(conditions_filter)s
- %(deleted_limit)s
+ {conditions_filter}
+ {deleted_limit}
),
all_records AS (
SELECT * FROM filtered_deleted
@@ -567,14 +563,14 @@ def get_all(self, collection_id, parent_id, filters=None, sorting=None,
paginated_records AS (
SELECT DISTINCT id
FROM all_records
- %(pagination_rules)s
+ {pagination_rules}
)
SELECT total_filtered.count AS count_total,
a.id, as_epoch(a.last_modified) AS last_modified, a.data
FROM paginated_records AS p JOIN all_records AS a ON (a.id = p.id),
total_filtered
- %(sorting)s
- %(pagination_limit)s;
+ {sorting}
+ {pagination_limit};
"""
deleted_field = json.dumps(dict([(deleted_field, True)]))
@@ -584,7 +580,7 @@ def get_all(self, collection_id, parent_id, filters=None, sorting=None,
deleted_field=deleted_field)
# Safe strings
- safeholders = defaultdict(six.text_type)
+ safeholders = defaultdict(str)
safeholders['max_fetch_size'] = self._max_fetch_size
# Handle parent_id as a regex only if it contains *
@@ -598,7 +594,7 @@ def get_all(self, collection_id, parent_id, filters=None, sorting=None,
safe_sql, holders = self._format_conditions(filters,
id_field,
modified_field)
- safeholders['conditions_filter'] = 'AND %s' % safe_sql
+ safeholders['conditions_filter'] = 'AND {}'.format(safe_sql)
placeholders.update(**holders)
if not include_deleted:
@@ -613,15 +609,15 @@ def get_all(self, collection_id, parent_id, filters=None, sorting=None,
if pagination_rules:
sql, holders = self._format_pagination(pagination_rules, id_field,
modified_field)
- safeholders['pagination_rules'] = 'WHERE %s' % sql
+ safeholders['pagination_rules'] = 'WHERE {}'.format(sql)
placeholders.update(**holders)
if limit:
# We validate the limit value in the resource class as integer.
- safeholders['pagination_limit'] = 'LIMIT %s' % limit
+ safeholders['pagination_limit'] = 'LIMIT {}'.format(limit)
with self.client.connect(readonly=True) as conn:
- result = conn.execute(query % safeholders, placeholders)
+ result = conn.execute(query.format_map(safeholders), placeholders)
retrieved = result.fetchmany(self._max_fetch_size)
if not len(retrieved):
@@ -678,23 +674,23 @@ def _format_conditions(self, filters, id_field, modified_field,
subfields = filtr.field.split('.')
for j, subfield in enumerate(subfields):
# Safely escape field name
- field_holder = '%s_field_%s_%s' % (prefix, i, j)
+ field_holder = '{}_field_{}_{}'.format(prefix, i, j)
holders[field_holder] = subfield
# Use ->> to convert the last level to text.
column_name += "->>" if j == len(subfields) - 1 else "->"
- column_name += ":%s" % field_holder
+ column_name += ":{}".format(field_holder)
# If field is missing, we default to ''.
- sql_field = "coalesce(%s, '')" % column_name
+ sql_field = "coalesce({}, '')".format(column_name)
# Cast when comparing to number (eg. '4' < '12')
if isinstance(value, (int, float)) and \
value not in (True, False):
- sql_field = "(%s)::numeric" % column_name
+ sql_field = "({})::numeric".format(column_name)
if filtr.operator not in (COMPARISON.IN, COMPARISON.EXCLUDE):
# For the IN operator, let psycopg escape the values list.
# Otherwise JSON-ify the native value (e.g. True -> 'true')
- if not isinstance(filtr.value, six.string_types):
+ if not isinstance(filtr.value, str):
value = json.dumps(filtr.value).strip('"')
else:
value = tuple(value)
@@ -703,15 +699,15 @@ def _format_conditions(self, filters, id_field, modified_field,
value = (None,)
if filtr.operator == COMPARISON.LIKE:
- value = '%{0}%'.format(value)
+ value = '%{}%'.format(value)
# Safely escape value
- value_holder = '%s_value_%s' % (prefix, i)
+ value_holder = '{}_value_{}'.format(prefix, i)
holders[value_holder] = value
sql_operator = operators.setdefault(filtr.operator,
filtr.operator.value)
- cond = "%s %s :%s" % (sql_field, sql_operator, value_holder)
+ cond = "{} {} :{}".format(sql_field, sql_operator, value_holder)
conditions.append(cond)
safe_sql = ' AND '.join(conditions)
@@ -737,7 +733,7 @@ def _format_pagination(self, pagination_rules, id_field, modified_field):
placeholders = {}
for i, rule in enumerate(pagination_rules):
- prefix = 'rules_%s' % i
+ prefix = 'rules_{}'.format(i)
safe_sql, holders = self._format_conditions(rule,
id_field,
modified_field,
@@ -745,7 +741,7 @@ def _format_pagination(self, pagination_rules, id_field, modified_field):
rules.append(safe_sql)
placeholders.update(**holders)
- safe_sql = ' OR '.join(['(%s)' % r for r in rules])
+ safe_sql = ' OR '.join(['({})'.format(r) for r in rules])
return safe_sql, placeholders
def _format_sorting(self, sorting, id_field, modified_field):
@@ -773,16 +769,16 @@ def _format_sorting(self, sorting, id_field, modified_field):
sql_field = 'data'
for j, subfield in enumerate(subfields):
# Safely escape field name
- field_holder = 'sort_field_%s_%s' % (i, j)
+ field_holder = 'sort_field_{}_{}'.format(i, j)
holders[field_holder] = subfield
# Use ->> to convert the last level to text.
- sql_field += '->(:%s)' % field_holder
+ sql_field += '->(:{})'.format(field_holder)
sql_direction = 'ASC' if sort.direction > 0 else 'DESC'
- sql_sort = "%s %s" % (sql_field, sql_direction)
+ sql_sort = "{} {}".format(sql_field, sql_direction)
sorts.append(sql_sort)
- safe_sql = 'ORDER BY %s' % (', '.join(sorts))
+ safe_sql = 'ORDER BY {}'.format(', '.join(sorts))
return safe_sql, holders
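
The SQL templates above switch from ``%(name)s`` placeholders filled with ``query % safeholders``
to ``{name}`` fields filled with ``query.format_map(safeholders)``. Because ``safeholders`` is a
``defaultdict(str)``, clauses that were never set render as empty strings; ``format_map`` is
needed here because ``query.format(**safeholders)`` would unpack only the existing keys and raise
``KeyError`` for the missing ones. A reduced sketch of the mechanism with a toy query:

.. code-block:: python

    from collections import defaultdict

    query = """
    SELECT id, data
    FROM records
    WHERE parent_id = :parent_id
          {conditions_filter}
          {sorting}
          {pagination_limit};
    """

    safeholders = defaultdict(str)             # missing keys format as ''
    safeholders['pagination_limit'] = 'LIMIT 10'

    sql = query.format_map(safeholders)        # __missing__ is honoured, unlike format(**...)
    assert 'LIMIT 10' in sql
    assert '{conditions_filter}' not in sql    # unset clauses disappear
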
diff --git a/kinto/core/storage/postgresql/client.py b/kinto/core/storage/postgresql/client.py
index 1ceb666b2..dc45aa3b7 100644
--- a/kinto/core/storage/postgresql/client.py
+++ b/kinto/core/storage/postgresql/client.py
@@ -8,7 +8,7 @@
import transaction as zope_transaction
-class PostgreSQLClient(object):
+class PostgreSQLClient:
def __init__(self, session_factory, commit_manually=True, invalidate=None):
self.session_factory = session_factory
self.commit_manually = commit_manually
@@ -50,8 +50,7 @@ def connect(self, readonly=False, force_commit=False):
logger.error(e)
if session and with_transaction:
session.rollback()
- raise exceptions.BackendError(original=e)
-
+ raise exceptions.BackendError(original=e) from e
finally:
if session and self.commit_manually:
# Give back to pool if commit done manually.
@@ -73,7 +72,7 @@ def create_from_config(config, prefix='', with_transaction=True):
from zope.sqlalchemy import ZopeTransactionExtension, invalidate
from sqlalchemy.orm import sessionmaker, scoped_session
- settings = config.get_settings().copy()
+ settings = {**config.get_settings()}
# Custom Kinto settings, unsupported by SQLAlchemy.
settings.pop(prefix + 'backend', None)
settings.pop(prefix + 'max_fetch_size', None)
@@ -85,7 +84,7 @@ def create_from_config(config, prefix='', with_transaction=True):
existing_client = _CLIENTS[transaction_per_request].get(url)
if existing_client:
msg = ("Reuse existing PostgreSQL connection. "
- "Parameters %s* will be ignored." % prefix)
+ "Parameters {}* will be ignored.".format(prefix))
warnings.warn(msg)
return existing_client
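
Re-raising the driver error in ``connect()`` now uses explicit exception chaining
(``raise ... from e``), so the original exception is preserved as ``__cause__`` on the
``BackendError``. A tiny illustration with stand-in exception classes:

.. code-block:: python

    class BackendError(Exception):
        pass

    def connect():
        try:
            raise ConnectionError('connection refused')
        except Exception as e:
            # Chain explicitly: the original error is kept as __cause__.
            raise BackendError('storage backend unreachable') from e

    try:
        connect()
    except BackendError as err:
        assert isinstance(err.__cause__, ConnectionError)
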
diff --git a/kinto/core/storage/testing.py b/kinto/core/storage/testing.py
index 1597a5cf0..5f4ef5b4c 100644
--- a/kinto/core/storage/testing.py
+++ b/kinto/core/storage/testing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
import time
import mock
@@ -11,13 +10,13 @@
RECORD_ID = '472be9ec-26fe-461b-8282-9c4e4b207ab3'
-class BaseTestStorage(object):
+class BaseTestStorage:
backend = None
settings = {}
def setUp(self):
- super(BaseTestStorage, self).setUp()
+ super().setUp()
self.storage = self.backend.load_from_config(self._get_config())
self.storage.initialize_schema()
self.id_field = 'id'
@@ -44,13 +43,12 @@ def _get_config(self, settings=None):
def tearDown(self):
mock.patch.stopall()
- super(BaseTestStorage, self).tearDown()
+ super().tearDown()
self.storage.flush()
def create_record(self, record=None, id_generator=None, **kwargs):
record = record or self.record
- kw = self.storage_kw.copy()
- kw.update(**kwargs)
+ kw = {**self.storage_kw, **kwargs}
return self.storage.create(record=record,
id_generator=id_generator,
**kw)
@@ -173,27 +171,24 @@ def test_create_uses_the_resource_id_generator(self):
self.assertEquals(record['id'], RECORD_ID)
def test_create_supports_unicode_for_parent_and_id(self):
- unicode_id = u'Rémy'
+ unicode_id = 'Rémy'
self.create_record(parent_id=unicode_id, collection_id=unicode_id)
def test_create_does_not_overwrite_the_provided_id(self):
- record = self.record.copy()
- record[self.id_field] = RECORD_ID
+ record = {**self.record, self.id_field: RECORD_ID}
stored = self.create_record(record=record)
self.assertEqual(stored[self.id_field], RECORD_ID)
def test_create_raise_unicity_error_if_provided_id_exists(self):
- record = self.record.copy()
- record[self.id_field] = RECORD_ID
+ record = {**self.record, self.id_field: RECORD_ID}
self.create_record(record=record)
- record = self.record.copy()
- record[self.id_field] = RECORD_ID
+ record = {**self.record, self.id_field: RECORD_ID}
self.assertRaises(exceptions.UnicityError,
self.create_record,
record=record)
def test_create_does_generate_a_new_last_modified_field(self):
- record = self.record.copy()
+ record = {**self.record}
self.assertNotIn(self.modified_field, record)
created = self.create_record(record=record)
self.assertIn(self.modified_field, created)
@@ -486,7 +481,7 @@ def test_get_all_handle_all_pagination_rules(self):
self.assertEqual(len(records), 4)
-class TimestampsTest(object):
+class TimestampsTest:
def test_timestamp_are_incremented_on_create(self):
self.create_record() # init
before = self.storage.collection_timestamp(**self.storage_kw)
@@ -550,7 +545,7 @@ def test_the_timestamp_are_based_on_real_time_milliseconds(self):
time.sleep(0.002) # 2 msec
after = utils.msec_time()
self.assertTrue(before < now < after,
- '%s < %s < %s' % (before, now, after))
+ '{} < {} < {}'.format(before, now, after))
def test_timestamp_are_always_incremented_above_existing_value(self):
# Create a record with normal clock
@@ -566,14 +561,12 @@ def test_timestamp_are_always_incremented_above_existing_value(self):
# Expect the last one to be based on the highest value
self.assertTrue(0 < current < after,
- '0 < %s < %s' % (current, after))
+ '0 < {} < {}'.format(current, after))
def test_create_uses_specified_last_modified_if_collection_empty(self):
# Collection is empty, create a new record with a specified timestamp.
last_modified = 1448881675541
- record = self.record.copy()
- record[self.id_field] = RECORD_ID
- record[self.modified_field] = last_modified
+ record = {**self.record, self.id_field: RECORD_ID, self.modified_field: last_modified}
self.create_record(record=record)
# Check that the record was assigned the specified timestamp.
@@ -590,9 +583,9 @@ def test_create_ignores_specified_last_modified_if_in_the_past(self):
timestamp_before = first_record[self.modified_field]
# Create a new record with its timestamp in the past.
- record = self.record.copy()
- record[self.id_field] = RECORD_ID
- record[self.modified_field] = timestamp_before - 10
+ record = {**self.record,
+ self.id_field: RECORD_ID,
+ self.modified_field: timestamp_before - 10}
self.create_record(record=record)
# Check that record timestamp is the one specified.
@@ -611,9 +604,9 @@ def test_create_ignores_specified_last_modified_if_equal(self):
timestamp_before = first_record[self.modified_field]
# Create a new record with its timestamp in the past.
- record = self.record.copy()
- record[self.id_field] = RECORD_ID
- record[self.modified_field] = timestamp_before
+ record = {**self.record,
+ self.id_field: RECORD_ID,
+ self.modified_field: timestamp_before}
self.create_record(record=record)
# Check that record timestamp is the one specified.
@@ -687,7 +680,7 @@ def test_update_ignores_specified_last_modified_if_equal(self):
self.assertGreater(timestamp, timestamp_before)
-class DeletedRecordsTest(object):
+class DeletedRecordsTest:
def _get_last_modified_filters(self):
start = self.storage.collection_timestamp(**self.storage_kw)
time.sleep(0.1)
@@ -1132,7 +1125,7 @@ def test_pagination_rules_on_last_modified_apply_to_deleted_records(self):
self.assertNotIn('deleted', records[1])
-class ParentRecordAccessTest(object):
+class ParentRecordAccessTest:
def test_parent_cannot_access_other_parent_record(self):
record = self.create_record()
self.assertRaises(
@@ -1157,9 +1150,9 @@ def test_parent_cannot_update_other_parent_record(self):
record = self.create_record()
new_record = {"another": "record"}
- kw = self.storage_kw.copy()
- kw['parent_id'] = self.other_parent_id
- kw['auth'] = self.other_auth
+ kw = {**self.storage_kw,
+ 'parent_id': self.other_parent_id,
+ 'auth': self.other_auth}
self.storage.update(object_id=record['id'], record=new_record, **kw)
not_updated = self.storage.get(object_id=record['id'],
diff --git a/kinto/core/testing.py b/kinto/core/testing.py
index 2a8de6c1d..bf7466caa 100644
--- a/kinto/core/testing.py
+++ b/kinto/core/testing.py
@@ -23,9 +23,9 @@ class DummyRequest(mock.MagicMock):
"""Fully mocked request.
"""
def __init__(self, *args, **kwargs):
- super(DummyRequest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.upath_info = '/v0/'
- self.registry = mock.MagicMock(settings=DEFAULT_SETTINGS.copy())
+ self.registry = mock.MagicMock(settings={**DEFAULT_SETTINGS})
self.registry.id_generators = defaultdict(generators.UUID4)
self.GET = {}
self.headers = {}
@@ -60,13 +60,13 @@ class PrefixedRequestClass(webtest.app.TestRequest):
@classmethod
def blank(cls, path, *args, **kwargs):
if prefix:
- path = '/%s%s' % (prefix, path)
+ path = '/{prefix}{path}'.format(prefix=prefix, path=path)
return webtest.app.TestRequest.blank(path, *args, **kwargs)
return PrefixedRequestClass
-class FormattedErrorMixin(object):
+class FormattedErrorMixin:
"""Test mixin in order to perform advanced error responses assertions.
"""
@@ -96,14 +96,14 @@ def get_user_headers(user):
:rtype: dict
"""
- credentials = "%s:secret" % user
- authorization = 'Basic {0}'.format(encode64(credentials))
+ credentials = "{}:secret".format(user)
+ authorization = 'Basic {}'.format(encode64(credentials))
return {
'Authorization': authorization
}
-class BaseWebTest(object):
+class BaseWebTest:
"""Base Web Test to test your kinto.core service.
It setups the database before each test and delete it after.
@@ -116,7 +116,7 @@ class BaseWebTest(object):
"""Main application entry"""
def __init__(self, *args, **kwargs):
- super(BaseWebTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.app = self.make_app()
self.storage = self.app.app.registry.storage
self.cache = self.app.app.registry.cache
@@ -157,7 +157,7 @@ def get_app_settings(self, extras=None):
:param dict extras: extra settings values
:rtype: dict
"""
- settings = DEFAULT_SETTINGS.copy()
+ settings = {**DEFAULT_SETTINGS}
settings['storage_backend'] = 'kinto.core.storage.memory'
settings['cache_backend'] = 'kinto.core.cache.memory'
@@ -169,20 +169,20 @@ def get_app_settings(self, extras=None):
return settings
def tearDown(self):
- super(BaseWebTest, self).tearDown()
+ super().tearDown()
self.storage.flush()
self.cache.flush()
self.permission.flush()
-class ThreadMixin(object):
+class ThreadMixin:
def setUp(self):
- super(ThreadMixin, self).setUp()
+ super().setUp()
self._threads = []
def tearDown(self):
- super(ThreadMixin, self).tearDown()
+ super().tearDown()
for thread in self._threads:
thread.join()
diff --git a/kinto/core/utils.py b/kinto/core/utils.py
index 1ce9cf263..186fc47f9 100644
--- a/kinto/core/utils.py
+++ b/kinto/core/utils.py
@@ -5,11 +5,10 @@
import jsonpatch
import os
import re
-import six
import time
from base64 import b64decode, b64encode
from binascii import hexlify
-from six.moves.urllib import parse as urlparse
+from urllib.parse import unquote
from enum import Enum
import ujson as json # NOQA
@@ -106,7 +105,7 @@ def native_value(value):
:param str value: value to interprete.
:returns: the value coerced to python type
"""
- if isinstance(value, six.string_types):
+ if isinstance(value, str):
if value.lower() in ['on', 'true', 'yes']:
value = True
elif value.lower() in ['off', 'false', 'no']:
@@ -147,7 +146,7 @@ def decode64(encoded_content, encoding='utf-8'):
def hmac_digest(secret, message, encoding='utf-8'):
"""Return hex digest of a message HMAC using secret"""
- if isinstance(secret, six.text_type):
+ if isinstance(secret, str):
secret = secret.encode(encoding)
return hmac.new(secret,
message.encode(encoding),
@@ -213,9 +212,8 @@ def reapply_cors(request, response):
if origin:
settings = request.registry.settings
allowed_origins = set(aslist(settings['cors_origins']))
- required_origins = {'*', decode_header(origin)}
+ required_origins = {'*', origin}
if allowed_origins.intersection(required_origins):
- origin = encode_header(origin)
response.headers['Access-Control-Allow-Origin'] = origin
# Import service here because kinto.core import utils
@@ -266,7 +264,7 @@ def prefixed_userid(request):
# (see :func:`kinto.core.initialization.setup_authentication`)
authn_type = getattr(request, 'authn_type', None)
if authn_type is not None:
- return authn_type + ':' + request.selected_userid
+ return '{}:{}'.format(authn_type, request.selected_userid)
def prefixed_principals(request):
@@ -300,7 +298,7 @@ def build_request(original, dict_obj):
:param original: the original request.
:param dict_obj: a dict object with the sub-request specifications.
"""
- api_prefix = '/%s' % original.upath_info.split('/')[1]
+ api_prefix = '/{}'.format(original.upath_info.split('/')[1])
path = dict_obj['path']
if not path.startswith(api_prefix):
path = api_prefix + path
@@ -319,14 +317,10 @@ def build_request(original, dict_obj):
# Payload is always a dict (from ``BatchRequestSchema.body``).
# Send it as JSON for subrequests.
if isinstance(payload, dict):
- headers['Content-Type'] = encode_header(
- 'application/json; charset=utf-8')
+ headers['Content-Type'] = 'application/json; charset=utf-8'
payload = json.dumps(payload)
- if six.PY3: # pragma: no cover
- path = path.decode('latin-1')
-
- request = Request.blank(path=path,
+ request = Request.blank(path=path.decode('latin-1'),
headers=headers,
POST=payload,
method=method)
@@ -349,7 +343,7 @@ def build_response(response, request):
:param request: the request that was used to get the response.
"""
dict_obj = {}
- dict_obj['path'] = urlparse.unquote(request.path)
+ dict_obj['path'] = unquote(request.path)
dict_obj['status'] = response.status_code
dict_obj['headers'] = dict(response.headers)
@@ -391,35 +385,11 @@ def follow_subrequest(request, subrequest, **kwargs):
return request.invoke_subrequest(new_request, **kwargs), new_request
-def encode_header(value, encoding='utf-8'):
- return _encoded(value, encoding)
-
-
-def _encoded(value, encoding='utf-8'):
- """Make sure the value is of type ``str`` in both PY2 and PY3."""
- value_type = type(value)
- if value_type != str:
- # Test for Python3
- if value_type == six.binary_type: # pragma: no cover
- value = value.decode(encoding)
- # Test for Python2
- elif value_type == six.text_type: # pragma: no cover
- value = value.encode(encoding)
- return value
-
-
-def decode_header(value, encoding='utf-8'):
- """Make sure the header is an unicode string."""
- if type(value) == six.binary_type:
- value = value.decode(encoding)
- return value
-
-
def strip_uri_prefix(path):
"""
Remove potential version prefix in URI.
"""
- return re.sub(r'^(/v\d+)?', '', six.text_type(path))
+ return re.sub(r'^(/v\d+)?', '', str(path))
def view_lookup(request, uri):
@@ -432,9 +402,8 @@ def view_lookup(request, uri):
:rtype: tuple
:returns: the resource name and the associated matchdict.
"""
- api_prefix = '/%s' % request.upath_info.split('/')[1]
- # Path should be bytes in PY2, and unicode in PY3
- path = _encoded(api_prefix + uri)
+ api_prefix = '/{}'.format(request.upath_info.split('/')[1])
+ path = (api_prefix + uri)
q = request.registry.queryUtility
routes_mapper = q(IRoutesMapper)
@@ -452,8 +421,8 @@ def view_lookup(request, uri):
def instance_uri(request, resource_name, **params):
"""Return the URI for the given resource."""
- return strip_uri_prefix(request.route_path('%s-record' % resource_name,
- **params))
+ return strip_uri_prefix(request.route_path(
+ '{}-record'.format(resource_name), **params))
def parse_resource(resource):
@@ -496,7 +465,7 @@ def apply_json_patch(record, ops):
:returns dict data: patched record data.
dict permissions: patched record permissions
"""
- data = record.copy()
+ data = {**record}
# Permissions should always have read and write fields defined (to allow add)
permissions = {'read': set(), 'write': set()}
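
In ``kinto.core.utils`` the ``encode_header``/``decode_header`` shims disappear entirely: under
Python 3 the header values handled by Pyramid are already native ``str``, so callers pass plain
strings such as ``'application/json; charset=utf-8'``. The remaining text/bytes boundary is
``hmac_digest``, which still encodes a text secret before hashing. A small sketch of its
behaviour, assuming a SHA-256 digest as in ``kinto.core``:

.. code-block:: python

    import hashlib
    import hmac

    def hmac_digest(secret, message, encoding='utf-8'):
        """Return the hex digest of an HMAC of message, accepting a str or bytes secret."""
        if isinstance(secret, str):
            secret = secret.encode(encoding)
        return hmac.new(secret, message.encode(encoding), hashlib.sha256).hexdigest()

    # A text secret and its bytes equivalent produce the same digest.
    assert hmac_digest('s3cr3t', 'payload') == hmac_digest(b's3cr3t', 'payload')
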
diff --git a/kinto/core/views/batch.py b/kinto/core/views/batch.py
index fde942602..8d3d89f42 100644
--- a/kinto/core/views/batch.py
+++ b/kinto/core/views/batch.py
@@ -1,5 +1,4 @@
import colander
-import six
from cornice.validators import colander_validator
from pyramid import httpexceptions
@@ -22,9 +21,9 @@ def string_values(node, cstruct):
Should be associated to a ``colander.Mapping`` schema node.
"""
- are_strings = [isinstance(v, six.string_types) for v in cstruct.values()]
+ are_strings = [isinstance(v, str) for v in cstruct.values()]
if not all(are_strings):
- error_msg = '%s contains non string value' % cstruct
+ error_msg = '{} contains non string value'.format(cstruct)
raise colander.Invalid(node, error_msg)
@@ -47,7 +46,7 @@ class BatchPayloadSchema(colander.MappingSchema):
BatchRequestSchema())
def __init__(self, *args, **kwargs):
- super(BatchPayloadSchema, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# On defaults, path is not mandatory.
self.get('defaults').get('path').missing = colander.drop
@@ -61,7 +60,7 @@ def deserialize(self, cstruct=colander.null):
for request in requests:
if isinstance(request, dict):
merge_dicts(request, defaults)
- return super(BatchPayloadSchema, self).deserialize(cstruct)
+ return super().deserialize(cstruct)
class BatchRequest(colander.MappingSchema):
@@ -81,12 +80,12 @@ def post_batch(request):
limit = request.registry.settings['batch_max_requests']
if limit and len(requests) > int(limit):
- error_msg = 'Number of requests is limited to %s' % limit
+ error_msg = 'Number of requests is limited to {}'.format(limit)
request.errors.add('body', 'requests', error_msg)
return
if any([batch.path in req['path'] for req in requests]):
- error_msg = 'Recursive call on %s endpoint is forbidden.' % batch.path
+ error_msg = 'Recursive call on {} endpoint is forbidden.'.format(batch.path)
request.errors.add('body', 'requests', error_msg)
return
diff --git a/kinto/core/views/errors.py b/kinto/core/views/errors.py
index 61b8a3fee..bcdf14667 100644
--- a/kinto/core/views/errors.py
+++ b/kinto/core/views/errors.py
@@ -7,7 +7,7 @@
from kinto.core.errors import http_error, ERRORS
from kinto.core.logs import logger
from kinto.core.storage import exceptions as storage_exceptions
-from kinto.core.utils import reapply_cors, encode_header
+from kinto.core.utils import reapply_cors
@view_config(context=httpexceptions.HTTPForbidden,
@@ -46,7 +46,7 @@ def page_not_found(response, request):
errno = ERRORS.MISSING_RESOURCE
error_msg = "The resource you are looking for could not be found."
- if not request.path.startswith('/' + request.registry.route_prefix):
+ if not request.path.startswith('/{}'.format(request.registry.route_prefix)):
errno = ERRORS.VERSION_NOT_AVAILABLE
error_msg = ("The requested API version is not available "
"on this server.")
@@ -55,10 +55,10 @@ def page_not_found(response, request):
if request.path.endswith('/'):
path = request.path.rstrip('/')
- redirect = '%s%s' % (path, querystring)
- elif request.path == '/' + request.registry.route_prefix:
+ redirect = '{}{}'.format(path, querystring)
+ elif request.path == '/{}'.format(request.registry.route_prefix):
# Case for /v0 -> /v0/
- redirect = '/%s/%s' % (request.registry.route_prefix, querystring)
+ redirect = '/{}/{}'.format(request.registry.route_prefix, querystring)
if redirect:
return reapply_cors(request, HTTPTemporaryRedirect(redirect))
@@ -80,7 +80,7 @@ def service_unavailable(response, request):
message=error_msg)
retry_after = request.registry.settings['retry_after_seconds']
- response.headers["Retry-After"] = encode_header('%s' % retry_after)
+ response.headers["Retry-After"] = str(retry_after)
return reapply_cors(request, response)
diff --git a/kinto/core/views/heartbeat.py b/kinto/core/views/heartbeat.py
index 43bd0eb5d..0d128a011 100644
--- a/kinto/core/views/heartbeat.py
+++ b/kinto/core/views/heartbeat.py
@@ -43,14 +43,14 @@ def heartbeat_check(name, func):
for future in done:
exc = future.exception()
if exc is not None:
- logger.error("%r heartbeat failed." % future.__heartbeat_name)
+ logger.error("'{}' heartbeat failed.".format(future.__heartbeat_name))
logger.error(exc)
# Log timed-out heartbeats.
for future in not_done:
name = future.__heartbeat_name
- error_msg = "%r heartbeat has exceeded timeout of %s seconds."
- logger.error(error_msg % (name, seconds))
+ error_msg = "'{}' heartbeat has exceeded timeout of {} seconds."
+ logger.error(error_msg.format(name, seconds))
# If any has failed, return a 503 error response.
has_error = not all([v or v is None for v in status.values()])
diff --git a/kinto/events.py b/kinto/events.py
index a70b2ac61..d00142481 100644
--- a/kinto/events.py
+++ b/kinto/events.py
@@ -1,3 +1,3 @@
-class ServerFlushed(object):
+class ServerFlushed:
def __init__(self, request):
self.request = request
diff --git a/kinto/plugins/admin/views.py b/kinto/plugins/admin/views.py
index 7914e23a0..a684f4098 100644
--- a/kinto/plugins/admin/views.py
+++ b/kinto/plugins/admin/views.py
@@ -14,7 +14,7 @@ def admin_home_view(request):
settings = {
"authMethods": aslist(request.registry.settings.get('multiauth.policies'))
}
- globalSettings = "" % json.dumps(settings)
+ globalSettings = "".format(json.dumps(settings))
# Update the file built by react-scripts to load the globalSettings.
with open(os.path.join(HERE, 'build/index.html')) as f:
diff --git a/kinto/plugins/default_bucket/__init__.py b/kinto/plugins/default_bucket/__init__.py
index ac7061b16..1b640bcb7 100644
--- a/kinto/plugins/default_bucket/__init__.py
+++ b/kinto/plugins/default_bucket/__init__.py
@@ -1,6 +1,5 @@
import uuid
-import six
from pyramid import httpexceptions
from pyramid.settings import asbool
from pyramid.security import NO_PERMISSION_REQUIRED, Authenticated
@@ -97,7 +96,7 @@ def resource_create_object(request, resource_cls, uri):
if not resource.model.id_generator.match(obj_id):
error_details = {
'location': 'path',
- 'description': "Invalid %s id" % resource_name
+ 'description': "Invalid {} id".format(resource_name)
}
raise_invalid(resource.request, **error_details)
@@ -139,7 +138,7 @@ def default_bucket(request):
# Make sure the collection exists
create_collection(request, bucket_id)
- path = request.path.replace('/buckets/default', '/buckets/%s' % bucket_id)
+ path = request.path.replace('/buckets/default', '/buckets/{}'.format(bucket_id))
querystring = request.url[(request.url.index(request.path) +
len(request.path)):]
try:
@@ -172,7 +171,7 @@ def default_bucket_id(request):
secret = settings['userid_hmac_secret']
# Build the user unguessable bucket_id UUID from its user_id
digest = hmac_digest(secret, request.prefixed_userid)
- return six.text_type(uuid.UUID(digest[:32]))
+ return str(uuid.UUID(digest[:32]))
def get_user_info(request):
diff --git a/kinto/plugins/history/listener.py b/kinto/plugins/history/listener.py
index 3e0a90852..2cc94ce83 100644
--- a/kinto/plugins/history/listener.py
+++ b/kinto/plugins/history/listener.py
@@ -96,7 +96,7 @@ def on_resource_changed(event):
eventattrs = dict(**payload)
eventattrs.pop('timestamp', None) # Already in target `last_modified`.
eventattrs.pop('bucket_id', None)
- eventattrs['%s_id' % resource_name] = obj_id
+ eventattrs['{}_id'.format(resource_name)] = obj_id
eventattrs['uri'] = uri
attrs = dict(date=datetime.now().isoformat(),
target={'data': target, 'permissions': perms},
@@ -116,5 +116,5 @@ def on_resource_changed(event):
entry_principals.update(perms.get('write', []))
entry_perms = {'read': list(entry_principals)}
# /buckets/{id}/history is the URI for the list of history entries.
- entry_perm_id = '/buckets/%s/history/%s' % (bucket_id, entry['id'])
+ entry_perm_id = '/buckets/{}/history/{}'.format(bucket_id, entry['id'])
permission.replace_object_permissions(entry_perm_id, entry_perms)
diff --git a/kinto/plugins/history/views.py b/kinto/plugins/history/views.py
index 2f82c87ea..f151ef7e0 100644
--- a/kinto/plugins/history/views.py
+++ b/kinto/plugins/history/views.py
@@ -34,7 +34,7 @@ def get_parent_id(self, request):
return instance_uri(request, 'bucket', id=self.bucket_id)
def _extract_filters(self, queryparams=None):
- filters = super(History, self)._extract_filters(queryparams)
+ filters = super()._extract_filters(queryparams)
filters_str_id = []
for filt in filters:
if filt.field in ('record_id', 'collection_id', 'bucket_id'):
diff --git a/kinto/plugins/quotas/listener.py b/kinto/plugins/quotas/listener.py
index 2886d31cf..e689a0e94 100644
--- a/kinto/plugins/quotas/listener.py
+++ b/kinto/plugins/quotas/listener.py
@@ -137,7 +137,7 @@ def on_resource_changed(event):
if max_bytes_per_item is not None and action != "delete":
if new_size > max_bytes_per_item:
message = ("Maximum bytes per object exceeded "
- "(%d > %d Bytes." % (new_size, max_bytes_per_item))
+ "({} > {} Bytes.".format(new_size, max_bytes_per_item))
raise http_error(HTTPInsufficientStorage(),
errno=ERRORS.FORBIDDEN.value,
message=message)
@@ -183,8 +183,8 @@ def on_resource_changed(event):
if bucket_max_bytes is not None:
if bucket_info['storage_size'] > bucket_max_bytes:
message = ("Bucket maximum total size exceeded "
- "(%d > %d Bytes). " % (bucket_info['storage_size'],
- bucket_max_bytes))
+ "({} > {} Bytes). ".format(bucket_info['storage_size'],
+ bucket_max_bytes))
raise http_error(HTTPInsufficientStorage(),
errno=ERRORS.FORBIDDEN.value,
message=message)
@@ -192,8 +192,8 @@ def on_resource_changed(event):
if bucket_max_items is not None:
if bucket_info['record_count'] > bucket_max_items:
message = ("Bucket maximum number of objects exceeded "
- "(%d > %d objects)." % (bucket_info['record_count'],
- bucket_max_items))
+ "({} > {} objects).".format(bucket_info['record_count'],
+ bucket_max_items))
raise http_error(HTTPInsufficientStorage(),
errno=ERRORS.FORBIDDEN.value,
message=message)
@@ -201,8 +201,8 @@ def on_resource_changed(event):
if collection_max_bytes is not None:
if collection_info['storage_size'] > collection_max_bytes:
message = ("Collection maximum size exceeded "
- "(%d > %d Bytes)." % (collection_info['storage_size'],
- collection_max_bytes))
+ "({} > {} Bytes).".format(collection_info['storage_size'],
+ collection_max_bytes))
raise http_error(HTTPInsufficientStorage(),
errno=ERRORS.FORBIDDEN.value,
message=message)
@@ -210,8 +210,8 @@ def on_resource_changed(event):
if collection_max_items is not None:
if collection_info['record_count'] > collection_max_items:
message = ("Collection maximum number of objects exceeded "
- "(%d > %d objects)." % (collection_info['record_count'],
- collection_max_items))
+ "({} > {} objects).".format(collection_info['record_count'],
+ collection_max_items))
raise http_error(HTTPInsufficientStorage(),
errno=ERRORS.FORBIDDEN.value,
message=message)
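One nuance of the ``%d`` to ``{}`` switch in these quota messages: ``%d`` only accepts numbers, while a bare ``{}`` placeholder stringifies whatever it is given, so the formatting step itself can no longer raise if a non-numeric value slips in. Illustrative only, not Kinto code:

    size, limit = 1024, 512

    # Old style: %d insists on a numeric value (TypeError otherwise).
    old = "Maximum bytes per object exceeded (%d > %d Bytes)." % (size, limit)

    # New style: {} calls str() on the value, so integers render identically.
    new = "Maximum bytes per object exceeded ({} > {} Bytes).".format(size, limit)

    assert old == new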
diff --git a/kinto/views/collections.py b/kinto/views/collections.py
index 733a45a59..57dec96f3 100644
--- a/kinto/views/collections.py
+++ b/kinto/views/collections.py
@@ -12,7 +12,7 @@ def schema_type(self, **kw):
def deserialize(self, cstruct=colander.null):
# Start by deserializing a simple mapping.
- validated = super(JSONSchemaMapping, self).deserialize(cstruct)
+ validated = super().deserialize(cstruct)
# In case it is optional in parent schema.
if not validated or validated in (colander.null, colander.drop):
diff --git a/kinto/views/groups.py b/kinto/views/groups.py
index 465020d9f..79a726a09 100644
--- a/kinto/views/groups.py
+++ b/kinto/views/groups.py
@@ -7,7 +7,7 @@
def validate_member(node, member):
if member.startswith('/buckets/') or member == 'system.Everyone':
- raise colander.Invalid(node, "%r is not a valid user ID." % member)
+ raise colander.Invalid(node, "'{}' is not a valid user ID.".format(member))
class GroupSchema(resource.ResourceSchema):
diff --git a/kinto/views/permissions.py b/kinto/views/permissions.py
index 680cf7e84..a30302c75 100644
--- a/kinto/views/permissions.py
+++ b/kinto/views/permissions.py
@@ -35,7 +35,9 @@ def allowed_from_settings(settings, principals):
continue
# ``collection_create_principals`` means ``collection:create`` in bucket.
if permission == 'create':
- permission = '%s:%s' % (resource_name, permission)
+ permission = '{resource_name}:{permission}'.format(
+ resource_name=resource_name,
+ permission=permission)
resource_name = { # resource parents.
'bucket': '',
'collection': 'bucket',
@@ -46,7 +48,7 @@ def allowed_from_settings(settings, principals):
return from_settings
-class PermissionsModel(object):
+class PermissionsModel:
id_field = 'id'
modified_field = 'last_modified'
deleted_field = 'deleted'
@@ -84,7 +86,7 @@ def get_records(self, filters=None, sorting=None, pagination_rules=None,
storage = self.request.registry.storage
every_bucket, _ = storage.get_all(parent_id='', collection_id='bucket')
for bucket in every_bucket:
- bucket_uri = '/buckets/{id}'.format(**bucket)
+ bucket_uri = '/buckets/{id}'.format_map(bucket)
for res in allowed_resources:
resource_perms = from_settings[res]
# Bucket is always fetched.
@@ -158,19 +160,19 @@ class Permissions(resource.ShareableResource):
schema = PermissionsSchema
def __init__(self, request, context=None):
- super(Permissions, self).__init__(request, context)
+ super().__init__(request, context)
self.model = PermissionsModel(request)
def _extract_sorting(self, limit):
# Permissions entries are not stored with timestamp, so do not
# force it.
- result = super(Permissions, self)._extract_sorting(limit)
+ result = super()._extract_sorting(limit)
without_last_modified = [s for s in result
if s.field != self.model.modified_field]
return without_last_modified
def _extract_filters(self, queryparams=None):
- result = super(Permissions, self)._extract_filters(queryparams)
+ result = super()._extract_filters(queryparams)
without_last_modified = [s for s in result
if s.field != self.model.modified_field]
return without_last_modified
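The ``format_map`` change above is equivalent to ``'/buckets/{id}'.format(**bucket)`` for plain dicts, but it hands the mapping to the formatter directly instead of unpacking it into keyword arguments, so it avoids a copy and works with any mapping type. A small sketch with a hypothetical bucket record:

    bucket = {'id': 'blog', 'last_modified': 1493300000}

    # Unpacking copies every key/value into keyword arguments...
    uri_a = '/buckets/{id}'.format(**bucket)

    # ...while format_map looks keys up on the mapping itself.
    uri_b = '/buckets/{id}'.format_map(bucket)

    assert uri_a == uri_b == '/buckets/blog'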
diff --git a/kinto/views/records.py b/kinto/views/records.py
index a01f8fb29..ea4f9ede4 100644
--- a/kinto/views/records.py
+++ b/kinto/views/records.py
@@ -35,7 +35,7 @@ def __init__(self, request, **kwargs):
object_id=self.collection_id)
collections[collection_uri] = collection
- super(Record, self).__init__(request, **kwargs)
+ super().__init__(request, **kwargs)
self._collection = collections[collection_uri]
def get_parent_id(self, request):
@@ -47,7 +47,7 @@ def get_parent_id(self, request):
def process_record(self, new, old=None):
"""Validate records against collection schema, if any."""
- new = super(Record, self).process_record(new, old)
+ new = super().process_record(new, old)
schema = self._collection.get('schema')
settings = self.request.registry.settings
@@ -75,12 +75,12 @@ def process_record(self, new, old=None):
return new
def collection_get(self):
- result = super(Record, self).collection_get()
+ result = super().collection_get()
self._handle_cache_expires(self.request.response)
return result
def get(self):
- result = super(Record, self).get()
+ result = super().get()
self._handle_cache_expires(self.request.response)
return result
@@ -99,8 +99,8 @@ def _handle_cache_expires(self, response):
cache_expires = self._collection.get('cache_expires')
if cache_expires is None:
- by_bucket = '%s_record_cache_expires_seconds' % (self.bucket_id)
- by_collection = '%s_%s_record_cache_expires_seconds' % (
+ by_bucket = '{}_record_cache_expires_seconds'.format(self.bucket_id)
+ by_collection = '{}_{}_record_cache_expires_seconds'.format(
self.bucket_id, self.collection_id)
settings = self.request.registry.settings
cache_expires = settings.get(by_collection,
diff --git a/loadtests/loadtest/__init__.py b/loadtests/loadtest/__init__.py
index bf6d8baa5..7b9cc3d55 100644
--- a/loadtests/loadtest/__init__.py
+++ b/loadtests/loadtest/__init__.py
@@ -24,7 +24,7 @@ def __init__(self, *args, **kwargs):
This method is called as many times as number of users.
"""
- super(BaseLoadTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.conf = self._get_configuration()
diff --git a/loadtests/loadtest/schemas.py b/loadtests/loadtest/schemas.py
index 8b3c2ec52..69c5e0206 100644
--- a/loadtests/loadtest/schemas.py
+++ b/loadtests/loadtest/schemas.py
@@ -12,7 +12,7 @@
class SchemaValidationTest(BaseLoadTest):
def __init__(self, *args, **kwargs):
- super(SchemaValidationTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._init_collections()
def _init_collections(self):
diff --git a/loadtests/loadtest/simulation.py b/loadtests/loadtest/simulation.py
index ff82d9b9f..00ac512c1 100644
--- a/loadtests/loadtest/simulation.py
+++ b/loadtests/loadtest/simulation.py
@@ -38,7 +38,7 @@ def build_article():
class SimulationLoadTest(BaseLoadTest):
def __init__(self, *args, **kwargs):
- super(SimulationLoadTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.collection = 'articles'
self.init_user()
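The zero-argument ``super()`` used throughout these hunks is Python 3 syntax: the compiler supplies the class and instance, so it behaves exactly like the explicit two-argument Python 2 spelling. A stand-alone sketch (class bodies reduced to a minimum, not the real load-test code):

    class BaseLoadTest:
        def __init__(self, *args, **kwargs):
            self.conf = {'server': 'http://localhost:8888'}  # placeholder config

    class SimulationLoadTest(BaseLoadTest):
        def __init__(self, *args, **kwargs):
            # Equivalent to super(SimulationLoadTest, self).__init__(*args, **kwargs)
            super().__init__(*args, **kwargs)
            self.collection = 'articles'

    assert SimulationLoadTest().collection == 'articles'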
diff --git a/setup.py b/setup.py
index fd6409cc0..1e8f76247 100644
--- a/setup.py
+++ b/setup.py
@@ -28,8 +28,7 @@ def read_file(filename):
'ruamel.yaml',
'transaction',
'pyramid_tm',
- 'requests < 2.13.0',
- 'six',
+ 'requests',
'structlog >= 16.1.0',
'enum34',
'waitress',
@@ -79,15 +78,13 @@ def read_file(filename):
setup(name='kinto',
version='5.4.0.dev0',
description='Kinto Web Service - Store, Sync, Share, and Self-Host.',
- long_description=README + "\n\n" + CHANGELOG + "\n\n" + CONTRIBUTORS,
+ long_description="{}\n\n{}\n\n{}".format(README, CHANGELOG, CONTRIBUTORS),
license='Apache License (2.0)',
classifiers=[
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
@@ -108,7 +105,6 @@ def read_file(filename):
'redis': REDIS_REQUIRES,
'postgresql': POSTGRESQL_REQUIRES,
'monitoring': MONITORING_REQUIRES,
- ":python_version=='2.7'": ["functools32", "futures"],
},
test_suite="tests",
dependency_links=DEPENDENCY_LINKS,
diff --git a/tests/core/resource/test_cache_expires.py b/tests/core/resource/test_cache_expires.py
index 0032e702e..b8cacaaa9 100644
--- a/tests/core/resource/test_cache_expires.py
+++ b/tests/core/resource/test_cache_expires.py
@@ -5,12 +5,12 @@ class CacheExpires(BaseTest):
setting = 'test_cache_expires_seconds'
def get_context(self):
- context = super(CacheExpires, self).get_context()
+ context = super().get_context()
context.resource_name = 'test'
return context
def get_request(self):
- request = super(CacheExpires, self).get_request()
+ request = super().get_request()
request.prefixed_userid = None # Anonymous.
return request
diff --git a/tests/core/resource/test_events.py b/tests/core/resource/test_events.py
index 20361e866..04b16b853 100644
--- a/tests/core/resource/test_events.py
+++ b/tests/core/resource/test_events.py
@@ -32,13 +32,13 @@ class BaseEventTest(BaseWebTest):
subscribed = tuple()
def setUp(self):
- super(BaseEventTest, self).setUp()
+ super().setUp()
self.events = []
self.body = {'data': {'name': 'de Paris'}}
def tearDown(self):
self.events = []
- super(BaseEventTest, self).tearDown()
+ super().tearDown()
def listener(self, event):
self.events.append(event)
@@ -49,8 +49,7 @@ def make_app(self, settings=None, config=None):
for event_cls in self.subscribed:
config.add_subscriber(self.listener, event_cls)
config.commit()
- return super(BaseEventTest, self).make_app(settings=settings,
- config=config)
+ return super().make_app(settings=settings, config=config)
class ResourceReadTest(BaseEventTest, unittest.TestCase):
@@ -96,7 +95,7 @@ def test_events_have_custom_representation(self):
headers=self.headers, status=201)
self.assertEqual(repr(self.events[0]),
"" % self.collection_url)
+ "uri={}>".format(self.collection_url))
def test_post_sends_create_action(self):
self.app.post_json(self.collection_url, self.body,
@@ -119,8 +118,7 @@ def test_not_triggered_on_failed_put(self):
record_id = str(uuid.uuid4())
record_url = self.get_item_url(record_id)
self.app.put_json(record_url, self.body, headers=self.headers)
- headers = self.headers.copy()
- headers['If-Match'] = '"12345"'
+ headers = {**self.headers, 'If-Match': '"12345"'}
self.app.put_json(record_url, self.body, headers=headers, status=412)
self.assertEqual(len(self.events), 1)
self.assertEqual(self.events[0].payload['action'],
@@ -209,7 +207,7 @@ def test_permissions_are_stripped_from_event_on_protected_resource(self):
resp = app.post_json('/psilos', self.body,
headers=self.headers, status=201)
record = resp.json['data']
- record_url = '/psilos/' + record['id']
+ record_url = '/psilos/{}'.format(record['id'])
app.patch_json(record_url, {"data": {"name": "De barcelona"}},
headers=self.headers)
impacted_records = self.events[-1].impacted_records
@@ -423,7 +421,7 @@ def test_events_are_not_sent_if_subrequest_fails(self):
def load_from_config(config, prefix):
- class ClassListener(object):
+ class ClassListener:
def __call__(self, event):
pass
return ClassListener()
@@ -432,7 +430,7 @@ def __call__(self, event):
@unittest.skipIf(not statsd.statsd_module, "statsd is not installed.")
class StatsDTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, *args, **kwargs):
- settings = super(StatsDTest, self).get_app_settings(*args, **kwargs)
+ settings = super().get_app_settings(*args, **kwargs)
if not statsd.statsd_module:
return settings
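Several hunks in this test file replace the ``headers.copy()`` then assign idiom with a PEP 448 dict display such as ``{**self.headers, 'If-Match': '"12345"'}``. The unpacked form builds the merged dict in one expression, later keys win, and the source mapping is left untouched. A minimal sketch with made-up header values:

    headers = {'Authorization': 'Basic bWF0Og==', 'Accept': 'application/json'}

    legacy = headers.copy()          # Python 2 compatible: copy, then mutate
    legacy['If-Match'] = '"12345"'

    merged = {**headers, 'If-Match': '"12345"'}   # Python 3.5+: one expression

    assert merged == legacy and 'If-Match' not in headers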
diff --git a/tests/core/resource/test_filter.py b/tests/core/resource/test_filter.py
index 0a0a99cb2..42d72b2b3 100644
--- a/tests/core/resource/test_filter.py
+++ b/tests/core/resource/test_filter.py
@@ -6,7 +6,7 @@
class FilteringTest(BaseTest):
def setUp(self):
- super(FilteringTest, self).setUp()
+ super().setUp()
self.validated = self.resource.request.validated
self.patch_known_field.start()
records = [
@@ -34,7 +34,7 @@ def test_list_can_be_filtered_on_deleted_with_since(self):
def test_filter_on_id_is_supported(self):
self.patch_known_field.stop()
r = self.model.create_record({})
- self.validated['querystring'] = {'id': '%s' % r['id']}
+ self.validated['querystring'] = {'id': '{}'.format(r['id'])}
result = self.resource.collection_get()
self.assertEqual(result['data'][0], r)
@@ -198,7 +198,7 @@ def test_exclude_returns_400_if_value_has_wrong_type(self):
class SubobjectFilteringTest(BaseTest):
def setUp(self):
- super(SubobjectFilteringTest, self).setUp()
+ super().setUp()
self.validated = self.resource.request.validated
self.patch_known_field.start()
for i in range(6):
diff --git a/tests/core/resource/test_model.py b/tests/core/resource/test_model.py
index 35fee3f1d..f4727261a 100644
--- a/tests/core/resource/test_model.py
+++ b/tests/core/resource/test_model.py
@@ -5,7 +5,7 @@
class ModelTest(BaseTest):
def setUp(self):
- super(ModelTest, self).setUp()
+ super().setUp()
self.record = self.model.create_record({'field': 'value'})
def test_list_returns_all_records_in_data(self):
@@ -17,7 +17,7 @@ def test_list_returns_all_records_in_data(self):
class CreateTest(BaseTest):
def setUp(self):
- super(CreateTest, self).setUp()
+ super().setUp()
self.resource.request.validated['body'] = {'data': {'field': 'new'}}
def test_new_records_are_linked_to_owner(self):
@@ -34,7 +34,7 @@ def test_create_record_returns_at_least_id_and_last_modified(self):
class DeleteModelTest(BaseTest):
def setUp(self):
- super(DeleteModelTest, self).setUp()
+ super().setUp()
self.patch_known_field.start()
self.model.create_record({'field': 'a'})
self.model.create_record({'field': 'b'})
@@ -61,17 +61,17 @@ def test_delete_supports_collection_filters(self):
class IsolatedModelsTest(BaseTest):
def setUp(self):
- super(IsolatedModelsTest, self).setUp()
+ super().setUp()
self.stored = self.model.create_record({}, parent_id='bob')
self.resource.record_id = self.stored['id']
def get_request(self):
- request = super(IsolatedModelsTest, self).get_request()
+ request = super().get_request()
request.prefixed_userid = 'basicauth:alice'
return request
def get_context(self):
- context = super(IsolatedModelsTest, self).get_context()
+ context = super().get_context()
context.prefixed_userid = 'basicauth:alice'
return context
diff --git a/tests/core/resource/test_object_permissions.py b/tests/core/resource/test_object_permissions.py
index e9b20469e..89e8d9ef1 100644
--- a/tests/core/resource/test_object_permissions.py
+++ b/tests/core/resource/test_object_permissions.py
@@ -13,17 +13,17 @@ class PermissionTest(BaseTest):
def setUp(self):
self.permission = Permission()
- super(PermissionTest, self).setUp()
+ super().setUp()
def get_request(self):
- request = super(PermissionTest, self).get_request()
+ request = super().get_request()
request.registry.permission = self.permission
return request
class CollectionPermissionTest(PermissionTest):
def setUp(self):
- super(CollectionPermissionTest, self).setUp()
+ super().setUp()
self.result = self.resource.collection_get()
def test_permissions_are_not_provided_in_collection_get(self):
@@ -36,10 +36,10 @@ def test_permissions_are_not_provided_in_collection_delete(self):
class ObtainRecordPermissionTest(PermissionTest):
def setUp(self):
- super(ObtainRecordPermissionTest, self).setUp()
+ super().setUp()
record = self.resource.model.create_record({})
record_id = record['id']
- record_uri = '/articles/%s' % record_id
+ record_uri = '/articles/{}'.format(record_id)
self.permission.add_principal_to_ace(record_uri, 'read', 'basicauth:bob')
self.permission.add_principal_to_ace(record_uri, 'read', 'account:readonly')
self.permission.add_principal_to_ace(record_uri, 'write', 'basicauth:bob')
@@ -78,10 +78,10 @@ def test_permissions_are_hidden_if_user_has_only_read_permission(self):
class SpecifyRecordPermissionTest(PermissionTest):
def setUp(self):
- super(SpecifyRecordPermissionTest, self).setUp()
+ super().setUp()
self.record = self.resource.model.create_record({})
record_id = self.record['id']
- self.record_uri = '/articles/%s' % record_id
+ self.record_uri = '/articles/{}'.format(record_id)
self.permission.add_principal_to_ace(self.record_uri,
'read',
'account:readonly')
@@ -106,8 +106,7 @@ def test_write_permission_is_given_to_anonymous(self):
request = self.get_request()
# Simulate an anonymous PUT
request.method = 'PUT'
- request.validated = self.resource.request.validated.copy()
- request.validated['body'] = {'data': self.record}
+ request.validated = {**self.resource.request.validated, 'body': {'data': {**self.record}}}
request.prefixed_userid = None
request.matchdict = {'id': self.record['id']}
resource = self.resource_class(request=request,
@@ -182,10 +181,10 @@ def test_412_errors_do_not_put_permission_in_record(self):
class DeletedRecordPermissionTest(PermissionTest):
def setUp(self):
- super(DeletedRecordPermissionTest, self).setUp()
+ super().setUp()
record = self.resource.model.create_record({})
self.resource.record_id = record_id = record['id']
- self.record_uri = '/articles/%s' % record_id
+ self.record_uri = '/articles/{}'.format(record_id)
self.resource.request.route_path.return_value = self.record_uri
self.resource.request.path = self.record_uri
self.permission.add_principal_to_ace(self.record_uri,
@@ -208,14 +207,14 @@ def test_permissions_are_deleted_when_collection_is_deleted(self):
class GuestCollectionListTest(PermissionTest):
def setUp(self):
- super(GuestCollectionListTest, self).setUp()
+ super().setUp()
record1 = self.resource.model.create_record({'letter': 'a'})
record2 = self.resource.model.create_record({'letter': 'b'})
record3 = self.resource.model.create_record({'letter': 'c'})
- uri1 = '/articles/%s' % record1['id']
- uri2 = '/articles/%s' % record2['id']
- uri3 = '/articles/%s' % record3['id']
+ uri1 = '/articles/{}'.format(record1['id'])
+ uri2 = '/articles/{}'.format(record2['id'])
+ uri3 = '/articles/{}'.format(record3['id'])
self.permission.add_principal_to_ace(uri1, 'read', 'fxa:user')
self.permission.add_principal_to_ace(uri2, 'read', 'group')
@@ -250,16 +249,16 @@ def test_unauthorized_error_if_collection_does_not_exist(self):
class GuestCollectionDeleteTest(PermissionTest):
def setUp(self):
- super(GuestCollectionDeleteTest, self).setUp()
+ super().setUp()
record1 = self.resource.model.create_record({'letter': 'a'})
record2 = self.resource.model.create_record({'letter': 'b'})
record3 = self.resource.model.create_record({'letter': 'c'})
record4 = self.resource.model.create_record({'letter': 'd'})
- uri1 = '/articles/%s' % record1['id']
- uri2 = '/articles/%s' % record2['id']
- uri3 = '/articles/%s' % record3['id']
- uri4 = '/articles/%s' % record4['id']
+ uri1 = '/articles/{}'.format(record1['id'])
+ uri2 = '/articles/{}'.format(record2['id'])
+ uri3 = '/articles/{}'.format(record3['id'])
+ uri4 = '/articles/{}'.format(record4['id'])
self.permission.add_principal_to_ace(uri1, 'read', 'fxa:user')
self.permission.add_principal_to_ace(uri2, 'write', 'fxa:user')
@@ -270,7 +269,7 @@ def setUp(self):
self.resource.request.method = 'DELETE'
def get_request(self):
- request = super(GuestCollectionDeleteTest, self).get_request()
+ request = super().get_request()
# RouteFactory must be aware of DELETE to query 'write' permission.
request.method = 'DELETE'
return request
diff --git a/tests/core/resource/test_pagination.py b/tests/core/resource/test_pagination.py
index eb4dc135d..801de9ba0 100644
--- a/tests/core/resource/test_pagination.py
+++ b/tests/core/resource/test_pagination.py
@@ -2,7 +2,7 @@
from base64 import b64encode, b64decode
import mock
-from six.moves.urllib.parse import parse_qs, urlparse
+from urllib.parse import parse_qs, urlparse
from pyramid.httpexceptions import HTTPBadRequest
from kinto.core.utils import json
@@ -12,7 +12,7 @@
class BasePaginationTest(BaseTest):
def setUp(self):
- super(BasePaginationTest, self).setUp()
+ super().setUp()
self.patch_known_field.start()
indices = list(range(20))
@@ -231,7 +231,7 @@ def test_return_total_records_in_headers_matching_deletable(self):
class BuildPaginationTokenTest(BaseTest):
def setUp(self):
- super(BuildPaginationTokenTest, self).setUp()
+ super().setUp()
self.patch_known_field.start()
self.record = {
'id': 1, 'status': 2, 'unread': True,
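``six.moves.urllib.parse`` was only a pointer to the Python 3 standard library, so the pagination tests can import ``parse_qs`` and ``urlparse`` directly. A short sketch on a made-up ``Next-Page`` URL:

    from urllib.parse import parse_qs, urlparse

    next_page = 'https://server.name/v1/mushrooms?_limit=20&_token=abc123'
    parts = urlparse(next_page)

    assert parts.path == '/v1/mushrooms'
    assert parse_qs(parts.query)['_limit'] == ['20']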
diff --git a/tests/core/resource/test_partial_response.py b/tests/core/resource/test_partial_response.py
index 037867e18..f600ede82 100644
--- a/tests/core/resource/test_partial_response.py
+++ b/tests/core/resource/test_partial_response.py
@@ -7,7 +7,7 @@
class PartialResponseBase(BaseTest):
def setUp(self):
- super(PartialResponseBase, self).setUp()
+ super().setUp()
self.resource._get_known_fields = lambda: ['field', 'other', 'orig']
self.record = self.model.create_record(
{
diff --git a/tests/core/resource/test_preconditions.py b/tests/core/resource/test_preconditions.py
index aa1fe9bc5..01ed55d83 100644
--- a/tests/core/resource/test_preconditions.py
+++ b/tests/core/resource/test_preconditions.py
@@ -7,12 +7,12 @@
class NotModifiedTest(BaseTest):
def setUp(self):
- super(NotModifiedTest, self).setUp()
+ super().setUp()
self.stored = self.model.create_record({})
self.resource = self.resource_class(request=self.get_request(),
context=self.get_context())
- self.resource.request.validated = self.validated.copy()
+ self.resource.request.validated = {**self.validated}
self.resource.collection_get()
self.validated = self.resource.request.validated
current = self.last_response.headers['ETag'][1:-1]
@@ -53,7 +53,7 @@ def test_single_record_last_modified_is_returned(self):
class ModifiedMeanwhileTest(BaseTest):
def setUp(self):
- super(ModifiedMeanwhileTest, self).setUp()
+ super().setUp()
self.stored = self.model.create_record({})
self.resource.collection_get()
self.validated = self.resource.request.validated
diff --git a/tests/core/resource/test_record.py b/tests/core/resource/test_record.py
index 0179fd4a7..0836dfb60 100644
--- a/tests/core/resource/test_record.py
+++ b/tests/core/resource/test_record.py
@@ -30,13 +30,13 @@ def test_etag_contains_record_timestamp(self):
# Create another one, bump collection timestamp.
self.model.create_record({'field': 'value'})
self.resource.get()
- expected = '"%s"' % record['last_modified']
+ expected = '"{}"'.format(record['last_modified'])
self.assertEqual(expected, self.last_response.headers['ETag'])
class PutTest(BaseTest):
def setUp(self):
- super(PutTest, self).setUp()
+ super().setUp()
self.record = self.model.create_record({'field': 'old'})
self.resource.record_id = self.record['id']
@@ -48,7 +48,7 @@ def test_etag_is_provided(self):
def test_etag_contains_record_new_timestamp(self):
self.validated['body'] = {'data': {'field': 'new'}}
new = self.resource.put()['data']
- expected = '"%s"' % new['last_modified']
+ expected = '"{}"'.format(new['last_modified'])
self.assertEqual(expected, self.last_response.headers['ETag'])
def test_returns_201_if_created(self):
@@ -130,7 +130,7 @@ def test_storage_is_not_used_if_context_provides_current_record(self):
class DeleteTest(BaseTest):
def test_delete_record_returns_last_timestamp(self):
record = {'field': 'value'}
- record = self.model.create_record(record).copy()
+ record = {**self.model.create_record(record)}
self.resource.record_id = record['id']
result = self.resource.delete()['data']
self.assertNotEqual(result['last_modified'], record['last_modified'])
@@ -145,7 +145,7 @@ def test_etag_contains_deleted_timestamp(self):
record = self.model.create_record({'field': 'value'})
self.resource.record_id = record['id']
deleted = self.resource.delete()
- expected = '"%s"' % deleted['data']['last_modified']
+ expected = '"{}"'.format(deleted['data']['last_modified'])
self.assertEqual(expected, self.last_response.headers['ETag'])
def test_delete_record_returns_stripped_record(self):
@@ -190,7 +190,7 @@ def test_delete_ignores_last_modified_if_less(self):
class PatchTest(BaseTest):
def setUp(self):
- super(PatchTest, self).setUp()
+ super().setUp()
self.stored = self.model.create_record({})
self.resource.record_id = self.stored['id']
self.validated['body'] = {'data': {'position': 10}}
@@ -207,13 +207,13 @@ def test_etag_is_provided(self):
self.assertIn('ETag', self.last_response.headers)
def test_etag_contains_record_new_timestamp(self):
- expected = '"%s"' % self.result['last_modified']
+ expected = '"{}"'.format(self.result['last_modified'])
self.assertEqual(expected, self.last_response.headers['ETag'])
def test_etag_contains_old_timestamp_if_no_field_changed(self):
self.validated['body'] = {'data': {'position': 10}}
self.resource.patch()['data']
- expected = '"%s"' % self.result['last_modified']
+ expected = '"{}"'.format(self.result['last_modified'])
self.assertEqual(expected, self.last_response.headers['ETag'])
def test_modify_record_updates_timestamp(self):
@@ -278,7 +278,7 @@ def test_returns_changed_fields_if_behaviour_is_light(self):
class MergePatchTest(BaseTest):
def setUp(self):
- super(MergePatchTest, self).setUp()
+ super().setUp()
self.stored = self.model.create_record({})
self.resource.record_id = self.stored['id']
self.headers = self.resource.request.headers
@@ -357,7 +357,7 @@ def test_merge_patch_doesnt_remove_previously_inserted_nones(self):
class JsonPatchTest(BaseTest):
def setUp(self):
- super(JsonPatchTest, self).setUp()
+ super().setUp()
self.stored = self.model.create_record({})
self.resource.record_id = self.stored['id']
self.validated['body'] = {'data': {'a': 'aaa', 'b': ['bb', 'bbb'], 'd': []}}
@@ -468,7 +468,7 @@ def test_json_patch_format_not_accepted_without_header(self):
class UnknownRecordTest(BaseTest):
def setUp(self):
- super(UnknownRecordTest, self).setUp()
+ super().setUp()
self.unknown_id = '1cea99eb-5e3d-44ad-a53a-2fb68473b538'
self.resource.record_id = self.unknown_id
self.validated['body'] = {'data': {'field': 'new'}}
@@ -489,7 +489,7 @@ def test_delete_record_unknown_raises_404(self):
class InvalidIdTest(BaseTest):
def setUp(self):
- super(InvalidIdTest, self).setUp()
+ super().setUp()
self.resource.record_id = 'a*b'
def test_get_with_invalid_id_raises_400(self):
@@ -507,7 +507,7 @@ def test_delete_with_invalid_id_raises_400(self):
class ReadonlyFieldsTest(BaseTest):
def setUp(self):
- super(ReadonlyFieldsTest, self).setUp()
+ super().setUp()
self.stored = self.model.create_record({'age': 32})
self.resource.schema.Options.readonly_fields = ('age',)
self.resource.record_id = self.stored['id']
diff --git a/tests/core/resource/test_schema.py b/tests/core/resource/test_schema.py
index e2febebfc..bc32fc647 100644
--- a/tests/core/resource/test_schema.py
+++ b/tests/core/resource/test_schema.py
@@ -1,4 +1,3 @@
-import six
import colander
import mock
@@ -111,15 +110,13 @@ def setUp(self):
self.schema = schema.HeaderField(colander.String())
def test_decode_unicode(self):
- value = six.u('\xe7 is not a c')
+ value = '\xe7 is not a c'
deserialized = self.schema.deserialize(value.encode('utf-8'))
self.assertEquals(deserialized, value)
def test_bad_unicode_raises_invalid(self):
value = b'utf8 \xe9'
- self.assertRaises(colander.Invalid,
- self.schema.deserialize,
- value)
+ self.assertRaises(colander.Invalid, self.schema.deserialize, value)
class QueryFieldSchemaTest(unittest.TestCase):
diff --git a/tests/core/resource/test_sort.py b/tests/core/resource/test_sort.py
index 71a1e1794..fa575ae47 100644
--- a/tests/core/resource/test_sort.py
+++ b/tests/core/resource/test_sort.py
@@ -9,7 +9,7 @@
class SortingTest(BaseTest):
def setUp(self):
- super(SortingTest, self).setUp()
+ super().setUp()
self.patch_known_field.start()
indices = list(range(20))
@@ -119,7 +119,7 @@ def test_default_sort_is_last_modified_records_have_same_status(self):
class SubobjectSortingTest(BaseTest):
def setUp(self):
- super(SubobjectSortingTest, self).setUp()
+ super().setUp()
self.patch_known_field.start()
indices = list(range(20))
diff --git a/tests/core/resource/test_sync.py b/tests/core/resource/test_sync.py
index a83b98e15..d06fa0382 100644
--- a/tests/core/resource/test_sync.py
+++ b/tests/core/resource/test_sync.py
@@ -2,7 +2,6 @@
import mock
import time
-from kinto.core.utils import decode_header
from kinto.core.testing import ThreadMixin
from . import BaseTest
@@ -11,7 +10,7 @@
class SinceModifiedTest(ThreadMixin, BaseTest):
def setUp(self):
- super(SinceModifiedTest, self).setUp()
+ super().setUp()
self.validated['body'] = {'data': {}}
with mock.patch.object(self.model.storage,
@@ -41,7 +40,7 @@ def test_filter_with__to_return_an_alert_header(self):
self.assertIn('Alert', self.resource.request.response.headers)
alert = self.resource.request.response.headers['Alert']
self.assertDictEqual(
- decode_header(json.loads(alert)),
+ json.loads(alert),
{
'code': 'soft-eol',
'message': ('_to is now deprecated, '
diff --git a/tests/core/resource/test_views.py b/tests/core/resource/test_views.py
index 1e59d6f33..b20d2332d 100644
--- a/tests/core/resource/test_views.py
+++ b/tests/core/resource/test_views.py
@@ -101,7 +101,7 @@ def test_data_is_not_required_when_schema_has_no_required_fields(self):
'permissions': {'read': ['group:readers']}}
resp = self.app.post_json('/psilos', body,
headers=self.headers)
- object_uri = '/psilos/' + resp.json['data']['id']
+ object_uri = '/psilos/{}'.format(resp.json['data']['id'])
body.pop('data')
resp = self.app.put_json(object_uri, body, headers=self.headers)
@@ -113,7 +113,7 @@ def test_data_are_not_modified_if_not_specified_on_schemaless(self):
'permissions': {'read': ['group:readers']}}
resp = self.app.post_json('/spores', body,
headers=self.headers)
- object_uri = '/spores/' + resp.json['data']['id']
+ object_uri = '/spores/{}'.format(resp.json['data']['id'])
self.add_permission(object_uri, 'write')
body.pop('data')
@@ -179,7 +179,7 @@ def test_collection_delete_is_denied_when_not_authorized(self):
class RecordAuthzGrantedTest(AuthzAuthnTest):
def setUp(self):
- super(RecordAuthzGrantedTest, self).setUp()
+ super().setUp()
self.add_permission(self.collection_url, 'toadstool:create')
resp = self.app.post_json(self.collection_url,
@@ -215,7 +215,7 @@ def test_record_put_on_unexisting_record_is_granted_when_authorized(self):
class RecordAuthzDeniedTest(AuthzAuthnTest):
def setUp(self):
- super(RecordAuthzDeniedTest, self).setUp()
+ super().setUp()
# Add permission to create a sample record.
self.add_permission(self.collection_url, 'toadstool:create')
resp = self.app.post_json(self.collection_url,
@@ -261,11 +261,10 @@ def test_record_put_on_unexisting_record_is_rejected_if_write_perm(self):
class RecordAuthzGrantedOnCollectionTest(AuthzAuthnTest):
def setUp(self):
- super(RecordAuthzGrantedOnCollectionTest, self).setUp()
+ super().setUp()
self.add_permission(self.collection_url, 'toadstool:create')
- self.guest_headers = self.headers.copy()
- self.guest_headers['Authorization'] = "Basic bmF0aW06"
+ self.guest_headers = {**self.headers, 'Authorization': "Basic bmF0aW06"}
resp = self.app.get('/', headers=self.guest_headers)
self.guest_id = resp.json['user']['id']
@@ -354,7 +353,7 @@ def test_known_fields_are_saved(self):
class InvalidRecordTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(InvalidRecordTest, self).setUp()
+ super().setUp()
body = {'data': MINIMALIST_RECORD}
resp = self.app.post_json(self.collection_url,
body,
@@ -418,8 +417,7 @@ def test_replace_with_invalid_record_returns_400(self):
status=400)
def test_id_is_validated_on_post(self):
- record = MINIMALIST_RECORD.copy()
- record['id'] = 3.14
+ record = {**MINIMALIST_RECORD, 'id': 3.14}
self.app.post_json(self.collection_url,
{'data': record},
headers=self.headers,
@@ -433,60 +431,43 @@ def test_id_is_validated_on_post(self):
status=400)
def test_id_is_preserved_on_post(self):
- record = MINIMALIST_RECORD.copy()
- record_id = record['id'] = '472be9ec-26fe-461b-8282-9c4e4b207ab3'
+ record = {**MINIMALIST_RECORD, 'id': '472be9ec-26fe-461b-8282-9c4e4b207ab3'}
resp = self.app.post_json(self.collection_url,
{'data': record},
headers=self.headers)
- self.assertEqual(resp.json['data']['id'], record_id)
+ self.assertEqual(resp.json['data']['id'], record['id'])
def test_200_is_returned_if_id_matches_existing_record(self):
- record = MINIMALIST_RECORD.copy()
- record['id'] = self.record['id']
+ record = {**MINIMALIST_RECORD, 'id': self.record['id']}
self.app.post_json(self.collection_url,
{'data': record},
headers=self.headers,
status=200)
def test_invalid_accept_header_on_collections_returns_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/plain'
- resp = self.app.post(self.collection_url,
- '',
- headers=headers,
- status=406)
+ headers = {**self.headers, 'Accept': 'text/plain'}
+ resp = self.app.post(self.collection_url, '', headers=headers, status=406)
self.assertEqual(resp.json['code'], 406)
message = "Accept header should be one of ['application/json']"
self.assertEqual(resp.json['message'], message)
def test_invalid_content_type_header_on_collections_returns_415(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/plain'
- resp = self.app.post(self.collection_url,
- '',
- headers=headers,
- status=415)
+ headers = {**self.headers, 'Content-Type': 'text/plain'}
+ resp = self.app.post(self.collection_url, '', headers=headers, status=415)
self.assertEqual(resp.json['code'], 415)
message = "Content-Type header should be one of ['application/json']"
self.assertEqual(resp.json['message'], message)
def test_invalid_accept_header_on_record_returns_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/plain'
- resp = self.app.get(self.get_item_url(),
- headers=headers,
- status=406)
+ headers = {**self.headers, 'Accept': 'text/plain'}
+ resp = self.app.get(self.get_item_url(), headers=headers, status=406)
self.assertEqual(resp.json['code'], 406)
message = "Accept header should be one of ['application/json']"
self.assertEqual(resp.json['message'], message)
def test_invalid_content_type_header_on_record_returns_415(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/plain'
- resp = self.app.patch_json(self.get_item_url(),
- '',
- headers=headers,
- status=415)
+ headers = {**self.headers, 'Content-Type': 'text/plain'}
+ resp = self.app.patch_json(self.get_item_url(), '', headers=headers, status=415)
self.assertEqual(resp.json['code'], 415)
messages = (
"Content-Type header should be one of [",
@@ -503,7 +484,7 @@ def test_invalid_content_type_header_on_record_returns_415(self):
class IgnoredFieldsTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(IgnoredFieldsTest, self).setUp()
+ super().setUp()
body = {'data': MINIMALIST_RECORD}
resp = self.app.post_json(self.collection_url,
body,
@@ -511,11 +492,9 @@ def setUp(self):
self.record = resp.json['data']
def test_last_modified_is_not_validated_and_overwritten(self):
- record = MINIMALIST_RECORD.copy()
- record['last_modified'] = 'abc'
- body = {'data': record}
+ record = {**MINIMALIST_RECORD, 'last_modified': 'abc'}
resp = self.app.post_json(self.collection_url,
- body,
+ {'data': record},
headers=self.headers)
self.assertNotEqual(resp.json['data']['last_modified'], 'abc')
@@ -527,22 +506,20 @@ def test_modify_works_with_invalid_last_modified(self):
self.assertNotEqual(resp.json['data']['last_modified'], 'abc')
def test_replace_works_with_invalid_last_modified(self):
- record = MINIMALIST_RECORD.copy()
- record['last_modified'] = 'abc'
- body = {'data': record}
+ record = {**MINIMALIST_RECORD, 'last_modified': 'abc'}
resp = self.app.put_json(self.get_item_url(),
- body,
+ {'data': record},
headers=self.headers)
self.assertNotEqual(resp.json['data']['last_modified'], 'abc')
class InvalidBodyTest(BaseWebTest, unittest.TestCase):
def __init__(self, *args, **kwargs):
- super(InvalidBodyTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.invalid_body = "{'foo>}"
def setUp(self):
- super(InvalidBodyTest, self).setUp()
+ super().setUp()
body = {'data': MINIMALIST_RECORD}
resp = self.app.post_json(self.collection_url,
body,
@@ -615,7 +592,7 @@ def test_modify_shareable_resource_with_empty_body_returns_400(self):
body,
headers=self.headers)
record = resp.json['data']
- item_url = '/toadstools/' + record['id']
+ item_url = '/toadstools/{}'.format(record['id'])
self.app.patch(item_url,
headers=self.headers,
status=400)
@@ -625,7 +602,7 @@ class InvalidPermissionsTest(BaseWebTest, unittest.TestCase):
collection_url = '/toadstools'
def setUp(self):
- super(InvalidPermissionsTest, self).setUp()
+ super().setUp()
body = {'data': MINIMALIST_RECORD}
resp = self.app.post_json(self.collection_url,
body,
@@ -674,7 +651,7 @@ class CacheControlTest(BaseWebTest, unittest.TestCase):
collection_url = '/toadstools'
def get_app_settings(self, extras=None):
- settings = super(CacheControlTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['toadstool_cache_expires_seconds'] = 3600
settings['toadstool_read_principals'] = 'system.Everyone'
settings['psilo_cache_expires_seconds'] = 0
@@ -706,7 +683,7 @@ def test_cache_control_provides_no_cache_by_default(self):
class StorageErrorTest(BaseWebTest, unittest.TestCase):
def __init__(self, *args, **kwargs):
- super(StorageErrorTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.error = storage_exceptions.BackendError(ValueError())
self.storage_error_patcher = mock.patch(
'kinto.core.storage.memory.Storage.create',
@@ -737,7 +714,7 @@ class PaginationNextURLTest(BaseWebTest, unittest.TestCase):
"""
def setUp(self):
- super(PaginationNextURLTest, self).setUp()
+ super().setUp()
body = {'data': MINIMALIST_RECORD}
self.app.post_json(self.collection_url,
body,
@@ -765,8 +742,7 @@ def test_next_page_url_relies_on_pyramid_url_system(self):
self.assertIn('https://', resp.headers['Next-Page'])
def test_next_page_url_relies_on_headers_information(self):
- headers = self.headers.copy()
- headers['Host'] = 'https://server.name:443'
+ headers = {**self.headers, 'Host': 'https://server.name:443'}
resp = self.app.get(self.collection_url + '?_limit=1',
headers=headers)
self.assertIn('https://server.name:443', resp.headers['Next-Page'])
@@ -778,7 +754,7 @@ class SchemaLessPartialResponseTest(BaseWebTest, unittest.TestCase):
collection_url = '/spores'
def setUp(self):
- super(SchemaLessPartialResponseTest, self).setUp()
+ super().setUp()
body = {'data': {'size': 42, 'category': 'some-cat', 'owner': 'loco'}}
resp = self.app.post_json(self.collection_url,
body,
diff --git a/tests/core/resource/test_views_cors.py b/tests/core/resource/test_views_cors.py
index dd0298b88..e794809ac 100644
--- a/tests/core/resource/test_views_cors.py
+++ b/tests/core/resource/test_views_cors.py
@@ -11,7 +11,7 @@
class CORSOriginHeadersTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(CORSOriginHeadersTest, self).setUp()
+ super().setUp()
self.headers['Origin'] = 'notmyidea.org'
body = {'data': MINIMALIST_RECORD}
@@ -23,8 +23,7 @@ def setUp(self):
def test_can_be_configured_from_settings(self):
app = self.make_app({'cors_origins': '*.daybed.io'})
- headers = self.headers.copy()
- headers['Origin'] = 'demo.daybed.io'
+ headers = {**self.headers, 'Origin': 'demo.daybed.io'}
resp = app.get(self.collection_url, headers=headers)
self.assertEqual(resp.headers['Access-Control-Allow-Origin'],
'demo.daybed.io')
@@ -46,8 +45,7 @@ def test_present_on_deletion(self):
self.assertIn('Access-Control-Allow-Origin', response.headers)
def test_present_on_unknown_url(self):
- headers = self.headers.copy()
- headers['Origin'] = 'notmyidea.org'
+ headers = {**self.headers, 'Origin': 'notmyidea.org'}
response = self.app.get('/unknown',
headers=headers,
status=404)
@@ -55,8 +53,7 @@ def test_present_on_unknown_url(self):
'notmyidea.org')
def test_not_present_on_unknown_url_if_setting_does_not_match(self):
- headers = self.headers.copy()
- headers['Origin'] = 'notmyidea.org'
+ headers = {**self.headers, 'Origin': 'notmyidea.org'}
with mock.patch.dict(self.app.app.registry.settings,
[('cors_origins', 'daybed.io')]):
response = self.app.get('/unknown',
@@ -184,7 +181,7 @@ def test_present_on_unknown_url(self):
class CORSMaxAgeTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(CORSMaxAgeTest, self).setUp()
+ super().setUp()
self.headers.update({
'Origin': 'lolnet.org',
'Access-Control-Request-Method': 'GET'
diff --git a/tests/core/resource/test_viewset.py b/tests/core/resource/test_viewset.py
index 26d048527..b61ddaa91 100644
--- a/tests/core/resource/test_viewset.py
+++ b/tests/core/resource/test_viewset.py
@@ -28,7 +28,7 @@ def arguments(self, resource, method):
return {}
-class FakeResource(object):
+class FakeResource:
"""Fake resource class used for tests"""
name = "fake"
diff --git a/tests/core/support.py b/tests/core/support.py
index 0ebd6386a..41d74cd09 100644
--- a/tests/core/support.py
+++ b/tests/core/support.py
@@ -29,7 +29,7 @@ class BaseWebTest(testing.BaseWebTest):
collection_url = '/mushrooms'
def __init__(self, *args, **kwargs):
- super(BaseWebTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.headers.update(testing.get_user_headers('mat'))
def get_app_settings(self, extras=None):
@@ -40,17 +40,17 @@ def get_app_settings(self, extras=None):
extras.setdefault('project_docs', 'https://kinto.readthedocs.io/')
extras.setdefault('multiauth.authorization_policy',
self.authorization_policy)
- return super(BaseWebTest, self).get_app_settings(extras)
+ return super().get_app_settings(extras)
def get_item_url(self, id=None):
"""Return the URL of the item using self.item_url."""
if id is None:
id = self.record['id']
- return self.collection_url + '/' + str(id)
+ return '{}/{}'.format(self.collection_url, id)
@implementer(IAuthorizationPolicy)
-class AllowAuthorizationPolicy(object):
+class AllowAuthorizationPolicy:
def permits(self, context, principals, permission):
if permission == PRIVATE:
return Authenticated in principals
@@ -75,7 +75,7 @@ def wrapper(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
with mock.patch(
- '%s.permits' % authz_class,
+ '{}.permits'.format(authz_class),
return_value=permits):
return f(*args, **kwargs)
return wrapped
diff --git a/tests/core/test_authentication.py b/tests/core/test_authentication.py
index 549483d06..da252a045 100644
--- a/tests/core/test_authentication.py
+++ b/tests/core/test_authentication.py
@@ -98,11 +98,11 @@ def test_userid_is_hashed(self, mocked):
def test_userid_is_built_using_password(self):
auth_password = utils.encode64('user:secret1', encoding='ascii')
- self.request.headers['Authorization'] = 'Basic %s' % auth_password
+ self.request.headers['Authorization'] = 'Basic {}'.format(auth_password)
user_id1 = self.policy.unauthenticated_userid(self.request)
auth_password = utils.encode64('user:secret2', encoding='ascii')
- self.request.headers['Authorization'] = 'Basic %s' % auth_password
+ self.request.headers['Authorization'] = 'Basic {}'.format(auth_password)
user_id2 = self.policy.unauthenticated_userid(self.request)
self.assertNotEqual(user_id1, user_id2)
@@ -114,12 +114,12 @@ def test_views_are_forbidden_if_basic_is_wrong(self):
def test_returns_none_if_username_is_empty(self):
auth_password = utils.encode64(':secret', encoding='ascii')
- self.request.headers['Authorization'] = 'Basic %s' % auth_password
+ self.request.headers['Authorization'] = 'Basic {}'.format(auth_password)
user_id = self.policy.unauthenticated_userid(self.request)
self.assertIsNone(user_id)
def test_providing_empty_password_is_supported(self):
auth_password = utils.encode64('secret:', encoding='ascii')
- self.request.headers['Authorization'] = 'Basic %s' % auth_password
+ self.request.headers['Authorization'] = 'Basic {}'.format(auth_password)
user_id = self.policy.unauthenticated_userid(self.request)
self.assertIsNotNone(user_id)
diff --git a/tests/core/test_authorization.py b/tests/core/test_authorization.py
index 72c246554..9e4d6088e 100644
--- a/tests/core/test_authorization.py
+++ b/tests/core/test_authorization.py
@@ -283,7 +283,7 @@ def route_path(service_name, **kwargs):
# Simulate a resource that has no record_path (only list).
if service_name == 'article-record':
raise KeyError
- return '/comments/sub/{id}'.format(**kwargs)
+ return '/comments/sub/{id}'.format_map(kwargs)
self.request.route_path.side_effect = route_path
diff --git a/tests/core/test_cache.py b/tests/core/test_cache.py
index 4bf7fb94e..0de6226b4 100644
--- a/tests/core/test_cache.py
+++ b/tests/core/test_cache.py
@@ -67,7 +67,7 @@ def test_clean_expired_expires_items(self):
def test_add_over_quota_clean_oversized_items(self):
for x in range(100):
# Each entry is 70 bytes
- self.cache.set('foo' + str(x).zfill(3), 'toto')
+ self.cache.set('foo{0:03d}'.format(x), 'toto')
time.sleep(0.001)
assert self.cache.get('foo000') == 'toto'
# This should delete the 2 first entries
@@ -88,7 +88,7 @@ class PostgreSQLCacheTest(CacheTest, unittest.TestCase):
}
def setUp(self):
- super(PostgreSQLCacheTest, self).setUp()
+ super().setUp()
self.client_error_patcher = mock.patch.object(
self.cache.client,
'session_factory',
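The cache-quota test now builds its keys with a zero-padded format spec; for the non-negative integers used here, ``'{0:03d}'.format(x)`` and ``str(x).zfill(3)`` produce the same text. A one-line check:

    for x in (0, 7, 42, 99):
        assert 'foo' + str(x).zfill(3) == 'foo{0:03d}'.format(x)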
diff --git a/tests/core/test_decorators.py b/tests/core/test_decorators.py
index 340331c63..214f1936b 100644
--- a/tests/core/test_decorators.py
+++ b/tests/core/test_decorators.py
@@ -1,6 +1,6 @@
import mock
import pytest
-from six import StringIO
+from io import StringIO
from pyramid.httpexceptions import HTTPOk
from kinto.core.decorators import cache_forever
@@ -13,7 +13,7 @@ def demo1(request):
@cache_forever
def demo2(request, name):
- return "demo2: " + name
+ return "demo2: {}".format(name)
@cache_forever
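``six.StringIO`` simply re-exported ``io.StringIO`` on Python 3, so the decorator tests can import the in-memory text stream from the standard library. Sketch:

    from io import StringIO

    buf = StringIO()
    buf.write("demo2: kinto\n")
    assert buf.getvalue() == "demo2: kinto\n"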
diff --git a/tests/core/test_errors.py b/tests/core/test_errors.py
index adfce4b6f..12e513eea 100644
--- a/tests/core/test_errors.py
+++ b/tests/core/test_errors.py
@@ -2,7 +2,6 @@
import unittest
from kinto.core.errors import send_alert
-from kinto.core.utils import decode_header
from kinto.core.testing import DummyRequest
@@ -11,9 +10,8 @@ class SendAlertTest(unittest.TestCase):
def verify_alert_header(self, request, expected):
self.assertIn('Alert', request.response.headers)
alert = request.response.headers['Alert']
- self.assertDictEqual(
- decode_header(json.loads(alert)),
- expected)
+ self.assertDictEqual(json.loads(alert),
+ expected)
def test_send_alert_default_to_project_url(self):
request = DummyRequest()
diff --git a/tests/core/test_initialization.py b/tests/core/test_initialization.py
index a1c10ad6c..e27862b36 100644
--- a/tests/core/test_initialization.py
+++ b/tests/core/test_initialization.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
import mock
import webtest
@@ -266,8 +265,7 @@ def test_load_default_settings_handle_prefix_attributes(self):
class StatsDConfigurationTest(unittest.TestCase):
def setUp(self):
- settings = kinto.core.DEFAULT_SETTINGS.copy()
- settings['statsd_url'] = 'udp://host:8080'
+ settings = {**kinto.core.DEFAULT_SETTINGS, 'statsd_url': 'udp://host:8080'}
self.config = Configurator(settings=settings)
self.config.registry.storage = {}
self.config.registry.cache = {}
@@ -336,7 +334,7 @@ def test_statsd_counts_unknown_urls(self):
@mock.patch('kinto.core.utils.hmac_digest')
def test_statsd_counts_unique_users(self, digest_mocked):
- digest_mocked.return_value = u'mat'
+ digest_mocked.return_value = 'mat'
kinto.core.initialize(self.config, '0.0.1', 'project_name')
app = webtest.TestApp(self.config.make_wsgi_app())
headers = {'Authorization': 'Basic bWF0Og=='}
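Dropping the ``# -*- coding: utf-8 -*-`` cookie and the ``u''`` prefix is safe because Python 3 source files default to UTF-8 (PEP 3120) and every string literal is already text. Illustrative check:

    user = 'mat'
    assert user == u'mat'                      # the u'' prefix is accepted but redundant
    assert 'è'.encode('utf-8') == b'\xc3\xa8'  # literals are Unicode, encoding is explicit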
diff --git a/tests/core/test_listeners.py b/tests/core/test_listeners.py
index 38b594747..95f6e60ee 100644
--- a/tests/core/test_listeners.py
+++ b/tests/core/test_listeners.py
@@ -12,21 +12,21 @@
UID = str(uuid.uuid4())
-class ViewSet(object):
+class ViewSet:
def get_name(*args, **kw):
return 'collection'
-class Service(object):
+class Service:
viewset = ViewSet()
-class Match(object):
+class Match:
cornice_services = {'watev': Service()}
pattern = 'watev'
-class Request(object):
+class Request:
path = '/1/bucket/collection/'
prefixed_userid = 'tarek'
matchdict = {'id': UID}
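In Python 3 every class is new-style, so the bare ``class ViewSet:`` form used for these test doubles is identical to inheriting from ``object`` explicitly:

    class ViewSet:                   # implicit object base on Python 3
        pass

    class LegacyViewSet(object):     # the explicit spelling Python 2 required
        pass

    assert ViewSet.__mro__ == (ViewSet, object)
    assert LegacyViewSet.__mro__ == (LegacyViewSet, object)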
diff --git a/tests/core/test_logging.py b/tests/core/test_logging.py
index f7decfb67..d8ba8ecc5 100644
--- a/tests/core/test_logging.py
+++ b/tests/core/test_logging.py
@@ -1,10 +1,8 @@
-# -*- coding: utf-8 -*-
import logging
import os
import re
import mock
-import six
from pyramid import testing
from kinto.core import DEFAULT_SETTINGS
@@ -31,7 +29,7 @@ def strip_ansi(text):
class LoggingSetupTest(unittest.TestCase):
def tearDown(self):
- super(LoggingSetupTest, self).tearDown()
+ super().tearDown()
core_logs.structlog.reset_defaults()
def test_classic_logger_is_used_by_default(self):
@@ -64,7 +62,7 @@ def setUp(self):
def test_output_is_serialized_as_string(self):
value = self.renderer(self.logger, 'debug', {})
- self.assertIsInstance(value, six.string_types)
+ self.assertIsInstance(value, str)
def test_output_is_simple_if_no_request_is_bound(self):
value = self.renderer(self.logger, 'debug', {'event': ':)'})
@@ -104,14 +102,8 @@ def test_every_event_dict_entry_appears_in_log_message(self):
' app.event nb_records=5'), strip_ansi(value))
def test_fields_values_support_unicode(self):
- value = self.renderer(self.logger, 'critical', {'value': u'\u2014'})
- self.assertIn(u'\u2014', value)
-
- @unittest.skipIf(six.PY3, "Error with Python2 only")
- def test_fields_values_support_bytes(self):
- value = self.renderer(self.logger, 'critical',
- {'event': AssertionError('\xc3\xa8')})
- self.assertIn(u'è', value)
+ value = self.renderer(self.logger, 'critical', {'value': '\u2014'})
+ self.assertIn('\u2014', value)
class MozillaHekaRendererTest(unittest.TestCase):
@@ -122,7 +114,7 @@ def setUp(self):
def test_output_is_serialized_json(self):
value = self.renderer(self.logger, 'debug', {})
- self.assertIsInstance(value, six.string_types)
+ self.assertIsInstance(value, str)
def test_standard_entries_are_filled(self):
with mock.patch('kinto.core.utils.msec_time', return_value=12):
@@ -192,13 +184,13 @@ def test_list_of_string_values_are_not_serialized(self):
class RequestSummaryTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(RequestSummaryTest, self).setUp()
+ super().setUp()
config = testing.setUp()
config.registry.settings = DEFAULT_SETTINGS
initialization.setup_logging(config)
def tearDown(self):
- super(RequestSummaryTest, self).tearDown()
+ super().tearDown()
core_logs.structlog.reset_defaults()
def test_request_summary_is_sent_as_info(self):
@@ -248,9 +240,8 @@ def test_basic_authn_type_is_bound(self):
class BatchSubrequestTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(BatchSubrequestTest, self).setUp()
- headers = self.headers.copy()
- headers['User-Agent'] = 'readinglist'
+ super().setUp()
+ headers = {**self.headers, 'User-Agent': 'readinglist'}
body = {
'requests': [{
'path': '/unknown',
@@ -290,13 +281,13 @@ def test_subrequests_are_logged_as_subrequest_summary(self):
class ResourceInfoTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(ResourceInfoTest, self).setUp()
+ super().setUp()
config = testing.setUp()
config.registry.settings = DEFAULT_SETTINGS
initialization.setup_logging(config)
def tearDown(self):
- super(ResourceInfoTest, self).tearDown()
+ super().tearDown()
core_logs.structlog.reset_defaults()
def test_collection_id_is_bound(self):
diff --git a/tests/core/test_permission.py b/tests/core/test_permission.py
index 254c184f7..1b1f9a2ce 100644
--- a/tests/core/test_permission.py
+++ b/tests/core/test_permission.py
@@ -56,7 +56,7 @@ class PostgreSQLPermissionTest(PermissionTest, unittest.TestCase):
}
def setUp(self):
- super(PostgreSQLPermissionTest, self).setUp()
+ super().setUp()
self.client_error_patcher = [mock.patch.object(
self.permission.client,
'session_factory',
diff --git a/tests/core/test_statsd.py b/tests/core/test_statsd.py
index a96d045da..c5c3640c1 100644
--- a/tests/core/test_statsd.py
+++ b/tests/core/test_statsd.py
@@ -9,7 +9,7 @@
from .support import BaseWebTest
-class TestedClass(object):
+class TestedClass:
attribute = 3.14
def test_method(self):
@@ -80,8 +80,7 @@ def test_load_from_config(self, module_mock):
@mock.patch('kinto.core.statsd.statsd_module')
def test_load_from_config_uses_project_name_if_defined(self, module_mock):
config = testing.setUp()
- config.registry.settings = self.settings.copy()
- config.registry.settings['project_name'] = 'projectname'
+ config.registry.settings = {**self.settings, 'project_name': 'projectname'}
statsd.load_from_config(config)
module_mock.StatsClient.assert_called_with('foo', 1234,
prefix='projectname')
@@ -101,7 +100,7 @@ def test_statsd_count_call_the_client_if_configured(self):
@skip_if_no_statsd
class TimingTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, *args, **kwargs):
- settings = super(TimingTest, self).get_app_settings(*args, **kwargs)
+ settings = super().get_app_settings(*args, **kwargs)
if not statsd.statsd_module:
return settings
diff --git a/tests/core/test_storage.py b/tests/core/test_storage.py
index 9e68b928a..4b1a557cf 100644
--- a/tests/core/test_storage.py
+++ b/tests/core/test_storage.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import mock
from kinto.core.utils import sqlalchemy
@@ -78,7 +76,7 @@ class MemoryStorageTest(StorageTest, unittest.TestCase):
backend = memory
def setUp(self):
- super(MemoryStorageTest, self).setUp()
+ super().setUp()
self.client_error_patcher = mock.patch.object(
self.storage,
'_bump_timestamp',
@@ -111,7 +109,7 @@ class PostgreSQLStorageTest(StorageTest, unittest.TestCase):
}
def setUp(self):
- super(PostgreSQLStorageTest, self).setUp()
+ super().setUp()
self.client_error_patcher = mock.patch.object(
self.storage.client,
'session_factory',
@@ -119,13 +117,12 @@ def setUp(self):
def test_number_of_fetched_records_can_be_limited_in_settings(self):
for i in range(4):
- self.create_record({'phone': 'tel-%s' % i})
+ self.create_record({'phone': 'tel-{}'.format(i)})
results, count = self.storage.get_all(**self.storage_kw)
self.assertEqual(len(results), 4)
- settings = self.settings.copy()
- settings['storage_max_fetch_size'] = 2
+ settings = {**self.settings, 'storage_max_fetch_size': 2}
config = self._get_config(settings=settings)
limited = self.backend.load_from_config(config)
@@ -164,8 +161,7 @@ def test_pool_object_is_shared_among_backend_instances(self):
def test_warns_if_configured_pool_size_differs_for_same_backend_type(self):
self.backend.load_from_config(self._get_config())
- settings = self.settings.copy()
- settings['storage_pool_size'] = 1
+ settings = {**self.settings, 'storage_pool_size': 1}
msg = ('Reuse existing PostgreSQL connection. Parameters storage_* '
'will be ignored.')
with mock.patch('kinto.core.storage.postgresql.client.'
diff --git a/tests/core/test_storage_migrations.py b/tests/core/test_storage_migrations.py
index 75a987d4b..142e85f94 100644
--- a/tests/core/test_storage_migrations.py
+++ b/tests/core/test_storage_migrations.py
@@ -2,7 +2,6 @@
import unittest
import mock
-import six
from pyramid import testing
from kinto.core.cache import postgresql as postgresql_cache
@@ -15,13 +14,13 @@
@skip_if_no_postgresql
class PostgresqlStorageMigrationTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
- super(PostgresqlStorageMigrationTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
from kinto.core.utils import sqlalchemy
if sqlalchemy is None:
return
from .test_storage import PostgreSQLStorageTest
- self.settings = PostgreSQLStorageTest.settings.copy()
+ self.settings = {**PostgreSQLStorageTest.settings}
self.config = testing.setUp()
self.config.add_settings(self.settings)
self.version = postgresql_storage.Storage.schema_version
@@ -125,7 +124,7 @@ def test_every_available_migration(self):
data=json.dumps(before))
result = conn.execute(query, placeholders)
inserted = result.fetchone()
- before['id'] = six.text_type(inserted['id'])
+ before['id'] = str(inserted['id'])
before['last_modified'] = inserted['last_modified']
# In cliquet 1.6, version = 1.
@@ -207,13 +206,13 @@ def test_migration_12_clean_tombstones(self):
@skip_if_no_postgresql
class PostgresqlPermissionMigrationTest(unittest.TestCase):
def __init__(self, *args, **kw):
- super(PostgresqlPermissionMigrationTest, self).__init__(*args, **kw)
+ super().__init__(*args, **kw)
from kinto.core.utils import sqlalchemy
if sqlalchemy is None:
return
from .test_permission import PostgreSQLPermissionTest
- settings = PostgreSQLPermissionTest.settings.copy()
+ settings = {**PostgreSQLPermissionTest.settings}
config = testing.setUp()
config.add_settings(settings)
self.permission = postgresql_permission.load_from_config(config)
@@ -246,13 +245,13 @@ def test_does_not_execute_if_ran_with_dry(self):
@skip_if_no_postgresql
class PostgresqlCacheMigrationTest(unittest.TestCase):
def __init__(self, *args, **kw):
- super(PostgresqlCacheMigrationTest, self).__init__(*args, **kw)
+ super().__init__(*args, **kw)
from kinto.core.utils import sqlalchemy
if sqlalchemy is None:
return
from .test_cache import PostgreSQLCacheTest
- settings = PostgreSQLCacheTest.settings.copy()
+ settings = {**PostgreSQLCacheTest.settings}
config = testing.setUp()
config.add_settings(settings)
self.cache = postgresql_cache.load_from_config(config)
diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py
index 4df35db29..9ed8da113 100644
--- a/tests/core/test_utils.py
+++ b/tests/core/test_utils.py
@@ -1,12 +1,9 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
import unittest
import os
import pytest
import colander
import mock
-import six
from kinto.core import includeme
from kinto.core import DEFAULT_SETTINGS
from pyramid import httpexceptions
@@ -15,8 +12,8 @@
from kinto.core.utils import (
native_value, strip_whitespace, random_bytes_hex, read_env, hmac_digest,
- current_service, encode_header, decode_header, follow_subrequest,
- build_request, dict_subset, dict_merge, parse_resource
+ current_service, follow_subrequest, build_request, dict_subset, dict_merge,
+ parse_resource
)
from kinto.core.testing import DummyRequest
@@ -81,7 +78,7 @@ def test_return_hex_string(self):
try:
int(value, 16)
except ValueError:
- self.fail("%s is not an hexadecimal value." % value)
+ self.fail("{} is not an hexadecimal value.".format(value))
def test_return_right_length_string(self):
for x in range(2, 4):
@@ -90,7 +87,7 @@ def test_return_right_length_string(self):
def test_return_text_string(self):
value = random_bytes_hex(16)
- self.assertIsInstance(value, six.text_type)
+ self.assertIsInstance(value, str)
class HmacDigestTest(unittest.TestCase):
@@ -145,53 +142,6 @@ def test_built_request_has_kinto_core_custom_methods(self):
self.assertTrue(hasattr(request, 'current_service'))
-class EncodeHeaderTest(unittest.TestCase):
-
- def test_returns_a_string_if_passed_a_string(self):
- entry = str('Toto')
- value = encode_header(entry)
- self.assertEqual(entry, value)
- self.assertEqual(type(value), str)
-
- def test_returns_a_string_if_passed_bytes(self):
- entry = 'Toto'.encode('utf-8')
- value = encode_header(entry)
- self.assertEqual(type(value), str)
-
- def test_returns_a_string_if_passed_bytes_and_encoding(self):
- entry = 'Rémy'.encode('latin-1')
- value = encode_header(entry, 'latin-1')
- self.assertEqual(type(value), str)
-
- def test_returns_a_string_if_passed_unicode(self):
- entry = six.text_type('Rémy')
- value = encode_header(entry)
- self.assertEqual(type(value), str)
-
- def test_returns_a_string_if_passed_unicode_with_encoding(self):
- entry = six.text_type('Rémy')
- value = encode_header(entry, 'latin-1')
- self.assertEqual(type(value), str)
-
-
-class DecodeHeaderTest(unittest.TestCase):
-
- def test_returns_an_unicode_string_if_passed_a_string(self):
- entry = 'Toto'
- value = decode_header(entry)
- self.assertEqual(entry, value)
-
- def test_returns_an_unicode__string_if_passed_bytes(self):
- entry = 'Toto'.encode('utf-8')
- value = decode_header(entry)
- self.assertEqual(type(value), six.text_type)
-
- def test_returns_an_unicode__string_if_passed_bytes_and_encoding(self):
- entry = 'Rémy'.encode('latin-1')
- value = decode_header(entry, 'latin-1')
- self.assertEqual(type(value), six.text_type)
-
-
class FollowSubrequestTest(unittest.TestCase):
def test_parent_and_bound_data_are_preserved(self):
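With Python 2 dropped, `six.text_type` is simply `str`, so the dedicated `EncodeHeaderTest`/`DecodeHeaderTest` round-trips removed above become redundant. A small sketch of the Python 3 behaviour those tests were guarding, using only the stdlib (no Kinto helpers assumed):

# On Python 3, text handled in headers is plain str.
value = 'Rémy'
assert isinstance(value, str)

# Bytes must be decoded explicitly with a known charset.
raw = value.encode('latin-1')
assert raw.decode('latin-1') == value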
diff --git a/tests/core/test_views_batch.py b/tests/core/test_views_batch.py
index fa043ad03..f4117713e 100644
--- a/tests/core/test_views_batch.py
+++ b/tests/core/test_views_batch.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
import colander
import mock
import uuid
@@ -128,7 +127,7 @@ def test_redirect_responses_are_followed(self):
def test_body_is_transmitted_during_redirect(self):
request = {
'method': 'PUT',
- 'path': '/mushrooms/%s/' % str(uuid.uuid4()),
+ 'path': '/mushrooms/{}/'.format(str(uuid.uuid4())),
'body': {'data': {'name': 'Trompette de la mort'}}
}
body = {'requests': [request]}
@@ -139,11 +138,10 @@ def test_body_is_transmitted_during_redirect(self):
self.assertEqual(record['name'], 'Trompette de la mort')
def test_400_error_message_is_forwarded(self):
- headers = self.headers.copy()
- headers['If-Match'] = '"*"'
+ headers = {**self.headers, 'If-Match': '"*"'}
request = {
'method': 'PUT',
- 'path': '/mushrooms/%s' % str(uuid.uuid4()),
+ 'path': '/mushrooms/{}'.format(str(uuid.uuid4())),
'body': {'data': {'name': 'Trompette de la mort'}},
'headers': headers
}
@@ -154,11 +152,10 @@ def test_400_error_message_is_forwarded(self):
self.assertEqual(resp.json['responses'][1]['body']['message'], msg)
def test_412_errors_are_forwarded(self):
- headers = self.headers.copy()
- headers['If-None-Match'] = '*'
+ headers = {**self.headers, 'If-None-Match': '*'}
request = {
'method': 'PUT',
- 'path': '/mushrooms/%s' % str(uuid.uuid4()),
+ 'path': '/mushrooms/{}'.format(str(uuid.uuid4())),
'body': {'data': {'name': 'Trompette de la mort'}},
'headers': headers
}
@@ -360,31 +357,31 @@ def test_subrequests_body_have_json_content_type(self):
subrequest.headers['Content-Type'])
def test_subrequests_body_have_utf8_charset(self):
- request = {'path': '/', 'body': {'json': u"😂"}}
+ request = {'path': '/', 'body': {'json': "😂"}}
self.post({'requests': [request]})
subrequest, = self.request.invoke_subrequest.call_args[0]
self.assertIn('charset=utf-8', subrequest.headers['Content-Type'])
- wanted = {"json": u"😂"}
+ wanted = {"json": "😂"}
self.assertEqual(subrequest.body.decode('utf8'),
json.dumps(wanted))
def test_subrequests_paths_are_url_encoded(self):
- request = {'path': u'/test?param=©'}
+ request = {'path': '/test?param=©'}
self.post({'requests': [request]})
subrequest, = self.request.invoke_subrequest.call_args[0]
- self.assertEqual(subrequest.path, u'/v0/test')
- self.assertEqual(subrequest.GET['param'], u'©')
+ self.assertEqual(subrequest.path, '/v0/test')
+ self.assertEqual(subrequest.GET['param'], '©')
def test_subrequests_responses_paths_are_url_decoded(self):
- request = {'path': u'/test?param=©'}
+ request = {'path': '/test?param=©'}
resp = self.post({'requests': [request]})
path = resp['responses'][0]['path']
- self.assertEqual(path, u'/v0/test')
+ self.assertEqual(path, '/v0/test')
def test_response_body_is_string_if_remote_response_is_not_json(self):
response = Response(body='Internal Error')
self.request.invoke_subrequest.return_value = response
- request = {'path': u'/test'}
+ request = {'path': '/test'}
resp = self.post({'requests': [request]})
body = resp['responses'][0]['body'].decode('utf-8')
self.assertEqual(body, 'Internal Error')
diff --git a/tests/core/test_views_errors.py b/tests/core/test_views_errors.py
index 3dc8924fb..4f4f93b77 100644
--- a/tests/core/test_views_errors.py
+++ b/tests/core/test_views_errors.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
import unittest
import mock
@@ -26,8 +25,7 @@ def test_backoff_headers_is_present_if_configured(self):
def test_backoff_headers_is_present_on_304(self):
first = self.app.get(self.sample_url, headers=self.headers)
etag = first.headers['ETag']
- headers = self.headers.copy()
- headers['If-None-Match'] = etag
+ headers = {**self.headers, 'If-None-Match': etag}
with mock.patch.dict(self.app.app.registry.settings, [('backoff', 10)]):
response = self.app.get(self.sample_url, headers=headers, status=304)
self.assertIn('Backoff', response.headers)
diff --git a/tests/core/test_views_hello.py b/tests/core/test_views_hello.py
index a4a75ce93..8c635c0bd 100644
--- a/tests/core/test_views_hello.py
+++ b/tests/core/test_views_hello.py
@@ -52,7 +52,7 @@ def test_if_user_authenticated_userid_is_provided(self):
response = self.app.get('/', headers=self.headers)
userid = response.json['user']['id']
self.assertTrue(userid.startswith('basicauth:'),
- '"%s" does not start with "basicauth:"' % userid)
+ '"{}" does not start with "basicauth:"'.format(userid))
def test_return_http_api_version_when_set(self):
with mock.patch.dict(
diff --git a/tests/core/test_views_transaction.py b/tests/core/test_views_transaction.py
index 31553bafd..73bea8ede 100644
--- a/tests/core/test_views_transaction.py
+++ b/tests/core/test_views_transaction.py
@@ -14,7 +14,7 @@
class PostgreSQLTest(BaseWebTest):
def get_app_settings(self, extras=None):
- settings = super(PostgreSQLTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
if sqlalchemy is not None:
from .test_storage import PostgreSQLStorageTest
from .test_cache import PostgreSQLCacheTest
@@ -227,7 +227,7 @@ def store_record(event):
class WithoutTransactionTest(PostgreSQLTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(WithoutTransactionTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['transaction_per_request'] = False
return settings
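Most of the remaining churn in these test diffs is the switch from the explicit two-argument `super(Class, self)` call to Python 3's zero-argument form, which resolves the class and instance implicitly. A minimal illustration with made-up class names, not Kinto's actual test classes:

class BaseSettings:
    def get_app_settings(self, extras=None):
        return {'base': True, **(extras or {})}

class PostgreSQLSettings(BaseSettings):
    def get_app_settings(self, extras=None):
        # Python 3: no need to repeat the class name and the instance.
        settings = super().get_app_settings(extras)
        settings['storage_backend'] = 'postgresql'
        return settings

assert PostgreSQLSettings().get_app_settings({'x': 1}) == {
    'base': True, 'x': 1, 'storage_backend': 'postgresql'}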
diff --git a/tests/plugins/test_admin.py b/tests/plugins/test_admin.py
index 774142b3b..39d470473 100644
--- a/tests/plugins/test_admin.py
+++ b/tests/plugins/test_admin.py
@@ -5,7 +5,7 @@
class AdminViewTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(AdminViewTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['includes'] = 'kinto.plugins.admin'
return settings
diff --git a/tests/plugins/test_default_bucket.py b/tests/plugins/test_default_bucket.py
index cecfcee98..dd45182ea 100644
--- a/tests/plugins/test_default_bucket.py
+++ b/tests/plugins/test_default_bucket.py
@@ -1,6 +1,5 @@
import mock
import unittest
-from six import text_type
from uuid import UUID
from pyramid.httpexceptions import HTTPBadRequest
@@ -16,7 +15,7 @@
class DefaultBucketWebTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(DefaultBucketWebTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['includes'] = 'kinto.plugins.default_bucket'
return settings
@@ -33,7 +32,7 @@ def test_default_bucket_exists_and_has_user_id(self):
hmac_secret = settings['userid_hmac_secret']
bucket_id = hmac_digest(hmac_secret, self.principal)[:32]
- self.assertEqual(result['data']['id'], text_type(UUID(bucket_id)))
+ self.assertEqual(result['data']['id'], str(UUID(bucket_id)))
self.assertEqual(result['permissions']['write'], [self.principal])
def test_default_bucket_can_still_be_explicitly_created(self):
@@ -46,10 +45,10 @@ def test_default_bucket_collections_are_automatically_created(self):
self.app.get(self.collection_url, headers=self.headers, status=200)
def test_adding_a_task_for_bob_doesnt_add_it_for_alice(self):
- record = MINIMALIST_RECORD.copy()
+ record = {**MINIMALIST_RECORD}
resp = self.app.post_json(self.collection_url + '/records',
record, headers=get_user_headers('bob'))
- record_id = self.collection_url + '/records/' + resp.json['data']['id']
+ record_id = '{}/records/{}'.format(self.collection_url, resp.json['data']['id'])
resp = self.app.get(record_id, headers=get_user_headers('alice'),
status=404)
@@ -65,7 +64,7 @@ def test_bucket_id_is_an_uuid_with_dashes(self):
try:
UUID(bucket_id)
except ValueError:
- self.fail('bucket_id: %s is not a valid UUID.' % bucket_id)
+ self.fail('bucket_id: {} is not a valid UUID.'.format(bucket_id))
def test_second_call_on_default_bucket_doesnt_raise_a_412(self):
self.app.get(self.bucket_url, headers=self.headers)
@@ -93,11 +92,8 @@ def test_cors_headers_are_provided_on_errors(self):
MINIMALIST_RECORD,
headers=self.headers)
current = resp.json['data']['last_modified']
- headers = self.headers.copy()
- headers.update({
- 'Origin': 'http://localhost:8000',
- 'If-None-Match': ('"%s"' % current).encode('utf-8')
- })
+ headers = {**self.headers, 'Origin': 'http://localhost:8000',
+ 'If-None-Match': ('"{}"'.format(current)).encode('utf-8')}
resp = self.app.get(self.collection_url + '/records',
headers=headers, status=304)
self.assertIn('Access-Control-Allow-Origin', resp.headers)
@@ -108,11 +104,8 @@ def test_etag_is_present_and_exposed_in_304_error(self):
MINIMALIST_RECORD,
headers=self.headers)
current = resp.json['data']['last_modified']
- headers = self.headers.copy()
- headers.update({
- 'Origin': 'http://localhost:8000',
- 'If-None-Match': ('"%s"' % current).encode('utf-8')
- })
+ headers = {**self.headers, 'Origin': 'http://localhost:8000',
+ 'If-None-Match': ('"{}"'.format(current)).encode('utf-8')}
resp = self.app.get(self.collection_url + '/records',
headers=headers, status=304)
self.assertIn('Access-Control-Expose-Headers', resp.headers)
@@ -272,15 +265,13 @@ def listener(event):
class EventsTest(DefaultBucketWebTest):
def tearDown(self):
- super(EventsTest, self).tearDown()
+ super().tearDown()
del _events[:]
def get_app_settings(self, extras=None):
- settings = super(EventsTest, self).get_app_settings(extras)
- settings = settings.copy()
- settings['event_listeners'] = 'testevent',
- settings['event_listeners.testevent.use'] = (
- 'tests.plugins.test_default_bucket')
+ settings = super().get_app_settings(extras)
+ settings = {**settings, 'event_listeners': 'testevent',
+ 'event_listeners.testevent.use': 'tests.plugins.test_default_bucket'}
return settings
def test_an_event_is_sent_on_implicit_bucket_creation(self):
@@ -312,7 +303,7 @@ def test_events_sent_on_bucket_and_collection_creation(self):
assert 'subpath' not in _events[0].payload
assert _events[0].payload['action'] == 'create'
assert _events[0].payload['bucket_id'] == bucket_id
- assert _events[0].payload['uri'] == '/buckets/%s' % bucket_id
+ assert _events[0].payload['uri'] == '/buckets/{}'.format(bucket_id)
# Implicit creation of collection
assert 'subpath' not in _events[1].payload
@@ -320,8 +311,7 @@ def test_events_sent_on_bucket_and_collection_creation(self):
assert _events[1].payload['resource_name'] == 'collection'
assert _events[1].payload['bucket_id'] == bucket_id
assert _events[1].payload['collection_id'] == 'articles'
- assert _events[1].payload['uri'] == ('/buckets/%s/collections'
- '/articles') % bucket_id
+ assert _events[1].payload['uri'] == '/buckets/{}/collections/articles'.format(bucket_id)
# Creation of record
assert _events[2].payload['action'] == 'create'
@@ -335,7 +325,7 @@ def test_events_sent_on_bucket_and_collection_creation(self):
class ReadonlyDefaultBucket(DefaultBucketWebTest):
def get_app_settings(self, extras=None):
- settings = super(ReadonlyDefaultBucket, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['readonly'] = True
return settings
@@ -345,7 +335,7 @@ def test_implicit_creation_is_rejected(self):
class BackendErrorTest(DefaultBucketWebTest):
def setUp(self):
- super(BackendErrorTest, self).setUp()
+ super().setUp()
self.patcher = mock.patch.object(
self.storage, 'create',
side_effect=storage_exceptions.BackendError())
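The other recurring change is `%` interpolation giving way to `str.format()`. A tiny sketch showing the two spellings produce the same URL; the bucket id is a placeholder:

bucket_id = 'beers'  # placeholder value

old_style = '/buckets/%s' % bucket_id
new_style = '/buckets/{}'.format(bucket_id)

assert old_style == new_style == '/buckets/beers'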
diff --git a/tests/plugins/test_history.py b/tests/plugins/test_history.py
index a01cd384d..17aad57c6 100644
--- a/tests/plugins/test_history.py
+++ b/tests/plugins/test_history.py
@@ -1,3 +1,4 @@
+import json
import re
import unittest
import mock
@@ -27,7 +28,7 @@ def test_a_statsd_timer_is_used_for_history_if_configured(self):
class HistoryWebTest(support.BaseWebTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(HistoryWebTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['includes'] = 'kinto.plugins.history'
return settings
@@ -69,7 +70,7 @@ def test_only_get_and_delete_on_collection_are_allowed(self):
def test_only_collection_endpoint_is_available(self):
resp = self.app.get(self.history_uri, headers=self.headers)
entry = resp.json['data'][0]
- url = '%s/%s' % (self.bucket_uri, entry['id'])
+ url = '{}/{}'.format(self.bucket_uri, entry['id'])
self.app.get(url, headers=self.headers, status=404)
self.app.put(url, headers=self.headers, status=404)
self.app.patch(url, headers=self.headers, status=404)
@@ -159,7 +160,7 @@ def test_tracks_collection_creation(self):
assert 'bucket_id' not in entry
assert entry['collection_id'] == cid
assert entry['action'] == 'create'
- assert entry['uri'] == '/buckets/test/collections/%s' % cid
+ assert entry['uri'] == '/buckets/test/collections/{}'.format(cid)
def test_tracks_collection_attributes_update(self):
body = {'data': {'foo': 'baz'}}
@@ -208,7 +209,7 @@ def test_tracks_group_creation(self):
assert 'bucket_id' not in entry
assert entry['group_id'] == self.group['id']
assert entry['action'] == 'create'
- assert entry['uri'] == '/buckets/test/groups/%s' % self.group['id']
+ assert entry['uri'] == '/buckets/test/groups/{}'.format(self.group['id'])
def test_tracks_group_attributes_update(self):
body = {'data': {'foo': 'baz', 'members': ['lui']}}
@@ -262,9 +263,9 @@ def test_tracks_record_creation(self):
assert entry['collection_id'] == cid
assert entry['record_id'] == rid
assert entry['action'] == 'create'
- assert entry['uri'] == '/buckets/test/collections/%s/records/%s' % (cid, rid) # NOQA
+ assert entry['uri'] == '/buckets/test/collections/{}/records/{}'.format(cid, rid)
assert entry['target']['data']['foo'] == 42
- assert entry['target']['permissions']['write'][0].startswith('basicauth:') # NOQA
+ assert entry['target']['permissions']['write'][0].startswith('basicauth:')
def test_tracks_record_attributes_update(self):
resp = self.app.patch_json(self.record_uri, {'data': {'foo': 'baz'}},
@@ -333,11 +334,11 @@ def test_full_deletion(self):
def test_partial_deletion(self):
resp = self.app.get('/buckets/bid/history', headers=self.headers)
- before = resp.headers['ETag']
+ before = int(json.loads(resp.headers['ETag']))
self.app.put('/buckets/bid/collections/cid2', headers=self.headers)
# Delete everything before the last entry (exclusive)
- self.app.delete('/buckets/bid/history?_before=%s' % before,
+ self.app.delete('/buckets/bid/history?_before={}'.format(before),
headers=self.headers)
resp = self.app.get('/buckets/bid/history', headers=self.headers)
@@ -385,14 +386,14 @@ def test_filter_by_resource(self):
def test_filter_by_uri(self):
uri = '/buckets/bid/collections/cid/records/rid'
- resp = self.app.get('/buckets/bid/history?uri=%s' % uri,
+ resp = self.app.get('/buckets/bid/history?uri={}'.format(uri),
headers=self.headers)
assert len(resp.json['data']) == 3 # create / update / delete
def test_allows_diff_between_two_versions_of_a_record(self):
uri = '/buckets/bid/collections/cid/records/rid'
- querystring = '?uri=%s&_limit=2&_sort=last_modified' % uri
- resp = self.app.get('/buckets/bid/history' + querystring,
+ querystring = '?uri={}&_limit=2&_sort=last_modified'.format(uri)
+ resp = self.app.get('/buckets/bid/history{}'.format(querystring),
headers=self.headers)
entries = resp.json['data']
version1 = entries[0]['target']['data']
@@ -513,7 +514,7 @@ def test_multiple_patch(self):
# Kinto/kinto#942
requests = [{
'method': 'PATCH',
- 'path': '/buckets/bid/collections/cid/records/%s' % l,
+ 'path': '/buckets/bid/collections/cid/records/{}'.format(l),
'body': {'data': {'label': l}}} for l in ('a', 'b', 'c')]
self.app.post_json('/batch', {'requests': requests}, headers=self.headers)
resp = self.app.get('/buckets/bid/history', headers=self.headers)
@@ -527,7 +528,7 @@ def test_multiple_patch(self):
class DefaultBucketTest(HistoryWebTest):
def get_app_settings(self, extras=None):
- settings = super(HistoryWebTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['includes'] = ('kinto.plugins.default_bucket '
'kinto.plugins.history')
return settings
@@ -535,7 +536,7 @@ def get_app_settings(self, extras=None):
def setUp(self):
resp = self.app.get('/', headers=self.headers)
self.bucket_id = resp.json['user']['bucket']
- self.history_uri = '/buckets/%s/history' % self.bucket_id
+ self.history_uri = '/buckets/{}/history'.format(self.bucket_id)
def test_history_can_be_accessed_via_default_alias(self):
self.app.get('/buckets/default/collections/blah',
@@ -554,33 +555,33 @@ def test_implicit_creations_are_listed(self):
entries = resp.json['data']
assert len(entries) == 3
- bucket_uri = '/buckets/%s' % self.bucket_id
+ bucket_uri = '/buckets/{}'.format(self.bucket_id)
assert entries[2]['resource_name'] == 'bucket'
assert entries[2]['bucket_id'] == self.bucket_id
assert entries[2]['uri'] == bucket_uri
- assert entries[2]['target']['permissions']['write'][0] == self.principal # NOQA
+ assert entries[2]['target']['permissions']['write'][0] == self.principal
collection_uri = bucket_uri + '/collections/blah'
assert entries[1]['resource_name'] == 'collection'
assert 'bucket_id' not in entries[1]
assert entries[1]['collection_id'] == 'blah'
assert entries[1]['uri'] == collection_uri
- assert entries[1]['target']['permissions']['write'][0] == self.principal # NOQA
+ assert entries[1]['target']['permissions']['write'][0] == self.principal
- record_uri = collection_uri + '/records/%s' % record['id']
+ record_uri = collection_uri + '/records/{}'.format(record['id'])
assert entries[0]['resource_name'] == 'record'
assert 'bucket_id' not in entries[1]
assert entries[0]['collection_id'] == 'blah'
assert entries[0]['record_id'] == record['id']
assert entries[0]['uri'] == record_uri
assert entries[0]['target']['data']['foo'] == 42
- assert entries[0]['target']['permissions']['write'][0] == self.principal # NOQA
+ assert entries[0]['target']['permissions']['write'][0] == self.principal
class PermissionsTest(HistoryWebTest):
def get_app_settings(self, extras=None):
- settings = super(PermissionsTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['experimental_permissions_endpoint'] = 'true'
return settings
@@ -714,7 +715,7 @@ def test_history_entries_are_not_listed_in_permissions_endpoint(self):
class ExcludeResourcesTest(HistoryWebTest):
def get_app_settings(self, extras=None):
- settings = super(ExcludeResourcesTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['history.exclude_resources'] = ('/buckets/a '
'/buckets/b/collections/a '
'/buckets/b/groups/a')
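The `test_partial_deletion` change above now parses the quoted ETag into an integer before building the `_before` querystring. A hedged sketch of that parsing step, assuming the ETag header carries a JSON-encoded integer string as the test implies; the timestamp value here is invented:

import json

etag = '"1502892345678"'           # assumed shape: quoted integer timestamp
timestamp = int(json.loads(etag))  # strip the quotes, keep the integer

query = '/buckets/bid/history?_before={}'.format(timestamp)
assert query.endswith('_before=1502892345678')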
diff --git a/tests/plugins/test_quotas.py b/tests/plugins/test_quotas.py
index 25cd1ac53..4680a0fa0 100644
--- a/tests/plugins/test_quotas.py
+++ b/tests/plugins/test_quotas.py
@@ -61,7 +61,7 @@ def create_record(self):
self.record = resp.json['data']
def get_app_settings(self, extras=None):
- settings = super(QuotaWebTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
# Setup the postgresql backend for transaction support.
settings['storage_backend'] = 'kinto.core.storage.postgresql'
@@ -241,13 +241,13 @@ def test_tracks_collection_delete_with_multiple_records(self):
self.create_bucket()
self.create_collection()
body = {'data': {'foo': 42}}
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
self.app.delete(self.collection_uri, headers=self.headers)
data = self.storage.get(collection_id=QUOTA_RESOURCE_NAME,
@@ -364,15 +364,15 @@ def test_tracks_records_delete_with_multiple_records(self):
self.create_bucket()
self.create_collection()
body = {'data': {'foo': 42}}
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
- self.app.post_json('%s/records' % self.collection_uri,
+ self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers)
- self.app.delete('%s/records' % self.collection_uri,
+ self.app.delete('{}/records'.format(self.collection_uri),
headers=self.headers)
storage_size = record_size(self.bucket) + record_size(self.collection)
data = self.storage.get(collection_id=QUOTA_RESOURCE_NAME,
@@ -388,7 +388,7 @@ def test_bulk_create(self):
body = {
'defaults': {
'method': 'POST',
- 'path': '%s/records' % self.collection_uri,
+ 'path': '{}/records'.format(self.collection_uri),
},
'requests': [{
'path': self.bucket_uri,
@@ -418,7 +418,7 @@ def test_bulk_update(self):
body = {
'defaults': {
'method': 'POST',
- 'path': '%s/collections' % self.bucket_uri,
+ 'path': '{}/collections'.format(self.bucket_uri),
},
'requests': [{
'path': self.bucket_uri,
@@ -445,13 +445,13 @@ def test_bulk_update(self):
'method': 'PUT',
},
'requests': [{
- 'path': '%s/collections/a' % self.bucket_uri,
+ 'path': '{}/collections/a'.format(self.bucket_uri),
'body': {'data': {'attr': 100}},
}, {
- 'path': '%s/collections/b' % self.bucket_uri,
+ 'path': '{}/collections/b'.format(self.bucket_uri),
'body': {'data': {'attr': 2000}},
}, {
- 'path': '%s/collections/c' % self.bucket_uri,
+ 'path': '{}/collections/c'.format(self.bucket_uri),
'body': {'data': {'attr': 30000}}
}]
}
@@ -473,7 +473,7 @@ def test_bulk_delete(self):
body = {
'defaults': {
'method': 'POST',
- 'path': '%s/collections' % self.bucket_uri,
+ 'path': '{}/collections'.format(self.bucket_uri),
},
'requests': [{
'body': {'data': {'id': 'a', 'attr': 1}},
@@ -490,11 +490,11 @@ def test_bulk_delete(self):
'method': 'DELETE',
},
'requests': [{
- 'path': '%s/collections/a' % self.bucket_uri
+ 'path': '{}/collections/a'.format(self.bucket_uri)
}, {
- 'path': '%s/collections/b' % self.bucket_uri
+ 'path': '{}/collections/b'.format(self.bucket_uri)
}, {
- 'path': '%s/collections/c' % self.bucket_uri
+ 'path': '{}/collections/c'.format(self.bucket_uri)
}, {
'path': self.collection_uri
}]
@@ -512,17 +512,17 @@ def test_bulk_delete(self):
with pytest.raises(RecordNotFoundError):
self.storage.get(collection_id=QUOTA_RESOURCE_NAME,
- parent_id='%s/collections/a' % self.bucket_uri,
+ parent_id='{}/collections/a'.format(self.bucket_uri),
object_id=QUOTA_COLLECTION_ID)
with pytest.raises(RecordNotFoundError):
self.storage.get(collection_id=QUOTA_RESOURCE_NAME,
- parent_id='%s/collections/b' % self.bucket_uri,
+ parent_id='{}/collections/b'.format(self.bucket_uri),
object_id=QUOTA_COLLECTION_ID)
with pytest.raises(RecordNotFoundError):
self.storage.get(collection_id=QUOTA_RESOURCE_NAME,
- parent_id='%s/collections/c' % self.bucket_uri,
+ parent_id='{}/collections/c'.format(self.bucket_uri),
object_id=QUOTA_COLLECTION_ID)
with pytest.raises(RecordNotFoundError):
@@ -531,13 +531,13 @@ def test_bulk_delete(self):
object_id=QUOTA_COLLECTION_ID)
-class QuotaBucketRecordMixin(object):
+class QuotaBucketRecordMixin:
def test_507_is_raised_if_quota_exceeded_on_record_creation(self):
self.create_bucket()
self.create_collection()
self.create_record()
body = {'data': {'foo': 42}}
- resp = self.app.post_json('%s/records' % self.collection_uri,
+ resp = self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers, status=507)
# Check that the storage was not updated.
@@ -559,12 +559,12 @@ def test_507_is_raised_if_quota_exceeded_on_record_creation(self):
})
# Check that the record wasn't created
- resp = self.app.get('%s/records' % self.collection_uri,
+ resp = self.app.get('{}/records'.format(self.collection_uri),
headers=self.headers)
assert len(resp.json['data']) == 1
-class QuotaBucketUpdateMixin(object):
+class QuotaBucketUpdateMixin:
def test_507_is_raised_if_quota_exceeded_on_record_update(self):
self.create_bucket()
self.create_collection()
@@ -720,13 +720,13 @@ def test_507_is_raised_if_quota_exceeded_on_group_delete(self):
})
-class QuotaBucketMixin(object):
+class QuotaBucketMixin:
def test_507_is_raised_if_quota_exceeded_on_collection_creation(self):
self.create_bucket()
self.create_collection()
self.create_record()
body = {'data': {'foo': 42}}
- resp = self.app.post_json('%s/collections' % self.bucket_uri,
+ resp = self.app.post_json('{}/collections'.format(self.bucket_uri),
body, headers=self.headers, status=507)
storage_size = record_size(self.bucket)
@@ -747,7 +747,7 @@ def test_507_is_raised_if_quota_exceeded_on_collection_creation(self):
})
# Check that the collection wasn't created
- resp = self.app.get('%s/collections' % self.bucket_uri,
+ resp = self.app.get('{}/collections'.format(self.bucket_uri),
headers=self.headers)
assert len(resp.json['data']) == 1
@@ -777,7 +777,7 @@ def test_507_is_raised_if_quota_exceeded_on_group_creation(self):
})
# Check that the group wasn't created
- resp = self.app.get('%s/groups' % self.bucket_uri,
+ resp = self.app.get('{}/groups'.format(self.bucket_uri),
headers=self.headers)
assert len(resp.json['data']) == 0
@@ -789,8 +789,7 @@ class QuotaMaxBytesExceededSettingsListenerTest(
error_message = "Bucket maximum total size exceeded "
def get_app_settings(self, extras=None):
- settings = super(QuotaMaxBytesExceededSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.bucket_max_bytes'] = '150'
return settings
@@ -802,8 +801,7 @@ class QuotaMaxBytesExceededBucketSettingsListenerTest(
error_message = "Bucket maximum total size exceeded "
def get_app_settings(self, extras=None):
- settings = super(QuotaMaxBytesExceededBucketSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.bucket_test_max_bytes'] = '150'
return settings
@@ -814,8 +812,7 @@ class QuotaMaxItemsExceededSettingsListenerTest(
error_message = "Bucket maximum number of objects exceeded "
def get_app_settings(self, extras=None):
- settings = super(QuotaMaxItemsExceededSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.bucket_max_items'] = '1'
return settings
@@ -826,8 +823,7 @@ class QuotaMaxItemsExceededBucketSettingsListenerTest(
error_message = "Bucket maximum number of objects exceeded "
def get_app_settings(self, extras=None):
- settings = super(QuotaMaxItemsExceededBucketSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.bucket_test_max_items'] = '1'
return settings
@@ -839,8 +835,7 @@ class QuotaMaxBytesPerItemExceededListenerTest(
error_message = "Maximum bytes per object exceeded "
def get_app_settings(self, extras=None):
- settings = super(QuotaMaxBytesPerItemExceededListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.bucket_max_bytes_per_item'] = '55'
return settings
@@ -852,19 +847,18 @@ class QuotaMaxBytesPerItemExceededBucketListenerTest(
error_message = "Maximum bytes per object exceeded "
def get_app_settings(self, extras=None):
- settings = super(QuotaMaxBytesPerItemExceededBucketListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.bucket_test_max_bytes_per_item'] = '55'
return settings
-class QuotaCollectionMixin(object):
+class QuotaCollectionMixin:
def test_507_is_raised_if_quota_exceeded_on_record_creation(self):
self.create_bucket()
self.create_collection()
self.create_record()
body = {'data': {'foo': 42}}
- resp = self.app.post_json('%s/records' % self.collection_uri,
+ resp = self.app.post_json('{}/records'.format(self.collection_uri),
body, headers=self.headers, status=507)
# Check that the storage was not updated.
@@ -883,7 +877,7 @@ def test_507_is_raised_if_quota_exceeded_on_record_creation(self):
})
-class QuotaCollectionUpdateMixin(object):
+class QuotaCollectionUpdateMixin:
def test_507_is_raised_if_quota_exceeded_on_record_update(self):
self.create_bucket()
self.create_collection()
@@ -930,9 +924,7 @@ class QuotaMaxBytesExceededCollectionSettingsListenerTest(
error_message = "Collection maximum size exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxBytesExceededCollectionSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_max_bytes'] = '100'
return settings
@@ -944,9 +936,7 @@ class QuotaMaxBytesExceededCollectionBucketSettingsListenerTest(
error_message = "Collection maximum size exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxBytesExceededCollectionBucketSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_test_max_bytes'] = '100'
return settings
@@ -958,9 +948,7 @@ class QuotaMaxBytesExceededBucketCollectionSettingsListenerTest(
error_message = "Collection maximum size exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxBytesExceededBucketCollectionSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_test_col_max_bytes'] = '100'
return settings
@@ -971,9 +959,7 @@ class QuotaMaxItemsExceededCollectionSettingsListenerTest(
error_message = "Collection maximum number of objects exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxItemsExceededCollectionSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_max_items'] = '1'
return settings
@@ -984,9 +970,7 @@ class QuotaMaxItemsExceededCollectionBucketSettingsListenerTest(
error_message = "Collection maximum number of objects exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxItemsExceededCollectionBucketSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_test_max_items'] = '1'
return settings
@@ -997,9 +981,7 @@ class QuotaMaxItemsExceededBucketCollectionSettingsListenerTest(
error_message = "Collection maximum number of objects exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxItemsExceededBucketCollectionSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_test_col_max_items'] = '1'
return settings
@@ -1010,9 +992,7 @@ class QuotaMaxBytesPerItemExceededCollectionSettingsListenerTest(
error_message = "Maximum bytes per object exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxBytesPerItemExceededCollectionSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_max_bytes_per_item'] = '80'
return settings
@@ -1024,9 +1004,7 @@ class QuotaMaxBytesPerItemExceededCollectionBucketSettingsListenerTest(
error_message = "Maximum bytes per object exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxBytesPerItemExceededCollectionBucketSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_test_max_bytes_per_item'] = '80'
return settings
@@ -1038,8 +1016,6 @@ class QuotaMaxBytesPerItemExceededBucketCollectionSettingsListenerTest(
error_message = "Maximum bytes per object exceeded "
def get_app_settings(self, extras=None):
- settings = super(
- QuotaMaxBytesPerItemExceededBucketCollectionSettingsListenerTest,
- self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['quotas.collection_test_col_max_bytes_per_item'] = '80'
return settings
diff --git a/tests/support.py b/tests/support.py
index 2852f70db..13b070b66 100644
--- a/tests/support.py
+++ b/tests/support.py
@@ -19,14 +19,14 @@ class BaseWebTest(testing.BaseWebTest):
principal = USER_PRINCIPAL
def __init__(self, *args, **kwargs):
- super(BaseWebTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.headers.update(testing.get_user_headers('mat'))
def get_app_settings(self, extras=None):
- settings = DEFAULT_SETTINGS.copy()
+ settings = {**DEFAULT_SETTINGS}
if extras is not None:
settings.update(extras)
- settings = super(BaseWebTest, self).get_app_settings(extras=settings)
+ settings = super().get_app_settings(extras=settings)
return settings
def create_group(self, bucket_id, group_id, members=None):
@@ -34,10 +34,10 @@ def create_group(self, bucket_id, group_id, members=None):
group = MINIMALIST_GROUP
else:
group = {'data': {'members': members}}
- group_url = '/buckets/%s/groups/%s' % (bucket_id, group_id)
+ group_url = '/buckets/{}/groups/{}'.format(bucket_id, group_id)
self.app.put_json(group_url, group,
headers=self.headers, status=201)
def create_bucket(self, bucket_id):
- self.app.put_json('/buckets/%s' % bucket_id, MINIMALIST_BUCKET,
+ self.app.put_json('/buckets/{}'.format(bucket_id), MINIMALIST_BUCKET,
headers=self.headers, status=201)
diff --git a/tests/swagger/support.py b/tests/swagger/support.py
index 4514e8b7b..2bda2e4dc 100644
--- a/tests/swagger/support.py
+++ b/tests/swagger/support.py
@@ -29,7 +29,7 @@ def setUpClass(cls):
cls.resources = build_resources(cls.spec)
def setUp(self):
- super(SwaggerTest, self).setUp()
+ super().setUp()
self.bucket = self.app.put_json('/buckets/b1',
MINIMALIST_BUCKET,
@@ -58,7 +58,7 @@ def setUp(self):
self.request.json = lambda: self.request._json
def get_app_settings(self, extras=None):
- settings = super(SwaggerTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings.update(self.settings)
return settings
@@ -88,7 +88,7 @@ def cast_bravado_response(self, response):
def validate_request_call(self, op, **kargs):
params = unmarshal_request(self.request, op)
- response = self.app.request(op.path_name.format(**params),
+ response = self.app.request(op.path_name.format_map(params),
body=json.dumps(self.request.json()).encode(),
method=op.http_method.upper(),
headers=self.headers, **kargs)
diff --git a/tests/swagger/test_definitions.py b/tests/swagger/test_definitions.py
index 42dc3d7ed..56d541ae9 100644
--- a/tests/swagger/test_definitions.py
+++ b/tests/swagger/test_definitions.py
@@ -7,7 +7,7 @@
class SwaggerDefinitionsTest(SwaggerTest):
def setUp(self):
- super(SwaggerDefinitionsTest, self).setUp()
+ super().setUp()
def test_definitions_validate_id(self):
schema = self.spec.deref(self.spec_dict['definitions']['Bucket'])
diff --git a/tests/swagger/test_resources.py b/tests/swagger/test_resources.py
index 380ac57f7..92349b23b 100644
--- a/tests/swagger/test_resources.py
+++ b/tests/swagger/test_resources.py
@@ -32,8 +32,7 @@ def test_resource_buckets(self):
self.request.path = {
'bucket_id': 'b1',
}
- bucket = MINIMALIST_BUCKET.copy()
- bucket['data'] = {'foo': 'bar'}
+ bucket = {**MINIMALIST_BUCKET, 'data': {'foo': 'bar'}}
self.request.json = lambda: bucket
self.validate_request_call(op)
@@ -60,8 +59,7 @@ def test_resource_collections(self):
'bucket_id': 'b1',
'collection_id': 'c1',
}
- collection = MINIMALIST_COLLECTION.copy()
- collection['data'] = {'foo': 'bar'}
+ collection = {**MINIMALIST_COLLECTION, 'data': {'foo': 'bar'}}
self.request.json = lambda: collection
self.validate_request_call(op)
diff --git a/tests/swagger/test_responses_errors.py b/tests/swagger/test_responses_errors.py
index 85142ee39..5b44ffea7 100644
--- a/tests/swagger/test_responses_errors.py
+++ b/tests/swagger/test_responses_errors.py
@@ -9,7 +9,7 @@
class SwaggerObjectErrorResponsesTest(SwaggerTest):
def setUp(self):
- super(SwaggerObjectErrorResponsesTest, self).setUp()
+ super().setUp()
self.bucket = self.app.put_json('/buckets/b1',
MINIMALIST_BUCKET,
@@ -28,8 +28,8 @@ def setUp(self):
headers=self.headers).json
def test_object_get_304(self):
- headers = self.headers.copy()
- headers['If-None-Match'] = '"%d"' % self.bucket['data']['last_modified']
+ headers = {**self.headers,
+ 'If-None-Match': '"{}"'.format(self.bucket['data']['last_modified'])}
response = self.app.get('/buckets/b1',
headers=headers, status=304)
response = self.cast_bravado_response(response)
@@ -38,8 +38,7 @@ def test_object_get_304(self):
validate_response(schema, op, response)
def test_object_get_400(self):
- headers = self.headers.copy()
- headers['If-None-Match'] = 'aaa'
+ headers = {**self.headers, 'If-None-Match': 'aaa'}
response = self.app.get('/buckets/b1',
headers=headers, status=400)
response = self.cast_bravado_response(response)
@@ -55,8 +54,7 @@ def test_object_get_401(self):
validate_response(schema, op, response)
def test_object_get_403(self):
- headers = self.headers.copy()
- headers.update(testing.get_user_headers('aaa'))
+ headers = {**self.headers, **testing.get_user_headers('aaa')}
response = self.app.get('/buckets/b1',
headers=headers, status=403)
response = self.cast_bravado_response(response)
@@ -73,8 +71,7 @@ def test_object_get_404(self):
validate_response(schema, op, response)
def test_object_get_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/html'
+ headers = {**self.headers, 'Accept': 'text/html'}
response = self.app.get('/buckets/b1',
headers=headers, status=406)
response = self.cast_bravado_response(response)
@@ -83,8 +80,8 @@ def test_object_get_406(self):
validate_response(schema, op, response)
def test_object_get_412(self):
- headers = self.headers.copy()
- headers['If-Match'] = '"%d"' % (self.bucket['data']['last_modified']-1)
+ headers = {**self.headers,
+ 'If-Match': '"{}"'.format(self.bucket['data']['last_modified']-1)}
response = self.app.get('/buckets/b1',
headers=headers, status=412)
response = self.cast_bravado_response(response)
@@ -108,8 +105,7 @@ def test_object_put_401(self):
validate_response(schema, op, response)
def test_object_put_403(self):
- headers = self.headers.copy()
- headers.update(testing.get_user_headers('aaa'))
+ headers = {**self.headers, **testing.get_user_headers('aaa')}
response = self.app.put_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=403)
response = self.cast_bravado_response(response)
@@ -118,8 +114,7 @@ def test_object_put_403(self):
validate_response(schema, op, response)
def test_object_put_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/html'
+ headers = {**self.headers, 'Accept': 'text/html'}
response = self.app.put_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=406)
response = self.cast_bravado_response(response)
@@ -128,8 +123,8 @@ def test_object_put_406(self):
validate_response(schema, op, response)
def test_object_put_412(self):
- headers = self.headers.copy()
- headers['If-Match'] = '"%d"' % (self.bucket['data']['last_modified']-1)
+ headers = {**self.headers,
+ 'If-Match': '"{}"'.format(self.bucket['data']['last_modified']-1)}
response = self.app.put_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=412)
response = self.cast_bravado_response(response)
@@ -138,8 +133,7 @@ def test_object_put_412(self):
validate_response(schema, op, response)
def test_object_put_415(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/html'
+ headers = {**self.headers, 'Content-Type': 'text/html'}
response = self.app.put_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=415)
response = self.cast_bravado_response(response)
@@ -163,8 +157,7 @@ def test_object_patch_401(self):
validate_response(schema, op, response)
def test_object_patch_403(self):
- headers = self.headers.copy()
- headers.update(testing.get_user_headers('aaa'))
+ headers = {**self.headers, **testing.get_user_headers('aaa')}
response = self.app.patch_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=403)
response = self.cast_bravado_response(response)
@@ -182,8 +175,7 @@ def test_object_patch_404(self):
validate_response(schema, op, response)
def test_object_patch_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/html'
+ headers = {**self.headers, 'Accept': 'text/html'}
response = self.app.patch_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=406)
response = self.cast_bravado_response(response)
@@ -192,8 +184,8 @@ def test_object_patch_406(self):
validate_response(schema, op, response)
def test_object_patch_412(self):
- headers = self.headers.copy()
- headers['If-Match'] = '"%d"' % (self.bucket['data']['last_modified']-1)
+ headers = {**self.headers,
+ 'If-Match': '"{}"'.format(self.bucket['data']['last_modified']-1)}
response = self.app.patch_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=412)
response = self.cast_bravado_response(response)
@@ -202,8 +194,7 @@ def test_object_patch_412(self):
validate_response(schema, op, response)
def test_object_patch_415(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/html'
+ headers = {**self.headers, 'Content-Type': 'text/html'}
response = self.app.patch_json('/buckets/b1', MINIMALIST_BUCKET,
headers=headers, status=415)
response = self.cast_bravado_response(response)
@@ -212,8 +203,7 @@ def test_object_patch_415(self):
validate_response(schema, op, response)
def test_object_delete_400(self):
- headers = self.headers.copy()
- headers['If-Match'] = 'aaa'
+ headers = {**self.headers, 'If-Match': 'aaa'}
response = self.app.delete('/buckets/b1',
headers=headers, status=400)
response = self.cast_bravado_response(response)
@@ -229,8 +219,7 @@ def test_object_delete_401(self):
validate_response(schema, op, response)
def test_object_delete_403(self):
- headers = self.headers.copy()
- headers.update(testing.get_user_headers('aaa'))
+ headers = {**self.headers, **testing.get_user_headers('aaa')}
response = self.app.delete('/buckets/b1',
headers=headers, status=403)
response = self.cast_bravado_response(response)
@@ -247,8 +236,7 @@ def test_object_delete_404(self):
validate_response(schema, op, response)
def test_object_delete_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/html'
+ headers = {**self.headers, 'Accept': 'text/html'}
response = self.app.delete('/buckets/b1',
headers=headers, status=406)
response = self.cast_bravado_response(response)
@@ -257,8 +245,8 @@ def test_object_delete_406(self):
validate_response(schema, op, response)
def test_object_delete_412(self):
- headers = self.headers.copy()
- headers['If-Match'] = '"%d"' % (self.bucket['data']['last_modified']-1)
+ headers = {**self.headers,
+ 'If-Match': '"{}"'.format(self.bucket['data']['last_modified']-1)}
response = self.app.delete('/buckets/b1',
headers=headers, status=412)
response = self.cast_bravado_response(response)
@@ -267,8 +255,8 @@ def test_object_delete_412(self):
validate_response(schema, op, response)
def test_list_get_304(self):
- headers = self.headers.copy()
- headers['If-None-Match'] = '"%d"' % self.bucket['data']['last_modified']
+ headers = {**self.headers,
+ 'If-None-Match': '"{}"'.format(self.bucket['data']['last_modified'])}
response = self.app.get('/buckets',
headers=headers, status=304)
response = self.cast_bravado_response(response)
@@ -292,8 +280,7 @@ def test_list_get_401(self):
validate_response(schema, op, response)
def test_list_get_403(self):
- headers = self.headers.copy()
- headers.update(testing.get_user_headers('aaa'))
+ headers = {**self.headers, **testing.get_user_headers('aaa')}
response = self.app.get('/buckets/b1/collections',
headers=headers, status=403)
response = self.cast_bravado_response(response)
@@ -302,8 +289,7 @@ def test_list_get_403(self):
validate_response(schema, op, response)
def test_list_get_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/html'
+ headers = {**self.headers, 'Accept': 'text/html'}
response = self.app.get('/buckets',
headers=headers, status=406)
response = self.cast_bravado_response(response)
@@ -312,8 +298,8 @@ def test_list_get_406(self):
validate_response(schema, op, response)
def test_list_get_412(self):
- headers = self.headers.copy()
- headers['If-Match'] = '"%d"' % (self.bucket['data']['last_modified']-1)
+ headers = {**self.headers,
+ 'If-Match': '"{}"'.format(self.bucket['data']['last_modified']-1)}
response = self.app.get('/buckets/b1',
headers=headers, status=412)
response = self.cast_bravado_response(response)
@@ -329,8 +315,7 @@ def test_list_delete_401(self):
validate_response(schema, op, response)
def test_lidt_delete_403(self):
- headers = self.headers.copy()
- headers.update(testing.get_user_headers('aaa'))
+ headers = {**self.headers, **testing.get_user_headers('aaa')}
response = self.app.delete('/buckets/b1/collections',
headers=headers, status=403)
response = self.cast_bravado_response(response)
@@ -343,8 +328,7 @@ def test_list_delete_405(self):
pass
def test_list_delete_406(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/html'
+ headers = {**self.headers, 'Accept': 'text/html'}
response = self.app.delete('/buckets',
headers=headers, status=406)
response = self.cast_bravado_response(response)
@@ -353,8 +337,8 @@ def test_list_delete_406(self):
validate_response(schema, op, response)
def test_list_delete_412(self):
- headers = self.headers.copy()
- headers['If-Match'] = '"%d"' % (self.bucket['data']['last_modified']-1)
+ headers = {**self.headers,
+ 'If-Match': '"{}"'.format(self.bucket['data']['last_modified']-1)}
response = self.app.delete('/buckets',
headers=headers, status=412)
response = self.cast_bravado_response(response)
diff --git a/tests/swagger/test_validation.py b/tests/swagger/test_validation.py
index 277601e79..a0772eb8f 100644
--- a/tests/swagger/test_validation.py
+++ b/tests/swagger/test_validation.py
@@ -7,7 +7,7 @@
class SwaggerRequestsValidationTest(SwaggerTest):
def setUp(self):
- super(SwaggerRequestsValidationTest, self).setUp()
+ super().setUp()
self.request = IncomingRequest()
self.request.path = {}
diff --git a/tests/test_config.py b/tests/test_config.py
index 2a20e97ad..6456aa909 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -5,8 +5,6 @@
import unittest
from time import strftime
-import six
-
from kinto import config
from kinto import __version__
@@ -59,7 +57,7 @@ def test_create_destination_directory(self):
def test_hmac_secret_is_text(self, mocked_render_template):
config.init('kinto.ini', 'postgresql')
args, kwargs = list(mocked_render_template.call_args)
- self.assertEquals(type(kwargs['secret']), six.text_type)
+ self.assertEquals(type(kwargs['secret']), str)
@mock.patch('kinto.config.render_template')
def test_init_postgresql_values(self, mocked_render_template):
diff --git a/tests/test_main.py b/tests/test_main.py
index 13d711d7a..0d7f72998 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -1,10 +1,4 @@
-try:
- import builtins
- builtins_name = 'builtins'
-except ImportError:
- import __builtin__ as builtins
- builtins_name = '__builtin__'
-
+import builtins
import logging
import mock
import os
@@ -13,7 +7,7 @@
import tempfile
import unittest
-from six import StringIO
+from io import StringIO
from kinto import __version__ as kinto_version
from kinto.__main__ import main, DEFAULT_LOG_FORMAT
@@ -76,8 +70,7 @@ def psycopg2_missing(name, *args, **kwargs):
else:
return realimport(name, *args, **kwargs)
- with mock.patch('{}.__import__'.format(builtins_name),
- side_effect=psycopg2_missing):
+ with mock.patch('builtins.__import__', side_effect=psycopg2_missing):
with mock.patch('pip.main', return_value=None) as mocked_pip:
with mock.patch("kinto.__main__.input", create=True,
return_value="1"):
@@ -94,8 +87,7 @@ def redis_missing(name, *args, **kwargs):
else:
return realimport(name, *args, **kwargs)
- with mock.patch('{}.__import__'.format(builtins_name),
- side_effect=redis_missing):
+ with mock.patch('builtins.__import__', side_effect=redis_missing):
with mock.patch('pip.main', return_value=None) as mocked_pip:
with mock.patch("kinto.__main__.input", create=True,
return_value="2"):
@@ -156,7 +148,7 @@ def test_cli_start_with_reload_runs_pserve_with_reload(self):
def test_cli_can_display_kinto_version(self):
with mock.patch('sys.stdout', new_callable=StringIO) as mock_stdout:
res = main(['version'])
- assert mock_stdout.getvalue() == '%s\n' % kinto_version
+ assert mock_stdout.getvalue() == '{}\n'.format(kinto_version)
assert res == 0
def test_cli_can_configure_logger_in_quiet(self):
@@ -182,9 +174,3 @@ def test_cli_use_default_logging_logger(self):
mocked_logging.basicConfig.assert_called_with(
level=logging.INFO,
format=DEFAULT_LOG_FORMAT)
-
- def test_cli_uses_six_moves_input_function(self):
- # In Py2 we want to use raw_input and input in Py3
- from kinto.__main__ import input as cli_input
- from six.moves import input as six_input
- assert cli_input == six_input
diff --git a/tests/test_views_buckets.py b/tests/test_views_buckets.py
index 997ef15c6..37d33f09c 100644
--- a/tests/test_views_buckets.py
+++ b/tests/test_views_buckets.py
@@ -15,7 +15,7 @@ class BucketViewTest(BaseWebTest, unittest.TestCase):
record_url = '/buckets/beers'
def setUp(self):
- super(BucketViewTest, self).setUp()
+ super().setUp()
resp = self.app.put_json(self.record_url,
MINIMALIST_BUCKET,
headers=self.headers)
@@ -31,7 +31,7 @@ def test_buckets_can_be_put_with_simple_name(self):
self.assertEqual(self.record['id'], 'beers')
def test_buckets_names_can_have_underscores(self):
- bucket = MINIMALIST_BUCKET.copy()
+ bucket = {**MINIMALIST_BUCKET}
record_url = '/buckets/alexis_beers'
resp = self.app.put_json(record_url,
bucket,
@@ -51,17 +51,15 @@ def test_buckets_name_should_be_simple(self):
status=400)
def test_buckets_should_reject_unaccepted_request_content_type(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/plain'
+ headers = {**self.headers, 'Content-Type': 'text/plain'}
self.app.put('/buckets/beers',
MINIMALIST_BUCKET,
headers=headers,
status=415)
def test_create_permissions_can_be_added_on_buckets(self):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {'collection:create': ['fxa:user'],
- 'group:create': ['fxa:user']}
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {'collection:create': ['fxa:user'],
+ 'group:create': ['fxa:user']}}
resp = self.app.put_json('/buckets/beers',
bucket,
headers=self.headers,
@@ -71,17 +69,15 @@ def test_create_permissions_can_be_added_on_buckets(self):
self.assertIn('fxa:user', permissions['group:create'])
def test_wrong_create_permissions_cannot_be_added_on_buckets(self):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {'record:create': ['fxa:user']}
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {'record:create': ['fxa:user']}}
self.app.put_json('/buckets/beers',
bucket,
headers=self.headers,
status=400)
def test_buckets_can_handle_arbitrary_attributes(self):
- bucket = MINIMALIST_BUCKET.copy()
public_key = "5866f245a00bb3a39100d31b2f14d453"
- bucket['data'] = {'public_key': public_key}
+ bucket = {**MINIMALIST_BUCKET, 'data': {'public_key': public_key}}
resp = self.app.put_json('/buckets/beers',
bucket,
headers=self.headers,
@@ -91,8 +87,7 @@ def test_buckets_can_handle_arbitrary_attributes(self):
self.assertEqual(data['public_key'], public_key)
def test_buckets_can_be_filtered_by_arbitrary_attribute(self):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['data'] = {'size': 3}
+ bucket = {**MINIMALIST_BUCKET, 'data': {'size': 3}}
self.app.put_json('/buckets/beers',
bucket,
headers=self.headers)
@@ -103,7 +98,7 @@ def test_buckets_can_be_filtered_by_arbitrary_attribute(self):
class BucketListTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(BucketListTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['bucket_create_principals'] = self.principal
return settings
@@ -143,7 +138,7 @@ def test_bucket_id_can_be_specified_in_post(self):
self.assertEqual(r.json['data']['id'], bucket)
def test_bucket_can_be_created_without_body_nor_contenttype(self):
- headers = self.headers.copy()
+ headers = {**self.headers}
headers.pop("Content-Type")
self.app.put('/buckets/catalog', headers=headers)
@@ -154,8 +149,8 @@ class BucketReadPermissionTest(BaseWebTest, unittest.TestCase):
record_url = '/buckets/beers'
def setUp(self):
- super(BucketReadPermissionTest, self).setUp()
- bucket = MINIMALIST_BUCKET.copy()
+ super().setUp()
+ bucket = {**MINIMALIST_BUCKET}
self.app.put_json(self.record_url,
bucket,
headers=self.headers)
@@ -198,12 +193,12 @@ def setUp(self):
MINIMALIST_RECORD,
headers=self.headers)
record_id = r.json['data']['id']
- self.record_url = self.collection_url + '/records/%s' % record_id
+ self.record_url = self.collection_url + '/records/{}'.format(record_id)
# Delete the bucket.
self.app.delete(self.bucket_url, headers=self.headers)
def get_app_settings(self, extras=None):
- settings = super(BucketDeletionTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
# Give the permission to read, to get an explicit 404 once deleted.
settings['kinto.bucket_read_principals'] = self.principal
return settings
@@ -235,7 +230,7 @@ def test_every_collections_are_deleted_too(self):
self.app.get(self.collection_url, headers=self.headers, status=404)
# Verify tombstones
- resp = self.app.get('%s/collections?_since=0' % self.bucket_url,
+ resp = self.app.get('{}/collections?_since=0'.format(self.bucket_url),
headers=self.headers)
self.assertEqual(len(resp.json['data']), 0)
@@ -244,7 +239,7 @@ def test_every_groups_are_deleted_too(self):
headers=self.headers)
self.app.get(self.group_url, headers=self.headers, status=404)
# Verify tombstones
- resp = self.app.get('%s/groups?_since=0' % self.bucket_url,
+ resp = self.app.get('{}/groups?_since=0'.format(self.bucket_url),
headers=self.headers)
self.assertEqual(len(resp.json['data']), 0)
@@ -256,12 +251,11 @@ def test_every_records_are_deleted_too(self):
self.app.get(self.record_url, headers=self.headers, status=404)
# Verify tombstones
- resp = self.app.get('%s/records?_since=0' % self.collection_url,
+ resp = self.app.get('{}/records?_since=0'.format(self.collection_url),
headers=self.headers)
self.assertEqual(len(resp.json['data']), 0)
def test_can_be_created_after_deletion_with_if_none_match_star(self):
- headers = self.headers.copy()
- headers['If-None-Match'] = '*'
+ headers = {**self.headers, 'If-None-Match': '*'}
self.app.put_json(self.bucket_url, MINIMALIST_BUCKET,
headers=headers, status=201)
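The bucket tests above all apply the same refactoring: instead of calling .copy() and then assigning keys, the dict is built in a single PEP 448 unpacking expression that yields a modified shallow copy. A minimal sketch of the pattern, using an illustrative stand-in for the MINIMALIST_BUCKET fixture:

MINIMALIST_BUCKET = {'data': {}, 'permissions': {}}   # illustrative stand-in, not the real fixture

# Before (Python 2 compatible):
bucket = MINIMALIST_BUCKET.copy()
bucket['permissions'] = {'collection:create': ['fxa:user']}

# After (Python 3.5+, PEP 448 unpacking):
bucket = {**MINIMALIST_BUCKET, 'permissions': {'collection:create': ['fxa:user']}}

Both forms leave the module-level fixture itself unmodified; the unpacking form simply expresses the copy and the override in one literal.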
diff --git a/tests/test_views_collections.py b/tests/test_views_collections.py
index 7b67825d7..c7cc3a395 100644
--- a/tests/test_views_collections.py
+++ b/tests/test_views_collections.py
@@ -12,7 +12,7 @@ class CollectionViewTest(BaseWebTest, unittest.TestCase):
collection_url = '/buckets/beers/collections/barley'
def setUp(self):
- super(CollectionViewTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
headers=self.headers)
resp = self.app.put_json(self.collection_url,
@@ -36,8 +36,7 @@ def test_collections_name_should_be_simple(self):
status=400)
def test_collections_should_reject_unaccepted_request_content_type(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/plain'
+ headers = {**self.headers, 'Content-Type': 'text/plain'}
self.app.put('/buckets/beers/collections/barley',
MINIMALIST_COLLECTION,
headers=headers,
@@ -61,8 +60,7 @@ def test_collections_are_isolated_by_bucket(self):
self.app.get(other_bucket, headers=self.headers, status=404)
def test_create_permissions_can_be_added_on_collections(self):
- collection = MINIMALIST_COLLECTION.copy()
- collection['permissions'] = {'record:create': ['fxa:user']}
+ collection = {**MINIMALIST_COLLECTION, 'permissions': {'record:create': ['fxa:user']}}
resp = self.app.put_json('/buckets/beers/collections/barley',
collection,
headers=self.headers,
@@ -71,17 +69,15 @@ def test_create_permissions_can_be_added_on_collections(self):
self.assertIn('fxa:user', permissions['record:create'])
def test_wrong_create_permissions_cannot_be_added_on_collections(self):
- collection = MINIMALIST_COLLECTION.copy()
- collection['permissions'] = {'collection:create': ['fxa:user']}
+ collection = {**MINIMALIST_COLLECTION, 'permissions': {'collection:create': ['fxa:user']}}
self.app.put_json('/buckets/beers/collections/barley',
collection,
headers=self.headers,
status=400)
def test_collections_can_handle_arbitrary_attributes(self):
- collection = MINIMALIST_COLLECTION.copy()
fingerprint = "5866f245a00bb3a39100d31b2f14d453"
- collection['data'] = {'fingerprint': fingerprint}
+ collection = {**MINIMALIST_COLLECTION, 'data': {'fingerprint': fingerprint}}
resp = self.app.put_json('/buckets/beers/collections/barley',
collection,
headers=self.headers,
@@ -91,8 +87,7 @@ def test_collections_can_handle_arbitrary_attributes(self):
self.assertEqual(data['fingerprint'], fingerprint)
def test_collections_can_be_filtered_by_arbitrary_attribute(self):
- collection = MINIMALIST_COLLECTION.copy()
- collection['data'] = {'size': 3}
+ collection = {**MINIMALIST_COLLECTION, 'data': {'size': 3}}
self.app.put_json('/buckets/beers/collections/moderator',
collection,
headers=self.headers)
@@ -107,10 +102,10 @@ class CollectionDeletionTest(BaseWebTest, unittest.TestCase):
collection_url = '/buckets/beers/collections/barley'
def setUp(self):
- super(CollectionDeletionTest, self).setUp()
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {'collection:create': ['system.Everyone'],
- 'read': ['system.Everyone']}
+ super().setUp()
+ bucket = {**MINIMALIST_BUCKET,
+ 'permissions': {'collection:create': ['system.Everyone'],
+ 'read': ['system.Everyone']}}
self.app.put_json('/buckets/beers', bucket,
headers=self.headers)
self.app.put_json(self.collection_url, MINIMALIST_COLLECTION,
@@ -119,7 +114,7 @@ def setUp(self):
MINIMALIST_RECORD,
headers=self.headers)
record_id = r.json['data']['id']
- self.record_url = self.collection_url + '/records/%s' % record_id
+ self.record_url = self.collection_url + '/records/{}'.format(record_id)
self.app.delete(self.collection_url, headers=self.headers)
def test_collections_can_be_deleted(self):
@@ -144,13 +139,12 @@ def test_records_of_collection_are_deleted_too(self):
self.app.get(self.record_url, headers=self.headers, status=404)
# Verify tombstones
- resp = self.app.get('%s/records?_since=0' % self.collection_url,
+ resp = self.app.get('{}/records?_since=0'.format(self.collection_url),
headers=self.headers)
self.assertEqual(len(resp.json['data']), 0)
def test_can_be_created_after_deletion_with_if_none_match_star(self):
- headers = self.headers.copy()
- headers['If-None-Match'] = '*'
+ headers = {**self.headers, 'If-None-Match': '*'}
self.app.put_json(self.collection_url, MINIMALIST_COLLECTION,
headers=headers, status=201)
@@ -160,7 +154,7 @@ class CollectionCreationTest(BaseWebTest, unittest.TestCase):
collections_url = '/buckets/beers/collections'
def setUp(self):
- super(CollectionCreationTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
headers=self.headers)
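These setUp() and get_app_settings() changes rely on Python 3's zero-argument super(), which infers the class and instance from the enclosing method, so the class name no longer has to be repeated. A minimal sketch:

import unittest

class BaseWebTest:
    def setUp(self):
        self.headers = {}

class CollectionViewTest(BaseWebTest, unittest.TestCase):
    def setUp(self):
        # Python 2 form: super(CollectionViewTest, self).setUp()
        super().setUp()   # Python 3: class and instance are inferred

The behaviour is identical; the zero-argument form is just less error-prone when classes are renamed.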
diff --git a/tests/test_views_collections_cache.py b/tests/test_views_collections_cache.py
index 4c871f3f6..a38d0ed2d 100644
--- a/tests/test_views_collections_cache.py
+++ b/tests/test_views_collections_cache.py
@@ -6,13 +6,13 @@
class GlobalSettingsTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(GlobalSettingsTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['kinto.record_cache_expires_seconds'] = 3600
settings['kinto.record_read_principals'] = 'system.Everyone'
return settings
def setUp(self):
- super(GlobalSettingsTest, self).setUp()
+ super().setUp()
self.create_bucket('blog')
self.app.put_json('/buckets/blog/collections/cached',
MINIMALIST_COLLECTION,
@@ -28,29 +28,26 @@ def test_expires_and_cache_control_headers_are_set(self):
self.assertIn('Expires', r.headers)
self.assertEqual(r.headers['Cache-Control'], 'max-age=3600')
- r = self.app.get(url + '/%s' % self.record['id'])
+ r = self.app.get('{}/{}'.format(url, self.record['id']))
self.assertIn('Expires', r.headers)
self.assertEqual(r.headers['Cache-Control'], 'max-age=3600')
class SpecificSettingsTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(SpecificSettingsTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['kinto.blog_record_cache_expires_seconds'] = '30'
settings['kinto.browser_top500_record_cache_expires_seconds'] = '60'
return settings
def setUp(self):
- super(SpecificSettingsTest, self).setUp()
+ super().setUp()
def create_record_in_collection(bucket_id, collection_id):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {'read': ['system.Everyone']}
- self.app.put_json('/buckets/%s' % bucket_id,
- bucket,
- headers=self.headers)
- collection_url = '/buckets/%s/collections/%s' % (bucket_id,
- collection_id)
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {'read': ['system.Everyone']}}
+ self.app.put_json('/buckets/{}'.format(bucket_id),
+ bucket, headers=self.headers)
+ collection_url = '/buckets/{}/collections/{}'.format(bucket_id, collection_id)
self.app.put_json(collection_url,
MINIMALIST_COLLECTION,
headers=self.headers)
@@ -65,26 +62,25 @@ def create_record_in_collection(bucket_id, collection_id):
def assertHasCache(self, url, age):
r = self.app.get(url)
self.assertIn('Expires', r.headers)
- self.assertEqual(r.headers['Cache-Control'], 'max-age=%s' % age)
+ self.assertEqual(r.headers['Cache-Control'], 'max-age={}'.format(age))
def test_for_records_on_a_specific_bucket(self):
collection_url = '/buckets/blog/collections/cached/records'
self.assertHasCache(collection_url, 30)
- record_url = collection_url + '/%s' % self.blog_record['id']
+ record_url = '{}/{}'.format(collection_url, self.blog_record['id'])
self.assertHasCache(record_url, 30)
def test_for_records_on_a_specific_collection(self):
collection_url = '/buckets/browser/collections/top500/records'
self.assertHasCache(collection_url, 60)
- record_url = collection_url + '/%s' % self.app_record['id']
+ record_url = '{}/{}'.format(collection_url, self.app_record['id'])
self.assertHasCache(record_url, 60)
class CollectionExpiresTest(BaseWebTest, unittest.TestCase):
def setUp(self):
- super(CollectionExpiresTest, self).setUp()
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {'read': ['system.Everyone']}
+ super().setUp()
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {'read': ['system.Everyone']}}
self.app.put_json('/buckets/blog',
bucket,
headers=self.headers)
@@ -99,7 +95,7 @@ def setUp(self):
MINIMALIST_RECORD,
headers=self.headers)
self.record = resp.json['data']
- self.record_url = self.records_url + '/' + self.record['id']
+ self.record_url = '{}/{}'.format(self.records_url, self.record['id'])
def test_cache_expires_must_be_an_integer(self):
self.app.put_json(self.collection_url,
diff --git a/tests/test_views_collections_schema.py b/tests/test_views_collections_schema.py
index 65ebf8a81..ff49a9a2f 100644
--- a/tests/test_views_collections_schema.py
+++ b/tests/test_views_collections_schema.py
@@ -23,8 +23,7 @@
class DeactivatedSchemaTest(BaseWebTest, unittest.TestCase):
def test_schema_should_be_json_schema(self):
- newschema = SCHEMA.copy()
- newschema['type'] = 'Washmachine'
+ newschema = {**SCHEMA, 'type': 'Washmachine'}
self.app.put(BUCKET_URL, headers=self.headers)
self.app.put(COLLECTION_URL, headers=self.headers)
resp = self.app.put_json(COLLECTION_URL,
@@ -50,12 +49,12 @@ def test_records_are_not_invalid_if_do_not_match_schema(self):
class BaseWebTestWithSchema(BaseWebTest):
def get_app_settings(self, extras=None):
- settings = super(BaseWebTestWithSchema, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['experimental_collection_schema_validation'] = 'True'
return settings
def setUp(self):
- super(BaseWebTestWithSchema, self).setUp()
+ super().setUp()
self.app.put(BUCKET_URL, headers=self.headers)
self.app.put(COLLECTION_URL, headers=self.headers)
@@ -81,8 +80,7 @@ def test_accepts_any_kind_of_record(self):
class InvalidSchemaTest(BaseWebTestWithSchema, unittest.TestCase):
def test_schema_should_be_json_schema(self):
- newschema = SCHEMA.copy()
- newschema['type'] = 'Washmachine'
+ newschema = {**SCHEMA, 'type': 'Washmachine'}
resp = self.app.put_json(COLLECTION_URL,
{'data': {'schema': newschema}},
headers=self.headers,
@@ -93,7 +91,7 @@ def test_schema_should_be_json_schema(self):
class RecordsValidationTest(BaseWebTestWithSchema, unittest.TestCase):
def setUp(self):
- super(RecordsValidationTest, self).setUp()
+ super().setUp()
resp = self.app.put_json(COLLECTION_URL,
{'data': {'schema': SCHEMA}},
headers=self.headers)
@@ -123,7 +121,7 @@ def test_records_are_validated_on_patch(self):
headers=self.headers,
status=201)
record_id = resp.json['data']['id']
- self.app.patch_json('%s/%s' % (RECORDS_URL, record_id),
+ self.app.patch_json('{}/{}'.format(RECORDS_URL, record_id),
{'data': {'title': 3.14}},
headers=self.headers,
status=400)
@@ -134,7 +132,7 @@ def test_records_are_validated_on_put(self):
headers=self.headers,
status=201)
record_id = resp.json['data']['id']
- self.app.put_json('%s/%s' % (RECORDS_URL, record_id),
+ self.app.put_json('{}/{}'.format(RECORDS_URL, record_id),
{'data': {'body': 'Without title'}},
headers=self.headers,
status=400)
@@ -175,16 +173,15 @@ def test_records_can_filtered_by_schema_version(self):
{'data': VALID_RECORD},
headers=self.headers)
- resp = self.app.get(RECORDS_URL + '?min_schema=%s' % schema_version,
+ resp = self.app.get(RECORDS_URL + '?min_schema={}'.format(schema_version),
headers=self.headers)
self.assertEqual(len(resp.json['data']), 1)
class ExtraPropertiesValidationTest(BaseWebTestWithSchema, unittest.TestCase):
def setUp(self):
- super(ExtraPropertiesValidationTest, self).setUp()
- schema = SCHEMA.copy()
- schema['additionalProperties'] = False
+ super().setUp()
+ schema = {**SCHEMA, 'additionalProperties': False}
resp = self.app.put_json(COLLECTION_URL,
{'data': {'schema': schema}},
headers=self.headers)
@@ -197,13 +194,13 @@ def test_record_can_be_validated_on_post(self):
def test_record_can_be_validated_on_put(self):
record_id = '5443d83f-852a-481a-8e9d-5aa804b05b08'
- self.app.put_json('%s/%s' % (RECORDS_URL, record_id),
+ self.app.put_json('{}/{}'.format(RECORDS_URL, record_id),
{'data': VALID_RECORD},
headers=self.headers)
def test_records_are_validated_on_patch(self):
record_id = '5443d83f-852a-481a-8e9d-5aa804b05b08'
- record_url = '%s/%s' % (RECORDS_URL, record_id)
+ record_url = '{}/{}'.format(RECORDS_URL, record_id)
resp = self.app.put_json(record_url,
{'data': VALID_RECORD},
headers=self.headers)
@@ -216,9 +213,8 @@ def test_records_are_validated_on_patch(self):
def test_additional_properties_are_rejected(self):
record_id = '5443d83f-852a-481a-8e9d-5aa804b05b08'
- record = VALID_RECORD.copy()
- record['extra'] = 'blah!'
- resp = self.app.put_json('%s/%s' % (RECORDS_URL, record_id),
+ record = {**VALID_RECORD, 'extra': 'blah!'}
+ resp = self.app.put_json('{}/{}'.format(RECORDS_URL, record_id),
{'data': record},
headers=self.headers,
status=400)
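URL construction in these tests switches from %-interpolation to str.format(). For plain string substitution the two are equivalent, as sketched below (the URL and id values are illustrative):

RECORDS_URL = '/buckets/blog/collections/cached/records'   # illustrative value
record_id = '5443d83f-852a-481a-8e9d-5aa804b05b08'

old_style = '%s/%s' % (RECORDS_URL, record_id)
new_style = '{}/{}'.format(RECORDS_URL, record_id)
assert old_style == new_style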
diff --git a/tests/test_views_flush.py b/tests/test_views_flush.py
index f7923df33..b0927b400 100644
--- a/tests/test_views_flush.py
+++ b/tests/test_views_flush.py
@@ -16,14 +16,13 @@ class FlushViewTest(BaseWebTest, unittest.TestCase):
collection_url = '/buckets/beers/collections/barley/records'
def setUp(self):
- super(FlushViewTest, self).setUp()
+ super().setUp()
self.events = []
- bucket = MINIMALIST_BUCKET.copy()
+ bucket = {**MINIMALIST_BUCKET}
- self.alice_headers = self.headers.copy()
- self.alice_headers.update(**get_user_headers('alice'))
+ self.alice_headers = {**self.headers, **get_user_headers('alice')}
resp = self.app.get('/', headers=self.alice_headers)
alice_principal = resp.json['user']['id']
@@ -48,21 +47,20 @@ def setUp(self):
def tearDown(self):
self.events = []
- super(FlushViewTest, self).tearDown()
+ super().tearDown()
def make_app(self, settings=None, config=None):
settings = self.get_app_settings(settings)
config = Configurator(settings=settings)
config.add_subscriber(self.listener, ServerFlushed)
config.commit()
- return super(FlushViewTest, self).make_app(settings=settings,
- config=config)
+ return super().make_app(settings=settings, config=config)
def get_app_settings(self, extras=None):
if extras is None:
extras = {}
extras.setdefault('flush_endpoint_enabled', True)
- settings = super(FlushViewTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
return settings
def listener(self, event):
diff --git a/tests/test_views_groups.py b/tests/test_views_groups.py
index 609e701fe..6fc42ac35 100644
--- a/tests/test_views_groups.py
+++ b/tests/test_views_groups.py
@@ -12,7 +12,7 @@ class GroupViewTest(FormattedErrorMixin, BaseWebTest, unittest.TestCase):
record_url = '/buckets/beers/groups/moderators'
def setUp(self):
- super(GroupViewTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
headers=self.headers)
resp = self.app.put_json(self.record_url,
@@ -44,9 +44,9 @@ def test_groups_name_should_be_simple(self):
status=400)
def test_groups_can_have_arbitrary_attributes(self):
- group = MINIMALIST_GROUP.copy()
mailinglist = "kinto@mozilla.com"
- group['data']['mailinglist'] = mailinglist
+ group = {**MINIMALIST_GROUP, 'data': {**MINIMALIST_GROUP['data'],
+ 'mailinglist': mailinglist}}
resp = self.app.put_json('/buckets/beers/groups/moderator',
group,
headers=self.headers)
@@ -55,8 +55,7 @@ def test_groups_can_have_arbitrary_attributes(self):
self.assertEqual(data['mailinglist'], mailinglist)
def test_groups_can_be_filtered_by_arbitrary_attribute(self):
- group = MINIMALIST_GROUP.copy()
- group['data']['size'] = 3
+ group = {**MINIMALIST_GROUP, 'data': {**MINIMALIST_GROUP['data'], 'size': 3}}
self.app.put_json('/buckets/beers/groups/moderator',
group,
headers=self.headers)
@@ -66,8 +65,7 @@ def test_groups_can_be_filtered_by_arbitrary_attribute(self):
self.assertEqual(len(data), 1)
def test_groups_should_reject_unaccepted_request_content_type(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/plain'
+ headers = {**self.headers, 'Content-Type': 'text/plain'}
self.app.put('/buckets/beers/groups/moderator',
MINIMALIST_GROUP,
headers=headers,
@@ -85,30 +83,27 @@ def test_groups_are_isolated_by_bucket(self):
self.app.get(other_bucket, headers=self.headers, status=404)
def test_wrong_create_permissions_cannot_be_added_on_groups(self):
- group = MINIMALIST_GROUP.copy()
- group['permissions'] = {'group:create': ['fxa:user']}
+ group = {**MINIMALIST_GROUP, 'permissions': {'group:create': ['fxa:user']}}
self.app.put_json('/buckets/beers/groups/moderator',
group,
headers=self.headers,
status=400)
def test_recreate_group_after_deletion_returns_a_201(self):
- group = MINIMALIST_GROUP.copy()
self.app.put_json('/buckets/beers/groups/moderator',
- group,
+ MINIMALIST_GROUP,
headers=self.headers,
status=201)
self.app.delete('/buckets/beers/groups/moderator',
headers=self.headers,
status=200)
self.app.put_json('/buckets/beers/groups/moderator',
- group,
+ MINIMALIST_GROUP,
headers=self.headers,
status=201)
def test_group_doesnt_accept_system_Everyone(self):
- group = MINIMALIST_GROUP.copy()
- group['data'] = {'members': ['system.Everyone']}
+ group = {**MINIMALIST_GROUP, 'data': {'members': ['system.Everyone']}}
response = self.app.put_json('/buckets/beers/groups/moderator',
group,
headers=self.headers,
@@ -119,8 +114,7 @@ def test_group_doesnt_accept_system_Everyone(self):
"'system.Everyone' is not a valid user ID.")
def test_group_doesnt_accept_groups_inside_groups(self):
- group = MINIMALIST_GROUP.copy()
- group['data'] = {'members': ['/buckets/beers/groups/administrators']}
+ group = {**MINIMALIST_GROUP, 'data': {'members': ['/buckets/beers/groups/administrators']}}
response = self.app.put_json('/buckets/beers/groups/moderator',
group,
headers=self.headers,
@@ -136,7 +130,7 @@ class GroupManagementTest(BaseWebTest, unittest.TestCase):
group_url = '/buckets/beers/groups/moderators'
def setUp(self):
- super(GroupManagementTest, self).setUp()
+ super().setUp()
self.create_bucket('beers')
def test_groups_can_be_deleted(self):
@@ -225,8 +219,7 @@ def test_groups_can_be_created_after_deletion(self):
self.create_group('beers', 'moderators')
group_url = '/buckets/beers/groups/moderators'
self.app.delete(group_url, headers=self.headers)
- headers = self.headers.copy()
- headers['If-None-Match'] = '*'
+ headers = {**self.headers, 'If-None-Match': '*'}
self.app.put_json(group_url, MINIMALIST_GROUP,
headers=headers, status=201)
@@ -244,7 +237,7 @@ class InvalidGroupTest(BaseWebTest, unittest.TestCase):
group_url = '/buckets/beers/groups/moderators'
def setUp(self):
- super(InvalidGroupTest, self).setUp()
+ super().setUp()
self.create_bucket('beers')
def test_groups_data_is_required_with_put(self):
diff --git a/tests/test_views_hello.py b/tests/test_views_hello.py
index 20c030a0a..a4d804109 100644
--- a/tests/test_views_hello.py
+++ b/tests/test_views_hello.py
@@ -26,7 +26,7 @@ def test_returns_user_id_if_authenticated(self):
def test_returns_user_principals_if_authenticated(self):
group_url = '/buckets/beers/groups/users'
- group = MINIMALIST_GROUP.copy()
+ group = {**MINIMALIST_GROUP}
group['data']['members'].append(self.principal)
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET, headers=self.headers)
self.app.put_json(group_url, group, headers=self.headers)
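One caveat worth noting: {**MINIMALIST_GROUP} is only a shallow copy, so the group['data']['members'].append(...) call above still mutates the nested list shared with the module-level fixture, exactly as the old .copy() did. The group tests earlier avoid this by unpacking the nested 'data' dict as well. A sketch of the difference, using an illustrative fixture value:

MINIMALIST_GROUP = {'data': {'members': []}}   # illustrative fixture

group = {**MINIMALIST_GROUP}                   # shallow copy: 'data' is still shared
group['data']['members'].append('me')
assert MINIMALIST_GROUP['data']['members'] == ['me']    # the fixture was mutated too

MINIMALIST_GROUP = {'data': {'members': []}}
group = {**MINIMALIST_GROUP,
         'data': {**MINIMALIST_GROUP['data'], 'members': ['me']}}
assert MINIMALIST_GROUP['data']['members'] == []        # the fixture is left intact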
diff --git a/tests/test_views_objects_permissions.py b/tests/test_views_objects_permissions.py
index 120e48958..53b726bae 100644
--- a/tests/test_views_objects_permissions.py
+++ b/tests/test_views_objects_permissions.py
@@ -10,11 +10,9 @@
class PermissionsTest(BaseWebTest, unittest.TestCase):
def __init__(self, *args, **kwargs):
- super(PermissionsTest, self).__init__(*args, **kwargs)
- self.alice_headers = self.headers.copy()
- self.alice_headers.update(**get_user_headers('alice'))
- self.bob_headers = self.headers.copy()
- self.bob_headers.update(**get_user_headers('bob'))
+ super().__init__(*args, **kwargs)
+ self.alice_headers = {**self.headers, **get_user_headers('alice')}
+ self.bob_headers = {**self.headers, **get_user_headers('bob')}
self.alice_principal = ('basicauth:d5b0026601f1b251974e09548d44155e16'
'812e3c64ff7ae053fe3542e2ca1570')
@@ -25,8 +23,7 @@ def __init__(self, *args, **kwargs):
class BucketPermissionsTest(PermissionsTest):
def setUp(self):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {'read': [self.alice_principal]}
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {'read': [self.alice_principal]}}
self.app.put_json('/buckets/sodas',
bucket,
headers=self.headers)
@@ -65,11 +62,10 @@ def test_permissions_are_returned_if_can_write(self):
class CollectionPermissionsTest(PermissionsTest):
def setUp(self):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {
'read': [self.alice_principal],
'write': [self.bob_principal]
- }
+ }}
self.app.put_json('/buckets/beer',
bucket,
headers=self.headers)
@@ -91,8 +87,7 @@ def test_read_is_allowed_if_write_on_bucket(self):
headers=self.bob_headers)
def test_cannot_read_if_not_allowed(self):
- headers = self.headers.copy()
- headers.update(**get_user_headers('jean-louis'))
+ headers = {**self.headers, **get_user_headers('jean-louis')}
self.app.get('/buckets/beer/collections/barley',
headers=headers,
status=403)
@@ -118,11 +113,10 @@ def test_permission_backend_prevent_sql_injections(self):
class GroupPermissionsTest(PermissionsTest):
def setUp(self):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {
'read': [self.alice_principal],
'write': [self.bob_principal]
- }
+ }}
self.app.put_json('/buckets/beer',
bucket,
headers=self.headers)
@@ -145,8 +139,7 @@ def test_read_is_allowed_if_write_on_bucket(self):
headers=self.bob_headers)
def test_cannot_read_if_not_allowed(self):
- headers = self.headers.copy()
- headers.update(**get_user_headers('jean-louis'))
+ headers = {**self.headers, **get_user_headers('jean-louis')}
self.app.get('/buckets/beer/groups/moderators',
headers=headers,
status=403)
@@ -167,14 +160,12 @@ def test_creation_is_forbidden_is_no_write_on_bucket(self):
class RecordPermissionsTest(PermissionsTest):
def setUp(self):
- bucket = MINIMALIST_BUCKET.copy()
- bucket['permissions'] = {'write': [self.alice_principal]}
+ bucket = {**MINIMALIST_BUCKET, 'permissions': {'write': [self.alice_principal]}}
self.app.put_json('/buckets/beer',
bucket,
headers=self.headers)
- collection = MINIMALIST_COLLECTION.copy()
- collection['permissions'] = {'write': [self.bob_principal]}
+ collection = {**MINIMALIST_COLLECTION, 'permissions': {'write': [self.bob_principal]}}
self.app.put_json('/buckets/beer/collections/barley',
collection,
headers=self.headers)
@@ -190,8 +181,7 @@ def test_creation_is_allowed_if_write_on_collection(self):
headers=self.bob_headers)
def test_creation_is_forbidden_is_no_write_on_bucket_nor_collection(self):
- headers = self.headers.copy()
- headers.update(**get_user_headers('jean-louis'))
+ headers = {**self.headers, **get_user_headers('jean-louis')}
self.app.post_json('/buckets/beer/collections/barley/records',
MINIMALIST_RECORD,
headers=headers,
@@ -203,7 +193,7 @@ def test_record_permissions_are_modified_by_patch(self):
MINIMALIST_RECORD,
headers=self.headers)
record = resp.json['data']
- resp = self.app.patch_json(collection_url + '/' + record['id'],
+ resp = self.app.patch_json('{}/{}'.format(collection_url, record['id']),
{'permissions': {'read': ['fxa:user']}},
headers=self.headers)
self.assertIn('fxa:user', resp.json['permissions']['read'])
@@ -221,7 +211,7 @@ def setUp(self):
{'permissions': {'read': ['system.Authenticated']}},
headers=self.alice_headers)
for parent in ('create', 'write', 'read'):
- self.app.put_json('/buckets/%s/groups/child' % parent,
+ self.app.put_json('/buckets/{}/groups/child'.format(parent),
MINIMALIST_GROUP,
headers=self.alice_headers)
self.bob_headers_safe_creation = dict({'If-None-Match': '*'},
diff --git a/tests/test_views_permissions.py b/tests/test_views_permissions.py
index f5c3b5852..75a89be99 100644
--- a/tests/test_views_permissions.py
+++ b/tests/test_views_permissions.py
@@ -13,7 +13,7 @@
class PermissionsViewTest(BaseWebTest, unittest.TestCase):
def get_app_settings(self, extras=None):
- settings = super(PermissionsViewTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['experimental_permissions_endpoint'] = 'True'
return settings
@@ -21,7 +21,7 @@ def get_app_settings(self, extras=None):
class EntriesTest(PermissionsViewTest):
def setUp(self):
- super(EntriesTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
headers=self.headers)
self.app.put_json('/buckets/beers/collections/barley',
@@ -30,7 +30,7 @@ def setUp(self):
self.app.put_json('/buckets/beers/groups/amateurs',
MINIMALIST_GROUP,
headers=self.headers)
- self.app.put_json('/buckets/beers/collections/barley/records/' + RECORD_ID, # noqa
+ self.app.put_json('/buckets/beers/collections/barley/records/{}'.format(RECORD_ID),
MINIMALIST_RECORD,
headers=self.headers)
@@ -99,7 +99,7 @@ def test_permissions_list_can_be_paginated(self):
class GroupsPermissionTest(PermissionsViewTest):
def setUp(self):
- super(GroupsPermissionTest, self).setUp()
+ super().setUp()
self.admin_headers = get_user_headers('admin')
self.admin_principal = self.app.get('/', headers=self.admin_headers).json['user']['id']
@@ -148,10 +148,10 @@ class SettingsPermissionsTest(PermissionsViewTest):
admin_principal = 'basicauth:bb7fe7b98e759578ef0de85b546dd57d21fe1e399390ad8dafc9886043a00e5c' # NOQA
def __init__(self, *args, **kwargs):
- super(SettingsPermissionsTest, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def get_app_settings(self, extras=None):
- settings = super(SettingsPermissionsTest, self).get_app_settings(extras)
+ settings = super().get_app_settings(extras)
settings['bucket_write_principals'] = 'system.Authenticated'
settings['group_create_principals'] = self.admin_principal
settings['collection_write_principals'] = 'system.Authenticated'
@@ -159,7 +159,7 @@ def get_app_settings(self, extras=None):
return settings
def setUp(self):
- super(SettingsPermissionsTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET, headers=self.headers)
self.app.put_json('/buckets/beers/groups/admins',
{'data': {'members': [self.admin_principal]}},
@@ -216,7 +216,7 @@ def test_settings_permissions_are_merged_with_perms_backend(self):
class DeletedObjectsTest(PermissionsViewTest):
def setUp(self):
- super(DeletedObjectsTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers',
MINIMALIST_BUCKET,
headers=self.headers)
diff --git a/tests/test_views_records.py b/tests/test_views_records.py
index ce1c2264c..be81c702a 100644
--- a/tests/test_views_records.py
+++ b/tests/test_views_records.py
@@ -3,7 +3,6 @@
import re
import unittest
-from kinto.core.utils import decode_header
from kinto.core.testing import get_user_headers
from .support import (BaseWebTest, MINIMALIST_RECORD,
@@ -14,10 +13,10 @@
class RecordsViewTest(BaseWebTest, unittest.TestCase):
collection_url = '/buckets/beers/collections/barley/records'
- _record_url = '/buckets/beers/collections/barley/records/%s'
+ _record_url = '/buckets/beers/collections/barley/records/{}'
def setUp(self):
- super(RecordsViewTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
headers=self.headers)
self.app.put_json('/buckets/beers/collections/barley',
@@ -27,7 +26,7 @@ def setUp(self):
MINIMALIST_RECORD,
headers=self.headers)
self.record = resp.json['data']
- self.record_url = self._record_url % self.record['id']
+ self.record_url = self._record_url.format(self.record['id'])
def test_records_can_be_accessed_by_id(self):
self.app.get(self.record_url, headers=self.headers)
@@ -140,8 +139,10 @@ def test_records_can_be_filtered_on_any_field(self):
def test_records_can_be_sorted_on_any_field(self):
for i in range(3):
- record = MINIMALIST_RECORD.copy()
- record['data']['name'] = 'Stout %s' % i
+ record = {**MINIMALIST_RECORD, 'data': {
+ **MINIMALIST_RECORD['data'],
+ 'name': 'Stout {}'.format(i)}
+ }
self.app.post_json(self.collection_url,
record,
headers=self.headers)
@@ -153,8 +154,7 @@ def test_records_can_be_sorted_on_any_field(self):
['Stout 2', 'Stout 1', 'Stout 0', 'Hulled Barley'])
def test_wrong_create_permissions_cannot_be_added_on_records(self):
- record = MINIMALIST_RECORD.copy()
- record['permissions'] = {'record:create': ['fxa:user']}
+ record = {**MINIMALIST_RECORD, 'permissions': {'record:create': ['fxa:user']}}
self.app.put_json(self.record_url,
record,
headers=self.headers,
@@ -163,16 +163,14 @@ def test_wrong_create_permissions_cannot_be_added_on_records(self):
def test_create_a_record_update_collection_timestamp(self):
collection_resp = self.app.get(self.collection_url,
headers=self.headers)
- old_timestamp = int(
- decode_header(json.loads(collection_resp.headers['ETag'])))
+ old_timestamp = int(json.loads(collection_resp.headers['ETag']))
self.app.post_json(self.collection_url,
MINIMALIST_RECORD,
headers=self.headers,
status=201)
collection_resp = self.app.get(self.collection_url,
headers=self.headers)
- new_timestamp = int(
- decode_header(json.loads(collection_resp.headers['ETag'])))
+ new_timestamp = int(json.loads(collection_resp.headers['ETag']))
assert old_timestamp < new_timestamp
def test_create_a_record_without_id_generates_a_uuid(self):
@@ -220,44 +218,38 @@ def test_create_a_record_with_existing_from_someone_else_gives_403(self):
def test_update_a_record_update_collection_timestamp(self):
collection_resp = self.app.get(self.collection_url,
headers=self.headers)
- old_timestamp = int(
- decode_header(json.loads(collection_resp.headers['ETag'])))
+ old_timestamp = int(json.loads(collection_resp.headers['ETag']))
self.app.put_json(self.record_url,
MINIMALIST_RECORD,
headers=self.headers,
status=200)
collection_resp = self.app.get(self.collection_url,
headers=self.headers)
- new_timestamp = int(
- decode_header(json.loads(collection_resp.headers['ETag'])))
+ new_timestamp = int(json.loads(collection_resp.headers['ETag']))
assert old_timestamp < new_timestamp
def test_delete_a_record_update_collection_timestamp(self):
collection_resp = self.app.get(self.collection_url,
headers=self.headers)
- old_timestamp = int(
- decode_header(json.loads(collection_resp.headers['ETag'])))
+ old_timestamp = int(json.loads(collection_resp.headers['ETag']))
self.app.delete(self.record_url,
headers=self.headers,
status=200)
collection_resp = self.app.get(self.collection_url,
headers=self.headers)
- new_timestamp = int(
- decode_header(json.loads(collection_resp.headers['ETag'])))
+ new_timestamp = int(json.loads(collection_resp.headers['ETag']))
assert old_timestamp < new_timestamp
def test_record_is_accessible_by_group_member(self):
# access as aaron
- self.aaron_headers = self.headers.copy()
- self.aaron_headers.update(**get_user_headers('aaron'))
+ self.aaron_headers = {**self.headers, **get_user_headers('aaron')}
resp = self.app.get('/',
headers=self.aaron_headers,
status=200)
self.create_group('beers', 'brewers', [resp.json['user']['id']])
- record = MINIMALIST_RECORD.copy()
- record['permissions'] = {'read': ['/buckets/beers/groups/brewers']}
+ record = {**MINIMALIST_RECORD, 'permissions': {'read': ['/buckets/beers/groups/brewers']}}
self.app.put_json(self.record_url,
record,
headers=self.headers,
@@ -268,24 +260,21 @@ def test_record_is_accessible_by_group_member(self):
status=200)
def test_records_should_reject_unaccepted_request_content_type(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'text/plain'
+ headers = {**self.headers, 'Content-Type': 'text/plain'}
self.app.put(self.record_url,
MINIMALIST_RECORD,
headers=headers,
status=415)
def test_records_should_reject_unaccepted_client_accept(self):
- headers = self.headers.copy()
- headers['Accept'] = 'text/plain'
+ headers = {**self.headers, 'Accept': 'text/plain'}
self.app.get(self.record_url,
MINIMALIST_RECORD,
headers=headers,
status=406)
def test_records_should_accept_client_accept(self):
- headers = self.headers.copy()
- headers['Accept'] = '*/*'
+ headers = {**self.headers, 'Accept': '*/*'}
self.app.get(self.record_url,
MINIMALIST_RECORD,
headers=headers,
@@ -295,8 +284,7 @@ def test_records_can_be_created_after_deletion(self):
self.app.delete(self.record_url,
headers=self.headers,
status=200)
- headers = self.headers.copy()
- headers['If-None-Match'] = '*'
+ headers = {**self.headers, 'If-None-Match': '*'}
self.app.put_json(self.record_url, MINIMALIST_RECORD,
headers=headers, status=201)
@@ -304,27 +292,23 @@ def test_records_can_be_created_after_deletion(self):
class RecordsViewMergeTest(BaseWebTest, unittest.TestCase):
collection_url = '/buckets/beers/collections/barley/records'
- _record_url = '/buckets/beers/collections/barley/records/%s'
+ _record_url = '/buckets/beers/collections/barley/records/{}'
def setUp(self):
- super(RecordsViewMergeTest, self).setUp()
+ super().setUp()
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
headers=self.headers)
self.app.put_json('/buckets/beers/collections/barley',
MINIMALIST_COLLECTION,
headers=self.headers)
- record = MINIMALIST_RECORD.copy()
- record['data'] = {}
- record['data']['grain'] = {'one': 1}
- resp = self.app.post_json(self.collection_url,
- record,
+ record = {**MINIMALIST_RECORD, 'data': {'grain': {'one': 1}}}
+ resp = self.app.post_json(self.collection_url, record,
headers=self.headers)
self.record = resp.json['data']
- self.record_url = self._record_url % self.record['id']
+ self.record_url = self._record_url.format(self.record['id'])
def test_merge_patch(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'application/merge-patch+json'
+ headers = {**self.headers, 'Content-Type': 'application/merge-patch+json'}
json = {'data': {'grain': {'two': 2}}}
resp = self.app.patch_json(self.record_url,
json,
@@ -334,8 +318,7 @@ def test_merge_patch(self):
self.assertEquals(resp.json['data']['grain']['two'], 2)
def test_merge_patch_remove_nones(self):
- headers = self.headers.copy()
- headers['Content-Type'] = 'application/merge-patch+json'
+ headers = {**self.headers, 'Content-Type': 'application/merge-patch+json'}
json = {'data': {'grain': {'one': None}}}
resp = self.app.patch_json(self.record_url,
json,
@@ -347,27 +330,26 @@ def test_merge_patch_remove_nones(self):
class RecordsViewPatchTest(BaseWebTest, unittest.TestCase):
collection_url = '/buckets/beers/collections/barley/records'
- _record_url = '/buckets/beers/collections/barley/records/%s'
+ _record_url = '/buckets/beers/collections/barley/records/{}'
def setUp(self):
- super(RecordsViewPatchTest, self).setUp()
- self.patch_headers = self.headers.copy()
- self.patch_headers['Content-Type'] = 'application/json-patch+json'
+ super().setUp()
+ self.patch_headers = {**self.headers, 'Content-Type': 'application/json-patch+json'}
self.app.put_json('/buckets/beers', MINIMALIST_BUCKET,
headers=self.headers)
self.app.put_json('/buckets/beers/collections/barley',
MINIMALIST_COLLECTION,
headers=self.headers)
- record = MINIMALIST_RECORD.copy()
- record['permissions'] = {}
- record['permissions']['read'] = ['alice', 'carla']
- record['permissions']['write'] = ['bob']
+ record = {**MINIMALIST_RECORD, 'permissions': {
+ 'read': ['alice', 'carla'],
+ 'write': ['bob']
+ }}
resp = self.app.post_json(self.collection_url,
record,
headers=self.headers)
self.record = resp.json['data']
- self.record_url = self._record_url % self.record['id']
+ self.record_url = self._record_url.format(self.record['id'])
def test_patch_add_permissions(self):
json = [{'op': 'add', 'path': '/permissions/read/me', 'value': 'me'}]
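The RecordsViewMergeTest cases above exercise RFC 7396 merge-patch semantics: keys in the patch are merged into the target, and a null (None) value removes the corresponding key. A sketch of the behaviour those assertions expect; apply_merge_patch is a hypothetical illustration, not a Kinto API:

def apply_merge_patch(target, patch):
    # Hypothetical helper illustrating RFC 7396; not part of Kinto.
    result = dict(target)
    for key, value in patch.items():
        if value is None:
            result.pop(key, None)                        # null removes the key
        elif isinstance(value, dict) and isinstance(result.get(key), dict):
            result[key] = apply_merge_patch(result[key], value)
        else:
            result[key] = value
    return result

record = {'grain': {'one': 1}}
assert apply_merge_patch(record, {'grain': {'two': 2}}) == {'grain': {'one': 1, 'two': 2}}
assert apply_merge_patch(record, {'grain': {'one': None}}) == {'grain': {}}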
diff --git a/tox.ini b/tox.ini
index 1e36bcd9e..61ead7837 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27,py27-raw,py34,py36,flake8
+envlist = py35-raw,py35,py36,flake8
skip_missing_interpreters = True
[testenv]
@@ -21,7 +21,7 @@ commands = flake8 kinto tests docs/conf.py
deps =
flake8
-[testenv:py27-raw]
+[testenv:py35-raw]
passenv = TRAVIS
commands =
python --version