From 1b061d4d146294842963db93925e71329fc448e1 Mon Sep 17 00:00:00 2001 From: Rafael Aguayo Date: Tue, 4 Jun 2019 17:34:25 -0700 Subject: [PATCH 1/2] Black morango codebase --- morango/__init__.py | 2 +- morango/api/fields.py | 3 +- morango/api/permissions.py | 44 ++- morango/api/serializers.py | 127 ++++-- morango/api/urls.py | 32 +- morango/api/viewsets.py | 202 ++++++---- morango/apps.py | 7 +- morango/certificates.py | 72 +++- morango/constants/api_urls.py | 4 +- morango/constants/capabilities.py | 2 +- morango/constants/file.py | 5 +- morango/controller.py | 4 +- morango/crypto.py | 106 +++-- morango/migrations/0001_initial.py | 374 +++++++++++------- morango/migrations/0002_auto_20170511_0400.py | 71 ++-- morango/migrations/0003_auto_20170519_0543.py | 21 +- morango/migrations/0004_auto_20170520_2112.py | 23 +- morango/migrations/0005_auto_20170629_2139.py | 17 +- .../0006_instanceidmodel_system_id.py | 13 +- morango/migrations/0007_auto_20171018_1615.py | 248 ++++++------ morango/migrations/0008_auto_20171114_2217.py | 69 ++-- morango/migrations/0009_auto_20171205_0252.py | 10 +- morango/migrations/0010_auto_20171206_1615.py | 25 +- morango/migrations/0011_sharedkey.py | 28 +- morango/migrations/0012_auto_20180927_1658.py | 25 +- morango/models.py | 187 ++++++--- morango/parsers.py | 12 +- morango/query.py | 7 +- morango/settings.py | 84 ++-- morango/signals.py | 2 +- morango/syncsession.py | 294 ++++++++++---- morango/urls.py | 9 +- morango/util.py | 30 +- morango/utils/backends/base.py | 79 ++-- morango/utils/backends/postgres.py | 115 ++++-- morango/utils/backends/sqlite.py | 95 +++-- morango/utils/backends/utils.py | 10 +- morango/utils/morango_mptt.py | 11 +- morango/utils/proquint.py | 39 +- morango/utils/register_models.py | 88 ++++- morango/utils/sync_utils.py | 237 ++++++++--- morango/utils/uuids.py | 18 +- morango/validation.py | 51 ++- setup.cfg | 9 +- 44 files changed, 1826 insertions(+), 1085 deletions(-) diff --git a/morango/__init__.py b/morango/__init__.py index c6bfdf79..fb8a5c3a 100644 --- a/morango/__init__.py +++ b/morango/__init__.py @@ -2,5 +2,5 @@ from __future__ import print_function from __future__ import unicode_literals -default_app_config = 'morango.apps.MorangoConfig' +default_app_config = "morango.apps.MorangoConfig" __version__ = "0.4.5" diff --git a/morango/api/fields.py b/morango/api/fields.py index ccf568f6..18fbd791 100644 --- a/morango/api/fields.py +++ b/morango/api/fields.py @@ -4,9 +4,8 @@ class PublicKeyField(serializers.Field): - def to_representation(self, obj): return str(obj) def to_internal_value(self, data): - return Key(public_key_string=data) \ No newline at end of file + return Key(public_key_string=data) diff --git a/morango/api/permissions.py b/morango/api/permissions.py index e555408f..741e0b43 100644 --- a/morango/api/permissions.py +++ b/morango/api/permissions.py @@ -1,8 +1,11 @@ import json from django.conf import settings -from django.contrib.auth import authenticate, get_user_model -from rest_framework import permissions, authentication, exceptions +from django.contrib.auth import authenticate +from django.contrib.auth import get_user_model +from rest_framework import authentication +from rest_framework import exceptions +from rest_framework import permissions from ..models import TransferSession @@ -18,9 +21,7 @@ def authenticate_credentials(self, userargs, password, request=None): The "userargs" string may be just the username, or a querystring-encoded set of params. 
""" - credentials = { - 'password': password - } + credentials = {"password": password} if "=" not in userargs: # if it doesn't seem to be in querystring format, just use it as the username @@ -35,16 +36,15 @@ def authenticate_credentials(self, userargs, password, request=None): user = authenticate(**credentials) if user is None: - raise exceptions.AuthenticationFailed('Invalid credentials.') + raise exceptions.AuthenticationFailed("Invalid credentials.") if not user.is_active: - raise exceptions.AuthenticationFailed('User inactive or deleted.') + raise exceptions.AuthenticationFailed("User inactive or deleted.") return (user, None) class CertificatePermissions(permissions.BasePermission): - def has_permission(self, request, view): # the Django REST Framework browseable API calls this to see what buttons to show @@ -61,8 +61,14 @@ def has_permission(self, request, view): if hasattr(request.user, "has_morango_certificate_scope_permission"): scope_definition_id = request.data.get("scope_definition") scope_params = json.loads(request.data.get("scope_params")) - if scope_definition_id and scope_params and isinstance(scope_params, dict): - return request.user.has_morango_certificate_scope_permission(scope_definition_id, scope_params) + if ( + scope_definition_id + and scope_params + and isinstance(scope_params, dict) + ): + return request.user.has_morango_certificate_scope_permission( + scope_definition_id, scope_params + ) return False return False @@ -72,13 +78,12 @@ class CertificatePushPermissions(permissions.BasePermission): message = "Server does not allow certificate pushing." def has_permission(self, request, view): - if getattr(settings, 'ALLOW_CERTIFICATE_PUSHING', False): + if getattr(settings, "ALLOW_CERTIFICATE_PUSHING", False): return True return False class NoncePermissions(permissions.BasePermission): - def has_permission(self, request, view): if request.method != "POST": @@ -88,27 +93,29 @@ def has_permission(self, request, view): class SyncSessionPermissions(permissions.BasePermission): - def has_permission(self, request, view): if request.method == "DELETE": return True if request.method == "POST": - return True # we'll be doing some additional permission checks in the viewset + return ( + True + ) # we'll be doing some additional permission checks in the viewset return False class TransferSessionPermissions(permissions.BasePermission): - def has_permission(self, request, view): if request.method == "DELETE": return True if request.method == "POST": - return True # we'll be doing some additional permission checks in the viewset + return ( + True + ) # we'll be doing some additional permission checks in the viewset if request.method == "PATCH": return True @@ -117,7 +124,6 @@ def has_permission(self, request, view): class BufferPermissions(permissions.BasePermission): - def has_permission(self, request, view): if request.method == "POST": @@ -127,7 +133,9 @@ def has_permission(self, request, view): sesh_id = request.query_params.get("transfer_session_id") if not sesh_id: return False - if not TransferSession.objects.filter(id=sesh_id, active=True, push=False).exists(): + if not TransferSession.objects.filter( + id=sesh_id, active=True, push=False + ).exists(): return False return True diff --git a/morango/api/serializers.py b/morango/api/serializers.py index 5b1ce689..19b7b6d2 100644 --- a/morango/api/serializers.py +++ b/morango/api/serializers.py @@ -1,11 +1,15 @@ -from django.db import transaction -from rest_framework import serializers, exceptions -import json +from 
rest_framework import exceptions +from rest_framework import serializers -from .fields import PublicKeyField -from ..models import Certificate, Nonce, SyncSession, TransferSession, InstanceIDModel, Buffer, SyncableModel, RecordMaxCounterBuffer -from ..utils.register_models import _profile_models from ..crypto import SharedKey +from ..models import Buffer +from ..models import Certificate +from ..models import InstanceIDModel +from ..models import RecordMaxCounterBuffer +from ..models import SyncSession +from ..models import TransferSession +from .fields import PublicKeyField +from morango.certificates import Nonce class CertificateSerializer(serializers.ModelSerializer): @@ -14,61 +18,119 @@ class CertificateSerializer(serializers.ModelSerializer): def validate_parent(self, parent): if not parent: - raise exceptions.ValidationError("Parent certificate (to sign the requested certificate) must be specified!") + raise exceptions.ValidationError( + "Parent certificate (to sign the requested certificate) must be specified!" + ) if not parent.has_private_key(): - raise exceptions.ValidationError("Server does not have private key for requested parent certificate!") + raise exceptions.ValidationError( + "Server does not have private key for requested parent certificate!" + ) return parent class Meta: model = Certificate - fields = ('id', 'parent', 'profile', 'scope_definition', 'scope_version', 'scope_params', 'public_key', 'serialized', 'signature', 'salt') - read_only_fields = ('serialized', 'id', 'signature', 'salt') + fields = ( + "id", + "parent", + "profile", + "scope_definition", + "scope_version", + "scope_params", + "public_key", + "serialized", + "signature", + "salt", + ) + read_only_fields = ("serialized", "id", "signature", "salt") class SharedKeySerializer(serializers.ModelSerializer): - class Meta: model = SharedKey - fields = ('public_key',) + fields = ("public_key",) class NonceSerializer(serializers.ModelSerializer): - class Meta: model = Nonce - fields = ('id', 'timestamp', 'ip') + fields = ("id", "timestamp", "ip") read_only_fields = fields class SyncSessionSerializer(serializers.ModelSerializer): - class Meta: model = SyncSession - fields = ('id', 'start_timestamp', 'last_activity_timestamp', 'active', 'client_certificate', 'server_certificate', 'profile', 'connection_kind', 'connection_path', 'client_ip', 'server_ip', 'client_instance', 'server_instance') - read_only_fields = ('start_timestamp', 'last_activity_timestamp', 'active', 'client_certificate', 'connection_kind', 'client_ip', 'server_ip', 'client_instance',) + fields = ( + "id", + "start_timestamp", + "last_activity_timestamp", + "active", + "client_certificate", + "server_certificate", + "profile", + "connection_kind", + "connection_path", + "client_ip", + "server_ip", + "client_instance", + "server_instance", + ) + read_only_fields = ( + "start_timestamp", + "last_activity_timestamp", + "active", + "client_certificate", + "connection_kind", + "client_ip", + "server_ip", + "client_instance", + ) class TransferSessionSerializer(serializers.ModelSerializer): - class Meta: model = TransferSession - fields = ('id', 'start_timestamp', 'last_activity_timestamp', 'active', 'filter', 'push', 'records_transferred', 'records_total', 'sync_session', 'server_fsic', 'client_fsic',) - read_only_fields = ('start_timestamp', 'last_activity_timestamp', 'active', 'records_transferred',) + fields = ( + "id", + "start_timestamp", + "last_activity_timestamp", + "active", + "filter", + "push", + "records_transferred", + "records_total", 
+ "sync_session", + "server_fsic", + "client_fsic", + ) + read_only_fields = ( + "start_timestamp", + "last_activity_timestamp", + "active", + "records_transferred", + ) class InstanceIDSerializer(serializers.ModelSerializer): - class Meta: model = InstanceIDModel - fields = ('id', 'platform', 'hostname', 'sysversion', 'node_id', 'database', 'db_path', 'system_id') + fields = ( + "id", + "platform", + "hostname", + "sysversion", + "node_id", + "database", + "db_path", + "system_id", + ) read_only_fields = fields class RecordMaxCounterBufferSerializer(serializers.ModelSerializer): - class Meta: model = RecordMaxCounterBuffer - fields = ('transfer_session', 'model_uuid', 'instance_id', 'counter') + fields = ("transfer_session", "model_uuid", "instance_id", "counter") class BufferSerializer(serializers.ModelSerializer): @@ -76,4 +138,19 @@ class BufferSerializer(serializers.ModelSerializer): class Meta: model = Buffer - fields = ('serialized', 'deleted', 'last_saved_instance', 'last_saved_counter', 'hard_deleted', 'partition', 'source_id', 'model_name', 'conflicting_serialized_data', 'model_uuid', 'transfer_session', 'profile', 'rmcb_list', '_self_ref_fk') + fields = ( + "serialized", + "deleted", + "last_saved_instance", + "last_saved_counter", + "hard_deleted", + "partition", + "source_id", + "model_name", + "conflicting_serialized_data", + "model_uuid", + "transfer_session", + "profile", + "rmcb_list", + "_self_ref_fk", + ) diff --git a/morango/api/urls.py b/morango/api/urls.py index 252a25bb..a6b497d2 100644 --- a/morango/api/urls.py +++ b/morango/api/urls.py @@ -1,17 +1,25 @@ from rest_framework import routers -from .viewsets import (BufferViewSet, CertificateChainViewSet, - CertificateViewSet, MorangoInfoViewSet, NonceViewSet, - PublicKeyViewSet, SyncSessionViewSet, - TransferSessionViewSet) +from .viewsets import BufferViewSet +from .viewsets import CertificateChainViewSet +from .viewsets import CertificateViewSet +from .viewsets import MorangoInfoViewSet +from .viewsets import NonceViewSet +from .viewsets import PublicKeyViewSet +from .viewsets import SyncSessionViewSet +from .viewsets import TransferSessionViewSet router = routers.SimpleRouter() -router.register(r'certificates', CertificateViewSet, base_name="certificates") -router.register(r'certificatechain', CertificateChainViewSet, base_name="certificatechain") -router.register(r'nonces', NonceViewSet, base_name="nonces") -router.register(r'syncsessions', SyncSessionViewSet, base_name="syncsessions") -router.register(r'transfersessions', TransferSessionViewSet, base_name="transfersessions") -router.register(r'buffers', BufferViewSet, base_name="buffers") -router.register(r'morangoinfo', MorangoInfoViewSet, base_name="morangoinfo") -router.register(r'publickey', PublicKeyViewSet, base_name="publickey") +router.register(r"certificates", CertificateViewSet, base_name="certificates") +router.register( + r"certificatechain", CertificateChainViewSet, base_name="certificatechain" +) +router.register(r"nonces", NonceViewSet, base_name="nonces") +router.register(r"syncsessions", SyncSessionViewSet, base_name="syncsessions") +router.register( + r"transfersessions", TransferSessionViewSet, base_name="transfersessions" +) +router.register(r"buffers", BufferViewSet, base_name="buffers") +router.register(r"morangoinfo", MorangoInfoViewSet, base_name="morangoinfo") +router.register(r"publickey", PublicKeyViewSet, base_name="publickey") urlpatterns = router.urls diff --git a/morango/api/viewsets.py b/morango/api/viewsets.py index 
0f1b1fae..efede1f7 100644 --- a/morango/api/viewsets.py +++ b/morango/api/viewsets.py @@ -2,33 +2,40 @@ import platform import uuid -import morango from django.conf import settings from django.core.exceptions import ValidationError -from django.db import transaction from django.utils import timezone -from django.utils.six import iteritems from ipware.ip import get_ip -from morango.certificates import Filter -from morango.crypto import SharedKey -from morango.models import Buffer, DatabaseMaxCounter, InstanceIDModel, RecordMaxCounterBuffer -from morango.validation import validate_and_create_buffer_data -from morango.utils.sync_utils import (_dequeue_into_store, _queue_into_buffer, - _serialize_into_store) -from rest_framework import (decorators, mixins, pagination, response, status, - viewsets) - -from . import permissions, serializers -from .. import certificates, errors, models -from ..utils.register_models import _profile_models -from ..models import SyncableModel, TransferSession +from rest_framework import mixins +from rest_framework import pagination +from rest_framework import response +from rest_framework import status +from rest_framework import viewsets from rest_framework.parsers import JSONParser -from morango.util import CAPABILITIES + +import morango +from . import permissions +from . import serializers +from .. import certificates +from .. import errors +from .. import models +from ..models import TransferSession from morango.constants.capabilities import GZIP_BUFFER_POST +from morango.crypto import SharedKey +from morango.models import Buffer +from morango.models import DatabaseMaxCounter +from morango.models import InstanceIDModel +from morango.models import RecordMaxCounterBuffer +from morango.util import CAPABILITIES +from morango.utils.sync_utils import _dequeue_into_store +from morango.utils.sync_utils import _queue_into_buffer +from morango.utils.sync_utils import _serialize_into_store +from morango.validation import validate_and_create_buffer_data if GZIP_BUFFER_POST in CAPABILITIES: from morango.parsers import GzipParser + parsers = (GzipParser, JSONParser) else: parsers = (JSONParser,) @@ -44,25 +51,24 @@ def create(self, request): # verify the rest of the cert chain try: - models.Certificate.save_certificate_chain( - cert_chain, - ) + models.Certificate.save_certificate_chain(cert_chain) except (AssertionError, errors.MorangoCertificateError) as e: return response.Response( "Saving certificate chain has failed: {}".format(str(e)), - status=status.HTTP_403_FORBIDDEN + status=status.HTTP_403_FORBIDDEN, ) # create an in-memory instance of the cert from the serialized data and signature - certificate = models.Certificate.deserialize(client_cert["serialized"], client_cert["signature"]) + certificate = models.Certificate.deserialize( + client_cert["serialized"], client_cert["signature"] + ) # check if certificate's public key is in our list of shared keys try: sharedkey = SharedKey.objects.get(public_key=certificate.public_key) except SharedKey.DoesNotExist: return response.Response( - "Shared public key was not used", - status=status.HTTP_400_BAD_REQUEST + "Shared public key was not used", status=status.HTTP_400_BAD_REQUEST ) # set private key @@ -74,7 +80,7 @@ def create(self, request): except errors.MorangoNonceError: return response.Response( "Nonce (certificate's salt) is not valid", - status=status.HTTP_403_FORBIDDEN + status=status.HTTP_403_FORBIDDEN, ) # verify the certificate (scope is a subset, profiles match, etc) @@ -82,17 +88,20 @@ def create(self, request): 
certificate.check_certificate() except errors.MorangoCertificateError as e: return response.Response( - {"error_class": e.__class__.__name__, - "error_message": getattr(e, "message", (getattr(e, "args") or ("",))[0])}, - status=status.HTTP_400_BAD_REQUEST + { + "error_class": e.__class__.__name__, + "error_message": getattr( + e, "message", (getattr(e, "args") or ("",))[0] + ), + }, + status=status.HTTP_400_BAD_REQUEST, ) # we got this far, and everything looks good, so we can save the certificate certificate.save() return response.Response( - "Certificate chain has been saved", - status=status.HTTP_201_CREATED + "Certificate chain has been saved", status=status.HTTP_201_CREATED ) @@ -119,19 +128,20 @@ def create(self, request): try: certificate.full_clean() except ValidationError as e: - return response.Response( - e, - status=status.HTTP_400_BAD_REQUEST - ) + return response.Response(e, status=status.HTTP_400_BAD_REQUEST) # verify the certificate (scope is a subset, profiles match, etc) try: certificate.check_certificate() except errors.MorangoCertificateError as e: return response.Response( - {"error_class": e.__class__.__name__, - "error_message": getattr(e, "message", (getattr(e, "args") or ("",))[0])}, - status=status.HTTP_400_BAD_REQUEST + { + "error_class": e.__class__.__name__, + "error_message": getattr( + e, "message", (getattr(e, "args") or ("",))[0] + ), + }, + status=status.HTTP_400_BAD_REQUEST, ) # we got this far, and everything looks good, so we can save the certificate @@ -140,11 +150,13 @@ def create(self, request): # return a serialized copy of the signed certificate to the client return response.Response( serializers.CertificateSerializer(certificate).data, - status=status.HTTP_201_CREATED + status=status.HTTP_201_CREATED, ) else: - return response.Response(serialized_cert.errors, status=status.HTTP_400_BAD_REQUEST) + return response.Response( + serialized_cert.errors, status=status.HTTP_400_BAD_REQUEST + ) def get_queryset(self): @@ -161,11 +173,15 @@ def get_queryset(self): # if specified, filter by primary partition, and only include certs the server owns if "primary_partition" in params: target_cert = base_queryset.get(id=params["primary_partition"]) - return target_cert.get_descendants(include_self=True).exclude(_private_key=None) + return target_cert.get_descendants(include_self=True).exclude( + _private_key=None + ) # if specified, return the certificate chain for a certificate owned by the server if "ancestors_of" in params: - target_cert = base_queryset.exclude(_private_key=None).get(id=params["ancestors_of"]) + target_cert = base_queryset.exclude(_private_key=None).get( + id=params["ancestors_of"] + ) return target_cert.get_ancestors(include_self=True) except models.Certificate.DoesNotExist: @@ -181,11 +197,10 @@ class NonceViewSet(viewsets.ModelViewSet): serializer_class = serializers.NonceSerializer def create(self, request): - nonce = models.Nonce.objects.create(ip=get_ip(request)) + nonce = certificates.Nonce.objects.create(ip=get_ip(request)) return response.Response( - serializers.NonceSerializer(nonce).data, - status=status.HTTP_201_CREATED, + serializers.NonceSerializer(nonce).data, status=status.HTTP_201_CREATED ) @@ -201,36 +216,41 @@ def create(self, request): try: models.Certificate.save_certificate_chain( request.data.get("certificate_chain"), - expected_last_id=request.data.get("client_certificate_id") + expected_last_id=request.data.get("client_certificate_id"), ) except (AssertionError, errors.MorangoCertificateError): return 
response.Response( - "Saving certificate chain has failed", - status=status.HTTP_403_FORBIDDEN + "Saving certificate chain has failed", status=status.HTTP_403_FORBIDDEN ) # attempt to load the requested certificates try: - server_cert = models.Certificate.objects.get(id=request.data.get("server_certificate_id")) - client_cert = models.Certificate.objects.get(id=request.data.get("client_certificate_id")) + server_cert = models.Certificate.objects.get( + id=request.data.get("server_certificate_id") + ) + client_cert = models.Certificate.objects.get( + id=request.data.get("client_certificate_id") + ) except models.Certificate.DoesNotExist: return response.Response( "Requested certificate does not exist!", - status=status.HTTP_400_BAD_REQUEST + status=status.HTTP_400_BAD_REQUEST, ) if server_cert.profile != client_cert.profile: return response.Response( "Certificates must both be associated with the same profile", - status=status.HTTP_400_BAD_REQUEST + status=status.HTTP_400_BAD_REQUEST, ) # check that the nonce/id were properly signed - message = "{nonce}:{id}".format(nonce=request.data.get('nonce'), id=request.data.get('id')) + message = "{nonce}:{id}".format( + nonce=request.data.get("nonce"), id=request.data.get("id") + ) if not client_cert.verify(message, request.data["signature"]): return response.Response( "Client certificate failed to verify signature", - status=status.HTTP_403_FORBIDDEN + status=status.HTTP_403_FORBIDDEN, ) # check that the nonce is valid, and consume it so it can't be used again @@ -238,8 +258,7 @@ def create(self, request): certificates.Nonce.use_nonce(request.data["nonce"]) except errors.MorangoNonceError: return response.Response( - "Nonce is not valid", - status=status.HTTP_403_FORBIDDEN + "Nonce is not valid", status=status.HTTP_403_FORBIDDEN ) # build the data to be used for creation the syncsession @@ -254,10 +273,12 @@ def create(self, request): "profile": server_cert.profile, "connection_kind": "network", "connection_path": request.data.get("connection_path"), - "client_ip": get_ip(request) or '', - "server_ip": request.data.get('server_ip') or '', + "client_ip": get_ip(request) or "", + "server_ip": request.data.get("server_ip") or "", "client_instance": request.data.get("instance"), - "server_instance": json.dumps(serializers.InstanceIDSerializer(instance_id).data), + "server_instance": json.dumps( + serializers.InstanceIDSerializer(instance_id).data + ), } syncsession = models.SyncSession(**data) @@ -266,13 +287,10 @@ def create(self, request): resp_data = { "signature": server_cert.sign(message), - "server_instance": data["server_instance"] + "server_instance": data["server_instance"], } - return response.Response( - resp_data, - status=status.HTTP_201_CREATED, - ) + return response.Response(resp_data, status=status.HTTP_201_CREATED) def perform_destroy(self, syncsession): syncsession.active = False @@ -286,15 +304,17 @@ class TransferSessionViewSet(viewsets.ModelViewSet): permission_classes = (permissions.TransferSessionPermissions,) serializer_class = serializers.TransferSessionSerializer - def create(self, request): + def create(self, request): # noqa: C901 # attempt to load the requested syncsession try: - syncsession = models.SyncSession.objects.filter(active=True).get(id=request.data.get("sync_session_id")) + syncsession = models.SyncSession.objects.filter(active=True).get( + id=request.data.get("sync_session_id") + ) except models.SyncSession.DoesNotExist: return response.Response( "Requested syncsession does not exist or is no longer active!", - 
status=status.HTTP_400_BAD_REQUEST + status=status.HTTP_400_BAD_REQUEST, ) # a push is to transfer data from client to server; a pull is the inverse @@ -316,10 +336,7 @@ def create(self, request): if not requested_filter.is_subset_of(server_scope.write_filter): scope_error_msg = "Server certificate scope does not permit responding to pulls for the requested filter." if scope_error_msg: - return response.Response( - scope_error_msg, - status=status.HTTP_403_FORBIDDEN - ) + return response.Response(scope_error_msg, status=status.HTTP_403_FORBIDDEN) # build the data to be used for creating the transfersession data = { @@ -331,8 +348,8 @@ def create(self, request): "push": is_a_push, "records_total": request.data.get("records_total") if is_a_push else None, "sync_session": syncsession, - "client_fsic": request.data.get('client_fsic') or '{}', - "server_fsic": '{}', + "client_fsic": request.data.get("client_fsic") or "{}", + "server_fsic": "{}", } transfersession = models.TransferSession(**data) @@ -342,10 +359,14 @@ def create(self, request): # must update database max counters before calculating fsics if not is_a_push: - if getattr(settings, 'MORANGO_SERIALIZE_BEFORE_QUEUING', True): - _serialize_into_store(transfersession.sync_session.profile, filter=requested_filter) + if getattr(settings, "MORANGO_SERIALIZE_BEFORE_QUEUING", True): + _serialize_into_store( + transfersession.sync_session.profile, filter=requested_filter + ) - transfersession.server_fsic = json.dumps(DatabaseMaxCounter.calculate_filter_max_counters(requested_filter)) + transfersession.server_fsic = json.dumps( + DatabaseMaxCounter.calculate_filter_max_counters(requested_filter) + ) transfersession.save() if not is_a_push: @@ -353,7 +374,9 @@ def create(self, request): # queue records to get ready for pulling _queue_into_buffer(transfersession) # update records_total on transfer session object - records_total = Buffer.objects.filter(transfer_session=transfersession).count() + records_total = Buffer.objects.filter( + transfer_session=transfersession + ).count() transfersession.records_total = records_total transfersession.save() @@ -367,12 +390,16 @@ def perform_destroy(self, transfersession): # dequeue into store and then delete records _dequeue_into_store(transfersession) # update database max counters but use latest fsics on server - DatabaseMaxCounter.update_fsics(json.loads(transfersession.client_fsic), - certificates.Filter(transfersession.filter)) + DatabaseMaxCounter.update_fsics( + json.loads(transfersession.client_fsic), + certificates.Filter(transfersession.filter), + ) else: # if pull, then delete records that were queued Buffer.objects.filter(transfer_session=transfersession).delete() - RecordMaxCounterBuffer.objects.filter(transfer_session=transfersession).delete() + RecordMaxCounterBuffer.objects.filter( + transfer_session=transfersession + ).delete() transfersession.active = False transfersession.save() @@ -393,12 +420,12 @@ def create(self, request): if not transfer_session.push: return response.Response( "Specified TransferSession does not allow pushes.", - status=status.HTTP_403_FORBIDDEN + status=status.HTTP_403_FORBIDDEN, ) if len(set(rec["transfer_session"] for rec in data)) > 1: return response.Response( "All pushed records must be associated with the same TransferSession.", - status=status.HTTP_403_FORBIDDEN + status=status.HTTP_403_FORBIDDEN, ) validate_and_create_buffer_data(data, transfer_session) @@ -412,14 +439,15 @@ def get_queryset(self): class MorangoInfoViewSet(viewsets.ViewSet): - def 
retrieve(self, request, pk=None): (id_model, _) = InstanceIDModel.get_or_create_current_instance() - m_info = {'instance_hash': id_model.get_proquint(), - 'instance_id': id_model.id, - 'system_os': platform.system(), - 'version': morango.__version__, - 'capabilities': CAPABILITIES} + m_info = { + "instance_hash": id_model.get_proquint(), + "instance_id": id_model.id, + "system_os": platform.system(), + "version": morango.__version__, + "capabilities": CAPABILITIES, + } return response.Response(m_info) diff --git a/morango/apps.py b/morango/apps.py index d355fd62..9a8c7b60 100644 --- a/morango/apps.py +++ b/morango/apps.py @@ -4,6 +4,7 @@ from django.apps import AppConfig from django.db import connection + from morango.util import max_parameter_substitution from morango.utils.register_models import add_syncable_models @@ -11,13 +12,13 @@ class MorangoConfig(AppConfig): - name = 'morango' - verbose_name = 'Morango' + name = "morango" + verbose_name = "Morango" def ready(self): from .signals import add_to_deleted_models # noqa: F401 # add models to be synced by profile add_syncable_models() - if 'sqlite' in connection.vendor: + if "sqlite" in connection.vendor: max_parameter_substitution() diff --git a/morango/certificates.py b/morango/certificates.py index 9e8d7f81..72346635 100644 --- a/morango/certificates.py +++ b/morango/certificates.py @@ -1,19 +1,27 @@ from __future__ import unicode_literals import json -import mptt -import mptt.models -from django.utils.six import string_types import string +import mptt.models from django.core.management import call_command -from django.db import models, transaction +from django.db import models +from django.db import transaction from django.utils import timezone +from django.utils.six import string_types from future.utils import python_2_unicode_compatible -from .crypto import Key, PrivateKeyField, PublicKeyField +from .crypto import Key +from .crypto import PrivateKeyField +from .crypto import PublicKeyField +from .errors import CertificateIDInvalid +from .errors import CertificateProfileInvalid +from .errors import CertificateRootScopeInvalid +from .errors import CertificateScopeNotSubset +from .errors import CertificateSignatureInvalid +from .errors import NonceDoesNotExist +from .errors import NonceExpired from .utils.uuids import UUIDModelMixin -from .errors import CertificateScopeNotSubset, CertificateSignatureInvalid, CertificateIDInvalid, CertificateProfileInvalid, CertificateRootScopeInvalid, NonceDoesNotExist, NonceExpired @python_2_unicode_compatible @@ -29,7 +37,9 @@ class Certificate(mptt.models.MPTTModel, UUIDModelMixin): # scope of this certificate, and version of the scope, along with associated params scope_definition = models.ForeignKey("ScopeDefinition") scope_version = models.IntegerField() - scope_params = models.TextField() # JSON dict of values to insert into scope definitions + scope_params = ( + models.TextField() + ) # JSON dict of values to insert into scope definitions # track the certificate's public key so we can verify any certificates it signs public_key = PublicKeyField() @@ -54,7 +64,9 @@ def private_key(self): def private_key(self, value): self._private_key = value if value and not self.public_key: - self.public_key = Key(public_key_string=self._private_key.get_public_key_string()) + self.public_key = Key( + public_key_string=self._private_key.get_public_key_string() + ) @classmethod def generate_root_certificate(cls, scope_def_id, **extra_scope_params): @@ -70,11 +82,15 @@ def generate_root_certificate(cls, 
scope_def_id, **extra_scope_params): cert.scope_version = scope_def.version cert.profile = scope_def.profile primary_scope_param_key = scope_def.primary_scope_param_key - assert primary_scope_param_key, "Root cert can only be created for ScopeDefinition that has primary_scope_param_key defined" + assert ( + primary_scope_param_key + ), "Root cert can only be created for ScopeDefinition that has primary_scope_param_key defined" # generate a key and extract the public key component cert.private_key = Key() - cert.public_key = Key(public_key_string=cert.private_key.get_public_key_string()) + cert.public_key = Key( + public_key_string=cert.private_key.get_public_key_string() + ) # calculate the certificate's ID on the basis of the profile and public key cert.id = cert.calculate_uuid() @@ -116,7 +132,7 @@ def deserialize(cls, serialized, signature): id=data["id"], parent_id=data["parent_id"], profile=data["profile"], - salt=data.get('salt') or '', + salt=data.get("salt") or "", scope_definition_id=data["scope_definition_id"], scope_version=data["scope_version"], scope_params=data["scope_params"], @@ -138,7 +154,11 @@ def check_certificate(self): # check that the certificate's ID is properly calculated if self.id != self.calculate_uuid(): - raise CertificateIDInvalid("Certificate ID is {} but should be {}".format(self.id, self.calculate_uuid())) + raise CertificateIDInvalid( + "Certificate ID is {} but should be {}".format( + self.id, self.calculate_uuid() + ) + ) if not self.parent: # self-signed root certificate # check that the certificate is properly self-signed @@ -148,7 +168,11 @@ def check_certificate(self): scope = self.get_scope() for item in scope.read_filter + scope.write_filter: if not item.startswith(self.id): - raise CertificateRootScopeInvalid("Scope entry {} does not start with primary partition {}".format(item, self.id)) + raise CertificateRootScopeInvalid( + "Scope entry {} does not start with primary partition {}".format( + item, self.id + ) + ) else: # non-root child certificate # check that the certificate is properly signed by its parent if not self.parent.verify(self.serialized, self.signature): @@ -158,8 +182,11 @@ def check_certificate(self): raise CertificateScopeNotSubset() # check that certificate is for same profile as parent if self.profile != self.parent.profile: - raise CertificateProfileInvalid("Certificate profile is {} but parent's is {}" \ - .format(self.profile, self.parent.profile)) + raise CertificateProfileInvalid( + "Certificate profile is {} but parent's is {}".format( + self.profile, self.parent.profile + ) + ) @classmethod def save_certificate_chain(cls, cert_chain, expected_last_id=None): @@ -192,7 +219,9 @@ def save_certificate_chain(cls, cert_chain, expected_last_id=None): if len(cert_chain) > 1: cls.save_certificate_chain(cert_chain[:-1], expected_last_id=cert.parent_id) else: - assert not cert.parent_id, "First cert in chain must be a root cert (no parent)" + assert ( + not cert.parent_id + ), "First cert in chain must be a root cert (no parent)" # ensure the certificate checks out (now that we know its parent, if any, is saved) cert.check_certificate() @@ -203,7 +232,9 @@ def save_certificate_chain(cls, cert_chain, expected_last_id=None): return cert def sign(self, value): - assert self.private_key, "Can only sign using certificates that have private keys" + assert ( + self.private_key + ), "Can only sign using certificates that have private keys" return self.private_key.sign(value) def verify(self, value, signature): @@ -292,7 +323,6 @@ def 
get_description(self, params): @python_2_unicode_compatible class Filter(object): - def __init__(self, template, params={}): # ensure params have been deserialized if isinstance(params, string_types): @@ -340,7 +370,6 @@ def __len__(self): class Scope(object): - def __init__(self, definition, params): # turn the scope definition filter templates into Filter objects rw_filter = Filter(definition.read_write_filter_template, params) @@ -358,4 +387,7 @@ def __le__(self, other): return self.is_subset_of(other) def __eq__(self, other): - return self.read_filter == other.read_filter and self.write_filter == other.write_filter + return ( + self.read_filter == other.read_filter + and self.write_filter == other.write_filter + ) diff --git a/morango/constants/api_urls.py b/morango/constants/api_urls.py index b3cca262..34e53ecd 100644 --- a/morango/constants/api_urls.py +++ b/morango/constants/api_urls.py @@ -2,7 +2,7 @@ This module contains constants representing the urls morango will be using for making api requests. """ -BASE_API = 'api/morango/v1/' +BASE_API = "api/morango/v1/" CERTIFICATE = BASE_API + "certificates/" CERTIFICATE_CHAIN = BASE_API + "certificatechain/" @@ -11,4 +11,4 @@ TRANSFERSESSION = BASE_API + "transfersessions/" BUFFER = BASE_API + "buffers/" PUBLIC_KEY = BASE_API + "publickey/" -INFO = BASE_API + 'morangoinfo/1/' +INFO = BASE_API + "morangoinfo/1/" diff --git a/morango/constants/capabilities.py b/morango/constants/capabilities.py index ed7e3f2e..a48d5b35 100644 --- a/morango/constants/capabilities.py +++ b/morango/constants/capabilities.py @@ -1 +1 @@ -GZIP_BUFFER_POST = 'GZIP_BUFFER_POST' +GZIP_BUFFER_POST = "GZIP_BUFFER_POST" diff --git a/morango/constants/file.py b/morango/constants/file.py index ec390203..2742e4ea 100644 --- a/morango/constants/file.py +++ b/morango/constants/file.py @@ -1,4 +1,5 @@ import os -SQLITE_VARIABLE_FILE_CACHE = os.path.join(os.path.expanduser('~'), - 'SQLITE_MAX_VARIABLE_NUMBER.cache') +SQLITE_VARIABLE_FILE_CACHE = os.path.join( + os.path.expanduser("~"), "SQLITE_MAX_VARIABLE_NUMBER.cache" +) diff --git a/morango/controller.py b/morango/controller.py index 5ff06fa6..5986742e 100644 --- a/morango/controller.py +++ b/morango/controller.py @@ -1,5 +1,6 @@ from morango.syncsession import NetworkSyncConnection -from morango.utils.sync_utils import _serialize_into_store, _deserialize_from_store +from morango.utils.sync_utils import _deserialize_from_store +from morango.utils.sync_utils import _serialize_into_store def _self_referential_fk(klass_model): @@ -14,7 +15,6 @@ def _self_referential_fk(klass_model): class MorangoProfileController(object): - def __init__(self, profile): assert profile, "profile needs to be defined." 
self.profile = profile diff --git a/morango/crypto.py b/morango/crypto.py index 0df60752..13a946b5 100644 --- a/morango/crypto.py +++ b/morango/crypto.py @@ -1,28 +1,38 @@ import hashlib import re import sys -import rsa as PYRSA -from django.db import models, transaction +import rsa as PYRSA +from django.db import models +from django.db import transaction try: from M2Crypto import RSA as M2RSA from M2Crypto import BIO as M2BIO + M2CRYPTO_EXISTS = True -except: +except ImportError: M2CRYPTO_EXISTS = False try: from cryptography.hazmat.backends import default_backend from cryptography import exceptions as crypto_exceptions + crypto_backend = default_backend() - from cryptography.hazmat.primitives.asymmetric import rsa as crypto_rsa, padding as crypto_padding - from cryptography.hazmat.primitives import serialization as crypto_serialization, hashes as crypto_hashes + from cryptography.hazmat.primitives.asymmetric import ( + rsa as crypto_rsa, + padding as crypto_padding, + ) + from cryptography.hazmat.primitives import ( + serialization as crypto_serialization, + hashes as crypto_hashes, + ) + # Ignore cryptography versions that do not support the 'sign' method - if not hasattr(crypto_rsa.RSAPrivateKey, 'sign'): + if not hasattr(crypto_rsa.RSAPrivateKey, "sign"): raise ImportError CRYPTOGRAPHY_EXISTS = True -except: +except ImportError: CRYPTOGRAPHY_EXISTS = False if sys.version_info[0] < 3: @@ -35,7 +45,6 @@ class BaseKey(object): - def __init__(self, private_key_string=None, public_key_string=None): if private_key_string: @@ -51,7 +60,9 @@ def __init__(self, private_key_string=None, public_key_string=None): def sign(self, message): if not self._private_key: - raise Exception("Key object does not have a private key defined, and thus cannot be used to sign.") + raise Exception( + "Key object does not have a private key defined, and thus cannot be used to sign." + ) message = self.ensure_bytes(message) @@ -68,7 +79,9 @@ def verify(self, message, signature): # ensure we have a public key we can use use to verify if not self._public_key: - raise Exception("Key object does not have public key defined, and thus cannot be used to verify.") + raise Exception( + "Key object does not have public key defined, and thus cannot be used to verify." 
+ ) return self._verify(message, signature) @@ -84,7 +97,7 @@ def get_public_key_string(self): # remove the PKCS#8 header so the key won't cause problems for older versions if pem_string.startswith(PKCS8_HEADER): - pem_string = pem_string[len(PKCS8_HEADER):] + pem_string = pem_string[len(PKCS8_HEADER) :] # remove newlines, to ensure consistency pem_string = pem_string.replace("\n", "") @@ -107,21 +120,32 @@ def set_private_key_string(self, private_key_string): private_key_string = self.ensure_unicode(private_key_string) - private_key_string = self._add_pem_headers(private_key_string, "RSA PRIVATE KEY") + private_key_string = self._add_pem_headers( + private_key_string, "RSA PRIVATE KEY" + ) self._set_private_key_string(private_key_string) def _remove_pem_headers(self, pem_string): if not pem_string.strip().startswith("-----"): return pem_string - return "\n".join([line for line in pem_string.split("\n") if line and not line.startswith("---")]) + return "\n".join( + [ + line + for line in pem_string.split("\n") + if line and not line.startswith("---") + ] + ) def _add_pem_headers(self, pem_string, header_string): context = { "key": self._remove_pem_headers(pem_string), "header_string": header_string, } - return "-----BEGIN %(header_string)s-----\n%(key)s\n-----END %(header_string)s-----" % context + return ( + "-----BEGIN %(header_string)s-----\n%(key)s\n-----END %(header_string)s-----" + % context + ) def ensure_bytes(self, message): try: @@ -147,7 +171,7 @@ class PythonRSAKey(BaseKey): def generate_new_key(self, keysize=2048): try: self._public_key, self._private_key = PYRSA.newkeys(keysize, poolsize=4) - except: + except: # noqa: E722 self._public_key, self._private_key = PYRSA.newkeys(keysize) def _sign(self, message): @@ -172,7 +196,7 @@ def _set_public_key_string(self, public_key_string): # remove PKCS#8 header if it exists if public_key_string.startswith(PKCS8_HEADER): - public_key_string = public_key_string[len(PKCS8_HEADER):] + public_key_string = public_key_string[len(PKCS8_HEADER) :] # add the appropriate PEM header/footer public_key_string = self._add_pem_headers(public_key_string, "RSA PUBLIC KEY") @@ -190,7 +214,7 @@ class M2CryptoKey(BaseKey): _private_key = None def generate_new_key(self, keysize=2048): - self._private_key = M2RSA.gen_key(keysize, 65537, lambda x,y,z: None) + self._private_key = M2RSA.gen_key(keysize, 65537, lambda x, y, z: None) self._public_key = M2RSA.RSA_pub(self._private_key.rsa) def _sign(self, message): @@ -199,7 +223,9 @@ def _sign(self, message): def _verify(self, message, signature): try: - self._public_key.verify(hashlib.sha256(message).digest(), signature, algo="sha256") + self._public_key.verify( + hashlib.sha256(message).digest(), signature, algo="sha256" + ) return True except M2RSA.RSAError: return False @@ -223,7 +249,9 @@ def _set_public_key_string(self, public_key_string): # add the appropriate PEM header/footer public_key_string = self._add_pem_headers(public_key_string, "PUBLIC KEY") - self._public_key = M2RSA.load_pub_key_bio(M2BIO.MemoryBuffer(self.ensure_bytes(public_key_string))) + self._public_key = M2RSA.load_pub_key_bio( + M2BIO.MemoryBuffer(self.ensure_bytes(public_key_string)) + ) def _set_private_key_string(self, private_key_string): self._private_key = M2RSA.load_key_string(self.ensure_bytes(private_key_string)) @@ -237,18 +265,20 @@ class CryptographyKey(BaseKey): def generate_new_key(self, keysize=2048): self._private_key = crypto_rsa.generate_private_key( - public_exponent=65537, - key_size=keysize, - 
backend=crypto_backend, + public_exponent=65537, key_size=keysize, backend=crypto_backend ) self._public_key = self._private_key.public_key() def _sign(self, message): - return self._private_key.sign(message, crypto_padding.PKCS1v15(), crypto_hashes.SHA256()) + return self._private_key.sign( + message, crypto_padding.PKCS1v15(), crypto_hashes.SHA256() + ) def _verify(self, message, signature): try: - self._public_key.verify(signature, message, crypto_padding.PKCS1v15(), crypto_hashes.SHA256()) + self._public_key.verify( + signature, message, crypto_padding.PKCS1v15(), crypto_hashes.SHA256() + ) return True except crypto_exceptions.InvalidSignature: return False @@ -280,26 +310,26 @@ def _set_public_key_string(self, public_key_string): public_key_string = self._add_pem_headers(public_key_string, "PUBLIC KEY") self._public_key = crypto_serialization.load_pem_public_key( - self.ensure_bytes(public_key_string), - backend=crypto_backend, + self.ensure_bytes(public_key_string), backend=crypto_backend ) def _set_private_key_string(self, private_key_string): self._private_key = crypto_serialization.load_pem_private_key( - self.ensure_bytes(private_key_string), - password=None, - backend=crypto_backend, + self.ensure_bytes(private_key_string), password=None, backend=crypto_backend ) self._public_key = self._private_key.public_key() # alias the most-preferred key wrapper class we have available as `Key` -Key = CryptographyKey if CRYPTOGRAPHY_EXISTS else (M2CryptoKey if M2CRYPTO_EXISTS else PythonRSAKey) +Key = ( + CryptographyKey + if CRYPTOGRAPHY_EXISTS + else (M2CryptoKey if M2CRYPTO_EXISTS else PythonRSAKey) +) class RSAKeyBaseField(models.TextField): - def __init__(self, *args, **kwargs): kwargs["max_length"] = 1000 super(RSAKeyBaseField, self).__init__(*args, **kwargs) @@ -311,7 +341,6 @@ def deconstruct(self): class PublicKeyField(RSAKeyBaseField): - def from_db_value(self, value, expression, connection, context): if not value: return None @@ -331,7 +360,6 @@ def get_prep_value(self, value): class PrivateKeyField(RSAKeyBaseField): - def from_db_value(self, value, expression, connection, context): if not value: return None @@ -365,14 +393,14 @@ def get_or_create_shared_key(cls, force_new=False): with transaction.atomic(): SharedKey.objects.filter(current=True).update(current=False) key = Key() - return SharedKey.objects.create(public_key=key, - private_key=key, - current=True) + return SharedKey.objects.create( + public_key=key, private_key=key, current=True + ) # create a new shared key if one doesn't exist try: return SharedKey.objects.get(current=True) except SharedKey.DoesNotExist: key = Key() - return SharedKey.objects.create(public_key=key, - private_key=key, - current=True) + return SharedKey.objects.create( + public_key=key, private_key=key, current=True + ) diff --git a/morango/migrations/0001_initial.py b/morango/migrations/0001_initial.py index d39e66c5..83bcd835 100644 --- a/morango/migrations/0001_initial.py +++ b/morango/migrations/0001_initial.py @@ -2,9 +2,11 @@ # Generated by Django 1.9 on 2017-05-08 23:42 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion import django.utils.timezone +from django.db import migrations +from django.db import models + import morango.crypto import morango.utils.uuids @@ -13,217 +15,301 @@ class Migration(migrations.Migration): initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='Buffer', + name="Buffer", fields=[ - ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('serialized', models.TextField(blank=True)), - ('deleted', models.BooleanField(default=False)), - ('last_saved_instance', models.UUIDField()), - ('last_saved_counter', models.IntegerField()), - ('model_name', models.CharField(max_length=40)), - ('profile', models.CharField(max_length=40)), - ('partition', models.TextField()), - ('model_uuid', morango.utils.uuids.UUIDField()), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("serialized", models.TextField(blank=True)), + ("deleted", models.BooleanField(default=False)), + ("last_saved_instance", models.UUIDField()), + ("last_saved_counter", models.IntegerField()), + ("model_name", models.CharField(max_length=40)), + ("profile", models.CharField(max_length=40)), + ("partition", models.TextField()), + ("model_uuid", morango.utils.uuids.UUIDField()), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.CreateModel( - name='Certificate', + name="Certificate", fields=[ - ('id', morango.utils.uuids.UUIDField(primary_key=True, serialize=False)), - ('profile', models.CharField(max_length=20)), - ('scope_version', models.IntegerField()), - ('scope_params', models.TextField()), - ('public_key', morango.crypto.PublicKeyField()), - ('serialized', models.TextField()), - ('signature', models.TextField()), - ('private_key', morango.crypto.PrivateKeyField(blank=True, null=True)), - ('lft', models.PositiveIntegerField(db_index=True, editable=False)), - ('rght', models.PositiveIntegerField(db_index=True, editable=False)), - ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)), - ('level', models.PositiveIntegerField(db_index=True, editable=False)), - ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='morango.Certificate')), + ( + "id", + morango.utils.uuids.UUIDField(primary_key=True, serialize=False), + ), + ("profile", models.CharField(max_length=20)), + ("scope_version", models.IntegerField()), + ("scope_params", models.TextField()), + ("public_key", morango.crypto.PublicKeyField()), + ("serialized", models.TextField()), + ("signature", models.TextField()), + ("private_key", morango.crypto.PrivateKeyField(blank=True, null=True)), + ("lft", models.PositiveIntegerField(db_index=True, editable=False)), + ("rght", models.PositiveIntegerField(db_index=True, editable=False)), + ("tree_id", models.PositiveIntegerField(db_index=True, editable=False)), + ("level", models.PositiveIntegerField(db_index=True, editable=False)), + ( + "parent", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="morango.Certificate", + ), + ), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.CreateModel( - name='CertificateModel', + name="CertificateModel", fields=[ - ('signature', models.CharField(max_length=64, primary_key=True, serialize=False)), - ('certificate', models.TextField()), - ('issuer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.CertificateModel')), + ( + "signature", + models.CharField(max_length=64, primary_key=True, serialize=False), + ), + ("certificate", models.TextField()), + ( + "issuer", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="morango.CertificateModel", + ), + ), ], ), migrations.CreateModel( - name='DatabaseIDModel', + name="DatabaseIDModel", 
fields=[ - ('id', morango.utils.uuids.UUIDField(primary_key=True, serialize=False)), - ('current', models.BooleanField(default=True)), - ('date_generated', models.DateTimeField(default=django.utils.timezone.now)), - ('initial_instance_id', models.CharField(blank=True, max_length=32)), + ( + "id", + morango.utils.uuids.UUIDField(primary_key=True, serialize=False), + ), + ("current", models.BooleanField(default=True)), + ( + "date_generated", + models.DateTimeField(default=django.utils.timezone.now), + ), + ("initial_instance_id", models.CharField(blank=True, max_length=32)), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.CreateModel( - name='DatabaseMaxCounter', + name="DatabaseMaxCounter", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('instance_id', morango.utils.uuids.UUIDField()), - ('counter', models.IntegerField()), - ('filter', models.TextField()), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("instance_id", morango.utils.uuids.UUIDField()), + ("counter", models.IntegerField()), + ("filter", models.TextField()), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.CreateModel( - name='DeletedModels', + name="DeletedModels", fields=[ - ('id', morango.utils.uuids.UUIDField(primary_key=True, serialize=False)), - ('profile', models.CharField(max_length=40)), + ( + "id", + morango.utils.uuids.UUIDField(primary_key=True, serialize=False), + ), + ("profile", models.CharField(max_length=40)), ], ), migrations.CreateModel( - name='InstanceIDModel', + name="InstanceIDModel", fields=[ - ('id', morango.utils.uuids.UUIDField(primary_key=True, serialize=False)), - ('platform', models.TextField()), - ('hostname', models.TextField()), - ('sysversion', models.TextField()), - ('macaddress', models.CharField(blank=True, max_length=20)), - ('counter', models.IntegerField(default=0)), - ('current', models.BooleanField(default=True)), - ('db_path', models.CharField(max_length=1000)), - ('database', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.DatabaseIDModel')), + ( + "id", + morango.utils.uuids.UUIDField(primary_key=True, serialize=False), + ), + ("platform", models.TextField()), + ("hostname", models.TextField()), + ("sysversion", models.TextField()), + ("macaddress", models.CharField(blank=True, max_length=20)), + ("counter", models.IntegerField(default=0)), + ("current", models.BooleanField(default=True)), + ("db_path", models.CharField(max_length=1000)), + ( + "database", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="morango.DatabaseIDModel", + ), + ), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.CreateModel( - name='RecordMaxCounter', + name="RecordMaxCounter", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('instance_id', morango.utils.uuids.UUIDField()), - ('counter', models.IntegerField()), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("instance_id", morango.utils.uuids.UUIDField()), + ("counter", models.IntegerField()), ], ), migrations.CreateModel( - name='RecordMaxCounterBuffer', + name="RecordMaxCounterBuffer", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('instance_id', 
morango.utils.uuids.UUIDField()), - ('counter', models.IntegerField()), - ('model_uuid', morango.utils.uuids.UUIDField()), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("instance_id", morango.utils.uuids.UUIDField()), + ("counter", models.IntegerField()), + ("model_uuid", morango.utils.uuids.UUIDField()), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.CreateModel( - name='ScopeDefinition', + name="ScopeDefinition", fields=[ - ('profile', models.CharField(max_length=20)), - ('version', models.IntegerField()), - ('scope_id', models.CharField(max_length=20, primary_key=True, serialize=False)), - ('description', models.TextField()), - ('read_scope_def', models.TextField()), - ('write_scope_def', models.TextField()), - ('read_write_scope_def', models.TextField()), - ('serialized', models.TextField()), - ('signature', models.TextField()), + ("profile", models.CharField(max_length=20)), + ("version", models.IntegerField()), + ( + "scope_id", + models.CharField(max_length=20, primary_key=True, serialize=False), + ), + ("description", models.TextField()), + ("read_scope_def", models.TextField()), + ("write_scope_def", models.TextField()), + ("read_write_scope_def", models.TextField()), + ("serialized", models.TextField()), + ("signature", models.TextField()), ], ), migrations.CreateModel( - name='Store', + name="Store", fields=[ - ('serialized', models.TextField(blank=True)), - ('deleted', models.BooleanField(default=False)), - ('last_saved_instance', models.UUIDField()), - ('last_saved_counter', models.IntegerField()), - ('model_name', models.CharField(max_length=40)), - ('profile', models.CharField(max_length=40)), - ('partition', models.TextField()), - ('id', morango.utils.uuids.UUIDField(primary_key=True, serialize=False)), + ("serialized", models.TextField(blank=True)), + ("deleted", models.BooleanField(default=False)), + ("last_saved_instance", models.UUIDField()), + ("last_saved_counter", models.IntegerField()), + ("model_name", models.CharField(max_length=40)), + ("profile", models.CharField(max_length=40)), + ("partition", models.TextField()), + ( + "id", + morango.utils.uuids.UUIDField(primary_key=True, serialize=False), + ), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.CreateModel( - name='SyncSession', + name="SyncSession", fields=[ - ('id', models.UUIDField(primary_key=True, serialize=False)), - ('start_timestamp', models.DateTimeField(default=django.utils.timezone.now)), - ('last_activity_timestamp', models.DateTimeField(blank=True)), - ('local_scope', models.TextField()), - ('remote_scope', models.TextField()), - ('host', models.CharField(max_length=255)), + ("id", models.UUIDField(primary_key=True, serialize=False)), + ( + "start_timestamp", + models.DateTimeField(default=django.utils.timezone.now), + ), + ("last_activity_timestamp", models.DateTimeField(blank=True)), + ("local_scope", models.TextField()), + ("remote_scope", models.TextField()), + ("host", models.CharField(max_length=255)), ], ), migrations.CreateModel( - name='TransferSession', + name="TransferSession", fields=[ - ('id', models.UUIDField(primary_key=True, serialize=False)), - ('filter', models.TextField()), - ('incoming', models.BooleanField()), - ('active', models.BooleanField(default=True)), - ('chunksize', models.IntegerField(default=500)), - ('records_remaining', models.IntegerField()), - ('records_total', models.IntegerField()), - ('sync_session', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.SyncSession')), + ("id", models.UUIDField(primary_key=True, serialize=False)), + ("filter", models.TextField()), + ("incoming", models.BooleanField()), + ("active", models.BooleanField(default=True)), + ("chunksize", models.IntegerField(default=500)), + ("records_remaining", models.IntegerField()), + ("records_total", models.IntegerField()), + ( + "sync_session", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="morango.SyncSession", + ), + ), ], ), migrations.CreateModel( - name='TrustedKey', + name="TrustedKey", fields=[ - ('id', morango.utils.uuids.UUIDField(primary_key=True, serialize=False)), - ('public_key', morango.crypto.PublicKeyField()), - ('notes', models.TextField(blank=True)), - ('revoked', models.BooleanField(default=False)), + ( + "id", + morango.utils.uuids.UUIDField(primary_key=True, serialize=False), + ), + ("public_key", morango.crypto.PublicKeyField()), + ("notes", models.TextField(blank=True)), + ("revoked", models.BooleanField(default=False)), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.AddField( - model_name='scopedefinition', - name='key', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.TrustedKey'), + model_name="scopedefinition", + name="key", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="morango.TrustedKey" + ), ), migrations.AddField( - model_name='recordmaxcounterbuffer', - name='transfer_session', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.TransferSession'), + model_name="recordmaxcounterbuffer", + name="transfer_session", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="morango.TransferSession", + ), ), migrations.AddField( - model_name='recordmaxcounter', - name='store_model', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.Store'), + model_name="recordmaxcounter", + name="store_model", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="morango.Store" + ), ), migrations.AddField( - model_name='certificate', - name='scope_definition', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.ScopeDefinition'), + model_name="certificate", + name="scope_definition", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="morango.ScopeDefinition", + ), ), migrations.AddField( - model_name='buffer', - name='transfer_session', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='morango.TransferSession'), + model_name="buffer", + name="transfer_session", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="morango.TransferSession", + ), ), migrations.AlterUniqueTogether( - name='recordmaxcounter', - unique_together=set([('store_model', 'instance_id')]), + name="recordmaxcounter", + unique_together=set([("store_model", "instance_id")]), ), ] diff --git a/morango/migrations/0002_auto_20170511_0400.py b/morango/migrations/0002_auto_20170511_0400.py index cec8d737..cb8b5e93 100644 --- a/morango/migrations/0002_auto_20170511_0400.py +++ b/morango/migrations/0002_auto_20170511_0400.py @@ -2,69 +2,70 @@ # Generated by Django 1.9.7 on 2017-05-11 04:00 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.manager +from django.db import migrations +from django.db import models + import 
morango.utils.uuids class Migration(migrations.Migration): - dependencies = [ - ('morango', '0001_initial'), - ] + dependencies = [("morango", "0001_initial")] operations = [ - migrations.RemoveField( - model_name='certificatemodel', - name='issuer', - ), + migrations.RemoveField(model_name="certificatemodel", name="issuer"), migrations.AlterModelManagers( - name='certificate', - managers=[ - ('objects', django.db.models.manager.Manager()), - ], + name="certificate", + managers=[("objects", django.db.models.manager.Manager())], ), migrations.AddField( - model_name='buffer', - name='conflicting_serialized_data', + model_name="buffer", + name="conflicting_serialized_data", field=models.TextField(blank=True), ), migrations.AddField( - model_name='store', - name='conflicting_serialized_data', + model_name="store", + name="conflicting_serialized_data", field=models.TextField(blank=True), ), migrations.AlterField( - model_name='buffer', - name='last_saved_instance', + model_name="buffer", + name="last_saved_instance", field=morango.utils.uuids.UUIDField(), ), migrations.AlterField( - model_name='certificate', - name='id', - field=morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False), + model_name="certificate", + name="id", + field=morango.utils.uuids.UUIDField( + editable=False, primary_key=True, serialize=False + ), ), migrations.AlterField( - model_name='databaseidmodel', - name='id', - field=morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False), + model_name="databaseidmodel", + name="id", + field=morango.utils.uuids.UUIDField( + editable=False, primary_key=True, serialize=False + ), ), migrations.AlterField( - model_name='instanceidmodel', - name='id', - field=morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False), + model_name="instanceidmodel", + name="id", + field=morango.utils.uuids.UUIDField( + editable=False, primary_key=True, serialize=False + ), ), migrations.AlterField( - model_name='store', - name='last_saved_instance', + model_name="store", + name="last_saved_instance", field=morango.utils.uuids.UUIDField(), ), migrations.AlterField( - model_name='trustedkey', - name='id', - field=morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False), - ), - migrations.DeleteModel( - name='CertificateModel', + model_name="trustedkey", + name="id", + field=morango.utils.uuids.UUIDField( + editable=False, primary_key=True, serialize=False + ), ), + migrations.DeleteModel(name="CertificateModel"), ] diff --git a/morango/migrations/0003_auto_20170519_0543.py b/morango/migrations/0003_auto_20170519_0543.py index 7d52a367..0d164f1a 100644 --- a/morango/migrations/0003_auto_20170519_0543.py +++ b/morango/migrations/0003_auto_20170519_0543.py @@ -2,29 +2,22 @@ # Generated by Django 1.9 on 2017-05-19 05:43 from __future__ import unicode_literals -from django.db import migrations, models +from django.db import migrations +from django.db import models class Migration(migrations.Migration): - dependencies = [ - ('morango', '0002_auto_20170511_0400'), - ] + dependencies = [("morango", "0002_auto_20170511_0400")] operations = [ - migrations.AlterModelManagers( - name='certificate', - managers=[ - ], - ), + migrations.AlterModelManagers(name="certificate", managers=[]), migrations.RenameField( - model_name='scopedefinition', - old_name='scope_id', - new_name='id', + model_name="scopedefinition", old_name="scope_id", new_name="id" ), migrations.AddField( - model_name='scopedefinition', - 
name='primary_scope_param_key', + model_name="scopedefinition", + name="primary_scope_param_key", field=models.CharField(blank=True, max_length=20), ), ] diff --git a/morango/migrations/0004_auto_20170520_2112.py b/morango/migrations/0004_auto_20170520_2112.py index 12ad49f2..b260a16d 100644 --- a/morango/migrations/0004_auto_20170520_2112.py +++ b/morango/migrations/0004_auto_20170520_2112.py @@ -7,24 +7,11 @@ class Migration(migrations.Migration): - dependencies = [ - ('morango', '0003_auto_20170519_0543'), - ] + dependencies = [("morango", "0003_auto_20170519_0543")] operations = [ - migrations.RemoveField( - model_name='scopedefinition', - name='key', - ), - migrations.RemoveField( - model_name='scopedefinition', - name='serialized', - ), - migrations.RemoveField( - model_name='scopedefinition', - name='signature', - ), - migrations.DeleteModel( - name='TrustedKey', - ), + migrations.RemoveField(model_name="scopedefinition", name="key"), + migrations.RemoveField(model_name="scopedefinition", name="serialized"), + migrations.RemoveField(model_name="scopedefinition", name="signature"), + migrations.DeleteModel(name="TrustedKey"), ] diff --git a/morango/migrations/0005_auto_20170629_2139.py b/morango/migrations/0005_auto_20170629_2139.py index 1f5da6c7..7c8bcc15 100644 --- a/morango/migrations/0005_auto_20170629_2139.py +++ b/morango/migrations/0005_auto_20170629_2139.py @@ -3,24 +3,23 @@ from __future__ import unicode_literals from django.db import migrations + import morango.crypto class Migration(migrations.Migration): - dependencies = [ - ('morango', '0004_auto_20170520_2112'), - ] + dependencies = [("morango", "0004_auto_20170520_2112")] operations = [ migrations.RenameField( - model_name='certificate', - old_name='private_key', - new_name='_private_key', + model_name="certificate", old_name="private_key", new_name="_private_key" ), migrations.AlterField( - model_name='certificate', - name='_private_key', - field=morango.crypto.PrivateKeyField(blank=True, db_column='private_key', null=True), + model_name="certificate", + name="_private_key", + field=morango.crypto.PrivateKeyField( + blank=True, db_column="private_key", null=True + ), ), ] diff --git a/morango/migrations/0006_instanceidmodel_system_id.py b/morango/migrations/0006_instanceidmodel_system_id.py index d4c5f863..f68b05e2 100644 --- a/morango/migrations/0006_instanceidmodel_system_id.py +++ b/morango/migrations/0006_instanceidmodel_system_id.py @@ -2,19 +2,18 @@ # Generated by Django 1.9 on 2017-06-30 00:15 from __future__ import unicode_literals -from django.db import migrations, models +from django.db import migrations +from django.db import models class Migration(migrations.Migration): - dependencies = [ - ('morango', '0005_auto_20170629_2139'), - ] + dependencies = [("morango", "0005_auto_20170629_2139")] operations = [ migrations.AddField( - model_name='instanceidmodel', - name='system_id', + model_name="instanceidmodel", + name="system_id", field=models.CharField(blank=True, max_length=100), - ), + ) ] diff --git a/morango/migrations/0007_auto_20171018_1615.py b/morango/migrations/0007_auto_20171018_1615.py index 4ef91041..9c4238d5 100644 --- a/morango/migrations/0007_auto_20171018_1615.py +++ b/morango/migrations/0007_auto_20171018_1615.py @@ -3,217 +3,231 @@ from __future__ import unicode_literals import datetime -from django.db import migrations, models + import django.db.models.deletion -from django.utils.timezone import utc import django.utils.timezone +from django.db import migrations +from django.db import 
models +from django.utils.timezone import utc + import morango.utils.uuids class Migration(migrations.Migration): - dependencies = [ - ('morango', '0006_instanceidmodel_system_id'), - ] + dependencies = [("morango", "0006_instanceidmodel_system_id")] operations = [ migrations.CreateModel( - name='Nonce', + name="Nonce", fields=[ - ('id', morango.utils.uuids.UUIDField(editable=False, primary_key=True, serialize=False)), - ('timestamp', models.DateTimeField(default=django.utils.timezone.now)), - ('ip', models.CharField(blank=True, max_length=100)), + ( + "id", + morango.utils.uuids.UUIDField( + editable=False, primary_key=True, serialize=False + ), + ), + ("timestamp", models.DateTimeField(default=django.utils.timezone.now)), + ("ip", models.CharField(blank=True, max_length=100)), ], - options={ - 'abstract': False, - }, + options={"abstract": False}, ), migrations.RenameField( - model_name='scopedefinition', - old_name='read_scope_def', - new_name='read_filter_template', + model_name="scopedefinition", + old_name="read_scope_def", + new_name="read_filter_template", ), migrations.RenameField( - model_name='scopedefinition', - old_name='read_write_scope_def', - new_name='read_write_filter_template', + model_name="scopedefinition", + old_name="read_write_scope_def", + new_name="read_write_filter_template", ), migrations.RenameField( - model_name='scopedefinition', - old_name='write_scope_def', - new_name='write_filter_template', + model_name="scopedefinition", + old_name="write_scope_def", + new_name="write_filter_template", ), migrations.RenameField( - model_name='transfersession', - old_name='incoming', - new_name='push', - ), - migrations.RemoveField( - model_name='syncsession', - name='host', - ), - migrations.RemoveField( - model_name='syncsession', - name='local_scope', - ), - migrations.RemoveField( - model_name='syncsession', - name='remote_scope', - ), - migrations.RemoveField( - model_name='transfersession', - name='chunksize', - ), - migrations.RemoveField( - model_name='transfersession', - name='records_remaining', + model_name="transfersession", old_name="incoming", new_name="push" ), + migrations.RemoveField(model_name="syncsession", name="host"), + migrations.RemoveField(model_name="syncsession", name="local_scope"), + migrations.RemoveField(model_name="syncsession", name="remote_scope"), + migrations.RemoveField(model_name="transfersession", name="chunksize"), + migrations.RemoveField(model_name="transfersession", name="records_remaining"), migrations.AddField( - model_name='buffer', - name='_self_ref_fk', + model_name="buffer", + name="_self_ref_fk", field=models.CharField(blank=True, max_length=32), ), migrations.AddField( - model_name='buffer', - name='source_id', - field=models.CharField(default=datetime.datetime(2017, 10, 18, 21, 13, 11, 488565, tzinfo=utc), max_length=96), + model_name="buffer", + name="source_id", + field=models.CharField( + default=datetime.datetime(2017, 10, 18, 21, 13, 11, 488565, tzinfo=utc), + max_length=96, + ), preserve_default=False, ), migrations.AddField( - model_name='certificate', - name='salt', + model_name="certificate", + name="salt", field=models.CharField(blank=True, max_length=32), ), migrations.AddField( - model_name='databasemaxcounter', - name='partition', - field=models.CharField(default=b'', max_length=128), + model_name="databasemaxcounter", + name="partition", + field=models.CharField(default=b"", max_length=128), ), migrations.AddField( - model_name='store', - name='_self_ref_fk', + model_name="store", + name="_self_ref_fk", 
field=models.CharField(blank=True, max_length=32), ), migrations.AddField( - model_name='store', - name='dirty_bit', + model_name="store", + name="dirty_bit", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='store', - name='source_id', - field=models.CharField(default=datetime.datetime(2017, 10, 18, 21, 15, 6, 842850, tzinfo=utc), max_length=96), + model_name="store", + name="source_id", + field=models.CharField( + default=datetime.datetime(2017, 10, 18, 21, 15, 6, 842850, tzinfo=utc), + max_length=96, + ), preserve_default=False, ), migrations.AddField( - model_name='syncsession', - name='active', + model_name="syncsession", + name="active", field=models.BooleanField(default=True), ), migrations.AddField( - model_name='syncsession', - name='connection_kind', - field=models.CharField(choices=[('network', 'Network'), ('disk', 'Disk')], default='', max_length=10), + model_name="syncsession", + name="connection_kind", + field=models.CharField( + choices=[("network", "Network"), ("disk", "Disk")], + default="", + max_length=10, + ), preserve_default=False, ), migrations.AddField( - model_name='syncsession', - name='connection_path', - field=models.CharField(default=datetime.datetime(2017, 10, 18, 21, 15, 21, 147686, tzinfo=utc), max_length=1000), + model_name="syncsession", + name="connection_path", + field=models.CharField( + default=datetime.datetime(2017, 10, 18, 21, 15, 21, 147686, tzinfo=utc), + max_length=1000, + ), preserve_default=False, ), migrations.AddField( - model_name='syncsession', - name='is_server', + model_name="syncsession", + name="is_server", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='syncsession', - name='local_certificate', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='syncsessions_local', to='morango.Certificate'), + model_name="syncsession", + name="local_certificate", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="syncsessions_local", + to="morango.Certificate", + ), ), migrations.AddField( - model_name='syncsession', - name='local_instance', - field=models.TextField(default='{}'), + model_name="syncsession", + name="local_instance", + field=models.TextField(default="{}"), ), migrations.AddField( - model_name='syncsession', - name='local_ip', + model_name="syncsession", + name="local_ip", field=models.CharField(blank=True, max_length=100), ), migrations.AddField( - model_name='syncsession', - name='profile', - field=models.CharField(default=datetime.datetime(2017, 10, 18, 21, 15, 27, 811735, tzinfo=utc), max_length=40), + model_name="syncsession", + name="profile", + field=models.CharField( + default=datetime.datetime(2017, 10, 18, 21, 15, 27, 811735, tzinfo=utc), + max_length=40, + ), preserve_default=False, ), migrations.AddField( - model_name='syncsession', - name='remote_certificate', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='syncsessions_remote', to='morango.Certificate'), + model_name="syncsession", + name="remote_certificate", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="syncsessions_remote", + to="morango.Certificate", + ), ), migrations.AddField( - model_name='syncsession', - name='remote_instance', - field=models.TextField(default='{}'), + model_name="syncsession", + name="remote_instance", + 
field=models.TextField(default="{}"), ), migrations.AddField( - model_name='syncsession', - name='remote_ip', + model_name="syncsession", + name="remote_ip", field=models.CharField(blank=True, max_length=100), ), migrations.AddField( - model_name='transfersession', - name='last_activity_timestamp', - field=models.DateTimeField(blank=True, default=datetime.datetime(2017, 10, 18, 21, 15, 30, 154629, tzinfo=utc)), + model_name="transfersession", + name="last_activity_timestamp", + field=models.DateTimeField( + blank=True, + default=datetime.datetime(2017, 10, 18, 21, 15, 30, 154629, tzinfo=utc), + ), preserve_default=False, ), migrations.AddField( - model_name='transfersession', - name='local_fsic', - field=models.TextField(blank=True, default=b'{}'), + model_name="transfersession", + name="local_fsic", + field=models.TextField(blank=True, default=b"{}"), ), migrations.AddField( - model_name='transfersession', - name='records_transferred', + model_name="transfersession", + name="records_transferred", field=models.IntegerField(default=0), ), migrations.AddField( - model_name='transfersession', - name='remote_fsic', - field=models.TextField(blank=True, default=b'{}'), + model_name="transfersession", + name="remote_fsic", + field=models.TextField(blank=True, default=b"{}"), ), migrations.AddField( - model_name='transfersession', - name='start_timestamp', + model_name="transfersession", + name="start_timestamp", field=models.DateTimeField(default=django.utils.timezone.now), ), migrations.AlterField( - model_name='syncsession', - name='id', + model_name="syncsession", + name="id", field=morango.utils.uuids.UUIDField(primary_key=True, serialize=False), ), migrations.AlterField( - model_name='transfersession', - name='id', + model_name="transfersession", + name="id", field=morango.utils.uuids.UUIDField(primary_key=True, serialize=False), ), migrations.AlterField( - model_name='transfersession', - name='records_total', + model_name="transfersession", + name="records_total", field=models.IntegerField(blank=True, null=True), ), migrations.AlterUniqueTogether( - name='buffer', - unique_together=set([('transfer_session', 'model_uuid')]), - ), - migrations.RemoveField( - model_name='databasemaxcounter', - name='filter', + name="buffer", unique_together=set([("transfer_session", "model_uuid")]) ), + migrations.RemoveField(model_name="databasemaxcounter", name="filter"), migrations.AlterUniqueTogether( - name='databasemaxcounter', - unique_together=set([('instance_id', 'partition')]), + name="databasemaxcounter", + unique_together=set([("instance_id", "partition")]), ), ] diff --git a/morango/migrations/0008_auto_20171114_2217.py b/morango/migrations/0008_auto_20171114_2217.py index 899ee318..e5d8b7ea 100644 --- a/morango/migrations/0008_auto_20171114_2217.py +++ b/morango/migrations/0008_auto_20171114_2217.py @@ -2,64 +2,61 @@ # Generated by Django 1.9.7 on 2017-11-14 22:17 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion import django.db.models.manager +from django.db import migrations +from django.db import models class Migration(migrations.Migration): - dependencies = [ - ('morango', '0007_auto_20171018_1615'), - ] + dependencies = [("morango", "0007_auto_20171018_1615")] operations = [ migrations.RenameField( - model_name='syncsession', - old_name='local_instance', - new_name='client_instance', + model_name="syncsession", + old_name="local_instance", + new_name="client_instance", ), migrations.RenameField( - model_name='syncsession', - 
old_name='local_ip', - new_name='client_ip', + model_name="syncsession", old_name="local_ip", new_name="client_ip" ), migrations.RenameField( - model_name='syncsession', - old_name='remote_instance', - new_name='server_instance', + model_name="syncsession", + old_name="remote_instance", + new_name="server_instance", ), migrations.RenameField( - model_name='syncsession', - old_name='remote_ip', - new_name='server_ip', + model_name="syncsession", old_name="remote_ip", new_name="server_ip" ), migrations.RenameField( - model_name='transfersession', - old_name='local_fsic', - new_name='client_fsic', + model_name="transfersession", old_name="local_fsic", new_name="client_fsic" ), migrations.RenameField( - model_name='transfersession', - old_name='remote_fsic', - new_name='server_fsic', - ), - migrations.RemoveField( - model_name='syncsession', - name='local_certificate', - ), - migrations.RemoveField( - model_name='syncsession', - name='remote_certificate', + model_name="transfersession", old_name="remote_fsic", new_name="server_fsic" ), + migrations.RemoveField(model_name="syncsession", name="local_certificate"), + migrations.RemoveField(model_name="syncsession", name="remote_certificate"), migrations.AddField( - model_name='syncsession', - name='client_certificate', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='syncsessions_client', to='morango.Certificate'), + model_name="syncsession", + name="client_certificate", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="syncsessions_client", + to="morango.Certificate", + ), ), migrations.AddField( - model_name='syncsession', - name='server_certificate', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='syncsessions_server', to='morango.Certificate'), + model_name="syncsession", + name="server_certificate", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="syncsessions_server", + to="morango.Certificate", + ), ), ] diff --git a/morango/migrations/0009_auto_20171205_0252.py b/morango/migrations/0009_auto_20171205_0252.py index 64f4f7fa..09764281 100644 --- a/morango/migrations/0009_auto_20171205_0252.py +++ b/morango/migrations/0009_auto_20171205_0252.py @@ -7,14 +7,10 @@ class Migration(migrations.Migration): - dependencies = [ - ('morango', '0008_auto_20171114_2217'), - ] + dependencies = [("morango", "0008_auto_20171114_2217")] operations = [ migrations.RenameField( - model_name='instanceidmodel', - old_name='macaddress', - new_name='node_id', - ), + model_name="instanceidmodel", old_name="macaddress", new_name="node_id" + ) ] diff --git a/morango/migrations/0010_auto_20171206_1615.py b/morango/migrations/0010_auto_20171206_1615.py index a7b4ab68..c0397a7c 100644 --- a/morango/migrations/0010_auto_20171206_1615.py +++ b/morango/migrations/0010_auto_20171206_1615.py @@ -2,29 +2,28 @@ # Generated by Django 1.9.13 on 2017-12-06 22:15 from __future__ import unicode_literals -from django.db import migrations, models +from django.db import migrations +from django.db import models class Migration(migrations.Migration): - dependencies = [ - ('morango', '0009_auto_20171205_0252'), - ] + dependencies = [("morango", "0009_auto_20171205_0252")] operations = [ migrations.AlterField( - model_name='databasemaxcounter', - name='partition', - field=models.CharField(default='', max_length=128), + 
model_name="databasemaxcounter", + name="partition", + field=models.CharField(default="", max_length=128), ), migrations.AlterField( - model_name='transfersession', - name='client_fsic', - field=models.TextField(blank=True, default='{}'), + model_name="transfersession", + name="client_fsic", + field=models.TextField(blank=True, default="{}"), ), migrations.AlterField( - model_name='transfersession', - name='server_fsic', - field=models.TextField(blank=True, default='{}'), + model_name="transfersession", + name="server_fsic", + field=models.TextField(blank=True, default="{}"), ), ] diff --git a/morango/migrations/0011_sharedkey.py b/morango/migrations/0011_sharedkey.py index 829248ec..21a25dbb 100644 --- a/morango/migrations/0011_sharedkey.py +++ b/morango/migrations/0011_sharedkey.py @@ -2,24 +2,32 @@ # Generated by Django 1.11.13 on 2018-06-12 18:38 from __future__ import unicode_literals -from django.db import migrations, models +from django.db import migrations +from django.db import models + import morango.crypto class Migration(migrations.Migration): - dependencies = [ - ('morango', '0010_auto_20171206_1615'), - ] + dependencies = [("morango", "0010_auto_20171206_1615")] operations = [ migrations.CreateModel( - name='SharedKey', + name="SharedKey", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('public_key', morango.crypto.PublicKeyField()), - ('private_key', morango.crypto.PrivateKeyField()), - ('current', models.BooleanField(default=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("public_key", morango.crypto.PublicKeyField()), + ("private_key", morango.crypto.PrivateKeyField()), + ("current", models.BooleanField(default=True)), ], - ), + ) ] diff --git a/morango/migrations/0012_auto_20180927_1658.py b/morango/migrations/0012_auto_20180927_1658.py index cbc8f39a..fa23b145 100644 --- a/morango/migrations/0012_auto_20180927_1658.py +++ b/morango/migrations/0012_auto_20180927_1658.py @@ -2,32 +2,35 @@ # Generated by Django 1.11.15 on 2018-09-27 16:58 from __future__ import unicode_literals -from django.db import migrations, models +from django.db import migrations +from django.db import models + import morango.utils.uuids class Migration(migrations.Migration): - dependencies = [ - ('morango', '0011_sharedkey'), - ] + dependencies = [("morango", "0011_sharedkey")] operations = [ migrations.CreateModel( - name='HardDeletedModels', + name="HardDeletedModels", fields=[ - ('id', morango.utils.uuids.UUIDField(primary_key=True, serialize=False)), - ('profile', models.CharField(max_length=40)), + ( + "id", + morango.utils.uuids.UUIDField(primary_key=True, serialize=False), + ), + ("profile", models.CharField(max_length=40)), ], ), migrations.AddField( - model_name='buffer', - name='hard_deleted', + model_name="buffer", + name="hard_deleted", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='store', - name='hard_deleted', + model_name="store", + name="hard_deleted", field=models.BooleanField(default=False), ), ] diff --git a/morango/models.py b/morango/models.py index 4995ff9a..f9c26173 100644 --- a/morango/models.py +++ b/morango/models.py @@ -2,28 +2,37 @@ import hashlib import json +import logging import os import platform import sys import uuid -import logging from django.conf import settings from django.core import exceptions -from django.db.models import signals -from django.db import connection, models, 
transaction, router -from django.db.models import F, Func, TextField, Value -from django.db.models.functions import Cast -from django.utils import timezone, six -from morango.utils.register_models import _profile_models +from django.db import connection +from django.db import models +from django.db import router +from django.db import transaction +from django.db.models import F +from django.db.models import Func +from django.db.models import TextField +from django.db.models import Value from django.db.models.deletion import Collector -from morango.utils.morango_mptt import MorangoMPTTModel from django.db.models.fields.related import ForeignKey +from django.db.models.functions import Cast +from django.utils import six +from django.utils import timezone -from .certificates import Certificate, Filter, Nonce, ScopeDefinition +from .certificates import Certificate +from .certificates import Filter from .manager import SyncableModelManager from .utils import proquint -from .utils.uuids import UUIDField, UUIDModelMixin, sha2_uuid +from .utils.uuids import sha2_uuid +from .utils.uuids import UUIDField +from .utils.uuids import UUIDModelMixin +from morango.utils.morango_mptt import MorangoMPTTModel +from morango.utils.register_models import _profile_models logger = logging.getLogger(__name__) @@ -84,7 +93,15 @@ class InstanceIDModel(UUIDModelMixin): as well as its counter with all the records that were serialized at the time. """ - uuid_input_fields = ("platform", "hostname", "sysversion", "node_id", "database_id", "db_path", "system_id") + uuid_input_fields = ( + "platform", + "hostname", + "sysversion", + "node_id", + "database_id", + "db_path", + "system_id", + ) platform = models.TextField() hostname = models.TextField() @@ -104,7 +121,7 @@ def get_or_create_current_instance(cls): # on Android, platform.platform() barfs, so we handle that safely here try: plat = platform.platform() - except: + except: # noqa: E722 plat = "Unknown (Android?)" kwargs = { @@ -112,15 +129,17 @@ def get_or_create_current_instance(cls): "hostname": platform.node(), "sysversion": sys.version, "database": DatabaseIDModel.get_or_create_current_database_id(), - "db_path": os.path.abspath(settings.DATABASES['default']['NAME']), + "db_path": os.path.abspath(settings.DATABASES["default"]["NAME"]), "system_id": os.environ.get("MORANGO_SYSTEM_ID", ""), } # try to get the MAC address, but exclude it if it was a fake (random) address mac = uuid.getnode() if (mac >> 40) % 2 == 0: # 8th bit (of 48 bits, from left) is 1 if MAC is fake - hashable_identifier = "{}:{}".format(kwargs['database'].id, mac) - kwargs["node_id"] = hashlib.sha1(hashable_identifier.encode('utf-8')).hexdigest()[:20] + hashable_identifier = "{}:{}".format(kwargs["database"].id, mac) + kwargs["node_id"] = hashlib.sha1( + hashable_identifier.encode("utf-8") + ).hexdigest()[:20] else: kwargs["node_id"] = "" @@ -136,7 +155,7 @@ def get_or_create_current_instance(cls): @staticmethod @transaction.atomic def get_current_instance_and_increment_counter(): - InstanceIDModel.objects.filter(current=True).update(counter=F('counter') + 1) + InstanceIDModel.objects.filter(current=True).update(counter=F("counter") + 1) return InstanceIDModel.objects.get(current=True) def get_proquint(self): @@ -160,15 +179,23 @@ class SyncSession(models.Model): is_server = models.BooleanField(default=False) # track the certificates being used by each side for this session - client_certificate = models.ForeignKey(Certificate, blank=True, null=True, related_name="syncsessions_client") - 
server_certificate = models.ForeignKey(Certificate, blank=True, null=True, related_name="syncsessions_server") + client_certificate = models.ForeignKey( + Certificate, blank=True, null=True, related_name="syncsessions_client" + ) + server_certificate = models.ForeignKey( + Certificate, blank=True, null=True, related_name="syncsessions_server" + ) # track the morango profile this sync session is happening for profile = models.CharField(max_length=40) # information about the connection over which this sync session is happening - connection_kind = models.CharField(max_length=10, choices=[("network", "Network"), ("disk", "Disk")]) - connection_path = models.CharField(max_length=1000) # file path if kind=disk, and base URL of server if kind=network + connection_kind = models.CharField( + max_length=10, choices=[("network", "Network"), ("disk", "Disk")] + ) + connection_path = models.CharField( + max_length=1000 + ) # file path if kind=disk, and base URL of server if kind=network # for network connections, keep track of the IPs on either end client_ip = models.CharField(max_length=100, blank=True) @@ -186,11 +213,17 @@ class TransferSession(models.Model): """ id = UUIDField(primary_key=True) - filter = models.TextField() # partition/filter to know what subset of data is to be synced + filter = ( + models.TextField() + ) # partition/filter to know what subset of data is to be synced push = models.BooleanField() # is session pushing or pulling data? active = models.BooleanField(default=True) # is this transfer session still active? - records_transferred = models.IntegerField(default=0) # track how many records have already been transferred - records_total = models.IntegerField(blank=True, null=True) # total number of records to be synced across in this transfer + records_transferred = models.IntegerField( + default=0 + ) # track how many records have already been transferred + records_total = models.IntegerField( + blank=True, null=True + ) # total number of records to be synced across in this transfer sync_session = models.ForeignKey(SyncSession) # track when the transfer session started and the last time there was activity on it @@ -259,13 +292,16 @@ class Meta: class StoreQueryset(models.QuerySet): - def char_ids_list(self): - return (self.annotate(id_cast=Cast('id', TextField())) \ - # remove dashes from char uuid - .annotate(fixed_id=Func(F('id_cast'), Value('-'), Value(''), function='replace',)) \ - # return as list - .values_list("fixed_id", flat=True)) + return ( + self.annotate(id_cast=Cast("id", TextField())) + # remove dashes from char uuid + .annotate( + fixed_id=Func(F("id_cast"), Value("-"), Value(""), function="replace") + ) + # return as list + .values_list("fixed_id", flat=True) + ) class StoreManager(models.Manager): @@ -285,7 +321,7 @@ class Store(AbstractStore): objects = StoreManager() - def _deserialize_store_model(self, fk_cache): + def _deserialize_store_model(self, fk_cache): # noqa: C901 """ When deserializing a store model, we look at the deleted flags to know if we should delete the app model. 
Upon loading the app model in memory we validate the app model's fields; if any errors occur, we follow
@@ -318,9 +354,17 @@ def _deserialize_store_model(self, fk_cache):
             app_model.cached_clean_fields(fk_cache)
             return app_model
         except exceptions.ValidationError as e:
-            logger.warn("Validation error for {model} with id {id}: {error}".format(model=klass_model.__name__, id=app_model.id, error=e))
+            logger.warn(
+                "Validation error for {model} with id {id}: {error}".format(
+                    model=klass_model.__name__, id=app_model.id, error=e
+                )
+            )
             # check FKs in store to see if any of those models were deleted or hard_deleted to propagate to this model
-            fk_ids = [getattr(app_model, field.attname) for field in app_model._meta.fields if isinstance(field, ForeignKey)]
+            fk_ids = [
+                getattr(app_model, field.attname)
+                for field in app_model._meta.fields
+                if isinstance(field, ForeignKey)
+            ]
             for fk_id in fk_ids:
                 try:
                     st_model = Store.objects.get(id=fk_id)
@@ -334,6 +378,7 @@ def _deserialize_store_model(self, fk_cache):
                 pass
             raise e
 
+
 class Buffer(AbstractStore):
     """
     ``Buffer`` is where records from the internal store are queued up temporarily, before being
@@ -348,7 +393,9 @@ class Meta:
         unique_together = ("transfer_session", "model_uuid")
 
     def rmcb_list(self):
-        return RecordMaxCounterBuffer.objects.filter(model_uuid=self.model_uuid, transfer_session=self.transfer_session)
+        return RecordMaxCounterBuffer.objects.filter(
+            model_uuid=self.model_uuid, transfer_session=self.transfer_session
+        )
 
 
 class AbstractCounter(models.Model):
@@ -394,13 +441,17 @@ def update_fsics(cls, fsics, sync_filter):
         # load database max counters
         for (key, value) in six.iteritems(updated_fsic):
             for f in sync_filter:
-                DatabaseMaxCounter.objects.update_or_create(instance_id=key, partition=f, defaults={'counter': value})
+                DatabaseMaxCounter.objects.update_or_create(
+                    instance_id=key, partition=f, defaults={"counter": value}
+                )
 
     @classmethod
     def calculate_filter_max_counters(cls, filters):
 
         # create string of prefixes to place into sql statement
-        condition = " UNION ".join(["SELECT CAST('{}' as TEXT) AS a".format(prefix) for prefix in filters])
+        condition = " UNION ".join(
+            ["SELECT CAST('{}' as TEXT) AS a".format(prefix) for prefix in filters]
+        )
 
         filter_max_calculation = """
         SELECT PMC.instance, MIN(PMC.counter)
@@ -413,14 +464,14 @@ def calculate_filter_max_counters(cls, filters):
         ) as PMC
         GROUP BY PMC.instance
         HAVING {count} = COUNT(PMC.filter_partition)
-        """.format(dmc_table=cls._meta.db_table,
-                   filter_list=condition,
-                   count=len(filters))
+        """.format(
+            dmc_table=cls._meta.db_table, filter_list=condition, count=len(filters)
+        )
 
         with connection.cursor() as cursor:
             cursor.execute(filter_max_calculation)
             # try to get hex value because postgres returns values as uuid
-            return {getattr(tup[0], 'hex', tup[0]): tup[1] for tup in cursor.fetchall()}
+            return {getattr(tup[0], "hex", tup[0]): tup[1] for tup in cursor.fetchall()}
 
 
 class RecordMaxCounter(AbstractCounter):
@@ -433,7 +484,7 @@ class RecordMaxCounter(AbstractCounter):
     store_model = models.ForeignKey(Store)
 
     class Meta:
-        unique_together = ('store_model', 'instance_id')
+        unique_together = ("store_model", "instance_id")
 
 
 class RecordMaxCounterBuffer(AbstractCounter):
@@ -455,7 +506,7 @@ class SyncableModel(UUIDModelMixin):
    # constant value to insert into partition strings in place of current model's ID, as needed (to avoid circularity)
     ID_PLACEHOLDER = "${id}"
 
-    _morango_internal_fields_not_to_serialize = ('_morango_dirty_bit',)
+    _morango_internal_fields_not_to_serialize = 
("_morango_dirty_bit",) morango_model_dependencies = () morango_fields_not_to_serialize = () morango_profile = None @@ -473,12 +524,14 @@ class Meta: abstract = True def _update_deleted_models(self): - DeletedModels.objects.update_or_create(defaults={'id': self.id, 'profile': self.morango_profile}, - id=self.id) + DeletedModels.objects.update_or_create( + defaults={"id": self.id, "profile": self.morango_profile}, id=self.id + ) def _update_hard_deleted_models(self): - HardDeletedModels.objects.update_or_create(defaults={'id': self.id, 'profile': self.morango_profile}, - id=self.id) + HardDeletedModels.objects.update_or_create( + defaults={"id": self.id, "profile": self.morango_profile}, id=self.id + ) def save(self, update_dirty_bit_to=True, *args, **kwargs): if update_dirty_bit_to is None: @@ -489,11 +542,13 @@ def save(self, update_dirty_bit_to=True, *args, **kwargs): self._morango_dirty_bit = False super(SyncableModel, self).save(*args, **kwargs) - def delete(self, using=None, keep_parents=False, hard_delete=False, *args, **kwargs): + def delete( + self, using=None, keep_parents=False, hard_delete=False, *args, **kwargs + ): using = using or router.db_for_write(self.__class__, instance=self) assert self._get_pk_val() is not None, ( - "%s object can't be deleted because its %s attribute is set to None." % - (self._meta.object_name, self._meta.pk.attname) + "%s object can't be deleted because its %s attribute is set to None." + % (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self], keep_parents=keep_parents) @@ -501,17 +556,23 @@ def delete(self, using=None, keep_parents=False, hard_delete=False, *args, **kwa if hard_delete: # set hard deletion for all related models for model, instances in six.iteritems(collector.data): - if issubclass(model, SyncableModel) or issubclass(model, MorangoMPTTModel): + if issubclass(model, SyncableModel) or issubclass( + model, MorangoMPTTModel + ): for obj in instances: obj._update_hard_deleted_models() return collector.delete() def cached_clean_fields(self, fk_lookup_cache): excluded_fields = [] - fk_fields = [field for field in self._meta.fields if isinstance(field, models.ForeignKey)] + fk_fields = [ + field for field in self._meta.fields if isinstance(field, models.ForeignKey) + ] for f in fk_fields: raw_value = getattr(self, f.attname) - key = 'morango_{id}_{db_table}_foreignkey'.format(db_table=f.related_model._meta.db_table, id=raw_value) + key = "morango_{id}_{db_table}_foreignkey".format( + db_table=f.related_model._meta.db_table, id=raw_value + ) try: fk_lookup_cache[key] excluded_fields.append(f.name) @@ -537,9 +598,13 @@ def serialize(self): if f.attname in self._morango_internal_fields_not_to_serialize: continue # case if model is morango mptt - if f.attname in getattr(self, '_internal_mptt_fields_not_to_serialize', '_internal_fields_not_to_serialize'): + if f.attname in getattr( + self, + "_internal_mptt_fields_not_to_serialize", + "_internal_fields_not_to_serialize", + ): continue - if hasattr(f, 'value_from_object_json_compatible'): + if hasattr(f, "value_from_object_json_compatible"): data[f.attname] = f.value_from_object_json_compatible(self) else: data[f.attname] = f.value_from_object(self) @@ -560,11 +625,15 @@ def merge_conflict(cls, current, push): def calculate_source_id(self): """Should return a string that uniquely defines the model instance or `None` for a random uuid.""" - raise NotImplementedError("You must define a 'calculate_source_id' method on models that inherit from 
SyncableModel.") + raise NotImplementedError( + "You must define a 'calculate_source_id' method on models that inherit from SyncableModel." + ) def calculate_partition(self): """Should return a string specifying this model instance's partition, using `self.ID_PLACEHOLDER` in place of its own ID, if needed.""" - raise NotImplementedError("You must define a 'calculate_partition' method on models that inherit from SyncableModel.") + raise NotImplementedError( + "You must define a 'calculate_partition' method on models that inherit from SyncableModel." + ) @staticmethod def compute_namespaced_id(partition_value, source_id_value, model_name): @@ -575,6 +644,10 @@ def calculate_uuid(self): if self._morango_source_id is None: self._morango_source_id = uuid.uuid4().hex - namespaced_id = self.compute_namespaced_id(self.calculate_partition(), self._morango_source_id, self.morango_model_name) - self._morango_partition = self.calculate_partition().replace(self.ID_PLACEHOLDER, namespaced_id) + namespaced_id = self.compute_namespaced_id( + self.calculate_partition(), self._morango_source_id, self.morango_model_name + ) + self._morango_partition = self.calculate_partition().replace( + self.ID_PLACEHOLDER, namespaced_id + ) return namespaced_id diff --git a/morango/parsers.py b/morango/parsers.py index 89e296bc..f35921f8 100644 --- a/morango/parsers.py +++ b/morango/parsers.py @@ -1,18 +1,22 @@ -from rest_framework.parsers import BaseParser -import json import io +import json + +from rest_framework.parsers import BaseParser + class GzipParser(BaseParser): """ Parses Gzipped data. """ - media_type = 'application/gzip' + + media_type = "application/gzip" def parse(self, stream, media_type=None, parser_context=None): """ Parses the incoming bytestream by decompressing the gzipped data and returns the resulting data as a dictionary. """ import gzip + with gzip.GzipFile(fileobj=io.BytesIO(stream.read())) as f: data = f.read() - return json.loads(data.decode('utf-8')) + return json.loads(data.decode("utf-8")) diff --git a/morango/query.py b/morango/query.py index 31b17415..550814dd 100644 --- a/morango/query.py +++ b/morango/query.py @@ -2,13 +2,14 @@ class SyncableModelQuerySet(models.query.QuerySet): - def as_manager(cls): # Address the circular dependency between `SyncableModelQueryset` and `SyncableModelManager`. from .manager import SyncableModelManager + manager = SyncableModelManager.from_queryset(cls)() manager._built_with_as_manager = True return manager + as_manager.queryset_only = True as_manager = classmethod(as_manager) @@ -16,7 +17,7 @@ def update(self, update_dirty_bit_to=True, **kwargs): if update_dirty_bit_to is None: pass # don't do anything with the dirty bit elif update_dirty_bit_to: - kwargs.update({'_morango_dirty_bit': True}) + kwargs.update({"_morango_dirty_bit": True}) elif not update_dirty_bit_to: - kwargs.update({'_morango_dirty_bit': False}) + kwargs.update({"_morango_dirty_bit": False}) super(SyncableModelQuerySet, self).update(**kwargs) diff --git a/morango/settings.py b/morango/settings.py index bd4b4f99..803f4dcf 100644 --- a/morango/settings.py +++ b/morango/settings.py @@ -19,7 +19,7 @@ # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = '-#()od3p8n@o&9kcj(s63!#^tziq+j!nuwlyptw#o06t&wrk$q' +SECRET_KEY = "-#()od3p8n@o&9kcj(s63!#^tziq+j!nuwlyptw#o06t&wrk$q" # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True @@ -30,54 +30,54 @@ # Application definition INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'rest_framework', - 'morango', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "rest_framework", + "morango", ] MIDDLEWARE_CLASSES = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.auth.middleware.SessionAuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ] }, - }, + } ] -WSGI_APPLICATION = 'testapp.wsgi.application' +WSGI_APPLICATION = "testapp.wsgi.application" # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': os.path.join(BASE_DIR, 'testapp.db'), + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": os.path.join(BASE_DIR, "testapp.db"), } } @@ -87,26 +87,20 @@ AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" }, + {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, + {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, + {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True @@ -118,4 +112,4 @@ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ 
-STATIC_URL = '/static/' +STATIC_URL = "/static/" diff --git a/morango/signals.py b/morango/signals.py index 69506584..636edc4d 100644 --- a/morango/signals.py +++ b/morango/signals.py @@ -1,7 +1,7 @@ from django.db.models.signals import post_delete from django.dispatch import receiver -from .models import DeletedModels, SyncableModel +from .models import SyncableModel @receiver(post_delete) diff --git a/morango/syncsession.py b/morango/syncsession.py index 2022656b..c8dd6874 100644 --- a/morango/syncsession.py +++ b/morango/syncsession.py @@ -1,11 +1,17 @@ import json -import requests import socket import uuid +from io import BytesIO +import requests from django.conf import settings +from django.core.paginator import Paginator from django.utils import timezone from django.utils.six import iteritems +from rest_framework.exceptions import ValidationError +from six.moves.urllib.parse import urljoin +from six.moves.urllib.parse import urlparse + from morango.api.serializers import BufferSerializer from morango.api.serializers import CertificateSerializer from morango.api.serializers import InstanceIDSerializer @@ -13,6 +19,7 @@ from morango.certificates import Filter from morango.certificates import Key from morango.constants import api_urls +from morango.constants.capabilities import GZIP_BUFFER_POST from morango.errors import CertificateSignatureInvalid from morango.errors import MorangoError from morango.errors import MorangoServerDoesNotAllowNewCertPush @@ -22,18 +29,11 @@ from morango.models import RecordMaxCounterBuffer from morango.models import SyncSession from morango.models import TransferSession +from morango.util import CAPABILITIES from morango.utils.sync_utils import _dequeue_into_store from morango.utils.sync_utils import _queue_into_buffer from morango.utils.sync_utils import _serialize_into_store from morango.validation import validate_and_create_buffer_data -from rest_framework.exceptions import ValidationError -from six.moves.urllib.parse import urljoin -from six.moves.urllib.parse import urlparse - -from django.core.paginator import Paginator -from io import BytesIO -from morango.util import CAPABILITIES -from morango.constants.capabilities import GZIP_BUFFER_POST if GZIP_BUFFER_POST in CAPABILITIES: from gzip import GzipFile @@ -43,19 +43,21 @@ def _join_with_logical_operator(lst, operator): op = ") {operator} (".format(operator=operator) return "(({items}))".format(items=op.join(lst)) + def _get_server_ip(hostname): try: return socket.gethostbyname(hostname) - except: - return '' + except: # noqa: E722 + return "" + def _get_client_ip_for_server(server_host, server_port): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: s.connect((server_host, server_port)) IP = s.getsockname()[0] - except: - IP = '127.0.0.1' + except: # noqa: E722 + IP = "127.0.0.1" finally: s.close() return IP @@ -64,7 +66,9 @@ def _get_client_ip_for_server(server_host, server_port): # borrowed from https://github.com/django/django/blob/1.11.20/django/utils/text.py#L295 def compress_string(s, compresslevel=9): zbuf = BytesIO() - with GzipFile(mode='wb', compresslevel=compresslevel, fileobj=zbuf, mtime=0) as zfile: + with GzipFile( + mode="wb", compresslevel=compresslevel, fileobj=zbuf, mtime=0 + ) as zfile: zfile.write(s) return zbuf.getvalue() @@ -79,20 +83,32 @@ class Connection(object): This class should be subclassed for particular transport mechanisms, and the necessary methods overridden. 
""" + pass class NetworkSyncConnection(Connection): - - def __init__(self, base_url='', compresslevel=9): + def __init__(self, base_url="", compresslevel=9): self.base_url = base_url self.compresslevel = compresslevel # ping server at url with info request info_url = urljoin(self.base_url, api_urls.INFO) self.server_info = requests.get(info_url).json() - self.capabilities = CAPABILITIES.intersection(self.server_info.get('capabilities', [])) - - def _request(self, endpoint, method="GET", lookup=None, data={}, params={}, userargs=None, password=None, data_is_gzipped=False): + self.capabilities = CAPABILITIES.intersection( + self.server_info.get("capabilities", []) + ) + + def _request( + self, + endpoint, + method="GET", + lookup=None, + data={}, + params={}, + userargs=None, + password=None, + data_is_gzipped=False, + ): """ Generic request method designed to handle any morango endpoint. @@ -107,20 +123,31 @@ def _request(self, endpoint, method="GET", lookup=None, data={}, params={}, user """ # convert user arguments into query str for passing to auth layer if isinstance(userargs, dict): - userargs = "&".join(["{}={}".format(key, val) for (key, val) in iteritems(userargs)]) + userargs = "&".join( + ["{}={}".format(key, val) for (key, val) in iteritems(userargs)] + ) # build up url and send request if lookup: - lookup = lookup + '/' + lookup = lookup + "/" url = urljoin(urljoin(self.base_url, endpoint), lookup) auth = (userargs, password) if userargs else None if data_is_gzipped: - resp = requests.request(method, url, data=data, params=params, auth=auth, headers={'content-type': 'application/gzip'}) + resp = requests.request( + method, + url, + data=data, + params=params, + auth=auth, + headers={"content-type": "application/gzip"}, + ) else: if method == "GET": resp = requests.request(method, url, params=params, auth=auth) else: - resp = requests.request(method, url, json=data, params=params, auth=auth) + resp = requests.request( + method, url, json=data, params=params, auth=auth + ) resp.raise_for_status() return resp @@ -130,7 +157,9 @@ def create_sync_session(self, client_cert, server_cert, chunk_size=500): cert_chain_response = self._get_certificate_chain(server_cert) # upon receiving cert chain from server, we attempt to save the chain into our records - Certificate.save_certificate_chain(cert_chain_response.json(), expected_last_id=server_cert.id) + Certificate.save_certificate_chain( + cert_chain_response.json(), expected_last_id=server_cert.id + ) # request the server for a one-time-use nonce nonce_resp = self._get_nonce() @@ -139,7 +168,7 @@ def create_sync_session(self, client_cert, server_cert, chunk_size=500): # if no hostname then url is actually an ip url = urlparse(self.base_url) hostname = url.hostname or self.base_url - port = url.port or (80 if url.scheme == 'http' else 443) + port = url.port or (80 if url.scheme == "http" else 443) # prepare the data to send in the syncsession creation request data = { @@ -147,9 +176,17 @@ def create_sync_session(self, client_cert, server_cert, chunk_size=500): "server_certificate_id": server_cert.id, "client_certificate_id": client_cert.id, "profile": client_cert.profile, - "certificate_chain": json.dumps(CertificateSerializer(client_cert.get_ancestors(include_self=True), many=True).data), + "certificate_chain": json.dumps( + CertificateSerializer( + client_cert.get_ancestors(include_self=True), many=True + ).data + ), "connection_path": self.base_url, - "instance": 
json.dumps(InstanceIDSerializer(InstanceIDModel.get_or_create_current_instance()[0]).data), + "instance": json.dumps( + InstanceIDSerializer( + InstanceIDModel.get_or_create_current_instance()[0] + ).data + ), "nonce": nonce, "client_ip": _get_client_ip_for_server(hostname, port), "server_ip": _get_server_ip(hostname), @@ -168,7 +205,7 @@ def create_sync_session(self, client_cert, server_cert, chunk_size=500): # build the data to be used for creating our own syncsession data = { - "id": data['id'], + "id": data["id"], "start_timestamp": timezone.now(), "last_activity_timestamp": timezone.now(), "active": True, @@ -178,9 +215,13 @@ def create_sync_session(self, client_cert, server_cert, chunk_size=500): "profile": client_cert.profile, "connection_kind": "network", "connection_path": self.base_url, - "client_ip": data['client_ip'], - "server_ip": data['server_ip'], - "client_instance": json.dumps(InstanceIDSerializer(InstanceIDModel.get_or_create_current_instance()[0]).data), + "client_ip": data["client_ip"], + "server_ip": data["server_ip"], + "client_instance": json.dumps( + InstanceIDSerializer( + InstanceIDModel.get_or_create_current_instance()[0] + ).data + ), "server_instance": session_resp.json().get("server_instance") or "{}", } sync_session = SyncSession.objects.create(**data) @@ -190,25 +231,42 @@ def create_sync_session(self, client_cert, server_cert, chunk_size=500): def get_remote_certificates(self, primary_partition, scope_def_id=None): remote_certs = [] # request certs for this primary partition, where the server also has a private key for - remote_certs_resp = self._request(api_urls.CERTIFICATE, params={'primary_partition': primary_partition}) + remote_certs_resp = self._request( + api_urls.CERTIFICATE, params={"primary_partition": primary_partition} + ) # inflate remote certs into a list of unsaved models for cert in remote_certs_resp.json(): - remote_certs.append(Certificate.deserialize(cert["serialized"], cert["signature"])) + remote_certs.append( + Certificate.deserialize(cert["serialized"], cert["signature"]) + ) # filter certs by scope definition id, if provided if scope_def_id: - remote_certs = [cert for cert in remote_certs if cert.scope_definition_id == scope_def_id] + remote_certs = [ + cert + for cert in remote_certs + if cert.scope_definition_id == scope_def_id + ] return remote_certs - def certificate_signing_request(self, parent_cert, scope_definition_id, scope_params, userargs=None, password=None): + def certificate_signing_request( + self, + parent_cert, + scope_definition_id, + scope_params, + userargs=None, + password=None, + ): # if server cert does not exist locally, retrieve it from server if not Certificate.objects.filter(id=parent_cert.id).exists(): cert_chain_response = self._get_certificate_chain(parent_cert) # upon receiving cert chain from server, we attempt to save the chain into our records - Certificate.save_certificate_chain(cert_chain_response.json(), expected_last_id=parent_cert.id) + Certificate.save_certificate_chain( + cert_chain_response.json(), expected_last_id=parent_cert.id + ) csr_key = Key() # build up data for csr @@ -218,19 +276,29 @@ def certificate_signing_request(self, parent_cert, scope_definition_id, scope_pa "scope_definition": scope_definition_id, "scope_version": parent_cert.scope_version, "scope_params": json.dumps(scope_params), - "public_key": csr_key.get_public_key_string() + "public_key": csr_key.get_public_key_string(), } - csr_resp = self._request(api_urls.CERTIFICATE, method="POST", data=data, userargs=userargs, 
password=password) + csr_resp = self._request( + api_urls.CERTIFICATE, + method="POST", + data=data, + userargs=userargs, + password=password, + ) csr_data = csr_resp.json() # verify cert returned from server, and proceed to save into our records - csr_cert = Certificate.deserialize(csr_data["serialized"], csr_data["signature"]) + csr_cert = Certificate.deserialize( + csr_data["serialized"], csr_data["signature"] + ) csr_cert.private_key = csr_key csr_cert.check_certificate() csr_cert.save() return csr_cert - def push_signed_client_certificate_chain(self, local_parent_cert, scope_definition_id, scope_params): + def push_signed_client_certificate_chain( + self, local_parent_cert, scope_definition_id, scope_params + ): # grab shared public key of server publickey_response = self._get_public_key() @@ -238,21 +306,28 @@ def push_signed_client_certificate_chain(self, local_parent_cert, scope_definiti nonce_response = self._get_nonce() # build up data for csr - certificate = Certificate(parent_id=local_parent_cert.id, - profile=local_parent_cert.profile, - scope_definition_id=scope_definition_id, - scope_version=local_parent_cert.scope_version, - scope_params=json.dumps(scope_params), - public_key=Key(public_key_string=publickey_response.json()[0]['public_key']), - salt=nonce_response.json()["id"] # for pushing signed certs, we use nonce as salt - ) + certificate = Certificate( + parent_id=local_parent_cert.id, + profile=local_parent_cert.profile, + scope_definition_id=scope_definition_id, + scope_version=local_parent_cert.scope_version, + scope_params=json.dumps(scope_params), + public_key=Key( + public_key_string=publickey_response.json()[0]["public_key"] + ), + salt=nonce_response.json()[ + "id" + ], # for pushing signed certs, we use nonce as salt + ) # add ID and signature to the certificate certificate.id = certificate.calculate_uuid() certificate.parent.sign_certificate(certificate) # serialize the chain for sending to server - certificate_chain = list(local_parent_cert.get_descendants(include_self=True)) + [certificate] + certificate_chain = list( + local_parent_cert.get_descendants(include_self=True) + ) + [certificate] data = json.dumps(CertificateSerializer(certificate_chain, many=True).data) # client sends signed certificate chain to server @@ -276,7 +351,9 @@ def _get_nonce(self): def _get_certificate_chain(self, server_cert): # get ancestors certificate chain for this server cert - return self._request(api_urls.CERTIFICATE, params={'ancestors_of': server_cert.id}) + return self._request( + api_urls.CERTIFICATE, params={"ancestors_of": server_cert.id} + ) def _push_certificate_chain(self, data): # push signed certificate chain to server @@ -291,15 +368,24 @@ def _create_transfer_session(self, data): def _update_transfer_session(self, data, transfer_session): # update transfer session on server side with kwargs - return self._request(api_urls.TRANSFERSESSION, method="PATCH", lookup=transfer_session.id, data=data) + return self._request( + api_urls.TRANSFERSESSION, + method="PATCH", + lookup=transfer_session.id, + data=data, + ) def _close_transfer_session(self, transfer_session): # "delete" transfer session on server side - return self._request(api_urls.TRANSFERSESSION, method="DELETE", lookup=transfer_session.id) + return self._request( + api_urls.TRANSFERSESSION, method="DELETE", lookup=transfer_session.id + ) def _close_sync_session(self, sync_session): # "delete" sync session on server side - return self._request(api_urls.SYNCSESSION, method="DELETE", lookup=sync_session.id) + 
return self._request( + api_urls.SYNCSESSION, method="DELETE", lookup=sync_session.id + ) def _push_record_chunk(self, serialized_recs): # push a chunk of records to the server side @@ -308,12 +394,20 @@ def _push_record_chunk(self, serialized_recs): # gzip the data if both client and server have gzipping capabilities if use_gzip: data = json.dumps([dict(el) for el in serialized_recs]) - data = compress_string(bytes(data.encode('utf-8')), compresslevel=self.compresslevel) - return self._request(api_urls.BUFFER, method="POST", data=data, data_is_gzipped=use_gzip) + data = compress_string( + bytes(data.encode("utf-8")), compresslevel=self.compresslevel + ) + return self._request( + api_urls.BUFFER, method="POST", data=data, data_is_gzipped=use_gzip + ) def _pull_record_chunk(self, chunk_size, transfer_session): # pull records from server for given transfer session - params = {'limit': chunk_size, 'offset': transfer_session.records_transferred, 'transfer_session_id': transfer_session.id} + params = { + "limit": chunk_size, + "offset": transfer_session.records_transferred, + "transfer_session_id": transfer_session.id, + } return self._request(api_urls.BUFFER, params=params) @@ -321,6 +415,7 @@ class SyncClient(object): """ Controller to support client in initiating syncing and performing related operations. """ + def __init__(self, sync_connection, sync_session, chunk_size=500): self.sync_connection = sync_connection self.sync_session = sync_session @@ -334,20 +429,26 @@ def initiate_push(self, sync_filter): _queue_into_buffer(self.current_transfer_session) # update the records_total for client and server transfer session - records_total = Buffer.objects.filter(transfer_session=self.current_transfer_session).count() + records_total = Buffer.objects.filter( + transfer_session=self.current_transfer_session + ).count() if records_total == 0: self._close_transfer_session() return self.current_transfer_session.records_total = records_total self.current_transfer_session.save() - self.sync_connection._update_transfer_session({'records_total': records_total}, self.current_transfer_session) + self.sync_connection._update_transfer_session( + {"records_total": records_total}, self.current_transfer_session + ) # push records to server self._push_records(chunk_size=self.chunk_size) # upon successful completion of pushing records, proceed to delete buffered records Buffer.objects.filter(transfer_session=self.current_transfer_session).delete() - RecordMaxCounterBuffer.objects.filter(transfer_session=self.current_transfer_session).delete() + RecordMaxCounterBuffer.objects.filter( + transfer_session=self.current_transfer_session + ).delete() # close client and server transfer session self._close_transfer_session() @@ -364,14 +465,20 @@ def initiate_pull(self, sync_filter): self._dequeue_into_store() # update database max counters but use latest fsics on client - DatabaseMaxCounter.update_fsics(json.loads(self.current_transfer_session.server_fsic), - sync_filter) + DatabaseMaxCounter.update_fsics( + json.loads(self.current_transfer_session.server_fsic), sync_filter + ) self._close_transfer_session() def _pull_records(self, chunk_size=500, callback=None): - while self.current_transfer_session.records_transferred < self.current_transfer_session.records_total: - buffers_resp = self.sync_connection._pull_record_chunk(chunk_size, self.current_transfer_session) + while ( + self.current_transfer_session.records_transferred + < self.current_transfer_session.records_total + ): + buffers_resp = 
self.sync_connection._pull_record_chunk( + chunk_size, self.current_transfer_session + ) # load the returned data from JSON data = buffers_resp.json() @@ -381,26 +488,38 @@ def _pull_records(self, chunk_size=500, callback=None): data = data["results"] # ensure the transfer session allows pulls, and is same across records - transfer_session = TransferSession.objects.get(id=data[0]["transfer_session"]) + transfer_session = TransferSession.objects.get( + id=data[0]["transfer_session"] + ) if transfer_session.push: - raise ValidationError("Specified TransferSession does not allow pulling.") + raise ValidationError( + "Specified TransferSession does not allow pulling." + ) if len(set(rec["transfer_session"] for rec in data)) > 1: - raise ValidationError("All pulled records must be associated with the same TransferSession.") + raise ValidationError( + "All pulled records must be associated with the same TransferSession." + ) if self.current_transfer_session.id != transfer_session.id: - raise ValidationError("Specified TransferSession does not match this SyncClient's current TransferSession.") + raise ValidationError( + "Specified TransferSession does not match this SyncClient's current TransferSession." + ) validate_and_create_buffer_data(data, self.current_transfer_session) def _push_records(self, chunk_size=500, callback=None): # paginate buffered records so we do not load them all into memory - buffered_records = Buffer.objects.filter(transfer_session=self.current_transfer_session) + buffered_records = Buffer.objects.filter( + transfer_session=self.current_transfer_session + ) buffered_pages = Paginator(buffered_records, chunk_size) for count in buffered_pages.page_range: # serialize and send records to server - serialized_recs = BufferSerializer(buffered_pages.page(count).object_list, many=True) + serialized_recs = BufferSerializer( + buffered_pages.page(count).object_list, many=True + ) self.sync_connection._push_record_chunk(serialized_recs.data) # update records_transferred upon successful request @@ -414,7 +533,9 @@ def close_sync_session(self): # "delete" our own local sync session if self.current_transfer_session is not None: - raise MorangoError('Transfer Session must be closed before closing sync session.') + raise MorangoError( + "Transfer Session must be closed before closing sync session." 
+ ) self.sync_session.active = False self.sync_session.save() self.sync_session = None @@ -423,32 +544,41 @@ def _create_transfer_session(self, push, filter): # build data for creating transfer session on server side data = { - 'id': uuid.uuid4().hex, - 'filter': str(filter), - 'push': push, - 'sync_session_id': self.sync_session.id, + "id": uuid.uuid4().hex, + "filter": str(filter), + "push": push, + "sync_session_id": self.sync_session.id, } - data['last_activity_timestamp'] = timezone.now() + data["last_activity_timestamp"] = timezone.now() self.current_transfer_session = TransferSession.objects.create(**data) - data.pop('last_activity_timestamp') + data.pop("last_activity_timestamp") if push: # before pushing, we want to serialize the most recent data and update database max counters - if getattr(settings, 'MORANGO_SERIALIZE_BEFORE_QUEUING', True): - _serialize_into_store(self.current_transfer_session.sync_session.profile, filter=Filter(self.current_transfer_session.filter)) + if getattr(settings, "MORANGO_SERIALIZE_BEFORE_QUEUING", True): + _serialize_into_store( + self.current_transfer_session.sync_session.profile, + filter=Filter(self.current_transfer_session.filter), + ) - data['client_fsic'] = json.dumps(DatabaseMaxCounter.calculate_filter_max_counters(filter)) - self.current_transfer_session.client_fsic = data['client_fsic'] + data["client_fsic"] = json.dumps( + DatabaseMaxCounter.calculate_filter_max_counters(filter) + ) + self.current_transfer_session.client_fsic = data["client_fsic"] # save transfersession locally before creating transfersession server side self.current_transfer_session.save() # create transfer session on server side transfer_resp = self.sync_connection._create_transfer_session(data) - self.current_transfer_session.server_fsic = transfer_resp.json().get('server_fsic') or '{}' + self.current_transfer_session.server_fsic = ( + transfer_resp.json().get("server_fsic") or "{}" + ) if not push: - self.current_transfer_session.records_total = transfer_resp.json().get('records_total') + self.current_transfer_session.records_total = transfer_resp.json().get( + "records_total" + ) self.current_transfer_session.save() def _close_transfer_session(self): diff --git a/morango/urls.py b/morango/urls.py index 3ede65e3..45bad802 100644 --- a/morango/urls.py +++ b/morango/urls.py @@ -1,7 +1,4 @@ -from django.conf.urls import include, url +from django.conf.urls import include +from django.conf.urls import url -urlpatterns = [ - - url(r'^api/morango/v1/', include('morango.api.urls')), - -] +urlpatterns = [url(r"^api/morango/v1/", include("morango.api.urls"))] diff --git a/morango/util.py b/morango/util.py index cf6e28de..36e00f89 100644 --- a/morango/util.py +++ b/morango/util.py @@ -1,17 +1,19 @@ import functools import logging -import sqlite3 import os +import sqlite3 -from morango.constants.file import SQLITE_VARIABLE_FILE_CACHE from morango.constants.capabilities import GZIP_BUFFER_POST +from morango.constants.file import SQLITE_VARIABLE_FILE_CACHE logger = logging.getLogger(__name__) + def get_capabilities(): capabilities = set() try: import gzip # noqa + capabilities.add(GZIP_BUFFER_POST) except ImportError: pass @@ -44,8 +46,7 @@ def __init__(self, *signals): def __enter__(self): for signal in self.signals: - logger.debug('mute_signals: Disabling signal handlers %r', - signal.receivers) + logger.debug("mute_signals: Disabling signal handlers %r", signal.receivers) # Note that we're using implementation details of # django.signals, since arguments to signal.connect() @@ 
-55,8 +56,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_value, traceback): for signal, receivers in self.paused.items(): - logger.debug('mute_signals: Restoring signal handlers %r', - receivers) + logger.debug("mute_signals: Restoring signal handlers %r", receivers) signal.receivers = receivers with signal.lock: @@ -75,8 +75,10 @@ def wrapper(*args, **kwargs): # A mute_signals() object is not reentrant; use a copy every time. with self.copy(): return callable_obj(*args, **kwargs) + return wrapper + def max_parameter_substitution(): """ SQLite has a limit on the max number of variables allowed for parameter substitution. This limit is usually 999, but @@ -85,23 +87,27 @@ def max_parameter_substitution(): """ if os.path.isfile(SQLITE_VARIABLE_FILE_CACHE): return - conn = sqlite3.connect(':memory:') + conn = sqlite3.connect(":memory:") low = 1 - high = 1000 # hard limit for SQLITE_MAX_VARIABLE_NUMBER - conn.execute('CREATE TABLE T1 (id C1)') + high = ( + 1000 + ) # hard limit for SQLITE_MAX_VARIABLE_NUMBER + conn.execute("CREATE TABLE T1 (id C1)") while low < high - 1: guess = (low + high) // 2 try: - statement = 'select * from T1 where id in (%s)' % ','.join(['?' for _ in range(guess)]) + statement = "select * from T1 where id in (%s)" % ",".join( + ["?" for _ in range(guess)] + ) values = [i for i in range(guess)] conn.execute(statement, values) except sqlite3.DatabaseError as ex: - if 'too many SQL variables' in str(ex): + if "too many SQL variables" in str(ex): high = guess else: raise else: low = guess conn.close() - with open(SQLITE_VARIABLE_FILE_CACHE, 'w') as file: + with open(SQLITE_VARIABLE_FILE_CACHE, "w") as file: file.write(str(low)) diff --git a/morango/utils/backends/base.py b/morango/utils/backends/base.py index 2c2fff63..fdfdffea 100644 --- a/morango/utils/backends/base.py +++ b/morango/utils/backends/base.py @@ -1,8 +1,10 @@ -from morango.models import Buffer, RecordMaxCounterBuffer, Store, RecordMaxCounter +from morango.models import Buffer +from morango.models import RecordMaxCounter +from morango.models import RecordMaxCounterBuffer +from morango.models import Store class BaseSQLWrapper(object): - def _dequeuing_delete_rmcb_records(self, cursor, transfersession_id): # delete all RMCBs which are a reverse FF (store version newer than buffer version) delete_rmcb_records = """DELETE FROM {rmcb} @@ -17,11 +19,13 @@ def _dequeuing_delete_rmcb_records(self, cursor, transfersession_id): AND buffer.last_saved_counter <= rmc.counter AND rmcb.transfer_session_id = '{transfer_session_id}' AND buffer.transfer_session_id = '{transfer_session_id}') - """.format(buffer=Buffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + buffer=Buffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(delete_rmcb_records) @@ -37,21 +41,25 @@ def _dequeuing_delete_buffered_records(self, cursor, transfersession_id): AND buffer.last_saved_instance = rmc.instance_id AND buffer.last_saved_counter <= rmc.counter AND buffer.transfer_session_id = '{transfer_session_id}') - """.format(buffer=Buffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + buffer=Buffer._meta.db_table, + 
store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(delete_buffered_records) def _dequeuing_merge_conflict_rmcb(self, cursor, transfersession_id): - raise NotImplemented("Subclass must implement this method.") + raise NotImplementedError("Subclass must implement this method.") def _dequeuing_merge_conflict_buffer(self, cursor, current_id, transfersession_id): - raise NotImplemented("Subclass must implement this method.") + raise NotImplementedError("Subclass must implement this method.") - def _dequeuing_update_rmcs_last_saved_by(self, cursor, current_id, transfersession_id): - raise NotImplemented("Subclass must implement this method.") + def _dequeuing_update_rmcs_last_saved_by( + self, cursor, current_id, transfersession_id + ): + raise NotImplementedError("Subclass must implement this method.") def _dequeuing_delete_mc_buffer(self, cursor, transfersession_id): # delete records with merge conflicts from buffer @@ -66,11 +74,13 @@ def _dequeuing_delete_mc_buffer(self, cursor, transfersession_id): AND store.last_saved_instance = rmcb.instance_id AND store.last_saved_counter <= rmcb.counter AND rmcb.transfer_session_id = '{transfer_session_id}')) - """.format(buffer=Buffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + buffer=Buffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(delete_mc_buffer) def _dequeuing_delete_mc_rmcb(self, cursor, transfersession_id): @@ -89,26 +99,30 @@ def _dequeuing_delete_mc_rmcb(self, cursor, transfersession_id): AND store.last_saved_instance = rmcb2.instance_id AND store.last_saved_counter <= rmcb2.counter AND rmcb2.transfer_session_id = '{transfer_session_id}')) - """.format(buffer=Buffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + buffer=Buffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(delete_mc_rmc) def _dequeuing_insert_remaining_buffer(self, cursor, transfersession_id): - raise NotImplemented("Subclass must implement this method.") + raise NotImplementedError("Subclass must implement this method.") def _dequeuing_insert_remaining_rmcb(self, cursor, transfersession_id): - raise NotImplemented("Subclass must implement this method.") + raise NotImplementedError("Subclass must implement this method.") def _dequeuing_delete_remaining_rmcb(self, cursor, transfersession_id): # delete the remaining rmcb for this transfer session delete_remaining_rmcb = """ DELETE FROM {rmcb} WHERE {rmcb}.transfer_session_id = '{transfer_session_id}' - """.format(rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(delete_remaining_rmcb) @@ -117,6 +131,7 @@ def _dequeuing_delete_remaining_buffer(self, cursor, transfersession_id): delete_remaining_buffer = """ DELETE FROM {buffer} WHERE {buffer}.transfer_session_id = 
'{transfer_session_id}'
-        """.format(buffer=Buffer._meta.db_table,
-                   transfer_session_id=transfersession_id)
+        """.format(
+            buffer=Buffer._meta.db_table, transfer_session_id=transfersession_id
+        )
 
         cursor.execute(delete_remaining_buffer)
diff --git a/morango/utils/backends/postgres.py b/morango/utils/backends/postgres.py
index 70078f82..b3ff60ce 100644
--- a/morango/utils/backends/postgres.py
+++ b/morango/utils/backends/postgres.py
@@ -1,22 +1,41 @@
+from django.db import connection
+
+from morango.models import Buffer
+from morango.models import RecordMaxCounter
+from morango.models import RecordMaxCounterBuffer
+from morango.models import Store
 from morango.utils.backends.base import BaseSQLWrapper
-from morango.models import (Buffer, RecordMaxCounter, RecordMaxCounterBuffer,
-                            Store)
-from django.db import connection
 
 
 class SQLWrapper(BaseSQLWrapper):
-    backend = 'postgresql'
+    backend = "postgresql"
 
-    def _bulk_insert_into_app_models(self, cursor, app_model, fields, db_values, placeholder_list):
+    def _bulk_insert_into_app_models(
+        self, cursor, app_model, fields, db_values, placeholder_list
+    ):
         # convert this list to a string to be passed into raw sql query
-        placeholder_str = ', '.join(placeholder_list).replace("'", '')
+        placeholder_str = ", ".join(placeholder_list).replace("'", "")
         # cast the values in the SET statement to their appropriate postgres db types
-        set_casted_values = ', '.join(map(lambda f: '{f} = nv.{f}::{type}'.format(f=f.attname, type=f.rel_db_type(connection)), fields))
+        set_casted_values = ", ".join(
+            map(
+                lambda f: "{f} = nv.{f}::{type}".format(
+                    f=f.attname, type=f.rel_db_type(connection)
+                ),
+                fields,
+            )
+        )
         # cast the values in the SELECT statement to their appropriate postgres db types
-        select_casted_values = ', '.join(map(lambda f: '{f}::{type}'.format(f=f.attname, type=f.rel_db_type(connection)), fields))
+        select_casted_values = ", ".join(
+            map(
+                lambda f: "{f}::{type}".format(
+                    f=f.attname, type=f.rel_db_type(connection)
+                ),
+                fields,
+            )
+        )
         # cast the pk to the correct field type for this model
         pk = [f for f in fields if f.primary_key][0]
-        fields = str(tuple(str(f.attname) for f in fields)).replace("'", '')
+        fields = str(tuple(str(f.attname) for f in fields)).replace("'", "")
 
         insert = """
             WITH new_values {fields} as
@@ -35,12 +54,14 @@ def _bulk_insert_into_app_models(self, cursor, app_model, fields, db_values, pla
             SELECT {select_fields}
             FROM new_values ut
             WHERE ut.id::{id_type} not in (SELECT id FROM updated)
-        """.format(app_model=app_model,
-                   fields=fields,
-                   placeholder_str=placeholder_str,
-                   set_values=set_casted_values,
-                   select_fields=select_casted_values,
-                   id_type=pk.rel_db_type(connection))
+        """.format(
+            app_model=app_model,
+            fields=fields,
+            placeholder_str=placeholder_str,
+            set_values=set_casted_values,
+            select_fields=select_casted_values,
+            id_type=pk.rel_db_type(connection),
+        )
 
         # use DB-APIs parameter substitution (2nd parameter expects a sequence)
         cursor.execute(insert, db_values)
@@ -62,11 +83,13 @@ def _dequeuing_merge_conflict_rmcb(self, cursor, transfersession_id):
             AND store.last_saved_instance = rmcb2.instance_id
             AND store.last_saved_counter <= rmcb2.counter
             AND rmcb2.transfer_session_id = '{transfer_session_id}')
-        """.format(buffer=Buffer._meta.db_table,
-                   store=Store._meta.db_table,
-                   rmc=RecordMaxCounter._meta.db_table,
-                   rmcb=RecordMaxCounterBuffer._meta.db_table,
-                   transfer_session_id=transfersession_id)
+        """.format(
+            buffer=Buffer._meta.db_table,
+            store=Store._meta.db_table,
+            
rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(merge_conflict_rmc) @@ -86,17 +109,21 @@ def _dequeuing_merge_conflict_buffer(self, cursor, current_id, transfersession_i AND store.last_saved_instance = rmcb2.instance_id AND store.last_saved_counter <= rmcb2.counter AND rmcb2.transfer_session_id = '{transfer_session_id}') - """.format(buffer=Buffer._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - transfer_session_id=transfersession_id, - current_instance_id=current_id.id, - current_instance_counter=current_id.counter) + """.format( + buffer=Buffer._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + transfer_session_id=transfersession_id, + current_instance_id=current_id.id, + current_instance_counter=current_id.counter, + ) cursor.execute(merge_conflict_store) - def _dequeuing_update_rmcs_last_saved_by(self, cursor, current_id, transfersession_id): + def _dequeuing_update_rmcs_last_saved_by( + self, cursor, current_id, transfersession_id + ): # update or create rmc for merge conflicts with local instance id merge_conflict_store = """ WITH new_values as @@ -124,13 +151,15 @@ def _dequeuing_update_rmcs_last_saved_by(self, cursor, current_id, transfersessi SELECT '{current_instance_id}'::uuid, {current_instance_counter}, ut.id FROM new_values ut WHERE ut.id not in (SELECT store_model_id FROM updated) - """.format(buffer=Buffer._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - transfer_session_id=transfersession_id, - current_instance_id=current_id.id, - current_instance_counter=current_id.counter) + """.format( + buffer=Buffer._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + transfer_session_id=transfersession_id, + current_instance_id=current_id.id, + current_instance_counter=current_id.counter, + ) cursor.execute(merge_conflict_store) @@ -162,9 +191,11 @@ def _dequeuing_insert_remaining_buffer(self, cursor, transfersession_id): ut._self_ref_fk FROM new_values ut WHERE ut.model_uuid not in (SELECT id FROM updated) - """.format(buffer=Buffer._meta.db_table, - store=Store._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + buffer=Buffer._meta.db_table, + store=Store._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(insert_remaining_buffer) @@ -190,8 +221,10 @@ def _dequeuing_insert_remaining_rmcb(self, cursor, transfersession_id): FROM new_values ut WHERE (ut.model_uuid, ut.rmcb_instance_id) not in (SELECT store_model_id, instance_id FROM updated) - """.format(rmc=RecordMaxCounter._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(insert_remaining_rmcb) diff --git a/morango/utils/backends/sqlite.py b/morango/utils/backends/sqlite.py index 287273d2..f523af2c 100644 --- a/morango/utils/backends/sqlite.py +++ b/morango/utils/backends/sqlite.py @@ -1,12 +1,15 @@ import os from morango.constants.file import SQLITE_VARIABLE_FILE_CACHE -from morango.models import (Buffer, RecordMaxCounter, RecordMaxCounterBuffer, - Store) 
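A minimal sketch of the chunking arithmetic that _bulk_insert_into_app_models in the sqlite wrapper below relies on, assuming the typical 999-variable limit; the four-field model and row count are invented for illustration:

# Illustration: how the SQLite variable limit translates into row chunks for
# the REPLACE INTO statements that _bulk_insert_into_app_models builds.
SQLITE_MAX_VARIABLE_NUMBER = 999  # typical compile-time default
num_fields = 4  # invented model with four columns

num_of_rows_able_to_insert = SQLITE_MAX_VARIABLE_NUMBER // num_fields  # 249
num_of_values_able_to_insert = num_of_rows_able_to_insert * num_fields  # 996

# 500 rows flatten into 2000 values; they are split into chunks of complete
# rows so no single statement exceeds the variable limit:
db_values = list(range(500 * num_fields))
value_chunks = [
    db_values[x : x + num_of_values_able_to_insert]
    for x in range(0, len(db_values), num_of_values_able_to_insert)
]
assert [len(chunk) // num_fields for chunk in value_chunks] == [249, 249, 2]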
+from morango.models import Buffer +from morango.models import RecordMaxCounter +from morango.models import RecordMaxCounterBuffer +from morango.models import Store from morango.utils.backends.base import BaseSQLWrapper + class SQLWrapper(BaseSQLWrapper): - backend = 'sqlite' + backend = "sqlite" def __init__(self): if os.path.isfile(SQLITE_VARIABLE_FILE_CACHE): @@ -15,7 +18,9 @@ def __init__(self): else: self.SQLITE_MAX_VARIABLE_NUMBER = 999 - def _bulk_insert_into_app_models(self, cursor, app_model, fields, db_values, placeholder_list): + def _bulk_insert_into_app_models( + self, cursor, app_model, fields, db_values, placeholder_list + ): """ Example query: `REPLACE INTO model (F1,F2,F3) VALUES (%s, %s, %s), (%s, %s, %s), (%s, %s, %s)` @@ -24,15 +29,23 @@ def _bulk_insert_into_app_models(self, cursor, app_model, fields, db_values, pla # calculate and create equal sized chunks of data to insert incrementally num_of_rows_able_to_insert = self.SQLITE_MAX_VARIABLE_NUMBER // len(fields) num_of_values_able_to_insert = num_of_rows_able_to_insert * len(fields) - value_chunks = [db_values[x:x + num_of_values_able_to_insert] for x in range(0, len(db_values), num_of_values_able_to_insert)] - placeholder_chunks = [placeholder_list[x: x + num_of_rows_able_to_insert] for x in range(0, len(placeholder_list), num_of_rows_able_to_insert)] + value_chunks = [ + db_values[x : x + num_of_values_able_to_insert] + for x in range(0, len(db_values), num_of_values_able_to_insert) + ] + placeholder_chunks = [ + placeholder_list[x : x + num_of_rows_able_to_insert] + for x in range(0, len(placeholder_list), num_of_rows_able_to_insert) + ] # insert data chunks - fields = str(tuple(str(f.attname) for f in fields)).replace("'", '') + fields = str(tuple(str(f.attname) for f in fields)).replace("'", "") for values, params in zip(value_chunks, placeholder_chunks): - placeholder_str = ', '.join(params).replace("'", '') + placeholder_str = ", ".join(params).replace("'", "") insert = """REPLACE INTO {app_model} {fields} VALUES {placeholder_str} - """.format(app_model=app_model, fields=fields, placeholder_str=placeholder_str) + """.format( + app_model=app_model, fields=fields, placeholder_str=placeholder_str + ) # use DB-APIs parameter substitution (2nd parameter expects a sequence) cursor.execute(insert, values) @@ -54,11 +67,13 @@ def _dequeuing_merge_conflict_rmcb(self, cursor, transfersession_id): AND store.last_saved_instance = rmcb2.instance_id AND store.last_saved_counter <= rmcb2.counter AND rmcb2.transfer_session_id = '{transfer_session_id}') - """.format(buffer=Buffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + buffer=Buffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(merge_conflict_rmc) def _dequeuing_merge_conflict_buffer(self, cursor, current_id, transfersession_id): @@ -77,16 +92,20 @@ def _dequeuing_merge_conflict_buffer(self, cursor, current_id, transfersession_i AND store.last_saved_instance = rmcb2.instance_id AND store.last_saved_counter <= rmcb2.counter AND rmcb2.transfer_session_id = '{transfer_session_id}') - """.format(buffer=Buffer._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - transfer_session_id=transfersession_id, - 
current_instance_id=current_id.id, - current_instance_counter=current_id.counter) + """.format( + buffer=Buffer._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + transfer_session_id=transfersession_id, + current_instance_id=current_id.id, + current_instance_counter=current_id.counter, + ) cursor.execute(merge_conflict_store) - def _dequeuing_update_rmcs_last_saved_by(self, cursor, current_id, transfersession_id): + def _dequeuing_update_rmcs_last_saved_by( + self, cursor, current_id, transfersession_id + ): # update or create rmc for merge conflicts with local instance id merge_conflict_store = """REPLACE INTO {rmc} (instance_id, counter, store_model_id) SELECT '{current_instance_id}', {current_instance_counter}, store.id @@ -99,13 +118,15 @@ def _dequeuing_update_rmcs_last_saved_by(self, cursor, current_id, transfersessi AND store.last_saved_instance = rmcb2.instance_id AND store.last_saved_counter <= rmcb2.counter AND rmcb2.transfer_session_id = '{transfer_session_id}') - """.format(buffer=Buffer._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - store=Store._meta.db_table, - rmc=RecordMaxCounter._meta.db_table, - transfer_session_id=transfersession_id, - current_instance_id=current_id.id, - current_instance_counter=current_id.counter) + """.format( + buffer=Buffer._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + store=Store._meta.db_table, + rmc=RecordMaxCounter._meta.db_table, + transfer_session_id=transfersession_id, + current_instance_id=current_id.id, + current_instance_counter=current_id.counter, + ) cursor.execute(merge_conflict_store) def _dequeuing_insert_remaining_buffer(self, cursor, transfersession_id): @@ -117,9 +138,11 @@ def _dequeuing_insert_remaining_buffer(self, cursor, transfersession_id): buffer._self_ref_fk FROM {buffer} AS buffer WHERE buffer.transfer_session_id = '{transfer_session_id}' - """.format(buffer=Buffer._meta.db_table, - store=Store._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + buffer=Buffer._meta.db_table, + store=Store._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(insert_remaining_buffer) @@ -129,8 +152,10 @@ def _dequeuing_insert_remaining_rmcb(self, cursor, transfersession_id): SELECT rmcb.instance_id, rmcb.counter, rmcb.model_uuid FROM {rmcb} AS rmcb WHERE rmcb.transfer_session_id = '{transfer_session_id}' - """.format(rmc=RecordMaxCounter._meta.db_table, - rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession_id) + """.format( + rmc=RecordMaxCounter._meta.db_table, + rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession_id, + ) cursor.execute(insert_remaining_rmcb) diff --git a/morango/utils/backends/utils.py b/morango/utils/backends/utils.py index 186ec17f..ec05e825 100644 --- a/morango/utils/backends/utils.py +++ b/morango/utils/backends/utils.py @@ -1,10 +1,12 @@ from importlib import import_module + from morango.errors import MorangoError + def load_backend(conn): - if 'postgresql' in conn.vendor: - return import_module('morango.utils.backends.postgres') - if 'sqlite' in conn.vendor: - return import_module('morango.utils.backends.sqlite') + if "postgresql" in conn.vendor: + return import_module("morango.utils.backends.postgres") + if "sqlite" in conn.vendor: + return import_module("morango.utils.backends.sqlite") raise MorangoError("Incompatible database backend for syncing") diff --git a/morango/utils/morango_mptt.py 
b/morango/utils/morango_mptt.py index c3e26f68..0ce09c43 100644 --- a/morango/utils/morango_mptt.py +++ b/morango/utils/morango_mptt.py @@ -1,6 +1,9 @@ +from mptt import managers +from mptt import models +from mptt import querysets + from morango.manager import SyncableModelManager from morango.query import SyncableModelQuerySet -from mptt import querysets, managers, models class MorangoTreeQuerySet(querysets.TreeQuerySet, SyncableModelQuerySet): @@ -8,12 +11,11 @@ class MorangoTreeQuerySet(querysets.TreeQuerySet, SyncableModelQuerySet): class MorangoMPTTTreeManager(managers.TreeManager, SyncableModelManager): - def get_queryset(self): return MorangoTreeQuerySet(self.model, using=self._db) def _mptt_update(self, qs=None, **items): - items['update_dirty_bit_to'] = None + items["update_dirty_bit_to"] = None return super(MorangoMPTTTreeManager, self)._mptt_update(qs, **items) @@ -22,7 +24,8 @@ class MorangoMPTTModel(models.MPTTModel): Any model that inherits from ``SyncableModel`` that also wants to inherit from ``MPTTModel`` should instead inherit from ``MorangoMPTTModel``, which modifies some behavior to make it safe for the syncing system. """ - _internal_mptt_fields_not_to_serialize = ('lft', 'rght', 'tree_id', 'level') + + _internal_mptt_fields_not_to_serialize = ("lft", "rght", "tree_id", "level") objects = MorangoMPTTTreeManager() diff --git a/morango/utils/proquint.py b/morango/utils/proquint.py index dfafd388..658d77d8 100644 --- a/morango/utils/proquint.py +++ b/morango/utils/proquint.py @@ -3,7 +3,7 @@ The simplest ways to use this module are the :func:`humanize` and :func:`uuid` functions. For tighter control over the output, see :class:`HumanHasher`. """ -from argparse import ArgumentError +import uuid # Copyright (c) 2014 SUNET. All rights reserved. # @@ -39,19 +39,16 @@ See the source file for complete license statement. 
""" -import uuid - -__version__ = '0.1.0' -__copyright__ = 'SUNET' -__organization__ = 'SUNET' -__license__ = 'BSD' -__authors__ = ['Fredrik Thulin'] +__version__ = "0.1.0" +__copyright__ = "SUNET" +__organization__ = "SUNET" +__license__ = "BSD" +__authors__ = ["Fredrik Thulin"] -__all__ = [ -] +__all__ = [] -CONSONANTS = 'bdfghjklmnprstvz' -VOWELS = 'aiou' +CONSONANTS = "bdfghjklmnprstvz" +VOWELS = "aiou" def from_int(data): @@ -62,21 +59,21 @@ def from_int(data): :rtype: string """ if not isinstance(data, int) and not isinstance(data, long): - raise TypeError('Input must be integer') + raise TypeError("Input must be integer") res = [] while data > 0 or not res: for j in range(5): if not j % 2: - res += CONSONANTS[(data & 0xf)] + res += CONSONANTS[(data & 0xF)] data >>= 4 else: res += VOWELS[(data & 0x3)] data >>= 2 if data > 0: - res += '-' + res += "-" res.reverse() - return ''.join(res) + return "".join(res) def to_int(data): @@ -87,12 +84,12 @@ def to_int(data): :rtype: int """ if not isinstance(data, basestring): - raise TypeError('Input must be string') + raise TypeError("Input must be string") res = 0 - for part in data.split('-'): + for part in data.split("-"): if len(part) != 5: - raise ValueError('Malformed proquint') + raise ValueError("Malformed proquint") for j in range(5): try: if not j % 2: @@ -102,7 +99,7 @@ def to_int(data): res <<= 2 res |= VOWELS.index(part[j]) except ValueError: - raise ValueError('Unknown character \'{!s}\' in proquint'.format(part[j])) + raise ValueError("Unknown character '{!s}' in proquint".format(part[j])) return res @@ -111,4 +108,4 @@ def generate(): :returns: proquint :rtype: int """ - return from_int(int(uuid.uuid4().hex[:8], 16)).replace('-', '') + return from_int(int(uuid.uuid4().hex[:8], 16)).replace("-", "") diff --git a/morango/utils/register_models.py b/morango/utils/register_models.py index 5823d120..3f090176 100644 --- a/morango/utils/register_models.py +++ b/morango/utils/register_models.py @@ -3,20 +3,26 @@ like to inherit from to make their data syncable. This method takes care of registering morango data structures on a per-profile basis. 
""" - from collections import OrderedDict + from django.db.models.fields.related import ForeignKey from django.utils.six import iteritems -from morango.errors import ( - InvalidMorangoModelConfiguration, InvalidMPTTManager, InvalidMPTTQuerySet, InvalidSyncableManager, InvalidSyncableQueryset, UnsupportedFieldType -) +from morango.errors import InvalidMorangoModelConfiguration +from morango.errors import InvalidMPTTManager +from morango.errors import InvalidMPTTQuerySet +from morango.errors import InvalidSyncableManager +from morango.errors import InvalidSyncableQueryset +from morango.errors import UnsupportedFieldType _profile_models = {} def _get_foreign_key_classes(m): - return set([field.rel.to for field in m._meta.fields if isinstance(field, ForeignKey)]) + return set( + [field.rel.to for field in m._meta.fields if isinstance(field, ForeignKey)] + ) + def _multiple_self_ref_fk_check(class_model): """ @@ -30,6 +36,7 @@ def _multiple_self_ref_fk_check(class_model): self_fk.append(class_model) return False + def _insert_model_into_profile_dict(model, profile): # When we add models to be synced, we need to make sure # that models that depend on other models are synced AFTER @@ -39,11 +46,17 @@ def _insert_model_into_profile_dict(model, profile): foreign_key_classes = _get_foreign_key_classes(model) # add any more specified dependencies - if hasattr(model, 'morango_model_dependencies'): - foreign_key_classes = foreign_key_classes | set(model.morango_model_dependencies) + if hasattr(model, "morango_model_dependencies"): + foreign_key_classes = foreign_key_classes | set( + model.morango_model_dependencies + ) # Find all the existing models that this new model refers to. - class_indices = [_profile_models[profile].index(cls) for cls in foreign_key_classes if cls in _profile_models[profile]] + class_indices = [ + _profile_models[profile].index(cls) + for cls in foreign_key_classes + if cls in _profile_models[profile] + ] # Insert just after the last dependency found, # or at the front if no dependencies @@ -53,7 +66,7 @@ def _insert_model_into_profile_dict(model, profile): _profile_models[profile].insert(insert_after_idx, model) -def add_syncable_models(): +def add_syncable_models(): # noqa: C901 """ Per profile, adds each model to a dictionary mapping the morango model name to its model class. We sort by ForeignKey dependencies to safely sync data. @@ -70,31 +83,64 @@ def add_syncable_models(): if issubclass(model_class, SyncableModel): name = model_class.__name__ if _multiple_self_ref_fk_check(model_class): - raise InvalidMorangoModelConfiguration("Syncing models with more than 1 self referential ForeignKey is not supported.") + raise InvalidMorangoModelConfiguration( + "Syncing models with more than 1 self referential ForeignKey is not supported." 
+                )
             try:
                 from mptt import models
-                from morango.utils.morango_mptt import MorangoMPTTModel, MorangoMPTTTreeManager, MorangoTreeQuerySet
+                from morango.utils.morango_mptt import (
+                    MorangoMPTTModel,
+                    MorangoMPTTTreeManager,
+                    MorangoTreeQuerySet,
+                )
+
                 # mptt syncable model checks
                 if issubclass(model_class, models.MPTTModel):
                     if not issubclass(model_class, MorangoMPTTModel):
-                        raise InvalidMorangoModelConfiguration("{} that inherits from MPTTModel, should instead inherit from MorangoMPTTModel.".format(name))
+                        raise InvalidMorangoModelConfiguration(
+                            "{} that inherits from MPTTModel, should instead inherit from MorangoMPTTModel.".format(
+                                name
+                            )
+                        )
                     if not isinstance(model_class.objects, MorangoMPTTTreeManager):
-                        raise InvalidMPTTManager("Manager for {} must inherit from MorangoMPTTTreeManager.".format(name))
+                        raise InvalidMPTTManager(
+                            "Manager for {} must inherit from MorangoMPTTTreeManager.".format(
+                                name
+                            )
+                        )
                     if not isinstance(model_class.objects.none(), MorangoTreeQuerySet):
-                        raise InvalidMPTTQuerySet("Queryset for {} model must inherit from MorangoTreeQuerySet.".format(name))
+                        raise InvalidMPTTQuerySet(
+                            "Queryset for {} model must inherit from MorangoTreeQuerySet.".format(
+                                name
+                            )
+                        )
             except ImportError:
                 pass
             # syncable model checks
             if not isinstance(model_class.objects, SyncableModelManager):
-                raise InvalidSyncableManager("Manager for {} must inherit from SyncableModelManager.".format(name))
+                raise InvalidSyncableManager(
+                    "Manager for {} must inherit from SyncableModelManager.".format(
+                        name
+                    )
+                )
             if not isinstance(model_class.objects.none(), SyncableModelQuerySet):
-                raise InvalidSyncableQueryset("Queryset for {} model must inherit from SyncableModelQuerySet.".format(name))
+                raise InvalidSyncableQueryset(
+                    "Queryset for {} model must inherit from SyncableModelQuerySet.".format(
+                        name
+                    )
+                )
             if model_class._meta.many_to_many:
-                raise UnsupportedFieldType("{} model with a ManyToManyField is not supported in morango.")
-            if not hasattr(model_class, 'morango_model_name'):
-                raise InvalidMorangoModelConfiguration("{} model must define a morango_model_name attribute".format(name))
-            if not hasattr(model_class, 'morango_profile'):
-                raise InvalidMorangoModelConfiguration("{} model must define a morango_profile attribute".format(name))
+                raise UnsupportedFieldType(
+                    "{} model with a ManyToManyField is not supported in morango.".format(name)
+ ) + if not hasattr(model_class, "morango_model_name"): + raise InvalidMorangoModelConfiguration( + "{} model must define a morango_model_name attribute".format(name) + ) + if not hasattr(model_class, "morango_profile"): + raise InvalidMorangoModelConfiguration( + "{} model must define a morango_profile attribute".format(name) + ) # create empty list to hold model classes for profile if not yet created profile = model_class.morango_profile diff --git a/morango/utils/sync_utils.py b/morango/utils/sync_utils.py index bf8f159f..1dff79d2 100644 --- a/morango/utils/sync_utils.py +++ b/morango/utils/sync_utils.py @@ -4,27 +4,34 @@ from django.conf import settings from django.core import exceptions from django.core.serializers.json import DjangoJSONEncoder -from django.db import connection, transaction -from django.db import models -from django.db.models import F, Q, CharField, Func, TextField, Value -from django.db.models.functions import Cast +from django.db import connection +from django.db import transaction +from django.db.models import Q +from django.db.models import signals from django.utils import six + from morango.certificates import Filter -from morango.models import (Buffer, DatabaseMaxCounter, DeletedModels, HardDeletedModels, - InstanceIDModel, RecordMaxCounter, - RecordMaxCounterBuffer, Store) -from morango.utils.register_models import _profile_models -from morango.utils.backends.utils import load_backend -from django.db.models import signals +from morango.models import Buffer +from morango.models import DatabaseMaxCounter +from morango.models import DeletedModels +from morango.models import HardDeletedModels +from morango.models import InstanceIDModel +from morango.models import RecordMaxCounter +from morango.models import RecordMaxCounterBuffer +from morango.models import Store from morango.util import mute_signals +from morango.utils.backends.utils import load_backend +from morango.utils.register_models import _profile_models DBBackend = load_backend(connection).SQLWrapper() + def _join_with_logical_operator(lst, operator): op = ") {operator} (".format(operator=operator) return "(({items}))".format(items=op.join(lst)) + def _self_referential_fk(klass_model): """ Return whether this model has a self ref FK, and the name for the field @@ -35,6 +42,7 @@ def _self_referential_fk(klass_model): return f.attname return None + def _fsic_queuing_calc(fsic1, fsic2): """ We set the lower counter between two same instance ids. 
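A worked example of the FSIC comparison that _fsic_queuing_calc (reformatted in the hunk just below) performs; the instance ids and counters here are made up:

# Each FSIC maps instance_id -> highest counter known for that instance.
sender_fsic = {"instance_a": 5, "instance_b": 3, "instance_c": 1}
receiver_fsic = {"instance_a": 2, "instance_b": 3}

# Keep only instances where the receiver is behind, mapped to the receiver's
# (lower) counter; records the sender saved beyond that counter are the ones
# that get queued into the buffer.
to_queue = {
    instance: receiver_fsic.get(instance, 0)
    for instance, counter in sender_fsic.items()
    if receiver_fsic.get(instance, 0) < counter
}
assert to_queue == {"instance_a": 2, "instance_c": 0}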
@@ -44,7 +52,12 @@ def _fsic_queuing_calc(fsic1, fsic2): :param fsic2: dictionary containing (instance_id, counter) pairs :return ``dict`` of fsics to be used in queueing the correct records to the buffer """ - return {instance: fsic2.get(instance, 0) for instance, counter in six.iteritems(fsic1) if fsic2.get(instance, 0) < counter} + return { + instance: fsic2.get(instance, 0) + for instance, counter in six.iteritems(fsic1) + if fsic2.get(instance, 0) < counter + } + def _serialize_into_store(profile, filter=None): """ @@ -57,7 +70,10 @@ def _serialize_into_store(profile, filter=None): # create Q objects for filtering by prefixes prefix_condition = None if filter: - prefix_condition = functools.reduce(lambda x, y: x | y, [Q(_morango_partition__startswith=prefix) for prefix in filter]) + prefix_condition = functools.reduce( + lambda x, y: x | y, + [Q(_morango_partition__startswith=prefix) for prefix in filter], + ) # filter through all models with the dirty bit turned on syncable_dict = _profile_models[profile] @@ -67,14 +83,20 @@ def _serialize_into_store(profile, filter=None): klass_queryset = klass_model.objects.filter(_morango_dirty_bit=True) if prefix_condition: klass_queryset = klass_queryset.filter(prefix_condition) - store_records_dict = Store.objects.in_bulk(id_list=klass_queryset.values_list('id', flat=True)) + store_records_dict = Store.objects.in_bulk( + id_list=klass_queryset.values_list("id", flat=True) + ) for app_model in klass_queryset: try: store_model = store_records_dict[app_model.id] # if store record dirty and app record dirty, append store serialized to conflicting data if store_model.dirty_bit: - store_model.conflicting_serialized_data = store_model.serialized + "\n" + store_model.conflicting_serialized_data + store_model.conflicting_serialized_data = ( + store_model.serialized + + "\n" + + store_model.conflicting_serialized_data + ) store_model.dirty_bit = False # set new serialized data on this store model @@ -83,9 +105,11 @@ def _serialize_into_store(profile, filter=None): store_model.serialized = DjangoJSONEncoder().encode(ser_dict) # create or update instance and counter on the record max counter for this store model - RecordMaxCounter.objects.update_or_create(defaults={'counter': current_id.counter}, - instance_id=current_id.id, - store_model_id=store_model.id) + RecordMaxCounter.objects.update_or_create( + defaults={"counter": current_id.counter}, + instance_id=current_id.id, + store_model_id=store_model.id, + ) # update last saved bys for this store model store_model.last_saved_instance = current_id.id @@ -99,23 +123,29 @@ def _serialize_into_store(profile, filter=None): except KeyError: kwargs = { - 'id': app_model.id, - 'serialized': DjangoJSONEncoder().encode(app_model.serialize()), - 'last_saved_instance': current_id.id, - 'last_saved_counter': current_id.counter, - 'model_name': app_model.morango_model_name, - 'profile': app_model.morango_profile, - 'partition': app_model._morango_partition, - 'source_id': app_model._morango_source_id, + "id": app_model.id, + "serialized": DjangoJSONEncoder().encode(app_model.serialize()), + "last_saved_instance": current_id.id, + "last_saved_counter": current_id.counter, + "model_name": app_model.morango_model_name, + "profile": app_model.morango_profile, + "partition": app_model._morango_partition, + "source_id": app_model._morango_source_id, } # check if model has FK pointing to it and add the value to a field on the store self_ref_fk = _self_referential_fk(klass_model) if self_ref_fk: self_ref_fk_value = 
getattr(app_model, self_ref_fk) - kwargs.update({'_self_ref_fk': self_ref_fk_value or ''}) + kwargs.update({"_self_ref_fk": self_ref_fk_value or ""}) # create store model and record max counter for the app model new_store_records.append(Store(**kwargs)) - new_rmc_records.append(RecordMaxCounter(store_model_id=app_model.id, instance_id=current_id.id, counter=current_id.counter)) + new_rmc_records.append( + RecordMaxCounter( + store_model_id=app_model.id, + instance_id=current_id.id, + counter=current_id.counter, + ) + ) # bulk create store and rmc records for this class Store.objects.bulk_create(new_store_records) @@ -125,31 +155,64 @@ def _serialize_into_store(profile, filter=None): klass_queryset.update(update_dirty_bit_to=False) # get list of ids of deleted models - deleted_ids = DeletedModels.objects.filter(profile=profile).values_list('id', flat=True) + deleted_ids = DeletedModels.objects.filter(profile=profile).values_list( + "id", flat=True + ) # update last_saved_bys and deleted flag of all deleted store model instances deleted_store_records = Store.objects.filter(id__in=deleted_ids) - deleted_store_records.update(dirty_bit=False, deleted=True, last_saved_instance=current_id.id, last_saved_counter=current_id.counter) + deleted_store_records.update( + dirty_bit=False, + deleted=True, + last_saved_instance=current_id.id, + last_saved_counter=current_id.counter, + ) # update rmcs counters for deleted models that have our instance id - RecordMaxCounter.objects.filter(instance_id=current_id.id, store_model_id__in=deleted_ids).update(counter=current_id.counter) + RecordMaxCounter.objects.filter( + instance_id=current_id.id, store_model_id__in=deleted_ids + ).update(counter=current_id.counter) # get a list of deleted model ids that don't have an rmc for our instance id - new_rmc_ids = deleted_store_records.exclude(recordmaxcounter__instance_id=current_id.id).values_list("id", flat=True) + new_rmc_ids = deleted_store_records.exclude( + recordmaxcounter__instance_id=current_id.id + ).values_list("id", flat=True) # bulk create these new rmcs - RecordMaxCounter.objects.bulk_create([RecordMaxCounter(store_model_id=r_id, instance_id=current_id.id, counter=current_id.counter) for r_id in new_rmc_ids]) + RecordMaxCounter.objects.bulk_create( + [ + RecordMaxCounter( + store_model_id=r_id, + instance_id=current_id.id, + counter=current_id.counter, + ) + for r_id in new_rmc_ids + ] + ) # clear deleted models table for this profile DeletedModels.objects.filter(profile=profile).delete() # handle logic for hard deletion models - hard_deleted_ids = HardDeletedModels.objects.filter(profile=profile).values_list('id', flat=True) + hard_deleted_ids = HardDeletedModels.objects.filter( + profile=profile + ).values_list("id", flat=True) hard_deleted_store_records = Store.objects.filter(id__in=hard_deleted_ids) - hard_deleted_store_records.update(hard_deleted=True, serialized='{}', conflicting_serialized_data='') + hard_deleted_store_records.update( + hard_deleted=True, serialized="{}", conflicting_serialized_data="" + ) HardDeletedModels.objects.filter(profile=profile).delete() # update our own database max counters after serialization if not filter: - DatabaseMaxCounter.objects.update_or_create(instance_id=current_id.id, partition="", defaults={'counter': current_id.counter}) + DatabaseMaxCounter.objects.update_or_create( + instance_id=current_id.id, + partition="", + defaults={"counter": current_id.counter}, + ) else: for f in filter: - DatabaseMaxCounter.objects.update_or_create(instance_id=current_id.id, 
partition=f, defaults={'counter': current_id.counter}) + DatabaseMaxCounter.objects.update_or_create( + instance_id=current_id.id, + partition=f, + defaults={"counter": current_id.counter}, + ) + def _deserialize_from_store(profile): """ @@ -170,9 +233,16 @@ def _deserialize_from_store(profile): for klass in klass_model.morango_model_dependencies: query |= Q(model_name=klass.morango_model_name) if self_ref_fk: - clean_parents = Store.objects.filter(dirty_bit=False, profile=profile).filter(query).char_ids_list() - dirty_children = Store.objects.filter(dirty_bit=True, profile=profile) \ - .filter(Q(_self_ref_fk__in=clean_parents) | Q(_self_ref_fk='')).filter(query) + clean_parents = ( + Store.objects.filter(dirty_bit=False, profile=profile) + .filter(query) + .char_ids_list() + ) + dirty_children = ( + Store.objects.filter(dirty_bit=True, profile=profile) + .filter(Q(_self_ref_fk__in=clean_parents) | Q(_self_ref_fk="")) + .filter(query) + ) # keep iterating until size of dirty_children is 0 while len(dirty_children) > 0: @@ -184,19 +254,27 @@ def _deserialize_from_store(profile): app_model.save(update_dirty_bit_to=False) # we update a store model after we have deserialized it to be able to mark it as a clean parent store_model.dirty_bit = False - store_model.save(update_fields=['dirty_bit']) + store_model.save(update_fields=["dirty_bit"]) except exceptions.ValidationError: # if the app model did not validate, we leave the store dirty bit set excluded_list.append(store_model.id) # update lists with new clean parents and dirty children - clean_parents = Store.objects.filter(dirty_bit=False, profile=profile).filter(query).char_ids_list() - dirty_children = Store.objects.filter(dirty_bit=True, profile=profile, _self_ref_fk__in=clean_parents).filter(query) + clean_parents = ( + Store.objects.filter(dirty_bit=False, profile=profile) + .filter(query) + .char_ids_list() + ) + dirty_children = Store.objects.filter( + dirty_bit=True, profile=profile, _self_ref_fk__in=clean_parents + ).filter(query) else: # array for holding db values from the fields of each model for this class db_values = [] fields = klass_model._meta.fields - for store_model in Store.objects.filter(model_name=model_name, profile=profile, dirty_bit=True): + for store_model in Store.objects.filter( + model_name=model_name, profile=profile, dirty_bit=True + ): try: app_model = store_model._deserialize_store_model(fk_cache) # if the model was not deleted add its field values to the list @@ -213,14 +291,24 @@ def _deserialize_from_store(profile): # number of rows to update num_of_rows = len(db_values) // len(fields) # create '%s' placeholders for a single row - placeholder_tuple = tuple(['%s' for _ in range(len(fields))]) + placeholder_tuple = tuple(["%s" for _ in range(len(fields))]) # create list of the '%s' tuple placeholders based on number of rows to update - placeholder_list = [str(placeholder_tuple) for _ in range(num_of_rows)] + placeholder_list = [ + str(placeholder_tuple) for _ in range(num_of_rows) + ] with connection.cursor() as cursor: - DBBackend._bulk_insert_into_app_models(cursor, klass_model._meta.db_table, fields, db_values, placeholder_list) + DBBackend._bulk_insert_into_app_models( + cursor, + klass_model._meta.db_table, + fields, + db_values, + placeholder_list, + ) # clear dirty bit for all store models for this profile except for models that did not validate - Store.objects.exclude(id__in=excluded_list).filter(profile=profile, dirty_bit=True).update(dirty_bit=False) + 
Store.objects.exclude(id__in=excluded_list).filter( + profile=profile, dirty_bit=True + ).update(dirty_bit=False) @transaction.atomic() @@ -244,22 +332,36 @@ def _queue_into_buffer(transfersession): # create condition for all push FSICs where instance_ids are equal, but internal counters are higher than FSICs counters for instance, counter in six.iteritems(fsics): - last_saved_by_conditions += ["(last_saved_instance = '{0}' AND last_saved_counter > {1})".format(instance, counter)] + last_saved_by_conditions += [ + "(last_saved_instance = '{0}' AND last_saved_counter > {1})".format( + instance, counter + ) + ] if fsics: - last_saved_by_conditions = [_join_with_logical_operator(last_saved_by_conditions, 'OR')] + last_saved_by_conditions = [ + _join_with_logical_operator(last_saved_by_conditions, "OR") + ] partition_conditions = [] # create condition for filtering by partitions for prefix in filter_prefixes: partition_conditions += ["partition LIKE '{}%'".format(prefix)] if filter_prefixes: - partition_conditions = [_join_with_logical_operator(partition_conditions, 'OR')] + partition_conditions = [_join_with_logical_operator(partition_conditions, "OR")] # combine conditions - fsic_and_partition_conditions = _join_with_logical_operator(last_saved_by_conditions + partition_conditions, 'AND') + fsic_and_partition_conditions = _join_with_logical_operator( + last_saved_by_conditions + partition_conditions, "AND" + ) # filter by profile - where_condition = _join_with_logical_operator([fsic_and_partition_conditions, "profile = '{}'".format(transfersession.sync_session.profile)], 'AND') + where_condition = _join_with_logical_operator( + [ + fsic_and_partition_conditions, + "profile = '{}'".format(transfersession.sync_session.profile), + ], + "AND", + ) # execute raw sql to take all records that match condition, to be put into buffer for transfer with connection.cursor() as cursor: @@ -267,10 +369,12 @@ def _queue_into_buffer(transfersession): (model_uuid, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted, model_name, profile, partition, source_id, conflicting_serialized_data, transfer_session_id, _self_ref_fk) SELECT id, serialized, deleted, last_saved_instance, last_saved_counter, hard_deleted, model_name, profile, partition, source_id, conflicting_serialized_data, '{transfer_session_id}', _self_ref_fk - FROM {store} WHERE {condition}""".format(outgoing_buffer=Buffer._meta.db_table, - transfer_session_id=transfersession.id, - condition=where_condition, - store=Store._meta.db_table) + FROM {store} WHERE {condition}""".format( + outgoing_buffer=Buffer._meta.db_table, + transfer_session_id=transfersession.id, + condition=where_condition, + store=Store._meta.db_table, + ) cursor.execute(queue_buffer) # take all record max counters that are foreign keyed onto store models, which were queued into the buffer queue_rmc_buffer = """INSERT INTO {outgoing_rmcb} @@ -279,12 +383,15 @@ def _queue_into_buffer(transfersession): FROM {record_max_counter} AS rmc INNER JOIN {outgoing_buffer} AS buffer ON rmc.store_model_id = buffer.model_uuid WHERE buffer.transfer_session_id = '{transfer_session_id}' - """.format(outgoing_rmcb=RecordMaxCounterBuffer._meta.db_table, - transfer_session_id=transfersession.id, - record_max_counter=RecordMaxCounter._meta.db_table, - outgoing_buffer=Buffer._meta.db_table) + """.format( + outgoing_rmcb=RecordMaxCounterBuffer._meta.db_table, + transfer_session_id=transfersession.id, + record_max_counter=RecordMaxCounter._meta.db_table, + 
outgoing_buffer=Buffer._meta.db_table, + ) cursor.execute(queue_rmc_buffer) + @transaction.atomic() def _dequeue_into_store(transfersession): """ @@ -294,14 +401,18 @@ def _dequeue_into_store(transfersession): DBBackend._dequeuing_delete_rmcb_records(cursor, transfersession.id) DBBackend._dequeuing_delete_buffered_records(cursor, transfersession.id) current_id = InstanceIDModel.get_current_instance_and_increment_counter() - DBBackend._dequeuing_merge_conflict_buffer(cursor, current_id, transfersession.id) + DBBackend._dequeuing_merge_conflict_buffer( + cursor, current_id, transfersession.id + ) DBBackend._dequeuing_merge_conflict_rmcb(cursor, transfersession.id) - DBBackend._dequeuing_update_rmcs_last_saved_by(cursor, current_id, transfersession.id) + DBBackend._dequeuing_update_rmcs_last_saved_by( + cursor, current_id, transfersession.id + ) DBBackend._dequeuing_delete_mc_rmcb(cursor, transfersession.id) DBBackend._dequeuing_delete_mc_buffer(cursor, transfersession.id) DBBackend._dequeuing_insert_remaining_buffer(cursor, transfersession.id) DBBackend._dequeuing_insert_remaining_rmcb(cursor, transfersession.id) DBBackend._dequeuing_delete_remaining_rmcb(cursor, transfersession.id) DBBackend._dequeuing_delete_remaining_buffer(cursor, transfersession.id) - if getattr(settings, 'MORANGO_DESERIALIZE_AFTER_DEQUEUING', True): + if getattr(settings, "MORANGO_DESERIALIZE_AFTER_DEQUEUING", True): _deserialize_from_store(transfersession.sync_session.profile) diff --git a/morango/utils/uuids.py b/morango/utils/uuids.py index 103463c4..7c06956b 100644 --- a/morango/utils/uuids.py +++ b/morango/utils/uuids.py @@ -5,7 +5,7 @@ def sha2_uuid(*args): - return hashlib.sha256("::".join(args).encode('utf-8')).hexdigest()[:32] + return hashlib.sha256("::".join(args).encode("utf-8")).hexdigest()[:32] class UUIDField(models.CharField): @@ -14,7 +14,7 @@ class UUIDField(models.CharField): """ def __init__(self, *args, **kwargs): - kwargs['max_length'] = 32 + kwargs["max_length"] = 32 super(UUIDField, self).__init__(*args, **kwargs) def prepare_value(self, value): @@ -24,7 +24,7 @@ def prepare_value(self, value): def deconstruct(self): name, path, args, kwargs = super(UUIDField, self).deconstruct() - del kwargs['max_length'] + del kwargs["max_length"] return name, path, args, kwargs def get_internal_type(self): @@ -37,7 +37,7 @@ def get_db_prep_value(self, value, connection, prepared=False): try: value = uuid.UUID(value) except AttributeError: - raise TypeError(self.error_messages['invalid'] % {'value': value}) + raise TypeError(self.error_messages["invalid"] % {"value": value}) return value.hex def from_db_value(self, value, expression, connection, context): @@ -84,17 +84,21 @@ def calculate_uuid(self): # raise an error if no inputs to the UUID calculation were specified if self.uuid_input_fields is None: - raise NotImplementedError("""You must define either a 'uuid_input_fields' attribute + raise NotImplementedError( + """You must define either a 'uuid_input_fields' attribute (with a tuple of field names) or override the 'calculate_uuid' method, on models that inherit from UUIDModelMixin. 
If you want a fully random UUID, you can set - 'uuid_input_fields' to the string 'RANDOM'.""") + 'uuid_input_fields' to the string 'RANDOM'.""" + ) # if the UUID has been set to be random, return a random UUID if self.uuid_input_fields == "RANDOM": return uuid.uuid4().hex # if we got this far, uuid_input_fields should be a tuple - assert isinstance(self.uuid_input_fields, tuple), "'uuid_input_fields' must either be a tuple or the string 'RANDOM'" + assert isinstance( + self.uuid_input_fields, tuple + ), "'uuid_input_fields' must either be a tuple or the string 'RANDOM'" # calculate the input to the UUID function hashable_input_vals = [] diff --git a/morango/validation.py b/morango/validation.py index a8431e27..1c476f25 100644 --- a/morango/validation.py +++ b/morango/validation.py @@ -1,9 +1,12 @@ import copy + from django.db import transaction -from morango.models import Buffer, RecordMaxCounterBuffer, SyncableModel from rest_framework.exceptions import ValidationError from .utils.register_models import _profile_models +from morango.models import Buffer +from morango.models import RecordMaxCounterBuffer +from morango.models import SyncableModel def validate_and_create_buffer_data(data, transfer_session): @@ -17,28 +20,56 @@ def validate_and_create_buffer_data(data, transfer_session): except KeyError: Model = SyncableModel - partition = record['partition'].replace(record['model_uuid'], Model.ID_PLACEHOLDER) - expected_model_uuid = Model.compute_namespaced_id(partition, record["source_id"], record["model_name"]) + partition = record["partition"].replace( + record["model_uuid"], Model.ID_PLACEHOLDER + ) + expected_model_uuid = Model.compute_namespaced_id( + partition, record["source_id"], record["model_name"] + ) if expected_model_uuid != record["model_uuid"]: - raise ValidationError({"model_uuid": "Does not match results of calling {}.compute_namespaced_id".format(Model.__class__.__name__)}) + raise ValidationError( + { + "model_uuid": "Does not match results of calling {}.compute_namespaced_id".format( + Model.__class__.__name__ + ) + } + ) # ensure the profile is marked onto the buffer record record["profile"] = transfer_session.sync_session.profile # ensure the partition is within the transfer session's filter if not transfer_session.get_filter().contains_partition(record["partition"]): - raise ValidationError({"partition": "Partition {} is not contained within filter for TransferSession ({})".format(record["partition"], transfer_session.filter)}) + raise ValidationError( + { + "partition": "Partition {} is not contained within filter for TransferSession ({})".format( + record["partition"], transfer_session.filter + ) + } + ) # ensure that all nested RMCB models are properly associated with this record and transfer session - for rmcb in record.pop('rmcb_list'): + for rmcb in record.pop("rmcb_list"): if rmcb["transfer_session"] != transfer_session.id: - raise ValidationError({"rmcb_list": "Transfer session on RMCB ({}) does not match Buffer's TransferSession ({})".format(rmcb["transfer_session"], transfer_session)}) + raise ValidationError( + { + "rmcb_list": "Transfer session on RMCB ({}) does not match Buffer's TransferSession ({})".format( + rmcb["transfer_session"], transfer_session + ) + } + ) if rmcb["model_uuid"] != record["model_uuid"]: - raise ValidationError({"rmcb_list": "Model UUID on RMCB ({}) does not match Buffer's Model UUID ({})".format(rmcb["model_uuid"], record["model_uuid"])}) - rmcb['transfer_session_id'] = rmcb.pop('transfer_session') + raise ValidationError( + { + 
"rmcb_list": "Model UUID on RMCB ({}) does not match Buffer's Model UUID ({})".format( + rmcb["model_uuid"], record["model_uuid"] + ) + } + ) + rmcb["transfer_session_id"] = rmcb.pop("transfer_session") rmcb_list += [RecordMaxCounterBuffer(**rmcb)] - record['transfer_session_id'] = record.pop('transfer_session') + record["transfer_session_id"] = record.pop("transfer_session") buffer_list += [Buffer(**record)] with transaction.atomic(): diff --git a/setup.cfg b/setup.cfg index 1f6caefe..96b97f26 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,10 +2,15 @@ universal = 1 [flake8] -ignore = E226,E302,E41, C901 max-line-length = 160 max-complexity = 10 -exclude = morango/*/migrations/* docs +exclude = morango/*/migrations/*, + docs, + morango/utils/sync_utils.py, + morango/utils/backends/postgres.py, + morango/utils/backends/sqlite.py +# Ignore non-PEP8-compliant rules so that the Black formatter can be used +ignore = E203,W503 [isort] atomic = true From 3ae1ebd9f42b44134da3a2d3921fcde0f71d75fb Mon Sep 17 00:00:00 2001 From: Rafael Aguayo Date: Tue, 4 Jun 2019 19:21:42 -0700 Subject: [PATCH 2/2] Add linting into travis --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index afcce177..d829bddf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,8 +14,7 @@ python: - "3.6" env: -# Removed because linting PR coming up separately -# - LINT="yes" + - LINT="yes" - DJANGO="1.11" CRYPTOGRAPHY="1.2" - DJANGO="1.11" CRYPTOGRAPHY="1.8" - DJANGO="1.11" CRYPTOGRAPHY="2.0"