Skip to content

Commit

Permalink
Defer the import of source keys to the keyring
Browse files Browse the repository at this point in the history
Stop importing source GPG keys during sync. We have a place on Source
to store fingerprint and key, which is all we need for the import, and
which is what we check when deciding whether to enable the reply box,
so let's just update the database here and defer import to the keyring
until someone actually wants to reply. It takes milliseconds that
won't be noticed then, but do add up during sync.
  • Loading branch information
rmol committed Mar 31, 2020
1 parent 7ba9715 commit 0b9bb0c
Show file tree
Hide file tree
Showing 6 changed files with 94 additions and 209 deletions.
37 changes: 18 additions & 19 deletions securedrop_client/crypto.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
import tempfile

from sqlalchemy.orm import scoped_session
from uuid import UUID

from securedrop_client.config import Config
from securedrop_client.db import Source
Expand Down Expand Up @@ -145,17 +144,14 @@ def _gpg_cmd_base(self) -> list:
cmd.extend(['--trust-model', 'always'])
return cmd

def import_key(self, source_uuid: UUID, key_data: str, fingerprint: str) -> None:
    """
    Import key_data into the GPG keyring, then record the fingerprint and
    public key on the matching Source row and commit.

    Raises NoResultFound if no Source with source_uuid exists (checked
    before touching the keyring).
    """
    db_session = self.session_maker()
    record = db_session.query(Source).filter_by(uuid=source_uuid).one()

    logger.debug("Importing key with fingerprint %s", fingerprint)
    self._import(key_data)

    record.fingerprint = fingerprint
    record.public_key = key_data
    db_session.add(record)
    db_session.commit()
def import_key(self, source: Source) -> None:
    """
    Imports a Source's GPG key.

    The key material is read from the database record rather than fetched
    remotely; raises CryptoError when the record has no stored key.
    """
    logger.debug("Importing key for source %s", source.uuid)
    key_data = source.public_key
    if not key_data:
        raise CryptoError(f"Could not import key: source {source.uuid} has no key")
    self._import(key_data)

def _import(self, key_data: str) -> None:
'''Wrapper for `gpg --import-keys`'''
Expand Down Expand Up @@ -187,16 +183,19 @@ def encrypt_to_source(self, source_uuid: str, data: str) -> str:
session = self.session_maker()
source = session.query(Source).filter_by(uuid=source_uuid).one()

# do not attempt to encrypt if the source key is missing
if source.fingerprint is None:
raise CryptoError(
'Could not encrypt reply due to missing fingerprint for source: {}'.format(
source_uuid))

# do not attempt to encrypt if the journalist key is missing
if self.journalist_key_fingerprint is None:
if not self.journalist_key_fingerprint:
raise CryptoError('Could not encrypt reply due to missing fingerprint for journalist')

# do not attempt to encrypt if the source key is missing
if not (source.fingerprint and source.public_key):
raise CryptoError(f'Could not encrypt reply: no key for source {source_uuid}')

try:
self.import_key(source)
except CryptoError as e:
raise CryptoError(f"Could not import key before encrypting reply: {e}") from e

cmd = self._gpg_cmd_base()

with tempfile.NamedTemporaryFile('w+') as content, \
Expand Down
56 changes: 14 additions & 42 deletions securedrop_client/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.session import Session

from securedrop_client.crypto import CryptoError, GpgHelper
from securedrop_client.crypto import GpgHelper
from securedrop_client.db import (DraftReply, Source, Message, File, Reply, ReplySendStatus,
ReplySendStatusCodes, User)
from securedrop_client.utils import chronometer
Expand Down Expand Up @@ -135,36 +135,6 @@ def update_local_storage(session: Session,
update_replies(remote_replies, get_local_replies(session), session, data_dir)


def update_source_key(gpg: GpgHelper, local_source: Source, remote_source: SDKSource) -> None:
    """
    Updates a source's GPG key.

    Skips the (slow) keyring import when the remote key data is missing or
    identical to what is already stored locally; import failures are logged
    rather than propagated.
    """
    remote_key = remote_source.key
    fingerprint = remote_key.get("fingerprint")
    public = remote_key.get("public")

    if not fingerprint:
        logger.error("New source data lacks key fingerprint")
        return
    if not public:
        logger.error("New source data lacks public key")
        return

    unchanged = (
        local_source.fingerprint == fingerprint
        and local_source.public_key == public
    )
    if unchanged:
        logger.debug("Source key data is unchanged")
        return

    try:
        # import_key updates the source's key and fingerprint, and commits
        gpg.import_key(remote_source.uuid, public, fingerprint)
    except CryptoError:
        logger.error('Failed to update key information for source %s', remote_source.uuid)


def update_sources(gpg: GpgHelper, remote_sources: List[SDKSource],
local_sources: List[Source], session: Session, data_dir: str) -> None:
"""
Expand All @@ -188,29 +158,31 @@ def update_sources(gpg: GpgHelper, remote_sources: List[SDKSource],
local_source.document_count = source.number_of_documents
local_source.is_starred = source.is_starred
local_source.last_updated = parse(source.last_updated)
local_source.public_key = source.key['public']
local_source.fingerprint = source.key['fingerprint']
session.commit()

update_source_key(gpg, local_source, source)

# Removing the UUID from local_sources_by_uuid ensures
# this record won't be deleted at the end of this
# function.
del local_sources_by_uuid[source.uuid]
logger.debug('Updated source {}'.format(source.uuid))
else:
# A new source to be added to the database.
ns = Source(uuid=source.uuid,
journalist_designation=source.journalist_designation,
is_flagged=source.is_flagged,
interaction_count=source.interaction_count,
is_starred=source.is_starred,
last_updated=parse(source.last_updated),
document_count=source.number_of_documents)
ns = Source(
uuid=source.uuid,
journalist_designation=source.journalist_designation,
is_flagged=source.is_flagged,
interaction_count=source.interaction_count,
is_starred=source.is_starred,
last_updated=parse(source.last_updated),
document_count=source.number_of_documents,
public_key=source.key['public'],
fingerprint=source.key['fingerprint'],
)
session.add(ns)
session.commit()

update_source_key(gpg, ns, source)

logger.debug('Added new source {}'.format(source.uuid))

# The uuids remaining in local_uuids do not exist on the remote server, so
Expand Down
78 changes: 1 addition & 77 deletions tests/api_jobs/test_sync.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os

from securedrop_client.api_jobs.sync import MetadataSyncJob
from securedrop_client.crypto import GpgHelper, CryptoError
from securedrop_client.crypto import GpgHelper

from tests import factory

Expand All @@ -22,7 +22,6 @@ def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):
}
)

mock_key_import = mocker.patch.object(job.gpg, 'import_key')
mock_get_remote_data = mocker.patch(
'securedrop_client.api_jobs.sync.get_remote_data',
return_value=([mock_source], [], []))
Expand All @@ -33,41 +32,6 @@ def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):

job.call_api(api_client, session)

assert mock_key_import.call_args[0][0] == mock_source.uuid
assert mock_key_import.call_args[0][1] == mock_source.key['public']
assert mock_key_import.call_args[0][2] == mock_source.key['fingerprint']
assert mock_get_remote_data.call_count == 1


def test_MetadataSyncJob_success_with_key_import_fail(mocker, homedir, session, session_maker):
    """
    Check that we can gracefully handle a key import failure.
    """
    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
    job = MetadataSyncJob(homedir, gpg)

    remote_source = factory.RemoteSource(
        key={
            'type': 'PGP',
            'public': PUB_KEY,
            'fingerprint': '123456ABC',
        }
    )

    # Make every keyring import blow up; the sync must still complete.
    patched_import = mocker.patch.object(job.gpg, 'import_key', side_effect=CryptoError)
    patched_get_remote_data = mocker.patch(
        'securedrop_client.api_jobs.sync.get_remote_data',
        return_value=([remote_source], [], []),
    )

    api_client = mocker.MagicMock()
    api_client.default_request_timeout = mocker.MagicMock()

    job.call_api(api_client, session)

    import_args = patched_import.call_args[0]
    assert import_args[0] == remote_source.uuid
    assert import_args[1] == remote_source.key['public']
    assert import_args[2] == remote_source.key['fingerprint']
    assert patched_get_remote_data.call_count == 1


Expand Down Expand Up @@ -98,43 +62,3 @@ def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session, sess

assert mock_key_import.call_count == 0
assert mock_get_remote_data.call_count == 1


def test_MetadataSyncJob_only_import_new_source_keys(mocker, homedir, session, session_maker):
    """
    Verify that we only import source keys we don't already have.
    """
    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
    job = MetadataSyncJob(homedir, gpg)

    remote_source = factory.RemoteSource(
        key={
            'type': 'PGP',
            'public': PUB_KEY,
            'fingerprint': '123456ABC',
        }
    )

    patched_get_remote_data = mocker.patch(
        'securedrop_client.api_jobs.sync.get_remote_data',
        return_value=([remote_source], [], []),
    )

    api_client = mocker.MagicMock()
    api_client.default_request_timeout = mocker.MagicMock()

    crypto_logger = mocker.patch('securedrop_client.crypto.logger')
    storage_logger = mocker.patch('securedrop_client.storage.logger')

    # First sync: key is new, so the keyring import should run.
    job.call_api(api_client, session)

    assert patched_get_remote_data.call_count == 1
    assert crypto_logger.debug.call_args_list[0][0] == (
        'Importing key with fingerprint %s', remote_source.key['fingerprint']
    )

    # Second sync: identical key data, so the import is skipped.
    job.call_api(api_client, session)

    assert patched_get_remote_data.call_count == 2
    assert storage_logger.debug.call_args_list[5][0][0] == 'Source key data is unchanged'
8 changes: 6 additions & 2 deletions tests/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,14 +37,18 @@ def User(**attrs):


def Source(**attrs):

with open(os.path.join(os.path.dirname(__file__), 'files', 'test-key.gpg.pub.asc')) as f:
pub_key = f.read()

global SOURCE_COUNT
SOURCE_COUNT += 1
defaults = dict(
uuid='source-uuid-{}'.format(SOURCE_COUNT),
journalist_designation='testy-mctestface',
is_flagged=False,
public_key='mah pub key',
fingerprint='mah fingerprint',
public_key=pub_key,
fingerprint='B2FF7FB28EED8CABEBC5FB6C6179D97BCFA52E5F',
interaction_count=0,
is_starred=False,
last_updated=datetime.now(),
Expand Down
Loading

0 comments on commit 0b9bb0c

Please sign in to comment.