diff --git a/securedrop_client/api_jobs/sync.py b/securedrop_client/api_jobs/sync.py
index ca222c5f99..1f7f95a5d0 100644
--- a/securedrop_client/api_jobs/sync.py
+++ b/securedrop_client/api_jobs/sync.py
@@ -5,7 +5,7 @@
 from sqlalchemy.orm.session import Session
 
 from securedrop_client.api_jobs.base import ApiJob
-from securedrop_client.crypto import GpgHelper, CryptoError
+from securedrop_client.crypto import GpgHelper
 from securedrop_client.storage import get_remote_data, update_local_storage
 
@@ -40,28 +40,8 @@ def call_api(self, api_client: API, session: Session) -> Any:
         remote_sources, remote_submissions, remote_replies = get_remote_data(api_client)
 
         update_local_storage(session,
+                             self.gpg,
                              remote_sources,
                              remote_submissions,
                              remote_replies,
                              self.data_dir)
-
-        fingerprints = self.gpg.fingerprints()
-        for source in remote_sources:
-            if source.key and source.key.get('type', None) == 'PGP':
-                pub_key = source.key.get('public', None)
-                fingerprint = source.key.get('fingerprint', None)
-                if not pub_key or not fingerprint:
-                    # The below line needs to be excluded from the coverage computation
-                    # as it will show as uncovered due to a cpython compiler optimziation.
-                    # See: https://bugs.python.org/issue2506
-                    continue  # pragma: no cover
-
-                if fingerprint in fingerprints:
-                    logger.debug("Skipping import of key with fingerprint {}".format(fingerprint))
-                    continue
-
-                try:
-                    logger.debug("Importing key with fingerprint {}".format(fingerprint))
-                    self.gpg.import_key(source.uuid, pub_key, fingerprint)
-                except CryptoError:
-                    logger.warning('Failed to import key for source {}'.format(source.uuid))
diff --git a/securedrop_client/crypto.py b/securedrop_client/crypto.py
index fc2557f6a4..9f00235664 100644
--- a/securedrop_client/crypto.py
+++ b/securedrop_client/crypto.py
@@ -22,7 +22,6 @@
 import struct
 import subprocess
 import tempfile
-import typing
 
 from sqlalchemy.orm import scoped_session
 from uuid import UUID
@@ -146,35 +145,15 @@ def _gpg_cmd_base(self) -> list:
         cmd.extend(['--trust-model', 'always'])
         return cmd
 
-    def fingerprints(self) -> typing.Dict[str, bool]:
-        """
-        Returns a map of key fingerprints.
-
-        The result is a map wherein each key is the fingerprint of a
-        key on our keyring, mapped to True. It's intended to help us
-        avoid expensive import operations for keys we already have.
-        """
-        cmd = self._gpg_cmd_base()
-        cmd.extend(["--list-public-keys", "--fingerprint", "--with-colons",
-                    "--fixed-list-mode", "--list-options", "no-show-photos"])
-        output = subprocess.check_output(cmd, universal_newlines=True)
-
-        fingerprints = {}
-        for line in output.splitlines():
-            if line.startswith("fpr:"):
-                fields = line.split(":")
-                fingerprint = fields[9]
-                fingerprints[fingerprint] = True
-
-        return fingerprints
-
     def import_key(self, source_uuid: UUID, key_data: str, fingerprint: str) -> None:
         session = self.session_maker()
         local_source = session.query(Source).filter_by(uuid=source_uuid).one()
 
+        logger.debug("Importing key with fingerprint %s", fingerprint)
         self._import(key_data)
 
         local_source.fingerprint = fingerprint
+        local_source.public_key = key_data
         session.add(local_source)
         session.commit()
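With fingerprints() removed, GpgHelper.import_key becomes the single entry point for key material, and it now persists both the fingerprint and the armored public key on the local Source record before committing. A minimal sketch of the caller contract, assuming the constructor signature used in the tests below; homedir, session_maker, and the key values are placeholders, not real data:

```python
from securedrop_client.crypto import CryptoError, GpgHelper

# Placeholder inputs: the client's data directory and a SQLAlchemy
# session factory, as provided by the test fixtures below.
gpg = GpgHelper(homedir, session_maker, is_qubes=False)

try:
    # A single call imports the key into the keyring and persists both
    # source.fingerprint and source.public_key, committing its own session.
    gpg.import_key(source_uuid, public_key, fingerprint)
except CryptoError:
    # Key-import failures are logged and treated as non-fatal by callers.
    pass
```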
diff --git a/securedrop_client/storage.py b/securedrop_client/storage.py
index 1785d302e9..d183633619 100644
--- a/securedrop_client/storage.py
+++ b/securedrop_client/storage.py
@@ -30,6 +30,7 @@
 from sqlalchemy.orm.exc import NoResultFound
 from sqlalchemy.orm.session import Session
 
+from securedrop_client.crypto import CryptoError, GpgHelper
 from securedrop_client.db import (DraftReply, Source, Message, File, Reply, ReplySendStatus,
                                   ReplySendStatusCodes, User)
 from sdclientapi import API
@@ -100,6 +101,7 @@ def get_remote_data(api: API) -> Tuple[List[SDKSource], List[SDKSubmission], Lis
 
 
 def update_local_storage(session: Session,
+                         gpg: GpgHelper,
                          remote_sources: List[SDKSource],
                          remote_submissions: List[SDKSubmission],
                          remote_replies: List[SDKReply],
@@ -116,13 +118,48 @@ def update_local_storage(session: Session,
     # The following update_* functions may change the database state.
     # Because of that, each get_local_* function needs to be called just before
     # its respective update_* function.
-    update_sources(remote_sources, get_local_sources(session), session, data_dir)
+    update_sources(gpg, remote_sources, get_local_sources(session), session, data_dir)
     update_files(remote_files, get_local_files(session), session, data_dir)
     update_messages(remote_messages, get_local_messages(session), session, data_dir)
     update_replies(remote_replies, get_local_replies(session), session, data_dir)
 
 
-def update_sources(remote_sources: List[SDKSource],
+def update_source_key(
+    gpg: GpgHelper, session: Session, local_source: Source, remote_source: SDKSource
+) -> None:
+    """
+    Updates a source's GPG key.
+    """
+    if not remote_source.key.get("fingerprint"):
+        logger.error("New source data lacks key fingerprint")
+        return
+
+    if not remote_source.key.get("public"):
+        logger.error("New source data lacks public key")
+        return
+
+    if (
+        local_source.fingerprint == remote_source.key['fingerprint'] and
+        local_source.public_key == remote_source.key['public']
+    ):
+        logger.debug("Source key data is unchanged")
+        return
+
+    try:
+        # commit so the new source is visible to import_key, which uses a new session
+        session.commit()
+
+        # import_key updates the source's key and fingerprint, and commits
+        gpg.import_key(
+            remote_source.uuid,
+            remote_source.key['public'],
+            remote_source.key['fingerprint']
+        )
+    except CryptoError:
+        logger.error('Failed to update key information for source %s', remote_source.uuid)
+
+
+def update_sources(gpg: GpgHelper, remote_sources: List[SDKSource],
                    local_sources: List[Source], session: Session,
                    data_dir: str) -> None:
     """
     Given collections of remote sources, the current local sources and a
@@ -134,40 +171,43 @@ def update_sources(remote_sources: List[SDKSource],
     * Local items not returned in the remote sources are deleted from the
       local database.
     """
-    local_uuids = {source.uuid for source in local_sources}
+    local_sources_by_uuid = {s.uuid: s for s in local_sources}
     for source in remote_sources:
-        if source.uuid in local_uuids:
+        if source.uuid in local_sources_by_uuid:
             # Update an existing record.
-            local_source = [s for s in local_sources
-                            if s.uuid == source.uuid][0]
+            local_source = local_sources_by_uuid[source.uuid]
             local_source.journalist_designation = source.journalist_designation
             local_source.is_flagged = source.is_flagged
-            local_source.public_key = source.key['public']
             local_source.interaction_count = source.interaction_count
             local_source.document_count = source.number_of_documents
             local_source.is_starred = source.is_starred
             local_source.last_updated = parse(source.last_updated)
 
-            # Removing the UUID from local_uuids ensures this record won't be
-            # deleted at the end of this function.
-            local_uuids.remove(source.uuid)
+            update_source_key(gpg, session, local_source, source)
+
+            # Removing the UUID from local_sources_by_uuid ensures
+            # this record won't be deleted at the end of this
+            # function.
+            del local_sources_by_uuid[source.uuid]
             logger.debug('Updated source {}'.format(source.uuid))
         else:
             # A new source to be added to the database.
             ns = Source(uuid=source.uuid,
                         journalist_designation=source.journalist_designation,
                         is_flagged=source.is_flagged,
-                        public_key=source.key['public'],
                         interaction_count=source.interaction_count,
                         is_starred=source.is_starred,
                         last_updated=parse(source.last_updated),
                         document_count=source.number_of_documents)
             session.add(ns)
+
+            update_source_key(gpg, session, ns, source)
+
             logger.debug('Added new source {}'.format(source.uuid))
 
     # The uuids remaining in local_uuids do not exist on the remote server, so
     # delete the related records.
-    for deleted_source in [s for s in local_sources if s.uuid in local_uuids]:
+    for deleted_source in local_sources_by_uuid.values():
         for document in deleted_source.collection:
             if isinstance(document, (Message, File, Reply)):
                 delete_single_submission_or_reply_on_disk(document, data_dir)
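The new update_source_key is idempotent across syncs: the fingerprint and public-key comparison short-circuits the GPG import that the old fingerprints() scan used to guard against. A rough illustration of the no-op path, with hypothetical values; local_source and remote_source stand in for a db.Source row and an sdclientapi Source:

```python
# Hypothetical values: the local record already matches the remote key data.
local_source.fingerprint = 'B2FF7FB28EED8CABEBC5FB6C6179D97BCFA52E5F'
local_source.public_key = armored_key
remote_source.key = {
    'fingerprint': local_source.fingerprint,
    'public': local_source.public_key,
}

update_source_key(gpg, session, local_source, remote_source)
# Logs "Source key data is unchanged" and returns; gpg.import_key is never
# called, so no GPG subprocess is spawned for an unchanged source.
```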
diff --git a/tests/api_jobs/test_sync.py b/tests/api_jobs/test_sync.py
index 570d16d8fc..05ae25d33f 100644
--- a/tests/api_jobs/test_sync.py
+++ b/tests/api_jobs/test_sync.py
@@ -1,9 +1,11 @@
 import os
-from uuid import UUID
 
 from securedrop_client.api_jobs.sync import MetadataSyncJob
 from securedrop_client.crypto import GpgHelper, CryptoError
+from tests import factory
+
+
 with open(os.path.join(os.path.dirname(__file__), '..', 'files', 'test-key.gpg.pub.asc')) as f:
     PUB_KEY = f.read()
@@ -12,27 +14,23 @@ def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):
     gpg = GpgHelper(homedir, session_maker, is_qubes=False)
     job = MetadataSyncJob(homedir, gpg)
 
-    mock_source = mocker.MagicMock()
-    mock_source.uuid = 'bar'
-    mock_source.key = {
-        'type': 'PGP',
-        'public': PUB_KEY,
-        'fingerprint': '123456ABC',
-    }
+    mock_source = factory.RemoteSource(
+        key={
+            'type': 'PGP',
+            'public': PUB_KEY,
+            'fingerprint': '123456ABC',
+        }
+    )
 
     mock_key_import = mocker.patch.object(job.gpg, 'import_key')
     mock_get_remote_data = mocker.patch(
         'securedrop_client.api_jobs.sync.get_remote_data',
-        return_value=([mock_source], 'submissions', 'replies'))
+        return_value=([mock_source], [], []))
 
     api_client = mocker.MagicMock()
     api_client.default_request_timeout = mocker.MagicMock()
     api_client.default_request_timeout = mocker.MagicMock()
 
-    mocker.patch(
-        'securedrop_client.api_jobs.sync.update_local_storage',
-        return_value=([mock_source], 'submissions', 'replies'))
-
     job.call_api(api_client, session)
 
     assert mock_key_import.call_args[0][0] == mock_source.uuid
@@ -48,27 +46,23 @@ def test_MetadataSyncJob_success_with_key_import_fail(mocker, homedir, session,
     gpg = GpgHelper(homedir, session_maker, is_qubes=False)
     job = MetadataSyncJob(homedir, gpg)
 
-    mock_source = mocker.MagicMock()
-    mock_source.uuid = 'bar'
-    mock_source.key = {
-        'type': 'PGP',
-        'public': PUB_KEY,
-        'fingerprint': '123456ABC',
-    }
+    mock_source = factory.RemoteSource(
+        key={
+            'type': 'PGP',
+            'public': PUB_KEY,
+            'fingerprint': '123456ABC',
+        }
+    )
 
     mock_key_import = mocker.patch.object(job.gpg, 'import_key', side_effect=CryptoError)
 
     mock_get_remote_data = mocker.patch(
         'securedrop_client.api_jobs.sync.get_remote_data',
-        return_value=([mock_source], 'submissions', 'replies'))
+        return_value=([mock_source], [], []))
 
     api_client = mocker.MagicMock()
     api_client.default_request_timeout = mocker.MagicMock()
 
-    mocker.patch(
-        'securedrop_client.api_jobs.sync.update_local_storage',
-        return_value=([mock_source], 'submissions', 'replies'))
-
     job.call_api(api_client, session)
 
     assert mock_key_import.call_args[0][0] == mock_source.uuid
@@ -84,26 +78,22 @@ def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session, sess
     gpg = GpgHelper(homedir, session_maker, is_qubes=False)
     job = MetadataSyncJob(homedir, gpg)
 
-    mock_source = mocker.MagicMock()
-    mock_source.uuid = 'bar'
-    mock_source.key = {
-        'type': 'PGP',
-        'pub_key': '',
-        'fingerprint': ''
-    }
+    mock_source = factory.RemoteSource(
+        key={
+            'type': 'PGP',
+            'public': '',
+            'fingerprint': '',
+        }
+    )
 
     mock_key_import = mocker.patch.object(job.gpg, 'import_key')
     mock_get_remote_data = mocker.patch(
         'securedrop_client.api_jobs.sync.get_remote_data',
-        return_value=([mock_source], 'submissions', 'replies'))
+        return_value=([mock_source], [], []))
 
     api_client = mocker.MagicMock()
     api_client.default_request_timeout = mocker.MagicMock()
 
-    mocker.patch(
-        'securedrop_client.api_jobs.sync.update_local_storage',
-        return_value=([mock_source], 'submissions', 'replies'))
-
     job.call_api(api_client, session)
 
     assert mock_key_import.call_count == 0
@@ -114,20 +104,16 @@ def test_MetadataSyncJob_only_import_new_source_keys(mocker, homedir, session, s
     """
    Verify that we only import source keys we don't already have.
    """
-    class LimitedImportGpgHelper(GpgHelper):
-        def import_key(self, source_uuid: UUID, key_data: str, fingerprint: str) -> None:
-            self._import(key_data)
-
-    gpg = LimitedImportGpgHelper(homedir, session_maker, is_qubes=False)
+    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
     job = MetadataSyncJob(homedir, gpg)
 
-    mock_source = mocker.MagicMock()
-    mock_source.uuid = 'bar'
-    mock_source.key = {
-        'type': 'PGP',
-        'public': PUB_KEY,
-        'fingerprint': 'B2FF7FB28EED8CABEBC5FB6C6179D97BCFA52E5F',
-    }
+    mock_source = factory.RemoteSource(
+        key={
+            'type': 'PGP',
+            'public': PUB_KEY,
+            'fingerprint': '123456ABC',
+        }
+    )
 
     mock_get_remote_data = mocker.patch(
         'securedrop_client.api_jobs.sync.get_remote_data',
@@ -136,27 +122,19 @@ def import_key(self, source_uuid: UUID, key_data: str, fingerprint: str) -> None
     api_client = mocker.MagicMock()
     api_client.default_request_timeout = mocker.MagicMock()
 
-    mocker.patch(
-        'securedrop_client.api_jobs.sync.update_local_storage',
-        return_value=([mock_source], [], []))
-
-    mock_logger = mocker.patch('securedrop_client.api_jobs.sync.logger')
+    crypto_logger = mocker.patch('securedrop_client.crypto.logger')
+    storage_logger = mocker.patch('securedrop_client.storage.logger')
 
     job.call_api(api_client, session)
 
     assert mock_get_remote_data.call_count == 1
-    assert len(gpg.fingerprints()) == 2
 
-    log_msg = mock_logger.debug.call_args_list[0][0][0]
-    assert log_msg.startswith(
-        'Importing key with fingerprint {}'.format(mock_source.key['fingerprint'])
-    )
+    log_msg = crypto_logger.debug.call_args_list[0][0]
+    assert log_msg == ('Importing key with fingerprint %s', mock_source.key['fingerprint'])
 
     job.call_api(api_client, session)
 
     assert mock_get_remote_data.call_count == 2
 
-    log_msg = mock_logger.debug.call_args_list[1][0][0]
-    assert log_msg.startswith(
-        'Skipping import of key with fingerprint {}'.format(mock_source.key['fingerprint'])
-    )
+    log_msg = storage_logger.debug.call_args_list[1][0][0]
+    assert log_msg == 'Source key data is unchanged'
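The rewritten log assertions in test_MetadataSyncJob_only_import_new_source_keys rely on the switch from str.format() to lazy %-style logging arguments: once a logger is replaced by a mock, the format string and its arguments are captured as separate positional values. A self-contained sketch of the pattern, using plain unittest.mock (pytest-mock's mocker.patch records calls the same way):

```python
from unittest import mock

log = mock.MagicMock()
log.debug("Importing key with fingerprint %s", "123456ABC")

# call_args_list[0][0] is the tuple of positional arguments of the first
# call: the %-style format string and its argument arrive separately.
assert log.debug.call_args_list[0][0] == (
    "Importing key with fingerprint %s", "123456ABC"
)
```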
diff --git a/tests/factory.py b/tests/factory.py
index 6517874401..683d254a67 100644
--- a/tests/factory.py
+++ b/tests/factory.py
@@ -3,7 +3,11 @@
 """
 from datetime import datetime
 from itertools import cycle
+import os
 from typing import List
+import uuid
+
+from sdclientapi import Source as SDKSource
 
 from securedrop_client import db
 from securedrop_client.api_jobs.base import ApiJob
@@ -40,6 +44,7 @@ def Source(**attrs):
         journalist_designation='testy-mctestface',
         is_flagged=False,
         public_key='mah pub key',
+        fingerprint='mah fingerprint',
         interaction_count=0,
         is_starred=False,
         last_updated=datetime.now(),
@@ -156,3 +161,33 @@ def call_api(self, api_client, session):
             return return_value
 
     return DummyApiJob
+
+
+def RemoteSource(**attrs):
+
+    with open(os.path.join(os.path.dirname(__file__), 'files', 'test-key.gpg.pub.asc')) as f:
+        pub_key = f.read()
+
+    defaults = dict(
+        add_star_url='foo',
+        interaction_count=0,
+        is_flagged=False,
+        is_starred=True,
+        journalist_designation='testy-mctestface',
+        key={
+            'public': pub_key,
+            'fingerprint': 'B2FF7FB28EED8CABEBC5FB6C6179D97BCFA52E5F'
+        },
+        last_updated=datetime.now().isoformat(),
+        number_of_documents=0,
+        number_of_messages=0,
+        remove_star_url='baz',
+        replies_url='qux',
+        submissions_url='wibble',
+        url='url',
+        uuid=str(uuid.uuid4())
+    )
+
+    defaults.update(attrs)
+
+    return SDKSource(**defaults)
diff --git a/tests/test_storage.py b/tests/test_storage.py
index c2cc58785f..975c48217f 100644
--- a/tests/test_storage.py
+++ b/tests/test_storage.py
@@ -7,37 +7,23 @@
 import uuid
 
 from dateutil.parser import parse
-from sdclientapi import Source, Submission, Reply
+from sdclientapi import Submission, Reply
 from sqlalchemy.orm.exc import NoResultFound
 
 import securedrop_client.db
+from securedrop_client.crypto import GpgHelper
 from securedrop_client.storage import get_local_sources, get_local_messages, get_local_replies, \
     get_remote_data, update_local_storage, update_sources, update_files, update_messages, \
     update_replies, find_or_create_user, find_new_messages, find_new_replies, \
     delete_single_submission_or_reply_on_disk, get_local_files, find_new_files, \
     source_exists, set_message_or_reply_content, mark_as_downloaded, mark_as_decrypted, get_file, \
     get_message, get_reply, update_and_get_user, update_missing_files, mark_as_not_downloaded, \
-    mark_all_pending_drafts_as_failed, delete_local_source_by_uuid
+    mark_all_pending_drafts_as_failed, delete_local_source_by_uuid, update_source_key
 from securedrop_client import db
 from tests import factory
 
 
-def make_remote_source():
-    """
-    Utility function for generating sdclientapi Source instances to act upon
-    in the following unit tests.
-    """
-    return Source(add_star_url='foo', interaction_count=1, is_flagged=False,
-                  is_starred=True, journalist_designation='foo',
-                  key={'public': 'bar'},
-                  last_updated='2018-09-11T11:42:31.366649Z',
-                  number_of_documents=1, number_of_messages=1,
-                  remove_star_url='baz', replies_url='qux',
-                  submissions_url='wibble', url='url',
-                  uuid=str(uuid.uuid4()))
-
-
 def make_remote_submission(source_uuid):
     """
     Utility function for generating sdclientapi Submission instances to act
@@ -77,7 +63,7 @@ def test_delete_local_source_by_uuid(mocker):
     Delete the referenced source in the session.
     """
     mock_session = mocker.MagicMock()
-    source = make_remote_source()
+    source = factory.RemoteSource()
     mock_session.query().filter_by().one_or_none.return_value = source
     mock_session.query.reset_mock()
     delete_local_source_by_uuid(mock_session, "uuid")
@@ -134,7 +120,7 @@ def test_get_remote_data(mocker):
     """
     # Some source, submission and reply objects from the API.
     mock_api = mocker.MagicMock()
-    source = make_remote_source()
+    source = factory.RemoteSource()
     mock_api.get_sources.return_value = [source, ]
     submission = mocker.MagicMock()
     mock_api.get_all_submissions.return_value = [submission, ]
     reply = mocker.MagicMock()
     mock_api.get_all_replies.return_value = [reply, ]
@@ -146,12 +132,12 @@ def test_get_remote_data(mocker):
     assert replies == [reply, ]
 
 
-def test_update_local_storage(homedir, mocker):
+def test_update_local_storage(homedir, mocker, session_maker):
     """
     Assuming no errors getting data, check the expected functions to update
     the state of the local database are called with the necessary data.
     """
-    remote_source = make_remote_source()
+    remote_source = factory.RemoteSource()
     remote_message = mocker.Mock(filename='1-foo.msg.gpg')
     remote_file = mocker.Mock(filename='2-foo.gpg')
     remote_submissions = [remote_message, remote_file]
@@ -170,14 +156,18 @@
     file_fn = mocker.patch('securedrop_client.storage.update_files')
     msg_fn = mocker.patch('securedrop_client.storage.update_messages')
 
-    update_local_storage(mock_session, [remote_source], remote_submissions, [remote_reply], homedir)
-    src_fn.assert_called_once_with([remote_source], [local_source], mock_session, homedir)
+    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
+
+    update_local_storage(
+        mock_session, gpg, [remote_source], remote_submissions, [remote_reply], homedir
+    )
+    src_fn.assert_called_once_with(gpg, [remote_source], [local_source], mock_session, homedir)
     rpl_fn.assert_called_once_with([remote_reply], [local_reply], mock_session, homedir)
     file_fn.assert_called_once_with([remote_file], [local_file], mock_session, homedir)
     msg_fn.assert_called_once_with([remote_message], [local_message], mock_session, homedir)
 
 
-def test_update_sources(homedir, mocker):
+def test_update_sources(homedir, mocker, session_maker, session):
     """
     Check that:
 
@@ -188,58 +178,115 @@ def test_update_sources(homedir, mocker):
     * We don't attempt to delete the (non-existent) files associated with
       draft replies.
     """
-    mock_session = mocker.MagicMock()
-    # Some source objects from the API, one of which will exist in the local
-    # database, the other will NOT exist in the local source database (this
-    # will be added to the database)
-    source_update = make_remote_source()
-    source_create = make_remote_source()
+    # This remote source exists locally and will be updated.
+    source_update = factory.RemoteSource(journalist_designation="source update")
+
+    # This remote source does not exist locally and will be created.
+    source_create = factory.RemoteSource(journalist_designation="source create")
+
     remote_sources = [source_update, source_create]
-    # Some local source objects. One already exists in the API results (this
-    # will be updated), one does NOT exist in the API results (this will be
-    # deleted from the local database).
-    local_source1 = mocker.MagicMock()
-    local_source1.uuid = source_update.uuid
-    local_source2 = mocker.MagicMock()
-    local_source2.uuid = str(uuid.uuid4())
-    draft_reply = factory.DraftReply(uuid='mock_reply_uuid')
-    local_source2.collection = [draft_reply]
+
+    # This local source already exists in the API results and will be updated.
+    local_source1 = factory.Source(
+        journalist_designation=source_update.journalist_designation,
+        uuid=source_update.uuid,
+    )
+
+    # This local source does not exist in the API results and will be
+    # deleted from the local database.
+    local_source2 = factory.Source()
+
+    # This reply exists just to prove a negative.
+    draft_reply = factory.DraftReply(source_id=local_source2.uuid)
+
+    session.add(local_source1)
+    session.add(local_source2)
+    session.add(draft_reply)
+    session.commit()
+
     local_sources = [local_source1, local_source2]
+
     file_delete_fcn = mocker.patch(
         'securedrop_client.storage.delete_single_submission_or_reply_on_disk')
 
-    update_sources(remote_sources, local_sources, mock_session, homedir)
+    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
+
+    update_sources(gpg, remote_sources, local_sources, session, homedir)
 
     # Check the expected local source object has been updated with values from
     # the API.
-    assert local_source1.journalist_designation == \
-        source_update.journalist_designation
-    assert local_source1.is_flagged == source_update.is_flagged
-    assert local_source1.public_key == source_update.key['public']
-    assert local_source1.interaction_count == source_update.interaction_count
-    assert local_source1.is_starred == source_update.is_starred
-    assert local_source1.last_updated == parse(source_update.last_updated)
+    updated_source = session.query(db.Source).filter_by(uuid=source_update.uuid).one()
+    assert updated_source.journalist_designation == source_update.journalist_designation
+    assert updated_source.is_flagged == source_update.is_flagged
+    assert updated_source.public_key == source_update.key['public']
+    assert updated_source.fingerprint == source_update.key['fingerprint']
+    assert updated_source.interaction_count == source_update.interaction_count
+    assert updated_source.is_starred == source_update.is_starred
+    assert updated_source.last_updated == parse(source_update.last_updated)
 
     # Check the expected local source object has been created with values from
     # the API.
-    assert mock_session.add.call_count == 1
-    new_source = mock_session.add.call_args_list[0][0][0]
+    new_source = session.query(db.Source).filter_by(uuid=source_create.uuid).one()
     assert new_source.uuid == source_create.uuid
-    assert new_source.journalist_designation == \
-        source_create.journalist_designation
+    assert new_source.journalist_designation == source_create.journalist_designation
     assert new_source.is_flagged == source_create.is_flagged
     assert new_source.public_key == source_create.key['public']
+    assert new_source.fingerprint == source_create.key['fingerprint']
     assert new_source.interaction_count == source_create.interaction_count
     assert new_source.is_starred == source_create.is_starred
     assert new_source.last_updated == parse(source_create.last_updated)
-    # Ensure the record for the local source that is missing from the results
-    # of the API is deleted.
-    mock_session.delete.assert_called_once_with(local_source2)
+
+    # Check that the local source not present in the API results was deleted.
+    with pytest.raises(NoResultFound):
+        session.query(db.Source).filter_by(uuid=local_source2.uuid).one()
+
     # Ensure that we didn't attempt to delete files associated with draft replies,
     # as they don't have files (content stored directly in the database).
     assert file_delete_fcn.call_count == 0
-    # Session is committed to database.
-    assert mock_session.commit.call_count == 1
+
+
+def test_update_source_key_without_fingerprint(mocker, session):
+    """
+    Checks handling of a source from the API that lacks a fingerprint.
+    """
+    error_logger = mocker.patch('securedrop_client.storage.logger.error')
+
+    local_source = factory.Source(public_key=None, fingerprint=None)
+    session.add(local_source)
+
+    remote_source = factory.RemoteSource()
+    remote_source.key = {}
+
+    update_source_key(None, session, local_source, remote_source)
+
+    error_logger.assert_called_once_with("New source data lacks key fingerprint")
+
+    local_source2 = session.query(db.Source).filter_by(uuid=local_source.uuid).one()
+    assert not local_source2.fingerprint
+    assert not local_source2.public_key
+
+
+def test_update_source_key_without_key(mocker, session):
+    """
+    Checks handling of a source from the API that lacks a public key.
+    """
+    error_logger = mocker.patch('securedrop_client.storage.logger.error')
+
+    local_source = factory.Source(public_key=None, fingerprint=None)
+    session.add(local_source)
+
+    remote_source = factory.RemoteSource()
+    del remote_source.key["public"]
+
+    update_source_key(None, session, local_source, remote_source)
+
+    error_logger.assert_called_once_with("New source data lacks public key")
+
+    local_source2 = session.query(db.Source).filter_by(uuid=local_source.uuid).one()
+    assert not local_source2.fingerprint
+    assert not local_source2.public_key
 
 
 def add_test_file_to_temp_dir(home_dir, filename):
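Both error-path tests pass None for the gpg argument, which is safe only because the guard clauses in update_source_key return before the GPG helper is ever touched. A condensed sketch of the behaviour they pin down, reusing names from the tests above:

```python
# Remote key data with no fingerprint: update_source_key logs
# "New source data lacks key fingerprint" and returns immediately, so the
# gpg argument (None here) is never used and the local record keeps its
# empty key fields.
remote_source = factory.RemoteSource()
remote_source.key = {}

update_source_key(None, session, local_source, remote_source)
```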
@@ -347,7 +394,9 @@ def test_update_replies_deletes_files_associated_with_the_reply(
 
 def test_update_sources_deletes_files_associated_with_the_source(
         homedir,
-        mocker):
+        mocker,
+        session_maker
+):
     """
     Check that:
 
@@ -398,7 +447,10 @@ def test_update_sources_deletes_files_associated_with_the_source(
         test_filename_absolute_paths.append(abs_server_filename)
 
     local_sources = [local_source]
-    update_sources(remote_sources, local_sources, mock_session, homedir)
+
+    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
+
+    update_sources(gpg, remote_sources, local_sources, mock_session, homedir)
 
     # Ensure the files associated with the reply are deleted on disk.
     for test_filename in test_filename_absolute_paths:
@@ -922,7 +974,7 @@ def test_source_exists_true(homedir, mocker):
     '''
     Check that method returns True if a source is return from the query.
     '''
     session = mocker.MagicMock()
-    source = make_remote_source()
+    source = factory.RemoteSource()
     source.uuid = 'test-source-uuid'
     session.query().filter_by().one.return_value = source
     assert source_exists(session, 'test-source-uuid')
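Across both test modules, the ad-hoc make_remote_source() helper and the hand-rolled MagicMocks give way to the shared factory.RemoteSource. Like the existing db factories, it accepts keyword overrides for only the fields a test cares about; for example (PUB_KEY is the fixture key read at the top of test_sync.py):

```python
from tests import factory

# Only `key` is overridden; every other SDKSource field falls back to the
# defaults in RemoteSource, including a fresh random uuid per call.
source = factory.RemoteSource(
    key={'type': 'PGP', 'public': PUB_KEY, 'fingerprint': '123456ABC'},
)
assert source.key['fingerprint'] == '123456ABC'
```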