From 25aeb2c796f054b61c6f8e6bf9565e8582523acd Mon Sep 17 00:00:00 2001
From: redshiftzero
Date: Wed, 12 Feb 2020 17:20:19 -0500
Subject: [PATCH 1/3] app: use ServerConnectionError from SDK 0.0.13

---
 dev-requirements.txt                  |  5 +++--
 requirements.in                       |  2 +-
 requirements.txt                      |  5 +++--
 securedrop_client/api_jobs/base.py    |  4 ++--
 securedrop_client/api_jobs/uploads.py |  4 ++--
 securedrop_client/logic.py            |  4 ++--
 securedrop_client/queue.py            | 14 +++++++-------
 securedrop_client/sync.py             |  4 ++--
 8 files changed, 22 insertions(+), 20 deletions(-)

diff --git a/dev-requirements.txt b/dev-requirements.txt
index 2f66854b8..717d85799 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -176,8 +176,9 @@ python-editor==1.0.3 \
 requests==2.20.0 \
     --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \
     --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279
-securedrop-sdk==0.0.12 \
-    --hash=sha256:b5ddca26ce87d4007db5d64fe77d44b4086a902c3f79e69fb9a81343c81ce278
+securedrop-sdk==0.0.13 \
+    --hash=sha256:3d521e7a63cd7df55f9c6508010f968692eccdb222c57ed9c2d0390fbbbd6f99 \
+    --hash=sha256:7763bb44755bdfc387ab6c002cbe49eeec2611feb04a8787c3c9f2aa48a1ee5f
 sip==4.19.8 \
     --hash=sha256:09f9a4e6c28afd0bafedb26ffba43375b97fe7207bd1a0d3513f79b7d168b331 \
     --hash=sha256:105edaaa1c8aa486662226360bd3999b4b89dd56de3e314d82b83ed0587d8783 \
diff --git a/requirements.in b/requirements.in
index 69eed9512..38a86fd4e 100644
--- a/requirements.in
+++ b/requirements.in
@@ -9,7 +9,7 @@ pathlib2==2.3.2
 python-dateutil==2.7.5
 python-editor==1.0.3
 requests==2.20.0
-securedrop-sdk==0.0.12
+securedrop-sdk==0.0.13
 six==1.11.0
 SQLAlchemy==1.3.3
 urllib3==1.24.3
diff --git a/requirements.txt b/requirements.txt
index 62c0714f1..7adcc4041 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -32,8 +32,9 @@ python-editor==1.0.3 \
 requests==2.20.0 \
     --hash=sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c \
     --hash=sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279
-securedrop-sdk==0.0.12 \
-    --hash=sha256:b5ddca26ce87d4007db5d64fe77d44b4086a902c3f79e69fb9a81343c81ce278
+securedrop-sdk==0.0.13 \
+    --hash=sha256:3d521e7a63cd7df55f9c6508010f968692eccdb222c57ed9c2d0390fbbbd6f99 \
+    --hash=sha256:7763bb44755bdfc387ab6c002cbe49eeec2611feb04a8787c3c9f2aa48a1ee5f
 six==1.11.0 \
     --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \
     --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb
diff --git a/securedrop_client/api_jobs/base.py b/securedrop_client/api_jobs/base.py
index efb64f858..deb229227 100644
--- a/securedrop_client/api_jobs/base.py
+++ b/securedrop_client/api_jobs/base.py
@@ -1,7 +1,7 @@
 import logging
 
 from PyQt5.QtCore import QObject, pyqtSignal
-from sdclientapi import API, AuthError, RequestTimeoutError
+from sdclientapi import API, AuthError, RequestTimeoutError, ServerConnectionError
 from sqlalchemy.orm.session import Session
 from typing import Any, Optional, TypeVar
 
@@ -71,7 +71,7 @@ def _do_call_api(self, api_client: API, session: Session) -> None:
             result = self.call_api(api_client, session)
         except (AuthError, ApiInaccessibleError) as e:
             raise ApiInaccessibleError() from e
-        except RequestTimeoutError as e:
+        except (RequestTimeoutError, ServerConnectionError) as e:
             if self.remaining_attempts == 0:
                 self.failure_signal.emit(e)
                 raise
diff --git a/securedrop_client/api_jobs/uploads.py b/securedrop_client/api_jobs/uploads.py
index 89116aa74..d5b451fb7 100644
--- a/securedrop_client/api_jobs/uploads.py
+++ b/securedrop_client/api_jobs/uploads.py
@@ -1,7 +1,7 @@
 import logging
 import sdclientapi
 
-from sdclientapi import API, RequestTimeoutError
+from sdclientapi import API, RequestTimeoutError, ServerConnectionError
 from sqlalchemy.orm.session import Session
 
 from securedrop_client.api_jobs.base import ApiJob
@@ -65,7 +65,7 @@ def call_api(self, api_client: API, session: Session) -> str:
             session.commit()
 
             return reply_db_object.uuid
-        except RequestTimeoutError as e:
+        except (RequestTimeoutError, ServerConnectionError) as e:
             message = "Failed to send reply for source {id} due to Exception: {error}".format(
                 id=self.source_uuid, error=e)
 
diff --git a/securedrop_client/logic.py b/securedrop_client/logic.py
index 2bc3de786..e123bfe8e 100644
--- a/securedrop_client/logic.py
+++ b/securedrop_client/logic.py
@@ -28,7 +28,7 @@
 
 from gettext import gettext as _
 from PyQt5.QtCore import QObject, QThread, pyqtSignal, QTimer, QProcess, Qt
-from sdclientapi import RequestTimeoutError
+from sdclientapi import RequestTimeoutError, ServerConnectionError
 from sqlalchemy.orm.session import sessionmaker
 
 from securedrop_client import storage
@@ -98,7 +98,7 @@ def call_api(self):
         try:
             self.result = self.api_call(*self.args, **self.kwargs)
         except Exception as ex:
-            if isinstance(ex, RequestTimeoutError):
+            if isinstance(ex, (RequestTimeoutError, ServerConnectionError)):
                 self.call_timed_out.emit()
 
             logger.error(ex)
diff --git a/securedrop_client/queue.py b/securedrop_client/queue.py
index e8f2314ae..f3da55b8e 100644
--- a/securedrop_client/queue.py
+++ b/securedrop_client/queue.py
@@ -3,7 +3,7 @@
 
 from PyQt5.QtCore import QObject, QThread, pyqtSlot, pyqtSignal
 from queue import PriorityQueue
-from sdclientapi import API, RequestTimeoutError
+from sdclientapi import API, RequestTimeoutError, ServerConnectionError
 from sqlalchemy.orm import scoped_session
 from typing import Optional, Tuple  # noqa: F401
 
@@ -26,10 +26,10 @@ class RunnableQueue(QObject):
     job type. If multiple jobs of the same type are added to the queue then they are retrieved in
     FIFO order.
 
-    If a RequestTimeoutError is encountered while processing a job, the job will be added back to
-    the queue, the processing loop will stop, and the paused signal will be emitted. New jobs can
-    still be added, but the processing function will need to be called again in order to resume. The
-    processing loop is resumed when the resume signal is emitted.
+    If a RequestTimeoutError or ServerConnectionError is encountered while processing a job, the
+    job will be added back to the queue, the processing loop will stop, and the paused signal will
+    be emitted. New jobs can still be added, but the processing function will need to be called
+    again in order to resume. The processing loop is resumed when the resume signal is emitted.
 
     If an ApiInaccessibleError is encountered while processing a job, api_client will be set to
     None and the processing loop will stop. If the queue is resumed before the queue manager
@@ -97,7 +97,7 @@ def process(self) -> None:
         If the job is a PauseQueueJob, emit the paused signal and return from the processing loop
         so that no more jobs are processed until the queue resumes.
 
-        If the job raises RequestTimeoutError, then:
+        If the job raises RequestTimeoutError or ServerConnectionError, then:
 
         (1) Add a PauseQueuejob to the queue
         (2) Add the job back to the queue so that it can be reprocessed once the queue is resumed.
@@ -123,7 +123,7 @@ def process(self) -> None:
                 logger.debug('{}: {}'.format(type(e).__name__, e))
                 self.api_client = None
                 return
-            except RequestTimeoutError as e:
+            except (RequestTimeoutError, ServerConnectionError) as e:
                 logger.debug('{}: {}'.format(type(e).__name__, e))
                 self.add_job(PauseQueueJob())
                 self.re_add_job(job)
diff --git a/securedrop_client/sync.py b/securedrop_client/sync.py
index df0e03299..8a114ae5e 100644
--- a/securedrop_client/sync.py
+++ b/securedrop_client/sync.py
@@ -2,7 +2,7 @@
 
 from PyQt5.QtCore import pyqtSignal, QObject, QThread, QTimer, Qt
 from sqlalchemy.orm import scoped_session
-from sdclientapi import API, RequestTimeoutError
+from sdclientapi import API, RequestTimeoutError, ServerConnectionError
 
 from securedrop_client.api_jobs.base import ApiInaccessibleError
 from securedrop_client.api_jobs.sync import MetadataSyncJob
@@ -75,7 +75,7 @@ def on_sync_failure(self, result: Exception) -> None:
         Only start another sync on failure if the reason is a timeout request.
         '''
         self.sync_failure.emit(result)
-        if isinstance(result, RequestTimeoutError):
+        if isinstance(result, (RequestTimeoutError, ServerConnectionError)):
             QTimer.singleShot(self.TIME_BETWEEN_SYNCS_MS, self.api_sync_bg_task.sync)

From 14c301ae253fa00505e4cfdcc542a07a122bd0cd Mon Sep 17 00:00:00 2001
From: redshiftzero
Date: Thu, 13 Feb 2020 11:20:10 -0500
Subject: [PATCH 2/3] build-requirements: add wheel hash of securedrop-sdk 0.0.13

---
 build-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build-requirements.txt b/build-requirements.txt
index 4529c3284..a6e924e85 100644
--- a/build-requirements.txt
+++ b/build-requirements.txt
@@ -9,7 +9,7 @@ pathlib2==2.3.2 --hash=sha256:460e67b14d0574b0529a0017b1eb05d10d9722681e303fec70
 python-dateutil==2.7.5 --hash=sha256:f6eb9c17acd5a6954e1a5f2f999a41de3e7e25b6bc41baf6344bd053ec25ceeb
 python-editor==1.0.3 --hash=sha256:e47dcec4ea883853b8196fbd425b875d7ec791d4ede2e20cfc70b9a25365c65b
 requests==2.20.0 --hash=sha256:d87b2085783d31d874ac7bc62660e287932aaee7059e80b41b76462eb18d35cc
-securedrop-sdk==0.0.12 --hash=sha256:d05bb78652c8771e6aa1aefcd76ade1fef08c563d2641acbc5ac8e1d635e6a53
+securedrop-sdk==0.0.13 --hash=sha256:c8d98208fb2074336c06be3fef0994a8a57fde7a765cead12bc36e9128d319e2
 six==1.11.0 --hash=sha256:aa4ad34049ddff178b533062797fd1db9f0038b7c5c2461a7cde2244300b9f3d
 sqlalchemy==1.3.3 --hash=sha256:bfb4cd0df5802a01acd738a080a19e60ff4700e030d497de273807f9a8bd4a0a
 urllib3==1.24.3 --hash=sha256:3d440cbb168e2c963d5099232bdb3f7390bf031b6270dad1bc79751698a1399a

From 58cc3d8874c6177a218262f5dca41dbbb9f5ccbf Mon Sep 17 00:00:00 2001
From: redshiftzero
Date: Thu, 13 Feb 2020 11:32:54 -0500
Subject: [PATCH 3/3] test: ensure ServerConnectionError treated as RequestTimeoutError

these two exceptions should behave the same from the perspective of the
client. At some later point, we may want to customize behavior (e.g. since
the ServerConnectionError will be raised much faster than a
RequestTimeoutError).
---
 tests/api_jobs/test_base.py    | 28 +++++++++++++++++-----------
 tests/api_jobs/test_uploads.py | 11 +++++++----
 tests/test_logic.py            | 11 ++++++-----
 tests/test_queue.py            | 12 ++++++++----
 tests/test_sync.py             | 14 +++++++++-----
 5 files changed, 47 insertions(+), 29 deletions(-)

diff --git a/tests/api_jobs/test_base.py b/tests/api_jobs/test_base.py
index be9f29d2f..26a9b1761 100644
--- a/tests/api_jobs/test_base.py
+++ b/tests/api_jobs/test_base.py
@@ -1,6 +1,6 @@
 import pytest
 
-from sdclientapi import AuthError, RequestTimeoutError
+from sdclientapi import AuthError, RequestTimeoutError, ServerConnectionError
 from securedrop_client.api_jobs.base import ApiInaccessibleError, ApiJob
 from tests.factory import dummy_job_factory
 
@@ -68,15 +68,18 @@ def test_ApiJob_auth_error(mocker):
     assert not api_job.failure_signal.emit.called
 
 
-def test_ApiJob_timeout_error(mocker):
-    return_value = RequestTimeoutError()
+@pytest.mark.parametrize("exception", [RequestTimeoutError, ServerConnectionError])
+def test_ApiJob_timeout_error(mocker, exception):
+    """If the server times out or is unreachable, the corresponding
+    exception should be raised"""
+    return_value = exception()
     api_job_cls = dummy_job_factory(mocker, return_value)
     api_job = api_job_cls()
 
     mock_api_client = mocker.MagicMock()
     mock_session = mocker.MagicMock()
 
-    with pytest.raises(RequestTimeoutError):
+    with pytest.raises(exception):
         api_job._do_call_api(mock_api_client, mock_session)
 
     assert not api_job.success_signal.emit.called
@@ -98,12 +101,13 @@ def test_ApiJob_other_error(mocker):
     api_job.failure_signal.emit.assert_called_once_with(return_value)
 
 
-def test_ApiJob_retry_suceeds_after_failed_attempt(mocker):
+@pytest.mark.parametrize("exception", [RequestTimeoutError, ServerConnectionError])
+def test_ApiJob_retry_suceeds_after_failed_attempt(mocker, exception):
     """Retry logic: after failed attempt should succeed"""
 
     number_of_attempts = 5
     success_return_value = 'now works'
-    return_values = [RequestTimeoutError(), success_return_value]
+    return_values = [exception(), success_return_value]
     api_job_cls = dummy_job_factory(mocker, return_values, remaining_attempts=number_of_attempts)
     api_job = api_job_cls()
 
@@ -119,12 +123,13 @@ def test_ApiJob_retry_suceeds_after_failed_attempt(mocker, exception):
     api_job.success_signal.emit.assert_called_once_with(success_return_value)
 
 
-def test_ApiJob_retry_exactly_n_attempts_times(mocker):
+@pytest.mark.parametrize("exception", [RequestTimeoutError, ServerConnectionError])
+def test_ApiJob_retry_exactly_n_attempts_times(mocker, exception):
     """Retry logic: boundary value case - 5th attempt should succeed"""
 
     number_of_attempts = 5
     success_return_value = 'now works'
-    return_values = [RequestTimeoutError()] * (number_of_attempts - 1) + [success_return_value]
+    return_values = [exception()] * (number_of_attempts - 1) + [success_return_value]
     api_job_cls = dummy_job_factory(mocker, return_values, remaining_attempts=number_of_attempts)
     api_job = api_job_cls()
 
@@ -140,11 +145,12 @@ def test_ApiJob_retry_exactly_n_attempts_times(mocker, exception):
     api_job.success_signal.emit.assert_called_once_with(success_return_value)
 
 
-def test_ApiJob_retry_timeout(mocker):
+@pytest.mark.parametrize("exception", [RequestTimeoutError, ServerConnectionError])
+def test_ApiJob_retry_timeout(mocker, exception):
     """Retry logic: If we exceed the number of attempts, the job will still fail"""
 
     number_of_attempts = 5
-    return_values = [RequestTimeoutError()] * (number_of_attempts + 1)
+    return_values = [exception()] * (number_of_attempts + 1)
     api_job_cls = dummy_job_factory(mocker, return_values, remaining_attempts=number_of_attempts)
     api_job = api_job_cls()
 
@@ -152,7 +158,7 @@ def test_ApiJob_retry_timeout(mocker, exception):
     mock_api_client = mocker.MagicMock()
     mock_session = mocker.MagicMock()
 
-    with pytest.raises(RequestTimeoutError):
+    with pytest.raises(exception):
         api_job._do_call_api(mock_api_client, mock_session)
 
     assert api_job.remaining_attempts == 0
diff --git a/tests/api_jobs/test_uploads.py b/tests/api_jobs/test_uploads.py
index 4e2ba3c25..19f8c2b38 100644
--- a/tests/api_jobs/test_uploads.py
+++ b/tests/api_jobs/test_uploads.py
@@ -218,11 +218,14 @@ def test_send_reply_failure_unknown_error(homedir, mocker, session, session_make
     assert len(drafts) == 1
 
 
+@pytest.mark.parametrize("exception",
+                         [sdclientapi.RequestTimeoutError,
+                          sdclientapi.ServerConnectionError])
 def test_send_reply_failure_timeout_error(homedir, mocker, session, session_maker,
-                                          reply_status_codes):
+                                          reply_status_codes, exception):
     '''
-    Check that if the SendReplyJob api call fails because of a RequestTimeoutError that a
-    SendReplyJobTimeoutError is raised.
+    Check that if the SendReplyJob api call fails because of a RequestTimeoutError or
+    ServerConnectionError that a SendReplyJobTimeoutError is raised.
     '''
     source = factory.Source()
     session.add(source)
@@ -230,7 +233,7 @@ def test_send_reply_failure_timeout_error(homedir, mocker, session, session_make
     session.add(draft_reply)
     session.commit()
     api_client = mocker.MagicMock()
-    mocker.patch.object(api_client, 'reply_source', side_effect=sdclientapi.RequestTimeoutError)
+    mocker.patch.object(api_client, 'reply_source', side_effect=exception)
     gpg = GpgHelper(homedir, session_maker, is_qubes=False)
     encrypt_fn = mocker.patch.object(gpg, 'encrypt_to_source')
     job = SendReplyJob(source.uuid, 'mock_reply_uuid', 'mock_message', gpg)
diff --git a/tests/test_logic.py b/tests/test_logic.py
index 6252cb07b..097335110 100644
--- a/tests/test_logic.py
+++ b/tests/test_logic.py
@@ -7,7 +7,7 @@
 import pytest
 
 from PyQt5.QtCore import Qt
-from sdclientapi import RequestTimeoutError
+from sdclientapi import RequestTimeoutError, ServerConnectionError
 
 from tests import factory
 from securedrop_client import db
@@ -1430,13 +1430,14 @@ def test_Controller_resume_queues(homedir, mocker, session_maker):
     co.api_job_queue.resume_queues.assert_called_once_with()
 
 
-def test_APICallRunner_api_call_timeout(mocker):
+@pytest.mark.parametrize("exception", [RequestTimeoutError, ServerConnectionError])
+def test_APICallRunner_api_call_timeout(mocker, exception):
     """
-    Ensure that if a RequestTimeoutError is raised, both the failure and timeout signals are
-    emitted.
+    Ensure that if a RequestTimeoutError or ServerConnectionError is raised, both
+    the failure and timeout signals are emitted.
     """
     mock_api = mocker.MagicMock()
-    mock_api.fake_request = mocker.MagicMock(side_effect=RequestTimeoutError())
+    mock_api.fake_request = mocker.MagicMock(side_effect=exception())
 
     runner = APICallRunner(mock_api.fake_request)
 
diff --git a/tests/test_queue.py b/tests/test_queue.py
index df6bb02c6..d7117709f 100644
--- a/tests/test_queue.py
+++ b/tests/test_queue.py
@@ -1,8 +1,10 @@
 '''
 Testing for the ApiJobQueue and related classes.
 '''
+import pytest
+
 from queue import Queue
-from sdclientapi import RequestTimeoutError
+from sdclientapi import RequestTimeoutError, ServerConnectionError
 from securedrop_client.api_jobs.downloads import FileDownloadJob
 from securedrop_client.api_jobs.base import ApiInaccessibleError, PauseQueueJob
 
@@ -44,19 +46,21 @@ def test_RunnableQueue_happy_path(mocker):
     assert queue.queue.empty()
 
 
-def test_RunnableQueue_job_timeout(mocker):
+@pytest.mark.parametrize("exception", [RequestTimeoutError, ServerConnectionError])
+def test_RunnableQueue_job_timeout(mocker, exception):
     '''
    Add two jobs to the queue. The first times out, and then gets resubmitted for the next pass
     through the loop.
     '''
     queue = RunnableQueue(mocker.MagicMock(), mocker.MagicMock())
     queue.pause = mocker.MagicMock()
-    job_cls = factory.dummy_job_factory(mocker, RequestTimeoutError(), remaining_attempts=5)
+    job_cls = factory.dummy_job_factory(mocker, exception(), remaining_attempts=5)
     job1 = job_cls()
     job2 = job_cls()
     queue.JOB_PRIORITIES = {PauseQueueJob: 0, job_cls: 1}
 
-    # RequestTimeoutError will cause the queue to pause, use our fake pause method instead
+    # RequestTimeoutError or ServerConnectionError will cause the queue to pause,
+    # use our fake pause method instead
     def fake_pause() -> None:
         queue.add_job(PauseQueueJob())
     queue.pause.emit = fake_pause
diff --git a/tests/test_sync.py b/tests/test_sync.py
index d69ea7221..cf6fe93b2 100644
--- a/tests/test_sync.py
+++ b/tests/test_sync.py
@@ -1,4 +1,6 @@
-from sdclientapi import RequestTimeoutError
+import pytest
+
+from sdclientapi import RequestTimeoutError, ServerConnectionError
 
 from securedrop_client.api_jobs.base import ApiInaccessibleError
 from securedrop_client.sync import ApiSync
@@ -153,7 +155,7 @@ def test_ApiSync_on_sync_success(mocker, session_maker, homedir):
 def test_ApiSync_on_sync_failure(mocker, session_maker, homedir):
     '''
     Ensure failure handler emits failure signal that the Controller links to and does not fire
-    another sync for errors other than RequestTimeoutError
+    another sync for errors other than RequestTimeoutError or ServerConnectionError
     '''
     api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir)
     sync_failure = mocker.patch.object(api_sync, 'sync_failure')
@@ -167,15 +169,17 @@ def test_ApiSync_on_sync_failure(mocker, session_maker, homedir):
     singleShot_fn.assert_not_called()
 
 
-def test_ApiSync_on_sync_failure_because_of_timeout(mocker, session_maker, homedir):
+@pytest.mark.parametrize("exception", [RequestTimeoutError, ServerConnectionError])
+def test_ApiSync_on_sync_failure_because_of_timeout(mocker, session_maker, homedir, exception):
     '''
     Ensure failure handler emits failure signal that the Controller links to and sets up timer to
-    fire another sync after 15 seconds if the failure reason is a RequestTimeoutError.
+    fire another sync after 15 seconds if the failure reason is a RequestTimeoutError or
+    ServerConnectionError.
     '''
     api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir)
     sync_failure = mocker.patch.object(api_sync, 'sync_failure')
     singleShot_fn = mocker.patch('securedrop_client.sync.QTimer.singleShot')
-    error = RequestTimeoutError()
+    error = exception()
 
     api_sync.on_sync_failure(error)