health checker/ auto-retry bg sync + adjusted timeouts #612

Merged: 8 commits, merged Dec 9, 2019
11 changes: 10 additions & 1 deletion securedrop_client/api_jobs/downloads.py
@@ -50,6 +50,10 @@ def call_api(self, api_client: API, session: Session) -> Any:
jobs.
'''

# TODO: Once https://github.com/freedomofpress/securedrop-client/issues/648 is resolved, we
# will want to pass the default request timeout to api calls instead of setting it on the
# api object directly.
api_client.default_request_timeout = 20
remote_sources, remote_submissions, remote_replies = \
get_remote_data(api_client)
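
The pattern this hunk introduces, setting default_request_timeout on the shared API client immediately before making the call, recurs in updatestar.py and uploads.py below. A minimal sketch of the current pattern next to the per-call form the TODO anticipates; the timeout keyword argument in the commented line is an assumption, not an existing securedrop-sdk parameter:

    # Current pattern in this PR: mutate the shared API object just before the request.
    api_client.default_request_timeout = 20  # seconds; applies to every subsequent request
    remote_sources, remote_submissions, remote_replies = get_remote_data(api_client)

    # Hypothetical per-call form once issue #648 is resolved (the 'timeout' kwarg is an
    # assumption, not part of the current SDK):
    # remote_sources = api_client.get_sources(timeout=20)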

@@ -87,7 +91,7 @@ def __init__(self, data_dir: str) -> None:

def _get_realistic_timeout(self, size_in_bytes: int) -> int:
'''
Return a realistic timeout in seconds for a file, message, or reply based on its size.
Return a realistic timeout in seconds based on the size of the download.

This simply scales the timeout per file so that it increases as the file size increases.
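
The body of _get_realistic_timeout is outside this hunk; a minimal sketch of a size-scaled timeout, with illustrative constants that are assumptions rather than the client's actual values:

    import math

    def get_realistic_timeout(size_in_bytes: int) -> int:
        # Grant every download a fixed floor plus extra time proportional to its size.
        BASE_TIMEOUT = 20            # seconds; assumed floor, not the client's real constant
        BYTES_PER_SECOND = 100_000   # assumed worst-case throughput over Tor
        return BASE_TIMEOUT + math.ceil(size_in_bytes / BYTES_PER_SECOND)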

@@ -260,6 +264,11 @@ def call_download_api(self, api: API, db_object: Reply) -> Tuple[str, str]:
'''
sdk_object = SdkReply(uuid=db_object.uuid, filename=db_object.filename)
sdk_object.source_uuid = db_object.source.uuid

# TODO: Once https://github.com/freedomofpress/securedrop-sdk/issues/108 is implemented, we
# will want to pass the default request timeout to download_reply instead of setting it on
# the api object directly.
api.default_request_timeout = 20
return api.download_reply(sdk_object)

def call_decrypt(self, filepath: str, session: Session = None) -> str:
4 changes: 4 additions & 0 deletions securedrop_client/api_jobs/updatestar.py
@@ -24,6 +24,10 @@ def call_api(self, api_client: API, session: Session) -> str:
try:
source_sdk_object = sdclientapi.Source(uuid=self.source_uuid)

# TODO: Once https://github.com/freedomofpress/securedrop-client/issues/648 is resolved, we
# will want to pass the default request timeout to remove_star and add_star instead of
# setting it on the api object directly.
api_client.default_request_timeout = 5
if self.star_status:
api_client.remove_star(source_sdk_object)
else:
5 changes: 5 additions & 0 deletions securedrop_client/api_jobs/uploads.py
@@ -92,6 +92,11 @@ def call_api(self, api_client: API, session: Session) -> str:

def _make_call(self, encrypted_reply: str, api_client: API) -> sdclientapi.Reply:
sdk_source = sdclientapi.Source(uuid=self.source_uuid)

# TODO: Once https://github.com/freedomofpress/securedrop-client/issues/648 is resolved, we
# will want to pass the default request timeout to reply_source instead of setting it on
# the api object directly.
api_client.default_request_timeout = 5
return api_client.reply_source(sdk_source, encrypted_reply, self.reply_uuid)


2 changes: 1 addition & 1 deletion securedrop_client/gui/widgets.py
@@ -231,7 +231,7 @@ def setup(self, controller):
self.controller.sync_events.connect(self._on_refresh_complete)

def _on_clicked(self):
self.controller.sync_api()
self.controller.sync_api(manual_refresh=True)
# This is a temporary solution for showing the icon as active for the entire duration of a
# refresh, rather than for just the duration of a click. The icon image will be replaced
# when the controller tells us the refresh has finished. A cleaner solution would be to
52 changes: 45 additions & 7 deletions securedrop_client/logic.py
@@ -304,11 +304,16 @@ def completed_api_call(self, thread_id, user_callback):

def login(self, username, password, totp):
"""
Given a username, password and time based one-time-passcode (TOTP),
create a new instance representing the SecureDrop api and authenticate.
Given a username, password and time based one-time-passcode (TOTP), create a new instance
representing the SecureDrop api and authenticate.

Default to 60 seconds until we implement a better request timeout strategy. We lower the
default_request_timeout for Queue API requests in ApiJobQueue in order to display errors
faster.
"""
storage.mark_all_pending_drafts_as_failed(self.session)
self.api = sdclientapi.API(self.hostname, username, password, totp, self.proxy)
self.api = sdclientapi.API(
self.hostname, username, password, totp, self.proxy, default_request_timeout=60)
self.call_api(self.api.authenticate,
self.on_authenticate_success,
self.on_authenticate_failure)
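
Taken together with the per-job overrides elsewhere in this diff, the timeout layering works out as sketched below. The constructor call mirrors the one in this hunk; the hostname and credentials are placeholders, and the per-job values in the comments are the ones set in the other hunks:

    import sdclientapi

    hostname = 'http://localhost:8081'  # placeholder, not taken from this diff
    api = sdclientapi.API(hostname, 'journalist', 'passphrase-placeholder', '123456',
                          proxy=False, default_request_timeout=60)  # login-time default

    # Per-job overrides applied immediately before each request:
    #   UpdateStarJob / SendReplyJob        -> default_request_timeout = 5
    #   MetadataSyncJob / ReplyDownloadJob  -> default_request_timeout = 20
    #   file and message downloads          -> _get_realistic_timeout(size_in_bytes)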
@@ -363,7 +368,7 @@ def authenticated(self):
"""
return bool(self.api and self.api.token is not None)

def sync_api(self):
def sync_api(self, manual_refresh: bool = False):
"""
Grab data from the remote SecureDrop API in a non-blocking manner.
"""
@@ -374,9 +379,20 @@ def sync_api(self):
logger.debug("You are authenticated, going to make your call")

job = MetadataSyncJob(self.data_dir, self.gpg)

job.success_signal.connect(self.on_sync_success, type=Qt.QueuedConnection)
job.failure_signal.connect(self.on_sync_failure, type=Qt.QueuedConnection)

# If the sync did not originate from a manual refresh, increase the number of
# retry attempts (remaining_attempts) to 15; otherwise use the default so that a user
# finds out more quickly whether or not their refresh attempt failed.
#
# Set up failure-handling depending on whether or not the sync originated from a manual
# refresh.
if manual_refresh:
job.failure_signal.connect(self.on_refresh_failure, type=Qt.QueuedConnection)
else:
job.remaining_attempts = 15
job.failure_signal.connect(self.on_sync_failure, type=Qt.QueuedConnection)

self.api_job_queue.enqueue(job)

logger.debug("In sync_api, after call to submit job to queue, on "
@@ -401,6 +417,8 @@ def on_sync_success(self) -> None:
* Download new messages and replies
* Update missing files so that they can be re-downloaded
"""
self.gui.clear_error_status() # remove any permanent error status message

with open(self.sync_flag, 'w') as f:
f.write(arrow.now().format())

@@ -412,8 +430,21 @@ def on_sync_success(self) -> None:

def on_sync_failure(self, result: Exception) -> None:
"""
Called when synchronisation of data via the API queue fails.
Called when synchronisation of data via the API fails after a background sync. Resume the
queues so that we continue to retry syncing with the server in the background.
"""
logger.debug('The SecureDrop server cannot be reached due to Error: {}'.format(result))
self.gui.update_error_status(
_('The SecureDrop server cannot be reached.'),
duration=0,
retry=True)
self.resume_queues()

def on_refresh_failure(self, result: Exception) -> None:
"""
Called when synchronisation of data via the API fails after a user manually clicks refresh.
"""
logger.debug('The SecureDrop server cannot be reached due to Error: {}'.format(result))
self.gui.update_error_status(
_('The SecureDrop server cannot be reached.'),
duration=0,
@@ -439,6 +470,7 @@ def on_update_star_success(self, result) -> None:
"""
After we star a source, we should sync the API such that the local database is updated.
"""
self.gui.clear_error_status() # remove any permanent error status message
self.sync_api() # Syncing the API also updates the source list UI

def on_update_star_failure(self, result: UpdateStarJobException) -> None:
@@ -519,6 +551,7 @@ def on_message_download_success(self, uuid: str) -> None:
"""
Called when a message has downloaded.
"""
self.gui.clear_error_status() # remove any permanent error status message
message = storage.get_message(self.session, uuid)
self.message_ready.emit(message.uuid, message.content)

@@ -542,6 +575,7 @@ def on_reply_download_success(self, uuid: str) -> None:
"""
Called when a reply has downloaded.
"""
self.gui.clear_error_status() # remove any permanent error status message
reply = storage.get_reply(self.session, uuid)
self.reply_ready.emit(reply.uuid, reply.content)

@@ -695,6 +729,7 @@ def on_file_download_success(self, result: Any) -> None:
"""
Called when a file has downloaded.
"""
self.gui.clear_error_status() # remove any permanent error status message
self.file_ready.emit(result)

def on_file_download_failure(self, exception: Exception) -> None:
@@ -714,6 +749,7 @@ def on_delete_source_success(self, result) -> None:
"""
Handler for when a source deletion succeeds.
"""
self.gui.clear_error_status() # remove any permanent error status message
self.sync_api()

def on_delete_source_failure(self, result: Exception) -> None:
@@ -770,6 +806,7 @@ def send_reply(self, source_uuid: str, reply_uuid: str, message: str) -> None:

def on_reply_success(self, reply_uuid: str) -> None:
logger.debug('{} sent successfully'.format(reply_uuid))
self.gui.clear_error_status() # remove any permanent error status message
self.reply_succeeded.emit(reply_uuid)
self.sync_api()

@@ -787,6 +824,7 @@ def get_file(self, file_uuid: str) -> db.File:

def on_logout_success(self, result) -> None:
logging.info('Client logout successful')
self.gui.clear_error_status() # remove any permanent error status message

def on_logout_failure(self, result: Exception) -> None:
logging.info('Client logout failure')
10 changes: 2 additions & 8 deletions securedrop_client/queue.py
@@ -136,13 +136,12 @@ class ApiJobQueue(QObject):

def __init__(self, api_client: API, session_maker: scoped_session) -> None:
super().__init__(None)
self.api_client = api_client

self.main_thread = QThread()
self.download_file_thread = QThread()

self.main_queue = RunnableQueue(self.api_client, session_maker)
self.download_file_queue = RunnableQueue(self.api_client, session_maker)
self.main_queue = RunnableQueue(api_client, session_maker)
self.download_file_queue = RunnableQueue(api_client, session_maker)

self.main_queue.moveToThread(self.main_thread)
self.download_file_queue.moveToThread(self.download_file_thread)
@@ -159,11 +158,6 @@ def logout(self) -> None:

def login(self, api_client: API) -> None:
logger.debug('Passing API token to queues')

# Setting realistic (shorter) timeout for general requests so that user feedback
# is faster
api_client.default_request_timeout = 5

self.main_queue.api_client = api_client
self.download_file_queue.api_client = api_client
self.start_queues()
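
Because both RunnableQueues keep a reference to the same sdclientapi.API instance, the per-job timeout assignments in the hunks above are applied to that shared object as each job runs; a rough sequential illustration, with job names from this diff and a placeholder source uuid:

    source_sdk_object = sdclientapi.Source(uuid='placeholder-uuid')

    api_client.default_request_timeout = 5    # UpdateStarJob, just before add_star/remove_star
    api_client.add_star(source_sdk_object)

    api_client.default_request_timeout = 20   # MetadataSyncJob, just before get_remote_data
    remote_sources, remote_submissions, remote_replies = get_remote_data(api_client)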
13 changes: 4 additions & 9 deletions securedrop_client/storage.py
@@ -77,15 +77,10 @@ def get_remote_data(api: API) -> Tuple[List[SDKSource], List[SDKSubmission], Lis
(remote_sources, remote_submissions, remote_replies)
"""
remote_submissions = [] # type: List[SDKSubmission]
try:
remote_sources = api.get_sources()
for source in remote_sources:
remote_submissions.extend(api.get_submissions(source))
remote_replies = api.get_all_replies()
except Exception as ex:
# Log any errors but allow the caller to handle the exception.
logger.error(ex)
raise(ex)
remote_sources = api.get_sources()
for source in remote_sources:
remote_submissions.extend(api.get_submissions(source))
remote_replies = api.get_all_replies()

logger.info('Fetched {} remote sources.'.format(len(remote_sources)))
logger.info('Fetched {} remote submissions.'.format(
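
With the try/except removed, errors from the SDK calls now propagate to the caller (the MetadataSyncJob running in the queue), which owns retry and failure handling. A caller that still wants the old log-and-reraise behaviour would wrap the call itself, roughly as follows; 'api' is assumed to be an authenticated sdclientapi.API instance:

    import logging

    from securedrop_client.storage import get_remote_data

    logger = logging.getLogger(__name__)

    try:
        remote_sources, remote_submissions, remote_replies = get_remote_data(api)
    except Exception as ex:
        logger.error(ex)  # what the removed block used to do before re-raising
        raise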