Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix Blobstore failure response #101

Merged
merged 3 commits into from
Dec 9, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions RELEASE_NOTES.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
# 0.2.2

- Fixed a bug where if the Blobstore returned a non-JSON response, logging the response would
  fail due to attempting to concatenate string and binary types. This would shadow the real
  error returned from the Blobstore.

# 0.2.1

- fixed several bugs regarding downloading files from Google Drive. How long the current method
Expand Down
2 changes: 1 addition & 1 deletion kbase.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ service-language:
python

module-version:
0.2.1
0.2.2

owners:
[gaprice, scanon, tgu2]
6 changes: 4 additions & 2 deletions lib/DataFileUtil/DataFileUtilClient.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,8 +205,10 @@ def unpack_files(self, params, context=None):
String, parameter "unpack" of String
:returns: instance of list of type "UnpackFilesResult" (Output
parameters for the unpack_files function. file_path - the path to
the unpacked file, or in the case of archive files, the path to
the original archive file.) -> structure: parameter "file_path" of
either a) the unpacked file or b) in the case of archive files,
the path to the original archive file, possibly uncompressed, or
c) in the case of regular files that don't need processing, the
path to the input file.) -> structure: parameter "file_path" of
String
"""
return self._client.run_job('DataFileUtil.unpack_files',
Expand Down
50 changes: 23 additions & 27 deletions lib/DataFileUtil/DataFileUtilImpl.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,9 +61,9 @@ class DataFileUtil:
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "0.2.0"
VERSION = "0.2.2"
GIT_URL = "https://github.com/kbaseapps/DataFileUtil.git"
GIT_COMMIT_HASH = "579a0928d7f4ce3713e31adfea216815110ecd5e"
GIT_COMMIT_HASH = "0d855560a37f8787ab3bb67e464b9c94b299764e"

#BEGIN_CLASS_HEADER

Expand Down Expand Up @@ -308,9 +308,8 @@ def check_shock_response(self, response, errtxt):
try:
err = json.loads(response.content)['error'][0]
except Exception:
# this means shock is down or not responding.
log("Couldn't parse response error content from Shock: " +
response.content)
# this means the Blobstore is down or not responding.
log("Couldn't parse response error content from the Blobstore: " + response.text)
Comment on lines -311 to +312
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is the bugfix

response.raise_for_status()
raise ShockException(errtxt + str(err))

Expand Down Expand Up @@ -636,7 +635,7 @@ def __init__(self, config):
# note that the unit tests cannot easily test this. Be careful with changes here
with requests.get(config['kbase-endpoint'] + '/shock-direct', allow_redirects=False) as r:
if r.status_code == 302:
log('Using direct shock url for transferring files')
log('Using direct Blobstore url for transferring files')
self.shock_effective = r.headers['Location']
log('Shock url: ' + self.shock_effective)
self.handle_url = config['handle-service-url']
Expand Down Expand Up @@ -696,10 +695,10 @@ def shock_to_file(self, ctx, params):
shock_id = params.get('shock_id')
handle_id = params.get('handle_id')
if not shock_id and not handle_id:
raise ValueError('Must provide shock ID or handle ID')
raise ValueError('Must provide Blobstore ID or handle ID')
if shock_id and handle_id:
raise ValueError(
'Must provide either a shock ID or handle ID, not both')
'Must provide either a Blobstore ID or handle ID, not both')

shock_url = self.shock_effective
if handle_id:
Expand All @@ -716,8 +715,7 @@ def shock_to_file(self, ctx, params):
self.mkdir_p(os.path.dirname(file_path))
node_url = shock_url + '/node/' + shock_id
r = requests.get(node_url, headers=headers, allow_redirects=True)
errtxt = ('Error downloading file from shock ' +
'node {}: ').format(shock_id)
errtxt = f'Error downloading file from Blobstore node {shock_id}: '
self.check_shock_response(r, errtxt)
resp_obj = r.json()
size = resp_obj['data']['file']['size']
Expand All @@ -727,7 +725,7 @@ def shock_to_file(self, ctx, params):
attributes = resp_obj['data']['attributes']
if os.path.isdir(file_path):
file_path = os.path.join(file_path, node_file_name)
log('downloading shock node ' + shock_id + ' into file: ' + str(file_path))
log('downloading Blobstore node ' + shock_id + ' into file: ' + str(file_path))
with open(file_path, 'wb') as fhandle:
with requests.get(node_url + '?download_raw', stream=True,
headers=headers, allow_redirects=True) as r:
Expand Down Expand Up @@ -863,11 +861,11 @@ def file_to_shock(self, ctx, params):
headers = {'Authorization': 'Oauth ' + token}
file_path = params.get('file_path')
if not file_path:
raise ValueError('No file(s) provided for upload to Shock.')
raise ValueError('No file(s) provided for upload to the Blobstore.')
pack = params.get('pack')
if pack:
file_path = self._pack(file_path, pack)
log('uploading file ' + str(file_path) + ' into shock node')
log('uploading file ' + str(file_path) + ' into Blobstore node')
with open(os.path.abspath(file_path), 'rb') as data_file:
# Content-Length header is required for transition to
# https://github.com/kbase/blobstore
Expand All @@ -879,7 +877,7 @@ def file_to_shock(self, ctx, params):
self.shock_effective + '/node', headers=headers, data=mpe,
stream=True, allow_redirects=True)
self.check_shock_response(
response, ('Error trying to upload file {} to Shock: '
response, ('Error trying to upload file {} to Blobstore: '
).format(file_path))
shock_data = response.json()['data']
shock_id = shock_data['id']
Expand All @@ -889,7 +887,7 @@ def file_to_shock(self, ctx, params):
'size': shock_data['file']['size']}
if params.get('make_handle'):
out['handle'] = self.make_handle(shock_data, token)
log('uploading done into shock node: ' + shock_id)
log('uploading done into Blobstore node: ' + shock_id)
#END file_to_shock

# At some point might do deeper type checking...
Expand Down Expand Up @@ -958,8 +956,10 @@ def unpack_files(self, ctx, params):
String, parameter "unpack" of String
:returns: instance of list of type "UnpackFilesResult" (Output
parameters for the unpack_files function. file_path - the path to
the unpacked file, or in the case of archive files, the path to
the original archive file.) -> structure: parameter "file_path" of
either a) the unpacked file or b) in the case of archive files,
the path to the original archive file, possibly uncompressed, or
c) in the case of regular files that don't need processing, the
path to the input file.) -> structure: parameter "file_path" of
String
"""
# ctx is the context object
Expand Down Expand Up @@ -1206,15 +1206,13 @@ def copy_shock_node(self, ctx, params):
header = {'Authorization': 'Oauth {}'.format(token)}
source_id = params.get('shock_id')
if not source_id:
raise ValueError('Must provide shock ID')
raise ValueError('Must provide Blobstore ID')
mpdata = MultipartEncoder(fields={'copy_data': source_id})
header['Content-Type'] = mpdata.content_type

with requests.post(self.shock_url + '/node', headers=header, data=mpdata,
allow_redirects=True) as response:
self.check_shock_response(
response, ('Error copying Shock node {}: '
).format(source_id))
self.check_shock_response(response, f'Error copying Blobstore node {source_id}: ')
shock_data = response.json()['data']
shock_id = shock_data['id']
out = {'shock_id': shock_id, 'handle': None}
Expand Down Expand Up @@ -1270,11 +1268,10 @@ def own_shock_node(self, ctx, params):
header = {'Authorization': 'Oauth {}'.format(token)}
source_id = params.get('shock_id')
if not source_id:
raise ValueError('Must provide shock ID')
raise ValueError('Must provide Blobstore ID')
with requests.get(self.shock_url + '/node/' + source_id + '/acl/?verbosity=full',
headers=header, allow_redirects=True) as res:
self.check_shock_response(
res, 'Error getting ACLs for Shock node {}: '.format(source_id))
self.check_shock_response(res, f'Error getting ACLs for Blobstore node {source_id}: ')
owner = res.json()['data']['owner']['username']
if owner != ctx['user_id']:
out = self.copy_shock_node(ctx, params)[0]
Expand All @@ -1292,8 +1289,7 @@ def own_shock_node(self, ctx, params):
# meh
with requests.get(self.shock_url + '/node/' + source_id,
headers=header, allow_redirects=True) as r:
errtxt = ('Error downloading attributes from shock ' +
'node {}: ').format(source_id)
errtxt = f'Error downloading attributes from Blobstore node {source_id}: '
self.check_shock_response(r, errtxt)
out = {'shock_id': source_id,
'handle': self.make_handle(r.json()['data'], token)}
Expand Down Expand Up @@ -1503,7 +1499,7 @@ def versions(self, ctx):
del ctx
wsver = Workspace(self.ws_url).ver()
with requests.get(self.shock_url, allow_redirects=True) as resp:
self.check_shock_response(resp, 'Error contacting Shock: ')
self.check_shock_response(resp, 'Error contacting the Blobstore: ')
shockver = resp.json()['version']
#END versions

Expand Down
16 changes: 8 additions & 8 deletions test/DataFileUtil_server_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def delete_shock_node(cls, node_id):
header = {'Authorization': 'Oauth {0}'.format(cls.token)}
requests.delete(cls.shockURL + '/node/' + node_id, headers=header,
allow_redirects=True)
print(('Deleted shock node ' + node_id))
print(('Deleted Blobstore node ' + node_id))

def test_file_to_shock_and_back_by_handle(self):
input_ = "Test!!!"
Expand Down Expand Up @@ -723,7 +723,7 @@ def fail_uncompress_on_archive(self, infile, file_type, newfile=None):
def test_upload_err_no_file_provided(self):
self.fail_upload(
{'file_path': ''},
'No file\(s\) provided for upload to Shock')
'No file\(s\) provided for upload to the Blobstore')

def test_upload_err_bad_pack_param(self):
self.fail_upload(
Expand Down Expand Up @@ -918,7 +918,7 @@ def test_download_err_node_not_found(self):
{'shock_id': '79261fd9-ae10-4a84-853d-1b8fcd57c8f23',
'file_path': 'foo'
},
'Error downloading file from shock node ' +
'Error downloading file from Blobstore node ' +
'79261fd9-ae10-4a84-853d-1b8fcd57c8f23: Node not found',
exception=ShockException)

Expand All @@ -927,7 +927,7 @@ def test_download_err_no_node_provided(self):
{'shock_id': '',
'file_path': 'foo'
},
'Must provide shock ID or handle ID')
'Must provide Blobstore ID or handle ID')

def test_download_err_no_file_provided(self):
self.fail_download(
Expand Down Expand Up @@ -994,14 +994,14 @@ def test_copy_make_handle(self):
def test_copy_err_node_not_found(self):
self.fail_copy(
{'shock_id': '79261fd9-ae10-4a84-853d-1b8fcd57c8f23'},
'Error copying Shock node ' +
'Error copying Blobstore node ' +
'79261fd9-ae10-4a84-853d-1b8fcd57c8f23: ' +
'Invalid copy_data: invalid UUID length: 37',
exception=ShockException)

def test_copy_err_no_node_provided(self):
self.fail_copy(
{'shock_id': ''}, 'Must provide shock ID')
{'shock_id': ''}, 'Must provide Blobstore ID')

def test_own_node_owned_with_existing_handle(self):
fp = self.write_file('ownfile23.txt', 'ownfile23')
Expand Down Expand Up @@ -1080,13 +1080,13 @@ def test_own_node_copy_with_new_handle(self):
def test_own_err_node_not_found(self):
self.fail_own(
{'shock_id': '79261fd9-ae10-4a84-853d-1b8fcd57c8f23'},
'Error getting ACLs for Shock node ' +
'Error getting ACLs for Blobstore node ' +
'79261fd9-ae10-4a84-853d-1b8fcd57c8f23: Node not found',
exception=ShockException)

def test_own_err_no_node_provided(self):
self.fail_own(
{'shock_id': ''}, 'Must provide shock ID')
{'shock_id': ''}, 'Must provide Blobstore ID')

def test_translate_ws_name(self):
self.assertEqual(self.impl.ws_name_to_id(self.ctx, self.ws_info[1])[0],
Expand Down
Loading