Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix for chemical analysis workflow #1336

Merged
merged 2 commits into from
Oct 13, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions arches_for_science/media/js/bindings/uppy-django-storages.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import BasePlugin from '@uppy/core/lib/BasePlugin.js';

export default class UppyDjangoStorages extends BasePlugin {
    /**
     * Uppy plugin that runs a caller-supplied `beforeUpload` hook as a
     * pre-processor, then renames each Uppy file to the storage path the
     * hook returned so django-storages writes it to the right key.
     *
     * @param {Uppy} uppy - the Uppy instance this plugin is installed on.
     * @param {Object} opts
     * @param {string} [opts.id='UppyDjangoStorages'] - plugin id.
     * @param {Function} opts.beforeUpload - async hook receiving the array of
     *     Uppy file objects; resolves to (possibly nested) file records with
     *     `clientId` and `path` properties.
     */
    constructor(uppy, opts) {
        super(uppy, opts);
        this.id = opts.id || 'UppyDjangoStorages';
        this.type = 'django';
        // Bind once so install/uninstall add and remove the same reference.
        this.prepareUpload = this.prepareUpload.bind(this);
    }

    async prepareUpload(fileIds) {
        // BasePlugin exposes the Uppy instance as `this.uppy`; the previous
        // code referenced a bare `uppy` identifier that is not in scope here
        // and would throw a ReferenceError at upload time.
        const uppyFiles = fileIds.map((fileId) => this.uppy.getFile(fileId));
        const files = await this.opts.beforeUpload(uppyFiles);
        // forEach (not map) — these calls are side effects only.
        files.flat().forEach((file) => {
            this.uppy.setFileState(file.clientId, { name: file.path });
            this.uppy.setFileMeta(file.clientId, { name: file.path });
        });
    }

    install() {
        this.uppy.addPreProcessor(this.prepareUpload);
    }

    uninstall() {
        this.uppy.removePreProcessor(this.prepareUpload);
    }
}
26 changes: 15 additions & 11 deletions arches_for_science/media/js/bindings/uppy.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,10 @@ define([
'@uppy/core',
'@uppy/dashboard',
'@uppy/drag-drop',
'@uppy/aws-s3-multipart',
'@uppy/progress-bar'
], function($, _, ko, Cookies, uppy, Dashboard, DragDrop, AwsS3Multipart, ProgressBar) {
'@uppy/aws-s3',
'@uppy/progress-bar',
'./uppy-django-storages'
], function($, _, ko, Cookies, uppy, Dashboard, DragDrop, AwsS3, ProgressBar, uppyDjangoStorages) {
/**
* @constructor
* @name dropzone
Expand All @@ -19,7 +20,7 @@ define([
ko.applyBindingsToDescendants(innerBindingContext, element);
const options = valueAccessor() || {};

const uppyObj = new uppy({
const uppyObj = new uppy.Uppy({
debug: true,
autoProceed: true,
onBeforeFileAdded: (currentFile) => {
Expand All @@ -34,22 +35,25 @@ define([
};
return modifiedFile;
},
}).use(DragDrop, {
}).use(DragDrop.default, {
inline: options.inline,
target: element,
autoProceed: true,
logger: uppy.debugLogger,
}).use(AwsS3Multipart, {
companionUrl: "/",
}).use(uppyDjangoStorages.default, {
beforeUpload: options.beforeUpload
}).use(AwsS3.default, {
companionUrl: "/uppy",
companionHeaders: {
'X-CSRFToken': Cookies.get('csrftoken')
}
}).use(ProgressBar, {
},
shouldUseMultipart: (file) => file.size > 50 * (1000 ** 2)
}).use(ProgressBar.default, {
target: ".uppy-progress"
});

if(options.filesAdded) {
uppyObj.on('complete', options.filesAdded);
// Forward Uppy's upload-finished notification to the caller. The event
// name is 'complete' — the previous 'compete' was a typo, so the caller's
// callback was registered for an event Uppy never emits.
if (options.complete) {
    uppyObj.on('complete', options.complete);
}

return { controlsDescendantBindings: true };
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@ define([
dragDropTarget: '.dropzone-photo-upload',
fileInputTarget: ".fileinput-button."+ this.uniqueidClass(),
autoProceed: true,
filesAdded: (files) => {
files.successful.map(file => self.saveFiles(file));
beforeUpload: async(files) => {
return Promise.all(files.map(async(file) => self.saveFiles(file)));
}
};

Expand Down Expand Up @@ -128,25 +128,26 @@ define([
self.loading(true);

if(file) {
self.loadingMessage(`File upload complete, building data structures`);
self.loadingMessage(`Building arches data structures before upload...`);
let fileInfo;

if (!ko.unwrap(file.tileId)) {
fileInfo = {
name: file.name,
accepted: true,
height: file.height,
lastModified: file.lastModified,
size: file.size,
status: file.status,
height: file.data.height,
lastModified: file.data.lastModified,
size: file.data.size,
status: file.data.status,
type: file.type,
width: file.width,
width: file.data.width,
url: null,
uploaded: ko.observable(false),
// eslint-disable-next-line camelcase
file_id: null,
index: 0,
content: null,
clientFileId: file.id,
error: file.error,
};

Expand All @@ -166,6 +167,7 @@ define([
};

this.saveFiles = async(files) => {
let datasetInfo = undefined
if(!Array.isArray(files)){
files = [files];
}
Expand Down Expand Up @@ -205,7 +207,7 @@ define([

self.loading(false);
if(resp.ok) {
const datasetInfo = await resp.json();
datasetInfo = await resp.json();
self.observationReferenceTileId = datasetInfo.observationReferenceTileId;
this.datasetId = datasetInfo.datasetResourceId;
const newDatasetFiles = self.files().filter(
Expand All @@ -228,6 +230,7 @@ define([
saveWorkflowState();
self.snapshot = params.form.savedData();
params.form.complete(true);
return datasetInfo.files;
};


Expand Down
13 changes: 7 additions & 6 deletions arches_for_science/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from arches_for_science.views.download_project_files import FileDownloader
from arches_for_science.views.physical_thing_search import PhysicalThingSearchView
from arches_for_science.views.physical_things_in_set import PhysicalThingSetView
from arches_for_science.views.s3 import S3MultipartUploadManagerView, S3MultipartUploaderView, batch_sign, complete_upload, upload_part
from arches_for_science.views.s3 import S3MultipartUploadManagerView, S3MultipartUploaderView, S3BatchSignView, S3CompleteUploadView, S3UploadPartView, S3UploadView
from arches_for_science.views.update_resource_list import UpdateResourceListView
from arches_for_science.views.analysis_area_and_sample_taking import (
SaveAnalysisAreaView,
Expand Down Expand Up @@ -47,11 +47,12 @@
name="format_render_map",
),
re_path(r"^updateresourcelist", UpdateResourceListView.as_view(), name="updateresourcelist"),
re_path(r"^s3/multipart/(?P<uploadid>[^\/]+)/complete$", complete_upload, name="s3_multipart_upload_complete"),
re_path(r"^s3/multipart/(?P<uploadid>[^\/]+)/batch", batch_sign, name="s3_multipart_batch_sign"),
re_path(r"^s3/multipart/(?P<uploadid>[^\/]+)/(?P<partnumber>\d+)$", upload_part, name="s3_multipart_upload_part"),
re_path(r"^s3/multipart/(?P<uploadid>[^\/]+)", S3MultipartUploadManagerView.as_view(), name="s3_multipart_upload"),
re_path(r"^s3/multipart$", S3MultipartUploaderView.as_view(), name="s3_multipart_upload"),
re_path(r"^uppy/s3/multipart/(?P<uploadid>[^\/]+)/complete$", S3CompleteUploadView.as_view(), name="s3_multipart_upload_complete"),
re_path(r"^uppy/s3/multipart/(?P<uploadid>[^\/]+)/batch", S3BatchSignView.as_view(), name="s3_multipart_batch_sign"),
re_path(r"^uppy/s3/multipart/(?P<uploadid>[^\/]+)/(?P<partnumber>\d+)", S3UploadPartView.as_view(), name="s3_multipart_upload_part"),
re_path(r"^uppy/s3/multipart/(?P<uploadid>[^\/]+)", S3MultipartUploadManagerView.as_view(), name="s3_multipart_upload"),
re_path(r"^uppy/s3/multipart$", S3MultipartUploaderView.as_view(), name="s3_multipart_upload"),
re_path(r"^uppy/s3/params", S3UploadView.as_view(), name="s3_upload"),
re_path(r"^instrument-info-form-save", InstrumentInfoStepFormSaveView.as_view(), name="instrument-info-form-save"),
re_path(r"^saveanalysisarea", SaveAnalysisAreaView.as_view(), name="saveanalysisarea"),
re_path(r"^savesamplearea", SaveSampleAreaView.as_view(), name="savesamplearea"),
Expand Down
101 changes: 76 additions & 25 deletions arches_for_science/views/s3.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,40 @@

import json
from django.conf import settings
from django.http import JsonResponse, HttpResponseNotAllowed
from django.http import JsonResponse, HttpResponse
from django.utils.decorators import method_decorator
from arches.app.views.base import BaseManagerView
from arches.app.utils.decorators import can_edit_resource_instance
import boto3

KEY_BASE = "uploadedfiles/"

KEY_BASE = settings.UPLOADED_FILES_DIR

@method_decorator(can_edit_resource_instance, name="dispatch")
class S3MultipartUploaderView(BaseManagerView):
"""S3 Multipart uploader chunks files to allow for parallel uploads to S3"""

def options(self, request):
    """Answer CORS preflight requests for the multipart upload endpoint.

    Advertises the request headers the Uppy client is allowed to send.
    """
    allowed_headers = 'x-csrftoken,accept,content-type,uppy-auth-token,location'
    preflight = HttpResponse()
    preflight.headers['access-control-allow-headers'] = allowed_headers
    return preflight

def post(self, request):
try:
storage_bucket = settings.AWS_STORAGE_BUCKET_NAME
except AttributeError:
raise Exception("Django storages for AWS not configured")

json_body = json.loads(request.body)
file_name = json_body["filename"]
if(file_name and file_name.startswith(KEY_BASE)):
key = file_name
else:
key = KEY_BASE + file_name

response_object = {}
s3 = boto3.client("s3")
resp = s3.create_multipart_upload(
Bucket=storage_bucket,
Key=KEY_BASE + json_body["filename"],
Key=key,
ContentType=json_body["type"],
Metadata=json_body["metadata"],
)
Expand All @@ -35,8 +45,7 @@ def post(self, request):

@method_decorator(can_edit_resource_instance, name="dispatch")
class S3MultipartUploadManagerView(BaseManagerView):
"""doom"""

"""Returns all of the parts of a given upload id"""
def get(self, request, uploadid):
try:
storage_bucket = settings.AWS_STORAGE_BUCKET_NAME
Expand All @@ -57,14 +66,28 @@ def get_parts(client, uploadId, partNumber):
get_parts(s3, uploadid, 0)
return JsonResponse(parts, safe=False)

def delete(self, request):
"""post"""
def delete(self, request, uploadid):
    """Abort an in-progress S3 multipart upload.

    Called when the client cancels an upload; discards any parts already
    stored so S3 does not retain (and bill for) orphaned part data.

    Query params:
        key: the S3 object key of the aborted upload.
    """
    try:
        storage_bucket = settings.AWS_STORAGE_BUCKET_NAME
    except AttributeError:
        raise Exception("Django storages for AWS not configured")

    s3 = boto3.client("s3")
    key = request.GET.get("key", "")

    # boto3 client operations accept keyword arguments only; the previous
    # positional call raised TypeError at runtime.
    s3.abort_multipart_upload(
        Bucket=storage_bucket,
        Key=key,
        UploadId=uploadid,
    )
    # Django views must return an HttpResponse; the client only checks
    # for a successful status.
    return JsonResponse({})

def batch_sign(request, uploadid):
if request.method == "GET":
return JsonResponse({})

@method_decorator(can_edit_resource_instance, name="dispatch")
class S3BatchSignView(BaseManagerView):
"""generates a batch of presigned urls for a group of part numbers"""
def get(self, request, uploadid):
try:
storage_bucket = settings.AWS_STORAGE_BUCKET_NAME
except AttributeError:
Expand All @@ -89,12 +112,43 @@ def batch_sign(request, uploadid):
)
)
return JsonResponse(urls, safe=False)
else:
return HttpResponseNotAllowed()

@method_decorator(can_edit_resource_instance, name="dispatch")
class S3UploadView(BaseManagerView):
    """Generates presigned-POST parameters so the browser can upload a
    (non-multipart) file directly to the S3 storage bucket."""

    def get(self, request):
        """Return presigned POST data for Uppy's AwsS3 plugin.

        Query params:
            filename: destination file name, optionally already prefixed
                with the uploaded-files key base.

        Returns:
            JsonResponse with the upload method, target url, form fields,
            and the expiry (seconds) of the presigned POST.
        """
        try:
            storage_bucket = settings.AWS_STORAGE_BUCKET_NAME
        except AttributeError:
            raise Exception("Django storages for AWS not configured")
        s3 = boto3.client("s3")

        file_name = request.GET.get("filename")

        # Avoid double-prefixing when the client already supplies a full key,
        # and normalize the separator so the key never contains "//"
        # regardless of whether KEY_BASE carries a trailing slash.
        if file_name.startswith(KEY_BASE):
            key = file_name
        else:
            key = KEY_BASE.rstrip("/") + "/" + file_name

        fields = {}
        response = s3.generate_presigned_post(
            storage_bucket,
            key,
            fields,
            ExpiresIn=300,
        )
        return JsonResponse(
            {
                "method": "post",
                "url": response["url"],
                "fields": response["fields"],
                "expires": 300,
            },
            safe=False,
        )

def upload_part(request, uploadid, partnumber):
if request.method == "GET":
@method_decorator(can_edit_resource_instance, name="dispatch")
class S3UploadPartView(BaseManagerView):
def get(self, request, uploadid, partnumber):
try:
storage_bucket = settings.AWS_STORAGE_BUCKET_NAME
except AttributeError:
Expand All @@ -111,13 +165,12 @@ def upload_part(request, uploadid, partnumber):
},
300,
)
return JsonResponse(url, safe=False)
else:
return HttpResponseNotAllowed()
return JsonResponse({'url': url, 'expires': 300}, safe=False)


def complete_upload(request, uploadid):
if request.method == "POST":
@method_decorator(can_edit_resource_instance, name="dispatch")
class S3CompleteUploadView(BaseManagerView):
def post(self, request, uploadid):
try:
storage_bucket = settings.AWS_STORAGE_BUCKET_NAME
except AttributeError:
Expand All @@ -130,6 +183,4 @@ def complete_upload(request, uploadid):
UploadId=uploadid,
MultipartUpload={"Parts": json.loads(request.body.decode("utf-8"))["parts"]},
)
return JsonResponse({"location": response["Location"]})
else:
return HttpResponseNotAllowed()
return JsonResponse({"location": response["Location"]})
Original file line number Diff line number Diff line change
Expand Up @@ -171,14 +171,15 @@ def post(self, request):
raise
else:
file_data["tileid"] = response["tileid"]
file_data["path"] = response["data"][dataset_file_node_id][0]['path']
try:
file_response = [
{"name": f[0]["name"], "renderer": f[0]["renderer"], "format": f[0].get("format", None), "tileId": f[0]["tileid"]}
{"name": f[0]["name"],"path":f[0]["path"],"clientId":f[0]["clientFileId"], "renderer": f[0]["renderer"], "format": f[0].get("format", None), "tileId": f[0]["tileid"]}
for f in new_files
]
except KeyError:
# XRF files do not have a renderer (yet) and use this same endpoint.
file_response = [{"name": f[0]["name"], "format": f[0].get("format", None), "tileId": f[0]["tileid"]} for f in new_files]
file_response = [{"name": f[0]["name"],"path":f[0]["path"],"clientId":f[0]["clientFileId"], "format": f[0].get("format", None), "tileId": f[0]["tileid"]} for f in new_files]

return JSONResponse(
{
Expand Down
10 changes: 5 additions & 5 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@
},
"version": "1.1.0",
"dependencies": {
"@uppy/aws-s3-multipart": "^2.4.1",
"@uppy/core": "^2.3.1",
"@uppy/dashboard": "^2.4.0",
"@uppy/drag-drop": "^2.1.2",
"@uppy/progress-bar": "^2.1.1",
"@uppy/aws-s3": "^3.3.1",
"@uppy/core": "^3.5.1",
"@uppy/dashboard": "^3.5.4",
"@uppy/drag-drop": "^3.0.3",
"@uppy/progress-bar": "^3.0.3",
"arches": "archesproject/arches#dev/7.5.x",
"dom-to-image": "^2.6.0",
"html2canvas": "^1.4.1",
Expand Down
Loading