diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index 0d56f36185..c87fec4501 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -26,6 +26,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
 - Fixed a bug where the warning to zoom in to see the agglomerate mapping was shown to the user even when the 3D viewport was maximized and no volume data was shown. [#7865](https://github.com/scalableminds/webknossos/issues/7865)
 - Fixed a bug where brushing on a fallback segmentation with active mapping and with segment index file would lead to failed saves. [#7833](https://github.com/scalableminds/webknossos/pull/7833)
 - Fixed a bug where sometimes old mismatching javascript code would be served after upgrades. [#7854](https://github.com/scalableminds/webknossos/pull/7854)
+- Fixed a bug where dataset uploads of zipped tiff data via the UI would be rejected. [#7856](https://github.com/scalableminds/webknossos/pull/7856)
 
 ### Removed
 
diff --git a/conf/messages b/conf/messages
index 9dcda5891e..0f36944f04 100644
--- a/conf/messages
+++ b/conf/messages
@@ -109,6 +109,7 @@ dataset.upload.linkRestricted=Can only link layers of datasets that are either p
 dataset.upload.invalidLinkedLayers=Could not link all requested layers
 dataset.upload.noFiles=Tried to finish upload with no files. May be a retry of a failed finish request, see previous errors.
 dataset.upload.storageExceeded=Cannot upload dataset because the storage quota of the organization is exceeded.
+dataset.upload.finishFailed=Failed to finalize dataset upload.
 dataset.explore.failed.readFile=Failed to read remote file
 dataset.explore.magDtypeMismatch=Element class must be the same for all mags of a layer. Got {0}
 dataset.explore.autoAdd.failed=Failed to automatically import the explored dataset.
diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
index e2b2325ed6..357d8c8eb3 100644
--- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
@@ -511,9 +511,9 @@ class DatasetUploadView extends React.Component {
           zipFile: [],
         });
       }
-      // We return here since not more than 1 zip archive is supported anyway. This is guarded
-      // against via form validation.
-      return;
+      // We break out of the loop here, since at most one zip archive is supported anyway.
+      // This is enforced via form validation.
+      break;
     }
   }
 
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
index bb852b3b77..dbfbba32a4 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
@@ -195,7 +195,7 @@ class DataSourceController @Inject()(
       result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId),
                                                   urlOrHeaderToken(token, request)) {
         for {
-          (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body)
+          (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "dataset.upload.finishFailed"
           _ <- remoteWebknossosClient.reportUpload(
             dataSourceId,
             datasetSizeBytes,
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala
index 325b77c6c0..a98ca6fbd2 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala
@@ -290,7 +290,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository,
             } yield explored)
             .toList)
         combinedLayers = exploreLocalLayerService.makeLayerNamesUnique(dataSources.flatMap(_.dataLayers))
-        dataSource = GenericDataSource[DataLayer](dataSourceId, combinedLayers, dataSources.head.scale)
+        firstExploredDatasource <- dataSources.headOption.toFox
+        dataSource = GenericDataSource[DataLayer](dataSourceId, combinedLayers, firstExploredDatasource.scale)
         path <- Fox.runIf(combinedLayers.nonEmpty)(
           exploreLocalLayerService.writeLocalDatasourceProperties(dataSource, path))
       } yield path
@@ -418,7 +419,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository,
   private def looksLikeN5Multilayer(dataSourceDir: Path): Box[Boolean] =
     for {
-      _ <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      matchingFileIsPresent <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      _ <- bool2Box(matchingFileIsPresent)
       directories <- PathUtils.listDirectories(dataSourceDir, silent = false)
       detectedLayerBoxes = directories.map(looksLikeN5MultiscalesLayer)
       _ <- bool2Box(detectedLayerBoxes.forall(_.openOr(false)))
@@ -433,7 +435,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository,
     // - directories 0 to n
     // - attributes.json (N5Header, dimension, compression,...)
     for {
-      _ <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      matchingFileIsPresent <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      _ <- bool2Box(matchingFileIsPresent)
       datasetDir <- PathUtils.listDirectories(dataSourceDir, silent = false).map(_.headOption)
       scaleDirs <- datasetDir.map(PathUtils.listDirectories(_, silent = false)).getOrElse(Full(Seq.empty))
       _ <- bool2Box(scaleDirs.length == 1) // Must be 1, otherwise it is a multiscale dataset
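
A note on the DataSourceController change: the ?~> combinator labels any failure coming out of uploadService.finishUpload with the new dataset.upload.finishFailed message key, so the client no longer sees a bare internal error. As a rough illustration of that failure-labelling pattern, here is a toy sketch on Either (not the actual Fox implementation from the codebase):

// Toy sketch of a ?~> -style combinator: attach a human-readable label
// to whatever low-level error caused the failure.
object ErrorLabelSketch extends App {
  implicit class LabelOps[A](result: Either[String, A]) {
    def ?~>(label: String): Either[String, A] =
      result.left.map(cause => s"$label <- $cause")
  }

  val failed: Either[String, Unit] = Left("could not read uploaded files")
  println(failed ?~> "dataset.upload.finishFailed")
  // Left(dataset.upload.finishFailed <- could not read uploaded files)
}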
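
The UploadService change from dataSources.head to dataSources.headOption.toFox guards against an empty list of explored datasources: .head would throw instead of failing cleanly. A minimal sketch, using a hypothetical ExploredDataSource type in place of the real data source model:

// Sketch: .head on an empty list throws; headOption fails gracefully.
final case class Scale(factor: Double)
final case class ExploredDataSource(scale: Scale)

object HeadOptionSketch extends App {
  val dataSources: List[ExploredDataSource] = List.empty

  // Before the fix (would throw NoSuchElementException here):
  // val scale = dataSources.head.scale

  // After the fix: None, which the real code lifts into the Fox error
  // channel via .toFox, yielding a proper "finish upload failed" response.
  val scale: Option[Scale] = dataSources.headOption.map(_.scale)
  println(scale) // None
}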
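
Finally, the bool2Box additions in the N5 heuristics close a subtle short-circuiting gap: binding a Box[Boolean] with _ <- only aborts the for-comprehension on Empty or Failure, so a successful lookup that returns false (no attributes.json found) previously let the detection continue. A self-contained toy model of that Box behavior (not Lift's implementation, which the codebase actually uses):

// Toy Box: Full(a) or Empty; a for-comprehension stops on Empty,
// but NOT on Full(false) -- that is the gap the fix closes.
sealed trait Box[+A] {
  def flatMap[B](f: A => Box[B]): Box[B] = this match {
    case Full(a) => f(a)
    case Empty   => Empty
  }
  def map[B](f: A => B): Box[B] = this match {
    case Full(a) => Full(f(a))
    case Empty   => Empty
  }
}
final case class Full[A](a: A) extends Box[A]
case object Empty extends Box[Nothing]

object N5DetectionSketch extends App {
  // Mirrors the bool2Box helper: true -> Full, false -> Empty.
  def bool2Box(b: Boolean): Box[Boolean] = if (b) Full(true) else Empty

  // The lookup itself succeeded, but no attributes.json was found:
  val containsMatchingFile: Box[Boolean] = Full(false)

  // Old pattern: the Boolean is discarded, detection wrongly succeeds.
  val before = for { _ <- containsMatchingFile } yield "looks like N5"
  println(before) // Full(looks like N5)

  // Fixed pattern: bind the Boolean and gate on it via bool2Box.
  val after = for {
    matchingFileIsPresent <- containsMatchingFile
    _ <- bool2Box(matchingFileIsPresent)
  } yield "looks like N5"
  println(after) // Empty
}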