Fix uploading zipped tiff (should set needsConversion=true) (#7856)
* Fix uploading zipped tiff (should set needsConversion=true)

* changelog

* rephrase code comment

---------

Co-authored-by: MichaelBuessemeyer <[email protected]>
fm3 and MichaelBuessemeyer authored Jun 11, 2024
1 parent ce3037f commit 08ce2ba
Showing 5 changed files with 12 additions and 7 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -26,6 +26,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Fixed a bug where the warning to zoom in to see the agglomerate mapping was shown to the user even when the 3D viewport was maximized and no volume data was shown. [#7865](https://github.com/scalableminds/webknossos/issues/7865)
- Fixed a bug where brushing on a fallback segmentation with active mapping and with segment index file would lead to failed saves. [#7833](https://github.com/scalableminds/webknossos/pull/7833)
- Fixed a bug where sometimes old mismatching javascript code would be served after upgrades. [#7854](https://github.com/scalableminds/webknossos/pull/7854)
+- Fixed a bug where dataset uploads of zipped tiff data via the UI would be rejected. [#7856](https://github.com/scalableminds/webknossos/pull/7856)

### Removed

1 change: 1 addition & 0 deletions conf/messages
@@ -109,6 +109,7 @@ dataset.upload.linkRestricted=Can only link layers of datasets that are either p
dataset.upload.invalidLinkedLayers=Could not link all requested layers
dataset.upload.noFiles=Tried to finish upload with no files. May be a retry of a failed finish request, see previous errors.
dataset.upload.storageExceeded=Cannot upload dataset because the storage quota of the organization is exceeded.
+dataset.upload.finishFailed=Failed to finalize dataset upload.
dataset.explore.failed.readFile=Failed to read remote file
dataset.explore.magDtypeMismatch=Element class must be the same for all mags of a layer. Got {0}
dataset.explore.autoAdd.failed=Failed to automatically import the explored dataset.
6 changes: 3 additions & 3 deletions frontend/javascripts/admin/dataset/dataset_upload_view.tsx
@@ -511,9 +511,9 @@ class DatasetUploadView extends React.Component<PropsWithFormAndRouter, State> {
zipFile: [],
});
}
-      // We return here since not more than 1 zip archive is supported anyway. This is guarded
-      // against via form validation.
-      return;
+      // The loop breaks here in case of zip because at most one zip archive is supported anyway.
+      // Form validation takes care of that assertion.
+      break;
}
}

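For clarity on the frontend change above: the old code used `return`, which exits the enclosing handler entirely, so any logic after the file loop (such as flagging the upload for conversion) never ran; `break` only leaves the loop. Below is a minimal sketch of the same pitfall, written in Scala for consistency with the backend examples further down; every name here (`needsConversionFor`, the suffix checks) is invented for illustration and the real fix lives in the TypeScript diff above.

```scala
import scala.util.control.Breaks.{break, breakable}

// Invented illustration of the return-vs-break pitfall; not the real code.
def needsConversionFor(fileNames: Seq[String]): Boolean = {
  var sawZip = false
  breakable {
    for (name <- fileNames)
      if (name.endsWith(".zip")) {
        sawZip = true
        // `break` leaves only the loop, so the decision logic below
        // still runs. An early `return false` here would skip it,
        // which is the shape of the bug this commit fixes.
        break()
      }
  }
  // Post-loop decision that must not be skipped.
  sawZip || fileNames.exists(n => n.endsWith(".tif") || n.endsWith(".tiff"))
}
```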
2 changes: 1 addition & 1 deletion DataSourceController.scala
@@ -195,7 +195,7 @@ class DataSourceController @Inject()(
result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId),
urlOrHeaderToken(token, request)) {
for {
-      (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body)
+      (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed"
_ <- remoteWebknossosClient.reportUpload(
dataSourceId,
datasetSizeBytes,
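A note on the `?~>` operator used above: in the Fox utility used throughout webknossos it attaches a higher-level error message to a failing result, which the frontend can then surface (compare the `dataset.upload.finishFailed` entry added to `conf/messages` above). The following is a deliberately simplified, synchronous stand-in built on `Either` to show the shape of the pattern; the real `com.scalableminds.util.tools.Fox` is asynchronous and richer, and `MiniFox` is an invented name.

```scala
// MiniFox is an invented, simplified stand-in for Fox, only meant to
// illustrate how `?~>` tags a failure with a readable message.
final case class MiniFox[A](result: Either[String, A]) {
  def ?~>(msg: String): MiniFox[A] =
    MiniFox(result.left.map(original => s"$msg ($original)"))
  def map[B](f: A => B): MiniFox[B] = MiniFox(result.map(f))
  def flatMap[B](f: A => MiniFox[B]): MiniFox[B] =
    MiniFox(result.flatMap(a => f(a).result))
}

// Pretend the datastore-side finish step failed somewhere deep inside.
def finishUpload(uploadId: String): MiniFox[(String, Long)] =
  MiniFox(Left("unzip failed"))

val tagged = finishUpload("upload-1") ?~> "finishUpload.failed"
assert(tagged.result == Left("finishUpload.failed (unzip failed)"))
```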
9 changes: 6 additions & 3 deletions UploadService.scala
@@ -290,7 +290,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository,
} yield explored)
.toList)
combinedLayers = exploreLocalLayerService.makeLayerNamesUnique(dataSources.flatMap(_.dataLayers))
-    dataSource = GenericDataSource[DataLayer](dataSourceId, combinedLayers, dataSources.head.scale)
+    firstExploredDatasource <- dataSources.headOption.toFox
+    dataSource = GenericDataSource[DataLayer](dataSourceId, combinedLayers, firstExploredDatasource.scale)
path <- Fox.runIf(combinedLayers.nonEmpty)(
exploreLocalLayerService.writeLocalDatasourceProperties(dataSource, path))
} yield path
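The first UploadService hunk replaces an unguarded `dataSources.head` with `dataSources.headOption` lifted into Fox, so an empty list of explored data sources becomes a handled failure instead of a thrown `NoSuchElementException`. Here is a small sketch of the same pattern with `Either` standing in for Fox; `ExploredDataSource` and the error text are invented for illustration (the real code uses `GenericDataSource` and its `scale`).

```scala
// Invented stand-in for an explored data source with a `scale` field.
final case class ExploredDataSource(scale: (Double, Double, Double))

// Either stands in for Fox: headOption turns "empty list" into a
// recoverable failure rather than an exception from `.head`.
def scaleOfFirst(dataSources: List[ExploredDataSource]): Either[String, (Double, Double, Double)] =
  for {
    firstExploredDatasource <- dataSources.headOption.toRight("no explored data sources")
  } yield firstExploredDatasource.scale

assert(scaleOfFirst(Nil) == Left("no explored data sources"))
```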
@@ -418,7 +419,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository,

private def looksLikeN5Multilayer(dataSourceDir: Path): Box[Boolean] =
for {
-      _ <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      matchingFileIsPresent <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      _ <- bool2Box(matchingFileIsPresent)
directories <- PathUtils.listDirectories(dataSourceDir, silent = false)
detectedLayerBoxes = directories.map(looksLikeN5MultiscalesLayer)
_ <- bool2Box(detectedLayerBoxes.forall(_.openOr(false)))
@@ -433,7 +435,8 @@
// - directories 0 to n
// - attributes.json (N5Header, dimension, compression,...)
for {
-      _ <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      matchingFileIsPresent <- containsMatchingFile(List(FILENAME_ATTRIBUTES_JSON), dataSourceDir, 1) // root attributes.json
+      _ <- bool2Box(matchingFileIsPresent)
datasetDir <- PathUtils.listDirectories(dataSourceDir, silent = false).map(_.headOption)
scaleDirs <- datasetDir.map(PathUtils.listDirectories(_, silent = false)).getOrElse(Full(Seq.empty))
_ <- bool2Box(scaleDirs.length == 1) // Must be 1, otherwise it is a multiscale dataset
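Both `looksLikeN5...` hunks above fix the same subtle bug: `containsMatchingFile` returns a `Box[Boolean]`, and binding it with `_ <-` only asserts that the Box is `Full`, silently discarding a `Full(false)`. Directories without a root `attributes.json` could therefore still pass the N5 check, which is presumably how zipped tiff uploads ended up misclassified instead of being marked with `needsConversion=true`. A sketch of the pitfall follows, with `Either` standing in for the Lift `Box`; `bool2Either`, `looksLikeN5Buggy`, and `looksLikeN5Fixed` are invented names.

```scala
// Either stands in for net.liftweb's Box: Right ~ Full, Left ~ Failure.
def containsMatchingFile(names: List[String]): Either[String, Boolean] =
  Right(names.contains("attributes.json")) // lookup succeeds; the Boolean is the answer

def bool2Either(b: Boolean): Either[String, Unit] =
  if (b) Right(()) else Left("condition not met")

// Buggy shape: `_ <-` only checks that the lookup itself succeeded,
// so Right(false) still falls through as "looks like N5".
def looksLikeN5Buggy(names: List[String]): Either[String, Unit] =
  for {
    _ <- containsMatchingFile(names)
  } yield ()

// Fixed shape, mirroring the diff: bind the Boolean, then assert it.
def looksLikeN5Fixed(names: List[String]): Either[String, Unit] =
  for {
    matchingFileIsPresent <- containsMatchingFile(names)
    _ <- bool2Either(matchingFileIsPresent)
  } yield ()

assert(looksLikeN5Buggy(List("data.tif")).isRight) // false positive
assert(looksLikeN5Fixed(List("data.tif")).isLeft)  // correctly rejected
```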