diff --git a/CHANGELOG.released.md b/CHANGELOG.released.md
index 201055984b..676a694171 100644
--- a/CHANGELOG.released.md
+++ b/CHANGELOG.released.md
@@ -7,6 +7,56 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
 and this project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`.
 For upgrade instructions, please check the [migration guide](MIGRATIONS.released.md).
 
+## [24.11.1](https://github.com/scalableminds/webknossos/releases/tag/24.11.1) - 2024-11-13
+[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...24.11.1)
+
+### Highlights
+- It is now possible to add metadata to Trees and Segments in annotations. [#7875](https://github.com/scalableminds/webknossos/pull/7875)
+- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123)
+
+### Added
+- It is now possible to add metadata to Trees and Segments in annotations. [#7875](https://github.com/scalableminds/webknossos/pull/7875)
+- Added a summary row to the time tracking overview, where times and annotations/tasks are summed. [#8092](https://github.com/scalableminds/webknossos/pull/8092)
+- Most sliders have been improved: Wheeling above a slider now changes its value and double-clicking its knob resets it to its default value. [#8095](https://github.com/scalableminds/webknossos/pull/8095)
+- It is now possible to search for unnamed segments with the full default name instead of only their ID. [#8133](https://github.com/scalableminds/webknossos/pull/8133)
+- Increased loading speed for precomputed meshes. [#8110](https://github.com/scalableminds/webknossos/pull/8110)
+- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123)
+- Unified wording in UI and code: “Magnification”/“mag” is now used in place of “Resolution” most of the time; compare the [terminology document](https://docs.webknossos.org/webknossos/terminology.html). [#8111](https://github.com/scalableminds/webknossos/pull/8111)
+- Added support for adding remote OME-Zarr NGFF version 0.5 datasets. [#8122](https://github.com/scalableminds/webknossos/pull/8122)
+- Workflow reports may be deleted by superusers. [#8156](https://github.com/scalableminds/webknossos/pull/8156)
+
+### Changed
+- Some mesh-related actions were disabled in proofreading mode when using meshfiles that were created for a mapping rather than an oversegmentation. [#8091](https://github.com/scalableminds/webknossos/pull/8091)
+- Admins can now see and cancel all jobs. The owner of the job is shown in the job list. [#8112](https://github.com/scalableminds/webknossos/pull/8112)
+- Migrated nightly screenshot tests from CircleCI to GitHub Actions. [#8134](https://github.com/scalableminds/webknossos/pull/8134)
+- Migrated nightly screenshot tests for wk.org from CircleCI to GitHub Actions. [#8135](https://github.com/scalableminds/webknossos/pull/8135)
+- Thumbnails for datasets now use the selected mapping from the view configuration if available. [#8157](https://github.com/scalableminds/webknossos/pull/8157)
+
+### Fixed
+- Fixed a bug during dataset upload that occurred when the configured `datastore.baseFolder` is an absolute path. [#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103)
+- Fixed the bbox export menu item. [#8152](https://github.com/scalableminds/webknossos/pull/8152)
+- When trying to save an annotation opened via a link that includes a sharing token, the token is now automatically discarded if it is insufficient for update actions but the user's own token is sufficient. [#8139](https://github.com/scalableminds/webknossos/pull/8139)
+- Fixed that scrolling in the trees and segments tab did not work while dragging. [#8162](https://github.com/scalableminds/webknossos/pull/8162)
+- Fixed that uploading a dataset that needs a conversion failed when the angstrom unit was configured for the conversion. [#8173](https://github.com/scalableminds/webknossos/pull/8173)
+- Fixed that the skeleton search did not automatically expand groups that contained the selected tree. [#8129](https://github.com/scalableminds/webknossos/pull/8129)
+- Fixed interactions in the trees and segments tab, such as the search, that were broken by [#8162](https://github.com/scalableminds/webknossos/pull/8162). [#8186](https://github.com/scalableminds/webknossos/pull/8186)
+- Fixed a bug where Zarr streaming version 3 returned the shape of mag (1, 1, 1), i.e. the finest mag, for all mags. [#8116](https://github.com/scalableminds/webknossos/pull/8116)
+- Fixed sorting of mags in outbound Zarr streaming. [#8125](https://github.com/scalableminds/webknossos/pull/8125)
+- Fixed a bug where you could not create annotations for public datasets of other organizations. [#8107](https://github.com/scalableminds/webknossos/pull/8107)
+- Users without edit permissions to a dataset can no longer delete sharing tokens via the API. [#8083](https://github.com/scalableminds/webknossos/issues/8083)
+- Fixed downloading task annotations of teams you are not in when accessing them directly via URI. [#8155](https://github.com/scalableminds/webknossos/pull/8155)
+- Removed unnecessary scrollbars in the skeleton tab that occurred especially after resizing. [#8148](https://github.com/scalableminds/webknossos/pull/8148)
+- Deleting a bounding box is now possible regardless of whether a segmentation layer is visible. [#8164](https://github.com/scalableminds/webknossos/pull/8164)
+- S3-compliant object storages can now be accessed via HTTPS. [#8167](https://github.com/scalableminds/webknossos/pull/8167)
+- Fixed that skeleton tree nodes were created with the wrong mag. [#8185](https://github.com/scalableminds/webknossos/pull/8185)
+- Fixed the expected type of a tree node received from the server. This fixes the NML export so that it includes the `inMag` field correctly. [#8187](https://github.com/scalableminds/webknossos/pull/8187)
+- Fixed a layout persistence bug leading to empty viewports, triggered when switching between orthogonal, flight, or oblique mode. [#8177](https://github.com/scalableminds/webknossos/pull/8177)
+
+### Removed
+
+### Breaking Changes
+
+
 ## [24.10.0](https://github.com/scalableminds/webknossos/releases/tag/24.10.0) - 2024-09-24
 [Commits](https://github.com/scalableminds/webknossos/compare/24.08.1...24.10.0)
 
diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index 8469a0c4dd..7cd632c60f 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -8,45 +8,14 @@ and this project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MIC
 For upgrade instructions, please check the [migration guide](MIGRATIONS.released.md).
 
 ## Unreleased
-[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...HEAD)
+[Commits](https://github.com/scalableminds/webknossos/compare/24.11.1...HEAD)
 
 ### Added
-- It is now possible to add metadata in annotations to Trees and Segments. [#7875](https://github.com/scalableminds/webknossos/pull/7875)
-- Added a summary row to the time tracking overview, where times and annotations/tasks are summed. [#8092](https://github.com/scalableminds/webknossos/pull/8092)
-- Most sliders have been improved: Wheeling above a slider now changes its value and double-clicking its knob resets it to its default value. [#8095](https://github.com/scalableminds/webknossos/pull/8095)
-- It is now possible to search for unnamed segments with the full default name instead of only their id. [#8133](https://github.com/scalableminds/webknossos/pull/8133)
-- Increased loading speed for precomputed meshes. [#8110](https://github.com/scalableminds/webknossos/pull/8110)
-- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123)
-- Unified wording in UI and code: “Magnification”/“mag” is now used in place of “Resolution“ most of the time, compare [https://docs.webknossos.org/webknossos/terminology.html](terminology document). [#8111](https://github.com/scalableminds/webknossos/pull/8111)
-- Added support for adding remote OME-Zarr NGFF version 0.5 datasets. [#8122](https://github.com/scalableminds/webknossos/pull/8122)
-- Workflow reports may be deleted by superusers. [#8156](https://github.com/scalableminds/webknossos/pull/8156)
 
 ### Changed
-- Some mesh-related actions were disabled in proofreading-mode when using meshfiles that were created for a mapping rather than an oversegmentation. [#8091](https://github.com/scalableminds/webknossos/pull/8091)
-- Admins can now see and cancel all jobs. The owner of the job is shown in the job list. [#8112](https://github.com/scalableminds/webknossos/pull/8112)
-- Migrated nightly screenshot tests from CircleCI to GitHub actions. [#8134](https://github.com/scalableminds/webknossos/pull/8134)
-- Migrated nightly screenshot tests for wk.org from CircleCI to GitHub actions. [#8135](https://github.com/scalableminds/webknossos/pull/8135)
-- Thumbnails for datasets now use the selected mapping from the view configuration if available. [#8157](https://github.com/scalableminds/webknossos/pull/8157)
+- Reading image files on the datastore filesystem is now done asynchronously. [#8126](https://github.com/scalableminds/webknossos/pull/8126)
 
 ### Fixed
-- Fixed a bug during dataset upload in case the configured `datastore.baseFolder` is an absolute path. [#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103)
-- Fixed bbox export menu item [#8152](https://github.com/scalableminds/webknossos/pull/8152)
-- When trying to save an annotation opened via a link including a sharing token, the token is automatically discarded in case it is insufficient for update actions but the users token is. [#8139](https://github.com/scalableminds/webknossos/pull/8139)
-- Fix that scrolling in the trees and segments tab did not work while dragging. [#8162](https://github.com/scalableminds/webknossos/pull/8162)
-- Fixed that uploading a dataset which needs a conversion failed when the angstrom unit was configured for the conversion. [#8173](https://github.com/scalableminds/webknossos/pull/8173)
-- Fixed that the skeleton search did not automatically expand groups that contained the selected tree [#8129](https://github.com/scalableminds/webknossos/pull/8129)
-- Fixed interactions in the trees and segments tab like the search due to a bug introduced by [#8162](https://github.com/scalableminds/webknossos/pull/8162). [#8186](https://github.com/scalableminds/webknossos/pull/8186)
-- Fixed a bug that zarr streaming version 3 returned the shape of mag (1, 1, 1) / the finest mag for all mags. [#8116](https://github.com/scalableminds/webknossos/pull/8116)
-- Fixed sorting of mags in outbound zarr streaming. [#8125](https://github.com/scalableminds/webknossos/pull/8125)
-- Fixed a bug where you could not create annotations for public datasets of other organizations. [#8107](https://github.com/scalableminds/webknossos/pull/8107)
-- Users without edit permissions to a dataset can no longer delete sharing tokens via the API. [#8083](https://github.com/scalableminds/webknossos/issues/8083)
-- Fixed downloading task annotations of teams you are not in, when accessing directly via URI. [#8155](https://github.com/scalableminds/webknossos/pull/8155)
-- Removed unnecessary scrollbars in skeleton tab that occurred especially after resizing. [#8148](https://github.com/scalableminds/webknossos/pull/8148)
-- Deleting a bounding box is now possible independently of a visible segmentation layer. [#8164](https://github.com/scalableminds/webknossos/pull/8164)
-- S3-compliant object storages can now be accessed via HTTPS. [#8167](https://github.com/scalableminds/webknossos/pull/8167)
-- Fixed that skeleton tree nodes were created with the wrong mag. [#8185](https://github.com/scalableminds/webknossos/pull/8185)
-- Fixed the expected type of a tree node received from the server. Fixes nml export to include the `inMag` field correctly. [#8187](https://github.com/scalableminds/webknossos/pull/8187)
-- Fixed a layout persistence bug leading to empty viewports, triggered when switching between orthogonal, flight, or oblique mode. [#8177](https://github.com/scalableminds/webknossos/pull/8177)
 - Fix performance bottleneck when deleting a lot of trees at once. [#8176](https://github.com/scalableminds/webknossos/pull/8176)
 - Fix a bug when importing an NML with groups when only groups but no trees exist in an annotation. [#8176](https://github.com/scalableminds/webknossos/pull/8176)
 - Fix a bug where trying to delete a non-existing node (via the API, for example) would delete the whole active tree. [#8176](https://github.com/scalableminds/webknossos/pull/8176)
diff --git a/MIGRATIONS.released.md b/MIGRATIONS.released.md
index 2b490c94b5..d0db04bb6a 100644
--- a/MIGRATIONS.released.md
+++ b/MIGRATIONS.released.md
@@ -6,6 +6,16 @@ See `MIGRATIONS.unreleased.md` for the changes which are not yet part of an offi
 This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`.
 User-facing changes are documented in the [changelog](CHANGELOG.released.md).
 
+## [24.11.1](https://github.com/scalableminds/webknossos/releases/tag/24.11.1) - 2024-11-13
+[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...24.11.1)
+
+### Postgres Evolutions:
+
+- [121-worker-name.sql](conf/evolutions/121-worker-name.sql)
+- [122-resolution-to-mag.sql](conf/evolutions/122-resolution-to-mag.sql)
+- [123-more-model-categories.sql](conf/evolutions/123-more-model-categories.sql)
+
+
 ## [24.10.0](https://github.com/scalableminds/webknossos/releases/tag/24.10.0) - 2024-09-24
 [Commits](https://github.com/scalableminds/webknossos/compare/24.08.1...24.10.0)
 
diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md
index f6d640f469..20414e596e 100644
--- a/MIGRATIONS.unreleased.md
+++ b/MIGRATIONS.unreleased.md
@@ -6,10 +6,6 @@ This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`.
 User-facing changes are documented in the [changelog](CHANGELOG.released.md).
 
 ## Unreleased
-[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...HEAD)
+[Commits](https://github.com/scalableminds/webknossos/compare/24.11.1...HEAD)
 
 ### Postgres Evolutions:
-
-- [121-worker-name.sql](conf/evolutions/121-worker-name.sql)
-- [122-resolution-to-mag.sql](conf/evolutions/122-resolution-to-mag.sql)
-- [123-more-model-categories.sql](conf/evolutions/123-more-model-categories.sql)
diff --git a/conf/application.conf b/conf/application.conf
index 07d8b5d2dd..0ae8b6f25d 100644
--- a/conf/application.conf
+++ b/conf/application.conf
@@ -125,7 +125,7 @@ webKnossos {
   securityTxt {
     enabled = true
     content ="""Contact: https://github.com/scalableminds/webknossos/security/advisories/new
-Expires: 2024-07-03T10:00:00.000Z
+Expires: 2025-07-03T10:00:00.000Z
 Preferred-Languages: en,de
 """
   }
diff --git a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts
index a82653b946..9a09166530 100644
--- a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts
+++ b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts
@@ -9,6 +9,7 @@ import {
 import type { APIDataset } from "types/api_flow_types";
 import * as api from "admin/admin_rest_api";
 import test from "ava";
+import fs from "node:fs";
 
 async function getFirstDataset(): Promise<APIDataset> {
   const datasets = await api.getActiveDatasetsOfMyOrganization();
@@ -108,3 +109,100 @@ test("Zarr 3 streaming", async (t) => {
   const base64 = btoa(String.fromCharCode(...new Uint8Array(bytes.slice(-128))));
   t.snapshot(base64);
 });
+
+test("Dataset upload", async (t) => {
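+  // Overview of the steps exercised below: reserve the upload via /data/datasets/reserveUpload,
+  // push the zip as a single chunk to /data/datasets, complete it via /data/datasets/finishUpload,
+  // and finally query the dataset's health route. The resumable* form fields appear to carry
+  // resumable.js-style chunk metadata (an assumption based on the field names); a single chunk
+  // suffices because the zip (71988 bytes, see resumableTotalSize) is far below the 10 MB chunk size.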
+  const uploadId = "test-dataset-upload-" + Date.now();
+
+  await fetch("/data/datasets/reserveUpload", {
+    method: "POST",
+    headers: new Headers({
+      "Content-Type": "application/json",
+    }),
+    body: JSON.stringify({
+      filePaths: ["test-dataset-upload.zip"],
+      folderId: "570b9f4e4bb848d0885ea917",
+      initialTeams: [],
+      layersToLink: [],
+      name: "test-dataset-upload",
+      organization: "Organization_X",
+      totalFileCount: 1,
+      uploadId: uploadId,
+    }),
+  });
+
+  const filePath = "test/dataset/test-dataset.zip";
+  const testDataset = fs.readFileSync(filePath);
+
+  let formData = new FormData();
+  formData.append("resumableChunkNumber", "1");
+  formData.append("resumableChunkSize", "10485760");
+  formData.append("resumableCurrentChunkSize", "71988");
+  formData.append("resumableTotalSize", "71988");
+  formData.append("resumableType", "application/zip");
+  formData.append("resumableIdentifier", uploadId + "/test-dataset.zip");
+  formData.append("resumableFilename", "test-dataset.zip");
+  formData.append("resumableRelativePath", "test-dataset.zip");
+  formData.append("resumableTotalChunks", "1");
+
+  // Setting the correct content type header automatically does not work (the boundary is not included)
+  // We can not extract the boundary from the FormData object
+  // Thus we have to set the content type header ourselves and create the body manually
+
+  const boundary = "----WebKitFormBoundaryAqTsFa4N9FW7zF7I";
+  let bodyString = `--${boundary}\r\n`;
+  // @ts-ignore
+  for (const [key, value] of formData.entries()) {
+    bodyString += `Content-Disposition: form-data; name="${key}"\r\n\r\n${value}\r\n`;
+    bodyString += `--${boundary}\r\n`;
+  }
+  bodyString += `Content-Disposition: form-data; name="file"; filename="test-dataset.zip"\r\n`;
+  bodyString += "Content-Type: application/octet-stream\r\n\r\n";
+
+  // We have to send the file as bytes, otherwise JS does some encoding, resulting in erroneous bytes
+
+  const formBytes = new TextEncoder().encode(bodyString);
+  const fileBytes = new Uint8Array(testDataset);
+  const endBytes = new TextEncoder().encode(`\r\n--${boundary}--`);
+  const body = new Uint8Array(formBytes.length + fileBytes.length + endBytes.length);
+  body.set(formBytes, 0);
+  body.set(fileBytes, formBytes.length);
+  body.set(endBytes, formBytes.length + fileBytes.length);
+
+  let content_type = `multipart/form-data; boundary=${boundary}`;
+
+  const uploadResult = await fetch("/data/datasets", {
+    method: "POST",
+    headers: new Headers({
+      "Content-Type": content_type,
+    }),
+    body: body,
+  });
+
+  if (uploadResult.status !== 200) {
+    t.fail("Dataset upload failed");
+  }
+
+  const finishResult = await fetch("/data/datasets/finishUpload", {
+    method: "POST",
+    headers: new Headers({
+      "Content-Type": "application/json",
+    }),
+    body: JSON.stringify({
+      uploadId: uploadId,
+      needsConversion: false,
+    }),
+  });
+
+  if (finishResult.status !== 200) {
+    t.fail("Dataset upload failed at finish");
+  }
+
+  const result = await fetch("/api/datasets/Organization_X/test-dataset-upload/health", {
+    headers: new Headers(),
+  });
+
+  if (result.status !== 200) {
+    t.fail("Dataset health check after upload failed");
+  }
+  t.pass();
+});
diff --git a/frontend/javascripts/test/e2e-setup.ts b/frontend/javascripts/test/e2e-setup.ts
index b1330eb25e..e8d6ed720d 100644
--- a/frontend/javascripts/test/e2e-setup.ts
+++ b/frontend/javascripts/test/e2e-setup.ts
@@ -3,7 +3,7 @@ import _ from "lodash";
 // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'deep... Remove this comment to see the full error message
 import deepForEach from "deep-for-each";
 // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'node... Remove this comment to see the full error message
-import fetch, { Headers, Request, Response, FetchError } from "node-fetch";
+import fetch, { Headers, FormData, Request, Response, FetchError, File } from "node-fetch";
 import fs from "node:fs";
 // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'shel... Remove this comment to see the full error message
 import shell from "shelljs";
@@ -67,7 +67,7 @@ global.fetch = function fetchWrapper(url, options) {
   let newUrl = url;
   // @ts-expect-error ts-migrate(2339) FIXME: Property 'indexOf' does not exist on type 'Request... Remove this comment to see the full error message
-  if (url.indexOf("http:") === -1) {
+  if (url.indexOf("http:") === -1 && url.indexOf("https:") === -1) {
     newUrl = `http://localhost:9000${url}`;
   }
 
@@ -84,6 +84,8 @@ global.Request = Request;
 global.Response = Response;
 // @ts-ignore FIXME: Element implicitly has an 'any' type because type ... Remove this comment to see the full error message
 global.FetchError = FetchError;
+global.FormData = FormData;
+global.File = File;
 
 const { JSDOM } = require("jsdom");
diff --git a/test/e2e/End2EndSpec.scala b/test/e2e/End2EndSpec.scala
index 1de30f63de..dc61e6c5d3 100644
--- a/test/e2e/End2EndSpec.scala
+++ b/test/e2e/End2EndSpec.scala
@@ -1,6 +1,6 @@
 package e2e
 
-import com.scalableminds.util.io.ZipIO
+import com.scalableminds.util.io.{PathUtils, ZipIO}
 import com.typesafe.scalalogging.LazyLogging
 import org.scalatestplus.play.guice._
 import org.specs2.main.Arguments
@@ -51,9 +51,11 @@ class End2EndSpec(arguments: Arguments) extends Specification with GuiceFakeAppl
   private def ensureTestDataset(): Unit = {
     val testDatasetPath = "test/dataset/test-dataset.zip"
     val dataDirectory = new File("binaryData/Organization_X")
-    if (!dataDirectory.exists()) {
-      dataDirectory.mkdirs()
+    if (dataDirectory.exists()) {
+      println("Deleting existing data directory Organization_X")
+      PathUtils.deleteDirectoryRecursively(dataDirectory.toPath)
     }
+    dataDirectory.mkdirs()
     val testDatasetZip = new File(testDatasetPath)
     if (!testDatasetZip.exists()) {
       throw new Exception("Test dataset zip file does not exist.")
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala
index bb16d34cd0..0d42244c6a 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala
@@ -1,15 +1,16 @@
 package com.scalableminds.webknossos.datastore.datavault
 
 import com.scalableminds.util.tools.Fox
-import net.liftweb.common.Box.tryo
-import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox}
+import com.scalableminds.util.tools.Fox.bool2Fox
 import com.scalableminds.webknossos.datastore.storage.DataVaultService
+import net.liftweb.common.{Box, Full}
 import org.apache.commons.lang3.builder.HashCodeBuilder
 
 import java.nio.ByteBuffer
-import java.nio.file.{Files, Path, Paths}
+import java.nio.channels.{AsynchronousFileChannel, CompletionHandler}
+import java.nio.file.{Files, Path, Paths, StandardOpenOption}
 import java.util.stream.Collectors
-import scala.concurrent.ExecutionContext
+import scala.concurrent.{ExecutionContext, Promise}
 import scala.jdk.CollectionConverters._
 
 class FileSystemDataVault extends DataVault {
@@ -24,31 +25,55 @@ class FileSystemDataVault extends DataVault {
   private def readBytesLocal(localPath: Path, range: RangeSpecifier)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
     if (Files.exists(localPath)) {
       range match {
-        case Complete() => tryo(Files.readAllBytes(localPath)).toFox
+        case Complete() =>
+          readAsync(localPath, 0, Math.toIntExact(Files.size(localPath)))
+
         case StartEnd(r) =>
-          tryo {
-            val channel = Files.newByteChannel(localPath)
-            val buf = ByteBuffer.allocateDirect(r.length)
-            channel.position(r.start)
-            channel.read(buf)
-            buf.rewind()
-            val arr = new Array[Byte](r.length)
-            buf.get(arr)
-            arr
-          }.toFox
+          readAsync(localPath, r.start, r.length)
+
         case SuffixLength(length) =>
-          tryo {
-            val channel = Files.newByteChannel(localPath)
-            val buf = ByteBuffer.allocateDirect(length)
-            channel.position(channel.size() - length)
-            channel.read(buf)
-            buf.rewind()
-            val arr = new Array[Byte](length)
-            buf.get(arr)
-            arr
-          }.toFox
+          val fileSize = Files.size(localPath)
+          readAsync(localPath, fileSize - length, length)
       }
-    } else Fox.empty
+    } else {
+      Fox.empty
+    }
+
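+  // readAsync bridges Java NIO's callback-based AsynchronousFileChannel into the Fox-based return
+  // type via a Promise[Box[Array[Byte]]]: the CompletionHandler fulfills the promise with the bytes
+  // read (wrapped in Full) or fails it, and the channel is closed in both callbacks as well as when
+  // opening the channel throws.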
+  private def readAsync(path: Path, position: Long, length: Int)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
+    val promise = Promise[Box[Array[Byte]]]()
+    val buffer = ByteBuffer.allocateDirect(length)
+    var channel: AsynchronousFileChannel = null
+
+    try {
+      channel = AsynchronousFileChannel.open(path, StandardOpenOption.READ)
+
+      channel.read(
+        buffer,
+        position,
+        buffer,
+        new CompletionHandler[Integer, ByteBuffer] {
+          override def completed(result: Integer, buffer: ByteBuffer): Unit = {
+            buffer.rewind()
+            val arr = new Array[Byte](length)
+            buffer.get(arr)
+            promise.success(Full(arr))
+            channel.close()
+          }
+
+          override def failed(exc: Throwable, buffer: ByteBuffer): Unit = {
+            promise.failure(exc)
+            channel.close()
+          }
+        }
+      )
+    } catch {
+      case e: Throwable =>
+        promise.failure(e)
+        if (channel != null && channel.isOpen) channel.close()
+    }
+
+    promise.future
+  }
 
   override def listDirectory(path: VaultPath, maxItems: Int)(implicit ec: ExecutionContext): Fox[List[VaultPath]] =
     vaultPathToLocalPath(path).map(