diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 9e5a0ce72ac..c7823a28458 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Precomputed meshes can now be loaded even when a mapping is active (HDF5 or an editable mapping produced by the proofreading tool). The precomputed mesh has to be computed without a mapping for this to work. [#6569](https://github.com/scalableminds/webknossos/pull/6569) ### Changed +- For remote datasets that require authentication, credentials are no longer stored in the respective JSON. [#6646](https://github.com/scalableminds/webknossos/pull/6646) - Improved performance of opening a dataset or annotation. [#6711](https://github.com/scalableminds/webknossos/pull/6711) - Redesigned organization page to include more infos on organization users, storage, webKnossos plan and provided opportunities to upgrade. [#6602](https://github.com/scalableminds/webknossos/pull/6602) - Changed branding of WEBKNOSSOS including a new logo, new primary colors, and UPPERCASE name. [#6739](https://github.com/scalableminds/webknossos/pull/6739) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 0f7881d705a..90307c3809b 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -13,3 +13,4 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). - [094-pricing-plans.sql](conf/evolutions/reversions/094-pricing-plans.sql) - [095-constraint-naming.sql](conf/evolutions/reversions/095-constraint-naming.sql) - [096-storage.sql](conf/evolutions/096-storage.sql) +- [097-credentials.sql](conf/evolutions/097-credentials.sql) diff --git a/app/controllers/CredentialController.scala b/app/controllers/CredentialController.scala new file mode 100644 index 00000000000..af3a56143d5 --- /dev/null +++ b/app/controllers/CredentialController.scala @@ -0,0 +1,65 @@ +package controllers + +import com.mohiva.play.silhouette.api.Silhouette +import com.scalableminds.util.tools.FoxImplicits +import com.scalableminds.webknossos.datastore.storage.{HttpBasicAuthCredential, S3AccessKeyCredential} +import models.binary.credential.CredentialDAO +import oxalis.security.WkEnv +import play.api.libs.json.{Json, OFormat} +import play.api.mvc.{Action, PlayBodyParsers} +import utils.ObjectId + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +case class HttpBasicAuthCredentialParameters(name: String, username: String, password: String, domain: Option[String]) + +object HttpBasicAuthCredentialParameters { + implicit val jsonFormat: OFormat[HttpBasicAuthCredentialParameters] = Json.format[HttpBasicAuthCredentialParameters] +} + +case class S3AccessKeyCredentialParameters(name: String, keyId: String, key: String, bucket: Option[String]) + +object S3AccessKeyCredentialParameters { + implicit val jsonFormat: OFormat[S3AccessKeyCredentialParameters] = Json.format[S3AccessKeyCredentialParameters] +} + +class CredentialController @Inject()(credentialDAO: CredentialDAO, sil: Silhouette[WkEnv])( + implicit ec: ExecutionContext, + val bodyParsers: PlayBodyParsers) + extends Controller + with FoxImplicits { + + def createHttpBasicAuthCredential: Action[HttpBasicAuthCredentialParameters] = + sil.SecuredAction.async(validateJson[HttpBasicAuthCredentialParameters]) { implicit request => + val _id = ObjectId.generate + for { + _ <- bool2Fox(request.identity.isAdmin) ?~> "notAllowed" ~> FORBIDDEN + _ <- credentialDAO.insertOne( + 
_id, + HttpBasicAuthCredential(request.body.name, + request.body.username, + request.body.password, + request.identity._id.toString, + request.identity._organization.toString) + ) ?~> "create.failed" + } yield Ok(Json.toJson(_id)) + } + + def createS3AccessKeyCredential: Action[S3AccessKeyCredentialParameters] = + sil.SecuredAction.async(validateJson[S3AccessKeyCredentialParameters]) { implicit request => + val _id = ObjectId.generate + for { + _ <- bool2Fox(request.identity.isAdmin) ?~> "notAllowed" ~> FORBIDDEN + _ <- credentialDAO.insertOne( + _id, + S3AccessKeyCredential(request.body.name, + request.body.keyId, + request.body.key, + request.identity._id.toString, + request.identity._organization.toString) + ) ?~> "create.failed" + } yield Ok(Json.toJson(_id)) + } + +} diff --git a/app/controllers/DataSetController.scala b/app/controllers/DataSetController.scala index 7ab2f731741..acffcf7f719 100755 --- a/app/controllers/DataSetController.scala +++ b/app/controllers/DataSetController.scala @@ -137,7 +137,7 @@ class DataSetController @Inject()(userService: UserService, val reportMutable = ListBuffer[String]() for { dataSourceBox: Box[GenericDataSource[DataLayer]] <- exploreRemoteLayerService - .exploreRemoteDatasource(request.body, reportMutable) + .exploreRemoteDatasource(request.body, request.identity, reportMutable) .futureBox dataSourceOpt = dataSourceBox match { case Full(dataSource) if dataSource.dataLayers.nonEmpty => diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 871e10c2be8..c51a95e8169 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -11,8 +11,10 @@ import com.scalableminds.webknossos.datastore.services.{ ReserveUploadInformation } import com.typesafe.scalalogging.LazyLogging +import javax.inject.Inject import models.analytics.{AnalyticsService, UploadDatasetEvent} import models.binary._ +import models.binary.credential.CredentialDAO import models.folder.FolderDAO import models.job.JobDAO import models.organization.OrganizationDAO @@ -26,7 +28,6 @@ import play.api.libs.json.{JsError, JsSuccess, JsValue, Json} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import utils.ObjectId -import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} class WKRemoteDataStoreController @Inject()( @@ -41,6 +42,7 @@ class WKRemoteDataStoreController @Inject()( userDAO: UserDAO, folderDAO: FolderDAO, jobDAO: JobDAO, + credentialDAO: CredentialDAO, mailchimpClient: MailchimpClient, wkSilhouetteEnvironment: WkSilhouetteEnvironment)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller @@ -192,4 +194,14 @@ class WKRemoteDataStoreController @Inject()( } } + def findCredential(name: String, key: String, credentialId: String): Action[AnyContent] = Action.async { + implicit request => + dataStoreService.validateAccess(name, key) { _ => + for { + credentialIdValidated <- ObjectId.fromString(credentialId) + credential <- credentialDAO.findOne(credentialIdValidated) + } yield Ok(Json.toJson(credential)) + } + } + } diff --git a/app/models/binary/credential/CredentialDAO.scala b/app/models/binary/credential/CredentialDAO.scala new file mode 100644 index 00000000000..dc9b553e111 --- /dev/null +++ b/app/models/binary/credential/CredentialDAO.scala @@ -0,0 +1,69 @@ +package models.binary.credential + +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.storage.{AnyCredential, 
HttpBasicAuthCredential, S3AccessKeyCredential} +import com.scalableminds.webknossos.schema.Tables.{Credentials, CredentialsRow} +import utils.sql.{SecuredSQLDAO, SqlClient, SqlToken} +import utils.ObjectId + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class CredentialDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) extends SecuredSQLDAO(sqlClient) { + protected val collection = Credentials + + protected def columnsList: List[String] = collection.baseTableRow.create_*.map(_.name).toList + override protected def collectionName: String = "credentials" + def columns: SqlToken = SqlToken.raw(columnsList.mkString(", ")) + + private def parseAsHttpBasicAuthCredential(r: CredentialsRow): Fox[HttpBasicAuthCredential] = + for { + username <- r.identifier.toFox + password <- r.secret.toFox + } yield + HttpBasicAuthCredential( + r.name, + username, + password, + r._User, + r._Organization + ) + + private def parseAsS3AccessKeyCredential(r: CredentialsRow): Fox[S3AccessKeyCredential] = + for { + keyId <- r.identifier.toFox + key <- r.secret.toFox + } yield + S3AccessKeyCredential( + r.name, + keyId, + key, + r._User, + r._Organization + ) + + def insertOne(_id: ObjectId, credential: HttpBasicAuthCredential): Fox[Unit] = + for { + _ <- run(q"""insert into webknossos.credentials(_id, type, name, identifier, secret, _user, _organization) + values(${_id}, ${CredentialType.HTTP_Basic_Auth}, ${credential.name}, ${credential.username}, ${credential.password}, ${credential.user}, ${credential.organization})""".asUpdate) + } yield () + + def insertOne(_id: ObjectId, credential: S3AccessKeyCredential): Fox[Unit] = + for { + _ <- run(q"""insert into webknossos.credentials(_id, type, name, identifier, secret, _user, _organization) + values(${_id}, ${CredentialType.S3_Access_Key}, ${credential.name}, ${credential.keyId}, ${credential.key}, ${credential.user}, ${credential.organization})""".asUpdate) + } yield () + + def findOne(id: ObjectId): Fox[AnyCredential] = + for { + r <- run(q"select $columns from webknossos.credentials_ where _id = $id".as[CredentialsRow]) + firstRow <- r.headOption.toFox + parsed <- parseAnyCredential(firstRow) + } yield parsed + + private def parseAnyCredential(r: CredentialsRow): Fox[AnyCredential] = + r.`type` match { + case "HTTP_Basic_Auth" => parseAsHttpBasicAuthCredential(r) + case "S3_Access_Key" => parseAsS3AccessKeyCredential(r) + case _ => Fox.failure("Unsupported credential type: " + r.`type`) + } +} diff --git a/app/models/binary/credential/CredentialService.scala b/app/models/binary/credential/CredentialService.scala new file mode 100644 index 00000000000..8012d59280e --- /dev/null +++ b/app/models/binary/credential/CredentialService.scala @@ -0,0 +1,51 @@ +package models.binary.credential + +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.storage.{HttpBasicAuthCredential, S3AccessKeyCredential} +import utils.ObjectId + +import java.net.URI +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class CredentialService @Inject()(credentialDao: CredentialDAO) { + + def createCredential(uri: URI, + username: Option[String], + password: Option[String], + user: String, + organization: String)(implicit ec: ExecutionContext): Fox[Option[ObjectId]] = { + val scheme = uri.getScheme + scheme match { + case "https" => + username match { + case Some(u) => + val _id = ObjectId.generate + for { + _ <- credentialDao.insertOne( + _id, + HttpBasicAuthCredential(uri.toString, u, password.getOrElse(""), user, organization)) + _ <- credentialDao.findOne(_id)
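+            // reading the stored credential back makes a failed insert or parse surface here rather than at first dataset access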
+ } yield Some(_id) + case None => Fox.empty + } + case "s3" => + username match { + case Some(keyId) => + password match { + case Some(secretKey) => + val _id = ObjectId.generate + for { + _ <- credentialDao.insertOne( + _id, + S3AccessKeyCredential(uri.toString, keyId, secretKey, user, organization)) + _ <- credentialDao.findOne(_id) + } yield Some(_id) + case None => Fox.empty + } + case None => Fox.empty + } + case _ => Fox.empty + } + } + +} diff --git a/app/models/binary/credential/CredentialType.scala b/app/models/binary/credential/CredentialType.scala new file mode 100644 index 00000000000..d92b80d662d --- /dev/null +++ b/app/models/binary/credential/CredentialType.scala @@ -0,0 +1,9 @@ +package models.binary.credential + +import com.scalableminds.util.enumeration.ExtendedEnumeration + +object CredentialType extends ExtendedEnumeration { + type CredentialType = Value + + val HTTP_Basic_Auth, S3_Access_Key, HTTP_Token, GCS = Value +} diff --git a/app/models/binary/explore/ExploreRemoteLayerService.scala b/app/models/binary/explore/ExploreRemoteLayerService.scala index d83ee6e3cef..983ef7d7a21 100644 --- a/app/models/binary/explore/ExploreRemoteLayerService.scala +++ b/app/models/binary/explore/ExploreRemoteLayerService.scala @@ -9,8 +9,11 @@ import com.scalableminds.webknossos.datastore.datareaders.zarr._ import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.storage.FileSystemsHolder import com.typesafe.scalalogging.LazyLogging +import models.binary.credential.CredentialService +import models.user.User import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.util.Helpers.tryo +import oxalis.security.WkEnv import play.api.libs.json.{Json, OFormat} import java.net.URI @@ -26,14 +29,20 @@ object ExploreRemoteDatasetParameters { implicit val jsonFormat: OFormat[ExploreRemoteDatasetParameters] = Json.format[ExploreRemoteDatasetParameters] } -class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLogging { +class ExploreRemoteLayerService @Inject()(credentialService: CredentialService) extends FoxImplicits with LazyLogging { def exploreRemoteDatasource( urisWithCredentials: List[ExploreRemoteDatasetParameters], + requestIdentity: WkEnv#I, reportMutable: ListBuffer[String])(implicit ec: ExecutionContext): Fox[GenericDataSource[DataLayer]] = for { - exploredLayersNested <- Fox.serialCombined(urisWithCredentials)(parameters => - exploreRemoteLayersForUri(parameters.remoteUri, parameters.user, parameters.password, reportMutable)) + exploredLayersNested <- Fox.serialCombined(urisWithCredentials)( + parameters => + exploreRemoteLayersForUri(parameters.remoteUri, + parameters.user, + parameters.password, + reportMutable, + requestIdentity)) layersWithVoxelSizes = exploredLayersNested.flatten _ <- bool2Fox(layersWithVoxelSizes.nonEmpty) ?~> "Detected zero layers" rescaledLayersAndVoxelSize <- rescaleLayersByCommonVoxelSize(layersWithVoxelSizes) ?~> "Could not extract common voxel size from layers" @@ -131,14 +140,20 @@ class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLoggin layerUri: String, user: Option[String], password: Option[String], - reportMutable: ListBuffer[String])(implicit ec: ExecutionContext): Fox[List[(DataLayer, Vec3Double)]] = + reportMutable: ListBuffer[String], + requestingUser: User)(implicit ec: ExecutionContext): Fox[List[(DataLayer, Vec3Double)]] = for { remoteSource <- tryo(RemoteSourceDescriptor(new URI(normalizeUri(layerUri)), user, password)).toFox ?~> s"Received invalid URI: 
$layerUri" + credentialId <- credentialService.createCredential(new URI(normalizeUri(layerUri)), + user, + password, + requestingUser._id.toString, + requestingUser._organization.toString) fileSystem <- FileSystemsHolder.getOrCreate(remoteSource).toFox ?~> "Failed to set up remote file system" remotePath <- tryo(fileSystem.getPath(remoteSource.remotePath)) ?~> "Failed to get remote path" layersWithVoxelSizes <- exploreRemoteLayersForRemotePath( remotePath, - remoteSource.credentials, + credentialId.map(_.toString), reportMutable, List(new ZarrArrayExplorer, new NgffExplorer, new N5ArrayExplorer, new N5MultiscalesExplorer)) } yield layersWithVoxelSizes @@ -153,23 +168,23 @@ class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLoggin private def exploreRemoteLayersForRemotePath( remotePath: Path, - credentials: Option[FileSystemCredentials], + credentialId: Option[String], reportMutable: ListBuffer[String], explorers: List[RemoteLayerExplorer])(implicit ec: ExecutionContext): Fox[List[(DataLayer, Vec3Double)]] = explorers match { case Nil => Fox.empty case currentExplorer :: remainingExplorers => reportMutable += s"\nTrying to explore $remotePath as ${currentExplorer.name}..." - currentExplorer.explore(remotePath, credentials).futureBox.flatMap { + currentExplorer.explore(remotePath, credentialId).futureBox.flatMap { case Full(layersWithVoxelSizes) => reportMutable += s"Found ${layersWithVoxelSizes.length} ${currentExplorer.name} layers at $remotePath." Fox.successful(layersWithVoxelSizes) case f: Failure => reportMutable += s"Error when reading $remotePath as ${currentExplorer.name}: ${formatFailureForReport(f)}" - exploreRemoteLayersForRemotePath(remotePath, credentials, reportMutable, remainingExplorers) + exploreRemoteLayersForRemotePath(remotePath, credentialId, reportMutable, remainingExplorers) case Empty => reportMutable += s"Error when reading $remotePath as ${currentExplorer.name}: Empty" - exploreRemoteLayersForRemotePath(remotePath, credentials, reportMutable, remainingExplorers) + exploreRemoteLayersForRemotePath(remotePath, credentialId, reportMutable, remainingExplorers) } } diff --git a/app/models/binary/explore/N5ArrayExplorer.scala b/app/models/binary/explore/N5ArrayExplorer.scala index 53c5eab2e1e..73a2454f869 100644 --- a/app/models/binary/explore/N5ArrayExplorer.scala +++ b/app/models/binary/explore/N5ArrayExplorer.scala @@ -3,7 +3,6 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5Layer, N5SegmentationLayer} -import com.scalableminds.webknossos.datastore.dataformats.zarr.FileSystemCredentials import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header import com.scalableminds.webknossos.datastore.models.datasource.Category @@ -15,7 +14,7 @@ class N5ArrayExplorer extends RemoteLayerExplorer { override def name: String = "N5 Array" - override def explore(remotePath: Path, credentials: Option[FileSystemCredentials]): Fox[List[(N5Layer, Vec3Double)]] = + override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(N5Layer, Vec3Double)]] = for { headerPath <- Fox.successful(remotePath.resolve(N5Header.FILENAME_ATTRIBUTES_JSON)) name <- guessNameFromPath(remotePath) @@ -23,7 +22,7 @@ class N5ArrayExplorer extends RemoteLayerExplorer { elementClass <- 
n5Header.elementClass ?~> "failed to read element class from n5 header" guessedAxisOrder = AxisOrder.asZyxFromRank(n5Header.rank) boundingBox <- n5Header.boundingBox(guessedAxisOrder) ?~> "failed to read bounding box from zarr header. Make sure data is in (T/C)ZYX format" - magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder), None) + magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), None, Some(guessedAxisOrder), None, credentialId) layer: N5Layer = if (looksLikeSegmentationLayer(name, elementClass)) { N5SegmentationLayer(name, boundingBox, elementClass, List(magLocator), largestSegmentId = None) } else N5DataLayer(name, Category.color, boundingBox, elementClass, List(magLocator)) diff --git a/app/models/binary/explore/N5MultiscalesExplorer.scala b/app/models/binary/explore/N5MultiscalesExplorer.scala index ebab97c4ea6..c9a9f6c7e55 100644 --- a/app/models/binary/explore/N5MultiscalesExplorer.scala +++ b/app/models/binary/explore/N5MultiscalesExplorer.scala @@ -4,7 +4,6 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5Layer, N5SegmentationLayer} -import com.scalableminds.webknossos.datastore.dataformats.zarr.FileSystemCredentials import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.n5.{ N5Header, @@ -23,20 +22,20 @@ class N5MultiscalesExplorer extends RemoteLayerExplorer with FoxImplicits { override def name: String = "N5 Multiscales" - override def explore(remotePath: Path, credentials: Option[FileSystemCredentials]): Fox[List[(N5Layer, Vec3Double)]] = + override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(N5Layer, Vec3Double)]] = for { zattrsPath <- Fox.successful(remotePath.resolve(N5Metadata.FILENAME_ATTRIBUTES_JSON)) n5Metadata <- parseJsonFromPath[N5Metadata](zattrsPath) ?~> s"Failed to read OME NGFF header at $zattrsPath" - layers <- Fox.serialCombined(n5Metadata.multiscales)(layerFromN5MultiscalesItem(_, remotePath, credentials)) + layers <- Fox.serialCombined(n5Metadata.multiscales)(layerFromN5MultiscalesItem(_, remotePath, credentialId)) } yield layers private def layerFromN5MultiscalesItem(multiscalesItem: N5MultiscalesItem, remotePath: Path, - credentials: Option[FileSystemCredentials]): Fox[(N5Layer, Vec3Double)] = + credentialId: Option[String]): Fox[(N5Layer, Vec3Double)] = for { voxelSizeNanometers <- extractVoxelSize(multiscalesItem.datasets.map(_.transform)) magsWithAttributes <- Fox.serialCombined(multiscalesItem.datasets)(d => - n5MagFromDataset(d, remotePath, voxelSizeNanometers, credentials)) + n5MagFromDataset(d, remotePath, voxelSizeNanometers, credentialId)) _ <- bool2Fox(magsWithAttributes.nonEmpty) ?~> "zero mags in layer" elementClass <- elementClassFromMags(magsWithAttributes) ?~> "Could not extract element class from mags" boundingBox = boundingBoxFromMags(magsWithAttributes) @@ -105,7 +104,7 @@ class N5MultiscalesExplorer extends RemoteLayerExplorer with FoxImplicits { private def n5MagFromDataset(n5Dataset: N5MultiscalesDataset, layerPath: Path, voxelSize: Vec3Double, - credentials: Option[FileSystemCredentials]): Fox[MagWithAttributes] = + credentialId: Option[String]): Fox[MagWithAttributes] = for { axisOrder <- extractAxisOrder(n5Dataset.transform.axes) ?~> "Could not extract XYZ 
axis order mapping. Does the data have x, y and z axes, stated in multiscales metadata?" mag <- magFromTransform(voxelSize, n5Dataset.transform) ?~> "Could not extract mag from transforms" @@ -115,7 +114,7 @@ class N5MultiscalesExplorer extends RemoteLayerExplorer with FoxImplicits { elementClass <- n5Header.elementClass ?~> s"failed to read element class from n5 header at $headerPath" boundingBox <- n5Header.boundingBox(axisOrder) ?~> s"failed to read bounding box from n5 header at $headerPath" } yield - MagWithAttributes(MagLocator(mag, Some(magPath.toString), credentials, Some(axisOrder), None), + MagWithAttributes(MagLocator(mag, Some(magPath.toString), None, Some(axisOrder), None, credentialId), magPath, elementClass, boundingBox) diff --git a/app/models/binary/explore/NgffExplorer.scala b/app/models/binary/explore/NgffExplorer.scala index 6d008c88f17..0f8e772a421 100644 --- a/app/models/binary/explore/NgffExplorer.scala +++ b/app/models/binary/explore/NgffExplorer.scala @@ -3,12 +3,7 @@ package models.binary.explore import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator -import com.scalableminds.webknossos.datastore.dataformats.zarr.{ - FileSystemCredentials, - ZarrDataLayer, - ZarrLayer, - ZarrSegmentationLayer -} +import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.zarr._ import com.scalableminds.webknossos.datastore.models.datasource.{Category, ElementClass} @@ -20,17 +15,17 @@ class NgffExplorer extends RemoteLayerExplorer { override def name: String = "OME NGFF Zarr v0.4" - override def explore(remotePath: Path, - credentials: Option[FileSystemCredentials]): Fox[List[(ZarrLayer, Vec3Double)]] = + override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { zattrsPath <- Fox.successful(remotePath.resolve(NgffMetadata.FILENAME_DOT_ZATTRS)) ngffHeader <- parseJsonFromPath[NgffMetadata](zattrsPath) ?~> s"Failed to read OME NGFF header at $zattrsPath" - labelLayers <- exploreLabelLayers(remotePath, credentials).orElse(Fox.successful(List[(ZarrLayer, Vec3Double)]())) + labelLayers <- exploreLabelLayers(remotePath, credentialId).orElse( + Fox.successful(List[(ZarrLayer, Vec3Double)]())) layerLists: List[List[(ZarrLayer, Vec3Double)]] <- Fox.serialCombined(ngffHeader.multiscales)(multiscale => { for { channelCount: Int <- getNgffMultiscaleChannelCount(multiscale, remotePath) - layers <- layersFromNgffMultiscale(multiscale, remotePath, credentials, channelCount) + layers <- layersFromNgffMultiscale(multiscale, remotePath, credentialId, channelCount) } yield layers }) layers: List[(ZarrLayer, Vec3Double)] = layerLists.flatten @@ -51,7 +46,7 @@ class NgffExplorer extends RemoteLayerExplorer { private def layersFromNgffMultiscale(multiscale: NgffMultiscalesItem, remotePath: Path, - credentials: Option[FileSystemCredentials], + credentialId: Option[String], channelCount: Int, isSegmentation: Boolean = false): Fox[List[(ZarrLayer, Vec3Double)]] = for { @@ -66,7 +61,7 @@ class NgffExplorer extends RemoteLayerExplorer { layerTuples <- Fox.serialCombined((0 until channelCount).toList)({ channelIndex: Int => for { magsWithAttributes <- Fox.serialCombined(multiscale.datasets)(d => - zarrMagFromNgffDataset(d, remotePath, voxelSizeInAxisUnits, 
axisOrder, credentials, Some(channelIndex))) + zarrMagFromNgffDataset(d, remotePath, voxelSizeInAxisUnits, axisOrder, credentialId, Some(channelIndex))) _ <- bool2Fox(magsWithAttributes.nonEmpty) ?~> "zero mags in layer" elementClassRaw <- elementClassFromMags(magsWithAttributes) ?~> "Could not extract element class from mags" elementClass = if (isSegmentation) ensureElementClassForSegmentationLayer(elementClassRaw) @@ -83,19 +78,18 @@ class NgffExplorer extends RemoteLayerExplorer { }) } yield layerTuples - private def exploreLabelLayers(remotePath: Path, - credentials: Option[FileSystemCredentials]): Fox[List[(ZarrLayer, Vec3Double)]] = + private def exploreLabelLayers(remotePath: Path, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { labelDescriptionPath <- Fox.successful(remotePath.resolve(NgffLabelsGroup.LABEL_PATH)) labelGroup <- parseJsonFromPath[NgffLabelsGroup](labelDescriptionPath) layerTuples <- Fox.serialCombined(labelGroup.labels) { labelPath => - layersForLabel(remotePath, labelPath, credentials) + layersForLabel(remotePath, labelPath, credentialId) } } yield layerTuples.flatten private def layersForLabel(remotePath: Path, labelPath: String, - credentials: Option[FileSystemCredentials]): Fox[List[(ZarrLayer, Vec3Double)]] = + credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { fullLabelPath <- Fox.successful(remotePath.resolve("labels").resolve(labelPath)) zattrsPath = fullLabelPath.resolve(NgffMetadata.FILENAME_DOT_ZATTRS) @@ -104,7 +98,7 @@ class NgffExplorer extends RemoteLayerExplorer { multiscale => layersFromNgffMultiscale(multiscale.copy(name = Some(s"labels-$labelPath")), fullLabelPath, - credentials, + credentialId, 1, isSegmentation = true)) } yield layers.flatten @@ -122,7 +116,7 @@ class NgffExplorer extends RemoteLayerExplorer { layerPath: Path, voxelSizeInAxisUnits: Vec3Double, axisOrder: AxisOrder, - credentials: Option[FileSystemCredentials], + credentialId: Option[String], channelIndex: Option[Int]): Fox[MagWithAttributes] = for { mag <- magFromTransforms(ngffDataset.coordinateTransformations, voxelSizeInAxisUnits, axisOrder) ?~> "Could not extract mag from scale transforms" @@ -132,7 +126,7 @@ class NgffExplorer extends RemoteLayerExplorer { elementClass <- zarrHeader.elementClass ?~> s"failed to read element class from zarr header at $zarrayPath" boundingBox <- zarrHeader.boundingBox(axisOrder) ?~> s"failed to read bounding box from zarr header at $zarrayPath" } yield - MagWithAttributes(MagLocator(mag, Some(magPath.toString), credentials, Some(axisOrder), channelIndex), + MagWithAttributes(MagLocator(mag, Some(magPath.toString), None, Some(axisOrder), channelIndex, credentialId), magPath, elementClass, boundingBox) diff --git a/app/models/binary/explore/RemoteLayerExplorer.scala b/app/models/binary/explore/RemoteLayerExplorer.scala index db0c15a00bb..822a05a81ec 100644 --- a/app/models/binary/explore/RemoteLayerExplorer.scala +++ b/app/models/binary/explore/RemoteLayerExplorer.scala @@ -3,7 +3,6 @@ package models.binary.explore import com.scalableminds.util.geometry.{BoundingBox, Vec3Double} import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.dataformats.MagLocator -import com.scalableminds.webknossos.datastore.dataformats.zarr.FileSystemCredentials import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} import net.liftweb.util.Helpers.tryo import play.api.libs.json.Reads @@ -19,7 +18,7 @@ case class 
MagWithAttributes(mag: MagLocator, trait RemoteLayerExplorer extends FoxImplicits { - def explore(remotePath: Path, credentials: Option[FileSystemCredentials]): Fox[List[(DataLayer, Vec3Double)]] + def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(DataLayer, Vec3Double)]] def name: String diff --git a/app/models/binary/explore/ZarrArrayExplorer.scala b/app/models/binary/explore/ZarrArrayExplorer.scala index 51eb4cc7c35..2627ccf9de6 100644 --- a/app/models/binary/explore/ZarrArrayExplorer.scala +++ b/app/models/binary/explore/ZarrArrayExplorer.scala @@ -3,12 +3,7 @@ package models.binary.explore import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator -import com.scalableminds.webknossos.datastore.dataformats.zarr.{ - FileSystemCredentials, - ZarrDataLayer, - ZarrLayer, - ZarrSegmentationLayer -} +import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrHeader import com.scalableminds.webknossos.datastore.models.datasource.Category @@ -20,8 +15,7 @@ class ZarrArrayExplorer extends RemoteLayerExplorer { override def name: String = "Zarr Array" - override def explore(remotePath: Path, - credentials: Option[FileSystemCredentials]): Fox[List[(ZarrLayer, Vec3Double)]] = + override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { zarrayPath <- Fox.successful(remotePath.resolve(ZarrHeader.FILENAME_DOT_ZARRAY)) name <- guessNameFromPath(remotePath) @@ -29,7 +23,7 @@ class ZarrArrayExplorer extends RemoteLayerExplorer { elementClass <- zarrHeader.elementClass ?~> "failed to read element class from zarr header" guessedAxisOrder = AxisOrder.asZyxFromRank(zarrHeader.rank) boundingBox <- zarrHeader.boundingBox(guessedAxisOrder) ?~> "failed to read bounding box from zarr header. 
Make sure data is in (T/C)ZYX format" - magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder), None) + magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), None, Some(guessedAxisOrder), None, credentialId) layer: ZarrLayer = if (looksLikeSegmentationLayer(name, elementClass)) { ZarrSegmentationLayer(name, boundingBox, elementClass, List(magLocator), largestSegmentId = None) } else ZarrDataLayer(name, Category.color, boundingBox, elementClass, List(magLocator)) diff --git a/conf/evolutions/097-credentials.sql b/conf/evolutions/097-credentials.sql new file mode 100644 index 00000000000..7f43341cce8 --- /dev/null +++ b/conf/evolutions/097-credentials.sql @@ -0,0 +1,21 @@ +START TRANSACTION; + +CREATE TYPE webknossos.CREDENTIAL_TYPE AS ENUM ('HTTP_Basic_Auth', 'S3_Access_Key', 'HTTP_Token', 'GCS'); +CREATE TABLE webknossos.credentials( + _id CHAR(24) PRIMARY KEY, + type webknossos.CREDENTIAL_TYPE NOT NULL, + name VARCHAR(256) NOT NULL, + identifier Text, + secret Text, + _user CHAR(24) NOT NULL, + _organization CHAR(24) NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT NOW(), + isDeleted BOOLEAN NOT NULL DEFAULT false +); + +CREATE VIEW webknossos.credentials_ as SELECT * FROM webknossos.credentials WHERE NOT isDeleted; + +UPDATE webknossos.releaseInformation +SET schemaVersion = 97; + +COMMIT TRANSACTION; diff --git a/conf/evolutions/reversions/097-credentials.sql b/conf/evolutions/reversions/097-credentials.sql new file mode 100644 index 00000000000..8d11abac6b2 --- /dev/null +++ b/conf/evolutions/reversions/097-credentials.sql @@ -0,0 +1,9 @@ +START TRANSACTION; + +DROP VIEW webknossos.credentials_; +DROP TABLE webknossos.credentials; +DROP TYPE webknossos.CREDENTIAL_TYPE; + +UPDATE webknossos.releaseInformation SET schemaVersion = 96; + +COMMIT TRANSACTION; diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 84ec8699f6e..e3fdf18156c 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -102,6 +102,7 @@ POST /datastores/:name/reserveUpload POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, dataSetName: String, dataSetSizeBytes: Long) POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String) GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String) +GET /datastores/:name/findCredential controllers.WKRemoteDataStoreController.findCredential(name: String, key: String, credentialId: String) POST /datastores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaDatastore(name: String, key: String, token: Option[String]) POST /datastores controllers.DataStoreController.create DELETE /datastores/:name controllers.DataStoreController.delete(name: String) @@ -268,6 +269,10 @@ GET /publications/:id POST /shortLinks controllers.ShortLinkController.create GET /shortLinks/byKey/:key controllers.ShortLinkController.getByKey(key: String) + +# Credentials +POST /credentials/httpBasicAuth controllers.CredentialController.createHttpBasicAuthCredential +POST /credentials/s3AccessKey controllers.CredentialController.createS3AccessKeyCredential + # Voxelytics POST /voxelytics/workflows controllers.VoxelyticsController.storeWorkflow GET /voxelytics/workflows controllers.VoxelyticsController.listWorkflows(workflowHash: Option[String]) diff --git 
a/tools/postgres/schema.sql b/tools/postgres/schema.sql index d6ddf5348b9..c5a90ae28b6 100644 --- a/tools/postgres/schema.sql +++ b/tools/postgres/schema.sql @@ -19,7 +19,7 @@ START TRANSACTION; CREATE TABLE webknossos.releaseInformation ( schemaVersion BIGINT NOT NULL ); -INSERT INTO webknossos.releaseInformation(schemaVersion) values(96); +INSERT INTO webknossos.releaseInformation(schemaVersion) values(97); COMMIT TRANSACTION; @@ -453,6 +453,19 @@ CREATE TABLE webknossos.shortLinks( longLink Text NOT NULL ); +CREATE TYPE webknossos.CREDENTIAL_TYPE AS ENUM ('HTTP_Basic_Auth', 'S3_Access_Key', 'HTTP_Token', 'GCS'); +CREATE TABLE webknossos.credentials( + _id CHAR(24) PRIMARY KEY, + type webknossos.CREDENTIAL_TYPE NOT NULL, + name VARCHAR(256) NOT NULL, + identifier Text, + secret Text, + _user CHAR(24) NOT NULL, + _organization CHAR(24) NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT NOW(), + isDeleted BOOLEAN NOT NULL DEFAULT false +); + CREATE TABLE webknossos.folders( _id CHAR(24) PRIMARY KEY, name TEXT NOT NULL, @@ -605,6 +618,7 @@ CREATE VIEW webknossos.invites_ AS SELECT * FROM webknossos.invites WHERE NOT is CREATE VIEW webknossos.organizationTeams AS SELECT * FROM webknossos.teams WHERE isOrganizationTeam AND NOT isDeleted; CREATE VIEW webknossos.annotation_privateLinks_ as SELECT * FROM webknossos.annotation_privateLinks WHERE NOT isDeleted; CREATE VIEW webknossos.folders_ as SELECT * FROM webknossos.folders WHERE NOT isDeleted; +CREATE VIEW webknossos.credentials_ as SELECT * FROM webknossos.credentials WHERE NOT isDeleted; CREATE VIEW webknossos.userInfos AS SELECT diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 3f68a7e3b2a..3d1ad200bae 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -117,7 +117,7 @@ class ZarrStreamingController @Inject()( d.category, d.boundingBox, d.elementClass, - d.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)), + d.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None, None)), numChannels = Some(if (d.elementClass == ElementClass.uint24) 3 else 1) ) case s: WKWSegmentationLayer => @@ -125,7 +125,7 @@ class ZarrStreamingController @Inject()( s.name, s.boundingBox, s.elementClass, - s.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)), + s.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None, None)), mappings = s.mappings, largestSegmentId = s.largestSegmentId, numChannels = Some(if (s.elementClass == ElementClass.uint24) 3 else 1) @@ -136,7 +136,7 @@ class ZarrStreamingController @Inject()( z.category, z.boundingBox, z.elementClass, - z.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)), + z.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None, None)), numChannels = Some(if (z.elementClass == ElementClass.uint24) 3 else 1) ) case zs: ZarrSegmentationLayer => @@ -144,7 +144,7 @@ class ZarrStreamingController @Inject()( zs.name, zs.boundingBox, zs.elementClass, - zs.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)), + zs.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None, None)), mappings = zs.mappings, 
largestSegmentId = zs.largestSegmentId, numChannels = Some(if (zs.elementClass == ElementClass.uint24) 3 else 1) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala index a81adbe6b76..c9374d687c6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala @@ -4,56 +4,65 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.zarr.RemoteSourceDescriptor import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemsHolder} +import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemService, FileSystemsHolder} import com.typesafe.scalalogging.LazyLogging -import net.liftweb.common.{Box, Empty} +import net.liftweb.common.Empty import java.nio.file.{FileSystem, Path} import scala.concurrent.ExecutionContext trait BucketProvider extends FoxImplicits with LazyLogging { + def fileSystemServiceOpt: Option[FileSystemService] + // To be defined in subclass. - def loadFromUnderlying(readInstruction: DataReadInstruction): Box[DataCubeHandle] = Empty + def loadFromUnderlying(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[DataCubeHandle] = + Empty def load(readInstruction: DataReadInstruction, cache: DataCubeCache)( implicit ec: ExecutionContext): Fox[Array[Byte]] = cache.withCache(readInstruction)(loadFromUnderlyingWithTimeout)(_.cutOutBucket(readInstruction.bucket)) - private def loadFromUnderlyingWithTimeout(readInstruction: DataReadInstruction): Box[DataCubeHandle] = { + private def loadFromUnderlyingWithTimeout(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[DataCubeHandle] = { val t = System.currentTimeMillis - val result = loadFromUnderlying(readInstruction) - val duration = System.currentTimeMillis - t - if (duration > 500) { - val className = this.getClass.getName.split("\\.").last - logger.warn( - s"Opening file in $className took ${if (duration > 3000) "really " else ""}long.\n" - + s" duration: $duration ms\n" - + s" dataSource: ${readInstruction.dataSource.id.name}\n" - + s" dataLayer: ${readInstruction.dataLayer.name}\n" - + s" cube: ${readInstruction.cube}" - ) - } - result + for { + result <- loadFromUnderlying(readInstruction).futureBox + duration = System.currentTimeMillis - t + _ = if (duration > 500) { + val className = this.getClass.getName.split("\\.").last + logger.warn( + s"Opening file in $className took ${if (duration > 3000) "really " else ""}long.\n" + + s" duration: $duration ms\n" + + s" dataSource: ${readInstruction.dataSource.id.name}\n" + + s" dataLayer: ${readInstruction.dataLayer.name}\n" + + s" cube: ${readInstruction.cube}" + ) + } + } yield result } def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = Iterator.empty - protected def remotePathFrom(remoteSource: RemoteSourceDescriptor): Option[Path] = - FileSystemsHolder.getOrCreate(remoteSource).map { fileSystem: FileSystem => - fileSystem.getPath(remoteSource.remotePath) - } + protected def remotePathFrom(remoteSource: RemoteSourceDescriptor)(implicit ec: ExecutionContext): 
Fox[Path] = + FileSystemsHolder + .getOrCreate(remoteSource) + .map { fileSystem: FileSystem => + fileSystem.getPath(remoteSource.remotePath) + } + .toFox - protected def localPathFrom(readInstruction: DataReadInstruction, relativeMagPath: String): Option[Path] = { + protected def localPathFrom(readInstruction: DataReadInstruction, relativeMagPath: String)( + implicit ec: ExecutionContext): Fox[Path] = { val magPath = readInstruction.baseDir .resolve(readInstruction.dataSource.id.team) .resolve(readInstruction.dataSource.id.name) .resolve(readInstruction.dataLayer.name) .resolve(relativeMagPath) if (magPath.toFile.exists()) { - Some(magPath) - } else None + Fox.successful(magPath) + } else Fox.empty } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala index 9461ce8d68c..fb0703645f1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala @@ -1,7 +1,7 @@ package com.scalableminds.webknossos.datastore.dataformats import com.scalableminds.util.geometry.Vec3Int -import com.scalableminds.webknossos.datastore.dataformats.zarr.{FileSystemCredentials, RemoteSourceDescriptor} +import com.scalableminds.webknossos.datastore.dataformats.zarr.{FileSystemCredentials} import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.models.datasource.ResolutionFormatHelper import com.scalableminds.webknossos.datastore.storage.FileSystemsHolder @@ -13,17 +13,12 @@ case class MagLocator(mag: Vec3Int, path: Option[String], credentials: Option[FileSystemCredentials], axisOrder: Option[AxisOrder], - channelIndex: Option[Int]) { + channelIndex: Option[Int], + credentialId: Option[String]) { lazy val pathWithFallback: String = path.getOrElse(mag.toMagLiteral(allowScalar = true)) - private lazy val uri: URI = new URI(pathWithFallback) - private lazy val isRemote: Boolean = FileSystemsHolder.isSupportedRemoteScheme(uri.getScheme) - lazy val remoteSource: Option[RemoteSourceDescriptor] = - if (isRemote) - Some(RemoteSourceDescriptor(uri, credentials.map(_.user), credentials.flatMap(_.password))) - else - None - + lazy val uri: URI = new URI(pathWithFallback) + lazy val isRemote: Boolean = FileSystemsHolder.isSupportedRemoteScheme(uri.getScheme) } object MagLocator extends ResolutionFormatHelper { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala index 85bf8b3e73e..ee15a7d1076 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala @@ -4,11 +4,12 @@ import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.requestlogging.RateLimitedErrorLogging import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator} +import com.scalableminds.webknossos.datastore.datareaders.n5.N5Array import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import 
com.scalableminds.webknossos.datastore.datareaders.n5.N5Array +import com.scalableminds.webknossos.datastore.storage.FileSystemService import com.typesafe.scalalogging.LazyLogging -import net.liftweb.common.{Box, Empty, Failure, Full} +import net.liftweb.common.{Empty, Failure, Full} import net.liftweb.util.Helpers.tryo import java.nio.file.Path @@ -28,28 +29,33 @@ class N5CubeHandle(n5Array: N5Array) extends DataCubeHandle with LazyLogging wit } -class N5BucketProvider(layer: N5Layer) extends BucketProvider with LazyLogging with RateLimitedErrorLogging { +class N5BucketProvider(layer: N5Layer, val fileSystemServiceOpt: Option[FileSystemService]) + extends BucketProvider + with LazyLogging + with RateLimitedErrorLogging { - override def loadFromUnderlying(readInstruction: DataReadInstruction): Box[N5CubeHandle] = { + override def loadFromUnderlying(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[N5CubeHandle] = { val n5MagOpt: Option[MagLocator] = layer.mags.find(_.mag == readInstruction.bucket.mag) n5MagOpt match { - case None => Empty + case None => Fox.empty case Some(n5Mag) => - val magPathOpt: Option[Path] = { - n5Mag.remoteSource match { - case Some(remoteSource) => remotePathFrom(remoteSource) - case None => localPathFrom(readInstruction, n5Mag.pathWithFallback) - } - } - magPathOpt match { + fileSystemServiceOpt match { + case Some(fileSystemService: FileSystemService) => + for { + magPath: Path <- if (n5Mag.isRemote) { + for { + remoteSource <- fileSystemService.remoteSourceFor(n5Mag) + remotePath <- remotePathFrom(remoteSource) + } yield remotePath + } else localPathFrom(readInstruction, n5Mag.pathWithFallback) + cubeHandle <- tryo(onError = e => logError(e))(N5Array.open(magPath, n5Mag.axisOrder, n5Mag.channelIndex)) + .map(new N5CubeHandle(_)) + } yield cubeHandle case None => Empty - case Some(magPath) => - tryo(onError = e => logError(e))(N5Array.open(magPath, n5Mag.axisOrder, n5Mag.channelIndex)) - .map(new N5CubeHandle(_)) } } - } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala index 1969873d0c2..fea2314b796 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala @@ -4,13 +4,14 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ +import com.scalableminds.webknossos.datastore.storage.FileSystemService import play.api.libs.json.{Json, OFormat} trait N5Layer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.n5 - lazy val bucketProvider = new N5BucketProvider(this) + def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]) = new N5BucketProvider(this, fileSystemServiceOpt) def resolutions: List[Vec3Int] = mags.map(_.mag) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala index ad284d9b8ea..2b8e91879c1 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala @@ -4,9 +4,9 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle} import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction +import com.scalableminds.webknossos.datastore.storage.FileSystemService import com.scalableminds.webknossos.wrap.WKWFile -import net.liftweb.common.{Box, Empty, Failure, Full} - +import net.liftweb.common.{Empty, Failure, Full} import java.nio.file.Path import scala.concurrent.ExecutionContext @@ -33,7 +33,10 @@ class WKWCubeHandle(wkwFile: WKWFile, wkwFilePath: Path) extends DataCubeHandle class WKWBucketProvider(layer: WKWLayer) extends BucketProvider with WKWDataFormatHelper { - override def loadFromUnderlying(readInstruction: DataReadInstruction): Box[WKWCubeHandle] = { + override def fileSystemServiceOpt: Option[FileSystemService] = None + + override def loadFromUnderlying(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[WKWCubeHandle] = { val wkwFile = wkwFilePath( readInstruction.cube, Some(readInstruction.dataSource.id), diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala index 14ca259551f..99da2262ac9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala @@ -1,8 +1,10 @@ package com.scalableminds.webknossos.datastore.dataformats.wkw import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} +import com.scalableminds.webknossos.datastore.dataformats.BucketProvider import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, _} +import com.scalableminds.webknossos.datastore.storage.FileSystemService import play.api.libs.json.{Json, OFormat} case class WKWResolution(resolution: Vec3Int, cubeLength: Int) @@ -15,7 +17,8 @@ trait WKWLayer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.wkw - lazy val bucketProvider = new WKWBucketProvider(this) + override def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider = + new WKWBucketProvider(this) def wkwResolutions: List[WKWResolution] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala index 7bb2037c7b0..6cc56fb16ef 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala @@ -1,6 +1,5 @@ package com.scalableminds.webknossos.datastore.dataformats.zarr -import java.nio.file.Path import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.requestlogging.RateLimitedErrorLogging import com.scalableminds.util.tools.Fox @@ -8,10 +7,12 @@ import 
com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataC import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction +import com.scalableminds.webknossos.datastore.storage.FileSystemService import com.typesafe.scalalogging.LazyLogging -import net.liftweb.common.{Box, Empty, Failure, Full} +import net.liftweb.common.{Empty, Failure, Full} import net.liftweb.util.Helpers.tryo +import java.nio.file.Path import scala.concurrent.ExecutionContext class ZarrCubeHandle(zarrArray: ZarrArray) extends DataCubeHandle with LazyLogging with RateLimitedErrorLogging { @@ -28,28 +29,33 @@ class ZarrCubeHandle(zarrArray: ZarrArray) extends DataCubeHandle with LazyLoggi } -class ZarrBucketProvider(layer: ZarrLayer) extends BucketProvider with LazyLogging with RateLimitedErrorLogging { +class ZarrBucketProvider(layer: ZarrLayer, val fileSystemServiceOpt: Option[FileSystemService]) + extends BucketProvider + with LazyLogging + with RateLimitedErrorLogging { - override def loadFromUnderlying(readInstruction: DataReadInstruction): Box[ZarrCubeHandle] = { + override def loadFromUnderlying(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[ZarrCubeHandle] = { val zarrMagOpt: Option[MagLocator] = layer.mags.find(_.mag == readInstruction.bucket.mag) zarrMagOpt match { - case None => Empty - case Some(zarrMag) => { - val magPathOpt: Option[Path] = - zarrMag.remoteSource match { - case Some(remoteSource) => remotePathFrom(remoteSource) - case None => localPathFrom(readInstruction, zarrMag.pathWithFallback) - } - magPathOpt match { + case None => Fox.empty + case Some(zarrMag) => + fileSystemServiceOpt match { + case Some(fileSystemService: FileSystemService) => + for { + magPath: Path <- if (zarrMag.isRemote) { + for { + remoteSource <- fileSystemService.remoteSourceFor(zarrMag) + remotePath <- remotePathFrom(remoteSource) + } yield remotePath + } else localPathFrom(readInstruction, zarrMag.pathWithFallback) + cubeHandle <- tryo(onError = e => logError(e))( + ZarrArray.open(magPath, zarrMag.axisOrder, zarrMag.channelIndex)).map(new ZarrCubeHandle(_)) + } yield cubeHandle case None => Empty - case Some(magPath) => - tryo(onError = e => logError(e))(ZarrArray.open(magPath, zarrMag.axisOrder, zarrMag.channelIndex)) - .map(new ZarrCubeHandle(_)) } - } } - } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala index 2d10f899f53..ddf779ac4bc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala @@ -1,12 +1,14 @@ package com.scalableminds.webknossos.datastore.dataformats.zarr -import java.net.URI import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ +import com.scalableminds.webknossos.datastore.storage.FileSystemService import play.api.libs.json.{Json, OFormat} +import java.net.URI + case class FileSystemCredentials(user: String, password: 
Option[String]) object FileSystemCredentials { @@ -22,7 +24,8 @@ trait ZarrLayer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.zarr - lazy val bucketProvider = new ZarrBucketProvider(this) + def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]) = + new ZarrBucketProvider(this, fileSystemServiceOpt) def resolutions: List[Vec3Int] = mags.map(_.mag) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index 5bb8bcf7008..7b138d9d22b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -10,6 +10,7 @@ import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, Z import com.scalableminds.webknossos.datastore.datareaders.ArrayDataType import com.scalableminds.webknossos.datastore.datareaders.ArrayDataType.ArrayDataType import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration +import com.scalableminds.webknossos.datastore.storage.FileSystemService import play.api.libs.json._ object DataFormat extends ExtendedEnumeration { @@ -174,7 +175,7 @@ trait DataLayer extends DataLayerLike { */ def lengthOfUnderlyingCubes(resolution: Vec3Int): Int - def bucketProvider: BucketProvider + def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider def containsResolution(resolution: Vec3Int): Boolean = resolutions.contains(resolution) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 85123b78174..13b92b08e18 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.helpers.DataSetDeleter import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer} import com.scalableminds.webknossos.datastore.models.requests.{DataReadInstruction, DataServiceDataRequest} -import com.scalableminds.webknossos.datastore.storage.{AgglomerateFileKey, CachedCube, DataCubeCache} +import com.scalableminds.webknossos.datastore.storage._ import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Failure, Full} @@ -17,15 +17,18 @@ import scala.concurrent.ExecutionContext.Implicits.global class BinaryDataService(val dataBaseDir: Path, maxCacheSize: Int, val agglomerateServiceOpt: Option[AgglomerateService], + fileSystemServiceOpt: Option[FileSystemService], val applicationHealthService: Option[ApplicationHealthService]) extends FoxImplicits with DataSetDeleter with LazyLogging { - /* Note that this must stay in sync with the back-end constant + /* Note that this must stay in sync with the front-end constant compare https://github.com/scalableminds/webknossos/issues/5223 */ private val MaxMagForAgglomerateMapping = 16 - lazy val cache = new DataCubeCache(maxCacheSize) + + private lazy val shardHandleCache = new DataCubeCache(maxCacheSize) + private lazy val bucketProviderCache = new BucketProviderCache(maxEntries = 5000) 
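+  // bucket providers are no longer lazy vals on the data layers (they now need the injected FileSystemService for remote access), so they are memoized per layer in this cache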
 
   def handleDataRequest(request: DataServiceDataRequest): Fox[Array[Byte]] = {
     val bucketQueue = request.cuboid.allBucketsInCuboid
@@ -46,9 +49,9 @@ class BinaryDataService(val dataBaseDir: Path,
   }
 
   def handleDataRequests(requests: List[DataServiceDataRequest]): Fox[(Array[Byte], List[Int])] = {
-    def convertIfNecessary[T](isNecessary: Boolean,
-                              inputArray: Array[Byte],
-                              conversionFunc: Array[Byte] => Array[Byte]): Array[Byte] =
+    def convertIfNecessary(isNecessary: Boolean,
+                           inputArray: Array[Byte],
+                           conversionFunc: Array[Byte] => Array[Byte]): Array[Byte] =
       if (isNecessary) conversionFunc(inputArray) else inputArray
 
     val requestsCount = requests.length
@@ -79,7 +82,9 @@ class BinaryDataService(val dataBaseDir: Path,
     if (request.dataLayer.doesContainBucket(bucket) && request.dataLayer.containsResolution(bucket.mag)) {
       val readInstruction =
         DataReadInstruction(dataBaseDir, request.dataSource, request.dataLayer, bucket, request.settings.version)
-      request.dataLayer.bucketProvider.load(readInstruction, cache).futureBox.flatMap {
+      val bucketProvider = bucketProviderCache.getOrLoadAndPut(request.dataLayer)(dataLayer =>
+        dataLayer.bucketProvider(fileSystemServiceOpt))
+      bucketProvider.load(readInstruction, shardHandleCache).futureBox.flatMap {
         case Failure(msg, Full(e: InternalError), _) =>
           applicationHealthService.foreach(a => a.pushError(e))
           logger.warn(
@@ -188,7 +193,7 @@ class BinaryDataService(val dataBaseDir: Path,
     val closedAgglomerateFileHandleCount =
       agglomerateServiceOpt.map(_.agglomerateFileCache.clear(agglomerateFileMatchPredicate)).getOrElse(0)
 
-    val closedDataCubeHandleCount = cache.clear(dataCubeMatchPredicate)
+    val closedDataCubeHandleCount = shardHandleCache.clear(dataCubeMatchPredicate)
 
     (closedAgglomerateFileHandleCount, closedDataCubeHandleCount)
   }
 
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala
index eef14c74a64..15d30f6af83 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala
@@ -1,8 +1,9 @@
 package com.scalableminds.webknossos.datastore.services
 
 import java.nio.file.Paths
-
 import com.scalableminds.webknossos.datastore.DataStoreConfig
+import com.scalableminds.webknossos.datastore.storage.FileSystemService
+
 import javax.inject.Inject
 
 /*
@@ -14,11 +15,15 @@ import javax.inject.Inject
 
 class BinaryDataServiceHolder @Inject()(config: DataStoreConfig,
                                         agglomerateService: AgglomerateService,
-                                        applicationHealthService: ApplicationHealthService) {
+                                        applicationHealthService: ApplicationHealthService,
+                                        fileSystemService: FileSystemService) {
 
-  val binaryDataService = new BinaryDataService(Paths.get(config.Datastore.baseFolder),
-                                                config.Datastore.Cache.DataCube.maxEntries,
-                                                Some(agglomerateService),
-                                                Some(applicationHealthService))
+  val binaryDataService: BinaryDataService = new BinaryDataService(
+    Paths.get(config.Datastore.baseFolder),
+    config.Datastore.Cache.DataCube.maxEntries,
+    Some(agglomerateService),
+    Some(fileSystemService),
+    Some(applicationHealthService)
+  )
 
 }
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala
index 16f96908946..99312cbdc51 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala
@@ -12,6 +12,7 @@ import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource
 import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
 import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike
 import com.scalableminds.webknossos.datastore.rpc.RPC
+import com.scalableminds.webknossos.datastore.storage.AnyCredential
 import com.typesafe.scalalogging.LazyLogging
 import play.api.inject.ApplicationLifecycle
 import play.api.libs.json.{Json, OFormat}
@@ -148,4 +149,11 @@ class DSRemoteWebKnossosClient @Inject()(
         .addQueryStringOptional("userToken", userToken)
         .getWithJsonResponse[AnnotationSource]
     )
+
+  def findCredential(credentialId: String): Fox[AnyCredential] =
+    rpc(s"$webKnossosUri/api/datastores/$dataStoreName/findCredential")
+      .addQueryString("credentialId" -> credentialId)
+      .addQueryString("key" -> dataStoreKey)
+      .silent
+      .getWithJsonResponse[AnyCredential]
 }
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala
new file mode 100644
index 00000000000..71c866edcb1
--- /dev/null
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala
@@ -0,0 +1,7 @@
+package com.scalableminds.webknossos.datastore.storage
+
+import com.scalableminds.util.cache.LRUConcurrentCache
+import com.scalableminds.webknossos.datastore.dataformats.BucketProvider
+import com.scalableminds.webknossos.datastore.models.datasource.DataLayer
+
+class BucketProviderCache(val maxEntries: Int) extends LRUConcurrentCache[DataLayer, BucketProvider]
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemCredentials.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemCredentials.scala
new file mode 100644
index 00000000000..d7c9e1700d1
--- /dev/null
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemCredentials.scala
@@ -0,0 +1,23 @@
+package com.scalableminds.webknossos.datastore.storage
+
+import play.api.libs.json.{Json, OFormat}
+
+sealed trait AnyCredential
+
+object AnyCredential {
+  implicit val jsonFormat: OFormat[AnyCredential] = Json.format[AnyCredential]
+}
+
+case class HttpBasicAuthCredential(name: String, username: String, password: String, user: String, organization: String)
+    extends AnyCredential
+
+object HttpBasicAuthCredential {
+  implicit val jsonFormat: OFormat[HttpBasicAuthCredential] = Json.format[HttpBasicAuthCredential]
+}
+
+case class S3AccessKeyCredential(name: String, keyId: String, key: String, user: String, organization: String)
+    extends AnyCredential
+
+object S3AccessKeyCredential {
+  implicit val jsonFormat: OFormat[S3AccessKeyCredential] = Json.format[S3AccessKeyCredential]
+}
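
The sealed AnyCredential family is what travels over the findCredential RPC above. Assuming play-json's sealed-trait format derivation (available since play-json 2.7), which a plain Json.format on a sealed trait relies on, serialization carries a discriminator field so that the concrete subtype survives the round trip. A minimal sketch with placeholder values:

    import com.scalableminds.webknossos.datastore.storage._
    import play.api.libs.json.{JsValue, Json}

    val credential: AnyCredential =
      S3AccessKeyCredential("my-s3-credential", "someKeyId", "someSecretKey", "someUserId", "someOrganizationId")
    val serialized: JsValue = Json.toJson(credential)      // includes a "_type" discriminator by default
    val roundTripped = serialized.validate[AnyCredential]  // restores the concrete S3AccessKeyCredential
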
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemService.scala
new file mode 100644
index 00000000000..4b01ec82ad5
--- /dev/null
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemService.scala
@@ -0,0 +1,36 @@
+package com.scalableminds.webknossos.datastore.storage
+
+import com.scalableminds.util.tools.Fox
+import com.scalableminds.webknossos.datastore.dataformats.MagLocator
+import com.scalableminds.webknossos.datastore.dataformats.zarr.RemoteSourceDescriptor
+import com.scalableminds.webknossos.datastore.services.DSRemoteWebKnossosClient
+
+import java.net.URI
+import javax.inject.Inject
+import scala.concurrent.ExecutionContext
+
+class FileSystemService @Inject()(dSRemoteWebKnossosClient: DSRemoteWebKnossosClient) {
+
+  private def remoteSourceDescriptorFromCredential(uri: URI, credential: AnyCredential): RemoteSourceDescriptor =
+    credential match {
+      case HttpBasicAuthCredential(_, username, password, _, _) =>
+        RemoteSourceDescriptor(uri, Some(username), Some(password))
+      case S3AccessKeyCredential(_, keyId, key, _, _) => RemoteSourceDescriptor(uri, Some(keyId), Some(key))
+    }
+
+  def remoteSourceFor(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[RemoteSourceDescriptor] =
+    magLocator.credentialId match {
+      case Some(credentialId) =>
+        for {
+          credential <- dSRemoteWebKnossosClient.findCredential(credentialId)
+          descriptor = remoteSourceDescriptorFromCredential(magLocator.uri, credential)
+        } yield descriptor
+      case None =>
+        magLocator.credentials match {
+          case Some(credentials) =>
+            Fox.successful(RemoteSourceDescriptor(magLocator.uri, Some(credentials.user), credentials.password))
+          case None => Fox.successful(RemoteSourceDescriptor(magLocator.uri, None, None))
+        }
+    }
+
+}
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala
index 718f0a0492e..6ffd7587086 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala
@@ -171,7 +171,7 @@ class VolumeTracingZarrStreamingController @Inject()(
         largestSegmentId = tracing.largestSegmentId,
         boundingBox = tracing.boundingBox,
         elementClass = tracing.elementClass,
-        mags = tracing.resolutions.toList.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)),
+        mags = tracing.resolutions.toList.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None, None)),
         mappings = None,
         numChannels = Some(if (tracing.elementClass.isuint24) 3 else 1)
       )
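
The extra trailing None above is MagLocator's new credentialId parameter, the hook that remoteSourceFor resolves through webKnossos. For illustration only (the roles of the parameters other than the mag, axisOrder, and credentialId positions are assumptions inferred from the call above, not spelled out in this diff), a remote mag that references a stored credential might be constructed as:

    import com.scalableminds.util.geometry.Vec3Int
    import com.scalableminds.webknossos.datastore.dataformats.MagLocator
    import com.scalableminds.webknossos.datastore.datareaders.AxisOrder

    // Hypothetical values: the id points at a credential row stored by webKnossos,
    // so no secret needs to be embedded in the dataset JSON anymore.
    val remoteMag = MagLocator(Vec3Int(1, 1, 1), None, None, Some(AxisOrder.cxyz), None, Some("someCredentialId"))
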
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala
index 51e60e43f9b..0cdea60898d 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala
@@ -9,11 +9,14 @@ import com.scalableminds.webknossos.datastore.models.{BucketPosition, WebKnossos
 import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration
 import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, DataLayer, ElementClass, SegmentationLayer}
 import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
-import com.scalableminds.webknossos.datastore.storage.DataCubeCache
+import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemService}
 
 import scala.concurrent.ExecutionContext
 
 class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketProvider with ProtoGeometryImplicits {
+
+  override def fileSystemServiceOpt: Option[FileSystemService] = None
+
   override def load(readInstruction: DataReadInstruction, cache: DataCubeCache)(
       implicit ec: ExecutionContext): Fox[Array[Byte]] = {
     val bucket: BucketPosition = readInstruction.bucket
@@ -62,7 +65,8 @@ case class EditableMappingLayer(name: String,
 
   override def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = DataLayer.bucketLength
 
-  override def bucketProvider: BucketProvider = new EditableMappingBucketProvider(layer = this)
+  override def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider =
+    new EditableMappingBucketProvider(layer = this)
 
   override def mappings: Option[Set[String]] = None
 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala
index 7836918da04..373580f4625 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala
@@ -89,7 +89,7 @@ class EditableMappingService @Inject()(
 
   private def generateId: String = UUID.randomUUID.toString
 
-  val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None)
+  val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None, None)
 
   isosurfaceServiceHolder.tracingStoreIsosurfaceConfig = (binaryDataService, 30 seconds, 1)
   val isosurfaceService: IsosurfaceService = isosurfaceServiceHolder.tracingStoreIsosurfaceService
 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala
index 840d3e19ee3..edd3446505e 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala
@@ -7,7 +7,7 @@ import com.scalableminds.webknossos.datastore.models.BucketPosition
 import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration
 import com.scalableminds.webknossos.datastore.models.datasource.{ElementClass, _}
 import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
-import com.scalableminds.webknossos.datastore.storage.DataCubeCache
+import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemService}
 import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing
 import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits
 import com.scalableminds.webknossos.tracingstore.tracings.{
@@ -21,6 +21,8 @@ import scala.concurrent.ExecutionContext
 
 trait AbstractVolumeTracingBucketProvider extends BucketProvider with VolumeTracingBucketHelper with FoxImplicits {
 
+  override def fileSystemServiceOpt: Option[FileSystemService] = None
+
   def bucketStreamWithVersion(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)]
 }
 
@@ -97,7 +99,9 @@ case class VolumeTracingLayer(
     else
       new VolumeTracingBucketProvider(this)
 
-  override val bucketProvider: BucketProvider = volumeBucketProvider
+  override def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider = volumeBucketProvider
+
+  def bucketProvider: AbstractVolumeTracingBucketProvider = volumeBucketProvider
 
   override val resolutions: List[Vec3Int] = if (volumeResolutions.nonEmpty) volumeResolutions else List(Vec3Int(1, 1, 1))
 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
index 8e3d85edbfc..4c84c9324f2 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
@@ -74,7 +74,7 @@ class VolumeTracingService @Inject()(
 
   /* We want to reuse the bucket loading methods from binaryDataService for the volume tracings, however, it does not
      actually load anything from disk, unlike its “normal” instance in the datastore (only from the volume tracing store) */
-  val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None)
+  val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None, None)
 
   isosurfaceServiceHolder.tracingStoreIsosurfaceConfig = (binaryDataService, 30 seconds, 1)
   val isosurfaceService: IsosurfaceService = isosurfaceServiceHolder.tracingStoreIsosurfaceService
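
Read end to end: the datastore wires its FileSystemService into BinaryDataService, while the tracingstore instances above pass None, since volume tracings and editable mappings are never read from remote file systems. A sketch of how a provider is meant to consult the optional service (illustrative only; RemoteMagResolver and its method are hypothetical names, not part of this diff):

    import com.scalableminds.util.tools.Fox
    import com.scalableminds.webknossos.datastore.dataformats.MagLocator
    import com.scalableminds.webknossos.datastore.dataformats.zarr.RemoteSourceDescriptor
    import com.scalableminds.webknossos.datastore.storage.FileSystemService

    import scala.concurrent.ExecutionContext

    class RemoteMagResolver(fileSystemServiceOpt: Option[FileSystemService]) {
      // Resolve a MagLocator to a RemoteSourceDescriptor, asking webKnossos for
      // the stored credential when the locator carries a credentialId.
      def resolve(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[RemoteSourceDescriptor] =
        fileSystemServiceOpt match {
          case Some(fileSystemService) => fileSystemService.remoteSourceFor(magLocator)
          case None                    => Fox.empty // tracingstore instances: no remote reads
        }
    }
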