From e443ee55f6ff21d7e7dfd9b035a377f60768f265 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 22 May 2024 13:21:25 +0200 Subject: [PATCH] Explore remote datasets in datastore (#7800) * WIP: explore remote datasets in datastore * WIP: move ExploreRemoteLayerService to datastore * move more stuff * move more stuff * start to connect the pieces * look up credentials, cleanup * allow selecting datastore in frontend * changelog * implement pr feedback --- CHANGELOG.unreleased.md | 1 + app/controllers/AnnotationIOController.scala | 8 +- app/controllers/DatasetController.scala | 53 ++---- .../dataset/WKRemoteDataStoreClient.scala | 13 ++ .../explore/ExploreRemoteLayerService.scala | 167 ------------------ .../explore/WKExploreRemoteLayerService.scala | 120 +++++++++++++ frontend/javascripts/admin/admin_rest_api.ts | 2 + .../admin/dataset/dataset_add_remote_view.tsx | 34 +++- .../admin/dataset/dataset_upload_view.tsx | 3 - .../scala/collections/SequenceUtils.scala | 9 + .../controllers/DataSourceController.scala | 38 +++- ...rService.scala => ExploreLayerUtils.scala} | 2 +- .../explore/ExploreLocalLayerService.scala | 5 +- .../explore/ExploreRemoteLayerService.scala | 130 ++++++++++++++ .../explore/NeuroglancerUriExplorer.scala | 12 +- .../explore/RemoteLayerExplorer.scala | 6 +- .../datastore/explore/ZarrArrayExplorer.scala | 2 +- ....scalableminds.webknossos.datastore.routes | 1 + 18 files changed, 370 insertions(+), 236 deletions(-) delete mode 100644 app/models/dataset/explore/ExploreRemoteLayerService.scala create mode 100644 app/models/dataset/explore/WKExploreRemoteLayerService.scala create mode 100644 util/src/main/scala/collections/SequenceUtils.scala rename webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/{ExploreLayerService.scala => ExploreLayerUtils.scala} (99%) create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 2037dbf8d5..d213e74bdf 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -21,6 +21,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Non-admin or -manager users can no longer start long-running jobs that create datasets. This includes annotation materialization and AI inferrals. [#7753](https://github.com/scalableminds/webknossos/pull/7753) - In the time tracking view, all annotations and tasks can be shown for each user by expanding the table. The individual time spans spent with a task or annotating an explorative annotation can be accessed via CSV export. The detail view including a chart for the individual spans has been removed. [#7733](https://github.com/scalableminds/webknossos/pull/7733) - Slightly refactored the ``component to use columns as props. [#7772](https://github.com/scalableminds/webknossos/pull/7772) +- The config value `datastore.localFolderWhitelist` can now be set for each datastore individually. [#7800](https://github.com/scalableminds/webknossos/pull/7800) ### Fixed - Fixed a bug where a toast that was reopened had a flickering effect during the reopening animation. 
[#7793](https://github.com/scalableminds/webknossos/pull/7793) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 71f51c3a54..fdd1c21437 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -1,5 +1,7 @@ package controllers +import collections.SequenceUtils + import java.io.{BufferedOutputStream, File, FileOutputStream} import java.util.zip.Deflater import org.apache.pekko.actor.ActorSystem @@ -266,11 +268,7 @@ class AnnotationIOController @Inject()( } private def assertAllOnSameDataset(skeletons: List[SkeletonTracing], volumes: List[VolumeTracing]): Fox[String] = - for { - datasetName <- volumes.headOption.map(_.datasetName).orElse(skeletons.headOption.map(_.datasetName)).toFox - _ <- bool2Fox(skeletons.forall(_.datasetName == datasetName)) - _ <- bool2Fox(volumes.forall(_.datasetName == datasetName)) - } yield datasetName + SequenceUtils.findUniqueElement(volumes.map(_.datasetName) ++ skeletons.map(_.datasetName)).toFox private def assertAllOnSameOrganization(skeletons: List[SkeletonTracing], volumes: List[VolumeTracing]): Fox[Option[String]] = { diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index e644db6934..c955e16257 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -5,18 +5,17 @@ import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContex import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, TristateOptionJsonHelper} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass, GenericDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.ElementClass import models.analytics.{AnalyticsService, ChangeDatasetSettingsEvent, OpenDatasetEvent} import models.dataset._ import models.dataset.explore.{ ExploreAndAddRemoteDatasetParameters, - ExploreRemoteDatasetParameters, - ExploreRemoteLayerService + WKExploreRemoteLayerParameters, + WKExploreRemoteLayerService } import models.organization.OrganizationDAO import models.team.{TeamDAO, TeamService} import models.user.{User, UserDAO, UserService} -import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.functional.syntax._ import play.api.libs.json._ @@ -24,7 +23,6 @@ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import utils.{ObjectId, WkConf} import javax.inject.Inject -import scala.collection.mutable.ListBuffer import scala.concurrent.{ExecutionContext, Future} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import mail.{MailchimpClient, MailchimpTag} @@ -71,7 +69,7 @@ class DatasetController @Inject()(userService: UserService, conf: WkConf, analyticsService: AnalyticsService, mailchimpClient: MailchimpClient, - exploreRemoteLayerService: ExploreRemoteLayerService, + wkExploreRemoteLayerService: WKExploreRemoteLayerService, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { @@ -107,46 +105,29 @@ class DatasetController @Inject()(userService: UserService, } } - def exploreRemoteDataset(): Action[List[ExploreRemoteDatasetParameters]] = - sil.SecuredAction.async(validateJson[List[ExploreRemoteDatasetParameters]]) { implicit request => - val reportMutable = ListBuffer[String]() + def exploreRemoteDataset(): 
Action[List[WKExploreRemoteLayerParameters]] = + sil.SecuredAction.async(validateJson[List[WKExploreRemoteLayerParameters]]) { implicit request => for { - dataSourceBox: Box[GenericDataSource[DataLayer]] <- exploreRemoteLayerService - .exploreRemoteDatasource(request.body, request.identity, reportMutable) - .futureBox - dataSourceOpt = dataSourceBox match { - case Full(dataSource) if dataSource.dataLayers.nonEmpty => - reportMutable += s"Resulted in dataSource with ${dataSource.dataLayers.length} layers." - Some(dataSource) - case Full(_) => - reportMutable += "Error when exploring as layer set: Resulted in zero layers." - None - case f: Failure => - reportMutable += s"Error when exploring as layer set: ${Fox.failureChainAsString(f)}" - None - case Empty => - reportMutable += "Error when exploring as layer set: Empty" - None - } - } yield Ok(Json.obj("dataSource" -> Json.toJson(dataSourceOpt), "report" -> reportMutable.mkString("\n"))) + exploreResponse <- wkExploreRemoteLayerService.exploreRemoteDatasource(request.body, request.identity) + } yield Ok(Json.toJson(exploreResponse)) } // Note: This route is used by external applications, keep stable def exploreAndAddRemoteDataset(): Action[ExploreAndAddRemoteDatasetParameters] = sil.SecuredAction.async(validateJson[ExploreAndAddRemoteDatasetParameters]) { implicit request => - val reportMutable = ListBuffer[String]() - val adaptedParameters = ExploreRemoteDatasetParameters(request.body.remoteUri, None, None, None) + val adaptedParameters = + WKExploreRemoteLayerParameters(request.body.remoteUri, None, None, None, request.body.dataStoreName) for { - dataSource <- exploreRemoteLayerService.exploreRemoteDatasource(List(adaptedParameters), - request.identity, - reportMutable) + exploreResponse <- wkExploreRemoteLayerService.exploreRemoteDatasource(List(adaptedParameters), + request.identity) + dataSource <- exploreResponse.dataSource ?~> "dataset.explore.failed" _ <- bool2Fox(dataSource.dataLayers.nonEmpty) ?~> "dataset.explore.zeroLayers" folderIdOpt <- Fox.runOptional(request.body.folderPath)(folderPath => folderService.getOrCreateFromPathLiteral(folderPath, request.identity._organization)) ?~> "dataset.explore.autoAdd.getFolder.failed" - _ <- exploreRemoteLayerService.addRemoteDatasource(dataSource, - request.body.datasetName, - request.identity, - folderIdOpt) ?~> "dataset.explore.autoAdd.failed" + _ <- wkExploreRemoteLayerService.addRemoteDatasource(dataSource, + request.body.datasetName, + request.identity, + folderIdOpt) ?~> "dataset.explore.autoAdd.failed" } yield Ok } diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index db14f13ed5..7015bd4d90 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -3,6 +3,11 @@ package models.dataset import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.explore.{ + ExploreRemoteDatasetRequest, + ExploreRemoteDatasetResponse, + ExploreRemoteLayerParameters +} import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, RawCuboidRequest} import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, GenericDataSource} import com.scalableminds.webknossos.datastore.rpc.RPC @@ -102,4 +107,12 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin ) } + def 
exploreRemoteDataset(layerParameters: List[ExploreRemoteLayerParameters], + organizationName: String, + userToken: String): Fox[ExploreRemoteDatasetResponse] = + rpc(s"${dataStore.url}/data/datasets/exploreRemote") + .addQueryString("token" -> userToken) + .postJsonWithJsonResponse[ExploreRemoteDatasetRequest, ExploreRemoteDatasetResponse]( + ExploreRemoteDatasetRequest(layerParameters, organizationName)) + } diff --git a/app/models/dataset/explore/ExploreRemoteLayerService.scala b/app/models/dataset/explore/ExploreRemoteLayerService.scala deleted file mode 100644 index cec4dcfdae..0000000000 --- a/app/models/dataset/explore/ExploreRemoteLayerService.scala +++ /dev/null @@ -1,167 +0,0 @@ -package models.dataset.explore - -import com.scalableminds.util.accesscontext.DBAccessContext -import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} -import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.explore.{ - ExploreLayerService, - N5ArrayExplorer, - N5MultiscalesExplorer, - NeuroglancerUriExplorer, - NgffExplorer, - PrecomputedExplorer, - RemoteLayerExplorer, - WebknossosZarrExplorer, - Zarr3ArrayExplorer, - ZarrArrayExplorer -} -import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.storage.{DataVaultService, RemoteSourceDescriptor} -import com.typesafe.scalalogging.LazyLogging -import models.dataset.{DataStoreDAO, DatasetService, WKRemoteDataStoreClient} -import models.dataset.credential.CredentialService -import models.organization.OrganizationDAO -import models.user.User -import net.liftweb.common.{Empty, Failure, Full} -import net.liftweb.common.Box.tryo -import play.api.libs.json.{Json, OFormat} -import security.{WkEnv, WkSilhouetteEnvironment} -import utils.{ObjectId, WkConf} - -import java.net.URI -import javax.inject.Inject -import scala.collection.mutable.ListBuffer -import scala.concurrent.ExecutionContext - -case class ExploreRemoteDatasetParameters(remoteUri: String, - credentialIdentifier: Option[String], - credentialSecret: Option[String], - preferredVoxelSize: Option[Vec3Double]) - -object ExploreRemoteDatasetParameters { - implicit val jsonFormat: OFormat[ExploreRemoteDatasetParameters] = Json.format[ExploreRemoteDatasetParameters] -} - -case class ExploreAndAddRemoteDatasetParameters(remoteUri: String, datasetName: String, folderPath: Option[String]) - -object ExploreAndAddRemoteDatasetParameters { - implicit val jsonFormat: OFormat[ExploreAndAddRemoteDatasetParameters] = - Json.format[ExploreAndAddRemoteDatasetParameters] -} - -class ExploreRemoteLayerService @Inject()(credentialService: CredentialService, - dataVaultService: DataVaultService, - organizationDAO: OrganizationDAO, - dataStoreDAO: DataStoreDAO, - datasetService: DatasetService, - wkSilhouetteEnvironment: WkSilhouetteEnvironment, - exploreLayerService: ExploreLayerService, - rpc: RPC, - wkConf: WkConf) - extends FoxImplicits - with LazyLogging { - - private lazy val bearerTokenService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService - - def exploreRemoteDatasource( - parameters: List[ExploreRemoteDatasetParameters], - requestIdentity: WkEnv#I, - reportMutable: ListBuffer[String])(implicit ec: ExecutionContext): Fox[GenericDataSource[DataLayer]] = - for { - exploredLayersNested <- Fox.serialCombined(parameters)( - parameters => - 
exploreRemoteLayersForUri(parameters.remoteUri, - parameters.credentialIdentifier, - parameters.credentialSecret, - reportMutable, - requestIdentity)) - layersWithVoxelSizes = exploredLayersNested.flatten - preferredVoxelSize = parameters.flatMap(_.preferredVoxelSize).headOption - _ <- bool2Fox(layersWithVoxelSizes.nonEmpty) ?~> "Detected zero layers" - (layers, voxelSize) <- exploreLayerService.adaptLayersAndVoxelSize(layersWithVoxelSizes, preferredVoxelSize) - dataSource = GenericDataSource[DataLayer]( - DataSourceId("", ""), // Frontend will prompt user for a good name - layers, - voxelSize - ) - } yield dataSource - - def addRemoteDatasource(dataSource: GenericDataSource[DataLayer], - datasetName: String, - user: User, - folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] = - for { - organization <- organizationDAO.findOne(user._organization) - dataStore <- dataStoreDAO.findOneWithUploadsAllowed - _ <- datasetService.assertValidDatasetName(datasetName) - _ <- datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken" - client = new WKRemoteDataStoreClient(dataStore, rpc) - userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(user) - _ <- client.addDataSource(organization.name, datasetName, dataSource, folderId, userToken) - } yield () - - private def exploreRemoteLayersForUri( - layerUri: String, - credentialIdentifier: Option[String], - credentialSecret: Option[String], - reportMutable: ListBuffer[String], - requestingUser: User)(implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, Vec3Double)]] = - for { - uri <- tryo(new URI(exploreLayerService.removeHeaderFileNamesFromUriSuffix(layerUri))) ?~> s"Received invalid URI: $layerUri" - _ <- bool2Fox(uri.getScheme != null) ?~> s"Received invalid URI: $layerUri" - _ <- assertLocalPathInWhitelist(uri) - credentialOpt = credentialService.createCredentialOpt(uri, - credentialIdentifier, - credentialSecret, - requestingUser._id, - requestingUser._organization) - remoteSource = RemoteSourceDescriptor(uri, credentialOpt) - credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "dataVault.credential.insert.failed" - remotePath <- dataVaultService.getVaultPath(remoteSource) ?~> "dataVault.setup.failed" - layersWithVoxelSizes <- exploreRemoteLayersForRemotePath( - remotePath, - credentialId.map(_.toString), - reportMutable, - List( - new ZarrArrayExplorer(Vec3Int.ones, ec), - new NgffExplorer, - new WebknossosZarrExplorer, - new N5ArrayExplorer, - new N5MultiscalesExplorer, - new PrecomputedExplorer, - new Zarr3ArrayExplorer, - new NeuroglancerUriExplorer(dataVaultService, exploreLayerService, ec) - ) - ) - } yield layersWithVoxelSizes - - private def assertLocalPathInWhitelist(uri: URI)(implicit ec: ExecutionContext): Fox[Unit] = - if (uri.getScheme == DataVaultService.schemeFile) { - bool2Fox(wkConf.Datastore.localFolderWhitelist.exists(whitelistEntry => uri.getPath.startsWith(whitelistEntry))) ?~> s"Absolute path ${uri.getPath} in local file system is not in path whitelist. 
Consider adding it to datastore.localFolderWhitelist" - } else Fox.successful(()) - - private def exploreRemoteLayersForRemotePath(remotePath: VaultPath, - credentialId: Option[String], - reportMutable: ListBuffer[String], - explorers: List[RemoteLayerExplorer])( - implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, Vec3Double)]] = - explorers match { - case Nil => Fox.empty - case currentExplorer :: remainingExplorers => - reportMutable += s"\nTrying to explore $remotePath as ${currentExplorer.name}..." - currentExplorer.explore(remotePath, credentialId).futureBox.flatMap { - case Full(layersWithVoxelSizes) => - reportMutable += s"Found ${layersWithVoxelSizes.length} ${currentExplorer.name} layers at $remotePath." - Fox.successful(layersWithVoxelSizes) - case f: Failure => - reportMutable += s"Error when reading $remotePath as ${currentExplorer.name}: ${Fox.failureChainAsString(f)}" - exploreRemoteLayersForRemotePath(remotePath, credentialId, reportMutable, remainingExplorers) - case Empty => - reportMutable += s"Error when reading $remotePath as ${currentExplorer.name}: Empty" - exploreRemoteLayersForRemotePath(remotePath, credentialId, reportMutable, remainingExplorers) - } - } - -} diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala new file mode 100644 index 0000000000..c70b12b79a --- /dev/null +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -0,0 +1,120 @@ +package models.dataset.explore + +import collections.SequenceUtils +import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.geometry.Vec3Double +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.explore.{ + ExploreLayerUtils, + ExploreRemoteDatasetResponse, + ExploreRemoteLayerParameters +} +import com.scalableminds.webknossos.datastore.models.datasource._ +import com.scalableminds.webknossos.datastore.rpc.RPC +import com.typesafe.scalalogging.LazyLogging +import models.dataset.{DataStore, DataStoreDAO, DatasetService, WKRemoteDataStoreClient} +import models.dataset.credential.CredentialService +import models.organization.OrganizationDAO +import models.user.User +import net.liftweb.common.Box.tryo +import play.api.libs.json.{Json, OFormat} +import security.WkSilhouetteEnvironment +import utils.ObjectId + +import java.net.URI +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +case class WKExploreRemoteLayerParameters(remoteUri: String, + credentialIdentifier: Option[String], + credentialSecret: Option[String], + preferredVoxelSize: Option[Vec3Double], + dataStoreName: Option[String]) + +object WKExploreRemoteLayerParameters { + implicit val jsonFormat: OFormat[WKExploreRemoteLayerParameters] = Json.format[WKExploreRemoteLayerParameters] +} + +case class ExploreAndAddRemoteDatasetParameters(remoteUri: String, + datasetName: String, + folderPath: Option[String], + dataStoreName: Option[String]) + +object ExploreAndAddRemoteDatasetParameters { + implicit val jsonFormat: OFormat[ExploreAndAddRemoteDatasetParameters] = + Json.format[ExploreAndAddRemoteDatasetParameters] +} + +class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService, + organizationDAO: OrganizationDAO, + dataStoreDAO: DataStoreDAO, + datasetService: DatasetService, + wkSilhouetteEnvironment: WkSilhouetteEnvironment, + rpc: RPC) + extends FoxImplicits + with ExploreLayerUtils + with LazyLogging { + 
+ private lazy val bearerTokenService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService + + def exploreRemoteDatasource(parameters: List[WKExploreRemoteLayerParameters], requestingUser: User)( + implicit ec: ExecutionContext): Fox[ExploreRemoteDatasetResponse] = + for { + credentialIds <- Fox.serialCombined(parameters)( + parameters => + storeCredentials(parameters.remoteUri, + parameters.credentialIdentifier, + parameters.credentialSecret, + requestingUser)) + parametersWithCredentialId = parameters.zip(credentialIds).map { + case (originalParameters, credentialId) => + ExploreRemoteLayerParameters(originalParameters.remoteUri, + credentialId.map(_.toString), + originalParameters.preferredVoxelSize) + } + datastore <- selectDataStore(parameters.map(_.dataStoreName)) + client: WKRemoteDataStoreClient = new WKRemoteDataStoreClient(datastore, rpc) + organization <- organizationDAO.findOne(requestingUser._organization)(GlobalAccessContext) + userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(requestingUser) + exploreResponse <- client.exploreRemoteDataset(parametersWithCredentialId, organization.name, userToken) + } yield exploreResponse + + private def selectDataStore(dataStoreNames: List[Option[String]])(implicit ec: ExecutionContext): Fox[DataStore] = + for { + dataStoreNameOpt <- SequenceUtils.findUniqueElement(dataStoreNames) ?~> "explore.dataStore.mustBeEqualForAll" + dataStore <- dataStoreNameOpt match { + case Some(dataStoreName) => dataStoreDAO.findOneByName(dataStoreName)(GlobalAccessContext) + case None => dataStoreDAO.findOneWithUploadsAllowed(GlobalAccessContext) + } + } yield dataStore + + private def storeCredentials(layerUri: String, + credentialIdentifier: Option[String], + credentialSecret: Option[String], + requestingUser: User)(implicit ec: ExecutionContext): Fox[Option[ObjectId]] = + for { + uri <- tryo(new URI(removeHeaderFileNamesFromUriSuffix(layerUri))) ?~> s"Received invalid URI: $layerUri" + credentialOpt = credentialService.createCredentialOpt(uri, + credentialIdentifier, + credentialSecret, + requestingUser._id, + requestingUser._organization) + _ <- bool2Fox(uri.getScheme != null) ?~> s"Received invalid URI: $layerUri" + credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "dataVault.credential.insert.failed" + } yield credentialId + + def addRemoteDatasource(dataSource: GenericDataSource[DataLayer], + datasetName: String, + user: User, + folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] = + for { + organization <- organizationDAO.findOne(user._organization) + dataStore <- dataStoreDAO.findOneWithUploadsAllowed + _ <- datasetService.assertValidDatasetName(datasetName) + _ <- datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken" + client = new WKRemoteDataStoreClient(dataStore, rpc) + userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(user) + _ <- client.addDataSource(organization.name, datasetName, dataSource, folderId, userToken) + } yield () + +} diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 6c86320b3e..ddd4138d55 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1644,6 +1644,7 @@ type ExplorationResult = { export async function exploreRemoteDataset( remoteUris: string[], + datastoreName: string, credentials?: { username: string; pass: string } | null, preferredVoxelSize?: 
Vector3,
 ): Promise<ExplorationResult> {
@@ -1652,6 +1653,7 @@ export async function exploreRemoteDataset(
     const extendedUri = {
       remoteUri: uri.trim(),
       preferredVoxelSize,
+      datastoreName,
     };
 
     if (credentials) {
diff --git a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx
index 0c71902751..84a489365f 100644
--- a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx
@@ -19,7 +19,7 @@ import type { OxalisState } from "oxalis/store";
 import { exploreRemoteDataset, isDatasetNameValid, storeRemoteDataset } from "admin/admin_rest_api";
 import messages from "messages";
 import { jsonStringify } from "libs/utils";
-import { CardContainer } from "admin/dataset/dataset_components";
+import { CardContainer, DatastoreFormItem } from "admin/dataset/dataset_components";
 import Password from "antd/lib/input/Password";
 import { AsyncButton } from "components/async_clickables";
 import Toast from "libs/toast";
@@ -172,7 +172,10 @@ export function GoogleAuthFormItem({
 }
 
 function DatasetAddRemoteView(props: Props) {
-  const { activeUser, onAdded } = props;
+  const { activeUser, onAdded, datastores } = props;
+
+  const uploadableDatastores = datastores.filter((datastore) => datastore.allowsUpload);
+  const hasOnlyOneDatastoreOrNone = uploadableDatastores.length <= 1;
 
   const [showAddLayerModal, setShowAddLayerModal] = useState(false);
   const [dataSourceEditMode, setDataSourceEditMode] = useState<"simple" | "advanced">("simple");
@@ -201,8 +204,9 @@ function DatasetAddRemoteView(props: Props) {
     await form.validateFields();
     const datasourceConfigStr = form.getFieldValue("dataSourceJson");
 
-    const uploadableDatastores = props.datastores.filter((datastore) => datastore.allowsUpload);
-    const datastoreToUse = uploadableDatastores[0];
+    const datastoreToUse = uploadableDatastores.find(
+      (datastore) => form.getFieldValue("datastoreUrl") === datastore.url,
+    );
     if (!datastoreToUse) {
       Toast.error("Could not find datastore that allows uploading.");
       return;
@@ -240,6 +244,7 @@ function DatasetAddRemoteView(props: Props) {
+          <DatastoreFormItem datastores={uploadableDatastores} hidden={hasOnlyOneDatastoreOrNone} />
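
The datastore-side ExploreRemoteLayerService.scala and the new routes entry appear in the diffstat but fall outside the excerpt above. Based on their call sites in WKRemoteDataStoreClient and DatasetController, the JSON payload of the new exploreRemote route plausibly has the following shape; this is a sketch consistent with those call sites, not the verbatim file, and anything beyond the constructor arguments visible above is an assumption.

    // Sketch of the payload types for POST /data/datasets/exploreRemote, inferred from
    // ExploreRemoteDatasetRequest(layerParameters, organizationName),
    // ExploreRemoteLayerParameters(remoteUri, credentialId, preferredVoxelSize),
    // and a response carrying an optional dataSource plus a textual report.
    import com.scalableminds.util.geometry.Vec3Double
    import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, GenericDataSource}
    import play.api.libs.json.{Json, OFormat}

    case class ExploreRemoteLayerParameters(remoteUri: String,
                                            credentialId: Option[String],
                                            preferredVoxelSize: Option[Vec3Double])

    object ExploreRemoteLayerParameters {
      implicit val jsonFormat: OFormat[ExploreRemoteLayerParameters] = Json.format[ExploreRemoteLayerParameters]
    }

    case class ExploreRemoteDatasetRequest(layerParameters: List[ExploreRemoteLayerParameters],
                                           organizationName: String)

    object ExploreRemoteDatasetRequest {
      implicit val jsonFormat: OFormat[ExploreRemoteDatasetRequest] = Json.format[ExploreRemoteDatasetRequest]
    }

    // dataSource is None when exploration failed; report carries the log text shown in the frontend.
    case class ExploreRemoteDatasetResponse(dataSource: Option[GenericDataSource[DataLayer]], report: String)

    object ExploreRemoteDatasetResponse {
      implicit val jsonFormat: OFormat[ExploreRemoteDatasetResponse] = Json.format[ExploreRemoteDatasetResponse]
    }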
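
The new util/src/main/scala/collections/SequenceUtils.scala (9 added lines in the diffstat) is likewise not shown. Its two call sites, assertAllOnSameDataset and selectDataStore, constrain it to something like this minimal sketch:

    package collections

    object SequenceUtils {
      // Returns the single element that every entry of the sequence equals,
      // or None if the sequence is empty or contains differing elements.
      def findUniqueElement[T](list: Seq[T]): Option[T] =
        list.headOption.filter(head => list.forall(_ == head))
    }

Returning Option lets both failure modes (empty input and mismatched input) collapse into one Fox failure at the call sites: via .toFox in assertAllOnSameDataset and via ?~> "explore.dataStore.mustBeEqualForAll" in selectDataStore.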