Store remote dataset credentials separately (#6646)
* Add schema for credentials
* Add credential model
* Simplify AnyCredential
* Rename httpbasicauth
* Add routes for credential creation
* Fix compilation
* Add internal route to get credential
* Fix compilation error
* Fix things
* [wip] Pass injected fileSystemService to BucketProvider
* Merge branch 'master' into store-credentials-separately
* Propagate execution context to bucket loading, change signature to Fox
* Fetch credentials from DB for MagLocator
* Remove remote source from maglocator
* Make scope optional
* Merge, bump schema version to 93
* Remove unused import
* Merge branch 'master' into store-credentials-separately
* Return credential id on credential creation
* Make request silent
* Merge branch 'master' into store-credentials-separately
* Add bucket provider cache
* Format
* Allow credentials to be created when exploring
* Update schema to include user, org, isDeleted
* Change backend format command
* Merge branch 'master' into store-credentials-separately
* Fix usage of legacy credentials and no credentials
* Fix schema
* Merge branch 'master' into store-credentials-separately
* Update schema number and migrations, changelog
* Make match exhaustive
* Use protected and private
* Merge branch 'master' into store-credentials-separately
* Merge branch 'master' into store-credentials-separately
* Use SecuredSQLDAO instead of SQLDAO
* Merge branch 'master' into store-credentials-separately
* Update schema number
* Merge branch 'master' into store-credentials-separately
* Fix error in findOne
* Merge branch 'master' into store-credentials-separately
frcroth authored Jan 17, 2023
1 parent 938f31f commit 209921a
Showing 39 changed files with 528 additions and 152 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Precomputed meshes can now be loaded even when a mapping is active (HDF5 or an editable mapping produced by the proofreading tool). The precomputed mesh has to be computed without a mapping for this to work. [#6569](https://github.com/scalableminds/webknossos/pull/6569)

### Changed
- For remote datasets that require authentication, credentials are no longer stored in the respective JSON. [#6646](https://github.com/scalableminds/webknossos/pull/6646)
- Improved performance of opening a dataset or annotation. [#6711](https://github.com/scalableminds/webknossos/pull/6711)
- Redesigned organization page to include more info on organization users, storage, and the webKnossos plan, and to provide opportunities to upgrade. [#6602](https://github.com/scalableminds/webknossos/pull/6602)
- Changed branding of WEBKNOSSOS including a new logo, new primary colors, and UPPERCASE name. [#6739](https://github.com/scalableminds/webknossos/pull/6739)
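
In practice, the explorers now attach only an optional credential id to each mag locator, while the secret itself lives in the new `webknossos.credentials` table. The two versions of the MagLocator call from the N5ArrayExplorer hunk further below illustrate the difference (a sketch for orientation only; the full MagLocator definition is not part of this diff):

```scala
// Before: credentials travelled inside the datasource JSON via the locator.
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder), None)

// After: the locator only references a stored credential by id; the datastore
// resolves that id through the new internal findCredential route when needed.
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), None, Some(guessedAxisOrder), None, credentialId)
```
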
1 change: 1 addition & 0 deletions MIGRATIONS.unreleased.md
@@ -13,3 +13,4 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md).
- [094-pricing-plans.sql](conf/evolutions/reversions/094-pricing-plans.sql)
- [095-constraint-naming.sql](conf/evolutions/reversions/095-constraint-naming.sql)
- [096-storage.sql](conf/evolutions/096-storage.sql)
- [097-credentials.sql](conf/evolutions/097-credentials.sql)
65 changes: 65 additions & 0 deletions app/controllers/CredentialController.scala
@@ -0,0 +1,65 @@
package controllers

import com.mohiva.play.silhouette.api.Silhouette
import com.scalableminds.util.tools.FoxImplicits
import com.scalableminds.webknossos.datastore.storage.{HttpBasicAuthCredential, S3AccessKeyCredential}
import models.binary.credential.CredentialDAO
import oxalis.security.WkEnv
import play.api.libs.json.{Json, OFormat}
import play.api.mvc.{Action, PlayBodyParsers}
import utils.ObjectId

import javax.inject.Inject
import scala.concurrent.ExecutionContext

case class HttpBasicAuthCredentialParameters(name: String, username: String, password: String, domain: Option[String])

object HttpBasicAuthCredentialParameters {
  implicit val jsonFormat: OFormat[HttpBasicAuthCredentialParameters] = Json.format[HttpBasicAuthCredentialParameters]
}

case class S3AccessKeyCredentialParameters(name: String, keyId: String, key: String, bucket: Option[String])

object S3AccessKeyCredentialParameters {
  implicit val jsonFormat: OFormat[S3AccessKeyCredentialParameters] = Json.format[S3AccessKeyCredentialParameters]
}

class CredentialController @Inject()(credentialDAO: CredentialDAO, sil: Silhouette[WkEnv])(
    implicit ec: ExecutionContext,
    val bodyParsers: PlayBodyParsers)
    extends Controller
    with FoxImplicits {

  def createHttpBasicAuthCredential: Action[HttpBasicAuthCredentialParameters] =
    sil.SecuredAction.async(validateJson[HttpBasicAuthCredentialParameters]) { implicit request =>
      val _id = ObjectId.generate
      for {
        _ <- bool2Fox(request.identity.isAdmin) ?~> "notAllowed" ~> FORBIDDEN
        _ <- credentialDAO.insertOne(
          _id,
          HttpBasicAuthCredential(request.body.name,
                                  request.body.username,
                                  request.body.password,
                                  request.identity._id.toString,
                                  request.identity._organization.toString)
        ) ?~> "create.failed"
      } yield Ok(Json.toJson(_id))
    }

  def createS3AccessKeyCredential: Action[S3AccessKeyCredentialParameters] =
    sil.SecuredAction.async(validateJson[S3AccessKeyCredentialParameters]) { implicit request =>
      val _id = ObjectId.generate
      for {
        _ <- bool2Fox(request.identity.isAdmin) ?~> "notAllowed" ~> FORBIDDEN
        _ <- credentialDAO.insertOne(
          _id,
          S3AccessKeyCredential(request.body.name,
                                request.body.keyId,
                                request.body.key,
                                request.identity._id.toString,
                                request.identity._organization.toString)
        ) ?~> "create.failed"
      } yield Ok(Json.toJson(_id))
    }

}
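
Both actions accept a JSON body matching the parameter case classes above and, for admins, answer with the id of the newly stored credential. A minimal sketch of the request/response bodies, assuming the formats defined in this file (the HTTP route itself is declared outside this diff; all values are illustrative):

```scala
import controllers.HttpBasicAuthCredentialParameters
import play.api.libs.json.Json

object CredentialRequestSketch {
  def main(args: Array[String]): Unit = {
    val body = HttpBasicAuthCredentialParameters(
      name = "my-remote-server", // display name under which the credential is stored
      username = "alice",
      password = "secret",
      domain = None
    )
    // Play's format macro omits None options, so the request body serializes to
    // {"name":"my-remote-server","username":"alice","password":"secret"}
    println(Json.toJson(body))
    // On success the controller replies with the new credential's ObjectId as JSON,
    // which callers can later reference instead of embedding the secret anywhere.
  }
}
```
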
2 changes: 1 addition & 1 deletion app/controllers/DataSetController.scala
@@ -137,7 +137,7 @@ class DataSetController @Inject()(userService: UserService,
val reportMutable = ListBuffer[String]()
for {
dataSourceBox: Box[GenericDataSource[DataLayer]] <- exploreRemoteLayerService
.exploreRemoteDatasource(request.body, reportMutable)
.exploreRemoteDatasource(request.body, request.identity, reportMutable)
.futureBox
dataSourceOpt = dataSourceBox match {
case Full(dataSource) if dataSource.dataLayers.nonEmpty =>
14 changes: 13 additions & 1 deletion app/controllers/WKRemoteDataStoreController.scala
@@ -11,8 +11,10 @@ import com.scalableminds.webknossos.datastore.services.{
ReserveUploadInformation
}
import com.typesafe.scalalogging.LazyLogging
import javax.inject.Inject
import models.analytics.{AnalyticsService, UploadDatasetEvent}
import models.binary._
import models.binary.credential.CredentialDAO
import models.folder.FolderDAO
import models.job.JobDAO
import models.organization.OrganizationDAO
@@ -26,7 +28,6 @@ import play.api.libs.json.{JsError, JsSuccess, JsValue, Json}
import play.api.mvc.{Action, AnyContent, PlayBodyParsers}
import utils.ObjectId

import javax.inject.Inject
import scala.concurrent.{ExecutionContext, Future}

class WKRemoteDataStoreController @Inject()(
@@ -41,6 +42,7 @@ class WKRemoteDataStoreController @Inject()(
userDAO: UserDAO,
folderDAO: FolderDAO,
jobDAO: JobDAO,
credentialDAO: CredentialDAO,
mailchimpClient: MailchimpClient,
wkSilhouetteEnvironment: WkSilhouetteEnvironment)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers)
extends Controller
@@ -192,4 +194,14 @@ class WKRemoteDataStoreController @Inject()(
}
}

def findCredential(name: String, key: String, credentialId: String): Action[AnyContent] = Action.async {
implicit request =>
dataStoreService.validateAccess(name, key) { _ =>
for {
credentialIdValidated <- ObjectId.fromString(credentialId)
credential <- credentialDAO.findOne(credentialIdValidated)
} yield Ok(Json.toJson(credential))
}
}

}
69 changes: 69 additions & 0 deletions app/models/binary/credential/CredentialDAO.scala
@@ -0,0 +1,69 @@
package models.binary.credential

import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.storage.{AnyCredential, HttpBasicAuthCredential, S3AccessKeyCredential}
import com.scalableminds.webknossos.schema.Tables.{Credentials, CredentialsRow}
import utils.sql.{SecuredSQLDAO, SqlClient, SqlToken}
import utils.ObjectId

import javax.inject.Inject
import scala.concurrent.ExecutionContext

class CredentialDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) extends SecuredSQLDAO(sqlClient) {
  protected val collection = Credentials

  protected def columnsList: List[String] = collection.baseTableRow.create_*.map(_.name).toList
  override protected def collectionName: String = "credentials"
  def columns: SqlToken = SqlToken.raw(columnsList.mkString(", "))

  private def parseAsHttpBasicAuthCredential(r: CredentialsRow): Fox[HttpBasicAuthCredential] =
    for {
      username <- r.identifier.toFox
      password <- r.secret.toFox
    } yield
      HttpBasicAuthCredential(
        r.name,
        username,
        password,
        r._User,
        r._Organization
      )

  private def parseAsS3AccessKeyCredential(r: CredentialsRow): Fox[S3AccessKeyCredential] =
    for {
      keyId <- r.identifier.toFox
      key <- r.secret.toFox
    } yield
      S3AccessKeyCredential(
        r.name,
        keyId,
        key,
        r._User,
        r._Organization
      )

  def insertOne(_id: ObjectId, credential: HttpBasicAuthCredential): Fox[Unit] =
    for {
      _ <- run(q"""insert into webknossos.credentials(_id, type, name, identifier, secret, _user, _organization)
                   values(${_id}, ${CredentialType.HTTP_Basic_Auth}, ${credential.name}, ${credential.username}, ${credential.password}, ${credential.user}, ${credential.organization})""".asUpdate)
    } yield ()

  def insertOne(_id: ObjectId, credential: S3AccessKeyCredential): Fox[Unit] =
    for {
      _ <- run(q"""insert into webknossos.credentials(_id, type, name, identifier, secret, _user, _organization)
                   values(${_id}, ${CredentialType.S3_Access_Key}, ${credential.name}, ${credential.keyId}, ${credential.key}, ${credential.user}, ${credential.organization})""".asUpdate)
    } yield ()

  def findOne(id: ObjectId): Fox[AnyCredential] =
    for {
      r <- run(q"select $columns from webknossos.credentials_ where _id = $id".as[CredentialsRow])
      firstRow <- r.headOption.toFox
      parsed <- parseAnyCredential(firstRow)
    } yield parsed

  private def parseAnyCredential(r: CredentialsRow): Fox[AnyCredential] =
    r.`type` match {
      case "HTTP_Basic_Auth" => parseAsHttpBasicAuthCredential(r)
      case "S3_Access_Key"   => parseAsS3AccessKeyCredential(r)
    }
}
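
A short usage sketch of the DAO. This is a sketch only: `userId` and `organizationId` are assumed to be stringified ObjectIds, matching how CredentialController fills these fields, and the `credentials_` relation queried by findOne is presumably defined in the 097-credentials evolution, which is not shown in this excerpt.

```scala
// Store an HTTP basic auth credential and resolve it again by id,
// assuming an injected CredentialDAO and the storage imports used above.
val _id = ObjectId.generate
for {
  _ <- credentialDAO.insertOne(
    _id,
    HttpBasicAuthCredential("my-remote-server", "alice", "secret", userId, organizationId))
  credential <- credentialDAO.findOne(_id) // parsed back into an AnyCredential via the stored type column
} yield credential
```
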
51 changes: 51 additions & 0 deletions app/models/binary/credential/CredentialService.scala
@@ -0,0 +1,51 @@
package models.binary.credential

import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.storage.{HttpBasicAuthCredential, S3AccessKeyCredential}
import utils.ObjectId

import java.net.URI
import javax.inject.Inject
import scala.concurrent.ExecutionContext

class CredentialService @Inject()(credentialDao: CredentialDAO) {

  def createCredential(uri: URI,
                       username: Option[String],
                       password: Option[String],
                       user: String,
                       organization: String)(implicit ec: ExecutionContext): Fox[Option[ObjectId]] = {
    val scheme = uri.getScheme
    scheme match {
      case "https" =>
        username match {
          case Some(u) =>
            val _id = ObjectId.generate
            for {
              _ <- credentialDao.insertOne(
                _id,
                HttpBasicAuthCredential(uri.toString, u, password.getOrElse(""), user, organization))
              _ <- credentialDao.findOne(_id)
            } yield Some(_id)
          case None => Fox.empty
        }
      case "s3" =>
        username match {
          case Some(keyId) =>
            password match {
              case Some(secretKey) =>
                val _id = ObjectId.generate
                for {
                  _ <- credentialDao.insertOne(
                    _id,
                    S3AccessKeyCredential(uri.toString, keyId, secretKey, user, organization))
                  _ <- credentialDao.findOne(_id)
                } yield Some(_id)
              case None => Fox.empty
            }
          case None => Fox.empty
        }
    }
  }

}
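
A usage sketch mirroring how the explore flow (see the ExploreRemoteLayerService diff below) calls this service: for an `s3` URI the optional username and password are interpreted as access key id and secret key, for `https` as basic-auth credentials; if the required values are missing, nothing is stored and the returned Fox stays empty. The `requestingUser` value and the URI are illustrative.

```scala
// Assumes an injected CredentialService and an implicit ExecutionContext in scope.
val credentialIdFox: Fox[Option[ObjectId]] =
  credentialService.createCredential(
    new URI("s3://my-bucket/my-dataset"), // illustrative remote dataset URI
    username = Some("ACCESSKEYID"),       // interpreted as the S3 access key id
    password = Some("secretKey"),         // interpreted as the S3 secret key
    user = requestingUser._id.toString,
    organization = requestingUser._organization.toString
  )
// The returned ObjectId, if any, is what ends up as the credential id of the explored layers' mag locators.
```
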
9 changes: 9 additions & 0 deletions app/models/binary/credential/CredentialType.scala
@@ -0,0 +1,9 @@
package models.binary.credential

import com.scalableminds.util.enumeration.ExtendedEnumeration

object CredentialType extends ExtendedEnumeration {
  type CredentialType = Value

  val HTTP_Basic_Auth, S3_Access_Key, HTTP_Token, GCS = Value
}
33 changes: 24 additions & 9 deletions app/models/binary/explore/ExploreRemoteLayerService.scala
@@ -9,8 +9,11 @@ import com.scalableminds.webknossos.datastore.datareaders.zarr._
import com.scalableminds.webknossos.datastore.models.datasource._
import com.scalableminds.webknossos.datastore.storage.FileSystemsHolder
import com.typesafe.scalalogging.LazyLogging
import models.binary.credential.CredentialService
import models.user.User
import net.liftweb.common.{Box, Empty, Failure, Full}
import net.liftweb.util.Helpers.tryo
import oxalis.security.WkEnv
import play.api.libs.json.{Json, OFormat}

import java.net.URI
@@ -26,14 +29,20 @@ object ExploreRemoteDatasetParameters {
implicit val jsonFormat: OFormat[ExploreRemoteDatasetParameters] = Json.format[ExploreRemoteDatasetParameters]
}

class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLogging {
class ExploreRemoteLayerService @Inject()(credentialService: CredentialService) extends FoxImplicits with LazyLogging {

def exploreRemoteDatasource(
urisWithCredentials: List[ExploreRemoteDatasetParameters],
requestIdentity: WkEnv#I,
reportMutable: ListBuffer[String])(implicit ec: ExecutionContext): Fox[GenericDataSource[DataLayer]] =
for {
exploredLayersNested <- Fox.serialCombined(urisWithCredentials)(parameters =>
exploreRemoteLayersForUri(parameters.remoteUri, parameters.user, parameters.password, reportMutable))
exploredLayersNested <- Fox.serialCombined(urisWithCredentials)(
parameters =>
exploreRemoteLayersForUri(parameters.remoteUri,
parameters.user,
parameters.password,
reportMutable,
requestIdentity))
layersWithVoxelSizes = exploredLayersNested.flatten
_ <- bool2Fox(layersWithVoxelSizes.nonEmpty) ?~> "Detected zero layers"
rescaledLayersAndVoxelSize <- rescaleLayersByCommonVoxelSize(layersWithVoxelSizes) ?~> "Could not extract common voxel size from layers"
@@ -131,14 +140,20 @@ class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLoggin
layerUri: String,
user: Option[String],
password: Option[String],
reportMutable: ListBuffer[String])(implicit ec: ExecutionContext): Fox[List[(DataLayer, Vec3Double)]] =
reportMutable: ListBuffer[String],
requestingUser: User)(implicit ec: ExecutionContext): Fox[List[(DataLayer, Vec3Double)]] =
for {
remoteSource <- tryo(RemoteSourceDescriptor(new URI(normalizeUri(layerUri)), user, password)).toFox ?~> s"Received invalid URI: $layerUri"
credentialId <- credentialService.createCredential(new URI(normalizeUri(layerUri)),
user,
password,
requestingUser._id.toString,
requestingUser._organization.toString)
fileSystem <- FileSystemsHolder.getOrCreate(remoteSource).toFox ?~> "Failed to set up remote file system"
remotePath <- tryo(fileSystem.getPath(remoteSource.remotePath)) ?~> "Failed to get remote path"
layersWithVoxelSizes <- exploreRemoteLayersForRemotePath(
remotePath,
remoteSource.credentials,
credentialId.map(_.toString),
reportMutable,
List(new ZarrArrayExplorer, new NgffExplorer, new N5ArrayExplorer, new N5MultiscalesExplorer))
} yield layersWithVoxelSizes
@@ -153,23 +168,23 @@ class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLoggin

private def exploreRemoteLayersForRemotePath(
remotePath: Path,
credentials: Option[FileSystemCredentials],
credentialId: Option[String],
reportMutable: ListBuffer[String],
explorers: List[RemoteLayerExplorer])(implicit ec: ExecutionContext): Fox[List[(DataLayer, Vec3Double)]] =
explorers match {
case Nil => Fox.empty
case currentExplorer :: remainingExplorers =>
reportMutable += s"\nTrying to explore $remotePath as ${currentExplorer.name}..."
currentExplorer.explore(remotePath, credentials).futureBox.flatMap {
currentExplorer.explore(remotePath, credentialId).futureBox.flatMap {
case Full(layersWithVoxelSizes) =>
reportMutable += s"Found ${layersWithVoxelSizes.length} ${currentExplorer.name} layers at $remotePath."
Fox.successful(layersWithVoxelSizes)
case f: Failure =>
reportMutable += s"Error when reading $remotePath as ${currentExplorer.name}: ${formatFailureForReport(f)}"
exploreRemoteLayersForRemotePath(remotePath, credentials, reportMutable, remainingExplorers)
exploreRemoteLayersForRemotePath(remotePath, credentialId, reportMutable, remainingExplorers)
case Empty =>
reportMutable += s"Error when reading $remotePath as ${currentExplorer.name}: Empty"
exploreRemoteLayersForRemotePath(remotePath, credentials, reportMutable, remainingExplorers)
exploreRemoteLayersForRemotePath(remotePath, credentialId, reportMutable, remainingExplorers)
}
}

5 changes: 2 additions & 3 deletions app/models/binary/explore/N5ArrayExplorer.scala
@@ -3,7 +3,6 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int}
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5Layer, N5SegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.zarr.FileSystemCredentials
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header
import com.scalableminds.webknossos.datastore.models.datasource.Category
@@ -15,15 +14,15 @@ class N5ArrayExplorer extends RemoteLayerExplorer {

override def name: String = "N5 Array"

override def explore(remotePath: Path, credentials: Option[FileSystemCredentials]): Fox[List[(N5Layer, Vec3Double)]] =
override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(N5Layer, Vec3Double)]] =
for {
headerPath <- Fox.successful(remotePath.resolve(N5Header.FILENAME_ATTRIBUTES_JSON))
name <- guessNameFromPath(remotePath)
n5Header <- parseJsonFromPath[N5Header](headerPath) ?~> s"failed to read n5 header at $headerPath"
elementClass <- n5Header.elementClass ?~> "failed to read element class from n5 header"
guessedAxisOrder = AxisOrder.asZyxFromRank(n5Header.rank)
boundingBox <- n5Header.boundingBox(guessedAxisOrder) ?~> "failed to read bounding box from zarr header. Make sure data is in (T/C)ZYX format"
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder), None)
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), None, Some(guessedAxisOrder), None, credentialId)
layer: N5Layer = if (looksLikeSegmentationLayer(name, elementClass)) {
N5SegmentationLayer(name, boundingBox, elementClass, List(magLocator), largestSegmentId = None)
} else N5DataLayer(name, Category.color, boundingBox, elementClass, List(magLocator))