Tiff Export Job #5195

Merged: 18 commits, Mar 8, 2021
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -13,6 +13,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
### Added
- Added CTRL+Scroll for zooming, which enables pinch-to-zoom on some trackpads. [#5224](https://github.com/scalableminds/webknossos/pull/5224)
- The time spent on a project is now displayed in the project list. [#5209](https://github.com/scalableminds/webknossos/pull/5209)
- Added the possibility to export binary data as tiff (if long-running jobs are enabled). [#5195](https://github.com/scalableminds/webknossos/pull/5195)

### Changed
-
47 changes: 46 additions & 1 deletion app/controllers/JobsController.scala
@@ -1,6 +1,10 @@
package controllers

import java.nio.file.{Files, Paths}
import java.util.Date

import com.mohiva.play.silhouette.api.Silhouette
import com.scalableminds.util.geometry.BoundingBox
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.rpc.{RPC, RPCRequest}
import com.scalableminds.webknossos.schema.Tables.{Jobs, JobsRow}
@@ -91,7 +95,7 @@ class JobService @Inject()(wkConf: WkConf, jobDAO: JobDAO, rpc: RPC, analyticsSe
} else {
val updateResult = for {
_ <- Fox.successful(celeryInfosLastUpdated = System.currentTimeMillis())
celeryInfoJson <- flowerRpc("/api/tasks").getWithJsonResponse[JsObject]
celeryInfoJson <- flowerRpc("/api/tasks?offset=0").getWithJsonResponse[JsObject]
celeryInfoMap <- celeryInfoJson
.validate[Map[String, JsObject]] ?~> "Could not validate celery response as json map"
_ <- Fox.serialCombined(celeryInfoMap.keys.toList)(jobId =>
@@ -131,6 +135,13 @@ class JobService @Inject()(wkConf: WkConf, jobDAO: JobDAO, rpc: RPC, analyticsSe

private def flowerRpc(route: String): RPCRequest =
rpc(wkConf.Jobs.Flower.uri + route).withBasicAuth(wkConf.Jobs.Flower.username, wkConf.Jobs.Flower.password)

def assertTiffExportBoundingBoxLimits(bbox: String): Fox[Unit] =
for {
boundingBox <- BoundingBox.fromForm(bbox).toFox
_ <- bool2Fox(boundingBox.volume <= wkConf.Features.exportTiffMaxVolumeMVx * 1024 * 1024) ?~> "job.export.tiff.volumeExceeded"
_ <- bool2Fox(boundingBox.dimensions.maxDim <= wkConf.Features.exportTiffMaxEdgeLengthVx) ?~> "job.export.tiff.edgeLengthExceeded"
} yield ()
}

class JobsController @Inject()(jobDAO: JobDAO,
@@ -171,4 +182,38 @@ class JobsController @Inject()(jobDAO: JobDAO,
} yield Ok(js)
}

def runTiffExportJob(organizationName: String,
dataSetName: String,
layerName: String,
bbox: String): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
for {
organization <- organizationDAO.findOneByName(organizationName) ?~> Messages("organization.notFound",
organizationName)
_ <- bool2Fox(request.identity._organization == organization._id) ~> FORBIDDEN
_ <- jobService.assertTiffExportBoundingBoxLimits(bbox)
command = "export_tiff"
exportFileName = s"${formatDateForFilename(new Date())}__${dataSetName}__${layerName}.zip"
commandArgs = Json.obj(
"kwargs" -> Json.obj("organization_name" -> organizationName,
"dataset_name" -> dataSetName,
"layer_name" -> layerName,
"bbox" -> bbox,
"export_file_name" -> exportFileName))
job <- jobService.runJob(command, commandArgs, request.identity) ?~> "job.couldNotRunTiffExport"
js <- jobService.publicWrites(job)
} yield Ok(js)
}

def downloadExport(jobId: String, exportFileName: String): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
for {
jobIdValidated <- ObjectId.parse(jobId)
job <- jobDAO.findOne(jobIdValidated)
organization <- organizationDAO.findOne(request.identity._organization)
filePath = Paths.get("binaryData", organization.name, ".export", job.celeryJobId, exportFileName)
_ <- bool2Fox(Files.exists(filePath)) ?~> "job.export.fileNotFound"
} yield Ok.sendPath(filePath, inline = false)
}

}
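For illustration, a minimal sketch (not part of this PR) of how the export file name built in runTiffExportJob and the path resolved in downloadExport fit together. The date format and all concrete values (organization, dataset, layer, celery job id) are hypothetical stand-ins; only the path layout binaryData/<organization>/.export/<celeryJobId>/<exportFileName> comes from the code above.

import java.nio.file.Paths
import java.text.SimpleDateFormat
import java.util.Date

object TiffExportPathSketch {

  // Assumed stand-in for the formatDateForFilename helper used above;
  // the actual format in webKnossos may differ.
  private def formatDateForFilename(d: Date): String =
    new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(d)

  def main(args: Array[String]): Unit = {
    val organizationName = "sample_organization" // hypothetical
    val dataSetName = "sample_dataset"           // hypothetical
    val layerName = "color"                      // hypothetical
    val celeryJobId = "abc123"                   // hypothetical

    // File name as built in runTiffExportJob
    val exportFileName = s"${formatDateForFilename(new Date())}__${dataSetName}__${layerName}.zip"

    // Path as resolved in downloadExport
    val filePath = Paths.get("binaryData", organizationName, ".export", celeryJobId, exportFileName)
    println(filePath)
    // e.g. binaryData/sample_organization/.export/abc123/2021-03-08_12-00-00__sample_dataset__color.zip
  }
}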
2 changes: 2 additions & 0 deletions app/utils/WkConf.scala
@@ -100,6 +100,8 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader {
val taskReopenAllowed: FiniteDuration = get[Int]("features.taskReopenAllowedInSeconds") seconds
val allowDeleteDatasets: Boolean = get[Boolean]("features.allowDeleteDatasets")
val publicDemoDatasetUrl: String = get[String]("features.publicDemoDatasetUrl")
val exportTiffMaxVolumeMVx: Long = get[Long]("features.exportTiffMaxVolumeMVx")
val exportTiffMaxEdgeLengthVx: Long = get[Long]("features.exportTiffMaxEdgeLengthVx")
}

object BackendAnalytics {
2 changes: 2 additions & 0 deletions conf/application.conf
@@ -87,6 +87,8 @@ features {
# If isDemoInstance == false, `/view` is appended to the URL so that it's opened in view mode (since the user might not
# have an account).
publicDemoDatasetUrl = "https://webknossos.org/datasets/scalable_minds/l4dense_motta_et_al_demo"
exportTiffMaxVolumeMVx = 1024
exportTiffMaxEdgeLengthVx = 8192
}

tracingstore {
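As a rough sanity check of the defaults added to conf/application.conf above: 1024 MVx corresponds to 1024 × 1024 × 1024 (about 1.07 billion) voxels. Below is a minimal, self-contained sketch (not part of this PR) of the same comparison that assertTiffExportBoundingBoxLimits performs, using these default values; the Dimensions case class is an illustrative stand-in for the webKnossos BoundingBox.

object TiffExportLimitSketch {

  // Defaults from conf/application.conf above
  val exportTiffMaxVolumeMVx: Long = 1024L
  val exportTiffMaxEdgeLengthVx: Long = 8192L

  // Illustrative stand-in for BoundingBox.dimensions
  final case class Dimensions(x: Long, y: Long, z: Long) {
    def volume: Long = x * y * z
    def maxDim: Long = Seq(x, y, z).max
  }

  // Same two conditions as assertTiffExportBoundingBoxLimits
  def withinLimits(dims: Dimensions): Boolean =
    dims.volume <= exportTiffMaxVolumeMVx * 1024 * 1024 &&
      dims.maxDim <= exportTiffMaxEdgeLengthVx

  def main(args: Array[String]): Unit = {
    println(withinLimits(Dimensions(1024, 1024, 1024))) // true: exactly 1024 MVx, edges within 8192 vx
    println(withinLimits(Dimensions(8192, 8192, 32)))   // false: 2048 MVx exceeds the volume limit
  }
}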
4 changes: 4 additions & 0 deletions conf/messages
@@ -313,7 +313,11 @@ initialData.organizationsNotEmpty=There are already organizations present in the

job.notFound = Job with id {0} could not be found.
job.couldNotRunCubing = Failed to start WKW conversion job.
job.couldNotRunTiffExport = Failed to start Tiff export job.
job.disabled = Long-running jobs are not enabled for this webKnossos instance.
job.export.fileNotFound = Exported file not found. The link may be expired.
job.export.tiff.volumeExceeded = The volume of the selected bounding box is too large.
job.export.tiff.edgeLengthExceeded = An edge length of the selected bounding box is too large.

agglomerateSkeleton.failed=Could not generate agglomerate skeleton.
