Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Export Volume Annotations #5246

Merged
merged 9 commits into from
Mar 16, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- The time spent on a project is now displayed in the project list. [#5209](https://github.com/scalableminds/webknossos/pull/5209)
- Added the possibility to export binary data as tiff (if long-running jobs are enabled). [#5195](https://github.com/scalableminds/webknossos/pull/5195)
- Added a link to dataset view mode from annotation mode info tab. [#5262](https://github.com/scalableminds/webknossos/pull/5262)
- Added the possibility to also export volume annotations as tiff (if long-running jobs are enabled). [#5246](https://github.com/scalableminds/webknossos/pull/5246)

### Changed
- Measured distances will be shown in voxel space, too. [#5240](https://github.com/scalableminds/webknossos/pull/5240)
Expand Down
35 changes: 21 additions & 14 deletions app/controllers/JobsController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,13 @@ import com.scalableminds.webknossos.schema.Tables.{Jobs, JobsRow}
import com.typesafe.scalalogging.LazyLogging
import javax.inject.Inject
import models.analytics.{AnalyticsService, RunJobEvent}
import models.annotation.TracingStoreRpcClient
import models.organization.OrganizationDAO
import models.user.User
import net.liftweb.common.{Failure, Full}
import oxalis.security.WkEnv
import play.api.i18n.Messages
import play.api.libs.json.{JsObject, JsValue, Json}
import play.api.libs.json._
import play.api.mvc.{Action, AnyContent}
import slick.jdbc.PostgresProfile.api._
import slick.lifted.Rep
Expand Down Expand Up @@ -125,10 +126,12 @@ class JobService @Inject()(wkConf: WkConf, jobDAO: JobDAO, rpc: RPC, analyticsSe
def runJob(command: String, commandArgs: JsObject, owner: User): Fox[Job] =
for {
_ <- bool2Fox(wkConf.Features.jobsEnabled) ?~> "jobs.disabled"
argsWrapped = Json.obj("kwargs" -> commandArgs)
result <- flowerRpc(s"/api/task/async-apply/tasks.$command")
.postWithJsonResponse[JsValue, Map[String, JsValue]](commandArgs)
.postWithJsonResponse[JsValue, Map[String, JsValue]](argsWrapped)
celeryJobId <- result("task-id").validate[String].toFox ?~> "Could not parse job submit answer"
job = Job(ObjectId.generate, owner._id, command, commandArgs, celeryJobId)
argsWithoutToken = Json.obj("kwargs" -> (commandArgs - "webknossos_token"))
job = Job(ObjectId.generate, owner._id, command, argsWithoutToken, celeryJobId)
_ <- jobDAO.insertOne(job)
_ = analyticsService.track(RunJobEvent(owner, command))
} yield job
Expand Down Expand Up @@ -173,9 +176,7 @@ class JobsController @Inject()(jobDAO: JobDAO,
organizationName)
_ <- bool2Fox(request.identity._organization == organization._id) ~> FORBIDDEN
command = "tiff_cubing"
commandArgs = Json.obj(
"kwargs" -> Json
.obj("organization_name" -> organizationName, "dataset_name" -> dataSetName, "scale" -> scale))
commandArgs = Json.obj("organization_name" -> organizationName, "dataset_name" -> dataSetName, "scale" -> scale)

job <- jobService.runJob(command, commandArgs, request.identity) ?~> "job.couldNotRunCubing"
js <- jobService.publicWrites(job)
Expand All @@ -184,22 +185,28 @@ class JobsController @Inject()(jobDAO: JobDAO,

def runTiffExportJob(organizationName: String,
dataSetName: String,
layerName: String,
bbox: String): Action[AnyContent] =
bbox: String,
layerName: Option[String],
tracingId: Option[String],
tracingVersion: Option[String]): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
for {
organization <- organizationDAO.findOneByName(organizationName) ?~> Messages("organization.notFound",
organizationName)
_ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.export.notAllowed.organization" ~> FORBIDDEN
_ <- jobService.assertTiffExportBoundingBoxLimits(bbox)
command = "export_tiff"
exportFileName = s"${formatDateForFilename(new Date())}__${dataSetName}__${layerName}.zip"
exportFileName = s"${formatDateForFilename(new Date())}__${dataSetName}__${tracingId.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.zip"
commandArgs = Json.obj(
"kwargs" -> Json.obj("organization_name" -> organizationName,
"dataset_name" -> dataSetName,
"layer_name" -> layerName,
"bbox" -> bbox,
"export_file_name" -> exportFileName))
"organization_name" -> organizationName,
"dataset_name" -> dataSetName,
"bbox" -> bbox,
"webknossos_token" -> TracingStoreRpcClient.webKnossosToken,
"export_file_name" -> exportFileName,
"layer_name" -> layerName,
"volume_tracing_id" -> tracingId,
"volume_tracing_version" -> tracingVersion
)
job <- jobService.runJob(command, commandArgs, request.identity) ?~> "job.couldNotRunTiffExport"
js <- jobService.publicWrites(job)
} yield Ok(js)
Expand Down
2 changes: 1 addition & 1 deletion conf/webknossos.latest.routes
Original file line number Diff line number Diff line change
Expand Up @@ -195,4 +195,4 @@ GET /jobs c
GET /jobs/:id controllers.JobsController.get(id: String)
GET /jobs/:id/downloadExport/:exportFileName controllers.JobsController.downloadExport(id: String, exportFileName: String)
GET /jobs/run/cubing/:organizationName/:dataSetName controllers.JobsController.runCubingJob(organizationName: String, dataSetName: String, scale: String)
GET /jobs/run/tiffExport/:organizationName/:dataSetName/:layerName controllers.JobsController.runTiffExportJob(organizationName: String, dataSetName: String, layerName: String, bbox: String)
GET /jobs/run/tiffExport/:organizationName/:dataSetName controllers.JobsController.runTiffExportJob(organizationName: String, dataSetName: String, bbox: String, layerName: Option[String], tracingId: Option[String], tracingVersion: Option[String])
12 changes: 9 additions & 3 deletions frontend/javascripts/admin/admin_rest_api.js
Original file line number Diff line number Diff line change
Expand Up @@ -821,6 +821,7 @@ export async function getJobs(): Promise<Array<APIJob>> {
layerName: job.commandArgs.kwargs.layer_name,
boundingBox: job.commandArgs.kwargs.bbox,
exportFileName: job.commandArgs.kwargs.export_file_name,
tracingId: job.commandArgs.kwargs.volume_tracing_id,
state: job.celeryInfo.state || "UNKNOWN",
createdAt: job.created,
}));
Expand All @@ -839,13 +840,18 @@ export async function startCubingJob(
export async function startTiffExportJob(
datasetName: string,
organizationName: string,
layerName: string,
bbox: Vector6,
layerName: ?string,
tracingId: ?string,
tracingVersion: ?number = null,
): Promise<Array<APIJob>> {
const layerNameSuffix = layerName != null ? `&layerName=${layerName}` : "";
const tracingIdSuffix = tracingId != null ? `&tracingId=${tracingId}` : "";
const tracingVersionSuffix = tracingVersion != null ? `&tracingVersion=${tracingVersion}` : "";
return Request.receiveJSON(
`/api/jobs/run/tiffExport/${organizationName}/${datasetName}/${layerName}?bbox=${bbox.join(
`/api/jobs/run/tiffExport/${organizationName}/${datasetName}?bbox=${bbox.join(
",",
)}`,
)}${layerNameSuffix}${tracingIdSuffix}${tracingVersionSuffix}`,
);
}

Expand Down
3 changes: 2 additions & 1 deletion frontend/javascripts/admin/job/job_list_view.js
Original file line number Diff line number Diff line change
Expand Up @@ -86,9 +86,10 @@ class JobListView extends React.PureComponent<Props, State> {
if (job.type === "tiff_cubing" && job.datasetName) {
return <span>{`Tiff to WKW conversion of ${job.datasetName}`}</span>;
} else if (job.type === "export_tiff" && job.organizationName && job.datasetName) {
const layerLabel = job.tracingId != null ? "volume annotation" : job.layerName || "a";
return (
<span>
Tiff export from {job.layerName || "a"} layer of{" "}
Tiff export from {layerLabel} layer of{" "}
<Link to={`/datasets/${job.organizationName}/${job.datasetName}/view`}>
{job.datasetName}
</Link>{" "}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,54 +2,83 @@
import { Button, Modal, Alert } from "antd";
import React, { useState } from "react";
import type { BoundingBoxType } from "oxalis/constants";
import type { APIDataset } from "types/api_flow_types";
import type { VolumeTracing } from "oxalis/store";
import type { APIDataset, APIDataLayer } from "types/api_flow_types";
import { startTiffExportJob } from "admin/admin_rest_api";
import { getResolutionInfo } from "oxalis/model/accessors/dataset_accessor";
import Model from "oxalis/model";
import features from "features";
import * as Utils from "libs/utils";

type Props = {
destroy: () => void,
volumeTracing: ?VolumeTracing,
dataset: APIDataset,
hasVolumeTracing: boolean,
boundingBox: BoundingBoxType,
};

const ExportBoundingBoxModal = ({ destroy, dataset, hasVolumeTracing, boundingBox }: Props) => {
const ExportBoundingBoxModal = ({ destroy, dataset, boundingBox, volumeTracing }: Props) => {
const [startedExports, setStartedExports] = useState([]);

const handleClose = () => {
destroy();
};

const handleStartExport = layerName => {
startTiffExportJob(
const exportKey = layerInfos => (layerInfos.layerName || "") + (layerInfos.tracingId || "");

const handleStartExport = async layerInfos => {
setStartedExports(startedExports.concat(exportKey(layerInfos)));
if (layerInfos.tracingId) {
await Model.ensureSavedState();
}
await startTiffExportJob(
dataset.name,
dataset.owningOrganization,
layerName,
Utils.computeArrayFromBoundingBox(boundingBox),
layerInfos.layerName,
layerInfos.tracingId,
);
setStartedExports(startedExports.concat(layerName));
};

const layerNames = dataset.dataSource.dataLayers.map(layer => {
const nameIfFromDataset = layer.category === "color" || !hasVolumeTracing ? layer.name : null;
const nameIfVolume =
hasVolumeTracing && layer.category === "segmentation" && layer.fallbackLayerInfo != null
? layer.fallbackLayerInfo.name
: null;
return nameIfFromDataset || nameIfVolume;
const hasMag1 = (layer: APIDataLayer) => getResolutionInfo(layer.resolutions).hasIndex(0);

const allLayerInfos = dataset.dataSource.dataLayers.map(layer => {
if (layer.category === "color" || volumeTracing == null)
return {
displayName: layer.name,
layerName: layer.name,
tracingId: null,
tracingVersion: null,
hasMag1: hasMag1(layer),
};
if (layer.fallbackLayerInfo != null)
return {
displayName: "Volume annotation with fallback segmentation",
fm3 marked this conversation as resolved.
Show resolved Hide resolved
layerName: layer.fallbackLayerInfo.name,
tracingId: volumeTracing.tracingId,
tracingVersion: volumeTracing.version,
hasMag1: hasMag1(layer),
};
return {
displayName: "Volume annotation",
layerName: null,
tracingId: volumeTracing.tracingId,
tracingVersion: volumeTracing.version,
hasMag1: hasMag1(layer),
};
});

const exportButtonsList = layerNames.map(layerName =>
layerName ? (
const exportButtonsList = allLayerInfos.map(layerInfos =>
layerInfos ? (
<p>
<Button
key={layerName}
onClick={() => handleStartExport(layerName)}
disabled={startedExports.includes(layerName)}
key={exportKey(layerInfos)}
onClick={() => handleStartExport(layerInfos)}
disabled={startedExports.includes(exportKey(layerInfos)) || !layerInfos.hasMag1}
>
{layerName}
{startedExports.includes(layerName) ? " (started)" : null}
{layerInfos.displayName}
{!layerInfos.hasMag1 ? " (resolution 1 missing)" : ""}
fm3 marked this conversation as resolved.
Show resolved Hide resolved
{startedExports.includes(exportKey(layerInfos)) ? " (started)" : ""}
</Button>
</p>
) : null,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ class UserSettingsView extends PureComponent<UserSettingsViewProps> {
renderIndependently(destroy => (
<ExportBoundingBoxModal
dataset={this.props.dataset}
hasVolumeTracing={this.props.tracing.volume != null}
volumeTracing={this.props.tracing.volume}
boundingBox={selectedBoundingBox.boundingBox}
destroy={destroy}
/>
Expand Down
1 change: 1 addition & 0 deletions frontend/javascripts/types/api_flow_types.js
Original file line number Diff line number Diff line change
Expand Up @@ -550,6 +550,7 @@ export type APIJob = {
+datasetName: ?string,
+exportFileName: ?string,
+layerName: ?string,
+tracingId: ?string,
+organizationName: ?string,
+boundingBox: ?string,
+type: string,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ trait VolumeTracingBucketHelper
with BucketKeys
with VolumeBucketReversionHelper {

protected val cacheTimeout: FiniteDuration = 20 minutes
protected val cacheTimeout: FiniteDuration = 70 minutes

implicit def volumeDataStore: FossilDBClient
implicit def volumeDataCache: TemporaryVolumeDataStore
Expand Down