Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Change GET routes with side effects to POST #6023

Merged
merged 4 commits into from
Feb 7, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- When changing which layers are visible in an annotation, this setting is persisted in the annotation, so when you share it, viewers will see the same visibility configuration. [#5967](https://github.com/scalableminds/webknossos/pull/5967)
- Downloading public annotations is now also allowed without being authenticated. [#6001](https://github.com/scalableminds/webknossos/pull/6001)
- Downloaded volume annotation layers no longer produce zero-byte zipfiles but rather a valid header-only zip file with no contents. [#6022](https://github.com/scalableminds/webknossos/pull/6022)
- Changed a number of API routes from GET to POST to avoid unwanted side effects. [#6023](https://github.com/scalableminds/webknossos/pull/6023)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please also note that the `triggerInboxCheck` datastore route (and its controller action) was removed in this PR; use `triggerInboxCheckBlocking` instead.

- Removed unused datastore route `checkInbox` (use `checkInboxBlocking` instead). [#6023](https://github.com/scalableminds/webknossos/pull/6023)

### Fixed
- Fixed volume-related bugs which could corrupt the volume data in certain scenarios. [#5955](https://github.com/scalableminds/webknossos/pull/5955)
Expand Down
10 changes: 5 additions & 5 deletions app/models/annotation/WKRemoteTracingStoreClient.scala
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ class WKRemoteTracingStoreClient(tracingStore: TracingStore, dataSet: DataSet, r
.addQueryString("token" -> RpcTokenHolder.webKnossosToken)
.addQueryStringOptional("version", versionString)
.addQueryString("fromTask" -> isFromTask.toString)
.getWithJsonResponse[String]
.postWithJsonResponse[String]
}

def duplicateVolumeTracing(volumeTracingId: String,
Expand All @@ -97,23 +97,23 @@ class WKRemoteTracingStoreClient(tracingStore: TracingStore, dataSet: DataSet, r
.addQueryStringOptional("minResolution", resolutionRestrictions.minStr)
.addQueryStringOptional("maxResolution", resolutionRestrictions.maxStr)
.addQueryString("downsample" -> downsample.toString)
.postWithJsonResponse[Option[BoundingBox], String](dataSetBoundingBox)
.postJsonWithJsonResponse[Option[BoundingBox], String](dataSetBoundingBox)
}

def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = {
  // Ask the tracingstore to merge the given skeleton tracings; returns the new tracing id.
  logger.debug("Called to merge SkeletonTracings by ids." + baseInfo)
  val selectors = tracingIds.map(TracingSelector(_))
  val mergeRequest = rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromIds")
    .addQueryString("token" -> RpcTokenHolder.webKnossosToken)
    .addQueryString("persist" -> persistTracing.toString)
  // POST (not GET) because merging creates a new tracing on the remote store.
  mergeRequest.postJsonWithJsonResponse[List[TracingSelector], String](selectors)
}

def mergeVolumeTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = {
  // Ask the tracingstore to merge the given volume tracings; returns the new tracing id.
  logger.debug("Called to merge VolumeTracings by ids." + baseInfo)
  val selectors = tracingIds.map(TracingSelector(_))
  val mergeRequest = rpc(s"${tracingStore.url}/tracings/volume/mergedFromIds")
    .addQueryString("token" -> RpcTokenHolder.webKnossosToken)
    .addQueryString("persist" -> persistTracing.toString)
  // POST (not GET) because merging creates a new tracing on the remote store.
  mergeRequest.postJsonWithJsonResponse[List[TracingSelector], String](selectors)
}

def mergeSkeletonTracingsByContents(tracings: SkeletonTracings, persistTracing: Boolean): Fox[String] = {
Expand Down Expand Up @@ -200,7 +200,7 @@ class WKRemoteTracingStoreClient(tracingStore: TracingStore, dataSet: DataSet, r
for {
newId: String <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/unlinkFallback")
.addQueryString("token" -> RpcTokenHolder.webKnossosToken)
.postWithJsonResponse[DataSourceLike, String](dataSource)
.postJsonWithJsonResponse[DataSourceLike, String](dataSource)
} yield newId
}

Expand Down
2 changes: 1 addition & 1 deletion app/models/organization/OrganizationService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ class OrganizationService @Inject()(organizationDAO: OrganizationDAO,
// Tells one datastore to create the on-disk folder for the new organization.
// POST because this is a side-effecting trigger, not a read (see #6023).
def sendRPCToDataStore(dataStore: DataStore) =
  rpc(s"${dataStore.url}/data/triggers/newOrganizationFolder")
    .addQueryString("token" -> dataStoreToken, "organizationName" -> organizationName)
    .post
    .futureBox // wrap in Box so a single failing datastore does not fail the whole Fox chain

for {
Expand Down
10 changes: 5 additions & 5 deletions conf/webknossos.latest.routes
Original file line number Diff line number Diff line change
Expand Up @@ -203,11 +203,11 @@ GET /time/user/:userId c
GET /jobs/request controllers.WKRemoteWorkerController.requestJobs(key: String)
GET /jobs controllers.JobsController.list
GET /jobs/status controllers.JobsController.status
GET /jobs/run/convertToWkw/:organizationName/:dataSetName controllers.JobsController.runConvertToWkwJob(organizationName: String, dataSetName: String, scale: String, dataStoreName: String)
GET /jobs/run/computeMeshFile/:organizationName/:dataSetName controllers.JobsController.runComputeMeshFileJob(organizationName: String, dataSetName: String, layerName: String, mag: String, agglomerateView: Option[String])
GET /jobs/run/exportTiff/:organizationName/:dataSetName controllers.JobsController.runExportTiffJob(organizationName: String, dataSetName: String, bbox: String, layerName: Option[String], tracingId: Option[String], tracingVersion: Option[String], annotationId: Option[String], annotationType: Option[String], hideUnmappedIds: Option[Boolean], mappingName: Option[String], mappingType: Option[String])
GET /jobs/run/inferNuclei/:organizationName/:dataSetName controllers.JobsController.runInferNucleiJob(organizationName: String, dataSetName: String, layerName: Option[String])
GET /jobs/run/globalizeFloodfills/:organizationName/:dataSetName controllers.JobsController.runGlobalizeFloodfills(organizationName: String, dataSetName: String, newDataSetName: Option[String], layerName: Option[String], annotationId: Option[String], annotationType: Option[String])
POST /jobs/run/convertToWkw/:organizationName/:dataSetName controllers.JobsController.runConvertToWkwJob(organizationName: String, dataSetName: String, scale: String, dataStoreName: String)
POST /jobs/run/computeMeshFile/:organizationName/:dataSetName controllers.JobsController.runComputeMeshFileJob(organizationName: String, dataSetName: String, layerName: String, mag: String, agglomerateView: Option[String])
POST /jobs/run/exportTiff/:organizationName/:dataSetName controllers.JobsController.runExportTiffJob(organizationName: String, dataSetName: String, bbox: String, layerName: Option[String], tracingId: Option[String], tracingVersion: Option[String], annotationId: Option[String], annotationType: Option[String], hideUnmappedIds: Option[Boolean], mappingName: Option[String], mappingType: Option[String])
POST /jobs/run/inferNuclei/:organizationName/:dataSetName controllers.JobsController.runInferNucleiJob(organizationName: String, dataSetName: String, layerName: Option[String])
POST /jobs/run/globalizeFloodfills/:organizationName/:dataSetName controllers.JobsController.runGlobalizeFloodfills(organizationName: String, dataSetName: String, newDataSetName: Option[String], layerName: Option[String], annotationId: Option[String], annotationType: Option[String])
GET /jobs/:id controllers.JobsController.get(id: String)
PATCH /jobs/:id/cancel controllers.JobsController.cancel(id: String)
POST /jobs/:id/status controllers.WKRemoteWorkerController.updateJobStatus(key: String, id: String)
17 changes: 17 additions & 0 deletions frontend/javascripts/admin/admin_rest_api.js
Original file line number Diff line number Diff line change
Expand Up @@ -985,6 +985,9 @@ export async function startConvertToWkwJob(
): Promise<Array<APIJob>> {
return Request.receiveJSON(
`/api/jobs/run/convertToWkw/${organizationName}/${datasetName}?scale=${scale.toString()}&dataStoreName=${datastoreName}`,
{
method: "POST",
},
);
}

Expand Down Expand Up @@ -1014,6 +1017,9 @@ export async function startExportTiffJob(
`/api/jobs/run/exportTiff/${organizationName}/${datasetName}?bbox=${bbox.join(
",",
)}${layerNameSuffix}${tracingIdSuffix}${tracingVersionSuffix}${annotationIdSuffix}${annotationTypeSuffix}${mappingNameSuffix}${mappingTypeSuffix}${hideUnmappedIdsSuffix}`,
{
method: "POST",
},
);
}

Expand All @@ -1028,6 +1034,9 @@ export function startComputeMeshFileJob(
`/api/jobs/run/computeMeshFile/${organizationName}/${datasetName}?layerName=${layerName}&mag=${mag.join(
"-",
)}${agglomerateView ? `&agglomerateView=${agglomerateView}` : ""}`,
{
method: "POST",
},
);
}

Expand All @@ -1038,6 +1047,9 @@ export function startNucleiInferralJob(
): Promise<APIJob> {
return Request.receiveJSON(
`/api/jobs/run/inferNuclei/${organizationName}/${datasetName}?layerName=${layerName}`,
{
method: "POST",
},
);
}

Expand All @@ -1051,6 +1063,9 @@ export function startGlobalizeFloodfillsJob(
): Promise<APIJob> {
return Request.receiveJSON(
`/api/jobs/run/globalizeFloodfills/${organizationName}/${datasetName}?newDataSetName=${newDataSetName}&layerName=${layerName}&annotationId=${annotationId}&annotationType=${annotationType}`,
{
method: "POST",
},
);
}

Expand Down Expand Up @@ -1305,6 +1320,7 @@ export async function triggerDatasetCheck(datastoreHost: string): Promise<void>
await doWithToken(token =>
Request.triggerRequest(`/data/triggers/checkInboxBlocking?token=${token}`, {
host: datastoreHost,
method: "POST",
}),
);
}
Expand All @@ -1321,6 +1337,7 @@ export async function triggerDatasetClearCache(
}`,
{
host: datastoreHost,
method: "POST",
},
),
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,16 +72,6 @@ class DataSourceController @Inject()(
}
}

@ApiOperation(hidden = true, value = "")
// Fire-and-forget inbox re-scan: kicks off checkInbox without awaiting its result.
// NOTE(review): removed in #6023 — callers should use triggerInboxCheckBlocking instead.
def triggerInboxCheck(token: Option[String]): Action[AnyContent] = Action.async { implicit request =>
  accessTokenService.validateAccessForSyncBlock(UserAccessRequest.administrateDataSources, token) {
    AllowRemoteOrigin {
      // Result of the scan is intentionally discarded; Ok is returned immediately.
      dataSourceService.checkInbox(verbose = true)
      Ok
    }
  }
}

@ApiOperation(hidden = true, value = "")
def triggerInboxCheckBlocking(token: Option[String]): Action[AnyContent] = Action.async { implicit request =>
accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, token) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,12 +63,22 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient) extends FoxIm
extractBytesResponse(performRequest)
}

// Fires the prepared request as a body-less POST and yields the raw response.
def post(): Fox[WSResponse] = {
  val posted = request.withMethod("POST")
  request = posted
  performRequest
}

// Fires the prepared request as a POST with the given file as body.
def post(file: File): Fox[WSResponse] = {
  val posted = request.withBody(file).withMethod("POST")
  request = posted
  performRequest
}

def postWithJsonResponse[T: Reads](file: File): Fox[T] = {
// Body-less POST whose response body is parsed as JSON into T.
def postWithJsonResponse[T: Reads]: Fox[T] = {
  val posted = request.withMethod("POST")
  request = posted
  parseJsonResponse(performRequest)
}

// POST with a file body; the response body is parsed as JSON into T.
def postFileWithJsonResponse[T: Reads](file: File): Fox[T] = {
  val posted = request.withBody(file).withMethod("POST")
  request = posted
  parseJsonResponse(performRequest)
}
Expand Down Expand Up @@ -102,7 +112,7 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient) extends FoxIm
performRequest
}

def postWithJsonResponse[T: Writes, U: Reads](body: T = Json.obj()): Fox[U] = {
def postJsonWithJsonResponse[T: Writes, U: Reads](body: T = Json.obj()): Fox[U] = {
request = request
.addHttpHeaders(HeaderNames.CONTENT_TYPE -> "application/json")
.withBody(Json.toJson(body))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,5 +102,5 @@ class DSRemoteWebKnossosClient @Inject()(
rpc(s"$webKnossosUri/api/datastores/$dataStoreName/validateUserAccess")
.addQueryString("key" -> dataStoreKey)
.addQueryStringOptional("token", token)
.postWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest)
.postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest)
}
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,9 @@ GET /datasets/:organizationName/:dataSetName
DELETE /datasets/:organizationName/:dataSetName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationName: String, dataSetName: String)

# Actions
GET /triggers/checkInbox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheck(token: Option[String])
GET /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking(token: Option[String])
GET /triggers/newOrganizationFolder @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(token: Option[String], organizationName: String)
GET /triggers/reload/:organizationName/:dataSetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(token: Option[String], organizationName: String, dataSetName: String, layerName: Option[String])
POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking(token: Option[String])
POST /triggers/newOrganizationFolder @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(token: Option[String], organizationName: String)
POST /triggers/reload/:organizationName/:dataSetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(token: Option[String], organizationName: String, dataSetName: String, layerName: Option[String])

# Exports
GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(token: Option[String], jobId: String)
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ class TSRemoteWebKnossosClient @Inject()(
rpc(s"$webKnossosUrl/api/tracingstores/$tracingStoreName/validateUserAccess")
.addQueryString("key" -> tracingStoreKey)
.addQueryStringOptional("token", token)
.postWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest)
.postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest)
}

class TracingStoreAccessTokenService @Inject()(val remoteWebKnossosClient: TSRemoteWebKnossosClient,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,4 @@ GET /skeleton/:tracingId/updateActionLog @com.scalablemin
POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String])

POST /skeleton/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.update(token: Option[String], tracingId: String)
GET /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], tracingId: String, version: Option[Long], fromTask: Option[Boolean])
POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], tracingId: String, version: Option[Long], fromTask: Option[Boolean])