Tensorflow segmentation statistics #3946

Merged: 4 commits, Mar 26, 2019
2 changes: 2 additions & 0 deletions conf/messages
@@ -96,6 +96,8 @@ dataSet.dataStore.missing=dataStore missing in the supplied json
dataSet.dataSet.missing=dataSet missing in the supplied json
dataSet.downloadAlreadyRunning=Sample dataset download is already running.
dataSet.alreadyPresent=Sample dataset is already present.
dataSet.noResolutions=Data layer does not contain resolutions
dataSet.sampledOnlyBlack=Sampled data positions contained only black data

dataSource.notFound=Datasource not found on datastore server

4 changes: 3 additions & 1 deletion project/Dependencies.scala
@@ -43,6 +43,7 @@ object Dependencies {
val xmlWriter = "org.glassfish.jaxb" % "txw2" % "2.2.11"
val woodstoxXml = "org.codehaus.woodstox" % "wstx-asl" % "3.2.3"
val redis = "net.debasishg" %% "redisclient" % "3.9"
val spire = "org.typelevel" %% "spire" % "0.14.1"

val sql = Seq(
"com.typesafe.slick" %% "slick" % "3.2.3",
@@ -78,7 +79,8 @@ object Dependencies {
playIterateesStreams,
filters,
ws,
guice
guice,
spire
)

val webknossosTracingstoreDependencies = Seq(
12 changes: 12 additions & 0 deletions util/src/main/scala/com/scalableminds/util/tools/Math.scala
@@ -1,5 +1,7 @@
package com.scalableminds.util.tools

import Numeric.Implicits._

object Math {
val RotationMatrixSize3D = 16

@@ -34,4 +36,14 @@ object Math {
lower.max(x).min(upper)
}

def mean[T: Numeric](xs: Iterable[T]): Double = xs.sum.toDouble / xs.size

def variance[T: Numeric](xs: Iterable[T]): Double = {
val avg = mean(xs)

xs.map(_.toDouble).map(a => math.pow(a - avg, 2)).sum / xs.size
}

def stdDev[T: Numeric](xs: Iterable[T]): Double = math.sqrt(variance(xs))

}
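
For a quick sense of what these new helpers compute, a minimal worked example (the sample values are illustrative, not taken from the PR):

import com.scalableminds.util.tools.Math

// Population statistics over a small sample
val samples = List(12.0, 15.0, 11.0, 14.0)
Math.mean(samples)     // 13.0
Math.variance(samples) // 2.5, since the sum of squared deviations (10.0) is divided by xs.size
Math.stdDev(samples)   // sqrt(2.5) ≈ 1.581

Note that variance divides by xs.size, so these are population (not sample) statistics.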
@@ -370,6 +370,22 @@ class BinaryDataController @Inject()(
private def formatNeighborList(neighbors: List[Int]): String =
"[" + neighbors.mkString(", ") + "]"

def colorStatistics(organizationName: String, dataSetName: String, dataLayerName: String) = Action.async {
implicit request =>
accessTokenService
.validateAccess(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationName))) {
AllowRemoteOrigin {
for {
(dataSource, dataLayer) <- getDataSourceAndDataLayer(organizationName, dataSetName, dataLayerName)
meanAndStdDev <- findDataService.meanAndStdDev(dataSource, dataLayer)
} yield
Ok(
Json.obj("mean" -> meanAndStdDev._1, "stdDev" -> meanAndStdDev._2)
)
}
}
}

def findData(organizationName: String, dataSetName: String, dataLayerName: String) = Action.async {
implicit request =>
accessTokenService
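To illustrate the new action's output: a successful call yields a JSON body with the two fields built by the Json.obj above. A hypothetical client-side sketch using play-json (the numeric values are placeholders, not real measurements):

import play.api.libs.json.Json

// Hypothetical response body for .../layers/<layer>/colorStatistics
val body = """{ "mean": 121.7, "stdDev": 36.2 }"""
val json = Json.parse(body)
val mean   = (json \ "mean").as[Double]   // 121.7
val stdDev = (json \ "stdDev").as[Double] // 36.2
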
@@ -7,8 +7,9 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.models.{DataRequest, VoxelPosition}
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, ElementClass}
import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest
import com.scalableminds.util.tools.Math
import net.liftweb.common.Full
import play.api.i18n.MessagesProvider
import play.api.i18n.{Messages, MessagesProvider}

import scala.concurrent.ExecutionContext
import scala.reflect.ClassTag
@@ -18,40 +19,41 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp
val binaryDataService: BinaryDataService = dataServicesHolder.binaryDataService
var i = 0

def findPositionWithData(dataSource: DataSource, dataLayer: DataLayer)(implicit m: MessagesProvider) =
def findPositionWithData(dataSource: DataSource, dataLayer: DataLayer)(
implicit m: MessagesProvider): Fox[Option[(Point3D, Point3D)]] =
for {
positionAndResolutionOpt <- checkAllPositionsForData(dataSource, dataLayer)
} yield positionAndResolutionOpt

private def checkAllPositionsForData(dataSource: DataSource, dataLayer: DataLayer) = {
private def convertData(data: Array[Byte],
elementClass: ElementClass.Value): Array[_ >: Byte with Short with Int with Long] =
elementClass match {
case ElementClass.uint8 =>
convertDataImpl[Byte, ByteBuffer](data, DataTypeFunctors[Byte, ByteBuffer](identity, _.get(_), _.toByte))
case ElementClass.uint16 =>
convertDataImpl[Short, ShortBuffer](data,
DataTypeFunctors[Short, ShortBuffer](_.asShortBuffer, _.get(_), _.toShort))
case ElementClass.uint32 =>
convertDataImpl[Int, IntBuffer](data, DataTypeFunctors[Int, IntBuffer](_.asIntBuffer, _.get(_), _.toInt))
case ElementClass.uint64 =>
convertDataImpl[Long, LongBuffer](data, DataTypeFunctors[Long, LongBuffer](_.asLongBuffer, _.get(_), identity))
}

def convertData(data: Array[Byte]) =
dataLayer.elementClass match {
case ElementClass.uint8 =>
convertDataImpl[Byte, ByteBuffer](data, DataTypeFunctors[Byte, ByteBuffer](identity, _.get(_), _.toByte))
case ElementClass.uint16 =>
convertDataImpl[Short, ShortBuffer](
data,
DataTypeFunctors[Short, ShortBuffer](_.asShortBuffer, _.get(_), _.toShort))
case ElementClass.uint32 =>
convertDataImpl[Int, IntBuffer](data, DataTypeFunctors[Int, IntBuffer](_.asIntBuffer, _.get(_), _.toInt))
case ElementClass.uint64 =>
convertDataImpl[Long, LongBuffer](data,
DataTypeFunctors[Long, LongBuffer](_.asLongBuffer, _.get(_), identity))
}
private def convertDataImpl[T: ClassTag, B <: Buffer](data: Array[Byte],
dataTypeFunctor: DataTypeFunctors[T, B]): Array[T] = {
val srcBuffer = dataTypeFunctor.getTypedBufferFn(ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN))
srcBuffer.rewind()
val dstArray = Array.ofDim[T](srcBuffer.remaining())
dataTypeFunctor.copyDataFn(srcBuffer, dstArray)
dstArray
}

def convertDataImpl[T: ClassTag, B <: Buffer](data: Array[Byte],
dataTypeFunctor: DataTypeFunctors[T, B]): Array[T] = {
val srcBuffer = dataTypeFunctor.getTypedBufferFn(ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN))
srcBuffer.rewind()
val dstArray = Array.ofDim[T](srcBuffer.remaining())
dataTypeFunctor.copyDataFn(srcBuffer, dstArray)
dstArray
}
private def checkAllPositionsForData(dataSource: DataSource,
dataLayer: DataLayer): Fox[Option[(Point3D, Point3D)]] = {

def getExactDataOffset(data: Array[Byte]): Point3D = {
val cubeLength = DataLayer.bucketLength / dataLayer.bytesPerElement
val convertedData = convertData(data)
val convertedData = convertData(data, dataLayer.elementClass)
for {
z <- 0 until cubeLength
y <- 0 until cubeLength
@@ -103,7 +105,7 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp
resolutionIter(createPositions(dataLayer).distinct, dataLayer.resolutions.sortBy(_.maxDim))
}

private def createPositions(dataLayer: DataLayer) = {
private def createPositions(dataLayer: DataLayer, iterationCount: Int = 4) = {

def positionCreationIter(remainingRuns: List[Int], currentPositions: List[Point3D]): List[Point3D] = {

@@ -140,6 +142,66 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp
}
}

positionCreationIter((1 to 4).toList, List[Point3D]())
positionCreationIter((1 to iterationCount).toList, List[Point3D]())
}

def meanAndStdDev(dataSource: DataSource, dataLayer: DataLayer)(
implicit m: MessagesProvider): Fox[(Double, Double)] = {

def getDataFor(position: Point3D, resolution: Point3D): Fox[Array[Byte]] = {
val request = DataRequest(
new VoxelPosition(position.x, position.y, position.z, resolution),
DataLayer.bucketLength,
DataLayer.bucketLength,
DataLayer.bucketLength
)
binaryDataService.handleDataRequest(
DataServiceDataRequest(dataSource, dataLayer, None, request.cuboid(dataLayer), request.settings))
}

def concatenateBuckets(buckets: Seq[Array[Byte]]): Array[Byte] =
buckets.foldLeft(Array[Byte]()) { (acc, i) =>
{
acc ++ i
}
}

def convertNonZeroDataToDouble(data: Array[Byte], elementClass: ElementClass.Value): Array[Double] =
elementClass match {
case ElementClass.uint8 =>
convertDataImpl[Byte, ByteBuffer](data, DataTypeFunctors[Byte, ByteBuffer](identity, _.get(_), _.toByte))
.filter(_ != 0)
.map(spire.math.UByte(_).toDouble)
case ElementClass.uint16 =>
convertDataImpl[Short, ShortBuffer](data,
DataTypeFunctors[Short, ShortBuffer](
_.asShortBuffer,
_.get(_),
_.toShort)).filter(_ != 0).map(spire.math.UShort(_).toDouble)
case ElementClass.uint32 =>
convertDataImpl[Int, IntBuffer](data, DataTypeFunctors[Int, IntBuffer](_.asIntBuffer, _.get(_), _.toInt))
.filter(_ != 0)
.map(spire.math.UInt(_).toDouble)
case ElementClass.uint64 =>
convertDataImpl[Long, LongBuffer](data,
DataTypeFunctors[Long, LongBuffer](_.asLongBuffer, _.get(_), identity))
.filter(_ != 0)
.map(spire.math.ULong(_).toDouble)
}

def meanAndStdDevForPositions(positions: List[Point3D], resolution: Point3D)(
implicit m: MessagesProvider): Fox[(Double, Double)] =
for {
dataBucketWise: Seq[Array[Byte]] <- Fox.serialCombined(positions)(pos => getDataFor(pos, resolution))
dataConcatenated = concatenateBuckets(dataBucketWise)
dataAsDoubles = convertNonZeroDataToDouble(dataConcatenated, dataLayer.elementClass)
_ <- Fox.bool2Fox(dataAsDoubles.nonEmpty) ?~> "dataSet.sampledOnlyBlack"
} yield (Math.mean(dataAsDoubles), Math.stdDev(dataAsDoubles))

for {
_ <- bool2Fox(dataLayer.resolutions.nonEmpty) ?~> "dataSet.noResolutions"
meanAndStdDev <- meanAndStdDevForPositions(createPositions(dataLayer, 2).distinct,
dataLayer.resolutions.minBy(_.maxDim))
} yield meanAndStdDev
}
}
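
A note on the spire conversions in convertNonZeroDataToDouble: the element classes are unsigned, but the corresponding JVM primitives are signed, so for example a uint8 voxel value of 255 is read from the buffer as the byte -1. The spire wrappers (added as a dependency in this PR) recover the unsigned value before mean and standard deviation are computed; zero values are filtered out first, which is what the new dataSet.sampledOnlyBlack message guards against. A minimal sketch of the conversion, assuming only the spire dependency above:

import spire.math.{UByte, UShort}

// uint8: the signed byte -1 carries the bit pattern 0xFF, i.e. the unsigned value 255
val rawByte: Byte = -1
UByte(rawByte).toDouble    // 255.0

// uint16: the signed short -2 carries the bit pattern 0xFFFE, i.e. the unsigned value 65534
val rawShort: Short = -2
UShort(rawShort).toDouble  // 65534.0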
@@ -14,6 +14,7 @@ GET /datasets/:organizationName/:dataSetName/layers/:dataLayerName/image
GET /datasets/:organizationName/:dataSetName/layers/:dataLayerName/thumbnail.json @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestImageThumbnailJson(organizationName: String, dataSetName: String, dataLayerName: String, width: Int, height: Int, centerX: Option[Int], centerY: Option[Int], centerZ: Option[Int], zoom: Option[Double])
GET /datasets/:organizationName/:dataSetName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestImageThumbnailJpeg(organizationName: String, dataSetName: String, dataLayerName: String, width: Int, height: Int, centerX: Option[Int], centerY: Option[Int], centerZ: Option[Int], zoom: Option[Double])
GET /datasets/:organizationName/:dataSetName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(organizationName: String, dataSetName: String, dataLayerName: String)
GET /datasets/:organizationName/:dataSetName/layers/:dataLayerName/colorStatistics @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.colorStatistics(organizationName: String, dataSetName: String, dataLayerName: String)

# Knossos compatibale routes
GET /datasets/:organizationName/:dataSetName/layers/:dataLayerName/mag:resolution/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(organizationName: String, dataSetName: String, dataLayerName: String, resolution: Int, x: Int, y: Int, z: Int, cubeSize: Int)
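For orientation, the new statistics route is addressed like the existing findData route; a hypothetical request path (organization, dataset, and layer names are placeholders, relative to the datastore's route prefix) would be:

GET /datasets/sample_organization/sample_dataset/layers/color/colorStatistics
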
4 changes: 2 additions & 2 deletions yarn.lock
@@ -1363,7 +1363,7 @@ acorn-walk@^6.0.1:
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-6.1.1.tgz#d363b66f5fac5f018ff9c3a1e7b6f8e310cc3913"
integrity sha512-OtUw6JUTgxA2QoqqmrmQ7F2NYqiBPi/L2jqHyFtllhOUvXYQXf0Z1CYUinIfyT4bTCGmrA7gX9FvHA81uzCoVw==

acorn@^5.2.1, acorn@^5.5.3, acorn@^5.6.2:
acorn@^5.0.0, acorn@^5.2.1, acorn@^5.5.3, acorn@^5.6.2:
version "5.7.3"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.3.tgz#67aa231bf8812974b85235a96771eb6bd07ea279"
integrity sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==
@@ -10558,7 +10558,7 @@ scheduler@^0.13.3:
loose-envify "^1.1.0"
object-assign "^4.1.1"

schema-utils@^0.4.0, schema-utils@^0.4.5:
schema-utils@^0.4.0, schema-utils@^0.4.4, schema-utils@^0.4.5:
version "0.4.7"
resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.4.7.tgz#ba74f597d2be2ea880131746ee17d0a093c68187"
integrity sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ==