This repository has been archived by the owner on May 31, 2023. It is now read-only.

fix from_json, extract routes to files #42

Merged · 1 commit · Feb 13, 2019
31 changes: 26 additions & 5 deletions Pipfile
@@ -3,10 +3,11 @@ url = "https://pypi.python.org/simple"
verify_ssl = true

[dev-packages]
autoflake = "*"
black = "==18.9b0"
flake8 = "*"
isort = "*"
mypy = "*"
mypy = "==0.660"
Contributor Author:
There is a bug in mypy version 0.670, which was just released:
python/mypy#6364

Member:
good catch!

pep8-naming = "*"
py-spy = "*"
pylint = "*"
@@ -26,13 +27,33 @@ python_version = "3.7.2"

[scripts]
main = "python -m py_datastore"
lint = "bash -c 'pylint --errors-only --jobs 0 py_datastore benchmarks stubs && flake8 py_datastore benchmarks stubs'"
pretty = "bash -c 'black py_datastore benchmarks stubs && isort --recursive py_datastore benchmarks stubs'"
pretty-check = "bash -c 'black --check py_datastore benchmarks stubs && isort --check-only --recursive py_datastore benchmarks stubs'"
lint = '''
bash -c " \
pylint --errors-only --jobs 0 py_datastore benchmarks stubs && \
flake8 py_datastore benchmarks stubs \
"
'''
pretty = '''
bash -c " \
autoflake \
--in-place --recursive \
--remove-all-unused-imports --remove-unused-variables \
py_datastore benchmarks stubs && \
black py_datastore benchmarks stubs && \
isort --recursive py_datastore benchmarks stubs \
"
'''
pretty-check = '''
bash -c " \
black --check py_datastore benchmarks stubs && \
isort --check-only --recursive py_datastore benchmarks stubs \
"
'''
type-check = '''
bash -c " \
MYPYPATH=\"$(pipenv --venv)/lib/python3.7/site-packages/:stubs\" \
mypy \
--disallow-untyped-defs \
py_datastore benchmarks stubs"
py_datastore benchmarks stubs \
"
'''
37 changes: 21 additions & 16 deletions Pipfile.lock

Some generated files are not rendered by default.

147 changes: 5 additions & 142 deletions py_datastore/__main__.py
@@ -1,14 +1,9 @@
import asyncio
import base64
import json
import os
from copy import deepcopy
from io import BytesIO
from typing import Dict, List, Tuple, Type

import numpy as np
from aiohttp import ClientSession
from PIL import Image
from sanic import Sanic, response
from sanic.request import Request
from sanic_cors import CORS
@@ -17,12 +12,9 @@
from .backends.backend import Backend
from .backends.neuroglancer.backend import NeuroglancerBackend as Neuroglancer
from .repository import Repository
from .utils.colors import color_bytes
from .utils.json import from_json
from .utils.types import JSON, Vec3D
from .webknossos.access import AccessRequest, authorized
from .routes import routes
from .utils.types import JSON
from .webknossos.client import WebKnossosClient as WebKnossos
from .webknossos.models import DataRequest as WKDataRequest


class Server(Sanic):
@@ -108,14 +100,11 @@ async def ping_webknossos(app: Server) -> None:
## ROUTES ##


@app.route("/data/health")
async def health(request: Request) -> response.HTTPResponse:
return response.text("Ok")
app.blueprint(routes)


@app.route("/data/triggers/checkInboxBlocking")
async def check_inbox_blocking(request: Request) -> response.HTTPResponse:
await app.load_persisted_datasets()
@app.route("/data/health")
async def health(request: Request) -> response.HTTPResponse:
return response.text("Ok")


@@ -132,132 +121,6 @@ async def build_info(request: Request) -> response.HTTPResponse:
)


@app.route("/api/<backend_name>/<organization_name>/<dataset_name>", methods=["POST"])
async def add_dataset(
request: Request, backend_name: str, organization_name: str, dataset_name: str
) -> response.HTTPResponse:
dataset_config = request.json
await app.add_dataset(dataset_config, backend_name, organization_name, dataset_name)

ds_path = app.config["datasets_path"]
os.makedirs(os.path.dirname(ds_path), exist_ok=True)
if not os.path.isfile(ds_path):
with open(ds_path, "w") as datasets_file:
json.dump({}, datasets_file)
with open(ds_path, "r+") as datasets_file:
datasets = json.load(datasets_file)
datasets.setdefault(backend_name, {}).setdefault(organization_name, {})[
dataset_name
] = dataset_config
datasets_file.seek(0)
json.dump(datasets, datasets_file)

return response.text("Ok")


@app.route(
"/data/datasets/<organization_name>/<dataset_name>/layers/<layer_name>/data",
methods=["POST", "OPTIONS"],
)
@authorized(AccessRequest.read_dataset)
async def get_data_post(
request: Request, organization_name: str, dataset_name: str, layer_name: str
) -> response.HTTPResponse:
(backend_name, dataset) = app.repository.get_dataset(
organization_name, dataset_name
)
backend = app.backends[backend_name]

bucket_requests = from_json(request.json, List[WKDataRequest])
assert all(not request.fourBit for request in bucket_requests)

buckets = await asyncio.gather(
*(
backend.read_data(
dataset,
layer_name,
r.zoomStep,
Vec3D(*r.position),
Vec3D(r.cubeSize, r.cubeSize, r.cubeSize),
)
for r in bucket_requests
)
)
missing_buckets = [index for index, data in enumerate(buckets) if data is None]
existing_buckets = [data.flatten(order="F") for data in buckets if data is not None]
data = (
np.concatenate(existing_buckets).tobytes() if len(existing_buckets) > 0 else b""
)

headers = {
"Access-Control-Expose-Headers": "MISSING-BUCKETS",
"MISSING-BUCKETS": json.dumps(missing_buckets),
}
return response.raw(data, headers=headers)


@app.route(
"/data/datasets/<organization_name>/<dataset_name>/layers/<layer_name>/thumbnail.json"
)
@authorized(AccessRequest.read_dataset)
async def get_thumbnail(
request: Request, organization_name: str, dataset_name: str, layer_name: str
) -> response.HTTPResponse:
width = int(request.args.get("width"))
height = int(request.args.get("height"))

(backend_name, dataset) = app.repository.get_dataset(
organization_name, dataset_name
)
backend = app.backends[backend_name]
layer = [i for i in dataset.to_webknossos().dataLayers if i.name == layer_name][0]
scale = 3
center = layer.boundingBox.box().center()
size = Vec3D(width, height, 1)
data = (
await backend.read_data(dataset, layer_name, scale, center - size // 2, size)
)[:, :, 0]
if layer.category == "segmentation":
data = data.astype("uint8")
thumbnail = Image.fromarray(data, mode="P")
color_list = list(color_bytes.values())[: 2 ** 8]
thumbnail.putpalette(b"".join(color_list))
with BytesIO() as output:
thumbnail.save(output, "PNG", transparency=0)
return response.json(
{"mimeType": "image/png", "value": base64.b64encode(output.getvalue())}
)
else:
thumbnail = Image.fromarray(data)
with BytesIO() as output:
thumbnail.save(output, "JPEG")
return response.json(
{"mimeType": "image/jpeg", "value": base64.b64encode(output.getvalue())}
)


app.static("/trace", "data/trace.html", name="trace_get")
app.static("/trace/flame.svg", "data/flame.svg", name="trace_get")


@app.route("/trace", methods=["POST"])
async def trace_post(request: Request) -> response.HTTPResponse:
# py-spy is only in dev-packages
p = await asyncio.create_subprocess_exec(
"py-spy",
"--flame",
"data/flame.svg",
"--duration",
"20",
"--pid",
str(os.getpid()),
)
await p.wait()
return (
response.text("Ok") if p.returncode == 0 else response.text("Error", status=500)
)


## MAIN ##


1 change: 0 additions & 1 deletion py_datastore/backends/backend.py
@@ -52,4 +52,3 @@ async def read_data(
:param shape: in scale voxels
:returns: numpy array of shape shape
"""
pass
4 changes: 2 additions & 2 deletions py_datastore/backends/neuroglancer/models.py
@@ -12,7 +12,7 @@

@dataclass(frozen=True)
class Scale:
chunk_sizes: Tuple[Vec3D]
chunk_sizes: Tuple[Vec3D, ...]
Contributor Author:
Tuple type annotations are expected to specify a type for every element, or to use an ellipsis, which then behaves like a list type. This is needed to make from_json work for tuples of unknown length.
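
For illustration, a minimal sketch of how a from_json-style decoder might distinguish the two forms (hypothetical code, not this repo's implementation; `typing.get_origin`/`get_args` need Python 3.8+):

```python
from typing import Any, Tuple, get_args, get_origin

def decode(data: Any, ty: Any) -> Any:
    """Hypothetical from_json-style decoder fragment."""
    origin, args = get_origin(ty), get_args(ty)
    if origin is tuple:
        if len(args) == 2 and args[1] is Ellipsis:
            # Tuple[X, ...]: homogeneous elements, any length (list-like)
            return tuple(decode(item, args[0]) for item in data)
        # Tuple[X, Y, Z]: fixed length, one type per position
        return tuple(decode(item, t) for item, t in zip(data, args))
    if origin is list:
        return [decode(item, args[0]) for item in data]
    return ty(data)  # naive leaf conversion

decode([[1, 2], [3, 4, 5]], Tuple[Tuple[int, ...], ...])
# -> ((1, 2), (3, 4, 5))
```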

Member:

Why do we want tuples of unknown length instead of plain lists? Does that help with hashability/typechecking etc?

Contributor Author:
Exactly, tuples are hashable and can therefore be cached; lists cannot.
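
A minimal illustration of the hashability point (Vec3D stubbed as a NamedTuple here so the snippet is self-contained):

```python
from functools import lru_cache
from typing import NamedTuple, Tuple

class Vec3D(NamedTuple):  # stand-in for py_datastore.utils.types.Vec3D
    x: int
    y: int
    z: int

@lru_cache(maxsize=None)
def total_voxels(chunk_sizes: Tuple[Vec3D, ...]) -> int:
    # tuples are hashable, so they can serve as cache keys
    return sum(x * y * z for x, y, z in chunk_sizes)

total_voxels((Vec3D(32, 32, 32), Vec3D(64, 64, 64)))  # memoized on repeat calls
# total_voxels([Vec3D(32, 32, 32)])  # TypeError: unhashable type: 'list'
```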

encoding: str
key: str
resolution: Vec3D
Expand All @@ -38,7 +38,7 @@ class Layer:
source: str
data_type: str
num_channels: int
scales: Tuple[Scale]
scales: Tuple[Scale, ...]
type: str
# InitVar allows to consume mesh argument in init without storing it
mesh: InitVar[Any] = None
8 changes: 8 additions & 0 deletions py_datastore/routes/__init__.py
@@ -0,0 +1,8 @@
from sanic import Blueprint

from .datasets import datasets
from .triggers import triggers
from .trace import trace

data = Blueprint.group(datasets, triggers, url_prefix="/data")
routes = Blueprint.group(data, trace)
6 changes: 6 additions & 0 deletions py_datastore/routes/datasets/__init__.py
@@ -0,0 +1,6 @@
from sanic import Blueprint

from .read_data import read_data
from .thumbnail import thumbnail

datasets = Blueprint.group(read_data, thumbnail, url_prefix="/datasets")
55 changes: 55 additions & 0 deletions py_datastore/routes/datasets/read_data.py
@@ -0,0 +1,55 @@
import asyncio
import json
from typing import List

import numpy as np
from sanic import Blueprint, response
from sanic.request import Request

from ...utils.json import from_json
from ...utils.types import Vec3D
from ...webknossos.access import AccessRequest, authorized
from ...webknossos.models import DataRequest as WKDataRequest

read_data = Blueprint(__name__)


@read_data.route(
"/<organization_name>/<dataset_name>/layers/<layer_name>/data",
methods=["POST", "OPTIONS"],
)
@authorized(AccessRequest.read_dataset)
async def get_data_post(
request: Request, organization_name: str, dataset_name: str, layer_name: str
) -> response.HTTPResponse:
(backend_name, dataset) = request.app.repository.get_dataset(
organization_name, dataset_name
)
backend = request.app.backends[backend_name]

bucket_requests = from_json(request.json, List[WKDataRequest])
assert all(not request.fourBit for request in bucket_requests)

buckets = await asyncio.gather(
*(
backend.read_data(
dataset,
layer_name,
r.zoomStep,
Vec3D(*r.position),
Vec3D(r.cubeSize, r.cubeSize, r.cubeSize),
)
for r in bucket_requests
)
)
missing_buckets = [index for index, data in enumerate(buckets) if data is None]
existing_buckets = [data.flatten(order="F") for data in buckets if data is not None]
data = (
np.concatenate(existing_buckets).tobytes() if len(existing_buckets) > 0 else b""
)

headers = {
"Access-Control-Expose-Headers": "MISSING-BUCKETS",
"MISSING-BUCKETS": json.dumps(missing_buckets),
}
return response.raw(data, headers=headers)