Skip to content
This repository has been archived by the owner on May 31, 2023. It is now read-only.

Commit

Permalink
BOSS backend (#59)
Browse files — Browse the repository at this point in the history
* WIP, v1

* update Pipfile hashes

* use dataset info from BOSS, support resolutions

* typing & package versions

* update Pipfile.lock, but not dev-dependencies

* use datasets.json

* refactoring: extract Boss Client, models & token repo in extra files

* Update Pipfile

* extract handle_new_channel, minor refactorings

* use env-vars for neurodata.io credentials

* pretty

* pretty

* pretty
  • Loading branch information
jstriebel authored Mar 5, 2019
1 parent 8539b56 commit df2781d
Show file tree
Hide file tree
Showing 18 changed files with 594 additions and 105 deletions.
1 change: 1 addition & 0 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ pylint = "*"
[packages]
aiohttp = "*"
async-lru = "*"
blosc = "*"
compressed-segmentation = ">=1.0.0"
jpeg4py = "*"
numpy = "*"
Expand Down
145 changes: 74 additions & 71 deletions Pipfile.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 7 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,13 @@ A [webKnossos](https://github.com/scalableminds/webknossos) compatible data conn
INSERT INTO "webknossos"."datastores"("name","url","key","isscratch","isdeleted","isforeign")
VALUES (E'connect',E'http://localhost:8000',E'k',FALSE,FALSE,FALSE);
```
2. `docker-compose up --build webknossos-connect`
3. By default, some public datasets are reported. Add more datasets from the webKnossos user interface.
2. To use the initial datasets from [neurodata.io](https://neurodata.io/ndcloud/#data), create a `.env` file:
```
NEURODATA_IO_USER="<your username>"
NEURODATA_IO_PW="<your password>"
```
3. `docker-compose up --build webknossos-connect`
4. By default, some public datasets are reported. Add more datasets from the webKnossos user interface.

## Development
### In Docker :whale:
Expand Down
6 changes: 3 additions & 3 deletions benchmarks/decode_jpg.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,18 @@
import numpy as np
from aiohttp import ClientSession

from wkconnect.backends.neuroglancer.backend import NeuroglancerBackend
from wkconnect.backends.neuroglancer.backend import Neuroglancer
from wkconnect.utils.types import Vec3D

neuroglancer: NeuroglancerBackend
neuroglancer: Neuroglancer
data: np.ndarray


async def setup() -> None:
global neuroglancer
global data
http_client = await ClientSession().__aenter__()
neuroglancer = NeuroglancerBackend({}, http_client)
neuroglancer = Neuroglancer({}, http_client)

data_url = "https://storage.googleapis.com/neuroglancer-public-data/kasthuri2011/image/24_24_30/896-960_1152-1216_1472-1536"

Expand Down
18 changes: 18 additions & 0 deletions data/datasets.json
Original file line number Diff line number Diff line change
@@ -1,4 +1,22 @@
{
"boss": {
"Connectomics_Department": {
"ara": {
"domain": "https://api.boss.neurodata.io",
"collection": "ara_2016",
"experiment": "sagittal_50um",
"username": "$NEURODATA_IO_USER",
"password": "$NEURODATA_IO_PW"
},
"kasthuri2011-neurodata-io": {
"domain": "https://api.boss.neurodata.io",
"collection": "kasthuri",
"experiment": "kasthuri11",
"username": "$NEURODATA_IO_USER",
"password": "$NEURODATA_IO_PW"
}
}
},
"neuroglancer": {
"Connectomics_Department": {
"fafb_v14": {
Expand Down
3 changes: 2 additions & 1 deletion stubs/compressed_segmentation.pyi
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import numpy as np
from typing import Tuple

import numpy as np

def decompress(
encoded: bytes,
volume_size: Tuple[int, int, int],
Expand Down
19 changes: 13 additions & 6 deletions wkconnect/__main__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import asyncio
import json
import logging
import os
from copy import deepcopy
from typing import Dict, List, Tuple, Type
from typing import Any, Dict, List, Tuple, Type

from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
Expand All @@ -12,7 +13,8 @@
from uvloop import Loop

from .backends.backend import Backend
from .backends.neuroglancer.backend import NeuroglancerBackend as Neuroglancer
from .backends.boss.backend import Boss
from .backends.neuroglancer.backend import Neuroglancer
from .repository import Repository
from .routes import routes
from .utils.scheduler import repeat_every_seconds
Expand All @@ -29,7 +31,7 @@ def __init__(self) -> None:
self.repository: Repository
self.webknossos: WebKnossos
self.backends: Dict[str, Backend]
self.available_backends: List[Type[Backend]] = [Neuroglancer]
self.available_backends: List[Type[Backend]] = [Boss, Neuroglancer]

async def add_dataset(
self,
Expand All @@ -46,9 +48,15 @@ async def add_dataset(
await self.webknossos.report_dataset(dataset.to_webknossos())

async def load_persisted_datasets(self) -> None:
def expandvars_hook(dict: Dict[str, Any]) -> Dict[str, Any]:
for key, val in dict.items():
if isinstance(val, str):
dict[key] = os.path.expandvars(val)
return dict

try:
with open(self.config["datasets_path"]) as datasets_file:
datasets = json.load(datasets_file)
datasets = json.load(datasets_file, object_hook=expandvars_hook)
except FileNotFoundError:
datasets = {}
await asyncio.gather(
Expand Down Expand Up @@ -80,14 +88,13 @@ async def load_persisted_datasets(self) -> None:
async def setup(app: Server, loop: Loop) -> None:
def instanciate_backend(backend_class: Type[Backend]) -> Tuple[str, Backend]:
backend_name = backend_class.name()
config = app.config["backends"][backend_name]
config = app.config["backends"].get(backend_name, {})
return (backend_name, backend_class(config, app.http_client))

app.http_client = await ClientSession(raise_for_status=True).__aenter__()
app.repository = Repository()
app.webknossos = WebKnossos(app.config, app.http_client)
app.backends = dict(map(instanciate_backend, app.available_backends))
# await app.load_persisted_datasets()


@app.listener("before_server_stop")
Expand Down
3 changes: 1 addition & 2 deletions wkconnect/backends/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,8 @@ def to_webknossos(self) -> WKDataSource:

class Backend(metaclass=ABCMeta):
@classmethod
@abstractmethod
def name(cls) -> str:
pass
return cls.__name__.lower()

@abstractmethod
def __init__(self, config: Dict, http_client: ClientSession) -> None:
Expand Down
Empty file.
Loading

0 comments on commit df2781d

Please sign in to comment.