This repository has been archived by the owner on May 31, 2023. It is now read-only.

BOSS backend #59

Merged: 16 commits, Mar 5, 2019

Changes from 11 commits
1 change: 1 addition & 0 deletions Pipfile
@@ -15,6 +15,7 @@ pylint = "*"
 [packages]
 aiohttp = "*"
 async-lru = "*"
+blosc = "*"
 compressed-segmentation = ">=1.0.0"
 jpeg4py = "*"
 numpy = "*"
145 changes: 74 additions & 71 deletions Pipfile.lock

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions benchmarks/decode_jpg.py
@@ -3,18 +3,18 @@
 import numpy as np
 from aiohttp import ClientSession
 
-from wkconnect.backends.neuroglancer.backend import NeuroglancerBackend
+from wkconnect.backends.neuroglancer.backend import Neuroglancer
 from wkconnect.utils.types import Vec3D
 
-neuroglancer: NeuroglancerBackend
+neuroglancer: Neuroglancer
 data: np.ndarray
 
 
 async def setup() -> None:
     global neuroglancer
     global data
     http_client = await ClientSession().__aenter__()
-    neuroglancer = NeuroglancerBackend({}, http_client)
+    neuroglancer = Neuroglancer({}, http_client)
 
     data_url = "https://storage.googleapis.com/neuroglancer-public-data/kasthuri2011/image/24_24_30/896-960_1152-1216_1472-1536"
18 changes: 18 additions & 0 deletions data/datasets.json
@@ -1,4 +1,22 @@
 {
+  "boss": {
+    "Connectomics_Department": {
+      "ara": {
+        "domain": "https://api.boss.neurodata.io",
+        "collection": "ara_2016",
+        "experiment": "sagittal_50um",
+        "username": "jstriebel",
+        "password": "{,vEzT7J?-5_"
+      },
+      "kasthuri2011-neurodata-io": {
+        "domain": "https://api.boss.neurodata.io",
+        "collection": "kasthuri",
+        "experiment": "kasthuri11",
+        "username": "jstriebel",
Contributor (Author):

@normanrz I would like to check in one or two datasets so that there is something to try out immediately. Could you create a user on neurodata.io with scmboy for that?

Contributor (Author):

or @hotzenklotz? ^

Member:

Hmm. I think it is reasonable to ask people to create an account when they want to use the boss integration. I wouldn't want to put credentials into the repo.

Contributor (Author):

When using it with wk, one will be asked via the frontend anyway. This is rather meant for development, where I would find it useful to have one or two demo datasets in the repo that work out of the box (for dev & review). The credentials cannot be used for anything except retrieving data from neurodata.io and possibly exceeding a rate limit, if there is one.

Contributor (Author):

I could also read credentials from a gitignored .env file as a fallback. That would make it easier to develop locally without worrying about accidentally checking in personal credentials.

Contributor (Author):

I implemented the second option (env vars) now; this seems quite useful.
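A minimal sketch of what such an env-var fallback could look like; the variable names and helper function here are hypothetical, not the exact implementation from this PR:

```python
import os
from typing import Dict


def with_credential_fallback(dataset: Dict[str, str]) -> Dict[str, str]:
    # Prefer credentials from the environment (e.g. exported from a
    # gitignored .env file) over values checked into datasets.json.
    # BOSS_USERNAME / BOSS_PASSWORD are hypothetical variable names.
    return {
        **dataset,
        "username": os.environ.get("BOSS_USERNAME", dataset.get("username", "")),
        "password": os.environ.get("BOSS_PASSWORD", dataset.get("password", "")),
    }
```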

"password": "{,vEzT7J?-5_"
}
}
},
"neuroglancer": {
"Connectomics_Department": {
"fafb_v14": {
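For context on these entries and the new blosc dependency in the Pipfile: the public Boss API serves volume data through a cutout endpoint that can return blosc-compressed buffers. A rough sketch of how a dataset entry might translate into a request — the channel name, token handling, dtype, and endpoint layout are assumptions based on the public Boss API docs, not taken from this diff:

```python
import blosc
import numpy as np
import requests

# Values from the kasthuri2011-neurodata-io entry above; "image" is an assumed channel.
domain = "https://api.boss.neurodata.io"
collection, experiment, channel = "kasthuri", "kasthuri11", "image"
resolution = 0
x, y, z = "0:512", "0:512", "0:16"  # Boss ranges are start:stop, in voxels

# The Boss API authenticates with a bearer token; exchanging the username/password
# from datasets.json for a token is omitted here.
url = f"{domain}/v1/cutout/{collection}/{experiment}/{channel}/{resolution}/{x}/{y}/{z}/"
response = requests.get(
    url,
    headers={"Authorization": "Token <token>", "Accept": "application/blosc"},
)
response.raise_for_status()

# Cutouts arrive blosc-compressed and z/y/x ordered; uint8 assumed for image data.
data = np.frombuffer(blosc.decompress(response.content), dtype=np.uint8)
data = data.reshape(16, 512, 512)
```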
3 changes: 2 additions & 1 deletion stubs/compressed_segmentation.pyi
@@ -1,6 +1,7 @@
-import numpy as np
 from typing import Tuple
 
+import numpy as np
+
 def decompress(
     encoded: bytes,
     volume_size: Tuple[int, int, int],
8 changes: 4 additions & 4 deletions wkconnect/__main__.py
@@ -12,7 +12,8 @@
 from uvloop import Loop
 
 from .backends.backend import Backend
-from .backends.neuroglancer.backend import NeuroglancerBackend as Neuroglancer
+from .backends.boss.backend import Boss
+from .backends.neuroglancer.backend import Neuroglancer
 from .repository import Repository
 from .routes import routes
 from .utils.scheduler import repeat_every_seconds
@@ -29,7 +30,7 @@ def __init__(self) -> None:
         self.repository: Repository
         self.webknossos: WebKnossos
         self.backends: Dict[str, Backend]
-        self.available_backends: List[Type[Backend]] = [Neuroglancer]
+        self.available_backends: List[Type[Backend]] = [Boss, Neuroglancer]

     async def add_dataset(
         self,
@@ -80,14 +81,13 @@ async def load_persisted_datasets(self) -> None:
 async def setup(app: Server, loop: Loop) -> None:
     def instanciate_backend(backend_class: Type[Backend]) -> Tuple[str, Backend]:
         backend_name = backend_class.name()
-        config = app.config["backends"][backend_name]
+        config = app.config["backends"].get(backend_name, {})
         return (backend_name, backend_class(config, app.http_client))
 
     app.http_client = await ClientSession(raise_for_status=True).__aenter__()
     app.repository = Repository()
     app.webknossos = WebKnossos(app.config, app.http_client)
     app.backends = dict(map(instanciate_backend, app.available_backends))
     # await app.load_persisted_datasets()
 
 
 @app.listener("before_server_stop")
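The change from `app.config["backends"][backend_name]` to `.get(backend_name, {})` means a backend no longer needs its own section in the config to be instantiated; missing entries fall back to an empty dict, which is what lets the newly registered Boss backend start without a checked-in config. A reduced illustration of the effect (the config contents are made up):

```python
from typing import Dict

# Hypothetical app.config["backends"]: only neuroglancer has a section.
backends_config: Dict[str, Dict] = {"neuroglancer": {"example_option": True}}

# Before this change, looking up "boss" raised a KeyError at startup:
#   backends_config["boss"]
# Now it falls back to an empty config instead:
boss_config = backends_config.get("boss", {})
assert boss_config == {}
```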
3 changes: 1 addition & 2 deletions wkconnect/backends/backend.py
@@ -19,9 +19,8 @@ def to_webknossos(self) -> WKDataSource:

 class Backend(metaclass=ABCMeta):
     @classmethod
-    @abstractmethod
     def name(cls) -> str:
-        pass
+        return cls.__name__.lower()
 
     @abstractmethod
     def __init__(self, config: Dict, http_client: ClientSession) -> None:
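With this default, `name()` derives a backend's registry key from its class name, so concrete backends such as `Boss` and `Neuroglancer` no longer have to override it. A quick standalone illustration (the bare subclasses are just for demonstration):

```python
from abc import ABCMeta


class Backend(metaclass=ABCMeta):
    @classmethod
    def name(cls) -> str:
        # Default registry key: the lowercased class name.
        return cls.__name__.lower()


class Boss(Backend):
    pass


class Neuroglancer(Backend):
    pass


assert Boss.name() == "boss"
assert Neuroglancer.name() == "neuroglancer"
```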
Empty file.