diff --git a/applications/netpyne/Dockerfile b/applications/netpyne/Dockerfile
index e7891f19..ff605093 100644
--- a/applications/netpyne/Dockerfile
+++ b/applications/netpyne/Dockerfile
@@ -1,8 +1,8 @@
 FROM node:14 as jsbuild
 ENV REPO=https://github.com/MetaCell/NetPyNE-UI.git
-ENV BRANCH_TAG=release/1.0.0-notebook-fix
+ENV BRANCH_TAG=release/1.0.0
 ENV FOLDER=netpyne
-RUN echo "no-cache 2023-7-6-3"
+RUN echo "no-cache 2023-7-14"
 RUN git clone $REPO -b $BRANCH_TAG $FOLDER
 RUN rm -Rf .git
@@ -79,7 +79,7 @@
 RUN chown -R $NB_UID /opt/workspace/tutorials
 RUN ln -s /opt/workspace workspace
 
 RUN jupyter labextension disable @jupyterlab/hub-extension
-
+RUN wget --no-check-certificate -O $NP_LFPYKIT_HEAD_FILE https://www.parralab.org/nyhead/sa_nyhead.mat
 
 USER $NB_UID
diff --git a/applications/osb-portal/deploy/values.yaml b/applications/osb-portal/deploy/values.yaml
index 487dad53..ffd00262 100644
--- a/applications/osb-portal/deploy/values.yaml
+++ b/applications/osb-portal/deploy/values.yaml
@@ -1,5 +1,5 @@
 harness:
-  subdomain: app
+  subdomain: www
   secured: false
   deployment:
     auto: true
diff --git a/applications/osb-portal/src/pages/WorkspacesPage.tsx b/applications/osb-portal/src/pages/WorkspacesPage.tsx
index f9ce7832..8b6bae0f 100644
--- a/applications/osb-portal/src/pages/WorkspacesPage.tsx
+++ b/applications/osb-portal/src/pages/WorkspacesPage.tsx
@@ -286,7 +286,7 @@ export const WorkspacesPage = (props: WorkspacesPageProps) => {
-        {workspaces?.length === 0 ? (
+        {workspaces?.length === 0 && props.user ? (
@@ ... @@ ... Pagination:
         for obj in objects.items:
             self._calculated_fields_populate(obj)
+        objects.items = [self.to_dto(obj) for obj in objects.items]
         return objects
 
     @classmethod
@@ -181,13 +184,18 @@ def post(self, body):
         if 'user_id' not in body:
             body['user_id'] = keycloak_user_id()
         self.check_max_num_workspaces_per_user(body['user_id'])
 
-        for r in body.get("resources", []):
-            r.update({"origin": json.dumps(r.get("origin"))})
-        workspace = Workspace.from_dict(body)  # Validate
+
+
         workspace = super().post(body)
+
         create_volume(name=self.get_pvc_name(workspace.id),
                       size=self.get_workspace_volume_size(workspace))
+
+
+        for workspace_resource in workspace.resources:
+            WorkspaceresourceService.handle_resource_data(workspace_resource)
+
         return workspace
 
     def put(self, body, id_):
@@ -204,28 +212,27 @@ def clone(self, workspace_id):
         user_id = keycloak_user_id()
         self.check_max_num_workspaces_per_user(user_id)
         from workspaces.service.workflow import clone_workspaces_content
-        workspace = self.get(workspace_id)
-        if workspace is None:
-            raise Exception(
-                f"Cannot clone workspace with id {workspace_id}: not found.")
-
-        cloned = dict(
-            name=f"Clone of {workspace['name']}",
-            tags=workspace['tags'],
-            user_id=user_id,
-
-            description=workspace['description'],
-            publicable=False,
-            featured=False
-        )
-        if workspace['thumbnail']:
-            cloned['thumbnail'] = workspace['thumbnail']
-
-        cloned = self.repository.post(cloned, do_post=False)
-
-        create_volume(name=self.get_pvc_name(cloned.id),
-                      size=self.get_workspace_volume_size(workspace))
-        clone_workspaces_content(workspace_id, cloned.id)
+        with db.session.no_autoflush:
+            workspace: TWorkspaceEntity = self.repository.clone(workspace_id)
+            if workspace is None:
+                raise NotFoundException(
+                    f"Cannot clone workspace with id {workspace_id}: not found.")
+
+            workspace.name = f"Clone of {workspace.name}"
+            workspace.user_id = user_id
+            workspace.publicable = False
+            workspace.featured = False
+            workspace.timestamp_created = None
+            workspace.resources = []
+
+
+
+
+            cloned = self.repository.post(workspace)
+
+            create_volume(name=self.get_pvc_name(cloned.id),
+                          size=self.get_workspace_volume_size(workspace))
+            clone_workspaces_content(workspace_id, cloned.id)
         return cloned
 
     def is_authorized(self, workspace):
@@ -255,30 +262,38 @@ def search(self, page=1, per_page=20, *args, **kwargs) -> Pagination:
         if current_user_id is not None:
             # Admins see all workspaces, non admin users can see only their own workspaces
             if not get_auth_client().user_has_realm_role(user_id=current_user_id, role="administrator"):
-                objects = self.repository.search(
+                paged_results = self.repository.search(
                     page=page, per_page=per_page, user_id=current_user_id, *args, **kwargs)
             else:
-                objects = self.repository.search(
+                paged_results = self.repository.search(
                     page=page, per_page=per_page, user_id=current_user_id, show_all=True, *args, **kwargs)
         else:
-            objects = self.repository.search(
+            paged_results = self.repository.search(
                 page=page, per_page=per_page, *args, **kwargs)
-        for obj in objects.items:
-            self._calculated_fields_populate(obj)
-        return objects
+        with db.session.no_autoflush:
+            paged_results.items = [self.to_dto(w) for w in paged_results.items]
+
+        for obj in paged_results.items:
+            self._calculated_fields_populate(obj)
+        return paged_results
 
     @classmethod
     def to_dto(cls, workspace_entity: TWorkspaceEntity) -> Workspace:
-        workspace = super().to_dto(workspace_entity)
-        if not workspace.resources:
-            workspace.resources = []
+
+        workspace = cls.dict_to_dto(dao_entity2dict(workspace_entity))
+        for resource in workspace_entity.resources:
+            resource.origin = json.loads(resource.origin)
+        workspace.resources = [WorkspaceresourceService.to_dto(r) for r in workspace_entity.resources] if workspace_entity.resources else []
         return workspace
 
     @classmethod
     def to_dao(cls, d: dict) -> TWorkspaceEntity:
-
+
+        resources = d.get("resources", [])
+        d["resources"] = []
         workspace: TWorkspaceEntity = super().to_dao(d)
         workspace.tags = TagRepository().get_tags_daos(workspace.tags)
+        workspace.resources = [WorkspaceresourceService.to_dao(r) for r in resources]
         return workspace
 
     def get(self, id_):
@@ -315,7 +330,7 @@ def get(self, id_):
                 WorkspaceResource.from_dict(
                     {
                         "id": -1,
-                        "name": "Importing resources into workspace",
+                        "name": "Refreshing resources",
                         "origin": {"path": fake_path},
                         "resource_type": ResourceType.U,
                         "workspace_id": workspace.id,
@@ -341,11 +356,12 @@ def user(self, workspace):
 
     def delete(self, id):
         resource_repository = WorkspaceResourceRepository()
-        workspace = super().get(id)
+        workspace = self.repository.get(id)
 
-        for resource in workspace.resources:
-            logger.debug("deleting resource %s", resource.id)
-            resource_repository.delete(resource.id)
+        if workspace.resources:
+            for resource in workspace.resources:
+                logger.debug("deleting resource %s", resource.id)
+                resource_repository.delete(resource.id)
         logger.info("deleting workspace %s", id)
         super().delete(id)
         logger.info("deleted workspace %s", id)
@@ -421,9 +437,12 @@ class WorkspaceresourceService(BaseModelService):
     @classmethod
     def to_dao(cls, ws_dict: dict) -> TWorkspaceResourceEntity:
         if "origin" in ws_dict:
-            ws_dict.update({"origin": json.dumps(ws_dict.get("origin"))})
-
-        workspace_resource = super().to_dao(ws_dict)
+            wro_dao_dict = dict(ws_dict.get("origin"))
+            ws_dict.update({"origin": json.dumps(wro_dao_dict)})
+        if 'path' in ws_dict:
+            ws_dict['folder'] = ws_dict['path']
+            del ws_dict['path']
+        workspace_resource: TWorkspaceResourceEntity = super().to_dao(ws_dict)
         if not workspace_resource.resource_type or workspace_resource.resource_type == "u":
             origin = json.loads(workspace_resource.origin)
             workspace_resource.resource_type = guess_resource_type(
@@ -434,7 +453,7 @@ def to_dao(cls, ws_dict: dict) -> TWorkspaceResourceEntity:
 
     @classmethod
     def dict_to_dto(cls, d) -> WorkspaceResource:
-        if 'origin' in d:
+        if 'origin' in d and isinstance(d['origin'], str):
             d['origin'] = json.loads(d['origin'])
 
         workspace_resource: WorkspaceResource = super().dict_to_dto(d)
@@ -453,10 +472,14 @@ def dict_to_dto(cls, d) -> WorkspaceResource:
 
     def post(self, body) -> WorkspaceResource:
         workspace_resource = super().post(body)
-        if workspace_resource.status == "p" and workspace_resource.origin:
+        self.handle_resource_data(workspace_resource)
+        return workspace_resource
+
+    @staticmethod
+    def handle_resource_data(workspace_resource: WorkspaceResource) -> WorkspaceResource:
+        if workspace_resource.status == "p" and workspace_resource.origin:
             from workspaces.helpers.etl_helpers import copy_workspace_resource
             copy_workspace_resource(workspace_resource)
-        return workspace_resource
 
     def is_authorized(self, resource: WorkspaceResourceEntity):
         # A resource is authorized if belongs to an authorized workspace
diff --git a/applications/workspaces/server/workspaces/utils.py b/applications/workspaces/server/workspaces/utils.py
index 3a393e69..6c06661e 100644
--- a/applications/workspaces/server/workspaces/utils.py
+++ b/applications/workspaces/server/workspaces/utils.py
@@ -5,7 +5,7 @@
 from cloudharness import log as logger
 from cloudharness.auth import AuthClient
 
-from workspaces.models import WorkspaceResourceEntity, ResourceType
+from workspaces.models import WorkspaceResourceEntity, ResourceType, WorkspaceEntity
 
 
 def get_keycloak_data():
@@ -27,18 +27,24 @@ def get_pvc_name(workspace_id):
 
 disallowed_class_types = ["BaseQuery", "type", "registry", "MetaData"]
 name_mappings = {WorkspaceResourceEntity.__name__: {"folder": "path"}}
+exclude = { WorkspaceEntity.__name__: {"resources"} }
 
 
 def dao_entity2dict(obj):
+
     disallowed_names = {name for name, value in getmembers(
-        type(obj)) if isinstance(value, FunctionType)}
+        type(obj)) if isinstance(value, FunctionType) or name in exclude.get(type(obj).__name__, ())}
+
     result = {}
 
     for name in dir(obj):
-        if name[0] != "_" and name not in disallowed_names and hasattr(obj, name):
+
+        if name not in disallowed_names and name[0] != "_" and hasattr(obj, name):
             val = getattr(obj, name)
             if not ismethod(val):
                 clas = val.__class__.__name__
+                if hasattr(val.__class__, "to_dict"):
+                    val = val.__class__.to_dict(val)
                 if clas == "InstrumentedList":
                     val = list(dao_entity2dict(r) for r in val)
                 if clas not in disallowed_class_types:
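
Reviewer note: the new module-level `exclude` map in `utils.py` keeps `dao_entity2dict` from walking the workspace's `resources` relationship, which `WorkspaceService.to_dto` now converts separately through `WorkspaceresourceService.to_dto`. Below is a minimal, self-contained sketch of that exclusion pattern; the entity classes are plain stand-ins for illustration only, not the SQLAlchemy models from `workspaces.models`.

from inspect import getmembers, ismethod
from types import FunctionType

# Hypothetical stand-ins: the real code operates on SQLAlchemy DAO entities.
class WorkspaceResourceEntity:
    def __init__(self, name):
        self.name = name

class WorkspaceEntity:
    def __init__(self, name, resources):
        self.name = name
        self.resources = resources  # relationship we do not want serialized here

# Mirrors the exclude map added above: attribute names to skip per entity type.
exclude = {WorkspaceEntity.__name__: {"resources"}}

def entity2dict(obj):
    # Skip plain functions defined on the class and any excluded attribute names.
    disallowed_names = {
        name for name, value in getmembers(type(obj))
        if isinstance(value, FunctionType) or name in exclude.get(type(obj).__name__, ())
    }
    result = {}
    for name in dir(obj):
        if name not in disallowed_names and name[0] != "_" and hasattr(obj, name):
            val = getattr(obj, name)
            if not ismethod(val):
                result[name] = val
    return result

ws = WorkspaceEntity("demo", resources=[WorkspaceResourceEntity("notebook")])
print(entity2dict(ws))  # {'name': 'demo'}: 'resources' is left to the resource service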