diff --git a/conda-store-server/conda_store_server/api.py b/conda-store-server/conda_store_server/api.py index e02342df5..a72a78cd6 100644 --- a/conda-store-server/conda_store_server/api.py +++ b/conda-store-server/conda_store_server/api.py @@ -1,4 +1,4 @@ -from typing import List, Dict, Any, Optional, Union +from typing import List, Dict, Any import re from sqlalchemy import func, null, or_, distinct @@ -49,8 +49,8 @@ def create_namespace(db, name: str): def update_namespace( db, name: str, - metadata_: Union[Optional[Dict], None] = None, - role_mappings: Union[Optional[Dict[str, List[str]]], None] = None, + metadata_: Dict[str, Any] = None, + role_mappings: Dict[str, List[str]] = None, ): namespace = get_namespace(db, name) @@ -63,24 +63,22 @@ def update_namespace( if role_mappings is not None: # deletes all the existing role mappings ... - for rm in namespace.roles_mappings: + for rm in namespace.role_mappings: db.delete(rm) # ... before adding all the new ones mappings_orm = [] for entity, roles in role_mappings.items(): - for r in roles: - + for role in roles: mapping_orm = orm.NamespaceRoleMapping( namespace_id=namespace.id, namespace=namespace, entity=entity, - role=r, + role=role, ) - mappings_orm.append(mapping_orm) - namespace.roles_mappings = mappings_orm + namespace.role_mappings = mappings_orm db.commit() @@ -230,14 +228,6 @@ def get_specification(db, sha256: str): return db.query(orm.Specification).filter(*filters).first() -def post_specification(conda_store, specification, namespace=None): - return conda_store.register_environment(specification, namespace, force=True) - - -def post_solve(conda_store, specification: schema.CondaSpecification): - return conda_store.register_solve(specification) - - def create_solve(db, specification_id: int): solve = orm.Solve(specification_id=specification_id) db.add(solve) diff --git a/conda-store-server/conda_store_server/app.py b/conda-store-server/conda_store_server/app.py index 2a96af6e5..4fa8a5ce4 100644 --- 
a/conda-store-server/conda_store_server/app.py +++ b/conda-store-server/conda_store_server/app.py @@ -16,7 +16,8 @@ Union, ) from traitlets.config import LoggingConfigurable -from sqlalchemy.pool import NullPool +from sqlalchemy.pool import QueuePool +from sqlalchemy.orm import Session import pydantic from conda_store_server import ( @@ -32,10 +33,13 @@ def conda_store_validate_specification( - conda_store: "CondaStore", namespace: str, specification: schema.CondaSpecification + db: Session, + conda_store: "CondaStore", + namespace: str, + specification: schema.CondaSpecification, ) -> schema.CondaSpecification: settings = conda_store.get_settings( - namespace=namespace, environment_name=specification.name + db, namespace=namespace, environment_name=specification.name ) specification = environment.validate_environment_channels(specification, settings) @@ -50,12 +54,13 @@ def conda_store_validate_specification( def conda_store_validate_action( + db: Session, conda_store: "CondaStore", namespace: str, action: schema.Permissions, ) -> None: - settings = conda_store.get_settings() - system_metrics = api.get_system_metrics(conda_store.db) + settings = conda_store.get_settings(db) + system_metrics = api.get_system_metrics(db) if action in ( schema.Permissions.ENVIRONMENT_CREATE, @@ -343,21 +348,12 @@ def session_factory(self): if hasattr(self, "_session_factory"): return self._session_factory - # https://docs.sqlalchemy.org/en/14/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork - # This is the most simplistic, one shot system that prevents - # the Engine from using any connection more than once self._session_factory = orm.new_session_factory( - url=self.database_url, poolclass=NullPool + url=self.database_url, + poolclass=QueuePool, ) return self._session_factory - @property - def db(self): - # we are using a scoped_session which always returns the same - # session if within the same thread - # https://docs.sqlalchemy.org/en/14/orm/contextual.html 
- return self.session_factory() - @property def redis(self): import redis @@ -367,9 +363,8 @@ def redis(self): self._redis = redis.Redis.from_url(self.redis_url) return self._redis - @property - def configuration(self): - return orm.CondaStoreConfiguration.configuration(self.db) + def configuration(self, db: Session): + return orm.CondaStoreConfiguration.configuration(db) @property def storage(self): @@ -421,14 +416,14 @@ def celery_config(self): @property def celery_app(self): - if hasattr(self, "_celery_app"): - return self._celery_app + # if hasattr(self, "_celery_app"): + # return self._celery_app self._celery_app = Celery("tasks") self._celery_app.config_from_object(self.celery_config) return self._celery_app - def ensure_settings(self): + def ensure_settings(self, db: Session): """Ensure that conda-store traitlets settings are applied""" settings = schema.Settings( default_namespace=self.default_namespace, @@ -455,30 +450,31 @@ def ensure_settings(self): build_artifacts=self.build_artifacts, # default_docker_base_image=self.default_docker_base_image, ) - api.set_kvstore_key_values(self.db, "setting", settings.dict(), update=False) + api.set_kvstore_key_values(db, "setting", settings.dict(), update=False) - def ensure_namespace(self): + def ensure_namespace(self, db: Session): """Ensure that conda-store default namespaces exists""" - api.ensure_namespace(self.db, self.default_namespace) + api.ensure_namespace(db, self.default_namespace) def ensure_directories(self): """Ensure that conda-store filesystem directories exist""" os.makedirs(self.store_directory, exist_ok=True) - def ensure_conda_channels(self): + def ensure_conda_channels(self, db: Session): """Ensure that conda-store indexed channels and packages are in database""" self.log.info("updating conda store channels") - settings = self.get_settings() + settings = self.get_settings(db) for channel in settings.conda_indexed_channels: normalized_channel = conda_utils.normalize_channel_name( 
settings.conda_channel_alias, channel ) - api.ensure_conda_channel(self.db, normalized_channel) + api.ensure_conda_channel(db, normalized_channel) def set_settings( self, + db: Session, namespace: str = None, environment_name: str = None, data: Dict[str, Any] = {}, @@ -512,10 +508,10 @@ def set_settings( else: prefix = "setting" - api.set_kvstore_key_values(self.db, prefix, data) + api.set_kvstore_key_values(db, prefix, data) def get_settings( - self, namespace: str = None, environment_name: str = None + self, db: Session, namespace: str = None, environment_name: str = None ) -> schema.Settings: # setting logic is intentionally done in python code # rather than using the database for merges and ordering @@ -530,29 +526,31 @@ def get_settings( settings = {} for prefix in prefixes: - settings.update(api.get_kvstore_key_values(self.db, prefix)) + settings.update(api.get_kvstore_key_values(db, prefix)) return schema.Settings(**settings) - def register_solve(self, specification: schema.CondaSpecification): + def register_solve(self, db: Session, specification: schema.CondaSpecification): """Registers a solve for a given specification""" - settings = self.get_settings() + settings = self.get_settings(db) self.validate_action( + db=db, conda_store=self, namespace="solve", action=schema.Permissions.ENVIRONMENT_SOLVE, ) specification_model = self.validate_specification( + db=db, conda_store=self, namespace="solve", specification=specification, ) - specification_orm = api.ensure_specification(self.db, specification_model) - solve = api.create_solve(self.db, specification_orm.id) - self.db.commit() + specification_orm = api.ensure_specification(db, specification_model) + solve = api.create_solve(db, specification_orm.id) + db.commit() self.celery_app @@ -568,28 +566,34 @@ def register_solve(self, specification: schema.CondaSpecification): return task_id, solve.id def register_environment( - self, specification: dict, namespace: str = None, force: bool = True + self, + db: 
Session, + specification: dict, + namespace: str = None, + force: bool = True, ): """Register a given specification to conda store with given namespace/name.""" - settings = self.get_settings() + settings = self.get_settings(db) namespace = namespace or settings.default_namespace - namespace = api.ensure_namespace(self.db, name=namespace) + namespace = api.ensure_namespace(db, name=namespace) self.validate_action( + db=db, conda_store=self, namespace=namespace.name, action=schema.Permissions.ENVIRONMENT_CREATE, ) specification_model = self.validate_specification( + db=db, conda_store=self, namespace=namespace.name, specification=schema.CondaSpecification.parse_obj(specification), ) spec_sha256 = utils.datastructure_hash(specification_model.dict()) - matching_specification = api.get_specification(self.db, sha256=spec_sha256) + matching_specification = api.get_specification(db, sha256=spec_sha256) if ( matching_specification is not None and not force @@ -600,45 +604,44 @@ def register_environment( ): return None - specification = api.ensure_specification(self.db, specification_model) + specification = api.ensure_specification(db, specification_model) environment_was_empty = ( - api.get_environment( - self.db, name=specification.name, namespace_id=namespace.id - ) + api.get_environment(db, name=specification.name, namespace_id=namespace.id) is None ) environment = api.ensure_environment( - self.db, + db, name=specification.name, namespace_id=namespace.id, description=specification.spec["description"], ) - build = self.create_build(environment.id, specification.sha256) + build = self.create_build(db, environment.id, specification.sha256) if environment_was_empty: environment.current_build = build - self.db.commit() + db.commit() return build.id - def create_build(self, environment_id: int, specification_sha256: str): - environment = api.get_environment(self.db, id=environment_id) + def create_build(self, db: Session, environment_id: int, specification_sha256: str): + 
environment = api.get_environment(db, id=environment_id) self.validate_action( + db=db, conda_store=self, namespace=environment.namespace.name, action=schema.Permissions.ENVIRONMENT_UPDATE, ) settings = self.get_settings( - namespace=environment.namespace.name, environment_name=environment.name + db, namespace=environment.namespace.name, environment_name=environment.name ) - specification = api.get_specification(self.db, specification_sha256) + specification = api.get_specification(db, specification_sha256) build = api.create_build( - self.db, environment_id=environment_id, specification_id=specification.id + db, environment_id=environment_id, specification_id=specification.id ) - self.db.commit() + db.commit() self.celery_app @@ -685,18 +688,21 @@ def create_build(self, environment_id: int, specification_sha256: str): return build - def update_environment_build(self, namespace: str, name: str, build_id: int): + def update_environment_build( + self, db: Session, namespace: str, name: str, build_id: int + ): self.validate_action( + db=db, conda_store=self, namespace=namespace, action=schema.Permissions.ENVIRONMENT_UPDATE, ) - build = api.get_build(self.db, build_id) + build = api.get_build(db, build_id) if build is None: raise utils.CondaStoreError(f"build id={build_id} does not exist") - environment = api.get_environment(self.db, namespace=namespace, name=name) + environment = api.get_environment(db, namespace=namespace, name=name) if environment is None: raise utils.CondaStoreError( f"environment namespace={namespace} name={name} does not exist" @@ -713,7 +719,7 @@ def update_environment_build(self, namespace: str, name: str, build_id: int): ) environment.current_build_id = build.id - self.db.commit() + db.commit() self.celery_app # must import tasks after a celery app has been initialized @@ -721,24 +727,27 @@ def update_environment_build(self, namespace: str, name: str, build_id: int): tasks.task_update_environment_build.si(environment.id).apply_async() - def 
update_environment_description(self, namespace, name, description): - environment = api.get_environment(self.db, namespace=namespace, name=name) + def update_environment_description( + self, db: Session, namespace: str, name: str, description: str + ): + environment = api.get_environment(db, namespace=namespace, name=name) if environment is None: raise utils.CondaStoreError( f"environment namespace={namespace} name={name} does not exist" ) environment.description = description - self.db.commit() + db.commit() - def delete_namespace(self, namespace: str): + def delete_namespace(self, db: Session, namespace: str): self.validate_action( + db=db, conda_store=self, namespace=namespace, action=schema.Permissions.NAMESPACE_DELETE, ) - namespace = api.get_namespace(self.db, name=namespace) + namespace = api.get_namespace(db, name=namespace) if namespace is None: raise utils.CondaStoreError(f"namespace={namespace} does not exist") @@ -748,7 +757,7 @@ def delete_namespace(self, namespace: str): environment_orm.deleted_on = utcnow for build in environment_orm.builds: build.deleted_on = utcnow - self.db.commit() + db.commit() self.celery_app @@ -757,14 +766,15 @@ def delete_namespace(self, namespace: str): tasks.task_delete_namespace.si(namespace.id).apply_async() - def delete_environment(self, namespace: str, name: str): + def delete_environment(self, db: Session, namespace: str, name: str): self.validate_action( + db=db, conda_store=self, namespace=namespace, action=schema.Permissions.ENVIRONMENT_DELETE, ) - environment = api.get_environment(self.db, namespace=namespace, name=name) + environment = api.get_environment(db, namespace=namespace, name=name) if environment is None: raise utils.CondaStoreError( f"environment namespace={namespace} name={name} does not exist" @@ -774,7 +784,7 @@ def delete_environment(self, namespace: str, name: str): environment.deleted_on = utcnow for build in environment.builds: build.deleted_on = utcnow - self.db.commit() + db.commit() 
self.celery_app @@ -783,10 +793,11 @@ def delete_environment(self, namespace: str, name: str): tasks.task_delete_environment.si(environment.id).apply_async() - def delete_build(self, build_id: int): - build = api.get_build(self.db, build_id) + def delete_build(self, db: Session, build_id: int): + build = api.get_build(db, build_id) self.validate_action( + db=db, conda_store=self, namespace=build.environment.namespace.name, action=schema.Permissions.BUILD_DELETE, @@ -801,7 +812,7 @@ def delete_build(self, build_id: int): ) build.deleted_on = datetime.datetime.utcnow() - self.db.commit() + db.commit() self.celery_app diff --git a/conda-store-server/conda_store_server/build.py b/conda-store-server/conda_store_server/build.py index 042def4ae..7694f415a 100644 --- a/conda-store-server/conda_store_server/build.py +++ b/conda-store-server/conda_store_server/build.py @@ -7,17 +7,12 @@ import json import yaml +from sqlalchemy.orm import Session from conda_store_server import conda_utils, orm, utils, schema, action -def set_build_started(conda_store, build): - build.status = schema.BuildStatus.BUILDING - build.started_on = datetime.datetime.utcnow() - conda_store.db.commit() - - -def append_to_logs(conda_store, build, logs: typing.Union[str, bytes]): +def append_to_logs(db: Session, conda_store, build, logs: typing.Union[str, bytes]): try: current_logs = conda_store.storage.get(build.log_key) except Exception: @@ -27,7 +22,7 @@ def append_to_logs(conda_store, build, logs: typing.Union[str, bytes]): logs = logs.encode("utf-8") conda_store.storage.set( - conda_store.db, + db, build.id, build.log_key, current_logs + logs, @@ -36,13 +31,19 @@ def append_to_logs(conda_store, build, logs: typing.Union[str, bytes]): ) -def set_build_failed(conda_store, build): +def set_build_started(db: Session, build: orm.Build): + build.status = schema.BuildStatus.BUILDING + build.started_on = datetime.datetime.utcnow() + db.commit() + + +def set_build_failed(db: Session, build: orm.Build): 
build.status = schema.BuildStatus.FAILED build.ended_on = datetime.datetime.utcnow() - conda_store.db.commit() + db.commit() -def set_build_completed(conda_store, build): +def set_build_completed(db: Session, conda_store, build: orm.Build): build.status = schema.BuildStatus.COMPLETED build.ended_on = datetime.datetime.utcnow() @@ -51,26 +52,28 @@ def set_build_completed(conda_store, build): artifact_type=schema.BuildArtifactType.DIRECTORY, key=str(build.build_path(conda_store)), ) - conda_store.db.add(directory_build_artifact) + db.add(directory_build_artifact) build.environment.current_build = build build.environment.specification = build.specification - conda_store.db.commit() + db.commit() -def build_conda_environment(conda_store, build): +def build_conda_environment(db: Session, conda_store, build): """Build a conda environment with set uid/gid/and permissions and symlink the build to a named environment """ - set_build_started(conda_store, build) + set_build_started(db, build) append_to_logs( + db, conda_store, build, f"starting build of conda environment {datetime.datetime.utcnow()} UTC\n", ) settings = conda_store.get_settings( + db=db, namespace=build.environment.namespace.name, environment_name=build.environment.name, ) @@ -92,7 +95,7 @@ def build_conda_environment(conda_store, build): ) conda_store.storage.set( - conda_store.db, + db, build.id, build.conda_lock_key, json.dumps(context.result, indent=4).encode("utf-8"), @@ -101,6 +104,7 @@ def build_conda_environment(conda_store, build): ) append_to_logs( + db, conda_store, build, "::group::action_solve_lockfile\n" @@ -114,6 +118,7 @@ def build_conda_environment(conda_store, build): pkgs_dir=conda_utils.conda_root_package_dir(), ) append_to_logs( + db, conda_store, build, "::group::action_fetch_and_extract_conda_packages\n" @@ -126,6 +131,7 @@ def build_conda_environment(conda_store, build): conda_prefix=conda_prefix, ) append_to_logs( + db, conda_store, build, "::group::action_install_lockfile\n" @@ 
-143,7 +149,7 @@ def build_conda_environment(conda_store, build): ) action.action_add_conda_prefix_packages( - db=conda_store.db, + db=db, conda_prefix=conda_prefix, build_id=build.id, ) @@ -151,24 +157,24 @@ def build_conda_environment(conda_store, build): context = action.action_get_conda_prefix_stats(conda_prefix) build.size = context.result["disk_usage"] - set_build_completed(conda_store, build) + set_build_completed(db, conda_store, build) except subprocess.CalledProcessError as e: conda_store.log.exception(e) - append_to_logs(conda_store, build, e.output) - set_build_failed(conda_store, build) + append_to_logs(db, conda_store, build, e.output) + set_build_failed(db, build) raise e except Exception as e: conda_store.log.exception(e) - append_to_logs(conda_store, build, traceback.format_exc()) - set_build_failed(conda_store, build) + append_to_logs(db, conda_store, build, traceback.format_exc()) + set_build_failed(db, build) raise e -def solve_conda_environment(conda_store, solve): - settings = conda_store.get_settings() +def solve_conda_environment(db: Session, conda_store, solve: orm.Solve): + settings = conda_store.get_settings(db=db) solve.started_on = datetime.datetime.utcnow() - conda_store.db.commit() + db.commit() context = action.action_solve_lockfile( conda_command=settings.conda_command, @@ -178,18 +184,19 @@ def solve_conda_environment(conda_store, solve): conda_lock_spec = context.result action.action_add_lockfile_packages( - db=conda_store.db, + db=db, conda_lock_spec=conda_lock_spec, solve_id=solve.id, ) solve.ended_on = datetime.datetime.utcnow() - conda_store.db.commit() + db.commit() -def build_conda_env_export(conda_store, build): +def build_conda_env_export(db: Session, conda_store, build: orm.Build): conda_prefix = build.build_path(conda_store) settings = conda_store.get_settings( + db=db, namespace=build.environment.namespace.name, environment_name=build.environment.name, ) @@ -198,6 +205,7 @@ def build_conda_env_export(conda_store, 
build): conda_command=settings.conda_command, conda_prefix=conda_prefix ) append_to_logs( + db, conda_store, build, "::group::action_generate_conda_export\n" @@ -208,7 +216,7 @@ def build_conda_env_export(conda_store, build): conda_prefix_export = yaml.dump(context.result).encode("utf-8") conda_store.storage.set( - conda_store.db, + db, build.id, build.conda_env_export_key, conda_prefix_export, @@ -217,7 +225,7 @@ def build_conda_env_export(conda_store, build): ) -def build_conda_pack(conda_store, build): +def build_conda_pack(db: Session, conda_store, build: orm.Build): conda_prefix = build.build_path(conda_store) with utils.timer( @@ -229,6 +237,7 @@ def build_conda_pack(conda_store, build): conda_prefix=conda_prefix, output_filename=output_filename ) append_to_logs( + db, conda_store, build, "::group::action_generate_conda_pack\n" @@ -236,7 +245,7 @@ def build_conda_pack(conda_store, build): + "\n::endgroup::\n", ) conda_store.storage.fset( - conda_store.db, + db, build.id, build.conda_pack_key, output_filename, @@ -245,9 +254,10 @@ def build_conda_pack(conda_store, build): ) -def build_conda_docker(conda_store, build): +def build_conda_docker(db: Session, conda_store, build: orm.Build): conda_prefix = build.build_path(conda_store) settings = conda_store.get_settings( + db=db, namespace=build.environment.namespace.name, environment_name=build.environment.name, ) @@ -267,6 +277,7 @@ def build_conda_docker(conda_store, build): output_image_tag=build.build_key, ) append_to_logs( + db, conda_store, build, "::group::action_generate_conda_docker\n" @@ -277,11 +288,13 @@ def build_conda_docker(conda_store, build): image = context.result if schema.BuildArtifactType.DOCKER_MANIFEST in settings.build_artifacts: - conda_store.container_registry.store_image(conda_store, build, image) + conda_store.container_registry.store_image( + db, conda_store, build, image + ) if schema.BuildArtifactType.CONTAINER_REGISTRY in settings.build_artifacts: - 
conda_store.container_registry.push_image(conda_store, build, image) + conda_store.container_registry.push_image(db, build, image) except Exception as e: conda_store.log.exception(e) - append_to_logs(conda_store, build, traceback.format_exc()) + append_to_logs(db, conda_store, build, traceback.format_exc()) raise e diff --git a/conda-store-server/conda_store_server/orm.py b/conda-store-server/conda_store_server/orm.py index 2265fa33f..793ad3580 100644 --- a/conda-store-server/conda_store_server/orm.py +++ b/conda-store-server/conda_store_server/orm.py @@ -22,7 +22,6 @@ from sqlalchemy.orm import ( sessionmaker, relationship, - scoped_session, backref, declarative_base, validates, @@ -56,9 +55,9 @@ class Namespace(Base): deleted_on = Column(DateTime, default=None) - metadata_ = Column(JSON) + metadata_ = Column(JSON, default=dict) - roles_mappings = relationship("NamespaceRoleMapping", back_populates="namespace") + role_mappings = relationship("NamespaceRoleMapping", back_populates="namespace") class NamespaceRoleMapping(Base): @@ -68,7 +67,7 @@ class NamespaceRoleMapping(Base): id = Column(Integer, primary_key=True) namespace_id = Column(Integer, ForeignKey("namespace.id"), nullable=False) - namespace = relationship(Namespace, back_populates="roles_mappings") + namespace = relationship(Namespace, back_populates="role_mappings") # arn e.g. 
/ like `quansight-*/*` or `quansight-devops/*` # The entity must match with ARN_ALLOWED defined in schema.py @@ -691,5 +690,5 @@ class KeyValueStore(Base): def new_session_factory(url="sqlite:///:memory:", reset=False, **kwargs): engine = create_engine(url, **kwargs) - session_factory = scoped_session(sessionmaker(bind=engine)) + session_factory = sessionmaker(bind=engine) return session_factory diff --git a/conda-store-server/conda_store_server/registry.py b/conda-store-server/conda_store_server/registry.py index 1afdb9daa..f59fdff10 100644 --- a/conda-store-server/conda_store_server/registry.py +++ b/conda-store-server/conda_store_server/registry.py @@ -5,6 +5,7 @@ from traitlets.config import LoggingConfigurable from traitlets import Dict, Callable, default from python_docker.registry import Image, Registry +from sqlalchemy.orm import Session from conda_store_server import schema, orm, utils @@ -40,7 +41,7 @@ def _container_registry_image_tag(registry: Registry, build: orm.Build): return _container_registry_image_tag - def store_image(self, conda_store, build: orm.Build, image: Image): + def store_image(self, db: Session, conda_store, build: orm.Build, image: Image): self.log.info("storing container image locally") with utils.timer(self.log, "storing container image locally"): # https://docs.docker.com/registry/spec/manifest-v2-2/#example-image-manifest @@ -59,7 +60,7 @@ def store_image(self, conda_store, build: orm.Build, image: Image): content_compressed = gzip.compress(layer.content) content_compressed_hash = hashlib.sha256(content_compressed).hexdigest() conda_store.storage.set( - conda_store.db, + db, build.id, build.docker_blob_key(content_compressed_hash), content_compressed, @@ -89,7 +90,7 @@ def store_image(self, conda_store, build: orm.Build, image: Image): docker_manifest_hash = hashlib.sha256(docker_manifest_content).hexdigest() conda_store.storage.set( - conda_store.db, + db, build.id, build.docker_blob_key(docker_config_hash), 
docker_config_content, @@ -101,7 +102,7 @@ def store_image(self, conda_store, build: orm.Build, image: Image): # is sort of hack to avoid having to figure out which sha256 # refers to which manifest. conda_store.storage.set( - conda_store.db, + db, build.id, f"docker/manifest/sha256:{docker_manifest_hash}", docker_manifest_content, @@ -110,7 +111,7 @@ def store_image(self, conda_store, build: orm.Build, image: Image): ) conda_store.storage.set( - conda_store.db, + db, build.id, build.docker_manifest_key, docker_manifest_content, @@ -154,7 +155,7 @@ def pull_image(self, image_name: str) -> Image: return registry.pull_image(name, tag) - def push_image(self, conda_store, build, image: Image): + def push_image(self, db, build, image: Image): for registry_url, configure_registry in self.container_registries.items(): self.log.info(f"beginning upload of image to registry {registry_url}") with utils.timer(self.log, f"uploading image to registry {registry_url}"): @@ -168,8 +169,8 @@ def push_image(self, conda_store, build, image: Image): artifact_type=schema.BuildArtifactType.CONTAINER_REGISTRY, key=f"{registry_url}/{image.name}:{image.tag}", ) - conda_store.db.add(registry_build_artifact) - conda_store.db.commit() + db.add(registry_build_artifact) + db.commit() def delete_image(self, image_name: str): registry_url, name, tag = self.parse_image_uri(image_name) diff --git a/conda-store-server/conda_store_server/schema.py b/conda-store-server/conda_store_server/schema.py index 476f5d7c6..9dc97b053 100644 --- a/conda-store-server/conda_store_server/schema.py +++ b/conda-store-server/conda_store_server/schema.py @@ -93,9 +93,20 @@ class Config: orm_mode = True +class NamespaceRoleMapping(BaseModel): + id: int + entity: str + role: str + + class Config: + orm_mode = True + + class Namespace(BaseModel): id: int name: constr(regex=f"^[{ALLOWED_CHARACTERS}]+$") # noqa: F722 + metadata_: Dict[str, Any] = {} + role_mappings: List[NamespaceRoleMapping] = [] class Config: orm_mode = 
True diff --git a/conda-store-server/conda_store_server/server/app.py b/conda-store-server/conda_store_server/server/app.py index a7a18817a..2a1bfa453 100644 --- a/conda-store-server/conda_store_server/server/app.py +++ b/conda-store-server/conda_store_server/server/app.py @@ -187,7 +187,9 @@ def initialize(self, *args, **kwargs): dbutil.upgrade(self.conda_store.database_url) self.authentication = self.authentication_class( - parent=self, log=self.log, authentication_db=self.conda_store.db + parent=self, + log=self.log, + authentication_db=self.conda_store.session_factory, ) # ensure checks on redis_url @@ -230,14 +232,11 @@ def trim_slash(url): @app.middleware("http") async def conda_store_middleware(request: Request, call_next): - try: - request.state.conda_store = self.conda_store - request.state.server = self - request.state.authentication = self.authentication - request.state.templates = self.templates - response = await call_next(request) - finally: - request.state.conda_store.session_factory.remove() + request.state.conda_store = self.conda_store + request.state.server = self + request.state.authentication = self.authentication + request.state.templates = self.templates + response = await call_next(request) return response @app.exception_handler(HTTPException) @@ -323,9 +322,10 @@ def redirect_home(request: Request): def start(self): fastapi_app = self.init_fastapi_app() - self.conda_store.ensure_settings() - self.conda_store.ensure_namespace() - self.conda_store.ensure_conda_channels() + with self.conda_store.session_factory() as db: + self.conda_store.ensure_settings(db) + self.conda_store.ensure_namespace(db) + self.conda_store.ensure_conda_channels(db) # start worker if in standalone mode if self.standalone: diff --git a/conda-store-server/conda_store_server/server/auth.py b/conda-store-server/conda_store_server/server/auth.py index ed034fa9f..56d0bdf9f 100644 --- a/conda-store-server/conda_store_server/server/auth.py +++ 
b/conda-store-server/conda_store_server/server/auth.py @@ -17,7 +17,7 @@ from conda_store_server import schema, orm, utils from conda_store_server.server import dependencies -from sqlalchemy.orm.session import Session as SQLAlchemySession +from sqlalchemy.orm import sessionmaker ARN_ALLOWED_REGEX = re.compile(schema.ARN_ALLOWED) @@ -113,7 +113,7 @@ class RBACAuthorizationBackend(LoggingConfigurable): ) authentication_db = Instance( - SQLAlchemySession, + sessionmaker, help="SQLAlchemy session to query DB. Used for role mapping", config=False, ) @@ -243,21 +243,23 @@ def authorize(self, entity, arn, required_permissions): ) def database_role_bindings(self, entity): - result = self.authentication_db.execute( - text( - """SELECT nrm.entity, nrm.role - FROM namespace n - RIGHT JOIN namespace_role_mapping nrm ON nrm.namespace_id = n.id - WHERE n.name = :primary_namespace - """ - ), - {"primary_namespace": entity.primary_namespace}, - ) - raw_role_mappings = result.mappings().all() + with self.authentication_db() as db: + result = db.execute( + text( + """ + SELECT nrm.entity, nrm.role + FROM namespace n + RIGHT JOIN namespace_role_mapping nrm ON nrm.namespace_id = n.id + WHERE n.name = :primary_namespace + """ + ), + {"primary_namespace": entity.primary_namespace}, + ) + raw_role_mappings = result.mappings().all() - db_role_mappings = defaultdict(set) - for row in raw_role_mappings: - db_role_mappings[row["entity"]].add(row["role"]) + db_role_mappings = defaultdict(set) + for row in raw_role_mappings: + db_role_mappings[row["entity"]].add(row["role"]) return db_role_mappings @@ -289,7 +291,7 @@ class Authentication(LoggingConfigurable): ) authentication_db = Instance( - SQLAlchemySession, + sessionmaker, help="SQLAlchemy session to query DB. 
Used for role mapping", config=False, ) diff --git a/conda-store-server/conda_store_server/server/dependencies.py b/conda-store-server/conda_store_server/server/dependencies.py index 51ae7a1f2..99224a3a6 100644 --- a/conda-store-server/conda_store_server/server/dependencies.py +++ b/conda-store-server/conda_store_server/server/dependencies.py @@ -1,21 +1,30 @@ from fastapi import Request, Depends +from sqlalchemy.orm import Session -def get_conda_store(request: Request): +async def get_conda_store(request: Request): return request.state.conda_store -def get_server(request: Request): +async def get_server(request: Request): return request.state.server -def get_auth(request: Request): +async def get_auth(request: Request): return request.state.authentication -def get_entity(request: Request, auth=Depends(get_auth)): +async def get_entity(request: Request, auth=Depends(get_auth)): return auth.authenticate_request(request) -def get_templates(request: Request): +async def get_templates(request: Request): return request.state.templates + + +async def get_db(request: Request, conda_store=Depends(get_conda_store)) -> Session: + db = conda_store.session_factory() + try: + yield db + finally: + db.close() diff --git a/conda-store-server/conda_store_server/server/views/api.py b/conda-store-server/conda_store_server/server/views/api.py index c9f144b8e..685fe967e 100644 --- a/conda-store-server/conda_store_server/server/views/api.py +++ b/conda-store-server/conda_store_server/server/views/api.py @@ -1,10 +1,11 @@ -from typing import List, Dict, Optional, Any, Union +from typing import List, Dict, Optional, Any import datetime import pydantic import yaml from fastapi import APIRouter, Request, Depends, HTTPException, Query, Body from fastapi.responses import RedirectResponse, PlainTextResponse +from sqlalchemy.orm import Session from conda_store_server import api, orm, schema, utils, __version__ from conda_store_server.server import dependencies @@ -103,7 +104,6 @@ def 
paginated_api_response( .limit(paginated_args["limit"]) .offset(paginated_args["offset"]) ) - return { "status": "ok", "data": [object_schema.from_orm(_).dict(exclude=exclude) for _ in query.all()], @@ -117,7 +117,7 @@ def paginated_api_response( "/", response_model=schema.APIGetStatus, ) -def api_status(): +async def api_status(): return {"status": "ok", "data": {"version": __version__}} @@ -125,7 +125,7 @@ def api_status(): "/permission/", response_model=schema.APIGetPermission, ) -def api_get_permissions( +async def api_get_permissions( request: Request, conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), @@ -163,14 +163,14 @@ def api_get_permissions( "/usage/", response_model=schema.APIGetUsage, ) -def api_get_usage( +async def api_get_usage( request: Request, - conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), + db=Depends(dependencies.get_db), ): namespace_usage_metrics = auth.filter_namespaces( - entity, api.get_namespace_metrics(conda_store.db) + entity, api.get_namespace_metrics(db) ) data = {} @@ -192,7 +192,7 @@ def api_get_usage( "/token/", response_model=schema.APIPostToken, ) -def api_post_token( +async def api_post_token( request: Request, primary_namespace: Optional[str] = Body(None), expiration: Optional[datetime.datetime] = Body(None), @@ -236,20 +236,23 @@ def api_post_token( @router_api.get( "/namespace/", response_model=schema.APIListNamespace, + # don't send metadata_ and role_mappings + response_model_exclude_defaults=True, ) -def api_list_namespaces( - conda_store=Depends(dependencies.get_conda_store), +async def api_list_namespaces( auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), paginated_args: Dict = Depends(get_paginated_args), + db: Session = Depends(dependencies.get_db), ): orm_namespaces = auth.filter_namespaces( - entity, api.list_namespaces(conda_store.db, show_soft_deleted=False) + entity, 
api.list_namespaces(db, show_soft_deleted=False) ) return paginated_api_response( orm_namespaces, paginated_args, schema.Namespace, + exclude={"role_mappings", "metadata_"}, allowed_sort_bys={ "name": orm.Namespace.name, }, @@ -261,17 +264,17 @@ def api_list_namespaces( "/namespace/{namespace}/", response_model=schema.APIGetNamespace, ) -def api_get_namespace( +async def api_get_namespace( namespace: str, request: Request, - conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), ): auth.authorize_request( request, namespace, {Permissions.NAMESPACE_READ}, require=True ) - namespace = api.get_namespace(conda_store.db, namespace, show_soft_deleted=False) + namespace = api.get_namespace(db, namespace, show_soft_deleted=False) if namespace is None: raise HTTPException(status_code=404, detail="namespace does not exist") @@ -285,25 +288,25 @@ def api_get_namespace( "/namespace/{namespace}/", response_model=schema.APIAckResponse, ) -def api_create_namespace( +async def api_create_namespace( namespace: str, request: Request, - conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), ): auth.authorize_request( request, namespace, {Permissions.NAMESPACE_CREATE}, require=True ) - namespace_orm = api.get_namespace(conda_store.db, namespace) + namespace_orm = api.get_namespace(db, namespace) if namespace_orm: raise HTTPException(status_code=409, detail="namespace already exists") try: - api.create_namespace(conda_store.db, namespace) + api.create_namespace(db, namespace) except ValueError as e: raise HTTPException(status_code=400, detail=str(e.args[0])) - conda_store.db.commit() + db.commit() return {"status": "ok"} @@ -311,13 +314,13 @@ def api_create_namespace( "/namespace/{namespace}/", response_model=schema.APIAckResponse, ) -def api_update_namespace( +async def api_update_namespace( namespace: str, request: Request, - 
metadata: Union[Optional[Dict | List], None] = None, - role_mappings: Union[Optional[Dict[str, List[str]]], None] = None, - conda_store=Depends(dependencies.get_conda_store), + metadata: Dict[str, Any] = None, + role_mappings: Dict[str, List[str]] = None, auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), ): auth.authorize_request( @@ -331,35 +334,36 @@ def api_update_namespace( require=True, ) - namespace_orm = api.get_namespace(conda_store.db, namespace) + namespace_orm = api.get_namespace(db, namespace) if namespace_orm is None: raise HTTPException(status_code=404, detail="namespace does not exist") try: - api.update_namespace(conda_store.db, namespace, metadata, role_mappings) + api.update_namespace(db, namespace, metadata, role_mappings) except ValueError as e: raise HTTPException(status_code=400, detail=str(e.args[0])) - conda_store.db.commit() + db.commit() return {"status": "ok"} @router_api.delete("/namespace/{namespace}/", response_model=schema.APIAckResponse) -def api_delete_namespace( +async def api_delete_namespace( namespace: str, request: Request, conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), ): auth.authorize_request( request, namespace, {Permissions.NAMESPACE_DELETE}, require=True ) - namespace_orm = api.get_namespace(conda_store.db, namespace) + namespace_orm = api.get_namespace(db, namespace) if namespace_orm is None: raise HTTPException(status_code=404, detail="namespace does not exist") try: - conda_store.delete_namespace(namespace) + conda_store.delete_namespace(db, namespace) except utils.CondaStoreError as e: raise HTTPException(status_code=400, detail=e.message) @@ -370,22 +374,22 @@ def api_delete_namespace( "/environment/", response_model=schema.APIListEnvironment, ) -def api_list_environments( +async def api_list_environments( search: Optional[str] = None, namespace: Optional[str] = None, name: Optional[str] = None, 
status: Optional[schema.BuildStatus] = None, packages: Optional[List[str]] = Query([]), artifact: Optional[schema.BuildArtifactType] = None, - conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), paginated_args=Depends(get_paginated_args), + db: Session = Depends(dependencies.get_db), ): orm_environments = auth.filter_environments( entity, api.list_environments( - conda_store.db, + db, search=search, namespace=namespace, name=name, @@ -412,12 +416,12 @@ def api_list_environments( "/environment/{namespace}/{environment_name}/", response_model=schema.APIGetEnvironment, ) -def api_get_environment( +async def api_get_environment( namespace: str, environment_name: str, request: Request, - conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), ): auth.authorize_request( request, @@ -426,9 +430,7 @@ def api_get_environment( require=True, ) - environment = api.get_environment( - conda_store.db, namespace=namespace, name=environment_name - ) + environment = api.get_environment(db, namespace=namespace, name=environment_name) if environment is None: raise HTTPException(status_code=404, detail="environment does not exist") @@ -444,12 +446,13 @@ def api_get_environment( "/environment/{namespace}/{name}/", response_model=schema.APIAckResponse, ) -def api_update_environment_build( +async def api_update_environment_build( namespace: str, name: str, request: Request, conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), build_id: int = Body(None, embed=True), description: str = Body(None, embed=True), ): @@ -459,10 +462,10 @@ def api_update_environment_build( try: if build_id is not None: - conda_store.update_environment_build(namespace, name, build_id) + conda_store.update_environment_build(db, namespace, name, build_id) if description is not None: - 
conda_store.update_environment_description(namespace, name, description) + conda_store.update_environment_description(db, namespace, name, description) except utils.CondaStoreError as e: raise HTTPException(status_code=400, detail=e.message) @@ -474,11 +477,12 @@ def api_update_environment_build( "/environment/{namespace}/{name}/", response_model=schema.APIAckResponse, ) -def api_delete_environment( +async def api_delete_environment( namespace: str, name: str, request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): auth.authorize_request( @@ -486,7 +490,7 @@ def api_delete_environment( ) try: - conda_store.delete_environment(namespace, name) + conda_store.delete_environment(db, namespace, name) except utils.CondaStoreError as e: raise HTTPException(status_code=400, detail=e.message) @@ -496,7 +500,7 @@ def api_delete_environment( @router_api.get( "/specification/", ) -def api_get_specification( +async def api_get_specification( request: Request, channel: List[str] = Query([]), conda: List[str] = Query([]), @@ -505,6 +509,7 @@ def api_get_specification( schema.APIGetSpecificationFormat.YAML ), conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), ): @@ -520,12 +525,12 @@ def api_get_specification( ) try: - task, solve_id = api.post_solve(conda_store, specification) + task, solve_id = conda_store.register_solve(db, specification) task.wait() except ValueError as e: raise HTTPException(status_code=400, detail=str(e.args[0])) - solve = api.get_solve(conda_store.db, solve_id) + solve = api.get_solve(db, solve_id) return {"solve": solve.packages} @@ -534,9 +539,10 @@ def api_get_specification( "/specification/", response_model=schema.APIPostSpecification, ) -def api_post_specification( +async def api_post_specification( request: Request, 
conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), specification: str = Body(""), @@ -549,7 +555,7 @@ def api_post_specification( ) namespace_name = namespace or default_namespace - namespace = api.get_namespace(conda_store.db, namespace_name) + namespace = api.get_namespace(db, namespace_name) if namespace is None: permissions.add(Permissions.NAMESPACE_CREATE) @@ -569,7 +575,9 @@ def api_post_specification( ) try: - build_id = api.post_specification(conda_store, specification, namespace_name) + build_id = conda_store.register_environment( + db, specification, namespace_name, force=True + ) except ValueError as e: raise HTTPException(status_code=400, detail=str(e.args[0])) except utils.CondaStoreError as e: @@ -579,7 +587,7 @@ def api_post_specification( @router_api.get("/build/", response_model=schema.APIListBuild) -def api_list_builds( +async def api_list_builds( status: Optional[schema.BuildStatus] = None, packages: Optional[List[str]] = Query([]), artifact: Optional[schema.BuildArtifactType] = None, @@ -587,6 +595,7 @@ def api_list_builds( name: Optional[str] = None, namespace: Optional[str] = None, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), paginated_args=Depends(get_paginated_args), @@ -594,7 +603,7 @@ def api_list_builds( orm_builds = auth.filter_builds( entity, api.list_builds( - conda_store.db, + db, status=status, packages=packages, artifact=artifact, @@ -620,13 +629,13 @@ def api_list_builds( @router_api.get("/build/{build_id}/", response_model=schema.APIGetBuild) -def api_get_build( +async def api_get_build( build_id: int, request: Request, - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): - build = 
api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) if build is None: raise HTTPException(status_code=404, detail="build id does not exist") @@ -647,13 +656,14 @@ def api_get_build( "/build/{build_id}/", response_model=schema.APIPostSpecification, ) -def api_put_build( +async def api_put_build( build_id: int, request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) if build is None: raise HTTPException(status_code=404, detail="build id does not exist") @@ -666,7 +676,7 @@ def api_put_build( try: new_build = conda_store.create_build( - build.environment_id, build.specification.sha256 + db, build.environment_id, build.specification.sha256 ) except utils.CondaStoreError as e: raise HTTPException(status_code=400, detail=e.message) @@ -682,13 +692,14 @@ def api_put_build( "/build/{build_id}/", response_model=schema.APIAckResponse, ) -def api_delete_build( +async def api_delete_build( build_id: int, request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) if build is None: raise HTTPException(status_code=404, detail="build id does not exist") @@ -700,7 +711,7 @@ def api_delete_build( ) try: - conda_store.delete_build(build_id) + conda_store.delete_build(db, build_id) except utils.CondaStoreError as e: raise HTTPException(status_code=400, detail=e.message) @@ -711,17 +722,17 @@ def api_delete_build( "/build/{build_id}/packages/", response_model=schema.APIListCondaPackage, ) -def api_get_build_packages( +async def api_get_build_packages( build_id: int, request: Request, search: Optional[str] = None, exact: Optional[str] = None, build: Optional[str] = None, - 
conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), paginated_args=Depends(get_paginated_args), ): - build_orm = api.get_build(conda_store.db, build_id) + build_orm = api.get_build(db, build_id) if build_orm is None: raise HTTPException(status_code=404, detail="build id does not exist") @@ -732,7 +743,7 @@ def api_get_build_packages( require=True, ) orm_packages = api.get_build_packages( - conda_store.db, build_orm.id, search=search, exact=exact, build=build + db, build_orm.id, search=search, exact=exact, build=build ) return paginated_api_response( orm_packages, @@ -748,13 +759,14 @@ def api_get_build_packages( @router_api.get("/build/{build_id}/logs/") -def api_get_build_logs( +async def api_get_build_logs( build_id: int, request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) if build is None: raise HTTPException(status_code=404, detail="build id does not exist") @@ -772,11 +784,11 @@ def api_get_build_logs( "/channel/", response_model=schema.APIListCondaChannel, ) -def api_list_channels( - conda_store=Depends(dependencies.get_conda_store), +async def api_list_channels( + db: Session = Depends(dependencies.get_db), paginated_args=Depends(get_paginated_args), ): - orm_channels = api.list_conda_channels(conda_store.db) + orm_channels = api.list_conda_channels(db) return paginated_api_response( orm_channels, paginated_args, @@ -790,17 +802,15 @@ def api_list_channels( "/package/", response_model=schema.APIListCondaPackage, ) -def api_list_packages( +async def api_list_packages( search: Optional[str] = None, exact: Optional[str] = None, build: Optional[str] = None, paginated_args=Depends(get_paginated_args), - conda_store=Depends(dependencies.get_conda_store), + db: Session = 
Depends(dependencies.get_db), distinct_on: List[str] = Query([]), ): - orm_packages = api.list_conda_packages( - conda_store.db, search=search, exact=exact, build=build - ) + orm_packages = api.list_conda_packages(db, search=search, exact=exact, build=build) required_sort_bys, distinct_orm_packages = filter_distinct_on( orm_packages, distinct_on=distinct_on, @@ -826,13 +836,14 @@ def api_list_packages( @router_api.get("/build/{build_id}/yaml/") -def api_get_build_yaml( +async def api_get_build_yaml( build_id: int, request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) if build is None: raise HTTPException(status_code=404, detail="build id does not exist") @@ -846,13 +857,14 @@ def api_get_build_yaml( @router_api.get("/build/{build_id}/lockfile/", response_class=PlainTextResponse) -def api_get_build_lockfile( +async def api_get_build_lockfile( build_id: int, request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) auth.authorize_request( request, f"{build.environment.namespace.name}/{build.environment.name}", @@ -864,13 +876,14 @@ def api_get_build_lockfile( @router_api.get("/build/{build_id}/archive/") -def api_get_build_archive( +async def api_get_build_archive( build_id: int, request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) auth.authorize_request( request, f"{build.environment.namespace.name}/{build.environment.name}", @@ -882,14 +895,14 @@ def api_get_build_archive( 
@router_api.get("/build/{build_id}/docker/") -def api_get_build_docker_image_url( +async def api_get_build_docker_image_url( build_id: int, request: Request, - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), server=Depends(dependencies.get_server), auth=Depends(dependencies.get_auth), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) auth.authorize_request( request, f"{build.environment.namespace.name}/{build.environment.name}", @@ -920,9 +933,10 @@ def api_get_build_docker_image_url( "/setting/{namespace}/{environment_name}/", response_model=schema.APIGetSetting, ) -def api_get_settings( +async def api_get_settings( request: Request, conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), namespace: str = None, environment_name: str = None, @@ -943,7 +957,7 @@ def api_get_settings( return { "status": "ok", - "data": conda_store.get_settings(namespace, environment_name).dict(), + "data": conda_store.get_settings(db, namespace, environment_name).dict(), "message": None, } @@ -960,10 +974,11 @@ def api_get_settings( "/setting/{namespace}/{environment_name}/", response_model=schema.APIPutSetting, ) -def api_put_settings( +async def api_put_settings( request: Request, data: Dict[str, Any], conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), namespace: str = None, environment_name: str = None, @@ -983,7 +998,7 @@ def api_put_settings( ) try: - conda_store.set_settings(namespace, environment_name, data) + conda_store.set_settings(db, namespace, environment_name, data) except ValueError as e: raise HTTPException(status_code=400, detail=str(e.args[0])) diff --git a/conda-store-server/conda_store_server/server/views/conda_store_ui.py b/conda-store-server/conda_store_server/server/views/conda_store_ui.py index 
bf264ad9c..1af0d3276 100644 --- a/conda-store-server/conda_store_server/server/views/conda_store_ui.py +++ b/conda-store-server/conda_store_server/server/views/conda_store_ui.py @@ -6,7 +6,7 @@ @router_conda_store_ui.get("/") -def get_conda_store_ui( +async def get_conda_store_ui( request: Request, templates=Depends(dependencies.get_templates), ): diff --git a/conda-store-server/conda_store_server/server/views/metrics.py b/conda-store-server/conda_store_server/server/views/metrics.py index 99dc7b69c..d8c786108 100644 --- a/conda-store-server/conda_store_server/server/views/metrics.py +++ b/conda-store-server/conda_store_server/server/views/metrics.py @@ -1,5 +1,6 @@ from fastapi import APIRouter, Depends from fastapi.responses import PlainTextResponse +from sqlalchemy.orm import Session from conda_store_server import api from conda_store_server.server import dependencies @@ -9,15 +10,16 @@ @router_metrics.get("/metrics", response_class=PlainTextResponse) -def prometheus_metrics( +async def prometheus_metrics( conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), ): - metrics = api.get_metrics(conda_store.db) + metrics = api.get_metrics(db) return "\n".join(f"conda_store_{key} {value}" for key, value in metrics.items()) @router_metrics.get("/celery") -def trigger_task(conda_store=Depends(dependencies.get_conda_store)): +async def trigger_task(conda_store=Depends(dependencies.get_conda_store)): conda_store.celery_app def get_celery_worker_status(app): diff --git a/conda-store-server/conda_store_server/server/views/ui.py b/conda-store-server/conda_store_server/server/views/ui.py index 5c4ad341d..8d66035b6 100644 --- a/conda-store-server/conda_store_server/server/views/ui.py +++ b/conda-store-server/conda_store_server/server/views/ui.py @@ -2,6 +2,7 @@ from fastapi import APIRouter, Request, Depends from fastapi.responses import RedirectResponse +from sqlalchemy.orm import Session import yaml from conda_store_server import api 
@@ -12,15 +13,16 @@ @router_ui.get("/create/") -def ui_create_get_environment( +async def ui_create_get_environment( request: Request, templates=Depends(dependencies.get_templates), conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), ): orm_namespaces = auth.filter_namespaces( - entity, api.list_namespaces(conda_store.db, show_soft_deleted=False) + entity, api.list_namespaces(db, show_soft_deleted=False) ) default_namespace = ( @@ -43,18 +45,18 @@ def sort_namespace(n): @router_ui.get("/") -def ui_list_environments( +async def ui_list_environments( request: Request, search: Optional[str] = None, templates=Depends(dependencies.get_templates), - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), server=Depends(dependencies.get_server), entity=Depends(dependencies.get_entity), ): orm_environments = auth.filter_environments( entity, - api.list_environments(conda_store.db, search=search, show_soft_deleted=False), + api.list_environments(db, search=search, show_soft_deleted=False), ) context = { @@ -68,15 +70,15 @@ def ui_list_environments( @router_ui.get("/namespace/") -def ui_list_namespaces( +async def ui_list_namespaces( request: Request, templates=Depends(dependencies.get_templates), - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), ): orm_namespaces = auth.filter_namespaces( - entity, api.list_namespaces(conda_store.db, show_soft_deleted=False) + entity, api.list_namespaces(db, show_soft_deleted=False) ) context = { @@ -89,12 +91,12 @@ def ui_list_namespaces( @router_ui.get("/environment/{namespace}/{environment_name}/") -def ui_get_environment( +async def ui_get_environment( namespace: str, environment_name: str, request: Request, 
templates=Depends(dependencies.get_templates), - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), ): @@ -105,9 +107,7 @@ def ui_get_environment( require=True, ) - environment = api.get_environment( - conda_store.db, namespace=namespace, name=environment_name - ) + environment = api.get_environment(db, namespace=namespace, name=environment_name) if environment is None: return templates.TemplateResponse( "404.html", @@ -129,12 +129,12 @@ def ui_get_environment( @router_ui.get("/environment/{namespace}/{environment_name}/edit/") -def ui_edit_environment( +async def ui_edit_environment( namespace: str, environment_name: str, request: Request, templates=Depends(dependencies.get_templates), - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), ): @@ -145,9 +145,7 @@ def ui_edit_environment( require=True, ) - environment = api.get_environment( - conda_store.db, namespace=namespace, name=environment_name - ) + environment = api.get_environment(db, namespace=namespace, name=environment_name) if environment is None: return templates.TemplateResponse( "404.html", @@ -170,16 +168,16 @@ def ui_edit_environment( @router_ui.get("/build/{build_id}/") -def ui_get_build( +async def ui_get_build( build_id: int, request: Request, templates=Depends(dependencies.get_templates), - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), server=Depends(dependencies.get_server), entity=Depends(dependencies.get_entity), ): - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) if build is None: return templates.TemplateResponse( "404.html", @@ -209,10 +207,10 @@ def ui_get_build( @router_ui.get("/user/") -def ui_get_user( +async def 
ui_get_user( request: Request, templates=Depends(dependencies.get_templates), - conda_store=Depends(dependencies.get_conda_store), + db: Session = Depends(dependencies.get_db), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), ): @@ -224,13 +222,13 @@ def ui_get_user( ) orm_namespaces = auth.filter_namespaces( - entity, api.list_namespaces(conda_store.db, show_soft_deleted=False) + entity, api.list_namespaces(db, show_soft_deleted=False) ) - system_metrics = api.get_system_metrics(conda_store.db) + system_metrics = api.get_system_metrics(db) namespace_usage_metrics = auth.filter_namespaces( - entity, api.get_namespace_metrics(conda_store.db) + entity, api.get_namespace_metrics(db) ) context = { @@ -247,10 +245,11 @@ def ui_get_user( @router_ui.get("/setting/") @router_ui.get("/setting/{namespace}/") @router_ui.get("/setting/{namespace}/{environment_name}/") -def ui_get_setting( +async def ui_get_setting( request: Request, templates=Depends(dependencies.get_templates), auth=Depends(dependencies.get_auth), + db: Session = Depends(dependencies.get_db), conda_store=Depends(dependencies.get_conda_store), namespace: str = None, environment_name: str = None, @@ -281,7 +280,7 @@ def ui_get_setting( "environment_name": environment_name, "api_settings_url": api_setting_url, "settings": conda_store.get_settings( - namespace=namespace, environment_name=environment_name + db, namespace=namespace, environment_name=environment_name ), } return templates.TemplateResponse("setting.html", context) diff --git a/conda-store-server/conda_store_server/testing.py b/conda-store-server/conda_store_server/testing.py index 5711dbd11..c70101fbd 100644 --- a/conda-store-server/conda_store_server/testing.py +++ b/conda-store-server/conda_store_server/testing.py @@ -2,48 +2,51 @@ import json import uuid +from sqlalchemy.orm import Session + from conda_store_server import schema, api, orm, conda_utils def seed_conda_store( + db: Session, conda_store, config: 
typing.Dict[str, typing.Dict[str, schema.CondaSpecification]] = {}, ): for namespace_name in config: - namespace = api.ensure_namespace(conda_store.db, name=namespace_name) + namespace = api.ensure_namespace(db, name=namespace_name) for environment_name, specification in config[namespace_name].items(): environment = api.ensure_environment( - conda_store.db, + db, name=specification.name, namespace_id=namespace.id, ) - specification = api.ensure_specification(conda_store.db, specification) - build = api.create_build(conda_store.db, environment.id, specification.id) - conda_store.db.commit() + specification = api.ensure_specification(db, specification) + build = api.create_build(db, environment.id, specification.id) + db.commit() environment.current_build_id = build.id - conda_store.db.commit() + db.commit() - _create_build_artifacts(conda_store, build) - _create_build_packages(conda_store, build) + _create_build_artifacts(db, conda_store, build) + _create_build_packages(db, conda_store, build) - api.create_solve(conda_store.db, specification.id) - conda_store.db.commit() + api.create_solve(db, specification.id) + db.commit() -def _create_build_packages(conda_store, build: orm.Build): +def _create_build_packages(db: Session, conda_store, build: orm.Build): channel_name = conda_utils.normalize_channel_name( conda_store.conda_channel_alias, "conda-forge" ) - channel = api.ensure_conda_channel(conda_store.db, channel_name) + channel = api.ensure_conda_channel(db, channel_name) conda_package = orm.CondaPackage( name=f"madeup-{uuid.uuid4()}", version="1.2.3", channel_id=channel.id, ) - conda_store.db.add(conda_package) - conda_store.db.commit() + db.add(conda_package) + db.commit() conda_package_build = orm.CondaPackageBuild( package_id=conda_package.id, @@ -57,16 +60,16 @@ def _create_build_packages(conda_store, build: orm.Build): subdir="noarch", timestamp=12345667, ) - conda_store.db.add(conda_package_build) - conda_store.db.commit() + db.add(conda_package_build) + 
db.commit() build.package_builds.append(conda_package_build) - conda_store.db.commit() + db.commit() -def _create_build_artifacts(conda_store, build: orm.Build): +def _create_build_artifacts(db: Session, conda_store, build: orm.Build): conda_store.storage.set( - conda_store.db, + db, build.id, build.log_key, b"fake logs", @@ -79,15 +82,15 @@ def _create_build_artifacts(conda_store, build: orm.Build): artifact_type=schema.BuildArtifactType.DIRECTORY, key=str(build.build_path(conda_store)), ) - conda_store.db.add(directory_build_artifact) + db.add(directory_build_artifact) lockfile_build_artifact = orm.BuildArtifact( build_id=build.id, artifact_type=schema.BuildArtifactType.LOCKFILE, key="" ) - conda_store.db.add(lockfile_build_artifact) + db.add(lockfile_build_artifact) conda_store.storage.set( - conda_store.db, + db, build.id, build.conda_env_export_key, json.dumps( @@ -98,7 +101,7 @@ def _create_build_artifacts(conda_store, build: orm.Build): ) conda_store.storage.set( - conda_store.db, + db, build.id, build.conda_pack_key, b"testing-conda-package", diff --git a/conda-store-server/conda_store_server/worker/tasks.py b/conda-store-server/conda_store_server/worker/tasks.py index b165f75a4..a38b83c86 100644 --- a/conda-store-server/conda_store_server/worker/tasks.py +++ b/conda-store-server/conda_store_server/worker/tasks.py @@ -15,6 +15,7 @@ solve_conda_environment, ) +from sqlalchemy.orm import Session from celery.execute import send_task from filelock import FileLock @@ -28,10 +29,6 @@ def at_start(sender, **k): class WorkerTask(Task): - def after_return(self, *args, **kwargs): - if hasattr(self, "_worker"): - self._worker.conda_store.session_factory.remove() - @property def worker(self): if not hasattr(self, "_worker"): @@ -52,28 +49,32 @@ def worker(self): @shared_task(base=WorkerTask, name="task_watch_paths", bind=True) def task_watch_paths(self): conda_store = self.worker.conda_store - settings = conda_store.get_settings() - 
conda_store.configuration.update_storage_metrics( - conda_store.db, conda_store.store_directory - ) + with conda_store.session_factory() as db: + settings = conda_store.get_settings(db) + + conda_store.configuration(db).update_storage_metrics( + db, conda_store.store_directory + ) - environment_paths = environment.discover_environments(self.worker.watch_paths) - for path in environment_paths: - with open(path) as f: - conda_store.register_environment( - specification=yaml.safe_load(f), - namespace=settings.filesystem_namespace, - force=False, - ) + environment_paths = environment.discover_environments(self.worker.watch_paths) + for path in environment_paths: + with open(path) as f: + conda_store.register_environment( + db, + specification=yaml.safe_load(f), + namespace=settings.filesystem_namespace, + force=False, + ) @shared_task(base=WorkerTask, name="task_update_storage_metrics", bind=True) def task_update_storage_metrics(self): conda_store = self.worker.conda_store - conda_store.configuration.update_storage_metrics( - conda_store.db, conda_store.store_directory - ) + with conda_store.session_factory() as db: + conda_store.configuration(db).update_storage_metrics( + db, conda_store.store_directory + ) """ @@ -98,118 +99,126 @@ def task_update_storage_metrics(self): @shared_task(base=WorkerTask, name="task_update_conda_channels", bind=True) def task_update_conda_channels(self): conda_store = self.worker.conda_store - - conda_store.ensure_conda_channels() - for channel in api.list_conda_channels(conda_store.db): - send_task("task_update_conda_channel", args=[channel.name], kwargs={}) + with conda_store.session_factory() as db: + conda_store.ensure_conda_channels(db) + for channel in api.list_conda_channels(db): + send_task("task_update_conda_channel", args=[channel.name], kwargs={}) @shared_task(base=WorkerTask, name="task_update_conda_channel", bind=True) def task_update_conda_channel(self, channel_name): - conda_store = self.worker.conda_store - settings = 
conda_store.get_settings() - - # sanitize the channel name as it's an URL, and it's used for the lock. - sanitizing = { - "https": "", - "http": "", - ":": "", - "/": "_", - "?": "", - "&": "_", - "=": "_", - } - channel_name_sanitized = channel_name - for k, v in sanitizing.items(): - channel_name_sanitized = channel_name_sanitized.replace(k, v) - - task_key = f"lock_{self.name}_{channel_name_sanitized}" - - is_locked = False - - if conda_store.redis_url is not None: - lock = conda_store.redis.lock(task_key, timeout=60 * 15) # timeout 15min - else: - lockfile_path = os.path.join(f"/tmp/task_lock_{task_key}") - lock = FileLock(lockfile_path, timeout=60 * 15) + conda_store = self.worker.conda_store + with conda_store.session_factory() as db: + settings = conda_store.get_settings(db) + + # sanitize the channel name as it's an URL, and it's used for the lock. + sanitizing = { + "https": "", + "http": "", + ":": "", + "/": "_", + "?": "", + "&": "_", + "=": "_", + } + channel_name_sanitized = channel_name + for k, v in sanitizing.items(): + channel_name_sanitized = channel_name_sanitized.replace(k, v) + + task_key = f"lock_{self.name}_{channel_name_sanitized}" + + is_locked = False + + if conda_store.redis_url is not None: + lock = conda_store.redis.lock(task_key, timeout=60 * 15) # timeout 15min + else: + lockfile_path = os.path.join(f"/tmp/task_lock_{task_key}") + lock = FileLock(lockfile_path, timeout=60 * 15) - try: - is_locked = lock.acquire(blocking=False) + try: + is_locked = lock.acquire(blocking=False) - if is_locked: - channel = api.get_conda_channel(conda_store.db, channel_name) + if is_locked: + channel = api.get_conda_channel(db, channel_name) - conda_store.log.debug(f"updating packages for channel {channel.name}") - channel.update_packages(conda_store.db, subdirs=settings.conda_platforms) + conda_store.log.debug(f"updating packages for channel {channel.name}") + channel.update_packages(db, subdirs=settings.conda_platforms) - else: - conda_store.log.debug( - f"skipping updating packages for 
channel {channel_name} - already in progress" - ) + else: + conda_store.log.debug( + f"skipping updating packages for channel {channel_name} - already in progress" + ) - except TimeoutError: - if conda_store.redis_url is None: - conda_store.log.warning( - f"Timeout when acquiring lock with key {task_key} - We assume the task is already being run" - ) - is_locked = False + except TimeoutError: + if conda_store.redis_url is None: + conda_store.log.warning( + f"Timeout when acquiring lock with key {task_key} - We assume the task is already being run" + ) + is_locked = False - finally: - if is_locked: - lock.release() + finally: + if is_locked: + lock.release() @shared_task(base=WorkerTask, name="task_solve_conda_environment", bind=True) def task_solve_conda_environment(self, solve_id): conda_store = self.worker.conda_store - solve = api.get_solve(conda_store.db, solve_id) - solve_conda_environment(conda_store, solve) + + with conda_store.session_factory() as db: + solve = api.get_solve(db, solve_id) + solve_conda_environment(db, conda_store, solve) @shared_task(base=WorkerTask, name="task_build_conda_environment", bind=True) def task_build_conda_environment(self, build_id): conda_store = self.worker.conda_store - build = api.get_build(conda_store.db, build_id) - build_conda_environment(conda_store, build) + + with conda_store.session_factory() as db: + build = api.get_build(db, build_id) + build_conda_environment(db, conda_store, build) @shared_task(base=WorkerTask, name="task_build_conda_env_export", bind=True) def task_build_conda_env_export(self, build_id): conda_store = self.worker.conda_store - build = api.get_build(conda_store.db, build_id) - build_conda_env_export(conda_store, build) + with conda_store.session_factory() as db: + build = api.get_build(db, build_id) + build_conda_env_export(db, conda_store, build) @shared_task(base=WorkerTask, name="task_build_conda_pack", bind=True) def task_build_conda_pack(self, build_id): conda_store = self.worker.conda_store 
- build = api.get_build(conda_store.db, build_id) - build_conda_pack(conda_store, build) + with conda_store.session_factory() as db: + build = api.get_build(db, build_id) + build_conda_pack(db, conda_store, build) @shared_task(base=WorkerTask, name="task_build_conda_docker", bind=True) def task_build_conda_docker(self, build_id): conda_store = self.worker.conda_store - build = api.get_build(conda_store.db, build_id) - build_conda_docker(conda_store, build) + with conda_store.session_factory() as db: + build = api.get_build(db, build_id) + build_conda_docker(db, conda_store, build) @shared_task(base=WorkerTask, name="task_update_environment_build", bind=True) def task_update_environment_build(self, environment_id): conda_store = self.worker.conda_store - environment = api.get_environment(conda_store.db, id=environment_id) + with conda_store.session_factory() as db: + environment = api.get_environment(db, id=environment_id) - conda_prefix = environment.current_build.build_path(conda_store) - environment_prefix = environment.current_build.environment_path(conda_store) + conda_prefix = environment.current_build.build_path(conda_store) + environment_prefix = environment.current_build.environment_path(conda_store) - utils.symlink(conda_prefix, environment_prefix) + utils.symlink(conda_prefix, environment_prefix) - if conda_store.post_update_environment_build_hook: - conda_store.post_update_environment_build_hook(conda_store, environment) + if conda_store.post_update_environment_build_hook: + conda_store.post_update_environment_build_hook(conda_store, environment) -def delete_build_artifact(conda_store, build_artifact): +def delete_build_artifact(db: Session, conda_store, build_artifact): if build_artifact.artifact_type == schema.BuildArtifactType.DIRECTORY: # ignore key conda_prefix = build_artifact.build.build_path(conda_store) @@ -218,7 +227,7 @@ def delete_build_artifact(conda_store, build_artifact): conda_prefix ): shutil.rmtree(conda_prefix) - 
conda_store.db.delete(build_artifact) + db.delete(build_artifact) elif build_artifact.artifact_type == schema.BuildArtifactType.CONTAINER_REGISTRY: pass # # container registry tag deletion is not generally implemented @@ -226,58 +235,59 @@ def delete_build_artifact(db: Session, conda_store, build_artifact): # conda_store.container_registry.delete_image(build_artifact.key) else: conda_store.log.info(f"deleting {build_artifact.key}") - conda_store.storage.delete( - conda_store.db, build_artifact.build.id, build_artifact.key - ) + conda_store.storage.delete(db, build_artifact.build.id, build_artifact.key) @shared_task(base=WorkerTask, name="task_delete_build", bind=True) def task_delete_build(self, build_id): conda_store = self.worker.conda_store - settings = conda_store.get_settings() + with conda_store.session_factory() as db: + settings = conda_store.get_settings(db) - build = api.get_build(conda_store.db, build_id) + build = api.get_build(db, build_id) - conda_store.log.info(f"deleting artifacts for build={build.id}") - for build_artifact in api.list_build_artifacts( - conda_store.db, - build_id=build_id, - excluded_artifact_types=settings.build_artifacts_kept_on_deletion, - ).all(): - delete_build_artifact(conda_store, build_artifact) - conda_store.db.commit() + conda_store.log.info(f"deleting artifacts for build={build.id}") + for build_artifact in api.list_build_artifacts( + db, + build_id=build_id, + excluded_artifact_types=settings.build_artifacts_kept_on_deletion, + ).all(): + delete_build_artifact(db, conda_store, build_artifact) + db.commit() @shared_task(base=WorkerTask, name="task_delete_environment", bind=True) def task_delete_environment(self, environment_id): conda_store = self.worker.conda_store - environment = api.get_environment(conda_store.db, id=environment_id) + with conda_store.session_factory() as db: + environment = api.get_environment(db, id=environment_id) - for build in environment.builds: - conda_store.log.info(f"deleting artifacts for 
build={build.id}") - for build_artifact in api.list_build_artifacts( - conda_store.db, - build_id=build.id, - ).all(): - delete_build_artifact(conda_store, build_artifact) + for build in environment.builds: + conda_store.log.info(f"deleting artifacts for build={build.id}") + for build_artifact in api.list_build_artifacts( + db, + build_id=build.id, + ).all(): + delete_build_artifact(db, conda_store, build_artifact) - conda_store.db.delete(environment) - conda_store.db.commit() + db.delete(environment) + db.commit() @shared_task(base=WorkerTask, name="task_delete_namespace", bind=True) def task_delete_namespace(self, namespace_id): conda_store = self.worker.conda_store - namespace = api.get_namespace(conda_store.db, id=namespace_id) - - for environment_orm in namespace.environments: - for build in environment_orm.builds: - conda_store.log.info(f"deleting artifacts for build={build.id}") - for build_artifact in api.list_build_artifacts( - conda_store.db, - build_id=build.id, - ).all(): - delete_build_artifact(conda_store, build_artifact) - conda_store.db.delete(environment_orm) - conda_store.db.delete(namespace) - conda_store.db.commit() + with conda_store.session_factory() as db: + namespace = api.get_namespace(db, id=namespace_id) + + for environment_orm in namespace.environments: + for build in environment_orm.builds: + conda_store.log.info(f"deleting artifacts for build={build.id}") + for build_artifact in api.list_build_artifacts( + db, + build_id=build.id, + ).all(): + delete_build_artifact(db, conda_store, build_artifact) + db.delete(environment_orm) + db.delete(namespace) + db.commit() diff --git a/conda-store-server/environment-dev.yaml b/conda-store-server/environment-dev.yaml index 0d481e42f..a375d6a4a 100644 --- a/conda-store-server/environment-dev.yaml +++ b/conda-store-server/environment-dev.yaml @@ -5,7 +5,7 @@ channels: dependencies: - python ==3.10 # conda builds - - conda + - conda ==23.5.2 - conda-docker >= 0.1.2 - conda-pack - conda-lock >=1.0.5 
diff --git a/conda-store-server/environment.yaml b/conda-store-server/environment.yaml index d557ca0ed..a289a574b 100644 --- a/conda-store-server/environment.yaml +++ b/conda-store-server/environment.yaml @@ -4,7 +4,7 @@ channels: dependencies: - python ==3.10 # conda environment builds - - conda + - conda ==23.5.2 - conda-docker >= 0.1.2 - conda-pack - conda-lock >=1.0.5 diff --git a/conda-store-server/tests/conftest.py b/conda-store-server/tests/conftest.py index a0e0a37ce..dac4c4d55 100644 --- a/conda-store-server/tests/conftest.py +++ b/conda-store-server/tests/conftest.py @@ -31,26 +31,28 @@ def conda_store_config(tmp_path): def conda_store_server(conda_store_config): _conda_store_server = server_app.CondaStoreServer(config=conda_store_config) _conda_store_server.initialize() + _conda_store = _conda_store_server.conda_store pathlib.Path(_conda_store.store_directory).mkdir(exist_ok=True) dbutil.upgrade(_conda_store.database_url) - _conda_store.ensure_settings() - _conda_store.celery_app + with _conda_store.session_factory() as db: + _conda_store.ensure_settings(db) + _conda_store.configuration(db).update_storage_metrics( + db, _conda_store.store_directory + ) - # must import tasks after a celery app has been initialized - import conda_store_server.worker.tasks # noqa + _conda_store.celery_app - # ensure that models are created - from celery.backends.database.session import ResultModelBase + # must import tasks after a celery app has been initialized + import conda_store_server.worker.tasks # noqa - ResultModelBase.metadata.create_all(_conda_store.db.get_bind()) + # ensure that models are created + from celery.backends.database.session import ResultModelBase - _conda_store.configuration.update_storage_metrics( - _conda_store.db, _conda_store.store_directory - ) + ResultModelBase.metadata.create_all(db.get_bind()) yield _conda_store_server @@ -69,8 +71,9 @@ def authenticate(testclient): @pytest.fixture -def seed_conda_store(conda_store): +def seed_conda_store(db, 
conda_store): testing.seed_conda_store( + db, conda_store, { "default": { @@ -103,11 +106,11 @@ def seed_conda_store(conda_store): ) # for testing purposes make build 4 complete - build = api.get_build(conda_store.db, build_id=4) + build = api.get_build(db, build_id=4) build.started_on = datetime.datetime.utcnow() build.ended_on = datetime.datetime.utcnow() build.status = schema.BuildStatus.COMPLETED - conda_store.db.commit() + db.commit() @pytest.fixture @@ -118,24 +121,31 @@ def conda_store(conda_store_config): dbutil.upgrade(_conda_store.database_url) - _conda_store.ensure_settings() - _conda_store.celery_app + with _conda_store.session_factory() as db: + _conda_store.ensure_settings(db) + _conda_store.configuration(db).update_storage_metrics( + db, _conda_store.store_directory + ) - # must import tasks after a celery app has been initialized - import conda_store_server.worker.tasks # noqa + _conda_store.celery_app - # ensure that models are created - from celery.backends.database.session import ResultModelBase + # must import tasks after a celery app has been initialized + import conda_store_server.worker.tasks # noqa - ResultModelBase.metadata.create_all(_conda_store.db.get_bind()) + # ensure that models are created + from celery.backends.database.session import ResultModelBase - _conda_store.configuration.update_storage_metrics( - _conda_store.db, _conda_store.store_directory - ) + ResultModelBase.metadata.create_all(db.get_bind()) yield _conda_store +@pytest.fixture +def db(conda_store): + with conda_store.session_factory() as _db: + yield _db + + @pytest.fixture def simple_specification(): yield schema.CondaSpecification( diff --git a/conda-store-server/tests/test_actions.py b/conda-store-server/tests/test_actions.py index bc3317dfe..0ac91dc02 100644 --- a/conda-store-server/tests/test_actions.py +++ b/conda-store-server/tests/test_actions.py @@ -175,31 +175,33 @@ def test_get_conda_prefix_stats(tmp_path, conda_store, simple_conda_lock): assert 
context.result["disk_usage"] > 0 -def test_add_conda_prefix_packages(conda_store, simple_specification, current_prefix): +def test_add_conda_prefix_packages( + db, conda_store, simple_specification, current_prefix +): build_id = conda_store.register_environment( - specification=simple_specification, namespace="pytest" + db, specification=simple_specification, namespace="pytest" ) action.action_add_conda_prefix_packages( - db=conda_store.db, + db=db, conda_prefix=current_prefix, build_id=build_id, ) - build = api.get_build(conda_store.db, build_id=build_id) + build = api.get_build(db, build_id=build_id) assert len(build.package_builds) > 0 def test_add_lockfile_packages( - conda_store, simple_specification, simple_conda_lock, current_prefix + db, conda_store, simple_specification, simple_conda_lock, current_prefix ): - task, solve_id = conda_store.register_solve(specification=simple_specification) + task, solve_id = conda_store.register_solve(db, specification=simple_specification) action.action_add_lockfile_packages( - db=conda_store.db, + db=db, conda_lock_spec=simple_conda_lock, solve_id=solve_id, ) - solve = api.get_solve(conda_store.db, solve_id=solve_id) + solve = api.get_solve(db, solve_id=solve_id) assert len(solve.package_builds) > 0 diff --git a/conda-store-server/tests/test_app_api.py b/conda-store-server/tests/test_app_api.py index 42b850fc4..1d04a09e1 100644 --- a/conda-store-server/tests/test_app_api.py +++ b/conda-store-server/tests/test_app_api.py @@ -3,15 +3,15 @@ from conda_store_server import api, schema -def test_conda_store_app_register_solve(conda_store, celery_worker): +def test_conda_store_app_register_solve(db, conda_store, celery_worker): conda_specification = schema.CondaSpecification( name="pytest-name", channels=["main"], dependencies=["python"], ) - task_id, solve_id = conda_store.register_solve(conda_specification) - solve = api.get_solve(conda_store.db, solve_id=solve_id) + task_id, solve_id = conda_store.register_solve(db, 
conda_specification) + solve = api.get_solve(db, solve_id=solve_id) assert solve is not None assert solve.started_on is None @@ -24,12 +24,12 @@ def test_conda_store_app_register_solve(conda_store, celery_worker): task.get(timeout=30) assert task.state == "SUCCESS" - conda_store.db.expire_all() + db.expire_all() assert solve.ended_on is not None assert len(solve.package_builds) > 0 -def test_conda_store_register_environment_workflow(conda_store, celery_worker): +def test_conda_store_register_environment_workflow(db, conda_store, celery_worker): """Test entire environment build workflow""" conda_specification = schema.CondaSpecification( name="pytest-name", @@ -39,10 +39,10 @@ def test_conda_store_register_environment_workflow(conda_store, celery_worker): namespace_name = "pytest-namespace" build_id = conda_store.register_environment( - specification=conda_specification.dict(), namespace=namespace_name + db, specification=conda_specification.dict(), namespace=namespace_name ) - build = api.get_build(conda_store.db, build_id=build_id) + build = api.get_build(db, build_id=build_id) assert build is not None assert build.status == schema.BuildStatus.QUEUED assert build.environment.name == conda_specification.name @@ -71,11 +71,11 @@ def test_conda_store_register_environment_workflow(conda_store, celery_worker): task = AsyncResult(f"build-{build.id}-docker") task.wait(timeout=2 * 60) - conda_store.db.expire_all() + db.expire_all() assert build.status == schema.BuildStatus.COMPLETED -def test_conda_store_register_environment_force_false_same_namespace(conda_store): +def test_conda_store_register_environment_force_false_same_namespace(db, conda_store): """Ensure behavior that when force=False and same namespace the same spec does not trigger another build @@ -88,12 +88,14 @@ def test_conda_store_register_environment_force_false_same_namespace(conda_store namespace_name = "pytest-namespace" first_build_id = conda_store.register_environment( + db, 
specification=conda_specification.dict(), namespace=namespace_name, force=False, ) second_build_id = conda_store.register_environment( + db, specification=conda_specification.dict(), namespace=namespace_name, force=False, @@ -103,7 +105,9 @@ def test_conda_store_register_environment_force_false_same_namespace(conda_store assert second_build_id is None -def test_conda_store_register_environment_force_false_different_namespace(conda_store): +def test_conda_store_register_environment_force_false_different_namespace( + db, conda_store +): """Ensure behavior that when force=False and different namespace the same spec still triggers another build @@ -115,12 +119,14 @@ def test_conda_store_register_environment_force_false_different_namespace(conda_ ) first_build_id = conda_store.register_environment( + db, specification=conda_specification.dict(), namespace="pytest-namespace", force=False, ) second_build_id = conda_store.register_environment( + db, specification=conda_specification.dict(), namespace="pytest-different-namespace", force=False, @@ -130,7 +136,7 @@ def test_conda_store_register_environment_force_false_different_namespace(conda_ assert second_build_id == 2 -def test_conda_store_register_environment_duplicate_force_true(conda_store): +def test_conda_store_register_environment_duplicate_force_true(db, conda_store): """Ensure behavior that when force=True the same spec in same namespace still triggers another build @@ -143,12 +149,14 @@ def test_conda_store_register_environment_duplicate_force_true(conda_store): namespace_name = "pytest-namespace" first_build_id = conda_store.register_environment( + db, specification=conda_specification.dict(), namespace=namespace_name, force=True, ) second_build_id = conda_store.register_environment( + db, specification=conda_specification.dict(), namespace=namespace_name, force=True, diff --git a/conda-store-server/tests/test_auth.py b/conda-store-server/tests/test_auth.py index 220f2240c..177a968d9 100644 --- 
a/conda-store-server/tests/test_auth.py +++ b/conda-store-server/tests/test_auth.py @@ -129,7 +129,9 @@ def test_expired_token(): ) def test_authorization(conda_store, entity_bindings, arn, permissions, authorized): - authorization = RBACAuthorizationBackend(authentication_db=conda_store.db) + authorization = RBACAuthorizationBackend( + authentication_db=conda_store.session_factory + ) entity = AuthenticationToken( primary_namespace="example_namespace", role_bindings=entity_bindings @@ -154,7 +156,9 @@ def test_end_to_end_auth_flow(conda_store): token_model = authentication.authenticate(token) - authorization = RBACAuthorizationBackend(authentication_db=conda_store.db) + authorization = RBACAuthorizationBackend( + authentication_db=conda_store.session_factory + ) assert authorization.authorize( AuthenticationToken( primary_namespace=token_model.primary_namespace, @@ -329,7 +333,9 @@ def test_is_subset_entity_permissions( # authenticated, value, ): - authorization = RBACAuthorizationBackend(authentication_db=conda_store.db) + authorization = RBACAuthorizationBackend( + authentication_db=conda_store.session_factory + ) entity = AuthenticationToken(role_bindings=entity_bindings) new_entity = AuthenticationToken(role_bindings=new_entity_bindings) diff --git a/conda-store-server/tests/test_db_api.py b/conda-store-server/tests/test_db_api.py index b158fc460..f158962b2 100644 --- a/conda-store-server/tests/test_db_api.py +++ b/conda-store-server/tests/test_db_api.py @@ -4,53 +4,52 @@ from conda_store_server.orm import NamespaceRoleMapping -def test_namespace_crud(conda_store): +def test_namespace_crud(db): namespace_name = "pytest-namespace" # starts with no namespaces for test - assert len(api.list_namespaces(conda_store.db).all()) == 0 + assert len(api.list_namespaces(db).all()) == 0 # create namespace - namespace = api.create_namespace(conda_store.db, name=namespace_name) - conda_store.db.commit() + namespace = api.create_namespace(db, name=namespace_name) + 
db.commit() # check that only one namespace exists - assert len(api.list_namespaces(conda_store.db).all()) == 1 + assert len(api.list_namespaces(db).all()) == 1 # check that ensuring a namespace doesn't create a new one - api.ensure_namespace(conda_store.db, name=namespace_name) + api.ensure_namespace(db, name=namespace_name) - assert len(api.list_namespaces(conda_store.db).all()) == 1 + assert len(api.list_namespaces(db).all()) == 1 # check that getting namespace works - namespace = api.get_namespace(conda_store.db, id=namespace.id) + namespace = api.get_namespace(db, id=namespace.id) assert namespace is not None # check that deleting a namespace works - api.delete_namespace(conda_store.db, id=namespace.id) - conda_store.db.commit() + api.delete_namespace(db, id=namespace.id) + db.commit() - assert len(api.list_namespaces(conda_store.db).all()) == 0 + assert len(api.list_namespaces(db).all()) == 0 # check that ensuring a namespace doesn't creates one - api.ensure_namespace(conda_store.db, name=namespace_name) + api.ensure_namespace(db, name=namespace_name) - assert len(api.list_namespaces(conda_store.db).all()) == 1 + assert len(api.list_namespaces(db).all()) == 1 -def test_namespace_role_mapping(conda_store): - +def test_namespace_role_mapping(db): namespace_name = "pytest-namespace" # starts with no namespaces for test - assert len(api.list_namespaces(conda_store.db).all()) == 0 + assert len(api.list_namespaces(db).all()) == 0 # create namespace - namespace = api.create_namespace(conda_store.db, name=namespace_name) - conda_store.db.commit() + namespace = api.create_namespace(db, name=namespace_name) + db.commit() # check that only one namespace exists - assert len(api.list_namespaces(conda_store.db).all()) == 1 + assert len(api.list_namespaces(db).all()) == 1 # create a Role Mapping, with a failing entity with pytest.raises(Exception): @@ -72,37 +71,35 @@ def test_namespace_role_mapping(conda_store): NamespaceRoleMapping(namespace=namespace, 
namespace_id=namespace.id, entity="*/*") -def test_environment_crud(conda_store): +def test_environment_crud(db): namespace_name = "pytest-namespace" environment_name = "pytest-environment" description = "Hello World" - namespace = api.ensure_namespace(conda_store.db, name=namespace_name) + namespace = api.ensure_namespace(db, name=namespace_name) - assert len(api.list_environments(conda_store.db).all()) == 0 + assert len(api.list_environments(db).all()) == 0 # create environment environment = api.create_environment( - conda_store.db, + db, namespace_id=namespace.id, name=environment_name, description=description, ) - conda_store.db.commit() + db.commit() # check that only one environment exists - assert len(api.list_environments(conda_store.db).all()) == 1 + assert len(api.list_environments(db).all()) == 1 # ensure environment - api.ensure_environment( - conda_store.db, name=environment_name, namespace_id=namespace.id - ) + api.ensure_environment(db, name=environment_name, namespace_id=namespace.id) - assert len(api.list_environments(conda_store.db).all()) == 1 + assert len(api.list_environments(db).all()) == 1 # check that getting environment works environment = api.get_environment( - conda_store.db, namespace_id=namespace.id, name=environment_name + db, namespace_id=namespace.id, name=environment_name ) assert environment is not None @@ -118,29 +115,23 @@ def test_environment_crud(conda_store): # assert len(api.list_environments(conda_store.db).all()) == 1 -def test_get_set_keyvaluestore(conda_store): +def test_get_set_keyvaluestore(db): setting_1 = {"a": 1, "b": 2} setting_2 = {"c": 1, "d": 2} setting_3 = {"e": 1, "f": 2} - api.set_kvstore_key_values(conda_store.db, "pytest", setting_1) - api.set_kvstore_key_values(conda_store.db, "pytest/1", setting_2) - api.set_kvstore_key_values(conda_store.db, "pytest/1/2", setting_3) + api.set_kvstore_key_values(db, "pytest", setting_1) + api.set_kvstore_key_values(db, "pytest/1", setting_2) + 
api.set_kvstore_key_values(db, "pytest/1/2", setting_3) - assert setting_1 == api.get_kvstore_key_values(conda_store.db, "pytest") - assert setting_2 == api.get_kvstore_key_values(conda_store.db, "pytest/1") - assert setting_3 == api.get_kvstore_key_values(conda_store.db, "pytest/1/2") + assert setting_1 == api.get_kvstore_key_values(db, "pytest") + assert setting_2 == api.get_kvstore_key_values(db, "pytest/1") + assert setting_3 == api.get_kvstore_key_values(db, "pytest/1/2") # test updating a prefix - api.set_kvstore_key_values(conda_store.db, "pytest", setting_2) - assert {**setting_1, **setting_2} == api.get_kvstore_key_values( - conda_store.db, "pytest" - ) + api.set_kvstore_key_values(db, "pytest", setting_2) + assert {**setting_1, **setting_2} == api.get_kvstore_key_values(db, "pytest") # test updating a prefix - api.set_kvstore_key_values( - conda_store.db, "pytest", {"c": 999, "d": 999}, update=False - ) - assert {**setting_1, **setting_2} == api.get_kvstore_key_values( - conda_store.db, "pytest" - ) + api.set_kvstore_key_values(db, "pytest", {"c": 999, "d": 999}, update=False) + assert {**setting_1, **setting_2} == api.get_kvstore_key_values(db, "pytest") diff --git a/conda-store-server/tests/test_testing.py b/conda-store-server/tests/test_testing.py index 02fc59923..20de3a55e 100644 --- a/conda-store-server/tests/test_testing.py +++ b/conda-store-server/tests/test_testing.py @@ -1,7 +1,7 @@ from conda_store_server import schema, testing, api -def test_testing_initialize_database(conda_store): +def test_testing_initialize_database(db, conda_store): config = { "namespace1": { "name1": schema.CondaSpecification( @@ -24,10 +24,10 @@ def test_testing_initialize_database(conda_store): }, } - testing.seed_conda_store(conda_store, config) + testing.seed_conda_store(db, conda_store, config) - assert len(api.list_namespaces(conda_store.db).all()) == 2 - assert len(api.list_environments(conda_store.db).all()) == 3 - assert len(api.list_builds(conda_store.db).all()) 
== 3 - assert len(api.list_solves(conda_store.db).all()) == 3 - assert len(api.list_conda_packages(conda_store.db).all()) == 3 + assert len(api.list_namespaces(db).all()) == 2 + assert len(api.list_environments(db).all()) == 3 + assert len(api.list_builds(db).all()) == 3 + assert len(api.list_solves(db).all()) == 3 + assert len(api.list_conda_packages(db).all()) == 3 diff --git a/conda-store-server/tests/test_traitlets.py b/conda-store-server/tests/test_traitlets.py index 293bd01bc..5283a67e8 100644 --- a/conda-store-server/tests/test_traitlets.py +++ b/conda-store-server/tests/test_traitlets.py @@ -128,18 +128,20 @@ def a_route(a: str, b: str): assert response.json() == {"data": "Hello World c d"} -def test_conda_store_settings_conda_channels_packages_validate_valid(conda_store): +def test_conda_store_settings_conda_channels_packages_validate_valid(db, conda_store): conda_store.set_settings( + db, data={ "conda_allowed_channels": ["conda-forge"], "conda_included_packages": ["ipykernel"], "conda_required_packages": ["flask"], "pypi_included_packages": ["scipy"], "pypi_required_packages": ["numpy"], - } + }, ) global_specification = conda_store.validate_specification( + db, conda_store, namespace="default", specification=schema.CondaSpecification( @@ -159,6 +161,7 @@ def test_conda_store_settings_conda_channels_packages_validate_valid(conda_store ] conda_store.set_settings( + db, namespace="default", data={ "conda_allowed_channels": ["conda-forge"], @@ -170,6 +173,7 @@ def test_conda_store_settings_conda_channels_packages_validate_valid(conda_store ) namespace_specification = conda_store.validate_specification( + db, conda_store, namespace="default", specification=schema.CondaSpecification( @@ -190,6 +194,7 @@ def test_conda_store_settings_conda_channels_packages_validate_valid(conda_store ] conda_store.set_settings( + db, namespace="default", environment_name="test", data={ @@ -202,6 +207,7 @@ def 
test_conda_store_settings_conda_channels_packages_validate_valid(conda_store ) environment_specification = conda_store.validate_specification( + db, conda_store, namespace="default", specification=schema.CondaSpecification( @@ -224,6 +230,7 @@ def test_conda_store_settings_conda_channels_packages_validate_valid(conda_store # not allowed channel name with pytest.raises(ValueError): conda_store.validate_specification( + db, conda_store, namespace="default", specification=schema.CondaSpecification( @@ -236,6 +243,7 @@ def test_conda_store_settings_conda_channels_packages_validate_valid(conda_store # missing required conda package with pytest.raises(ValueError): conda_store.validate_specification( + db, conda_store, namespace="default", specification=schema.CondaSpecification( @@ -246,6 +254,7 @@ def test_conda_store_settings_conda_channels_packages_validate_valid(conda_store # missing required pip package with pytest.raises(ValueError): conda_store.validate_specification( + db, conda_store, namespace="default", specification=schema.CondaSpecification(