From 9f24bd69af4271574b2b105b4e90ce596c846f50 Mon Sep 17 00:00:00 2001 From: Peyton Murray Date: Wed, 3 Jul 2024 12:57:57 -0700 Subject: [PATCH 1/8] Add `conda config` warning docs/message; add duration log for actions (#823) Co-authored-by: Kim Pevey --- .../conda_store_server/_internal/action/base.py | 7 +++++++ .../_internal/action/generate_lockfile.py | 7 +++++++ conda-store-server/tests/test_actions.py | 11 ++++++++--- .../conda-store/explanations/conda-concepts.md | 12 ++++++++++++ 4 files changed, 34 insertions(+), 3 deletions(-) diff --git a/conda-store-server/conda_store_server/_internal/action/base.py b/conda-store-server/conda_store_server/_internal/action/base.py index 64ffac605..1e4d948ff 100644 --- a/conda-store-server/conda_store_server/_internal/action/base.py +++ b/conda-store-server/conda_store_server/_internal/action/base.py @@ -4,6 +4,7 @@ import logging import subprocess import tempfile +import time import typing import uuid @@ -27,8 +28,14 @@ def wrapper(*args, stdout=None, stderr=None, **kwargs): # enter temporary directory stack.enter_context(utils.chdir(tmpdir)) + start_time = time.monotonic() + # run function and store result action_context.result = f(action_context, *args, **kwargs) + action_context.log.info( + f"Action {f.__name__} completed in {time.monotonic() - start_time:.3f} s." + ) + return action_context return wrapper diff --git a/conda-store-server/conda_store_server/_internal/action/generate_lockfile.py b/conda-store-server/conda_store_server/_internal/action/generate_lockfile.py index 7ecdd8592..6c79a2def 100644 --- a/conda-store-server/conda_store_server/_internal/action/generate_lockfile.py +++ b/conda-store-server/conda_store_server/_internal/action/generate_lockfile.py @@ -27,6 +27,13 @@ def action_solve_lockfile( with environment_filename.open("w") as f: json.dump(specification.dict(), f) + context.log.info( + "Note that the output of `conda config --show` displayed below only reflects " + "settings in the conda configuration file, which might be overridden by " + "variables required to be set by conda-store via the environment. 
Overridden " + f"settings: CONDA_FLAGS={conda_flags}" + ) + # The info command can be used with either mamba or conda logged_command(context, [conda_command, "info"]) # The config command is not supported by mamba diff --git a/conda-store-server/tests/test_actions.py b/conda-store-server/tests/test_actions.py index de3ed5f6b..1657379cd 100644 --- a/conda-store-server/tests/test_actions.py +++ b/conda-store-server/tests/test_actions.py @@ -21,6 +21,8 @@ def test_action_decorator(): + """Test that the action decorator captures stdout/stderr and logs correctly.""" + @action.action def test_function(context): print("stdout") @@ -48,10 +50,13 @@ def test_function(context): return pathlib.Path.cwd() context = test_function() - assert ( - context.stdout.getvalue() - == "stdout\nstderr\nsubprocess\nsubprocess_stdout\nsubprocess_stderr\nlog\n" + + stdout = context.stdout.getvalue() + assert stdout.startswith( + "stdout\nstderr\nsubprocess\nsubprocess_stdout\nsubprocess_stderr\nlog\n" ) + assert re.search(r"Action test_function completed in \d+\.\d+ s.\n$", stdout) + assert context.stderr.getvalue() == "subprocess_stderr_no_redirect\n" # test that action direction is not the same as outside function assert context.result != pathlib.Path.cwd() diff --git a/docusaurus-docs/conda-store/explanations/conda-concepts.md b/docusaurus-docs/conda-store/explanations/conda-concepts.md index c326ba769..734452a7c 100644 --- a/docusaurus-docs/conda-store/explanations/conda-concepts.md +++ b/docusaurus-docs/conda-store/explanations/conda-concepts.md @@ -80,3 +80,15 @@ could be updated the next minute the same solve for the same [conda-docs-environments]: https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/environments.html + +## Understanding `conda config` and how it relates to conda-store + +Because conda-store needs to configure some parts of conda without modifying +the user's conda configuration file, internally conda-store sets some conda +configuration variables using environment variables. The impact of this is that +if a user tries to print their conda configuration with `conda config`, some of +the configuration settings displayed by that command will not reflect the values +that are actually used by conda-store. In particular, `conda-store` internally +sets `CONDA_FLAGS=--strict-channel-priority`, overriding the channel priority in +the conda configuration file. Please keep this in mind when using `conda config` +to inspect your conda configuration and when viewing the build logs. From 1c7e53d7b00c900e06e69f7f4a9a70a205abee42 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jul 2024 13:21:37 -0700 Subject: [PATCH 2/8] [pre-commit.ci] pre-commit autoupdate (#839) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0d70b5520..d56b84139 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,7 +21,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: "v0.4.7" + rev: "v0.5.0" hooks: - id: ruff exclude: "examples|tests/assets" From 9b86e42d086ae362d44af1e439e0b92ec770ecce Mon Sep 17 00:00:00 2001 From: Peyton Murray Date: Wed, 3 Jul 2024 17:01:41 -0700 Subject: [PATCH 3/8] MAINT - Mock out call to env solve in `test_generate_constructor_installer` (#833) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../conda_store_server/_internal/build.py | 2 +- .../conda_store_server/_internal/orm.py | 4 +- conda-store-server/conda_store_server/app.py | 4 +- conda-store-server/tests/test_actions.py | 95 ++++++++++++------- 4 files changed, 66 insertions(+), 39 deletions(-) diff --git a/conda-store-server/conda_store_server/_internal/build.py b/conda-store-server/conda_store_server/_internal/build.py index 5aa3d4f5e..63258799f 100644 --- a/conda-store-server/conda_store_server/_internal/build.py +++ b/conda-store-server/conda_store_server/_internal/build.py @@ -136,7 +136,7 @@ def build_cleanup( build_active_tasks = collections.defaultdict(list) for worker_name, tasks in active_tasks.items(): for task in tasks: - match = re.fullmatch("build-(\d+)-(.*)", str(task["id"])) + match = re.fullmatch(r"build-(\d+)-(.*)", str(task["id"])) if match: build_id, name = match.groups() build_active_tasks[build_id].append(task["name"]) diff --git a/conda-store-server/conda_store_server/_internal/orm.py b/conda-store-server/conda_store_server/_internal/orm.py index 3492b1d3f..f5ef72ca7 100644 --- a/conda-store-server/conda_store_server/_internal/orm.py +++ b/conda-store-server/conda_store_server/_internal/orm.py @@ -806,7 +806,9 @@ class KeyValueStore(Base): value = Column(JSON) -def new_session_factory(url="sqlite:///:memory:", reset=False, **kwargs): +def new_session_factory( + url="sqlite:///:memory:", reset=False, **kwargs +) -> sessionmaker: engine = create_engine( url, # See the comment on the CustomJSONEncoder class on why this is needed diff --git a/conda-store-server/conda_store_server/app.py b/conda-store-server/conda_store_server/app.py index 5dab5b55f..7ec5794fe 100644 --- a/conda-store-server/conda_store_server/app.py +++ b/conda-store-server/conda_store_server/app.py @@ -8,7 +8,7 @@ import pydantic from celery import Celery, group -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, sessionmaker from sqlalchemy.pool import QueuePool from traitlets import ( Bool, @@ -376,7 +376,7 @@ def _docker_base_image(build: orm.Build): ) @property - def session_factory(self): + def session_factory(self) -> sessionmaker: if hasattr(self, "_session_factory"): return self._session_factory diff --git a/conda-store-server/tests/test_actions.py b/conda-store-server/tests/test_actions.py index 1657379cd..7a723d9e0 100644 --- a/conda-store-server/tests/test_actions.py +++ b/conda-store-server/tests/test_actions.py @@ -2,8 +2,8 @@ import datetime import pathlib import re -import subprocess import sys +import tempfile from unittest import mock @@ -11,12 +11,17 @@ import yaml import yarl +from conda.base.context import context as conda_base_context +from constructor import construct from fastapi.responses import RedirectResponse from traitlets import TraitError from conda_store_server import BuildKey, api from conda_store_server._internal import action, conda_utils, orm, schema, server, utils -from conda_store_server._internal.action import generate_lockfile +from conda_store_server._internal.action import ( + generate_constructor_installer, + generate_lockfile, +) from conda_store_server.server.auth 
import DummyAuthentication @@ -175,6 +180,7 @@ def test_solve_lockfile_multiple_platforms(conda_store, specification, request): def test_generate_constructor_installer( conda_store, specification_name, request, tmp_path ): + """Test that generate_construction_installer correctly produces the files needed by `constructor`.""" specification = request.getfixturevalue(specification_name) installer_dir = tmp_path / "installer_dir" is_lockfile = specification_name in [ @@ -182,43 +188,62 @@ def test_generate_constructor_installer( "simple_lockfile_specification_with_pip", ] - # Creates the installer - context = action.action_generate_constructor_installer( - conda_command=conda_store.conda_command, - specification=specification, - installer_dir=installer_dir, - version="1", - is_lockfile=is_lockfile, - ) + # action_generate_constructor_installer uses a temporary directory context manager + # to create and store the installer, but it usually gets deleted when the function + # exits. Here, we manually create that temporary directory, run the action, + # persisting the directory (so that we can verify the contents). Only then do we + # manually clean up afterward. + class PersistentTemporaryDirectory(tempfile.TemporaryDirectory): + def __exit__(self, exc, value, tb): + pass + + temp_directory = None + + def tmp_dir_side_effect(*args, **kwargs): + nonlocal temp_directory + temp_directory = PersistentTemporaryDirectory(*args, **kwargs) + return temp_directory + + with mock.patch.object( + generate_constructor_installer, "tempfile", wraps=tempfile + ) as mock_tempfile: + mock_tempfile.TemporaryDirectory.side_effect = tmp_dir_side_effect + + # Create the installer, but don't actually run `constructor` - it uses conda to solve the + # environment, which we don't need to do for the purposes of this test. 
+ with mock.patch( + "conda_store_server._internal.action.generate_constructor_installer.logged_command" + ) as mock_command: + generate_constructor_installer.action_generate_constructor_installer( + conda_command=conda_store.conda_command, + specification=specification, + installer_dir=installer_dir, + version="1", + is_lockfile=is_lockfile, + ) - # Checks that the installer was created - installer = context.result - assert installer.exists() + mock_command.assert_called() - tmp_dir = tmp_path / "tmp" + # First call to `constructor` is used to check that it is installed + mock_command.call_args_list[0].args[1] == ["constructor", "--help"] - # Runs the installer - out_dir = pathlib.Path(tmp_dir) / "out" - if sys.platform == "win32": - subprocess.check_output([installer, "/S", f"/D={out_dir}"]) - else: - subprocess.check_output([installer, "-b", "-p", str(out_dir)]) + # Second call is used to build the installer + call_args = mock_command.call_args_list[1].args[1] + cache_dir = pathlib.Path(call_args[3]) + platform = call_args[5] + tmp_dir = pathlib.Path(call_args[6]) + assert call_args[0:3] == ["constructor", "-v", "--cache-dir"] + assert str(cache_dir).endswith("pkgs") + assert call_args[4:6] == ["--platform", conda_base_context.subdir] + assert str(tmp_dir).endswith("build") - # Checks the output directory - assert out_dir.exists() - lib_dir = out_dir / "lib" - if specification_name in ["simple_specification", "simple_lockfile_specification"]: - if sys.platform == "win32": - assert any(str(x).endswith("zlib.dll") for x in out_dir.iterdir()) - elif sys.platform == "darwin": - assert any(str(x).endswith("libz.dylib") for x in lib_dir.iterdir()) - else: - assert any(str(x).endswith("libz.so") for x in lib_dir.iterdir()) - else: - # Uses rglob to not depend on the version of the python - # directory, which is where site-packages is located - flask = pathlib.Path("site-packages") / "flask" - assert any(str(x).endswith(str(flask)) for x in out_dir.rglob("*")) + # Use some of the constructor internals to verify the action's artifacts are valid + # constructor input + info = construct.parse(str(tmp_dir / "construct.yaml"), platform) + construct.verify(info) + + assert temp_directory is not None + temp_directory.cleanup() def test_fetch_and_extract_conda_packages(tmp_path, simple_conda_lock): From 6ec85ec876171c9bafedf348a9bcce08ad7a9e58 Mon Sep 17 00:00:00 2001 From: Peyton Murray Date: Mon, 8 Jul 2024 04:02:59 -0700 Subject: [PATCH 4/8] [MAINT] Remove deprecated docker-compose config option (#846) --- docker-compose.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index be31e89f4..33ec36d38 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,5 +1,3 @@ -version: "3.8" - services: conda-store-worker: build: From 54bf10569a5445494131ca494c9316f12ce6e4f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jul 2024 12:40:52 +0100 Subject: [PATCH 5/8] Bump express from 4.18.2 to 4.19.2 in /docusaurus-docs (#800) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Peyton Murray Co-authored-by: Tania Allard --- docusaurus-docs/package-lock.json | 129 +++++++++++++++++++----------- 1 file changed, 83 insertions(+), 46 deletions(-) diff --git a/docusaurus-docs/package-lock.json b/docusaurus-docs/package-lock.json index 8a99b1ca7..3d5d574a8 100644 --- a/docusaurus-docs/package-lock.json +++ b/docusaurus-docs/package-lock.json @@ 
-4355,12 +4355,12 @@ } }, "node_modules/body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", "dependencies": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", @@ -4368,7 +4368,7 @@ "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.11.0", - "raw-body": "2.5.1", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -4536,13 +4536,18 @@ } }, "node_modules/call-bind": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", - "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5059,9 +5064,9 @@ "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "engines": { "node": ">= 0.6" } @@ -5586,16 +5591,19 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-lazy-prop": { @@ -6059,6 +6067,25 @@ "is-arrayish": "^0.2.1" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": 
"1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-module-lexer": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", @@ -6311,16 +6338,16 @@ } }, "node_modules/express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -6908,15 +6935,19 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dependencies": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -7174,11 +7205,11 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -12535,9 +12566,9 @@ } }, "node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", @@ -13678,14 +13709,16 @@ } }, "node_modules/set-function-length": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", - "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": 
"sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dependencies": { - "define-data-property": "^1.1.1", - "get-intrinsic": "^1.2.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -13756,13 +13789,17 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" From 24e15a1b8bcca2fdc536d0aa66d0a860b96cd6ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jul 2024 12:42:55 +0100 Subject: [PATCH 6/8] Bump follow-redirects from 1.15.4 to 1.15.6 in /docusaurus-docs (#785) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Peyton Murray Co-authored-by: Tania Allard --- docusaurus-docs/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docusaurus-docs/package-lock.json b/docusaurus-docs/package-lock.json index 3d5d574a8..e4baab315 100644 --- a/docusaurus-docs/package-lock.json +++ b/docusaurus-docs/package-lock.json @@ -6663,9 +6663,9 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.4", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", - "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", From 8535d3ca2d782800abf1b6eea5f5584c2c2a2e7c Mon Sep 17 00:00:00 2001 From: Tania Allard Date: Mon, 8 Jul 2024 19:32:18 +0100 Subject: [PATCH 7/8] MAINT - Add dependabot config (#847) --- .dependabot.yml | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 .dependabot.yml diff --git a/.dependabot.yml b/.dependabot.yml new file mode 100644 index 000000000..404990db0 --- /dev/null +++ b/.dependabot.yml @@ -0,0 +1,34 @@ +version: 2 + +updates: + # auto update github actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + labels: + - "area: javascript" + - "area: dependencies 📦" + # ensure we have a nicely formatter commit message + prefix: "MAINT - " + + - package-ecosystem: "npm" + directory: "/docusaurus-docs" + schedule: + interval: "monthly" + labels: + - "area: javascript" + - "area: dependencies 📦" + # ensure we have a nicely formatter commit message + prefix: "MAINT - " + + - package-ecosystem: "docker" + directories: + - "/conda-store" + - 
"/conda-store-server" + schedule: + interval: "monthly" + labels: + - "area: dependencies 📦" + # ensure we have a nicely formatter commit message + prefix: "MAINT - " From 94f7a26e1921ac70173cb46c03d38fcded31859b Mon Sep 17 00:00:00 2001 From: Pavithra Eswaramoorthy Date: Thu, 18 Jul 2024 21:58:44 +0530 Subject: [PATCH 8/8] Update conda-store explanations (#726) Signed-off-by: Pavithra Eswaramoorthy Co-authored-by: Peyton Murray --- .../conda-store/explanations/artifacts.md | 211 ++++++++---------- .../explanations/conda-concepts.md | 162 +++++++++----- .../explanations/conda-store-concepts.md | 87 +++++++- .../conda-store/explanations/performance.md | 90 ++++---- 4 files changed, 327 insertions(+), 223 deletions(-) diff --git a/docusaurus-docs/conda-store/explanations/artifacts.md b/docusaurus-docs/conda-store/explanations/artifacts.md index 6ba8630f5..988655357 100644 --- a/docusaurus-docs/conda-store/explanations/artifacts.md +++ b/docusaurus-docs/conda-store/explanations/artifacts.md @@ -4,27 +4,27 @@ description: Understand environment artifacts generated by conda-store # Artifacts -:::warning -This page is in active development, some content may be inaccurate. -::: - conda environments can be created in a few different ways. -conda-store creates "artifacts" (corresponding to different environment creation options) that can be shared with colleagues and can be used to reproduce environments. -In the conda-store UI, these are available in the "Logs and Artifacts" section at the end of the environment page. +conda-store creates "artifacts" (corresponding to different environment creation options) for every environment, that can be shared with colleagues and used to reproduce environments. +In the conda-store UI, these are available in the **"Logs and Artifacts"** section +at the end of the environment page. The following sections describe the various artifacts generated and how to create environments with them. -### YAML file (pinned) +Environments in shared namespaces on conda-store can be accessed by everyone with access to that namespace, in which case you may not need to share the artifacts manually. +Artifacts are used to share your environment with external collaborators who don't have access to conda-store. -YAML files that follow the conda specification is a common way to create environments. -conda-store creates a "pinned" YAML, where all the exact versions of requested packages (including `pip` packages) as well as all their dependencies are specified, to ensure new environments created match the original environment as closely as possible. +:::note +The libraries (conda, conda-lock, conda-pack, etc.) mentioned in the following sections are separate projects in the conda ecosystem. The environments created using them are not managed by conda-store. +::: -A pinned YAML file is generated for each environment ta is built. -This includes pinning of the `pip`` packages as well. +## YAML file (pinned) + +YAML files that follow the conda specification are a common way to create environments. +conda-store creates a "pinned" YAML, where all the exact versions of requested packages (including `pip` packages) as well as all their dependencies are specified, to ensure new environments created match the original environment as closely as possible. :::info -In rare cases, the completely pinned packages may not solve because packages are -routinely marked as broken and removed. 
+In rare cases, building environments from "pinned" YAML files may not solve because packages are routinely marked as broken and removed at the repository level. **conda-forge** (default channel in conda-store) has a [policy that packages are never removed but are marked as @@ -32,159 +32,130 @@ broken][conda-forge-immutability-policy]. Most other channels do not have such a policy. ::: -Assuming you have `conda` installed, to create a conda environment (on any machine) using this file: +Click on **"Show yml file"** link in the conda-store UI to open the file in a new browser tab. You can copy-and-past this file in [conda-store UI's YAML editor][cs-ui-yaml] to create a new environment managed by conda-store in a different namespace. -1. Click on **"Show yml file"** link in the conda-store UI to open the file in a new browser tab. -2. Save the file with: Right-click on the page -> Select "Save As" -> Give the file a meaningful name (like `environment.yml`) -3. Run the following command and use the corresponding filename: - ```bash - conda env create --file - ``` +You can download the file and share with someone or use it to create an environment on a different machine. Assuming `conda` is installed, run the [CLI commands mentioned in the conda-documentation][conda-docs-create-env] with the corresponding filename to create a conda environment (on any machine). -### Lockfile +## Lockfile -A conda lockfile is a representation of only the `conda` dependencies in +A conda lockfile is a representation of all (`conda` and `pip`) dependencies in a given environment. -conda-store created lockfiles using the [conda-lock][conda-lock-github] project. +conda-store creates lockfiles using the [conda-lock][conda-lock-github] project. + +Click on **"Show lockfile"** to open the lockfile in a new browser tab. +You can download the file and share with someone or use it to create an environment in a different space. + +To create an environment att the new location, follow the [commands in the conda-lock documentation][conda-lock-install-env]. + +## Tarballs or archives :::warning -This file will not reproduce the `pip` dependencies in a given environment. -It is usually a good practice to not mix pip and conda dependencies. +Building environments from archives is only supported on Linux machines +because the tarballs are built on Linux machines. ::: -Click the `lockfile` icon to download the -lockfile. First install `conda-lock` if it is not already installed. +A tarball or archive is a _packaged_ environment that can be moved, unpacked, and used in a different location or on a different machine. -```shell -conda install -c conda-forge lockfile -``` +conda-store uses [Conda-Pack][conda-pack], a library for +creating tarballs of conda environments. -Install the locked environment file from conda-store. +Click **"Download archive"** button to download the archive of your conda environment, and share/move it to the desired location. -```shell -conda-lock install -``` +To install the tarball, follow the [instructions for the target machine in the conda-pack documentation][conda-pack-usage]. -### conda-pack archive +## Docker images -[Conda-Pack](https://conda.github.io/conda-pack/) is a package for -creating tarballs of given Conda environments. Creating a Conda archive -is not as simple as packing and unpacking a given directory. This is -due to the base path for the environment that may -change. [Conda-Pack](https://conda.github.io/conda-pack/) handles all -of these issues. 
Click the `archive` button and download the given -environment. The size of the archive will be less than the size seen -on the environment UI element due to compression. +:::warning +Docker image creation is currently only supported on Linux. -```shell -conda install -c conda-forge conda-pack -``` +The docker image generation and registry features are experimental, +and the following instructions are not thoroughly tested. +If you face any difficulties, open an issue on the GitHub repository. +::: -Install the Conda-Pack tarball. The directions are [slightly -complex](https://conda.github.io/conda-pack/#commandline-usage). Note -that `my_env` can be any name in any given prefix. +conda-store acts as a docker registry. +It leverages [Conda Docker][conda-docker], which builds docker images without Docker, allowing for advanced caching, reduced image sizes, and does not require elevated privileges. -```shell -mkdir -p my_env -tar -xzf .tar.gz -C my_env +### Authentication -source my_env/bin/activate +The `conda-store` docker registry requires authentication. +You can use **any username** and your **user token as the password**. -conda-unpack +```bash +docker login -u -p ``` -### Docker images +To get your user token: -:::note -Docker image creation is currently only supported on Linux. -::: +1. Visit your user page at `/admin/user` +2. Click on "Create token", which displays your token +3. Click on "copy" to copy the token to your clipboard -conda-store acts as a docker registry which allows for interesting -ways to handle Conda environment. In addition this registry leverages -[conda-docker](https://github.com/conda-incubator/conda-docker) which -builds docker images without docker allowing for advanced caching, -reduced image sizes, and does not require elevated privileges. Click -on the `docker` link this will copy a url to your clipboard. Note the -beginning of the url for example `localhost:8080/`. This is required to tell -docker where the docker registry is located. Otherwise by default it -will try and user docker hub. Your url will likely be different. - -The `conda-store` docker registry requires authentication via any -username with password set to a token that is generated by visiting -the user page to generate a token. Alternatively in the -`conda_store_config.py` you can set -`c.AuthenticationBackend.predefined_tokens` which have environment -read permissions on the given docker images needed for pulling. +Alternatively, you can set `c.AuthenticationBackend.predefined_tokens` in `conda_store_config.py`, which have environment read permissions on the given docker images required for pulling images. -``` -docker login -u token -p -docker pull -docker run -it python -``` +### General usage -#### General usage +To use a specific environment build, click on the **"Show Docker image"** to get the URL to the docker image. For example: `localhost:8080/analyst/python-numpy-env:583dd55140491c6b4cfa46e36c203e10280fe7e180190aa28c13f6fc35702f8f-20210825-180211-244815-3-python-numpy-env`. -```shell -docker run -it localhost:8080// -``` +The URL consists of: `//:` + +* The conda-store domain (for example `localhost:8080/`) at the beginning tells Docker where the docker registry is located. Otherwise, Docker will try to use Docker Hub by default. +* The `/` refers to the specific conda environment +* The "build key" is a combination of `---` which points to specific build of the environment. For example, a past version of the environment. 
-If you want to use a specific build (say one that was built in the -past and is not the current environment) you can visit the specific -build that you want in the UI and copy its docker registry tag -name. The tag name is a combination of `---` that we will refer to as build -key. An example would be -`localhost:5000/filesystem/python-numpy-env:583dd55140491c6b4cfa46e36c203e10280fe7e180190aa28c13f6fc35702f8f-20210825-180211-244815-3-python-numpy-env`. +To use a conda-store environment docker image: -```shell -docker run -it localhost:8080//: +```bash +docker run -it ``` -#### On Demand Docker Image +### On-demand (dynamic) docker image + +In conda-store, you can also specify the required packages within the docker image name itself, without needing an actual environment to be created by conda-store UI. + +The URL format is: `:/conda-store-dynamic//.../`. -conda-store has an additional feature which allow for specifying the -packages within the docker image name itself without requiring an -actual environment to be created on the conda-store UI side. +After `conda-store-dynamic`, you can specify packages with constraints separated by +slashes in the following format: +* `<=1.10` as `.lt.1.10` +* `>=1.10` as `.gt.1.10` -The following convention is used -`:/conda-store-dynamic/`. After -`conda-store-dynamic` you specify packages needed separated by -slashes. Additionally you may specify package constraints -for example `<=1.10` as `.lt.1.10`. +For example, if you need Python less than `3.10` and NumPy +greater than `1.0`, this would be the docker image +name: `:/conda-store-dynamic/python.lt.3.10/numpy.gt.1.0`. -As full example support we want python less than `3.8` and NumPy -greater than `1.0`. This would be the following docker image -name. `:/conda-store-dynamic/python.lt.3.8/numpy.gt.1.0`. conda-store -will then create the following environment and the docker image will -download upon the docker image being built. +conda-store creates the environment ands builds the docker image, which you can then download. -### Installers +## Installers -conda-store uses [constructor] to generate an installer for the current platform -(where the server is running): +Installers are another way to share and use a set of (bundled) packages. +conda-store uses [constructor][constructor-docs] to generate an installer for the current platform (where the server is running): -- on Linux and macOS, it generates a `.sh` installer -- on Windows, it generates a `.exe` installer using NSIS. +- on Linux and MacOS, it generates a `.sh` installer +- on Windows, it generates a `.exe` installer using NSIS conda-store automatically adds `conda` and `pip` to the target environment because these are required for the installer to work. -Also note that `constructor` uses a separate dependency solver instead of +:::note +`constructor` uses a separate dependency solver instead of utilizing the generated lockfile, so the package versions used by the installer might be different compared to the environment available in conda-store. There are plans to address this issue in the future. +::: -#### Existing Deployments - -conda-store saves environment settings and doesn't automatically update them on -startup (see `CondaStore.ensure_settings`). Existing deployments need to -manually enable installer builds via the admin interface. This can be done by -going to `/admin/setting///` (or -clicking on the `Settings` button on the environment page) and adding -`"CONSTRUCTOR_INSTALLER"` to `build_artifacts`. 
+ +[cs-ui-yaml]: ../../conda-store-ui/tutorials/create-envs#yaml-editor [conda-docs]: https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/environments.html [conda-forge-immutability-policy]: https://conda-forge.org/docs/maintainer/updating_pkgs.html#packages-on-conda-forge-are-immutable +[conda-docs-create-env]: https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#creating-an-environment-from-an-environment-yml-file [conda-lock-github]: https://github.com/conda-incubator/conda-lock +[conda-lock-install-env]: https://conda.github.io/conda-lock/output/#environment-lockfile [constructor]: https://github.com/conda/constructor +[conda-pack]: https://conda.github.io/conda-pack/ +[conda-pack-usage]: https://conda.github.io/conda-pack/index.html#commandline-usage +[conda-docker]: https://github.com/conda-incubator/conda-docker +[constructor-docs]: https://conda.github.io/constructor/ diff --git a/docusaurus-docs/conda-store/explanations/conda-concepts.md b/docusaurus-docs/conda-store/explanations/conda-concepts.md index 734452a7c..3117caa6e 100644 --- a/docusaurus-docs/conda-store/explanations/conda-concepts.md +++ b/docusaurus-docs/conda-store/explanations/conda-concepts.md @@ -1,94 +1,148 @@ --- -description: Understand conda basics +sidebar_position: 1 +description: Understand basics of package management with conda --- # Conda concepts -:::note -This page is in active development. -::: +conda is a Python package and environment manager, used widely in the Python data science ecosystem. +conda-store build on conda and other supporting libraries in the conda community. +This page briefly covers some key conda concepts necessary to use conda-store. +For detailed explanations, check out the [conda documentation][conda-docs]. + +## Python package + +Open source software projects (sometimes called libraries) are shared with users as *packages*. You need to "install" the package on your local workspace to use it. -## Packages/libraries +[pip][pip-docs] and [conda][conda-docs] are popular package management tools in the Python ecosystem. - +pip ships with the Python programming language, and can install packages from the PyPI (Python Package Index) - a community managed collection of packages, public/private PyPI mirrors, GitHub sources, and local directories. + +conda needs to be downloaded separately (through a distribution like Anaconda or Miniconda), and can install packages from conda [*channels*](#channels) and local builds. + +Some Python packages depend on non-Python code (for example, NumPy includes some C libraries). Installing such packages from PyPI using pip can be un-reliable and sometimes it can be your responsibility to separately install the non-Python libraries. +However, conda provides a package management solution that includes both Python and other underlying non-Python code. ## Dependencies - +Modern open source software (and software in general) is created using or builds on other libraries, which are called the *dependencies* of the project. +For example, pandas uses NumPy's `ndarray`s and is written partially in Python, hence, NumPy and Python are dependencies of pandas. +Specifically, they are the direct dependencies. +The dependencies of NumPy and pandas, and the dependencies of those dependencies, and so on creates a complete dependency graph for pandas. 
+ +Since conda-store focuses on [environments](#environments), the terms *dependencies* usually refers to the full set of compatible dependencies for all the packages specified in an environment. + +## Channels (conda) + +The [conda documentation][conda-docs-channels] defines: + +> Conda channels are the locations where packages are stored. They serve as the base for hosting and managing packages. Conda packages are downloaded from remote channels, which are URLs to directories containing conda packages. + +Similar to PyPI, conda channels are URLs of remote servers that manage packages. + +In conda-store, packages are installed from the [conda-forge][conda-forge] channel by default. +conda-forge is a community maintained channel for hosting open source libraries. + +:::note +This behavior is different from conda downloaded from Anaconda/Miniconda distribution, that gets packages from the "default" channel by default. + +Other distributions like Miniforge also use conda-forge as the default channel. +::: ## Environments -conda-store helps you create and manage "conda environments", also referred to as "data science environments" because `conda` is the leading package and environment management library for the Python data science. +conda-store helps create and manage "conda environments", sometimes also referred to as "data science environments" or simply "environments" in conda-store spaces. +An environment is an isolated set of installed packages. The [official conda documentation][conda-docs-environments] states: > A conda environment is a directory that contains a specific collection of conda packages that you have installed. > > If you change one environment, your other environments are not affected. You can easily activate or deactivate environments, which is how you switch between them. -conda-store is a higher-level toolkit that enforces some conda best practices behind-the-scenes to enable reliable and reproducible environment sharing for collaborative settings. - -One of the ways conda-store ensures reproducibility is by auto-generating certain artifacts. +In data science and development workflows, you often use different environments for different projects and sub-projects. It gives you a clean space for development with only the packages and versions that you need for the specific project. You can also use different versions of the same package in different environments depending on your project needs. -## Channels +Using isolated environments is a good practice to follow. The alternative, where requirements for all projects are added to a single "base" environment can not only give you un-reliable results but also be very tedious to manage across projects. - +## Environment specification (spec) -## Reproducibility of conda +conda environments are specified through a YAML file, which is called the *environment specification* and has the following major components: ```yaml -name: example -channels: - - defaults - - conda-forge -dependencies: - - python >=3.7 +name: my-cool-env # name of your environment +channels: # conda channels to get packages from, in order of priority + - conda-forge + - default +dependencies: # list of packages required for your work + - python >=3.10 + - numpy + - pandas + - matplotlib + - scikit-learn + - nodejs # conda can install non-Python packages as well, if it's available on a channel + - pip + - pip: # Optionally, conda can also install packages using pip if needed + - pytest ``` -Suppose we have the given `environment.yaml` file. 
How does conda -perform a build? +conda uses this file to create a conda *environment*. + +:::tip +In some cases, installing packages using pip through conda can cause issues dependency conflicts. We suggest you use the `pip:` section only if the package you need is not available on conda-forge. +::: + +Learn more in the [conda documentation about created an environment file manually][conda-docs-env-file] + +## Environment creation -1. Conda downloads `channeldata.json` from each of the channels which +Given an `environment.yaml` file, this is how conda performs a build (in brief): + +1. Conda downloads `channeldata.json`, a metadata file from each of the channels which list the available architectures. 2. Conda then downloads `repodata.json` for each of the architectures - it is interested in (specifically your compute architecture along - with noarch). The `repodata.json` has fields like package name, + it is interested in (specifically your particular compute architecture along + with noarch[^1]). The `repodata.json` has fields like package name, version, and dependencies. -You may notice that the channels listed above do not have a url. This -is because in general you can add -`https://conda.anaconda.org/` to a non-url channel. +[^1]: noarch is a cross-platform architecture which has no OS-specific files. Read [noarch packages in the conda documentation][conda-docs-noarch] for more information. -3. Conda then performs a solve to determine the exact version and - sha256 of each package that it will download +:::tip +You may notice that the channels listed in the YAML do not have a URL. This +is because in general , non-URL channels are expected to be present at `https://conda.anaconda.org/`. +::: -4. The specific packages are downloaded +3. Conda then performs a *solve* to determine the exact version and + sha256 of each package to download. -5. Conda does magic to fix the path prefixes of the install +4. The specific packages are downloaded. -There are two spots that introduce issues to reproducibility. The -first issue is tracking when an `environment.yaml` file has -changes. This can be easily tracked by taking a sha256 of the file -. This is what conda-store does but sorts the dependencies to make -sure it has a way of not triggering a rebuild if the order of two -packages changes in the dependencies list. In step (2) `repodata.json` -is updated regularly. When Conda solves for a user's environment it -tries to use the latest version of each package. Since `repodata.json` -could be updated the next minute the same solve for the same -`environment.yaml` file can result in different solves. +For a detailed walkthrough, check out the [conda install deep dive in the conda documentation][conda-docs-install]. + +Understand how conda-store builds on conda for improved reproducibility in [conda-store concepts page][conda-store-concepts]. + +## Conda configuration (`conda config`) + +You can configure various behaviors in conda through the [`.condarc` configuration file][conda-docs-config]. + +conda-store needs to configure some parts of conda without modifying your conda configuration file, for this conda-store (internally) sets some conda +configuration variables using environment variables. + +The impact of this is that if you try to print your conda configuration with [`conda config --show` CLI command][conda-docs-config-cli], some configuration settings displayed by that command will not reflect the values that are actually used by conda-store. 
+ +In particular, `conda-store` internally sets `CONDA_FLAGS=--strict-channel-priority`, overriding the channel priority in the conda configuration file. Keep this in mind when using `conda config` to inspect your conda configuration and when viewing the build logs. +[conda-docs]: https://docs.conda.io/ +[pip]: https://pip.pypa.io/en/stable/index.html [conda-docs-environments]: https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/environments.html - -## Understanding `conda config` and how it relates to conda-store - -Because conda-store needs to configure some parts of conda without modifying -the user's conda configuration file, internally conda-store sets some conda -configuration variables using environment variables. The impact of this is that -if a user tries to print their conda configuration with `conda config`, some of -the configuration settings displayed by that command will not reflect the values -that are actually used by conda-store. In particular, `conda-store` internally -sets `CONDA_FLAGS=--strict-channel-priority`, overriding the channel priority in -the conda configuration file. Please keep this in mind when using `conda config` -to inspect your conda configuration and when viewing the build logs. +[conda-docs-channels]: https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/channels.html#what-is-a-conda-channel +[conda-forge]: https://conda-forge.org/ +[conda-docs-env-file]: https://docs.conda.io/projects/conda/en/stable/user-guide/tasks/manage-environments.html#creating-an-environment-file-manually +[conda-docs-noarch]: https://docs.conda.io/projects/conda/en/stable/user-guide/concepts/packages.html#noarch-packages +[conda-docs-install]: https://docs.conda.io/projects/conda/en/stable/dev-guide/deep-dives/install.html#fetching-the-index +[conda-docs-config]: https://conda.io/projects/conda/en/latest/user-guide/configuration/use-condarc.html +[conda-docs-config-cli]: https://conda.io/projects/conda/en/latest/commands/config.html + + +[conda-store-concepts]: conda-store-concepts diff --git a/docusaurus-docs/conda-store/explanations/conda-store-concepts.md b/docusaurus-docs/conda-store/explanations/conda-store-concepts.md index a6f878f7d..6acea6f7e 100644 --- a/docusaurus-docs/conda-store/explanations/conda-store-concepts.md +++ b/docusaurus-docs/conda-store/explanations/conda-store-concepts.md @@ -1,13 +1,94 @@ --- +sidebar_position: 2 description: Overview of some conda-store concepts --- # conda-store concepts -:::note -This page is in active development. -::: +conda-store was developed with two key goals in mind: reliable reproducibility of environments, and features for collaboratively using an environment. +This page describes how conda-store achieves these goals. + +## Reproducibility + +In the [conda-based environment creation process][conda-concepts-env-creation], there are two areas where runtime reproducibility is improved through conda-store: + +* Auto-tracking when an `environment.yaml` (which is created and updated manually) file has changes. This can be easily tracked by taking a sha256 of the file, which is what conda-store does but sorts the dependencies to make sure it has a way of not triggering a rebuild if the order of two packages changes in the dependencies list. +* When a user creates an environment, conda tries to use the latest version of each package requested in the environment specification. 
Conda channels are constantly being updated with new package versions, so the same solve for the same `environment.yaml` file can result in different dependencies being downloaded. To enable reproducibility, conda-store auto-generates certain artifacts like lockfiles and tarballs that capture the actual versions of packages and can be used to reliably re-create the same environment. Learn more about them in the [artifacts documentation][artifacts].
 
 ## Namespaces
 
+Namespaces are how conda-store manages environment access for groups of users.
+
+Every environment in conda-store is a part of a "namespace", and is displayed in the format: `/`.
+
+Users can have access to view/edit/manage certain "namespaces", which means they have that level of permission for all the environments in that namespace.
+This allows a large team or organization to have isolated spaces for environment sharing between smaller groups.
+
+Each individual user has a separate namespace, which has the same name as their username (used while logging in). All environments in this namespace are private to the individual.
+
+A user can be a part of several other "shared" namespaces, and based on the level of access given to them, they can view and use the environment, edit the environment, or delete it altogether. The permissions are dictated by "role mappings".
+
 ## Role mappings
 
+By default, the following roles are available in conda-store. All users are in one of these groups and have corresponding permissions.
+
+- **Viewer:** Read-only permissions for environments in selected namespaces
+- **Editor (previously called Developer):** Permission to read, create, and update environments in specific namespaces
+- **Admin:** Permission to read, create, update, and delete environments in all existing namespaces
+
+
+ Specific role-mappings: + +```yaml + _viewer_permissions = { + schema.Permissions.ENVIRONMENT_READ, + schema.Permissions.NAMESPACE_READ, + schema.Permissions.NAMESPACE_ROLE_MAPPING_READ, + } + _editor_permissions = { + schema.Permissions.BUILD_CANCEL, + schema.Permissions.ENVIRONMENT_CREATE, + schema.Permissions.ENVIRONMENT_READ, + schema.Permissions.ENVIRONMENT_UPDATE, + schema.Permissions.ENVIRONMENT_SOLVE, + schema.Permissions.NAMESPACE_READ, + schema.Permissions.NAMESPACE_ROLE_MAPPING_READ, + schema.Permissions.SETTING_READ, + } + _admin_permissions = { + schema.Permissions.BUILD_DELETE, + schema.Permissions.BUILD_CANCEL, + schema.Permissions.ENVIRONMENT_CREATE, + schema.Permissions.ENVIRONMENT_DELETE, + schema.Permissions.ENVIRONMENT_READ, + schema.Permissions.ENVIRONMENT_UPDATE, + schema.Permissions.ENVIRONMENT_SOLVE, + schema.Permissions.NAMESPACE_CREATE, + schema.Permissions.NAMESPACE_DELETE, + schema.Permissions.NAMESPACE_READ, + schema.Permissions.NAMESPACE_UPDATE, + schema.Permissions.NAMESPACE_ROLE_MAPPING_CREATE, + schema.Permissions.NAMESPACE_ROLE_MAPPING_READ, + schema.Permissions.NAMESPACE_ROLE_MAPPING_UPDATE, + schema.Permissions.NAMESPACE_ROLE_MAPPING_DELETE, + schema.Permissions.SETTING_READ, + schema.Permissions.SETTING_UPDATE, + } +``` + +
+ +## Environment versions/builds + +conda-store always re-builds an environment from scratch when edits are detected, which is required for ensuring truly reproducible environments. +Version control is very useful in any collaborative setting, and environments are no exception. +Hence, conda-store keeps older versions (also called "builds") of the environment for reference, and allows you to select and use different (previous or newer) versions when needed. conda-store-ui also provides a graphical way to [switch between versions][conda-store-ui-version-control]. + +:::tip +Internally, conda-store handles versions with ✨ symlinking magic ✨, where the environment name points to different environments corresponding to versions. +::: + + +[conda-concepts-env-creation]: conda-concepts#environment-creation +[artifacts]: artifacts +[conda-store-ui-version-control]: ../../conda-store-ui/tutorials/version-control diff --git a/docusaurus-docs/conda-store/explanations/performance.md b/docusaurus-docs/conda-store/explanations/performance.md index f9a685fa7..c7393b13c 100644 --- a/docusaurus-docs/conda-store/explanations/performance.md +++ b/docusaurus-docs/conda-store/explanations/performance.md @@ -1,53 +1,51 @@ --- -description: conda-store's performance +description: Learn to make conda-store performant --- # Performance -:::warning -This page is in active development, some content may be missing or inaccurate. +Several components can impact conda-store's overall performance. +They are listed and described in order of decreasing impact below. + +## Worker storage + +When conda-store builds a given environment it has to locally install the environment in the directory specified in the [Traitlets][traitlets] configuration `CondaStore.store_directory`. +Conda environments consist of many hardlinks to small files. +This means that the performance of `store_directory` is limited to the number of +[Input/output operations per second (IOPS)][IOPS-wikipedia] the directory can +perform. +Many cloud providers have high performance storage options you can consider. + +### When to use NFS + +If you do not need to mount the environments via NFS into the containers, it's recommend to not use NFS and instead use traditional block storage. +Not only is it significantly cheaper, but also the IOPS performance will be better. + +If you want to mount the environments in containers or running VMs, then NFS +may be a good option. +With NFS, many cloud providers provide a high performance filesystem option at a significant premium in cost, like [GCP Filestore][gcp-filestore], [Amazon EFS][aws-efs], and [Azure Files][azure-files]. + +:::note +Choosing an NFS storage option with low IOPS will result in long environment +creation times. ::: -There are several parts of conda-store to consider for performance. We -have tried to list them in order of performance impact that may be -seen. - -### Worker storage - -When conda-store builds a given environment it has to locally install -the environment in the directory specified in the -[Traitlets](https://traitlets.readthedocs.io/en/stable/using_traitlets.html) -configuration `CondaStore.store_directory`. Conda environments consist -of many hardlinks to small files. This means that the -`store_directory` is limited to the number of -[IOPS](https://en.wikipedia.org/wiki/IOPS) the directory can -perform. Many cloud providers have high performance storage -options. 
These include: - -If you do not need to mount the environments via NFS into the -containers we highly recommend not using NFS and using traditional -block storage. Not only is it significantly cheaper but the IOPs -performance will be better as well. - -If you want to mount the environments in containers or running VMs NFS -may be a good option for you. With NFS many cloud providers provide a -high performance filesystem option at a significant premium in -cost. Example of these include [GCP -Filestore](https://cloud.google.com/filestore/docs/performance#expected_performance), -[AWS EFS](https://aws.amazon.com/efs/features/), and [Azure -files](https://docs.microsoft.com/en-us/azure/storage/files/understanding-billing#provisioning-method). Choosing -an nfs storage option with low IOPS will result in long environment -install times. - -### Network speed - -While Conda does its best to cache packages, it will have to reach out -to download the `repodata.json` along with the packages as well. Thus -network speeds may be important. Typically cloud environments have -plenty fast Internet. - -### S3 storage - -All build artifacts from conda-store are stored in object storage that -behaves S3 like. S3 traditionally has great performance if you use the -cloud provider implementation. +## Network speed + +While conda does its best to cache packages, it will have to connect over the internet +to download the `repodata.json` along with the packages. +Thus network speeds can impact performance, but typically cloud environments have plenty fast Internet. + +## Artifact storage + +All build artifacts from conda-store are stored in object storage that behaves like [Amazon S3][amazon-s3]. +S3 traditionally has great performance if you use the cloud provider implementation. + + + +[amazon-s3]: https://aws.amazon.com/s3/ +[traitlets]: https://traitlets.readthedocs.io/en/stable/using_traitlets.html +[iops-wikipedia]: https://en.wikipedia.org/wiki/IOPS +[gcp-filestore]: https://cloud.google.com/filestore/docs/performance#expected_performance +[aws-efs]: https://aws.amazon.com/efs/features/ +[azure-files]: https://docs.microsoft.com/en-us/azure/storage/files/understanding-billing#provisioning-method