diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 24d8a0e10..e641cc71f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -108,6 +108,8 @@ jobs: - name: Run unit tests on docker dev image env: GITHUB_TOKEN_TESTING: ${{ secrets.OPENSAFELY_GITHUB_TESTING_ORG_PAT }} + GITHUB_WRITEABLE_TOKEN: + INTERACTIVE_TEMPLATE_REPO: run: | # build docker and run test just docker-test diff --git a/docker/dependencies.txt b/docker/dependencies.txt index 27fff4972..0802b439a 100644 --- a/docker/dependencies.txt +++ b/docker/dependencies.txt @@ -1,4 +1,5 @@ # list ubuntu packages needed in production, one per line +git postgresql-client python3.11 python3.11-distutils diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index cf1b2ebf5..4e5a01daa 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -43,6 +43,8 @@ services: depends_on: [db] environment: - GITHUB_TOKEN= + - GITHUB_WRITEABLE_TOKEN= + - INTERACTIVE_TEMPLATE_REPO= - SECRET_KEY=12345 - SOCIAL_AUTH_GITHUB_KEY=test - SOCIAL_AUTH_GITHUB_SECRET=test @@ -85,7 +87,7 @@ services: environment: # override db hostname, so we can reach it within the container - GITHUB_TOKEN_TESTING - command: bash -c "coverage run --branch --source=applications,jobserver,services,staff,tests --module pytest && coverage report || coverage html" + command: bash -c "coverage run --branch --source=applications,interactive,jobserver,services,staff,tests --module pytest && coverage report || coverage html" volumes: postgres_data: diff --git a/docker/justfile b/docker/justfile index f7c98a129..300c4c13a 100644 --- a/docker/justfile +++ b/docker/justfile @@ -16,7 +16,7 @@ build env="dev": # run tests in dev container test *args="": build - docker-compose run --rm test bash -c "coverage run --branch --source=applications,jobserver,services,staff,tests --module pytest && coverage report || coverage html" + docker-compose run --rm test bash -c "coverage run --branch 
# interactive/__init__.py
from attrs import define


@define
class Codelist:
    # A codelist as chosen in the interactive form.
    label: str
    slug: str
    type: str  # noqa: A003

    # TODO: what are these again?
    path: str | None = None
    description: str | None = None


@define
class Analysis:
    """
    Encapsulate all the data for an analysis to pass between layers

    We expect to need a different object of this sort for each different analysis,
    to capture all the details for a given analysis.
    """

    codelist_1: Codelist
    codelist_2: Codelist | None
    created_by: str
    demographics: str
    filter_population: str
    frequency: str
    repo: str
    time_event: str
    time_scale: str
    time_value: str
    title: str
    # set after construction, once the AnalysisRequest PK is known
    identifier: str | None = None


# interactive/opencodelists.py — method of OpenCodelistsAPI
    def get_codelist(self, slug):
        """Download and return the CSV contents of the codelist at *slug*."""
        url = furl("https://www.opencodelists.org") / "codelist" / slug / "download.csv"
        r = requests.get(url)
        r.raise_for_status()

        return r.text


# interactive/submit.py — module imports
import os
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path

from attrs import asdict
from django.conf import settings
from django.db import transaction

from .models import AnalysisRequest
from .opencodelists import _get_opencodelists_api


# from jobserver.github import create_issue
# from .emails import send_analysis_request_confirmation_email
# from .slacks import notify_analysis_request_submitted
@transaction.atomic()
def submit_analysis(
    *,
    analysis,
    backend,
    creator,
    project,
    get_opencodelists_api=_get_opencodelists_api,
    force=False,
):
    """
    Create all the parts needed for an analysis

    This will stay in job-server while create_commit() is intended to move to
    an external service in the future.

    Returns the saved AnalysisRequest.
    """
    # create an AnalysisRequest instance so we have a PK to use in various
    # places, but we don't save it until we've written the commit and pushed
    # it, so we can create the JobRequest this object needs
    analysis_request = AnalysisRequest(
        project=project,
        created_by=creator,
        title=analysis.title,
        template_data=asdict(analysis),
    )

    # update the Analysis structure so we can pass around a single object;
    # if/when we pull the create_commit function out into another service
    # this structure would be the JSON we send over
    analysis.identifier = analysis_request.pk

    sha, project_yaml = create_commit(
        analysis,
        force=force,
        get_opencodelists_api=get_opencodelists_api,
        template_repo=settings.INTERACTIVE_TEMPLATE_REPO,
    )

    job_request = project.interactive_workspace.job_requests.create(
        backend=backend,
        created_by=creator,
        sha=sha,
        project_definition=project_yaml,
        force_run_dependencies=True,
        requested_actions=["run_all"],
    )

    analysis_request.job_request = job_request
    analysis_request.save()

    # TODO: notify someone about output checking?
    # waiting to find out what this should be
    # issue_url = create_issue(analysis_request.pk, job_server_url=url)
    # notify_analysis_request_submitted(analysis_request, issue_url)

    # TODO: notify the user?
    # send_analysis_request_confirmation_email(
    #     analysis_request.user.email, analysis_request
    # )
    return analysis_request


# MOVE TO EXTERNAL SERVICE


def clean_working_tree(path):
    """Remove all files and directories under *path*, except the .git directory."""
    # Iterate only the top level: removing a directory removes everything
    # beneath it, so (unlike the recursive glob this replaced) we never visit
    # entries we have already deleted.  Debug print removed.
    for entry in path.iterdir():
        if entry.name == ".git":
            continue

        entry.unlink() if entry.is_file() else shutil.rmtree(entry)


def commit_and_push(working_dir, analysis, force=False):
    """
    Commit the current tree, tag it with the analysis identifier, push both.

    Returns the SHA of the new commit.
    """
    force_args = ["--force"] if force else []

    git("add", ".", cwd=working_dir)

    second_codelist = ""
    if analysis.codelist_2:
        second_codelist = f" and codelist {analysis.codelist_2.slug}"
    msg = (
        f"Codelist {analysis.codelist_1.slug}{second_codelist} ({analysis.identifier})"
    )

    git(
        # -c arguments are instead of having to maintain stateful git config
        "-c",
        "user.email=interactive@opensafely.org",
        "-c",
        "user.name=OpenSAFELY Interactive",
        "commit",
        "--author",
        f"{analysis.created_by} <{analysis.created_by}>",
        "-m",
        msg,
        cwd=working_dir,
    )
    ps = git("rev-parse", "HEAD", capture_output=True, cwd=working_dir)
    commit_sha = ps.stdout.strip()

    # this is a super important step, makes it much easier to track commits
    git("tag", analysis.identifier, *force_args, cwd=working_dir)

    # push to main.  Note: we technically wouldn't need this from a pure git
    # pov, as a tag would be enough, but job-runner explicitly checks that
    # a commit is on the branch history, for security reasons
    git("push", "origin", "main", "--force-with-lease", cwd=working_dir)

    # push the tag once we know the main push has succeeded
    git("push", "origin", analysis.identifier, *force_args, cwd=working_dir)
    return commit_sha
def create_commit(
    analysis,
    template_repo,
    force=False,
    get_opencodelists_api=_get_opencodelists_api,
):
    """
    Build the analysis repo contents and push them as a single tagged commit.

    Returns (sha, contents of project.yaml).
    """
    if not force:
        # check this commit does not already exist
        raise_if_commit_exists(analysis.repo, analysis.identifier)

    # 1. create tempdir with AR.pk suffix
    suffix = f"template-{analysis.identifier}"
    with tempfile.TemporaryDirectory(suffix=suffix) as working_dir:
        working_dir = Path(working_dir)

        # 2. clone the given analysis code template repo to tempdir
        git("clone", "--depth", "1", template_repo, working_dir)

        # 3. remove the git directory
        shutil.rmtree(working_dir / ".git")

        # 4. download codelistA
        download_codelist(
            analysis.codelist_1.slug,
            working_dir / "codelist_1.csv",
            get_opencodelists_api=get_opencodelists_api,
        )

        # 5. optionally download codelistB
        if analysis.codelist_2:
            download_codelist(
                analysis.codelist_2.slug,
                working_dir / "codelist_2.csv",
                get_opencodelists_api=get_opencodelists_api,
            )

        # 6. interpolate form data into the template files on disk
        # render_interactive_report_code(working_dir, analysis)

        suffix = f"repo-{analysis.identifier}"
        with tempfile.TemporaryDirectory(suffix=suffix) as repo_dir:
            repo_dir = Path(repo_dir)

            # 7. clone the given interactive repo
            git("clone", "--depth", "1", analysis.repo, repo_dir)

            # 8. clear working directory because each analysis is a fresh set of files
            clean_working_tree(repo_dir)

            # 9. move templated files into the repo dir
            for path in working_dir.iterdir():
                shutil.move(path, repo_dir)

            # 10. write a commit to the given interactive repo
            # propagate force: with force=True we skipped the existing-tag
            # check above, so the tag/push must also be forced or they fail
            sha = commit_and_push(repo_dir, analysis, force=force)

            # 11. return contents of project.yaml (from disk) and sha
            project_yaml = (repo_dir / "project.yaml").read_text()

    return sha, project_yaml


def download_codelist(slug, path, get_opencodelists_api=_get_opencodelists_api):
    """Download the contents of a codelist and write them to *path*."""
    content = get_opencodelists_api().get_codelist(slug)

    path.write_text(content)


def git(*args, check=True, text=True, **kwargs):
    """
    Wrapper around subprocess.run for git commands.

    Changes the defaults: check=True and text=True, and prints the command run
    for logging.
    """
    cmd = ["git"] + [str(arg) for arg in args]

    cwd = kwargs.get("cwd", os.getcwd())
    # redact the token in the logged command; guard the empty/unset default,
    # since replacing an empty string would corrupt every logged argument
    token = settings.GITHUB_WRITEABLE_TOKEN
    cleaned = [arg.replace(token, "*****") if token else arg for arg in cmd]
    sys.stderr.write(f"{' '.join(cleaned)} (in {cwd})\n")

    # disable reading the user's gitconfig, to give us a more expected
    # environment when developing and testing locally: os.devnull is an
    # always-empty config file.  Keep PATH so git and its helper binaries
    # can still be resolved from the minimal environment.
    env = {"PATH": os.environ.get("PATH", ""), "GIT_CONFIG_GLOBAL": os.devnull}

    return subprocess.run(cmd, check=check, text=text, env=env, **kwargs)


def raise_if_commit_exists(repo, tag):
    """Raise if *tag* already exists in *repo*."""
    ps = git(
        "ls-remote",
        "--tags",
        repo,
        f"refs/tags/{tag}",
        capture_output=True,
    )
    if ps.stdout != "":
        raise Exception(f"Commit for {tag} already exists in {repo}")
from . import Analysis, Codelist
from .forms import AnalysisRequestForm
from .models import AnalysisRequest
from .opencodelists import _get_opencodelists_api
from .submit import submit_analysis


def get(d, path, default=""):
    """Return the value at a dotted *path* in nested dict *d*, or *default*."""
    key, _, remainder = path.partition(".")

    value = d.get(key, default)

    if not isinstance(value, dict):
        return value
    else:
        return get(value, remainder, default)


class AnalysisRequestCreate(View):
    form_class = AnalysisRequestForm
    get_opencodelists_api = staticmethod(_get_opencodelists_api)

    def build_analysis(self, *, form_data, project):
        """Translate the JSON posted by the SPA into an Analysis instance."""
        raw = json.loads(form_data)

        # translate the incoming data into something the form can validate
        codelist_2 = None
        if "codelistB" in raw:
            # direct kwargs instead of the previous dict-splat for readability
            codelist_2 = Codelist(
                label=get(raw, "codelistB.label"),
                slug=get(raw, "codelistB.value"),
                type=get(raw, "codelistB.type"),
            )

        # add auth token if it's a real github repo
        # TODO: needs a new token for this
        repo = project.interactive_workspace.repo.url
        if repo.startswith("https://github.com"):
            repo = repo.replace(
                "https://", f"https://interactive:{settings.GITHUB_WRITEABLE_TOKEN}@"
            )  # pragma: no cover

        return Analysis(
            codelist_1=Codelist(
                label=get(raw, "codelistA.label"),
                slug=get(raw, "codelistA.value"),
                type=get(raw, "codelistA.type"),
            ),
            codelist_2=codelist_2,
            created_by=self.request.user.email,
            demographics=get(raw, "demographics"),
            filter_population=get(raw, "filterPopulation"),
            frequency=get(raw, "frequency"),
            repo=repo,
            time_event=get(raw, "timeEvent"),
            time_scale=get(raw, "timeScale"),
            time_value=get(raw, "timeValue"),
            title=get(raw, "title"),
        )
dispatch(self, request, *args, **kwargs): ): raise PermissionDenied - api = self.get_opencodelists_api() - self.events = api.get_codelists("snomedct") - self.medications = api.get_codelists("dmd") + self.codelists_api = self.get_opencodelists_api() + self.events = self.codelists_api.get_codelists("snomedct") + self.medications = self.codelists_api.get_codelists("dmd") return super().dispatch(request, *args, **kwargs) - def form_valid(self, form): - # OSI v1 does the following: - # * copy the report template over, interpolating details into project.yaml - # * commit to repo - # * create a job request - # * create an issue - # * send confirmation email - - # create some required objects so we can create skeleton views as we - # build up the functionality of interactive - job_request = self.project.interactive_workspace.job_requests.create( - backend=Backend.objects.get(slug="tpp"), - created_by=self.request.user, - sha="", - project_definition="", - force_run_dependencies=True, - requested_actions=["run_all"], + def get(self, request, *args, **kwargs): + return self.render_to_response(request) + + def post(self, request, *args, **kwargs): + # we're posting the form data as JSON so we need to pull that from the + # request body + analysis = self.build_analysis( + form_data=self.request.body, project=self.project + ) + + form = AnalysisRequestForm( + codelists=self.events + self.medications, + data=self.translate_for_form(asdict(analysis)), ) - analysis_request = AnalysisRequest.objects.create( - job_request=job_request, + + if not form.is_valid(): + return self.render_to_response(request, form=form) + + analysis_request = submit_analysis( + analysis=analysis, + backend=Backend.objects.get(slug="tpp"), + creator=request.user, project=self.project, - created_by=self.request.user, - title="get from form", - start_date=START_DATE, - end_date=END_DATE, - codelist_slug="get from form", - codelist_name="get from form", + get_opencodelists_api=self.get_opencodelists_api, ) 
        return redirect(analysis_request)

    def render_to_response(self, request, **context):
        """
        Render a response with the given request and context

        This is a cut-down version of Django's render_to_response and
        get_context_data methods. We aren't using our form instance to
        generate errors for the UI so we don't need to handle construction
        of it in both GET/POST so all our context construction can also
        happen here.
        """
        base_path = build_spa_base_url(self.request.path, self.kwargs.get("path", ""))

        context = context | {
            "base_path": base_path,
            "events": self.events,
            "medications": self.medications,
            "project": self.project,
        }

        return TemplateResponse(
            request,
            template="interactive/analysis_request_create.html",
            context=context,
        )

    def translate_for_form(self, data):
        """
        Reshape the given data for validation by a Django Form

        Django forms are designed to work with form data but we're validating
        a JSON structure, with sub-keys, etc in it which we need to flatten out
        for the form.
        """

        def flatten(key, data):
            # lift a codelist sub-dict up to "<key>_<subkey>" entries, to
            # match the form's flat field names
            old = data.pop(key)
            if old is None:
                return data

            new = {f"{key}_{k}": v for k, v in old.items()}

            return data | new

        data = flatten("codelist_1", data)
        data = flatten("codelist_2", data)

        return data
# tests/fakes.py — method of FakeOpenCodelistsAPI
    def get_codelist(self, slug):
        # minimal CSV payload matching OpenCodelistsAPI.get_codelist's contract
        return "a,b,c\n1,2,3"


# tests/unit/interactive/conftest.py
import pytest

from interactive.submit import git

from ...factories import RepoFactory


@pytest.fixture
def build_repo(tmp_path):
    """Return a factory which creates an empty local git repo on disk."""

    def func(suffix="interactive_repo"):
        path = tmp_path / suffix
        path.mkdir()

        git("init", ".", "--initial-branch", "main", cwd=path)

        return path

    return func


@pytest.fixture
def interactive_repo(remote_repo):
    """A Repo model instance pointing at the on-disk remote repo."""
    return RepoFactory(url=str(remote_repo))


@pytest.fixture
def template_repo(build_repo):
    """Create a repo to match our analysis code template repo"""
    path = build_repo("template_repo")

    (path / "project.yaml").write_text("test content")

    git("add", "project.yaml", cwd=path)
    git(
        "-c",
        "user.email=testing@opensafely.org",
        "-c",
        "user.name=testing",
        "commit",
        "-m",
        "Initial commit",
        cwd=path,
    )

    return path


@pytest.fixture
def remote_repo(tmp_path):
    """A bare git repo which can be used as a push target ("origin")."""
    path = tmp_path / "remote_repo"
    path.mkdir()

    git("init", "--bare", ".", "--initial-branch", "main", cwd=path)

    return path
import pytest
from django.conf import settings

from interactive import Analysis, Codelist
from interactive.submit import (
    clean_working_tree,
    commit_and_push,
    create_commit,
    download_codelist,
    git,
    raise_if_commit_exists,
    submit_analysis,
)
from jobserver.models.common import new_ulid_str

from ...factories import (
    BackendFactory,
    ProjectFactory,
    RepoFactory,
    UserFactory,
    WorkspaceFactory,
)
from ...fakes import FakeOpenCodelistsAPI


def commit_in_remote(*, remote, commit):
    """Is *commit* the head of a branch in *remote*?"""
    ps = git("ls-remote", "--heads", remote, capture_output=True)

    return ps.stdout.startswith(commit)


def tag_in_remote(*, remote, tag):
    """Does *tag* exist in *remote*?"""
    ps = git("ls-remote", "--tags", remote, tag, capture_output=True)

    return ps.stdout.endswith(f"{tag}\n")


def tag_points_at_sha(*, repo, tag, sha):
    """Does *tag* resolve to *sha* in the local *repo*?"""
    ps = git("rev-list", "-1", tag, capture_output=True, cwd=repo)

    return ps.stdout == f"{sha}\n"


def test_clean_working_tree(tmp_path):
    one = tmp_path / "1"
    one.mkdir()
    (one / "1").write_text("")
    (one / "2").write_text("")
    (one / "3").write_text("")

    (tmp_path / ".git").mkdir()
    (tmp_path / ".git" / "config").write_text("")

    (tmp_path / "testing").mkdir()

    clean_working_tree(tmp_path)

    # everything except the .git directory should have been removed
    assert not (tmp_path / "1" / "1").exists()
    assert not (tmp_path / "1" / "2").exists()
    assert not (tmp_path / "1" / "3").exists()
    assert not (tmp_path / "testing").exists()
    assert (tmp_path / ".git" / "config").exists()


@pytest.mark.parametrize(
    "codelist_2,commit_message",
    [
        (None, "Codelist slug-a"),
        (
            Codelist(label="", slug="slug-b", type=""),
            "Codelist slug-a and codelist slug-b",
        ),
    ],
    # ids were previously swapped relative to the params: the None case is
    # the *without* one
    ids=["without_codelist_2", "with_codelist_2"],
)
def test_commit_and_push(build_repo, remote_repo, codelist_2, commit_message):
    # create the git repo which would be tied to a workspace
    repo = build_repo()

    # set our remote_repo fixture as the remote "origin"
    git("remote", "add", "origin", remote_repo, cwd=repo)

    pk = new_ulid_str()

    analysis = Analysis(
        codelist_1=Codelist(
            label="",
            slug="slug-a",
            type="",
        ),
        codelist_2=codelist_2,
        created_by=UserFactory().email,
        demographics="",
        filter_population="",
        frequency="",
        repo=repo,
        identifier=pk,
        time_event="",
        time_scale="",
        time_value="",
        title="",
    )

    (repo / "first.txt").write_text("testing")
    sha = commit_and_push(repo, analysis)

    # the parametrized commit_message was previously unused: check the
    # subject line of the commit we just made
    ps = git("log", "-1", "--format=%s", capture_output=True, cwd=repo)
    assert ps.stdout.strip() == f"{commit_message} ({pk})"

    # assert commit is in pushed remote_repo
    assert commit_in_remote(remote=remote_repo, commit=sha)
    assert tag_in_remote(remote=remote_repo, tag=pk)

    # assert tag is for new commit
    assert tag_points_at_sha(repo=repo, tag=pk, sha=sha)

    # commit again with the same analysis to test force tagging
    (repo / "second.txt").write_text("testing")
    sha = commit_and_push(repo, analysis, force=True)

    # assert new commit is in remote repo
    assert commit_in_remote(remote=remote_repo, commit=sha)
    assert tag_in_remote(remote=remote_repo, tag=pk)

    # assert tag has been updated to point to second commit
    assert tag_points_at_sha(repo=repo, tag=pk, sha=sha)


def test_download_codelist(tmp_path):
    path = tmp_path / "codelist.csv"
    download_codelist("test", path, get_opencodelists_api=FakeOpenCodelistsAPI)
    assert path.exists()


def test_raise_if_commit_exists(tmp_path):
    git("init", ".", "--initial-branch", "main", cwd=tmp_path)

    (tmp_path / "first").write_text("")
    git("add", "first", cwd=tmp_path)
    git(
        "-c",
        "user.email=testing@opensafely.org",
        "-c",
        "user.name=testing",
        "commit",
        "-m",
        "add first",
        cwd=tmp_path,
    )
    git("tag", "exists", cwd=tmp_path)

    with pytest.raises(Exception, match="Commit for exists"):
        raise_if_commit_exists(tmp_path, "exists")

    assert raise_if_commit_exists(tmp_path, "missing") is None


@pytest.mark.parametrize(
    "force", [(True), (False)], ids=["force_commit", "without_force_commit"]
)
def test_create_commit(build_repo, remote_repo, template_repo, force):
    # NOTE(review): this adds an "origin" remote to the bare remote_repo
    # itself; create_commit clones remote_repo so the clone already has the
    # right origin — confirm whether this line is needed at all
    git("remote", "add", "origin", remote_repo, cwd=remote_repo)

    pk = new_ulid_str()

    analysis = Analysis(
        codelist_1=Codelist(label="", slug="", type=""),
        codelist_2=None,
        created_by=UserFactory().email,
        demographics="",
        filter_population="",
        frequency="",
        repo=remote_repo,
        identifier=pk,
        time_event="",
        time_scale="",
        time_value="",
        title="",
    )

    sha, project_yaml = create_commit(
        analysis,
        template_repo,
        force=force,
        get_opencodelists_api=FakeOpenCodelistsAPI,
    )

    # does the remote repo only have the files we expect from our template?
    ps = git(
        "ls-tree",
        "--full-tree",
        "--name-only",
        "HEAD",
        cwd=remote_repo,
        capture_output=True,
    )
    assert ps.stdout == "codelist_1.csv\nproject.yaml\n"

    assert commit_in_remote(remote=remote_repo, commit=sha)


@pytest.mark.parametrize(
    "force", [(True), (False)], ids=["force_commit", "without_force_commit"]
)
def test_create_commit_with_two_codelists(
    build_repo, remote_repo, template_repo, force
):
    # NOTE(review): see test_create_commit — likely redundant remote add
    git("remote", "add", "origin", remote_repo, cwd=remote_repo)

    pk = new_ulid_str()

    analysis = Analysis(
        codelist_1=Codelist(label="", slug="", type=""),
        codelist_2=Codelist(label="", slug="slug-b", type=""),
        created_by=UserFactory().email,
        demographics="",
        filter_population="",
        frequency="",
        repo=remote_repo,
        identifier=pk,
        time_event="",
        time_scale="",
        time_value="",
        title="",
    )

    sha, project_yaml = create_commit(
        analysis,
        template_repo,
        force=force,
        get_opencodelists_api=FakeOpenCodelistsAPI,
    )

    # does the remote repo only have the files we expect from our template?
    ps = git(
        "ls-tree",
        "--full-tree",
        "--name-only",
        "HEAD",
        cwd=remote_repo,
        capture_output=True,
    )
    assert ps.stdout == "codelist_1.csv\ncodelist_2.csv\nproject.yaml\n"

    assert commit_in_remote(remote=remote_repo, commit=sha)


def test_submit_analysis(monkeypatch, remote_repo, template_repo):
    monkeypatch.setattr(settings, "INTERACTIVE_TEMPLATE_REPO", str(template_repo))

    backend = BackendFactory()
    project = ProjectFactory()
    repo = RepoFactory(url=str(remote_repo))
    WorkspaceFactory(project=project, repo=repo, name=f"{project.slug}-interactive")
    user = UserFactory()

    analysis = Analysis(
        codelist_1=Codelist(label="", slug="slug-a", type=""),
        codelist_2=None,
        created_by=user.email,
        demographics="",
        filter_population="",
        frequency="",
        repo=str(remote_repo),
        time_event="",
        time_scale="",
        time_value="",
        title="test",
    )

    analysis_request = submit_analysis(
        analysis=analysis,
        backend=backend,
        creator=user,
        project=project,
        get_opencodelists_api=FakeOpenCodelistsAPI,
    )

    assert analysis_request.created_by == user
    assert analysis_request.job_request
    assert analysis_request.project == project
    assert analysis_request.template_data["codelist_1"]["slug"] == "slug-a"
    assert analysis_request.title == "test"
assert response.context_data["project"] == project -def test_analysisrequestcreate_post_success_with_one_codelists(rf): +def test_analysisrequestcreate_post_failure(rf, interactive_repo): BackendFactory(slug="tpp") project = ProjectFactory() user = UserFactory() - WorkspaceFactory(project=project, name=f"{project.slug}-interactive") + WorkspaceFactory( + project=project, repo=interactive_repo, name=f"{project.slug}-interactive" + ) + + ProjectMembershipFactory(project=project, user=user, roles=[InteractiveReporter]) + + data = { + "timeScale": "months", + } + request = rf.post("/", data=json.dumps(data), content_type="appliation/json") + request.user = user + + response = AnalysisRequestCreate.as_view( + get_opencodelists_api=FakeOpenCodelistsAPI + )(request, org_slug=project.org.slug, project_slug=project.slug) + + assert response.status_code == 200, response.context_data["form"].errors + + # TODO: check our error response here + + +def test_analysisrequestcreate_post_success_with_one_codelist( + rf, monkeypatch, interactive_repo, template_repo +): + monkeypatch.setattr(settings, "INTERACTIVE_TEMPLATE_REPO", str(template_repo)) + + BackendFactory(slug="tpp") + project = ProjectFactory() + user = UserFactory() + WorkspaceFactory( + project=project, repo=interactive_repo, name=f"{project.slug}-interactive" + ) ProjectMembershipFactory(project=project, user=user, roles=[InteractiveReporter]) data = { + "title": "Event Codelist", "frequency": "monthly", "codelistA": { "label": "Event Codelist", @@ -70,15 +103,22 @@ def test_analysisrequestcreate_post_success_with_one_codelists(rf): assert response.url == analysis_request.get_absolute_url() -def test_analysisrequestcreate_post_success_with_two_codelists(rf): +def test_analysisrequestcreate_post_success_with_two_codelists( + rf, monkeypatch, interactive_repo, template_repo +): + monkeypatch.setattr(settings, "INTERACTIVE_TEMPLATE_REPO", str(template_repo)) + BackendFactory(slug="tpp") project = ProjectFactory() user = 
UserFactory() - WorkspaceFactory(project=project, name=f"{project.slug}-interactive") + WorkspaceFactory( + project=project, repo=interactive_repo, name=f"{project.slug}-interactive" + ) ProjectMembershipFactory(project=project, user=user, roles=[InteractiveReporter]) data = { + "title": "Event Codelist & Medication Codelist", "frequency": "monthly", "codelistA": { "label": "Event Codelist", @@ -190,3 +230,17 @@ def test_analysisrequestdetail_with_no_interactivereporter_role(rf): project_slug=analysis_request.project.slug, slug=analysis_request.slug, ) + + +def test_get(): + data = { + "nested": { + "1": "1", + }, + "non-string": ["abc"], + } + + assert get(data, "nested.1") == "1" + assert get(data, "nested.a") == "" + assert get(data, "nested.a", default=None) is None + assert get(data, "non-string") == ["abc"] diff --git a/tests/verification/test_opencodelists.py b/tests/verification/test_opencodelists.py index 3b4a90a31..506d17208 100644 --- a/tests/verification/test_opencodelists.py +++ b/tests/verification/test_opencodelists.py @@ -17,6 +17,17 @@ def opencodelists_api(): return OpenCodelistsAPI() +def test_get_codelist(enable_network, opencodelists_api): + args = ["pincer/ast/v1.8"] + + real = opencodelists_api.get_codelist(*args) + fake = FakeOpenCodelistsAPI().get_codelist(*args) + + compare(fake, real) + + assert real is not None + + def test_get_codelists(enable_network, opencodelists_api): args = ["snomedct"]