diff --git a/.github/workflows/build-rdo-package.yml b/.github/workflows/build-rdo-package.yml index 15470fddc49..3e6005e0890 100644 --- a/.github/workflows/build-rdo-package.yml +++ b/.github/workflows/build-rdo-package.yml @@ -5,7 +5,7 @@ concurrency: cancel-in-progress: true on: - create: + push: tags: - v* diff --git a/.github/workflows/keiko_test.yml b/.github/workflows/keiko_test.yml index 6074f2538ad..ce2f3c0e0a3 100644 --- a/.github/workflows/keiko_test.yml +++ b/.github/workflows/keiko_test.yml @@ -28,7 +28,17 @@ jobs: cache: 'pip' # caching pip dependencies - name: Install Latex - run: sudo apt-get install -y --no-install-recommends texlive-latex-base texlive-latex-extra texlive-latex-recommended texlive-lang-european texlive-fonts-recommended + run: | + sudo apt-get install -y --no-install-recommends \ + chktex \ + fonts-recommended \ + latexmk \ + texlive-fonts-recommended \ + texlive-lang-european \ + texlive-latex-base \ + texlive-latex-extra \ + texlive-latex-recommended \ + texlive-xetex - name: Install requirements-dev.txt run: pip install -r requirements-dev.txt diff --git a/Makefile b/Makefile index ab523891b83..f1415982f6a 100644 --- a/Makefile +++ b/Makefile @@ -1,94 +1,82 @@ SHELL := bash .ONESHELL: +.NOTPARALLEL: # use HIDE to run commands invisibly, unless VERBOSE defined HIDE:=$(if $(VERBOSE),,@) UNAME := $(shell uname) -.PHONY: kat rebuild update clean migrate build itest debian-build-image ubuntu-build-image +.PHONY: $(MAKECMDGOALS) # Export Docker buildkit options export DOCKER_BUILDKIT=1 export COMPOSE_DOCKER_CLI_BUILD=1 -kat: env-if-empty clean # This should give you a clean install - make build - make up +# Build and bring up all containers (default target) +kat: env-if-empty build up -rebuild: clean - make build - make up +# Update using git pull and bring up containers +update: pull kat -update: down pull - make build - make up - -clean: down # This should clean up all persistent data - -docker volume rm 
nl-kat-coordination_postgres-data nl-kat-coordination_bytes-data nl-kat-coordination_xtdb-data - -export version - -upgrade: fetch down # Upgrade to the latest release without losing persistent data. Usage: `make upgrade version=v1.5.0` (version is optional) -ifeq ($(version),) - version=$(shell curl --silent "https://api.github.com/repos/minvws/nl-kat-coordination/tags" | jq -r '.[].name' | grep -v "rc" | head -n 1) - make upgrade version=$$version -else - make checkout branch=$(version) - make build-all - make up -endif - -reset: - -docker-compose down --remove-orphans --volumes --timeout 0 - make up - make -C boefjes build - make -C rocky almost-flush +# Bring down containers, remove all volumes, and bring them up again +reset: clean kat +# Bring up containers up: - docker-compose up -d --force-recreate + docker-compose up --detach +# Bring down containers without data loss down: -docker-compose down +# Bring down containers and remove all volumes +clean: + -docker-compose down --timeout 0 --volumes --remove-orphans + +# Fetch the latest changes from the Git remote fetch: - -git fetch + git fetch --all --prune --tags +# Pull the latest changes from the default upstream pull: - -git pull + git pull + +# Upgrade to the latest release without losing persistent data. Usage: `make upgrade version=v1.5.0` (version is optional) +VERSION?=$(shell curl -sSf "https://api.github.com/repos/minvws/nl-kat-coordination/tags" | jq -r '[.[].name | select(. 
| contains("rc") | not)][0]') +upgrade: down fetch + git checkout $(VERSION) + make kat +# Create .env file only if it does not exist env-if-empty: ifeq ("$(wildcard .env)","") make env endif -env: # Create .env file from the env-dist with randomly generated credentials from vars annotated by "{%EXAMPLE_VAR}" - $(HIDE) cp .env-dist .env +# Create .env file from the env-dist with randomly generated credentials from vars annotated by "{%EXAMPLE_VAR}" +env: + cp .env-dist .env + echo "Initializing .env with random credentials" ifeq ($(UNAME), Darwin) # Different sed on MacOS $(HIDE) grep -o "{%\([_A-Z]*\)}" .env-dist | sort -u | while read v; do sed -i '' "s/$$v/$$(openssl rand -hex 25)/g" .env; done else $(HIDE) grep -o "{%\([_A-Z]*\)}" .env-dist | sort -u | while read v; do sed -i "s/$$v/$$(openssl rand -hex 25)/g" .env; done endif -checkout: # Usage: `make checkout branch=develop` - -git checkout $(branch) - -pull-reset: - -git reset --hard HEAD - -git pull - -build-all: # Build should prepare all other services: migrate them, seed them, etc. -ifeq ($(UNAME), Darwin) - docker-compose build --build-arg USER_UID="$$(id -u)" +# Build will prepare all services: migrate them, seed them, etc. 
+build: +ifeq ($(UNAME),Darwin) + docker-compose build --pull --parallel --build-arg USER_UID="$$(id -u)" else - docker-compose build --build-arg USER_UID="$$(id -u)" --build-arg USER_GID="$$(id -g)" + docker-compose build --pull --parallel --build-arg USER_UID="$$(id -u)" --build-arg USER_GID="$$(id -g)" endif - -build: build-all make -C rocky build make -C boefjes build +# Build Debian build image debian-build-image: docker build -t kat-debian-build-image packaging/debian +# Build Ubuntu build image ubuntu-build-image: docker build -t kat-ubuntu-build-image packaging/ubuntu diff --git a/docker-compose.yml b/docker-compose.yml index 46f01a78259..46c788fabae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -117,14 +117,7 @@ services: rocky: restart: unless-stopped depends_on: - - octopoes_api_worker - - octopoes_api - postgres - - boefje - - normalizer - - katalogus - - scheduler - - keiko ports: - "127.0.0.1:8000:80" build: diff --git a/docs/source/technical_design/localinstall.rst b/docs/source/technical_design/localinstall.rst index ee20f78cae1..925870b52be 100644 --- a/docs/source/technical_design/localinstall.rst +++ b/docs/source/technical_design/localinstall.rst @@ -107,6 +107,20 @@ Go to the directory containing openkat: $ cd nl-kat-coordination $ make update +Clean reinstallation +-------------------- + +If you want to start over with a clean slate, you can do so with the following commands: + +.. code-block:: sh + + $ cd nl-kat-coordination + $ make reset + +This removes all Docker containers and volumes, and then brings up the containers again. + +Optionally, first remove the ``.env`` file (``rm .env``) before running ``make reset`` to also reset all configuration in environment variables. This should also resolve issues such as database authentication errors (``password authentication failed``). 
+ OpenTelemetry ============= diff --git a/keiko/Dockerfile b/keiko/Dockerfile index c04482ada18..4f8cb4556fd 100644 --- a/keiko/Dockerfile +++ b/keiko/Dockerfile @@ -14,8 +14,15 @@ ENV PATH=/home/keiko/.local/bin:${PATH} # LateX dependencies RUN apt update -y \ && apt install -y --no-install-recommends \ - texlive-latex-base texlive-latex-extra texlive-latex-recommended \ - texlive-lang-european texlive-fonts-recommended chktex \ + chktex \ + fonts-recommended \ + latexmk \ + texlive-fonts-recommended \ + texlive-lang-european \ + texlive-latex-base \ + texlive-latex-extra \ + texlive-latex-recommended \ + texlive-xetex \ && rm -rf /var/lib/apt/lists/* ENV KEIKO_REPORTS_FOLDER=/reports diff --git a/keiko/debian/control b/keiko/debian/control index 9f9de60799c..d2ea29e55e7 100644 --- a/keiko/debian/control +++ b/keiko/debian/control @@ -7,6 +7,6 @@ Section: python Priority: optional Architecture: any Pre-Depends: ${misc:Pre-Depends} -Depends: ${python}, texlive-latex-base, texlive-latex-extra, texlive-latex-recommended, texlive-lang-european, texlive-fonts-recommended, chktex, ${misc:Depends}, ${shlibs:Depends} +Depends: ${python}, chktex, fonts-recommended, latexmk, texlive-fonts-recommended, texlive-lang-european, texlive-latex-base, texlive-latex-extra, texlive-latex-recommended, texlive-xetex, ${misc:Depends}, ${shlibs:Depends} Description: Keiko The PDF report generation tool for KAT. 
diff --git a/keiko/keiko/keiko.py b/keiko/keiko/keiko.py index 8362dad7952..bde12068f20 100644 --- a/keiko/keiko/keiko.py +++ b/keiko/keiko/keiko.py @@ -5,11 +5,11 @@ import shutil import subprocess import tempfile -from logging import getLogger +from logging import DEBUG, ERROR, getLogger from pathlib import Path -from typing import Dict, Set, Tuple +from typing import Any, Dict, Set, Tuple -from jinja2 import Environment, FileSystemLoader, select_autoescape +from jinja2 import Environment, FileSystemLoader from opentelemetry import trace from opentelemetry.trace import Status, StatusCode @@ -22,6 +22,39 @@ DATA_SHAPE_CLASS_NAME = "DataShape" +LATEX_SPECIAL_CHARS = str.maketrans( + { + "&": r"\&", + "%": r"\%", + "$": r"\$", + "#": r"\#", + "_": r"\_", + "{": r"\{", + "}": r"\}", + "~": r"\textasciitilde{}", + "^": r"\^{}", + "\\": r"\textbackslash{}", + "\n": "\\newline%\n", + "-": r"{-}", + "\xA0": "~", # Non-breaking space + "[": r"{[}", + "]": r"{]}", + } +) + + +def latex_escape(input: Any) -> str: + """Escape characters that are special in LaTeX. 
+ + References: + - https://github.com/JelteF/PyLaTeX/blob/ecc1e6e339a5a7be958c328403517cd547873d7e/pylatex/utils.py#L68-L100 + - http://tex.stackexchange.com/a/34586/43228 + - http://stackoverflow.com/a/16264094/2570866 + """ + if not isinstance(input, str): + input = str(input) + return input.translate(LATEX_SPECIAL_CHARS) + def baretext(input_: str) -> str: """Remove non-alphanumeric characters from a string.""" @@ -58,10 +91,10 @@ def generate_report( # init jinja2 template env = Environment( loader=FileSystemLoader(settings.templates_folder), - autoescape=select_autoescape(), variable_start_string="@@{", variable_end_string="}@@", ) + env.filters["latex_escape"] = latex_escape template = env.get_template(f"{template_name}/template.tex") if not template.filename: @@ -135,32 +168,41 @@ def generate_report( # run pdflatex cmd = [ - "pdflatex", + "latexmk", + "-xelatex", "-synctex=1", "-interaction=nonstopmode", preprocessed_tex_path.as_posix(), ] env = {**os.environ, "TEXMFVAR": directory} - for i in (1, 2): - output = subprocess.run(cmd, cwd=directory, env=env, capture_output=True, check=False) - current_span.add_event(f"Completed pdflatex run {i}") + + def log_output(level, output): + if not logger.isEnabledFor(level): + return + # prefix all lines in output + for line in output.decode("utf-8").splitlines(): + logger.log(level, "latexmk [report_id=%s] output: %s", report_id, line) + + try: + # capture all output to stdout, so that lines from stdout+stderr are in correct relative order + output = subprocess.run( + cmd, cwd=directory, env=env, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + current_span.add_event("Completed latexmk") logger.info( - "pdflatex [run=%d] [report_id=%s] [template=%s] [command=%s]", - i, + "latexmk [report_id=%s] [template=%s] [command=%s]", report_id, template_name, " ".join(cmd), ) - if output.returncode: - logger.error("stdout: %s", output.stdout.decode("utf-8")) - logger.error("stderr: %s", 
output.stderr.decode("utf-8")) - ex = Exception("Error in pdflatex run %d", i) - current_span.set_status(Status(StatusCode.ERROR)) - current_span.record_exception(ex) - raise ex - else: - logger.debug(output.stdout.decode("utf-8")) - logger.debug(output.stderr.decode("utf-8")) + log_output(DEBUG, output.stdout) + except subprocess.CalledProcessError as ex: + log_output(ERROR, ex.stdout) + err = Exception("Error in latexmk") + err.__cause__ = ex + current_span.set_status(Status(StatusCode.ERROR)) + current_span.record_exception(err) + raise err # copy result back to output folder shutil.copyfile( diff --git a/keiko/templates/bevindingenrapport/template.tex b/keiko/templates/bevindingenrapport/template.tex index 8c2dd3c0430..8e1f52e2e27 100644 --- a/keiko/templates/bevindingenrapport/template.tex +++ b/keiko/templates/bevindingenrapport/template.tex @@ -7,12 +7,10 @@ \usepackage{graphicx} \usepackage{hyperref} \usepackage{longtable} -\usepackage[utf8]{inputenc} \usepackage{lastpage} \usepackage{ragged2e} \usepackage{titlepic} \usepackage{xcolor} -\usepackage{underscore} \hypersetup{ colorlinks=true, @@ -36,7 +34,7 @@ %KEIKO-specific variables \newcommand\application{KEIKO @@{keiko_version}@@} -\newcommand\reporttitle{Bevindingenrapport voor @@{report_source_type}@@ @@{report_source_value}@@} +\newcommand\reporttitle{Bevindingenrapport voor @@{report_source_type|latex_escape}@@ @@{report_source_value|latex_escape}@@} \newcommand\tlp{AMBER} \newcommand\tlpbox{\colorbox{black}{\color{orange}TLP:AMBER}} %END-KEIKO @@ -63,11 +61,14 @@ \author{ \application{} } \titlepic{\includegraphics[width=70mm]{keiko.png}} +% To use a different font, uncomment the following lines. +% Run `fc-list` in the container to see which fonts are available. 
+%\usepackage{fontspec} +%\setmainfont{DejaVu Sans} + \begin{document} \maketitle - - \chapter{Over dit document} \section{Vertrouwelijkheid} In de informatiebeveiliging wordt gewerkt met het @@ -140,36 +141,36 @@ \section{Bevinding types} \chapter{Bevindingen} {% for finding_type_id, occurrence in findings_grouped.items()|sort(attribute='1.finding_type.risk_level_score', reverse=True) %} - \section{@@{finding_type_id}@@} + \section{@@{finding_type_id|latex_escape}@@} \subsection{Bevinding informatie} \begin{longtable}{ p{.25\textwidth} p{.75\textwidth} } - Bevinding & @@{occurrence.finding_type.id}@@ \\ - Risico niveau & @@{occurrence.finding_type.risk_level_score}@@ / 10 \\ + Bevinding & @@{occurrence.finding_type.id|latex_escape}@@ \\ + Risico niveau & @@{occurrence.finding_type.risk_level_score|latex_escape}@@ / 10 \\ {% if occurrence.finding_type.cvss %} - CVSS & @@{occurrence.finding_type.cvss}@@ \\ + CVSS & @@{occurrence.finding_type.cvss|latex_escape}@@ \\ {% endif %} - Ernst & @@{occurrence.finding_type.risk_level_severity|capitalize}@@ \\ + Ernst & @@{occurrence.finding_type.risk_level_severity|latex_escape|capitalize}@@ \\ {% if occurrence.finding_type.description %} - Beschrijving & @@{occurrence.finding_type.description}@@ \\ + Beschrijving & @@{occurrence.finding_type.description|latex_escape}@@ \\ {% endif %} {% if occurrence.finding_type.Information %} - Informatie & @@{occurrence.finding_type.Information}@@ \\ + Informatie & @@{occurrence.finding_type.Information|latex_escape}@@ \\ {% endif %} {% if occurrence.finding_type.recommendation %} - Aanbeveling & @@{occurrence.finding_type.recommendation}@@ \\ + Aanbeveling & @@{occurrence.finding_type.recommendation|latex_escape}@@ \\ {% endif %} {% if occurrence.finding_type.source %} - Bron& \href{@@{occurrence.finding_type.source}@@}{@@{occurrence.finding_type.source}@@} \\ + Bron& \href{@@{occurrence.finding_type.source|latex_escape}@@}{@@{occurrence.finding_type.source|latex_escape}@@} \\ {% endif 
%} {% if occurrence.finding_type.information_updated %} - Informatie laatst bijgewerkt & @@{occurrence.finding_type.information_updated}@@ \\ + Informatie laatst bijgewerkt & @@{occurrence.finding_type.information_updated|latex_escape}@@ \\ {% endif %} \end{longtable} \subsection{Voorvallen} {% for finding in occurrence.list %} - \subsubsection{@@{finding.ooi}@@} - @@{finding.description}@@ + \subsubsection{@@{finding.ooi|latex_escape}@@} + @@{finding.description|latex_escape}@@ {% endfor %} {% endfor %} diff --git a/keiko/templates/dns/template.tex b/keiko/templates/dns/template.tex index 79eaa94acef..9c5f4b9c2bf 100644 --- a/keiko/templates/dns/template.tex +++ b/keiko/templates/dns/template.tex @@ -7,12 +7,10 @@ \usepackage{graphicx} \usepackage{hyperref} \usepackage{longtable} -\usepackage[utf8]{inputenc} \usepackage{lastpage} \usepackage{ragged2e} \usepackage{titlepic} \usepackage{xcolor} -\usepackage{underscore} \hypersetup{ colorlinks=true, @@ -97,13 +95,13 @@ \section{Vertrouwelijkheid} \chapter{DNS Records} {% for hostname in hostnames %} -@@{hostname.name}@@ +@@{hostname.name|latex_escape}@@ \begin{center} \begin{tabular}{ lllll } Hostname & Type & TTL & Value \\\toprule \toprule {% for dns_record in hostname.dns_records %} - @@{ dns_record.hostname }@@ & @@{ dns_record.dns_record_type }@@ & @@{ dns_record.ttl }@@ & @@{ dns_record.value }@@ \\ + @@{ dns_record.hostname|latex_escape }@@ & @@{ dns_record.dns_record_type|latex_escape }@@ & @@{ dns_record.ttl|latex_escape }@@ & @@{ dns_record.value|latex_escape }@@ \\ {% endfor %} \bottomrule \end{tabular} diff --git a/keiko/templates/intel/template.tex b/keiko/templates/intel/template.tex index 79eaa94acef..9c5f4b9c2bf 100644 --- a/keiko/templates/intel/template.tex +++ b/keiko/templates/intel/template.tex @@ -7,12 +7,10 @@ \usepackage{graphicx} \usepackage{hyperref} \usepackage{longtable} -\usepackage[utf8]{inputenc} \usepackage{lastpage} \usepackage{ragged2e} \usepackage{titlepic} \usepackage{xcolor} 
-\usepackage{underscore} \hypersetup{ colorlinks=true, @@ -97,13 +95,13 @@ \section{Vertrouwelijkheid} \chapter{DNS Records} {% for hostname in hostnames %} -@@{hostname.name}@@ +@@{hostname.name|latex_escape}@@ \begin{center} \begin{tabular}{ lllll } Hostname & Type & TTL & Value \\\toprule \toprule {% for dns_record in hostname.dns_records %} - @@{ dns_record.hostname }@@ & @@{ dns_record.dns_record_type }@@ & @@{ dns_record.ttl }@@ & @@{ dns_record.value }@@ \\ + @@{ dns_record.hostname|latex_escape }@@ & @@{ dns_record.dns_record_type|latex_escape }@@ & @@{ dns_record.ttl|latex_escape }@@ & @@{ dns_record.value|latex_escape }@@ \\ {% endfor %} \bottomrule \end{tabular} diff --git a/mula/scheduler/repositories/sqlalchemy/task_store.py b/mula/scheduler/repositories/sqlalchemy/task_store.py index e244e29b611..a958a2a05e4 100644 --- a/mula/scheduler/repositories/sqlalchemy/task_store.py +++ b/mula/scheduler/repositories/sqlalchemy/task_store.py @@ -27,7 +27,8 @@ def get_tasks( status: Optional[str], min_created_at: Optional[datetime.datetime], max_created_at: Optional[datetime.datetime], - filters: Optional[List[models.Filter]], + input_ooi: Optional[str], + plugin_id: Optional[str], offset: int = 0, limit: int = 100, ) -> Tuple[List[models.Task], int]: @@ -49,9 +50,32 @@ def get_tasks( if max_created_at is not None: query = query.filter(models.TaskORM.created_at <= max_created_at) - if filters is not None: - for f in filters: - query = query.filter(models.TaskORM.p_item[f.get_field()].as_string() == f.value) + if input_ooi is not None: + if type == "boefje": + query = query.filter(models.TaskORM.p_item[["data", "input_ooi"]].as_string() == input_ooi) + elif type == "normalizer": + query = query.filter( + models.TaskORM.p_item[["data", "raw_data", "boefje_meta", "input_ooi"]].as_string() == input_ooi + ) + else: + query = query.filter( + (models.TaskORM.p_item[["data", "input_ooi"]].as_string() == input_ooi) + | ( + models.TaskORM.p_item[["data", "raw_data", 
"boefje_meta", "input_ooi"]].as_string() + == input_ooi + ) + ) + + if plugin_id is not None: + if type == "boefje": + query = query.filter(models.TaskORM.p_item[["data", "boefje", "id"]].as_string() == plugin_id) + elif type == "normalizer": + query = query.filter(models.TaskORM.p_item[["data", "normalizer", "id"]].as_string() == plugin_id) + else: + query = query.filter( + (models.TaskORM.p_item[["data", "boefje", "id"]].as_string() == plugin_id) + | (models.TaskORM.p_item[["data", "normalizer", "id"]].as_string() == plugin_id) + ) count = query.count() diff --git a/mula/scheduler/repositories/stores.py b/mula/scheduler/repositories/stores.py index e0479789942..e6347edb8ff 100644 --- a/mula/scheduler/repositories/stores.py +++ b/mula/scheduler/repositories/stores.py @@ -23,7 +23,8 @@ def get_tasks( status: Optional[str], min_created_at: Optional[datetime.datetime], max_created_at: Optional[datetime.datetime], - filters: Optional[List[models.Filter]], + input_ooi: Optional[str], + plugin_id: Optional[str], offset: int = 0, limit: int = 100, ) -> Tuple[List[models.Task], int]: diff --git a/mula/scheduler/server/server.py b/mula/scheduler/server/server.py index 970908cfa4a..6a718f3257d 100644 --- a/mula/scheduler/server/server.py +++ b/mula/scheduler/server/server.py @@ -1,6 +1,6 @@ import datetime import logging -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional import fastapi import prometheus_client @@ -229,14 +229,15 @@ def patch_scheduler(self, scheduler_id: str, item: models.Scheduler) -> Any: def list_tasks( self, request: fastapi.Request, - scheduler_id: Union[str, None] = None, - type: Union[str, None] = None, - status: Union[str, None] = None, + scheduler_id: Optional[str] = None, + type: Optional[str] = None, + status: Optional[str] = None, offset: int = 0, limit: int = 10, - min_created_at: Union[datetime.datetime, None] = None, - max_created_at: Union[datetime.datetime, None] = None, - filters: 
Optional[List[models.Filter]] = None, + min_created_at: Optional[datetime.datetime] = None, + max_created_at: Optional[datetime.datetime] = None, + input_ooi: Optional[str] = None, + plugin_id: Optional[str] = None, ) -> Any: try: if (min_created_at is not None and max_created_at is not None) and min_created_at > max_created_at: @@ -250,7 +251,8 @@ def list_tasks( limit=limit, min_created_at=min_created_at, max_created_at=max_created_at, - filters=filters, + input_ooi=input_ooi, + plugin_id=plugin_id, ) except ValueError as exc: raise fastapi.HTTPException( diff --git a/mula/tests/integration/test_api.py b/mula/tests/integration/test_api.py index a6fe5440014..0eea5a05824 100644 --- a/mula/tests/integration/test_api.py +++ b/mula/tests/integration/test_api.py @@ -404,26 +404,6 @@ def test_get_task(self): self.assertEqual(200, response_get.status_code, 200) self.assertEqual(initial_item_id, response_get.json().get("id")) - def test_get_tasks_value(self): - # Get tasks with embedded value of "test"", should return 2 items - response = self.client.get("/tasks", json=[{"field": "data__name", "operator": "eq", "value": "test"}]) - self.assertEqual(200, response.status_code) - self.assertEqual(2, len(response.json()["results"])) - - # Get tasks with embedded value of "123", should return 1 item - response = self.client.get("/tasks", json=[{"field": "data__id", "operator": "eq", "value": "123"}]) - self.assertEqual(200, response.status_code) - self.assertEqual(1, len(response.json()["results"])) - self.assertEqual("123", response.json()["results"][0]["p_item"]["data"]["id"]) - - # Get tasks with embedded value of 123 two level deep, should return 1 item - response = self.client.get( - "/tasks", json=[{"field": "data__child__name", "operator": "eq", "value": "test.child"}] - ) - self.assertEqual(200, response.status_code) - self.assertEqual(1, len(response.json()["results"])) - self.assertEqual("123.123", response.json()["results"][0]["p_item"]["data"]["child"]["id"]) - def 
test_get_tasks_min_and_max_created_at(self): # Get tasks based on datetime, both min_created_at and max_created_at, should return 2 items min_created_at = self.first_item_api.get("created_at") diff --git a/rocky/Dockerfile b/rocky/Dockerfile index 9cde5d51876..36efe7074fc 100644 --- a/rocky/Dockerfile +++ b/rocky/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /app/rocky RUN --mount=type=cache,target=/var/cache/apt \ apt-get update \ && apt-get -y upgrade \ - && apt-get install -y --no-install-recommends gettext \ + && apt-get install -y --no-install-recommends gettext netcat \ && rm -rf /var/lib/apt/lists/* # Build with "docker build --build-arg ENVIRONMENT=dev" to install dev diff --git a/rocky/Makefile b/rocky/Makefile index a0846b564fb..382dca3cf5d 100644 --- a/rocky/Makefile +++ b/rocky/Makefile @@ -9,9 +9,11 @@ export COMPOSE_DOCKER_CLI_BUILD=1 build: build-rocky build-rocky-frontend build-rocky: - docker-compose run --rm rocky make build-rocky-native +# Set DATABASE_MIGRATION=false to prevent entrypoint from running migration + docker-compose run --rm -e DATABASE_MIGRATION=false rocky make build-rocky-native build-rocky-native: + while ! 
nc -vz $$ROCKY_DB_HOST $$ROCKY_DB_PORT; do sleep 0.1; done python3 manage.py migrate -python3 manage.py createsuperuser --no-input python3 manage.py loaddata OOI_database_seed.json diff --git a/rocky/katalogus/views/plugin_detail.py b/rocky/katalogus/views/plugin_detail.py index 63ea43ce788..b8efa98eeea 100644 --- a/rocky/katalogus/views/plugin_detail.py +++ b/rocky/katalogus/views/plugin_detail.py @@ -1,5 +1,6 @@ from datetime import datetime from logging import getLogger +from typing import Dict from account.mixins import OrganizationView from django.contrib import messages @@ -40,44 +41,22 @@ class PluginDetailView(PluginSettingsListView, BoefjeMixin, TemplateView): limit_ooi_list = 9999 def get_scan_history(self) -> Page: - scheduler_id = f"{self.plugin.type}-{self.organization.code}" + list_args: Dict[str, str] = {} + list_args["scheduler_id"] = f"{self.plugin.type}-{self.organization.code}" + list_args["plugin_type"] = (self.plugin.type,) + list_args["plugin_id"] = (self.plugin.id,) + list_args["input_ooi"] = self.request.GET.get("scan_history_search") + list_args["status"] = self.request.GET.get("scan_history_status") - filters = [ - { - "field": f"data__{self.plugin.type}__id", - "operator": "eq", - "value": self.plugin.id, - } - ] + if self.request.GET.get("scan_history_from"): + list_args["min_created_at"] = datetime.strptime(self.request.GET.get("scan_history_from"), "%Y-%m-%d") - if self.request.GET.get("scan_history_search"): - filters.append( - { - "field": "data__input_ooi", - "operator": "eq", - "value": self.request.GET.get("scan_history_search"), - } - ) + if self.request.GET.get("scan_history_to"): + list_args["max_created_at"] = datetime.strptime(self.request.GET.get("scan_history_to"), "%Y-%m-%d") page = int(self.request.GET.get("scan_history_page", 1)) - status = self.request.GET.get("scan_history_status") - - min_created_at = None - if self.request.GET.get("scan_history_from"): - min_created_at = 
datetime.strptime(self.request.GET.get("scan_history_from"), "%Y-%m-%d") - - max_created_at = None - if self.request.GET.get("scan_history_to"): - max_created_at = datetime.strptime(self.request.GET.get("scan_history_to"), "%Y-%m-%d") - - scan_history = scheduler.client.get_lazy_task_list( - scheduler_id=scheduler_id, - status=status, - min_created_at=min_created_at, - max_created_at=max_created_at, - filters=filters, - ) + scan_history = scheduler.client.get_lazy_task_list(**list_args) return Paginator(scan_history, self.scan_history_limit).page(page) diff --git a/rocky/package.json b/rocky/package.json index b32957fed3c..251cbe8906d 100644 --- a/rocky/package.json +++ b/rocky/package.json @@ -1,7 +1,7 @@ { "scripts": { - "dev": "parcel watch assets/src/bundles/*.js --dist-dir assets/dist --public-url ./ --no-cache", - "build": "parcel build assets/src/bundles/*.js --dist-dir assets/dist --public-url ./ --no-cache --no-scope-hoist", + "dev": "parcel watch assets/src/bundles/*.js --dist-dir assets/dist --public-url ./", + "build": "parcel build assets/src/bundles/*.js --dist-dir assets/dist --public-url ./ --no-scope-hoist", "test": "yarn --cwd roeltje cypress open", "format": "prettier --write ." 
}, diff --git a/rocky/requirements.txt b/rocky/requirements.txt index f0aac966acf..493c84e72bf 100644 --- a/rocky/requirements.txt +++ b/rocky/requirements.txt @@ -4,7 +4,7 @@ beautifulsoup4==4.11.2 certifi==2022.12.7 chardet==5.1.0 cwe==1.6 -Django==3.2.18 +Django==3.2.19 django-formtools==2.4 django-otp==1.1.3 django-phonenumber-field==6.1.0 diff --git a/rocky/rocky/scheduler.py b/rocky/rocky/scheduler.py index 11c1520c9d2..ea4b59f9b29 100644 --- a/rocky/rocky/scheduler.py +++ b/rocky/rocky/scheduler.py @@ -125,35 +125,18 @@ class LazyTaskList: def __init__( self, scheduler_client: SchedulerClient, - scheduler_id: str, - object_type: Optional[str] = None, - status: Optional[str] = None, - min_created_at: Optional[datetime.datetime] = None, - max_created_at: Optional[datetime.datetime] = None, - filters: Optional[List[Dict]] = None, + **kwargs, ): self.scheduler_client = scheduler_client - - self.scheduler_id = scheduler_id - self.object_type = object_type - self.status = status - self.min_created_at = min_created_at - self.max_created_at = max_created_at - self.filters = filters - + self.kwargs = kwargs self._count = None @property def count(self) -> int: if self._count is None: self._count = self.scheduler_client.list_tasks( - self.scheduler_id, - type=self.object_type, limit=0, - status=self.status, - min_created_at=self.min_created_at, - max_created_at=self.max_created_at, - filters=self.filters, + **self.kwargs, ).count return self._count @@ -171,14 +154,9 @@ def __getitem__(self, key) -> List[Task]: raise TypeError("Invalid slice argument type.") res = self.scheduler_client.list_tasks( - self.scheduler_id, - type=self.object_type, limit=limit, offset=offset, - status=self.status, - min_created_at=self.min_created_at, - max_created_at=self.max_created_at, - filters=self.filters, + **self.kwargs, ) self._count = res.count @@ -192,38 +170,33 @@ def __init__(self, base_uri: str): def list_tasks( self, - scheduler_id: str, - type: Optional[str] = None, - limit: 
Optional[int] = None, - offset: Optional[int] = None, - status: Optional[str] = None, - min_created_at: Optional[datetime.datetime] = None, - max_created_at: Optional[datetime.datetime] = None, - filters: Optional[List[Dict]] = None, + **kwargs, ) -> PaginatedTasksResponse: - params = { - "scheduler_id": scheduler_id, - "type": type, - "limit": limit, - "offset": offset, - "status": status, - "min_created_at": min_created_at, - "max_created_at": max_created_at, - } - - res = self.session.get(f"{self._base_uri}/tasks", params=params, json=filters) + res = self.session.get(f"{self._base_uri}/tasks", params=kwargs) return PaginatedTasksResponse.parse_raw(res.text) def get_lazy_task_list( self, scheduler_id: str, - object_type: Optional[str] = None, + type: Optional[str] = None, status: Optional[str] = None, min_created_at: Optional[datetime.datetime] = None, max_created_at: Optional[datetime.datetime] = None, - filters: Optional[List[Dict]] = None, + input_ooi: Optional[str] = None, + plugin_id: Optional[str] = None, + boefje_name: Optional[str] = None, ) -> LazyTaskList: - return LazyTaskList(self, scheduler_id, object_type, status, min_created_at, max_created_at, filters) + return LazyTaskList( + self, + scheduler_id=scheduler_id, + type=type, + status=status, + min_created_at=min_created_at, + max_created_at=max_created_at, + input_ooi=input_ooi, + plugin_id=plugin_id, + boefje_name=boefje_name, + ) def get_task_details(self, task_id): res = self.session.get(f"{self._base_uri}/tasks/{task_id}") diff --git a/rocky/rocky/views/ooi_detail.py b/rocky/rocky/views/ooi_detail.py index 84b1b0280b0..b91f48ec705 100644 --- a/rocky/rocky/views/ooi_detail.py +++ b/rocky/rocky/views/ooi_detail.py @@ -94,39 +94,32 @@ def get_organization_indemnification(self): def get_scan_history(self) -> Page: scheduler_id = f"boefje-{self.organization.code}" - filters = [ - {"field": "data__input_ooi", "operator": "eq", "value": self.get_ooi_id()}, - ] - # FIXME: in context of ooi detail is 
doesn't make sense to search - # for an object name - if self.request.GET.get("scan_history_search"): - filters.append( - { - "field": "data__boefje__name", - "operator": "eq", - "value": self.request.GET.get("scan_history_search"), - } - ) + # for an object name, so we search on plugin id + plugin_id = self.request.GET.get("scan_history_search") page = int(self.request.GET.get("scan_history_page", 1)) status = self.request.GET.get("scan_history_status") - min_created_at = None if self.request.GET.get("scan_history_from"): min_created_at = datetime.strptime(self.request.GET.get("scan_history_from"), "%Y-%m-%d") + else: + min_created_at = None - max_created_at = None if self.request.GET.get("scan_history_to"): max_created_at = datetime.strptime(self.request.GET.get("scan_history_to"), "%Y-%m-%d") + else: + max_created_at = None scan_history = scheduler.client.get_lazy_task_list( scheduler_id=scheduler_id, status=status, min_created_at=min_created_at, max_created_at=max_created_at, - filters=filters, + type="boefje", + input_ooi=self.get_ooi_id(), + plugin_id=plugin_id, ) return Paginator(scan_history, self.scan_history_limit).page(page) diff --git a/rocky/rocky/views/tasks.py b/rocky/rocky/views/tasks.py index a4bc862ff89..6375528c833 100644 --- a/rocky/rocky/views/tasks.py +++ b/rocky/rocky/views/tasks.py @@ -65,7 +65,7 @@ def get_queryset(self): try: return client.get_lazy_task_list( scheduler_id=scheduler_id, - object_type=type_, + type=type_, status=status, min_created_at=min_created_at, max_created_at=max_created_at, diff --git a/rocky/tests/test_boefjes_tasks.py b/rocky/tests/test_boefjes_tasks.py index b16187631c8..2063b47ba90 100644 --- a/rocky/tests/test_boefjes_tasks.py +++ b/rocky/tests/test_boefjes_tasks.py @@ -20,7 +20,7 @@ def test_boefjes_tasks(rf, client_member, mocker, lazy_task_list_empty): [ call( scheduler_id="boefje-test", - object_type="boefje", + type="boefje", status=None, min_created_at=None, max_created_at=None, @@ -43,7 +43,7 @@ def 
test_tasks_view_simple(rf, client_member, mocker, lazy_task_list_with_boefje [ call( scheduler_id="boefje-test", - object_type="boefje", + type="boefje", status=None, min_created_at=None, max_created_at=None,