From c53746de5cad5222600422c2685d204d024b8eb6 Mon Sep 17 00:00:00 2001 From: Igor Ponomarev Date: Thu, 23 Nov 2023 21:37:29 +0600 Subject: [PATCH] lava_dispatcher: Make Pipeline `job` attribute always be Job Right now the that attribute can be None but only when running in unit tests. Instead of having special behavior for unit tests change the unit tests to properly initialize the Pipeline objects. Changes to classes: Pipeline __init__ First argument is now `job` and is required to be job. `job` is assigned to the attribute. Job __init__ `device` and `timeout` are now required arguments. They are assigned to attributes. To properly initialize the Job objects inside unit tests a new method was added to the `LavaDispatcherTestCase` called `create_simple_job`. The eventual goal is to make dispatcher pass the `mypy --strict` type checking. Unfortunately the pytest does not support static typing so all the unit tests that had to be changed were converted to unittest style tests. See pytest this developer comment: https://github.com/pytest-dev/pytest/issues/5981#issuecomment-1122303220 --- lava_dispatcher/action.py | 17 +- lava_dispatcher/job.py | 23 +- lava_dispatcher/parser.py | 21 +- tests/lava_dispatcher/actions/__init__.py | 5 + .../lava_dispatcher/actions/boot/__init__.py | 5 + .../actions/deploy/__init__.py | 5 + .../actions/deploy/test_apply_overlay.py | 1017 ++++++----- .../actions/deploy/test_download.py | 1550 +++++++++-------- .../actions/deploy/test_downloads.py | 202 ++- .../lava_dispatcher/actions/test/__init__.py | 5 + .../actions/test/test_monitor.py | 2 - .../actions/test/test_shell.py | 732 ++++---- .../lava_dispatcher/actions/test_commands.py | 164 +- tests/lava_dispatcher/test_auto_login.py | 6 +- tests/lava_dispatcher/test_basic.py | 102 +- tests/lava_dispatcher/test_flasher.py | 149 +- tests/lava_dispatcher/test_grub.py | 7 +- tests/lava_dispatcher/test_ipxe.py | 32 +- tests/lava_dispatcher/test_job.py | 83 +- tests/lava_dispatcher/test_kvm.py | 9 
+- tests/lava_dispatcher/test_multi.py | 19 +- tests/lava_dispatcher/test_repeat.py | 42 +- tests/lava_dispatcher/test_retries.py | 54 +- tests/lava_dispatcher/test_test_shell.py | 8 +- tests/lava_dispatcher/test_uboot.py | 10 +- 25 files changed, 2268 insertions(+), 2001 deletions(-) create mode 100644 tests/lava_dispatcher/actions/__init__.py create mode 100644 tests/lava_dispatcher/actions/boot/__init__.py create mode 100644 tests/lava_dispatcher/actions/deploy/__init__.py create mode 100644 tests/lava_dispatcher/actions/test/__init__.py diff --git a/lava_dispatcher/action.py b/lava_dispatcher/action.py index 4477e1d717..1b6d334750 100644 --- a/lava_dispatcher/action.py +++ b/lava_dispatcher/action.py @@ -13,6 +13,7 @@ import traceback import warnings from functools import reduce +from typing import TYPE_CHECKING import pexpect @@ -29,6 +30,9 @@ from lava_common.timeout import Timeout from lava_dispatcher.utils.strings import seconds_to_str +if TYPE_CHECKING: + from .job import Job + class InternalObject: """ @@ -75,7 +79,7 @@ class Pipeline: of the per-action log handler. """ - def __init__(self, parent=None, job=None, parameters=None): + def __init__(self, job: Job, parent=None, parameters=None): self.actions = [] self.parent = None self.parameters = {} if parameters is None else parameters @@ -120,13 +124,10 @@ def add_action(self, action, parameters=None): # Compute the timeout global_timeouts = [] - # FIXME: Only needed for the auto-tests - if self.job is not None: - if self.job.device is not None: - # First, the device level overrides - global_timeouts.append(self.job.device.get("timeouts", {})) - # Then job level overrides - global_timeouts.append(self.job.parameters.get("timeouts", {})) + # First, the device level overrides + global_timeouts.append(self.job.device.get("timeouts", {})) + # Then job level overrides + global_timeouts.append(self.job.parameters.get("timeouts", {})) # Set the timeout. The order is: # 1. 
global action timeout diff --git a/lava_dispatcher/job.py b/lava_dispatcher/job.py index b25a7d40e6..2c7e891c0b 100644 --- a/lava_dispatcher/job.py +++ b/lava_dispatcher/job.py @@ -3,6 +3,7 @@ # Author: Neil Williams # # SPDX-License-Identifier: GPL-2.0-or-later +from __future__ import annotations import datetime import errno @@ -11,6 +12,7 @@ import tempfile import time import traceback +from typing import TYPE_CHECKING import pytz @@ -23,6 +25,14 @@ MultinodeProtocol, ) +if TYPE_CHECKING: + from logging import Logger + from typing import Any + + from lava_common.timeout import Timeout + + from .device import Device + class Job: """ @@ -40,17 +50,24 @@ class Job: device for this job - one job, one device. """ - def __init__(self, job_id, parameters, logger): + def __init__( + self, + job_id: int, + parameters: dict[str, Any], + logger: Logger, + device: Device, + timeout: Timeout, + ): self.job_id = job_id self.logger = logger - self.device = None + self.device = device self.parameters = parameters self.__context__ = PipelineContext() self.pipeline = None self.connection = None self.triggers = [] # actions can add trigger strings to the run a diagnostic self.diagnostics = [DiagnoseNetwork] - self.timeout = None + self.timeout = timeout self.protocols = [] self.compatibility = 2 # Was the job cleaned diff --git a/lava_dispatcher/parser.py b/lava_dispatcher/parser.py index cd4c54e835..beb6897625 100644 --- a/lava_dispatcher/parser.py +++ b/lava_dispatcher/parser.py @@ -6,6 +6,8 @@ # Bring in the strategy subclass lists, ignore pylint warnings. 
# pylint: disable=unused-import +from __future__ import annotations + import lava_dispatcher.actions.boot.strategies import lava_dispatcher.actions.deploy.strategies import lava_dispatcher.actions.test.strategies @@ -81,16 +83,22 @@ class JobParser: # FIXME: needs a Schema and a check routine - def _timeouts(self, data, job): - if "job" in data.get("timeouts", {}): - duration = Timeout.parse(data["timeouts"]["job"]) - job.timeout = Timeout("job", None, duration=duration) + @classmethod + def _parse_job_timeout(cls, data: dict) -> Timeout: + timeouts_dict = data["timeouts"] + duration = Timeout.parse(timeouts_dict["job"]) + return Timeout("job", None, duration=duration) def parse(self, content, device, job_id, logger, dispatcher_config, env_dut=None): data = yaml_safe_load(content) - job = Job(job_id, data, logger) + job = Job( + job_id=job_id, + parameters=data, + logger=logger, + device=device, + timeout=self._parse_job_timeout(data), + ) test_counts = {} - job.device = device job.parameters["env_dut"] = env_dut # Load the dispatcher config job.parameters["dispatcher"] = {} @@ -106,7 +114,6 @@ def parse(self, content, device, job_id, logger, dispatcher_config, env_dut=None for item in sorted(level_tuple, key=lambda level_tuple: level_tuple[1]) ] pipeline = Pipeline(job=job) - self._timeouts(data, job) # deploy and boot classes can populate the pipeline differently depending # on the test action type they are linked with (via namespacing). 
diff --git a/tests/lava_dispatcher/actions/__init__.py b/tests/lava_dispatcher/actions/__init__.py new file mode 100644 index 0000000000..6ce56e1bd0 --- /dev/null +++ b/tests/lava_dispatcher/actions/__init__.py @@ -0,0 +1,5 @@ +# Copyright (C) 2023 Collabora Limited +# +# Author: Igor Ponomarev +# +# SPDX-License-Identifier: GPL-2.0-or-later diff --git a/tests/lava_dispatcher/actions/boot/__init__.py b/tests/lava_dispatcher/actions/boot/__init__.py new file mode 100644 index 0000000000..6ce56e1bd0 --- /dev/null +++ b/tests/lava_dispatcher/actions/boot/__init__.py @@ -0,0 +1,5 @@ +# Copyright (C) 2023 Collabora Limited +# +# Author: Igor Ponomarev +# +# SPDX-License-Identifier: GPL-2.0-or-later diff --git a/tests/lava_dispatcher/actions/deploy/__init__.py b/tests/lava_dispatcher/actions/deploy/__init__.py new file mode 100644 index 0000000000..6ce56e1bd0 --- /dev/null +++ b/tests/lava_dispatcher/actions/deploy/__init__.py @@ -0,0 +1,5 @@ +# Copyright (C) 2023 Collabora Limited +# +# Author: Igor Ponomarev +# +# SPDX-License-Identifier: GPL-2.0-or-later diff --git a/tests/lava_dispatcher/actions/deploy/test_apply_overlay.py b/tests/lava_dispatcher/actions/deploy/test_apply_overlay.py index 0e3f1f13e7..998d80fa07 100644 --- a/tests/lava_dispatcher/actions/deploy/test_apply_overlay.py +++ b/tests/lava_dispatcher/actions/deploy/test_apply_overlay.py @@ -3,496 +3,585 @@ # Author: RĂ©mi Duraffort # # SPDX-License-Identifier: GPL-2.0-or-later +from __future__ import annotations -import logging +from unittest.mock import MagicMock +from unittest.mock import call as mock_call +from unittest.mock import patch -import pytest - -from lava_common.exceptions import JobError, LAVABug +from lava_common.exceptions import JobError from lava_dispatcher.actions.deploy.apply_overlay import AppendOverlays -from lava_dispatcher.job import Job - - -def test_append_overlays_validate(): - # 1/ Working setup - params = { - "format": "cpio.newc", - "overlays": { - "modules": { - "url": 
"http://example.com/modules.tar.xz", - "compression": "xz", - "format": "tar", - "path": "/", - } - }, - } - action = AppendOverlays("rootfs", params) - action.validate() - - # 2/ Check errors - with pytest.raises(JobError) as exc: - del params["format"] - action.validate() - assert exc.match("Unsupported image format None") - with pytest.raises(JobError) as exc: - params["overlays"]["modules"]["path"] = "../../" - action.validate() - assert exc.match("Invalid 'path': '../../'") +from ...test_basic import LavaDispatcherTestCase - with pytest.raises(JobError) as exc: - del params["overlays"]["modules"]["path"] - action.validate() - assert exc.match("Missing 'path' for 'overlays.modules'") - with pytest.raises(JobError) as exc: - params["overlays"]["modules"]["format"] = "git" +class TestApplyOverlay(LavaDispatcherTestCase): + def test_append_overlays_validate(self): + # 1/ Working setup + params = { + "format": "cpio.newc", + "overlays": { + "modules": { + "url": "http://example.com/modules.tar.xz", + "compression": "xz", + "format": "tar", + "path": "/", + } + }, + } + action = AppendOverlays("rootfs", params) action.validate() - assert exc.match("Invalid 'format' \\('git'\\) for 'overlays.modules'") - with pytest.raises(JobError) as exc: - params["overlays"] = "" - action.validate() - assert exc.match("'overlays' is not a dictionary") + # 2/ Check errors + with self.assertRaisesRegex(JobError, "Unsupported image format None"): + del params["format"] + action.validate() + + with self.assertRaisesRegex(JobError, "Invalid 'path': '../../'"): + params["overlays"]["modules"]["path"] = "../../" + action.validate() + + with self.assertRaisesRegex(JobError, "Missing 'path' for 'overlays.modules'"): + del params["overlays"]["modules"]["path"] + action.validate() + + with self.assertRaisesRegex( + JobError, "Invalid 'format' \\('git'\\) for 'overlays.modules'" + ): + params["overlays"]["modules"]["format"] = "git" + action.validate() + + with 
self.assertRaisesRegex(JobError, "'overlays' is not a dictionary"): + params["overlays"] = "" + action.validate() + + with self.assertRaisesRegex(JobError, "Missing 'overlays' dictionary"): + del params["overlays"] + action.validate() + + def test_append_overlays_validate_sparse(self): + params = { + "format": "cpio.newc", + "overlays": { + "modules": { + "url": "http://example.com/modules.tar.xz", + "compression": "xz", + "format": "tar", + "path": "/", + } + }, + } - with pytest.raises(JobError) as exc: - del params["overlays"] + action = AppendOverlays("rootfs", params) action.validate() - assert exc.match("Missing 'overlays' dictionary") - - params = { - "format": "cpio.newc", - "overlays": { - "modules": { - "url": "http://example.com/modules.tar.xz", - "compression": "xz", - "format": "tar", - "path": "/", - } - }, - } + with self.assertRaisesRegex( + JobError, "sparse=True is only available for ext4 images" + ): + params["sparse"] = True + action.validate() + + def test_append_overlays_run(self): + params = { + "format": "cpio.newc", + "overlays": { + "modules": { + "url": "http://example.com/modules.tar.xz", + "compression": "xz", + "format": "tar", + "path": "/", + } + }, + } + action = AppendOverlays("rootfs", params) + action.update_cpio = MagicMock() + action.update_guestfs = MagicMock() + action.update_tar = MagicMock() + self.assertIsNone(action.run(None, 0)) + action.update_cpio.assert_called_once_with() + + params["format"] = "ext4" + self.assertIsNone(action.run(None, 0)) + action.update_guestfs.assert_called_once_with() + + params["format"] = "tar" + self.assertIsNone(action.run(None, 0)) + action.update_tar.assert_called_once_with() + + def test_append_overlays_update_cpio(self): + job = self.create_simple_job() + tmp_dir_path = self.create_temporary_directory() + + params = { + "format": "cpio.newc", + "overlays": { + "modules": { + "url": "http://example.com/modules.tar.xz", + "compression": "xz", + "format": "tar", + "path": "/", + } + }, + } 
- action = AppendOverlays("rootfs", params) - action.validate() - with pytest.raises(JobError) as exc: - params["sparse"] = True - action.validate() - assert exc.match("sparse=True is only available for ext4 images") - - -def test_append_overlays_run(mocker): - params = { - "format": "cpio.newc", - "overlays": { - "modules": { - "url": "http://example.com/modules.tar.xz", - "compression": "xz", - "format": "tar", - "path": "/", - } - }, - } - action = AppendOverlays("rootfs", params) - action.update_cpio = mocker.stub() - action.update_guestfs = mocker.stub() - action.update_tar = mocker.stub() - assert action.run(None, 0) is None - action.update_cpio.assert_called_once_with() - - params["format"] = "ext4" - assert action.run(None, 0) is None - action.update_guestfs.assert_called_once_with() - - params["format"] = "tar" - assert action.run(None, 0) is None - action.update_tar.assert_called_once_with() - - -def test_append_overlays_update_cpio(caplog, mocker, tmp_path): - caplog.set_level(logging.DEBUG) - params = { - "format": "cpio.newc", - "overlays": { - "modules": { - "url": "http://example.com/modules.tar.xz", - "compression": "xz", - "format": "tar", - "path": "/", - } - }, - } - - action = AppendOverlays("rootfs", params) - action.job = Job(1234, {}, None) - action.parameters = { - "rootfs": {"url": "http://example.com/rootfs.cpio.gz", **params}, - "namespace": "common", - } - action.data = { - "common": { - "download-action": { - "rootfs": { - "file": str(tmp_path / "rootfs.cpio.gz"), - "compression": "gz", - "decompressed": False, - }, - "rootfs.modules": {"file": str(tmp_path / "modules.tar")}, - } + action = AppendOverlays("rootfs", params) + action.job = job + action.parameters = { + "rootfs": {"url": "http://example.com/rootfs.cpio.gz", **params}, + "namespace": "common", } - } - action.mkdtemp = lambda: str(tmp_path) - decompress_file = mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.decompress_file" - ) - uncpio = 
mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.uncpio") - unlink = mocker.patch("os.unlink") - untar_file = mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.untar_file") - cpio = mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.cpio") - compress_file = mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.compress_file" - ) - - action.update_cpio() - - decompress_file.assert_called_once_with(str(tmp_path / "rootfs.cpio.gz"), "gz") - uncpio.assert_called_once_with(decompress_file(), str(tmp_path)) - unlink.assert_called_once_with(decompress_file()) - untar_file.assert_called_once_with( - str(tmp_path / "modules.tar"), str(tmp_path) + "/" - ) - cpio.assert_called_once_with(str(tmp_path), decompress_file()) - compress_file.assert_called_once_with(decompress_file(), "gz") - - assert caplog.record_tuples == [ - ("dispatcher", 20, f"Modifying '{tmp_path}/rootfs.cpio.gz'"), - ("dispatcher", 10, "* decompressing (gz)"), - ("dispatcher", 10, f"* extracting {decompress_file()}"), - ("dispatcher", 10, "Overlays:"), - ( - "dispatcher", - 10, - f"- rootfs.modules: untar '{tmp_path}/modules.tar' to '{tmp_path}/'", - ), - ("dispatcher", 10, f"* archiving {decompress_file()}"), - ("dispatcher", 10, "* compressing (gz)"), - ] - - -def test_append_overlays_update_guestfs(caplog, mocker, tmp_path): - caplog.set_level(logging.DEBUG) - params = { - "format": "ext4", - "overlays": { - "modules": { - "url": "http://example.com/modules.tar.xz", - "compression": "xz", - "format": "tar", - "path": "/lib", - } - }, - } - - action = AppendOverlays("rootfs", params) - action.job = Job(1234, {}, None) - action.parameters = { - "rootfs": {"url": "http://example.com/rootff.ext4", **params}, - "namespace": "common", - } - action.data = { - "common": { - "download-action": { - "rootfs": { - "file": str(tmp_path / "rootfs.ext4"), - "compression": "gz", - "decompressed": True, - }, - "rootfs.modules": {"file": str(tmp_path / "modules.tar")}, + action.data = { + 
"common": { + "download-action": { + "rootfs": { + "file": str(tmp_dir_path / "rootfs.cpio.gz"), + "compression": "gz", + "decompressed": False, + }, + "rootfs.modules": {"file": str(tmp_dir_path / "modules.tar")}, + } } } - } - - guestfs = mocker.MagicMock() - guestfs.add_drive = mocker.MagicMock() - mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.guestfs.GuestFS", guestfs - ) - action.update_guestfs() - - guestfs.assert_called_once_with(python_return_dict=True) - guestfs().launch.assert_called_once_with() - guestfs().list_devices.assert_called_once_with() - guestfs().add_drive.assert_called_once_with(str(tmp_path / "rootfs.ext4")) - guestfs().mount.assert_called_once_with(guestfs().list_devices()[0], "/") - guestfs().mkdir_p.assert_called_once_with("/lib") - guestfs().tar_in.assert_called_once_with( - str(tmp_path / "modules.tar"), "/lib", compress=None - ) - assert caplog.record_tuples == [ - ("dispatcher", 20, f"Modifying '{tmp_path}/rootfs.ext4'"), - ("dispatcher", 10, "Overlays:"), - ("dispatcher", 10, f"- rootfs.modules: '{tmp_path}/modules.tar' to '/lib'"), - ] - - -def test_append_lava_overlay_update_tar(caplog, mocker, tmp_path): - caplog.set_level(logging.DEBUG) - params = { - "format": "tar", - "overlays": { - "modules": { - "url": "http://example.com/modules.tar.xz", - "compression": "xz", - "format": "tar", - "path": "/", - } - }, - } - - action = AppendOverlays("nfsrootfs", params) - action.job = Job(1234, {}, None) - action.parameters = { - "nfsrootfs": {"url": "http://example.com/rootfs.tar.gz", **params}, - "namespace": "common", - } - action.data = { - "common": { - "download-action": { - "nfsrootfs": { - "file": str(tmp_path / "rootfs.tar.gz"), - "compression": "gz", - "decompressed": False, - }, - "nfsrootfs.modules": {"file": str(tmp_path / "modules.tar")}, + action.mkdtemp = MagicMock(return_value=str(tmp_dir_path)) + + with patch( + "lava_dispatcher.actions.deploy.apply_overlay.decompress_file" + ) as decompress_file_mock, 
patch( + "lava_dispatcher.actions.deploy.apply_overlay.uncpio" + ) as uncpio_mock, patch( + "os.unlink" + ) as unlink_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.untar_file" + ) as untar_file_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.cpio" + ) as cpio_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.compress_file" + ) as compress_file_mock, self.assertLogs( + action.logger, level="DEBUG" + ) as action_logs: + action.update_cpio() + + decompress_file_mock.assert_called_once_with( + str(tmp_dir_path / "rootfs.cpio.gz"), "gz" + ) + uncpio_mock.assert_called_once_with(decompress_file_mock(), str(tmp_dir_path)) + unlink_mock.assert_called_once_with(decompress_file_mock()) + untar_file_mock.assert_called_once_with( + str(tmp_dir_path / "modules.tar"), str(tmp_dir_path) + "/" + ) + cpio_mock.assert_called_once_with(str(tmp_dir_path), decompress_file_mock()) + compress_file_mock.assert_called_once_with(decompress_file_mock(), "gz") + + self.assertEqual( + [(r.name, r.levelno, r.message) for r in action_logs.records], + [ + ("dispatcher", 20, f"Modifying '{tmp_dir_path}/rootfs.cpio.gz'"), + ("dispatcher", 10, "* decompressing (gz)"), + ("dispatcher", 10, f"* extracting {decompress_file_mock()}"), + ("dispatcher", 10, "Overlays:"), + ( + "dispatcher", + 10, + ( + f"- rootfs.modules: untar '{tmp_dir_path}" + f"/modules.tar' to '{tmp_dir_path}/'" + ), + ), + ("dispatcher", 10, f"* archiving {decompress_file_mock()}"), + ("dispatcher", 10, "* compressing (gz)"), + ], + ) + + def test_append_overlays_update_guestfs(self): + job = self.create_simple_job() + tmp_dir_path = self.create_temporary_directory() + + params = { + "format": "ext4", + "overlays": { + "modules": { + "url": "http://example.com/modules.tar.xz", + "compression": "xz", + "format": "tar", + "path": "/lib", + } }, } - } - action.mkdtemp = lambda: str(tmp_path) - decompress_file = mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.decompress_file" - ) - 
untar_file = mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.untar_file") - unlink = mocker.patch("os.unlink") - create_tarfile = mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.create_tarfile" - ) - compress_file = mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.compress_file" - ) - - action.update_tar() - - decompress_file.assert_called_once_with(str(tmp_path / "rootfs.tar.gz"), "gz") - assert untar_file.mock_calls == [ - mocker.call(decompress_file(), str(tmp_path)), - mocker.call(str(tmp_path / "modules.tar"), str(tmp_path) + "/"), - ] - unlink.assert_called_once_with(decompress_file()) - - create_tarfile.assert_called_once_with( - str(tmp_path), decompress_file(), arcname="." - ) - compress_file.assert_called_once_with(decompress_file(), "gz") - - assert caplog.record_tuples == [ - ("dispatcher", 20, f"Modifying '{tmp_path}/rootfs.tar.gz'"), - ("dispatcher", 10, "* decompressing (gz)"), - ("dispatcher", 10, f"* extracting {decompress_file()}"), - ("dispatcher", 10, "Overlays:"), - ( - "dispatcher", - 10, - f"- nfsrootfs.modules: untar '{tmp_path}/modules.tar' to '{tmp_path}/'", - ), - ("dispatcher", 10, f"* archiving {decompress_file()}"), - ("dispatcher", 10, "* compressing (gz)"), - ] - - -def test_append_overlays_update_guestfs_sparse(caplog, mocker, tmp_path): - caplog.set_level(logging.DEBUG) - params = { - "format": "ext4", - "sparse": True, - "overlays": { - "modules": { - "url": "http://example.com/modules.tar.xz", - "compression": "xz", - "format": "tar", - "path": "/lib", + + action = AppendOverlays("rootfs", params) + action.job = job + action.parameters = { + "rootfs": {"url": "http://example.com/rootff.ext4", **params}, + "namespace": "common", + } + action.data = { + "common": { + "download-action": { + "rootfs": { + "file": str(tmp_dir_path / "rootfs.ext4"), + "compression": "gz", + "decompressed": True, + }, + "rootfs.modules": {"file": str(tmp_dir_path / "modules.tar")}, + } } - }, - } - - action = 
AppendOverlays("rootfs", params) - action.job = Job(1234, {}, None) - action.parameters = { - "rootfs": {"url": "http://example.com/rootff.ext4", **params}, - "namespace": "common", - } - action.data = { - "common": { - "download-action": { - "rootfs": { - "file": str(tmp_path / "rootfs.ext4"), - "compression": "gz", - "decompressed": True, + } + + with patch( + "lava_dispatcher.actions.deploy.apply_overlay.guestfs.GuestFS" + ) as guestfs_mock, self.assertLogs(action.logger, level="DEBUG") as action_logs: + action.update_guestfs() + + guestfs_mock.assert_called_once_with(python_return_dict=True) + guestfs_mock().launch.assert_called_once_with() + guestfs_mock().list_devices.assert_called_once_with() + guestfs_mock().add_drive.assert_called_once_with( + str(tmp_dir_path / "rootfs.ext4") + ) + guestfs_mock().mount.assert_called_once_with( + guestfs_mock().list_devices()[0], "/" + ) + guestfs_mock().mkdir_p.assert_called_once_with("/lib") + guestfs_mock().tar_in.assert_called_once_with( + str(tmp_dir_path / "modules.tar"), "/lib", compress=None + ) + self.assertEqual( + [(r.name, r.levelno, r.message) for r in action_logs.records], + [ + ("dispatcher", 20, f"Modifying '{tmp_dir_path}/rootfs.ext4'"), + ("dispatcher", 10, "Overlays:"), + ( + "dispatcher", + 10, + f"- rootfs.modules: '{tmp_dir_path}/modules.tar' to '/lib'", + ), + ], + ) + + def test_append_lava_overlay_update_tar(self): + job = self.create_simple_job() + tmp_dir_path = self.create_temporary_directory() + + params = { + "format": "tar", + "overlays": { + "modules": { + "url": "http://example.com/modules.tar.xz", + "compression": "xz", + "format": "tar", + "path": "/", + } + }, + } + + action = AppendOverlays("nfsrootfs", params) + action.job = job + action.parameters = { + "nfsrootfs": {"url": "http://example.com/rootfs.tar.gz", **params}, + "namespace": "common", + } + action.data = { + "common": { + "download-action": { + "nfsrootfs": { + "file": str(tmp_dir_path / "rootfs.tar.gz"), + "compression": 
"gz", + "decompressed": False, + }, + "nfsrootfs.modules": {"file": str(tmp_dir_path / "modules.tar")}, }, - "rootfs.modules": {"file": str(tmp_path / "modules.tar")}, } } - } - action.run_cmd = mocker.MagicMock() - replace = mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.os.replace") - - guestfs = mocker.MagicMock() - guestfs.add_drive = mocker.MagicMock() - mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.guestfs.GuestFS", guestfs - ) - action.update_guestfs() - - guestfs.assert_called_once_with(python_return_dict=True) - guestfs().launch.assert_called_once_with() - guestfs().list_devices.assert_called_once_with() - guestfs().add_drive.assert_called_once_with(str(tmp_path / "rootfs.ext4")) - guestfs().mount.assert_called_once_with(guestfs().list_devices()[0], "/") - guestfs().mkdir_p.assert_called_once_with("/lib") - guestfs().tar_in.assert_called_once_with( - str(tmp_path / "modules.tar"), "/lib", compress=None - ) - assert action.run_cmd.mock_calls == [ - mocker.call( + action.mkdtemp = MagicMock(return_value=str(tmp_dir_path)) + + with patch( + "lava_dispatcher.actions.deploy.apply_overlay.decompress_file" + ) as decompress_file_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.untar_file" + ) as untar_file_mock, patch( + "os.unlink" + ) as unlink_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.create_tarfile" + ) as create_tarfile_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.compress_file" + ) as compress_file_mock, self.assertLogs( + action.logger, level="DEBUG" + ) as action_logs: + action.update_tar() + + decompress_file_mock.assert_called_once_with( + str(tmp_dir_path / "rootfs.tar.gz"), "gz" + ) + self.assertEqual( + untar_file_mock.mock_calls, [ - "/usr/bin/simg2img", - f"{tmp_path}/rootfs.ext4", - f"{tmp_path}/rootfs.ext4.non-sparse", + mock_call(decompress_file_mock(), str(tmp_dir_path)), + mock_call(str(tmp_dir_path / "modules.tar"), str(tmp_dir_path) + "/"), ], - error_msg=f"simg2img 
failed for {tmp_path}/rootfs.ext4", - ), - mocker.call( + ) + unlink_mock.assert_called_once_with(decompress_file_mock()) + + create_tarfile_mock.assert_called_once_with( + str(tmp_dir_path), decompress_file_mock(), arcname="." + ) + compress_file_mock.assert_called_once_with(decompress_file_mock(), "gz") + + self.assertEqual( + [(r.name, r.levelno, r.message) for r in action_logs.records], [ - "/usr/bin/img2simg", - f"{tmp_path}/rootfs.ext4", - f"{tmp_path}/rootfs.ext4.sparse", + ("dispatcher", 20, f"Modifying '{tmp_dir_path}/rootfs.tar.gz'"), + ("dispatcher", 10, "* decompressing (gz)"), + ("dispatcher", 10, f"* extracting {decompress_file_mock()}"), + ("dispatcher", 10, "Overlays:"), + ( + "dispatcher", + 10, + ( + f"- nfsrootfs.modules: untar '{tmp_dir_path}" + f"/modules.tar' to '{tmp_dir_path}/'" + ), + ), + ("dispatcher", 10, f"* archiving {decompress_file_mock()}"), + ("dispatcher", 10, "* compressing (gz)"), ], - error_msg=f"img2simg failed for {tmp_path}/rootfs.ext4", - ), - ] - assert replace.mock_calls == [ - mocker.call(f"{tmp_path}/rootfs.ext4.non-sparse", f"{tmp_path}/rootfs.ext4"), - mocker.call(f"{tmp_path}/rootfs.ext4.sparse", f"{tmp_path}/rootfs.ext4"), - ] - - assert caplog.record_tuples == [ - ("dispatcher", 20, f"Modifying '{tmp_path}/rootfs.ext4'"), - ("dispatcher", 10, f"Calling simg2img on '{tmp_path}/rootfs.ext4'"), - ("dispatcher", 10, "Overlays:"), - ("dispatcher", 10, f"- rootfs.modules: '{tmp_path}/modules.tar' to '/lib'"), - ("dispatcher", 10, f"Calling img2simg on '{tmp_path}/rootfs.ext4'"), - ] - - -def test_append_lava_overlay_update_cpio(caplog, mocker, tmp_path): - caplog.set_level(logging.DEBUG) - params = {"format": "cpio.newc", "overlays": {"lava": True}} - - action = AppendOverlays("rootfs", params) - action.job = Job(1234, {}, None) - action.parameters = { - "rootfs": {"url": "http://example.com/rootfs.cpio.gz", **params}, - "namespace": "common", - } - action.data = { - "common": { - "compress-overlay": {"output": {"file": 
str(tmp_path / "overlay.tar.gz")}}, - "download-action": { - "rootfs": { - "file": str(tmp_path / "rootfs.cpio.gz"), - "compression": "gz", - "decompressed": False, + ) + + def test_append_overlays_update_guestfs_sparse(self): + job = self.create_simple_job() + tmp_dir_path = self.create_temporary_directory() + + params = { + "format": "ext4", + "sparse": True, + "overlays": { + "modules": { + "url": "http://example.com/modules.tar.xz", + "compression": "xz", + "format": "tar", + "path": "/lib", } }, } - } - action.mkdtemp = lambda: str(tmp_path) - decompress_file = mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.decompress_file" - ) - uncpio = mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.uncpio") - unlink = mocker.patch("os.unlink") - untar_file = mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.untar_file") - cpio = mocker.patch("lava_dispatcher.actions.deploy.apply_overlay.cpio") - compress_file = mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.compress_file" - ) - - action.update_cpio() - - decompress_file.assert_called_once_with(str(tmp_path / "rootfs.cpio.gz"), "gz") - uncpio.assert_called_once_with(decompress_file(), str(tmp_path)) - unlink.assert_called_once_with(decompress_file()) - untar_file.assert_called_once_with( - str(tmp_path / "overlay.tar.gz"), str(tmp_path) + "/" - ) - cpio.assert_called_once_with(str(tmp_path), decompress_file()) - compress_file.assert_called_once_with(decompress_file(), "gz") - - assert caplog.record_tuples == [ - ("dispatcher", 20, f"Modifying '{tmp_path}/rootfs.cpio.gz'"), - ("dispatcher", 10, "* decompressing (gz)"), - ("dispatcher", 10, f"* extracting {decompress_file()}"), - ("dispatcher", 10, "Overlays:"), - ( - "dispatcher", - 10, - f"- rootfs.lava: untar '{tmp_path}/overlay.tar.gz' to '{tmp_path}/'", - ), - ("dispatcher", 10, f"* archiving {decompress_file()}"), - ("dispatcher", 10, "* compressing (gz)"), - ] - - -def test_append_lava_overlay_update_guestfs(caplog, 
mocker, tmp_path): - caplog.set_level(logging.DEBUG) - params = {"format": "ext4", "overlays": {"lava": True}} - - action = AppendOverlays("rootfs", params) - action.job = Job(1234, {}, None) - action.parameters = { - "rootfs": {"url": "http://example.com/rootff.ext4", **params}, - "namespace": "common", - } - action.data = { - "common": { - "compress-overlay": {"output": {"file": str(tmp_path / "overlay.tar.gz")}}, - "download-action": { - "rootfs": { - "file": str(tmp_path / "rootfs.ext4"), - "compression": "gz", - "decompressed": True, + + action = AppendOverlays("rootfs", params) + action.job = job + action.parameters = { + "rootfs": {"url": "http://example.com/rootff.ext4", **params}, + "namespace": "common", + } + action.data = { + "common": { + "download-action": { + "rootfs": { + "file": str(tmp_dir_path / "rootfs.ext4"), + "compression": "gz", + "decompressed": True, + }, + "rootfs.modules": {"file": str(tmp_dir_path / "modules.tar")}, } - }, + } } - } - - guestfs = mocker.MagicMock() - guestfs.add_drive = mocker.MagicMock() - mocker.patch( - "lava_dispatcher.actions.deploy.apply_overlay.guestfs.GuestFS", guestfs - ) - action.update_guestfs() - - guestfs.assert_called_once_with(python_return_dict=True) - guestfs().launch.assert_called_once_with() - guestfs().list_devices.assert_called_once_with() - guestfs().add_drive.assert_called_once_with(str(tmp_path / "rootfs.ext4")) - guestfs().mount.assert_called_once_with(guestfs().list_devices()[0], "/") - guestfs().mkdir_p.assert_called_once_with("/") - guestfs().tar_in.assert_called_once_with( - str(tmp_path / "overlay.tar.gz"), "/", compress="gzip" - ) - assert caplog.record_tuples == [ - ("dispatcher", 20, f"Modifying '{tmp_path}/rootfs.ext4'"), - ("dispatcher", 10, "Overlays:"), - ("dispatcher", 10, f"- rootfs.lava: '{tmp_path}/overlay.tar.gz' to '/'"), - ] + action.run_cmd = MagicMock() + + with patch( + "lava_dispatcher.actions.deploy.apply_overlay.guestfs.GuestFS" + ) as guestfs_mock, patch( + 
"lava_dispatcher.actions.deploy.apply_overlay.os.replace" + ) as replace_mock, self.assertLogs( + action.logger, level="DEBUG" + ) as action_logs: + action.update_guestfs() + + guestfs_mock.assert_called_once_with(python_return_dict=True) + guestfs_mock().launch.assert_called_once_with() + guestfs_mock().list_devices.assert_called_once_with() + guestfs_mock().add_drive.assert_called_once_with( + str(tmp_dir_path / "rootfs.ext4") + ) + guestfs_mock().mount.assert_called_once_with( + guestfs_mock().list_devices()[0], "/" + ) + guestfs_mock().mkdir_p.assert_called_once_with("/lib") + guestfs_mock().tar_in.assert_called_once_with( + str(tmp_dir_path / "modules.tar"), "/lib", compress=None + ) + self.assertEqual( + action.run_cmd.mock_calls, + [ + mock_call( + [ + "/usr/bin/simg2img", + f"{tmp_dir_path}/rootfs.ext4", + f"{tmp_dir_path}/rootfs.ext4.non-sparse", + ], + error_msg=f"simg2img failed for {tmp_dir_path}/rootfs.ext4", + ), + mock_call( + [ + "/usr/bin/img2simg", + f"{tmp_dir_path}/rootfs.ext4", + f"{tmp_dir_path}/rootfs.ext4.sparse", + ], + error_msg=f"img2simg failed for {tmp_dir_path}/rootfs.ext4", + ), + ], + ) + self.assertEqual( + replace_mock.mock_calls, + [ + mock_call( + f"{tmp_dir_path}/rootfs.ext4.non-sparse", + f"{tmp_dir_path}/rootfs.ext4", + ), + mock_call( + f"{tmp_dir_path}/rootfs.ext4.sparse", f"{tmp_dir_path}/rootfs.ext4" + ), + ], + ) + + self.assertEqual( + [(r.name, r.levelno, r.message) for r in action_logs.records], + [ + ("dispatcher", 20, f"Modifying '{tmp_dir_path}/rootfs.ext4'"), + ("dispatcher", 10, f"Calling simg2img on '{tmp_dir_path}/rootfs.ext4'"), + ("dispatcher", 10, "Overlays:"), + ( + "dispatcher", + 10, + f"- rootfs.modules: '{tmp_dir_path}/modules.tar' to '/lib'", + ), + ("dispatcher", 10, f"Calling img2simg on '{tmp_dir_path}/rootfs.ext4'"), + ], + ) + + def test_append_lava_overlay_update_cpio(self): + job = self.create_simple_job() + tmp_dir_path = self.create_temporary_directory() + + params = {"format": "cpio.newc", 
"overlays": {"lava": True}} + + action = AppendOverlays("rootfs", params) + action.job = job + action.parameters = { + "rootfs": {"url": "http://example.com/rootfs.cpio.gz", **params}, + "namespace": "common", + } + action.data = { + "common": { + "compress-overlay": { + "output": {"file": str(tmp_dir_path / "overlay.tar.gz")} + }, + "download-action": { + "rootfs": { + "file": str(tmp_dir_path / "rootfs.cpio.gz"), + "compression": "gz", + "decompressed": False, + } + }, + } + } + action.mkdtemp = MagicMock(return_value=str(tmp_dir_path)) + with patch( + "lava_dispatcher.actions.deploy.apply_overlay.decompress_file" + ) as decompress_file_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.uncpio" + ) as uncpio_mock, patch( + "os.unlink" + ) as unlick_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.untar_file" + ) as untar_file_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.cpio" + ) as cpio_mock, patch( + "lava_dispatcher.actions.deploy.apply_overlay.compress_file" + ) as compress_file_mock, self.assertLogs( + action.logger, level="DEBUG" + ) as action_logs: + action.update_cpio() + + decompress_file_mock.assert_called_once_with( + str(tmp_dir_path / "rootfs.cpio.gz"), "gz" + ) + uncpio_mock.assert_called_once_with(decompress_file_mock(), str(tmp_dir_path)) + unlick_mock.assert_called_once_with(decompress_file_mock()) + untar_file_mock.assert_called_once_with( + str(tmp_dir_path / "overlay.tar.gz"), str(tmp_dir_path) + "/" + ) + cpio_mock.assert_called_once_with(str(tmp_dir_path), decompress_file_mock()) + compress_file_mock.assert_called_once_with(decompress_file_mock(), "gz") + + self.assertEqual( + [(r.name, r.levelno, r.message) for r in action_logs.records], + [ + ("dispatcher", 20, f"Modifying '{tmp_dir_path}/rootfs.cpio.gz'"), + ("dispatcher", 10, "* decompressing (gz)"), + ("dispatcher", 10, f"* extracting {decompress_file_mock()}"), + ("dispatcher", 10, "Overlays:"), + ( + "dispatcher", + 10, + ( + f"- rootfs.lava: 
untar '{tmp_dir_path}" + f"/overlay.tar.gz' to '{tmp_dir_path}/'" + ), + ), + ("dispatcher", 10, f"* archiving {decompress_file_mock()}"), + ("dispatcher", 10, "* compressing (gz)"), + ], + ) + + def test_append_lava_overlay_update_guestfs(self): + job = self.create_simple_job() + tmp_dir_path = self.create_temporary_directory() + + params = {"format": "ext4", "overlays": {"lava": True}} + + action = AppendOverlays("rootfs", params) + action.job = job + action.parameters = { + "rootfs": {"url": "http://example.com/rootff.ext4", **params}, + "namespace": "common", + } + action.data = { + "common": { + "compress-overlay": { + "output": {"file": str(tmp_dir_path / "overlay.tar.gz")} + }, + "download-action": { + "rootfs": { + "file": str(tmp_dir_path / "rootfs.ext4"), + "compression": "gz", + "decompressed": True, + } + }, + } + } + + with patch( + "lava_dispatcher.actions.deploy.apply_overlay.guestfs.GuestFS" + ) as guestfs_mock, self.assertLogs(action.logger, level="DEBUG") as action_logs: + action.update_guestfs() + + guestfs_mock.assert_called_once_with(python_return_dict=True) + guestfs_mock().launch.assert_called_once_with() + guestfs_mock().list_devices.assert_called_once_with() + guestfs_mock().add_drive.assert_called_once_with( + str(tmp_dir_path / "rootfs.ext4") + ) + guestfs_mock().mount.assert_called_once_with( + guestfs_mock().list_devices()[0], "/" + ) + guestfs_mock().mkdir_p.assert_called_once_with("/") + guestfs_mock().tar_in.assert_called_once_with( + str(tmp_dir_path / "overlay.tar.gz"), "/", compress="gzip" + ) + self.assertEqual( + [(r.name, r.levelno, r.message) for r in action_logs.records], + [ + ("dispatcher", 20, f"Modifying '{tmp_dir_path}/rootfs.ext4'"), + ("dispatcher", 10, "Overlays:"), + ( + "dispatcher", + 10, + f"- rootfs.lava: '{tmp_dir_path}/overlay.tar.gz' to '/'", + ), + ], + ) diff --git a/tests/lava_dispatcher/actions/deploy/test_download.py b/tests/lava_dispatcher/actions/deploy/test_download.py index 53f3630109..27a91f34de 
100644 --- a/tests/lava_dispatcher/actions/deploy/test_download.py +++ b/tests/lava_dispatcher/actions/deploy/test_download.py @@ -5,9 +5,9 @@ # SPDX-License-Identifier: GPL-2.0-or-later from pathlib import Path +from unittest.mock import patch from urllib.parse import urlparse -import pytest import requests from lava_common.constants import HTTP_DOWNLOAD_CHUNK_SIZE @@ -22,756 +22,844 @@ PreDownloadedAction, ScpDownloadAction, ) -from lava_dispatcher.job import Job -from tests.lava_dispatcher.test_basic import Factory - - -def test_downloader_populate_http(): - # "images.key" with http - action = DownloaderAction( - "key", "/path/to/save", params={"url": "http://url.org/resource.img"} - ) - action.level = 1 - action.populate({"images": {"key": {"url": "http://url.org/resource.img"}}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], HttpDownloadAction) - assert action.pipeline.actions[0].url == urlparse("http://url.org/resource.img") - - # "key" with http - action = DownloaderAction( - "key", "/path/to/save", params={"url": "http://url.org/resource.img"} - ) - action.level = 1 - action.populate({"key": {"url": "http://url.org/resource.img"}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], HttpDownloadAction) - assert action.pipeline.actions[0].url == urlparse("http://url.org/resource.img") - - -def test_downloader_populate_https(): - # "images.key" with https - action = DownloaderAction( - "key", "/path/to/save", params={"url": "https://url.org/resource.img"} - ) - action.level = 1 - action.populate({"images": {"key": {"url": "https://url.org/resource.img"}}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], HttpDownloadAction) - assert action.pipeline.actions[0].url == urlparse("https://url.org/resource.img") - - # "key" with https - action = DownloaderAction( - "key", "/path/to/save", params={"url": "https://url.org/resource.img"} - ) - 
action.level = 1 - action.populate({"key": {"url": "https://url.org/resource.img"}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], HttpDownloadAction) - assert action.pipeline.actions[0].url == urlparse("https://url.org/resource.img") - - -def test_downloader_populate_scp(): - # "images.key" with scp - action = DownloaderAction( - "key", "/path/to/save", params={"url": "scp://user@host:/resource.img"} - ) - action.level = 1 - action.populate({"images": {"key": {"url": "scp://user@host:/resource.img"}}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], ScpDownloadAction) - assert action.pipeline.actions[0].url == urlparse("scp://user@host:/resource.img") - - # "key" with scp - action = DownloaderAction( - "key", "/path/to/save", params={"url": "scp://user@host:/resource.img"} - ) - action.level = 1 - action.populate({"key": {"url": "scp://user@host:/resource.img"}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], ScpDownloadAction) - assert action.pipeline.actions[0].url == urlparse("scp://user@host:/resource.img") - - -def test_downloader_populate_file(): - # "images.key" with file - action = DownloaderAction( - "key", "/path/to/save", params={"url": "file:///resource.img"} - ) - action.level = 1 - action.populate({"images": {"key": {"url": "file:///resource.img"}}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], FileDownloadAction) - assert action.pipeline.actions[0].url == urlparse("file:///resource.img") - - # "key" with file - action = DownloaderAction( - "key", "/path/to/save", params={"url": "file:///resource.img"} - ) - action.level = 1 - action.populate({"key": {"url": "file:///resource.img"}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], FileDownloadAction) - assert action.pipeline.actions[0].url == urlparse("file:///resource.img") - - -def 
test_downloader_populate_file(): - # "images.key" with lxc - action = DownloaderAction( - "key", "/path/to/save", params={"url": "lxc:///resource.img"} - ) - action.level = 1 - action.populate({"images": {"key": {"url": "lxc:///resource.img"}}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], LxcDownloadAction) - assert action.pipeline.actions[0].url == urlparse("lxc:///resource.img") - - # "key" with lxc - action = DownloaderAction( - "key", "/path/to/save", params={"url": "lxc:///resource.img"} - ) - action.level = 1 - action.populate({"key": {"url": "lxc:///resource.img"}}) - assert len(action.pipeline.actions) == 1 - assert isinstance(action.pipeline.actions[0], LxcDownloadAction) - assert action.pipeline.actions[0].url == urlparse("lxc:///resource.img") - - -def test_downloader_unsupported_scheme(): - # Test raise - # 1. unsupported scheme - action = DownloaderAction( - "key", "/path/to/save", params={"url": "ftp://user@host:/resource.img"} - ) - action.level = 1 - with pytest.raises(JobError) as exc: - action.populate({"key": {"url": "ftp://user@host:/resource.img"}}) - assert exc.match("Unsupported url protocol scheme: ftp") - - -def test_downloader_no_url(): - # 1. 
no url available - action = DownloaderAction("key", "/path/to/save", params={}) - action.level = 1 - with pytest.raises(JobError) as exc: - action.populate({"key": {}}) - assert exc.match("Invalid deploy action: 'url' is missing for 'key'") - - -def test_download_handler_validate_simple(): - # "images.key" without extra parameters - action = DownloadHandler( - "key", "/path/to/save", urlparse("http://example.com/resource.img") - ) - action.job = Job(1234, {}, None) - action.parameters = { - "images": {"key": {"url": "http://example.com/resource.img"}}, - "namespace": "common", - } - action.params = action.parameters["images"]["key"] - action.validate() - assert action.data == { - "common": { - "download-action": { - "key": {"file": "/path/to/save/key/resource.img", "compression": None} - } + +from ...test_basic import Factory, LavaDispatcherTestCase + + +class TestDownload(LavaDispatcherTestCase): + def test_downloader_populate_http(self): + job = self.create_simple_job() + # "images.key" with http + action = DownloaderAction( + "key", "/path/to/save", params={"url": "http://url.org/resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"images": {"key": {"url": "http://url.org/resource.img"}}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], HttpDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("http://url.org/resource.img") + ) + + # "key" with http + action = DownloaderAction( + "key", "/path/to/save", params={"url": "http://url.org/resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"key": {"url": "http://url.org/resource.img"}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], HttpDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("http://url.org/resource.img") + ) + + def test_downloader_populate_https(self): + job = self.create_simple_job() + # 
"images.key" with https + action = DownloaderAction( + "key", "/path/to/save", params={"url": "https://url.org/resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"images": {"key": {"url": "https://url.org/resource.img"}}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], HttpDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("https://url.org/resource.img") + ) + + # "key" with https + action = DownloaderAction( + "key", "/path/to/save", params={"url": "https://url.org/resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"key": {"url": "https://url.org/resource.img"}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], HttpDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("https://url.org/resource.img") + ) + + def test_downloader_populate_scp(self): + job = self.create_simple_job() + # "images.key" with scp + action = DownloaderAction( + "key", "/path/to/save", params={"url": "scp://user@host:/resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"images": {"key": {"url": "scp://user@host:/resource.img"}}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], ScpDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("scp://user@host:/resource.img") + ) + + # "key" with scp + action = DownloaderAction( + "key", "/path/to/save", params={"url": "scp://user@host:/resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"key": {"url": "scp://user@host:/resource.img"}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], ScpDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("scp://user@host:/resource.img") + ) + + def test_downloader_populate_image_file(self): + job = 
self.create_simple_job() + # "images.key" with file + action = DownloaderAction( + "key", "/path/to/save", params={"url": "file:///resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"images": {"key": {"url": "file:///resource.img"}}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], FileDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("file:///resource.img") + ) + + # "key" with file + action = DownloaderAction( + "key", "/path/to/save", params={"url": "file:///resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"key": {"url": "file:///resource.img"}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], FileDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("file:///resource.img") + ) + + def test_downloader_populate_lxc_file(self): + job = self.create_simple_job() + # "images.key" with lxc + action = DownloaderAction( + "key", "/path/to/save", params={"url": "lxc:///resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"images": {"key": {"url": "lxc:///resource.img"}}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], LxcDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("lxc:///resource.img") + ) + + # "key" with lxc + action = DownloaderAction( + "key", "/path/to/save", params={"url": "lxc:///resource.img"} + ) + action.level = 1 + action.job = job + action.populate({"key": {"url": "lxc:///resource.img"}}) + self.assertEqual(len(action.pipeline.actions), 1) + self.assertIsInstance(action.pipeline.actions[0], LxcDownloadAction) + self.assertEqual( + action.pipeline.actions[0].url, urlparse("lxc:///resource.img") + ) + + def test_downloader_unsupported_scheme(self): + # Test raise + # 1. 
unsupported scheme + action = DownloaderAction( + "key", "/path/to/save", params={"url": "ftp://user@host:/resource.img"} + ) + action.level = 1 + with self.assertRaisesRegex(JobError, "Unsupported url protocol scheme: ftp"): + action.populate({"key": {"url": "ftp://user@host:/resource.img"}}) + + def test_downloader_no_url(self): + # 1. no url available + action = DownloaderAction("key", "/path/to/save", params={}) + action.level = 1 + with self.assertRaisesRegex( + JobError, "Invalid deploy action: 'url' is missing for 'key'" + ): + action.populate({"key": {}}) + + def test_download_handler_validate_simple(self): + # "images.key" without extra parameters + action = DownloadHandler( + "key", "/path/to/save", urlparse("http://example.com/resource.img") + ) + action.job = self.create_simple_job() + action.parameters = { + "images": {"key": {"url": "http://example.com/resource.img"}}, + "namespace": "common", } - } - - # "key" without extra parameters - action = DownloadHandler( - "key", "/path/to/save", urlparse("http://example.com/resource.img") - ) - action.job = Job(1234, {}, None) - action.parameters = { - "key": {"url": "http://example.com/resource.img"}, - "namespace": "common", - } - action.params = action.parameters["key"] - action.validate() - assert action.data == { - "common": { - "download-action": { - "key": {"file": "/path/to/save/key/resource.img", "compression": None} - } + action.params = action.parameters["images"]["key"] + action.validate() + self.assertEqual( + action.data, + { + "common": { + "download-action": { + "key": { + "file": "/path/to/save/key/resource.img", + "compression": None, + } + } + } + }, + ) + + # "key" without extra parameters + action = DownloadHandler( + "key", "/path/to/save", urlparse("http://example.com/resource.img") + ) + action.job = self.create_simple_job() + action.parameters = { + "key": {"url": "http://example.com/resource.img"}, + "namespace": "common", } - } - - -def test_download_handler_validate_kernel(): - # 
"images.key" for kernel - # In this case, the "kernel.type" is not taken into account - action = DownloadHandler( - "kernel", "/path/to/save", urlparse("http://example.com/kernel") - ) - action.job = Job(1234, {}, None) - action.parameters = { - "images": {"kernel": {"url": "http://example.com/kernel", "type": "zimage"}}, - "namespace": "common", - } - action.params = action.parameters["images"]["kernel"] - action.validate() - assert action.data == { - "common": { - "download-action": { - "kernel": {"file": "/path/to/save/kernel/kernel", "compression": None} - } + action.params = action.parameters["key"] + action.validate() + self.assertEqual( + action.data, + { + "common": { + "download-action": { + "key": { + "file": "/path/to/save/key/resource.img", + "compression": None, + } + } + } + }, + ) + + def test_download_handler_validate_kernel(self): + # "images.key" for kernel + # In this case, the "kernel.type" is not taken into account + action = DownloadHandler( + "kernel", "/path/to/save", urlparse("http://example.com/kernel") + ) + action.job = self.create_simple_job() + action.parameters = { + "images": { + "kernel": {"url": "http://example.com/kernel", "type": "zimage"} + }, + "namespace": "common", } - } - - # "key" for kernel - action = DownloadHandler( - "kernel", "/path/to/save", urlparse("http://example.com/kernel") - ) - action.job = Job(1234, {}, None) - action.parameters = { - "kernel": {"url": "http://example.com/kernel", "type": "zimage"}, - "namespace": "common", - } - action.params = action.parameters["kernel"] - action.validate() - assert action.data == { - "common": { - "download-action": { - "kernel": {"file": "/path/to/save/kernel/kernel", "compression": None}, - "type": {"kernel": "zimage"}, - } + action.params = action.parameters["images"]["kernel"] + action.validate() + self.assertEqual( + action.data, + { + "common": { + "download-action": { + "kernel": { + "file": "/path/to/save/kernel/kernel", + "compression": None, + } + } + } + }, + ) + 
+ # "key" for kernel + action = DownloadHandler( + "kernel", "/path/to/save", urlparse("http://example.com/kernel") + ) + action.job = self.create_simple_job() + action.parameters = { + "kernel": {"url": "http://example.com/kernel", "type": "zimage"}, + "namespace": "common", } - } - - -def test_download_handler_validate_extra_arguments(): - # "images.key" with compression, image_arg, overlay, ... - action = DownloadHandler( - "key", "/path/to/save", urlparse("http://example.com/resource.img.gz") - ) - action.job = Job(1234, {}, None) - action.parameters = { - "images": { + action.params = action.parameters["kernel"] + action.validate() + self.assertEqual( + action.data, + { + "common": { + "download-action": { + "kernel": { + "file": "/path/to/save/kernel/kernel", + "compression": None, + }, + "type": {"kernel": "zimage"}, + } + } + }, + ) + + def test_download_handler_validate_extra_arguments(self): + # "images.key" with compression, image_arg, overlay, ... + action = DownloadHandler( + "key", "/path/to/save", urlparse("http://example.com/resource.img.gz") + ) + action.job = self.create_simple_job() + action.parameters = { + "images": { + "key": { + "url": "http://example.com/resource.img.gz", + "compression": "gz", + "image_arg": "something", + "overlay": True, + } + }, + "namespace": "common", + } + action.params = action.parameters["images"]["key"] + action.validate() + self.assertEqual( + action.data, + { + "common": { + "download-action": { + "key": { + "file": "/path/to/save/key/resource.img", + "image_arg": "something", + "compression": "gz", + "overlay": True, + } + } + } + }, + ) + + # "key" with compression, image_arg, overlay, ... 
+ action = DownloadHandler( + "key", "/path/to/save", urlparse("http://example.com/resource.img.gz") + ) + action.job = self.create_simple_job() + action.parameters = { "key": { "url": "http://example.com/resource.img.gz", "compression": "gz", "image_arg": "something", "overlay": True, - } - }, - "namespace": "common", - } - action.params = action.parameters["images"]["key"] - action.validate() - assert action.data == { - "common": { - "download-action": { - "key": { - "file": "/path/to/save/key/resource.img", - "image_arg": "something", - "compression": "gz", - "overlay": True, - } - } + }, + "namespace": "common", } - } - - # "key" with compression, image_arg, overlay, ... - action = DownloadHandler( - "key", "/path/to/save", urlparse("http://example.com/resource.img.gz") - ) - action.job = Job(1234, {}, None) - action.parameters = { - "key": { - "url": "http://example.com/resource.img.gz", - "compression": "gz", - "image_arg": "something", - "overlay": True, - }, - "namespace": "common", - } - action.params = action.parameters["key"] - action.validate() - assert action.data == { - "common": { - "download-action": { - "key": { - "file": "/path/to/save/key/resource.img", - "compression": "gz", - "image_arg": "something", - "overlay": True, + action.params = action.parameters["key"] + action.validate() + self.assertEqual( + action.data, + { + "common": { + "download-action": { + "key": { + "file": "/path/to/save/key/resource.img", + "compression": "gz", + "image_arg": "something", + "overlay": True, + } + } } - } + }, + ) + + def test_download_handler_errors(self): + # "key" downloading a directory + # TODO: is this a good idea to keep this feature? 
+ action = DownloadHandler( + "key", "/path/to/save", urlparse("http://example.com/resource/") + ) + action.section = "deploy" + action.job = self.create_simple_job() + action.parameters = { + "key": {"url": "http://example.com/resource/"}, + "namespace": "common", } - } - - -def test_download_handler_errors(): - # "key" downloading a directory - # TODO: is this a good idea to keep this feature? - action = DownloadHandler( - "key", "/path/to/save", urlparse("http://example.com/resource/") - ) - action.section = "deploy" - action.job = Job(1234, {}, None) - action.parameters = { - "key": {"url": "http://example.com/resource/"}, - "namespace": "common", - } - action.params = action.parameters["key"] - with pytest.raises(JobError) as exc: + action.params = action.parameters["key"] + with self.assertRaisesRegex(JobError, "Cannot download a directory for key"): + action.validate() + + # Unknown compression format + action = DownloadHandler( + "key", "/path/to/save", urlparse("http://example.com/resource.img") + ) + action.section = "deploy" + action.job = self.create_simple_job() + action.parameters = { + "key": { + "url": "http://example.com/resource.img", + "compression": "something", + }, + "namespace": "common", + } + action.params = action.parameters["key"] + action.validate() + self.assertEqual(action.errors, ["Unknown 'compression' format 'something'"]) + + # Unknown archive format + action = DownloadHandler( + "key", "/path/to/save", urlparse("http://example.com/resource.img") + ) + action.section = "deploy" + action.job = self.create_simple_job() + action.parameters = { + "key": {"url": "http://example.com/resource.img", "archive": "cpio"}, + "namespace": "common", + } + action.params = action.parameters["key"] + action.validate() + self.assertEqual(action.errors, ["Unknown 'archive' format 'cpio'"]) + + def test_file_download_validate(self): + job = self.create_simple_job(job_parameters={"dispatcher": {}}) + tmp_dir_path = self.create_temporary_directory() + + 
# Create the file to use + (tmp_dir_path / "bla.img").write_text("hello", encoding="utf-8") + + # Working + action = FileDownloadAction( + "image", + "/path/to/file", + urlparse("file://" + str(tmp_dir_path) + "/bla.img"), + ) + action.section = "deploy" + action.job = job + action.parameters = { + "image": {"url": "file://" + str(tmp_dir_path) + "/bla.img"}, + "namespace": "common", + } + action.params = action.parameters["image"] + action.validate() + self.assertEqual(action.errors, []) + self.assertEqual(action.size, 5) + + # Missing file + action = FileDownloadAction( + "image", + "/path/to/file", + urlparse("file://" + str(tmp_dir_path) + "/bla2.img"), + ) + action.section = "deploy" + action.job = self.create_simple_job() + action.parameters = { + "image": {"url": "file://" + str(tmp_dir_path) + "/bla2.img"}, + "namespace": "common", + } + action.params = action.parameters["image"] action.validate() - assert str(exc.value) == "Cannot download a directory for key" - - # Unknown compression format - action = DownloadHandler( - "key", "/path/to/save", urlparse("http://example.com/resource.img") - ) - action.section = "deploy" - action.job = Job(1234, {}, None) - action.parameters = { - "key": {"url": "http://example.com/resource.img", "compression": "something"}, - "namespace": "common", - } - action.params = action.parameters["key"] - action.validate() - assert action.errors == ["Unknown 'compression' format 'something'"] - - # Unknown archive format - action = DownloadHandler( - "key", "/path/to/save", urlparse("http://example.com/resource.img") - ) - action.section = "deploy" - action.job = Job(1234, {}, None) - action.parameters = { - "key": {"url": "http://example.com/resource.img", "archive": "cpio"}, - "namespace": "common", - } - action.params = action.parameters["key"] - action.validate() - assert action.errors == ["Unknown 'archive' format 'cpio'"] - - -def test_file_download_validate(tmp_path): - # Create the file to use - (tmp_path / 
"bla.img").write_text("hello", encoding="utf-8") - - # Working - action = FileDownloadAction( - "image", "/path/to/file", urlparse("file://" + str(tmp_path) + "/bla.img") - ) - action.section = "deploy" - action.job = Job(1234, {}, None) - action.parameters = { - "image": {"url": "file://" + str(tmp_path) + "/bla.img"}, - "namespace": "common", - } - action.params = action.parameters["image"] - action.validate() - assert action.errors == [] - assert action.size == 5 - - # Missing file - action = FileDownloadAction( - "image", "/path/to/file", urlparse("file://" + str(tmp_path) + "/bla2.img") - ) - action.section = "deploy" - action.job = Job(1234, {}, None) - action.parameters = { - "image": {"url": "file://" + str(tmp_path) + "/bla2.img"}, - "namespace": "common", - } - action.params = action.parameters["image"] - action.validate() - assert action.errors == [ - "Image file '" + str(tmp_path) + "/bla2.img' does not exist or is not readable" - ] - assert action.size == -1 - - -def test_http_download_validate(mocker): - class DummyResponseNOK: - status_code = 404 - - def close(self): - pass - - class DummyResponseOK: - status_code = requests.codes.OK - headers = {"content-length": "4212"} - - def close(self): - pass - - def dummyhead(url, allow_redirects, headers, timeout): - assert allow_redirects is True - assert headers == {"Accept-Encoding": ""} - if url == "https://example.com/kernel": + self.assertEqual( + action.errors, + [ + "Image file '" + + str(tmp_dir_path) + + "/bla2.img' does not exist or is not readable" + ], + ) + self.assertEqual(action.size, -1) + + def test_http_download_validate(self): + class DummyResponseNOK: + status_code = 404 + + def close(self): + pass + + class DummyResponseOK: + status_code = requests.codes.OK + headers = {"content-length": "4212"} + + def close(self): + pass + + def dummyhead(url, allow_redirects, headers, timeout): + self.assertIs(allow_redirects, True) + self.assertEqual(headers, {"Accept-Encoding": ""}) + if url == 
"https://example.com/kernel": + return DummyResponseOK() + elif url == "https://example.com/dtb": + return DummyResponseNOK() + else: + raise ValueError + + def dummyget(url, allow_redirects, stream, headers, timeout): + self.assertIs(allow_redirects, True) + self.assertIs(stream, True) + self.assertEqual(headers, {"Accept-Encoding": ""}) + self.assertEqual(url, "https://example.com/dtb") return DummyResponseOK() - elif url == "https://example.com/dtb": + + job = self.create_simple_job(job_parameters={"dispatcher": {}}) + # HEAD is working + action = HttpDownloadAction( + "image", "/path/to/file", urlparse("https://example.com/kernel") + ) + action.section = "deploy" + action.job = job + action.parameters = { + "image": {"url": "https://example.com/kernel"}, + "namespace": "common", + } + action.params = action.parameters["image"] + with patch("requests.head", dummyhead), patch("requests.get", dummyget): + action.validate() + self.assertEqual(action.errors, []) + self.assertEqual(action.size, 4212) + + # Only GET works + action = HttpDownloadAction( + "image", "/path/to/file", urlparse("https://example.com/dtb") + ) + action.section = "deploy" + action.job = job + action.parameters = { + "image": {"url": "https://example.com/dtb"}, + "namespace": "common", + } + action.params = action.parameters["image"] + with patch("requests.head", dummyhead), patch("requests.get", dummyget): + action.validate() + self.assertEqual(action.errors, []) + self.assertEqual(action.size, 4212) + + # 404 + def response404(*args, **kwargs): + print(args) + print(str(kwargs)) return DummyResponseNOK() - assert 0 - - def dummyget(url, allow_redirects, stream, headers, timeout): - assert allow_redirects is True - assert stream is True - assert headers == {"Accept-Encoding": ""} - assert url == "https://example.com/dtb" - return DummyResponseOK() - - mocker.patch("requests.head", dummyhead) - mocker.patch("requests.get", dummyget) - - # HEAD is working - action = HttpDownloadAction( - 
"image", "/path/to/file", urlparse("https://example.com/kernel") - ) - action.section = "deploy" - action.job = Job(1234, {"dispatcher": {}}, None) - action.parameters = { - "image": {"url": "https://example.com/kernel"}, - "namespace": "common", - } - action.params = action.parameters["image"] - action.validate() - assert action.errors == [] - assert action.size == 4212 - - # Only GET works - action = HttpDownloadAction( - "image", "/path/to/file", urlparse("https://example.com/dtb") - ) - action.section = "deploy" - action.job = Job(1234, {"dispatcher": {}}, None) - action.parameters = { - "image": {"url": "https://example.com/dtb"}, - "namespace": "common", - } - action.params = action.parameters["image"] - action.validate() - assert action.errors == [] - assert action.size == 4212 - - # 404 - def response404(*args, **kwargs): - print(args) - print(str(kwargs)) - return DummyResponseNOK() - - mocker.patch("requests.head", response404) - mocker.patch("requests.get", response404) - - action = HttpDownloadAction( - "image", "/path/to/file", urlparse("https://example.com/kernel") - ) - action.section = "deploy" - action.job = Job(1234, {"dispatcher": {}}, None) - action.parameters = { - "image": {"url": "https://example.com/kernel"}, - "namespace": "common", - } - action.params = action.parameters["image"] - action.validate() - assert action.errors == [ - "Resource unavailable at 'https://example.com/kernel' (404)" - ] - - # Raising exceptions - def raisinghead(url, allow_redirects, headers, timeout): - raise requests.Timeout() - - mocker.patch("requests.head", raisinghead) - action = HttpDownloadAction( - "image", "/path/to/file", urlparse("https://example.com/kernel") - ) - action.section = "deploy" - action.job = Job(1234, {"dispatcher": {}}, None) - action.parameters = { - "image": {"url": "https://example.com/kernel"}, - "namespace": "common", - } - action.params = action.parameters["image"] - action.validate() - assert action.errors == 
["'https://example.com/kernel' timed out"] - - def raisinghead2(url, allow_redirects, headers, timeout): - raise requests.RequestException("an error occurred") - - mocker.patch("requests.head", raisinghead2) - action = HttpDownloadAction( - "image", "/path/to/file", urlparse("https://example.com/kernel") - ) - action.section = "deploy" - action.job = Job(1234, {"dispatcher": {}}, None) - action.parameters = { - "image": {"url": "https://example.com/kernel"}, - "namespace": "common", - } - action.params = action.parameters["image"] - action.validate() - assert action.errors == [ - "Unable to get 'https://example.com/kernel': an error occurred" - ] - - -def test_file_download_reader(tmp_path): - # Create the file to use - (tmp_path / "bla.img").write_text("hello", encoding="utf-8") - - # Normal case - action = FileDownloadAction( - "image", "/path/to/file", urlparse("file://" + str(tmp_path) + "/bla.img") - ) - action.url = urlparse("file://" + str(tmp_path) + "/bla.img") - ite = action.reader() - assert next(ite) == b"hello" - with pytest.raises(StopIteration): - next(ite) - - # Error when reading - action = FileDownloadAction( - "image", "/path/to/file", urlparse("file://" + str(tmp_path) + "/bla2.img") - ) - action.url = urlparse("file://" + str(tmp_path) + "/bla2.img") - ite = action.reader() - with pytest.raises(InfrastructureError) as exc: - next(ite) - assert exc.match( - "Unable to read from %s: \\[Errno 2\\] No such file or directory: '%s'" - % (str(tmp_path / "bla2.img"), str(tmp_path / "bla2.img")) - ) - - -def test_http_download_reader(mocker): - # Working - class DummyResponse: - status_code = requests.codes.OK - headers = {"content-length": "4212"} - - def iter_content(self, size): - assert size == HTTP_DOWNLOAD_CHUNK_SIZE - yield b"hello" - def close(self): - pass - - def dummyget(url, allow_redirects, stream, headers, timeout): - assert allow_redirects is True - assert stream is True - assert url == "https://example.com/dtb" - return DummyResponse() - 
- mocker.patch("requests.get", dummyget) - action = HttpDownloadAction( - "image", "/path/to/file", urlparse("https://example.com/dtb") - ) - action.url = urlparse("https://example.com/dtb") - - ite = action.reader() - assert next(ite) == b"hello" - with pytest.raises(StopIteration): - next(ite) - - # Not working - def dummygetraise(url, allow_redirects, stream, headers, timeout): - raise requests.RequestException("error") - - mocker.patch("requests.get", dummygetraise) - action = HttpDownloadAction( - "image", "/path/to/file", urlparse("https://example.com/dtb") - ) - action.url = urlparse("https://example.com/dtb") - - ite = action.reader() - with pytest.raises(InfrastructureError) as exc: - next(ite) - assert exc.match("Unable to download 'https://example.com/dtb': error") - - -def test_http_download_run(tmp_path): - def reader(): - yield b"hello" - yield b"world" - - action = HttpDownloadAction( - "dtb", str(tmp_path), urlparse("https://example.com/dtb") - ) - action.job = Job(1234, {"dispatcher": {}}, None) - action.url = urlparse("https://example.com/dtb") - action.parameters = { - "to": "download", - "images": { - "dtb": { - "url": "https://example.com/dtb", + action = HttpDownloadAction( + "image", "/path/to/file", urlparse("https://example.com/kernel") + ) + action.section = "deploy" + action.job = job + action.parameters = { + "image": {"url": "https://example.com/kernel"}, + "namespace": "common", + } + action.params = action.parameters["image"] + with patch("requests.head", response404), patch("requests.get", response404): + action.validate() + self.assertEqual( + action.errors, + ["Resource unavailable at 'https://example.com/kernel' (404)"], + ) + + # Raising exceptions + def raisinghead(url, allow_redirects, headers, timeout): + raise requests.Timeout() + + action = HttpDownloadAction( + "image", "/path/to/file", urlparse("https://example.com/kernel") + ) + action.section = "deploy" + action.job = job + action.parameters = { + "image": {"url": 
"https://example.com/kernel"}, + "namespace": "common", + } + action.params = action.parameters["image"] + with patch("requests.head", raisinghead): + action.validate() + self.assertEqual(action.errors, ["'https://example.com/kernel' timed out"]) + + def raisinghead2(url, allow_redirects, headers, timeout): + raise requests.RequestException("an error occurred") + + action = HttpDownloadAction( + "image", "/path/to/file", urlparse("https://example.com/kernel") + ) + action.section = "deploy" + action.job = job + action.parameters = { + "image": {"url": "https://example.com/kernel"}, + "namespace": "common", + } + action.params = action.parameters["image"] + with patch("requests.head", raisinghead2): + action.validate() + self.assertEqual( + action.errors, + ["Unable to get 'https://example.com/kernel': an error occurred"], + ) + + def test_file_download_reader(self): + tmpr_dir_path = self.create_temporary_directory() + + # Create the file to use + (tmpr_dir_path / "bla.img").write_text("hello", encoding="utf-8") + + # Normal case + action = FileDownloadAction( + "image", + "/path/to/file", + urlparse("file://" + str(tmpr_dir_path) + "/bla.img"), + ) + action.url = urlparse("file://" + str(tmpr_dir_path) + "/bla.img") + ite = action.reader() + self.assertEqual(next(ite), b"hello") + with self.assertRaises(StopIteration): + next(ite) + + # Error when reading + action = FileDownloadAction( + "image", + "/path/to/file", + urlparse("file://" + str(tmpr_dir_path) + "/bla2.img"), + ) + action.url = urlparse("file://" + str(tmpr_dir_path) + "/bla2.img") + ite = action.reader() + with self.assertRaisesRegex( + InfrastructureError, + ( + "Unable to read from %s: \\[Errno 2\\] No such file or directory: '%s'" + % (str(tmpr_dir_path / "bla2.img"), str(tmpr_dir_path / "bla2.img")) + ), + ): + next(ite) + + def test_http_download_reader(self): + # Working + class DummyResponse: + # pylint: disable=no-self-argument + status_code = requests.codes.OK + headers = {"content-length": 
"4212"} + + def iter_content(self_, size): + self.assertEqual(size, HTTP_DOWNLOAD_CHUNK_SIZE) + yield b"hello" + + def close(self_): + pass + + def dummyget(url, allow_redirects, stream, headers, timeout): + self.assertIs(allow_redirects, True) + self.assertIs(stream, True) + self.assertEqual(url, "https://example.com/dtb") + return DummyResponse() + + action = HttpDownloadAction( + "image", "/path/to/file", urlparse("https://example.com/dtb") + ) + action.url = urlparse("https://example.com/dtb") + with patch("requests.get", dummyget): + ite = action.reader() + self.assertEqual(next(ite), b"hello") + with self.assertRaises(StopIteration): + next(ite) + + # Not working + def dummygetraise(url, allow_redirects, stream, headers, timeout): + raise requests.RequestException("error") + + action = HttpDownloadAction( + "image", "/path/to/file", urlparse("https://example.com/dtb") + ) + action.url = urlparse("https://example.com/dtb") + + with patch("requests.get", dummygetraise), self.assertRaisesRegex( + InfrastructureError, "Unable to download 'https://example.com/dtb': error" + ): + ite = action.reader() + next(ite) + + def test_http_download_run(self): + tmp_dir_path = self.create_temporary_directory() + + def reader(): + yield b"hello" + yield b"world" + + action = HttpDownloadAction( + "dtb", str(tmp_dir_path), urlparse("https://example.com/dtb") + ) + action.job = self.create_simple_job(job_parameters={"dispatcher": {}}) + action.url = urlparse("https://example.com/dtb") + action.parameters = { + "to": "download", + "images": { + "dtb": { + "url": "https://example.com/dtb", + "md5sum": "fc5e038d38a57032085441e7fe7010b0", + "sha256sum": "936a185caaa266bb9cbe981e9e05cb78cd732b0b3280eb944412bb6f8f8f07af", + "sha512sum": "1594244d52f2d8c12b142bb61f47bc2eaf503d6d9ca8480cae9fcf112f66e4967dc5e8fa98285e36db8af1b8ffa8b84cb15e0fbcf836c3deb803c13f37659a60", + } + }, + "namespace": "common", + } + action.params = action.parameters["images"]["dtb"] + action.reader = reader + 
action.fname = str(tmp_dir_path / "dtb/dtb") + action.run(None, 4212) + data = "" + with open(str(tmp_dir_path / "dtb/dtb")) as f_in: + data = f_in.read() + self.assertEqual(data, "helloworld") + self.assertEqual( + dict(action.results), + { + "success": { + "sha512": "1594244d52f2d8c12b142bb61f47bc2eaf503d6d9ca8480cae9fcf112f66e4967dc5e8fa98285e36db8af1b8ffa8b84cb15e0fbcf836c3deb803c13f37659a60" + }, + "label": "dtb", + "size": 10, "md5sum": "fc5e038d38a57032085441e7fe7010b0", "sha256sum": "936a185caaa266bb9cbe981e9e05cb78cd732b0b3280eb944412bb6f8f8f07af", "sha512sum": "1594244d52f2d8c12b142bb61f47bc2eaf503d6d9ca8480cae9fcf112f66e4967dc5e8fa98285e36db8af1b8ffa8b84cb15e0fbcf836c3deb803c13f37659a60", - } - }, - "namespace": "common", - } - action.params = action.parameters["images"]["dtb"] - action.reader = reader - action.fname = str(tmp_path / "dtb/dtb") - action.run(None, 4212) - data = "" - with open(str(tmp_path / "dtb/dtb")) as f_in: - data = f_in.read() - assert data == "helloworld" - assert dict(action.results) == { - "success": { - "sha512": "1594244d52f2d8c12b142bb61f47bc2eaf503d6d9ca8480cae9fcf112f66e4967dc5e8fa98285e36db8af1b8ffa8b84cb15e0fbcf836c3deb803c13f37659a60" - }, - "label": "dtb", - "size": 10, - "md5sum": "fc5e038d38a57032085441e7fe7010b0", - "sha256sum": "936a185caaa266bb9cbe981e9e05cb78cd732b0b3280eb944412bb6f8f8f07af", - "sha512sum": "1594244d52f2d8c12b142bb61f47bc2eaf503d6d9ca8480cae9fcf112f66e4967dc5e8fa98285e36db8af1b8ffa8b84cb15e0fbcf836c3deb803c13f37659a60", - } - assert action.data == { - "common": { - "download-action": { - "dtb": { - "decompressed": False, - "file": "%s/dtb/dtb" % str(tmp_path), - "md5": "fc5e038d38a57032085441e7fe7010b0", - "sha256": "936a185caaa266bb9cbe981e9e05cb78cd732b0b3280eb944412bb6f8f8f07af", - "sha512": "1594244d52f2d8c12b142bb61f47bc2eaf503d6d9ca8480cae9fcf112f66e4967dc5e8fa98285e36db8af1b8ffa8b84cb15e0fbcf836c3deb803c13f37659a60", - }, - "file": {"dtb": "%s/dtb/dtb" % str(tmp_path)}, - } + }, + ) + 
self.assertEqual( + action.data, + { + "common": { + "download-action": { + "dtb": { + "decompressed": False, + "file": "%s/dtb/dtb" % str(tmp_dir_path), + "md5": "fc5e038d38a57032085441e7fe7010b0", + "sha256": "936a185caaa266bb9cbe981e9e05cb78cd732b0b3280eb944412bb6f8f8f07af", + "sha512": "1594244d52f2d8c12b142bb61f47bc2eaf503d6d9ca8480cae9fcf112f66e4967dc5e8fa98285e36db8af1b8ffa8b84cb15e0fbcf836c3deb803c13f37659a60", + }, + "file": {"dtb": "%s/dtb/dtb" % str(tmp_dir_path)}, + } + } + }, + ) + + def test_http_download_run_compressed(self): + tmp_dir_path = self.create_temporary_directory() + + def reader(): + yield b"\xfd7zXZ\x00\x00\x04\xe6\xd6\xb4F\x02\x00!\x01\x16\x00\x00" + yield b"\x00t/\xe5\xa3\x01\x00\x0bhello world\n\x00\xa1\xf2\xff\xc4j" + yield b"\x7f\xbf\xcf\x00\x01$\x0c\xa6\x18\xd8\xd8\x1f\xb6\xf3}\x01" + yield b"\x00\x00\x00\x00\x04YZ" + + action = HttpDownloadAction( + "rootfs", str(tmp_dir_path), urlparse("https://example.com/rootfs.xz") + ) + action.job = self.create_simple_job() + action.url = urlparse("https://example.com/rootfs.xz") + action.parameters = { + "to": "download", + "rootfs": { + "url": "https://example.com/rootfs.xz", + "compression": "xz", + "md5sum": "0107d527acf9b8de628b7b4d103c89d1", + "sha256sum": "3275a39be7b717d548b66f3c8f23d940603a63b0f13d84a596d979a7f66feb2c", + "sha512sum": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35", + }, + "namespace": "common", } - } - - -def test_http_download_run_compressed(tmp_path): - def reader(): - yield b"\xfd7zXZ\x00\x00\x04\xe6\xd6\xb4F\x02\x00!\x01\x16\x00\x00" - yield b"\x00t/\xe5\xa3\x01\x00\x0bhello world\n\x00\xa1\xf2\xff\xc4j" - yield b"\x7f\xbf\xcf\x00\x01$\x0c\xa6\x18\xd8\xd8\x1f\xb6\xf3}\x01" - yield b"\x00\x00\x00\x00\x04YZ" - - action = HttpDownloadAction( - "rootfs", str(tmp_path), urlparse("https://example.com/rootfs.xz") - ) - action.job = Job(1234, {}, None) - action.url = 
urlparse("https://example.com/rootfs.xz") - action.parameters = { - "to": "download", - "rootfs": { - "url": "https://example.com/rootfs.xz", - "compression": "xz", - "md5sum": "0107d527acf9b8de628b7b4d103c89d1", - "sha256sum": "3275a39be7b717d548b66f3c8f23d940603a63b0f13d84a596d979a7f66feb2c", - "sha512sum": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35", - }, - "namespace": "common", - } - action.params = action.parameters["rootfs"] - action.reader = reader - action.size = 68 - action.fname = str(tmp_path / "rootfs/rootfs") - action.run(None, 4212) - data = "" - with open(str(tmp_path / "rootfs/rootfs")) as f_in: - data = f_in.read() - assert data == "hello world\n" - assert dict(action.results) == { - "success": { - "sha512": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35" - }, - "label": "rootfs", - "size": 68, - "md5sum": "0107d527acf9b8de628b7b4d103c89d1", - "sha256sum": "3275a39be7b717d548b66f3c8f23d940603a63b0f13d84a596d979a7f66feb2c", - "sha512sum": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35", - } - - assert action.data == { - "common": { - "download-action": { - "rootfs": { - "decompressed": True, - "file": "%s/rootfs/rootfs" % str(tmp_path), - "md5": "0107d527acf9b8de628b7b4d103c89d1", - "sha256": "3275a39be7b717d548b66f3c8f23d940603a63b0f13d84a596d979a7f66feb2c", - "sha512": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35", + action.params = action.parameters["rootfs"] + action.reader = reader + action.size = 68 + action.fname = str(tmp_dir_path / "rootfs/rootfs") + action.run(None, 4212) + data = "" + with open(str(tmp_dir_path / "rootfs/rootfs")) as f_in: + data = f_in.read() + self.assertEqual(data, "hello world\n") + 
self.assertEqual( + dict(action.results), + { + "success": { + "sha512": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35" }, - "file": {"rootfs": "%s/rootfs/rootfs" % str(tmp_path)}, - } + "label": "rootfs", + "size": 68, + "md5sum": "0107d527acf9b8de628b7b4d103c89d1", + "sha256sum": "3275a39be7b717d548b66f3c8f23d940603a63b0f13d84a596d979a7f66feb2c", + "sha512sum": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35", + }, + ) + + self.assertEqual( + action.data, + { + "common": { + "download-action": { + "rootfs": { + "decompressed": True, + "file": "%s/rootfs/rootfs" % str(tmp_dir_path), + "md5": "0107d527acf9b8de628b7b4d103c89d1", + "sha256": "3275a39be7b717d548b66f3c8f23d940603a63b0f13d84a596d979a7f66feb2c", + "sha512": "d0850c3e0c45bdf74995907a04f69806a070d79a4f0b2dd82d6b96adafdbfd85ce6c1daaff916ff089bdf9b04eba7805041c49afecdbeabca69fef802e60de35", + }, + "file": {"rootfs": "%s/rootfs/rootfs" % str(tmp_dir_path)}, + } + } + }, + ) + + def test_predownloaded_job_validation(self): + factory = Factory() + factory.validate_job_strict = True + job = factory.create_job( + "kvm01.jinja2", "sample_jobs/qemu-download-postprocess.yaml" + ) + job.validate() + + def test_predownloaded(self): + params = { + "to": "tmpfs", + "rootfs": {"url": "downloads://rootfs.xz"}, + "namespace": "common", } - } - - -def test_predownloaded_job_validation(): - factory = Factory() - factory.validate_job_strict = True - job = factory.create_job( - "kvm01.jinja2", "sample_jobs/qemu-download-postprocess.yaml" - ) - job.validate() - - -def test_predownloaded(): - params = { - "to": "tmpfs", - "rootfs": {"url": "downloads://rootfs.xz"}, - "namespace": "common", - } - job = Job(1234, {}, None) - destdir = job.mkdtemp("some-other-action") - action = PreDownloadedAction( - "rootfs", urlparse("downloads://rootfs.xz"), destdir, params - ) - 
action.parameters = params - action.job = job - - filename = Path(action.job.tmp_dir) / "downloads/common/rootfs.xz" - filename.parent.mkdir(parents=True) - filename.touch() - - action.data = {} - action.parameters = {"namespace": "common"} - action.validate() - action.run(None, 4242) - mapped_path = action.get_namespace_data( - action="download-action", label="rootfs", key="file" - ) - assert mapped_path == (destdir + "/rootfs.xz") - assert Path(mapped_path).exists() - - -def test_predownloaded_subdirectory(): - params = {"to": "tmpfs", "rootfs": {"url": "downloads://subdir/rootfs.xz"}} - job = Job(1234, {}, None) - destdir = job.mkdtemp("some-other-action") - action = PreDownloadedAction( - "rootfs", urlparse("downloads://subdir/rootfs.xz"), destdir, params - ) - action.parameters = params - action.job = job - - filename = Path(action.job.tmp_dir) / "downloads/common/subdir/rootfs.xz" - filename.parent.mkdir(parents=True) - filename.touch() - - action.data = {} - action.parameters = {"namespace": "common"} - action.validate() - action.run(None, 4242) - mapped_path = action.get_namespace_data( - action="download-action", label="rootfs", key="file" - ) - assert mapped_path == (destdir + "/subdir/rootfs.xz") - assert Path(mapped_path).exists() - - -def test_predownloaded_missing_file(tmp_path): - job = Job(1234, {}, None) - destdir = job.mkdtemp("some-other-action") - action = PreDownloadedAction("rootfs", urlparse("downloads://missing.xz"), destdir) - action.job = job - action.parameters = {"namespace": "common"} - with pytest.raises(JobError) as exc: + job = self.create_simple_job() + destdir = job.mkdtemp("some-other-action") + action = PreDownloadedAction( + "rootfs", urlparse("downloads://rootfs.xz"), destdir, params + ) + action.parameters = params + action.job = job + + filename = Path(action.job.tmp_dir) / "downloads/common/rootfs.xz" + filename.parent.mkdir(parents=True) + filename.touch() + + action.data = {} + action.parameters = {"namespace": "common"} + 
action.validate() action.run(None, 4242) - - -def test_copy_to_lxc_without_lxc_should_do_nothing(): - action = CopyToLxcAction() - action.job = Job(1234, {}, None) - action.run(None, 4242) # no crash = success + mapped_path = action.get_namespace_data( + action="download-action", label="rootfs", key="file" + ) + self.assertEqual(mapped_path, (destdir + "/rootfs.xz")) + self.assertTrue(Path(mapped_path).exists()) + + def test_predownloaded_subdirectory(self): + params = {"to": "tmpfs", "rootfs": {"url": "downloads://subdir/rootfs.xz"}} + job = self.create_simple_job() + destdir = job.mkdtemp("some-other-action") + action = PreDownloadedAction( + "rootfs", urlparse("downloads://subdir/rootfs.xz"), destdir, params + ) + action.parameters = params + action.job = job + + filename = Path(action.job.tmp_dir) / "downloads/common/subdir/rootfs.xz" + filename.parent.mkdir(parents=True) + filename.touch() + + action.data = {} + action.parameters = {"namespace": "common"} + action.validate() + action.run(None, 4242) + mapped_path = action.get_namespace_data( + action="download-action", label="rootfs", key="file" + ) + self.assertEqual(mapped_path, (destdir + "/subdir/rootfs.xz")) + self.assertTrue(Path(mapped_path).exists()) + + def test_predownloaded_missing_file(self): + job = self.create_simple_job() + destdir = job.mkdtemp("some-other-action") + action = PreDownloadedAction( + "rootfs", urlparse("downloads://missing.xz"), destdir + ) + action.job = job + action.parameters = {"namespace": "common"} + with self.assertRaises(JobError): + action.run(None, 4242) + + def test_copy_to_lxc_without_lxc_should_do_nothing(self): + action = CopyToLxcAction() + action.job = self.create_simple_job() + action.run(None, 4242) # no crash = success diff --git a/tests/lava_dispatcher/actions/deploy/test_downloads.py b/tests/lava_dispatcher/actions/deploy/test_downloads.py index e5fc6701ec..5f4ea97f13 100644 --- a/tests/lava_dispatcher/actions/deploy/test_downloads.py +++ 
b/tests/lava_dispatcher/actions/deploy/test_downloads.py @@ -4,123 +4,115 @@ # # SPDX-License-Identifier: GPL-2.0-or-later - -import pytest +from unittest.mock import ANY as MOCK_ANY +from unittest.mock import MagicMock, patch from lava_dispatcher.actions.deploy.download import DownloaderAction from lava_dispatcher.actions.deploy.downloads import ( DownloadsAction, PostprocessWithDocker, ) -from lava_dispatcher.job import Job -from tests.lava_dispatcher.test_basic import Factory - - -@pytest.fixture -def job(tmp_path): - job = Job(1234, {}, None) - return job - - -def test_downloads_action(job): - action = DownloadsAction() - action.level = 2 - action.job = job - action.populate( - { - "images": {"rootfs": {"url": "https://example.com/image.img"}}, - "namespace": "common", - } - ) - download = action.pipeline.actions[0] - assert isinstance(download, DownloaderAction) - assert download.key == "rootfs" - assert str(download.path) == f"{job.tmp_dir}/downloads/common" - assert download.params == {"url": "https://example.com/image.img"} - assert not download.uniquify - - -def test_uniquify(job): - action = DownloadsAction() - action.level = 2 - action.job = job - action.populate( - { - "uniquify": True, - "images": { - "rootfs": {"url": "https://example.com/rootfs/image"}, - "boot": {"url": "https://example.com/boot/image"}, - }, - "namespace": "common", - } - ) - download_rootfs = action.pipeline.actions[0].pipeline.actions[0] - download_boot = action.pipeline.actions[1].pipeline.actions[0] - - assert download_rootfs.path != download_boot.path - - -def test_downloads_action_adds_docker_action(): - factory = Factory() - factory.validate_job_strict = True - job = factory.create_job( - "qemu01.jinja2", "sample_jobs/qemu-download-postprocess.yaml" - ) - - deploy = job.pipeline.actions[0] - action = deploy.pipeline.actions[-1] - assert isinstance(action, PostprocessWithDocker) - assert str(action.path) == f"{job.tmp_dir}/downloads/common" - - -@pytest.fixture -def 
action(tmp_path): - action = PostprocessWithDocker(tmp_path) - action.populate( - { - "postprocess": { - "docker": {"image": "foo", "steps": ["date", "echo HELLO WORLD"]} - } - } - ) - return action - - -def test_postprocess_with_docker_populate(action): - assert action.docker_parameters["image"] == "foo" - assert "date" in action.steps - assert "echo HELLO WORLD" in action.steps - -def test_postprocess_with_docker_populate_missing_data(tmp_path): - action = PostprocessWithDocker(tmp_path) - action.populate({}) +from ...test_basic import Factory, LavaDispatcherTestCase -def test_postprocess_with_docker_validate(tmp_path): - action = PostprocessWithDocker(tmp_path) - assert not action.validate() - assert "postprocessing steps missing" in action.errors - action.steps = ["date"] - action.errors.clear() - assert action.validate() - assert len(action.errors) == 0 +class TestDownloads(LavaDispatcherTestCase): + def setUp(self): + super().setUp() + self.job = self.create_simple_job() + def test_downloads_action(self): + action = DownloadsAction() + action.level = 2 + action.job = self.job + action.populate( + { + "images": {"rootfs": {"url": "https://example.com/image.img"}}, + "namespace": "common", + } + ) + download = action.pipeline.actions[0] + self.assertIsInstance(download, DownloaderAction) + self.assertEqual(download.key, "rootfs") + self.assertEqual(str(download.path), f"{self.job.tmp_dir}/downloads/common") + self.assertEqual(download.params, {"url": "https://example.com/image.img"}) + self.assertFalse(download.uniquify) + + def test_uniquify(self): + action = DownloadsAction() + action.level = 2 + action.job = self.job + action.populate( + { + "uniquify": True, + "images": { + "rootfs": {"url": "https://example.com/rootfs/image"}, + "boot": {"url": "https://example.com/boot/image"}, + }, + "namespace": "common", + } + ) + download_rootfs = action.pipeline.actions[0].pipeline.actions[0] + download_boot = action.pipeline.actions[1].pipeline.actions[0] + + 
self.assertNotEqual(download_rootfs.path, download_boot.path) + + def test_downloads_action_adds_docker_action(self): + factory = Factory() + factory.validate_job_strict = True + job = factory.create_job( + "qemu01.jinja2", "sample_jobs/qemu-download-postprocess.yaml" + ) + + deploy = job.pipeline.actions[0] + action = deploy.pipeline.actions[-1] + self.assertIsInstance(action, PostprocessWithDocker) + self.assertEqual(str(action.path), f"{job.tmp_dir}/downloads/common") + + def test_postprocess_with_docker_populate_missing_data(self): + action = PostprocessWithDocker(self.create_temporary_directory()) + action.populate({}) + + def test_postprocess_with_docker_validate(self): + action = PostprocessWithDocker(self.create_temporary_directory()) + self.assertFalse(action.validate()) + self.assertIn("postprocessing steps missing", action.errors) + action.steps = ["date"] + action.errors.clear() + self.assertTrue(action.validate()) + self.assertEqual(len(action.errors), 0) + + +class TestPostprocessDocker(LavaDispatcherTestCase): + def setUp(self): + super().setUp() + self.job = self.create_simple_job() + self.action = PostprocessWithDocker(self.create_temporary_directory()) + self.action.job = self.job + self.action.populate( + { + "postprocess": { + "docker": {"image": "foo", "steps": ["date", "echo HELLO WORLD"]} + } + } + ) -def test_postprocess_with_docker_run(action, job, mocker): - action.job = job - - run = mocker.patch("lava_dispatcher.utils.docker.DockerRun.run") + def test_postprocess_with_docker_populate(self): + self.assertEqual(self.action.docker_parameters["image"], "foo") + self.assertIn("date", self.action.steps) + self.assertIn("echo HELLO WORLD", self.action.steps) - origconn = mocker.MagicMock() - conn = action.run(origconn, 4242) + def test_postprocess_with_docker_run(self): + origconn = MagicMock() + with patch("lava_dispatcher.utils.docker.DockerRun.run") as docker_run_mock: + conn = self.action.run(origconn, 4242) - assert conn is origconn + 
self.assertIs(conn, origconn) - script = action.path / "postprocess.sh" - assert script.exists() - script_text = script.read_text() - assert "date\n" in script_text - assert "echo HELLO WORLD\n" in script_text + script = self.action.path / "postprocess.sh" + self.assertTrue(script.exists()) + script_text = script.read_text() + self.assertIn("date\n", script_text) + self.assertIn("echo HELLO WORLD\n", script_text) - run.assert_called_with(mocker.ANY, action=action) + docker_run_mock.assert_called_with(MOCK_ANY, action=self.action) diff --git a/tests/lava_dispatcher/actions/test/__init__.py b/tests/lava_dispatcher/actions/test/__init__.py new file mode 100644 index 0000000000..6ce56e1bd0 --- /dev/null +++ b/tests/lava_dispatcher/actions/test/__init__.py @@ -0,0 +1,5 @@ +# Copyright (C) 2023 Collabora Limited +# +# Author: Igor Ponomarev +# +# SPDX-License-Identifier: GPL-2.0-or-later diff --git a/tests/lava_dispatcher/actions/test/test_monitor.py b/tests/lava_dispatcher/actions/test/test_monitor.py index 7b19024e9f..2691992d79 100644 --- a/tests/lava_dispatcher/actions/test/test_monitor.py +++ b/tests/lava_dispatcher/actions/test/test_monitor.py @@ -4,8 +4,6 @@ # # SPDX-License-Identifier: GPL-2.0-or-later -import logging - import pytest from lava_common.exceptions import ConnectionClosedError diff --git a/tests/lava_dispatcher/actions/test/test_shell.py b/tests/lava_dispatcher/actions/test/test_shell.py index 5b0a116d58..6073130876 100644 --- a/tests/lava_dispatcher/actions/test/test_shell.py +++ b/tests/lava_dispatcher/actions/test/test_shell.py @@ -4,15 +4,12 @@ # # SPDX-License-Identifier: GPL-2.0-or-later -import logging -import time - -import pytest +from unittest.mock import MagicMock, patch from lava_common.exceptions import ConnectionClosedError, TestError from lava_dispatcher.actions.test.shell import TestShell, TestShellAction -from lava_dispatcher.job import Job -from tests.utils import RecordingLogger + +from ...test_basic import LavaDispatcherTestCase 
class Mockmatch: @@ -28,119 +25,124 @@ def __init__(self, data): self.match = Mockmatch(data) -def test_accepts(): - assert TestShell.accepts(None, {}) == (False, '"definitions" not in parameters') - assert TestShell.accepts(None, {"definitions": {}}) == (True, "accepted") - - -def test_check_patterns(): - # "exit" - action = TestShellAction() - action.logger = RecordingLogger() - assert action.check_patterns("exit", None) is False - assert action.logger.logs == [ - ("info", "ok: lava_test_shell seems to have completed", {}) - ] - - # "eof" - action = TestShellAction() - action.logger = RecordingLogger() - with pytest.raises(ConnectionClosedError): - action.check_patterns("eof", None) - - # "timeout" - action = TestShellAction() - action.logger = RecordingLogger() - assert action.check_patterns("timeout", None) is True - assert action.logger.logs == [] - - -def test_signal_start_run(): - job = Job(1234, {}, None) - - # "signal.STARTRUN" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - action.parameters = {"namespace": "common"} - action.data = {} - action.set_namespace_data( - action="test-definition", - label="test-definition", - key="testdef_index", - value=["DEFINITION"], - ) - action.set_namespace_data( - action="repo-action", label="repo-action", key="uuid-list", value=["UUID"] - ) - - data = ("STARTRUN", "0_DEFINITION UUID") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: 0_DEFINITION UUID", {}), - ("info", "Starting test lava.%s (%s)", "0_DEFINITION", "UUID", {}), - ("info", "Skipping test definition patterns.", {}), - ] - assert action.current_run == { - "case": "0_DEFINITION", - "definition": "lava", - "result": "fail", - "uuid": "UUID", - } - assert action.patterns == {} - - # "signal.STARTRUN exception" - action = TestShellAction() - action.logger = RecordingLogger() - - data = ("STARTRUN", "0_DEFINITIO") - with 
pytest.raises(TestError): - action.check_patterns("signal", MockConnection(data)) is True - - -def test_signal_end_run(monkeypatch): - counts = 0 - - def monotonic(): - nonlocal counts - counts += 1 - return counts - - monkeypatch.setattr(time, "monotonic", monotonic) - - job = Job(1234, {}, None) - - # "signal.ENDRUN" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - action.parameters = {"namespace": "common"} - action.data = {} - action.set_namespace_data( - action="test-definition", - label="test-definition", - key="testdef_index", - value=["DEFINITION"], - ) - action.set_namespace_data( - action="repo-action", label="repo-action", key="uuid-list", value=["UUID"] - ) - - data = ("ENDRUN", "0_DEFINITION UUID") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: 0_DEFINITION UUID", {}), - ("info", "Ending use of test pattern.", {}), - ( - "info", - "Ending test lava.%s (%s), duration %.02f", - "0_DEFINITION", - "UUID", - 1, - {}, - ), - ( - "results", +class TestTestShell(LavaDispatcherTestCase): + def test_accepts(self): + self.assertEqual( + TestShell.accepts(None, {}), (False, '"definitions" not in parameters') + ) + self.assertEqual( + TestShell.accepts(None, {"definitions": {}}), (True, "accepted") + ) + + def test_check_patterns(self): + # "exit" + action = TestShellAction() + with self.assertLogs(action.logger) as action_logs: + self.assertIs(action.check_patterns("exit", None), False) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [("INFO", "ok: lava_test_shell seems to have completed")], + ) + + # "eof" + action = TestShellAction() + with self.assertRaises(ConnectionClosedError): + action.check_patterns("eof", None) + + # "timeout" + action = TestShellAction() + with self.assertRaisesRegex(AssertionError, "no logs"), self.assertLogs( + action.logger + ) as action_logs: + 
self.assertIs(action.check_patterns("timeout", None), True) + + def test_signal_start_run(self): + job = self.create_simple_job() + + # "signal.STARTRUN" + action = TestShellAction() + action.job = job + action.parameters = {"namespace": "common"} + action.data = {} + action.set_namespace_data( + action="test-definition", + label="test-definition", + key="testdef_index", + value=["DEFINITION"], + ) + action.set_namespace_data( + action="repo-action", label="repo-action", key="uuid-list", value=["UUID"] + ) + + data = ("STARTRUN", "0_DEFINITION UUID") + with self.assertLogs(action.logger, level="DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: 0_DEFINITION UUID"), + ("INFO", "Starting test lava.0_DEFINITION (UUID)"), + ("INFO", "Skipping test definition patterns."), + ], + ) + self.assertEqual( + action.current_run, + { + "case": "0_DEFINITION", + "definition": "lava", + "result": "fail", + "uuid": "UUID", + }, + ) + self.assertEqual(action.patterns, {}) + + # "signal.STARTRUN exception" + action = TestShellAction() + + data = ("STARTRUN", "0_DEFINITIO") + with self.assertRaises(TestError): + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + + def test_signal_end_run(self): + counts = 0 + + def monotonic(): + nonlocal counts + counts += 1 + return counts + + job = self.create_simple_job() + + # "signal.ENDRUN" + action = TestShellAction() + action.job = job + action.logger.results = MagicMock() + action.parameters = {"namespace": "common"} + action.data = {} + action.set_namespace_data( + action="test-definition", + label="test-definition", + key="testdef_index", + value=["DEFINITION"], + ) + action.set_namespace_data( + action="repo-action", label="repo-action", key="uuid-list", value=["UUID"] + ) + + data = ("ENDRUN", "0_DEFINITION UUID") + with self.assertLogs(action.logger, 
"DEBUG") as action_logs, patch( + "time.monotonic", monotonic + ): + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: 0_DEFINITION UUID"), + ("INFO", "Ending use of test pattern."), + ("INFO", "Ending test lava.0_DEFINITION (UUID), duration 1.00"), + ], + ) + action.logger.results.assert_called_once_with( { "definition": "lava", "case": "0_DEFINITION", @@ -152,239 +154,293 @@ def monotonic(): "revision": "unspecified", "namespace": "common", }, - {}, - ), - ] - assert action.current_run is None - - # "signal.ENDRUN exception" - action = TestShellAction() - action.logger = RecordingLogger() - - data = ("ENDRUN", "0_DEFINITIO") - with pytest.raises(TestError): - action.check_patterns("signal", MockConnection(data)) is True - - -def test_signal_start_end_tc(): - job = Job(1234, {}, None) - - # "signal.STARTTC" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - - data = ("STARTTC", "TESTCASE") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: TESTCASE", {}), - ("marker", {"case": "TESTCASE", "type": "start_test_case"}, {}), - ] - - # "signal.ENDTC" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - - data = ("ENDTC", "TESTCASE") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: TESTCASE", {}), - ("marker", {"case": "TESTCASE", "type": "end_test_case"}, {}), - ] - - -def test_signal_testcase(): - job = Job(1234, {}, None) - - # "signal.TESTCASE without test_uuid" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - - data = ("TESTCASE", "hello") - with pytest.raises(TestError): - action.check_patterns("signal", MockConnection(data)) - assert action.logger.logs == [ - 
("debug", "Received signal: hello", {}), - ("marker", {"case": "hello", "type": "test_case"}, {}), - ( - "error", - "Unknown test uuid. The STARTRUN signal for this test action was not received correctly.", - {}, - ), - ] - - # "signal.TESTCASE malformed parameters" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - action.signal_director.test_uuid = "UUID" - - data = ("TESTCASE", "hello") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: hello", {}), - ("marker", {"case": "hello", "type": "test_case"}, {}), - ("error", 'Ignoring malformed parameter for signal: "hello". ', {}), - ] - - # "signal.TESTCASE missing TEST_CASE_ID" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - action.signal_director.test_uuid = "UUID" - - data = ("TESTCASE", "TEST_CASE=e") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: TEST_CASE=e", {}), - ("marker", {"case": "TEST_CASE=e", "type": "test_case"}, {}), - ( - "error", - "Test case results without test_case_id (probably a sign of an incorrect parsing pattern being used): {'test_case': 'e'}", - {}, - ), - ] - - # "signal.TESTCASE missing RESULT" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - action.signal_director.test_uuid = "UUID" - - data = ("TESTCASE", "TEST_CASE_ID=case-id") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: TEST_CASE_ID=case-id", {}), - ("marker", {"case": "case-id", "type": "test_case"}, {}), - ( - "error", - "Test case results without result (probably a sign of an incorrect parsing pattern being used): {'test_case_id': 'case-id', 'result': 'unknown'}", - {}, - ), - ] - - # "signal.TESTCASE" - action = TestShellAction() - action.job = job - action.logger = 
RecordingLogger() - action.signal_director.test_uuid = "UUID" - - data = ("TESTCASE", "RESULT=pass TEST_CASE_ID=case_id") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: RESULT=pass TEST_CASE_ID=case_id", {}), - ("marker", {"case": "RESULT=pass", "type": "test_case"}, {}), - ("results", {"definition": None, "case": "case_id", "result": "pass"}, {}), - ] - - # "signal.TESTCASE with measurement" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - action.signal_director.test_uuid = "UUID" - - data = ("TESTCASE", "RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ( - "debug", - "Received signal: RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234", - {}, - ), - ("marker", {"case": "RESULT=pass", "type": "test_case"}, {}), - ( - "results", + ) + self.assertIsNone(action.current_run) + + # "signal.ENDRUN exception" + action = TestShellAction() + + data = ("ENDRUN", "0_DEFINITIO") + with self.assertRaises(TestError): + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + + def test_signal_start_end_tc(self): + job = self.create_simple_job() + + # "signal.STARTTC" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + + data = ("STARTTC", "TESTCASE") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [("DEBUG", "Received signal: TESTCASE")], + ) + action.logger.marker.assert_called_once_with( + {"case": "TESTCASE", "type": "start_test_case"} + ) + + # "signal.ENDTC" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + + data = ("ENDTC", "TESTCASE") + with self.assertLogs(action.logger, "DEBUG") 
as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [("DEBUG", "Received signal: TESTCASE")], + ) + action.logger.marker.assert_called_once_with( + {"case": "TESTCASE", "type": "end_test_case"} + ) + + def test_signal_testcase(self): + job = self.create_simple_job() + + # "signal.TESTCASE without test_uuid" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + + data = ("TESTCASE", "hello") + with self.assertRaises(TestError), self.assertLogs( + action.logger, "DEBUG" + ) as action_logs: + action.check_patterns("signal", MockConnection(data)) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: hello"), + ( + "ERROR", + "Unknown test uuid. The STARTRUN signal for this test action was not received correctly.", + ), + ], + ) + action.logger.marker.assert_called_once_with( + {"case": "hello", "type": "test_case"} + ) + + # "signal.TESTCASE malformed parameters" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + action.signal_director.test_uuid = "UUID" + + data = ("TESTCASE", "hello") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: hello"), + ("ERROR", 'Ignoring malformed parameter for signal: "hello". 
'), + ], + ) + action.logger.marker.assert_called_once_with( + {"case": "hello", "type": "test_case"} + ) + + # "signal.TESTCASE missing TEST_CASE_ID" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + action.signal_director.test_uuid = "UUID" + + data = ("TESTCASE", "TEST_CASE=e") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: TEST_CASE=e"), + ( + "ERROR", + "Test case results without test_case_id (probably a sign of an incorrect parsing pattern being used): {'test_case': 'e'}", + ), + ], + ) + action.logger.marker.assert_called_once_with( + {"case": "TEST_CASE=e", "type": "test_case"} + ) + + # "signal.TESTCASE missing RESULT" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + action.signal_director.test_uuid = "UUID" + + data = ("TESTCASE", "TEST_CASE_ID=case-id") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: TEST_CASE_ID=case-id"), + ( + "ERROR", + "Test case results without result (probably a sign of an incorrect parsing pattern being used): {'test_case_id': 'case-id', 'result': 'unknown'}", + ), + ], + ) + action.logger.marker.assert_called_once_with( + {"case": "case-id", "type": "test_case"} + ) + + # "signal.TESTCASE" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + action.logger.results = MagicMock() + action.signal_director.test_uuid = "UUID" + + data = ("TESTCASE", "RESULT=pass TEST_CASE_ID=case_id") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + 
self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ( + "DEBUG", + "Received signal: RESULT=pass TEST_CASE_ID=case_id", + ), + ], + ) + action.logger.marker.assert_called_once_with( + {"case": "RESULT=pass", "type": "test_case"} + ) + action.logger.results.assert_called_once_with( + {"definition": None, "case": "case_id", "result": "pass"} + ) + + # "signal.TESTCASE with measurement" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + action.logger.results = MagicMock() + action.signal_director.test_uuid = "UUID" + + data = ("TESTCASE", "RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ( + "DEBUG", + "Received signal: RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234", + ), + ], + ) + action.logger.marker.assert_called_once_with( + {"case": "RESULT=pass", "type": "test_case"} + ) + action.logger.results.assert_called_once_with( { "definition": None, "case": "case_id", "result": "pass", "measurement": 1234.0, }, - {}, - ), - ] - - # "signal.TESTCASE with measurement and unit" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - action.signal_director.test_uuid = "UUID" - - data = ("TESTCASE", "RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234 UNITS=s") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ( - "debug", - "Received signal: RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234 UNITS=s", - {}, - ), - ("marker", {"case": "RESULT=pass", "type": "test_case"}, {}), - ( - "results", + ) + + # "signal.TESTCASE with measurement and unit" + action = TestShellAction() + action.job = job + action.logger.marker = MagicMock() + action.logger.results = MagicMock() + action.signal_director.test_uuid 
= "UUID" + + data = ("TESTCASE", "RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234 UNITS=s") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ( + "DEBUG", + "Received signal: RESULT=pass TEST_CASE_ID=case_id MEASUREMENT=1234 UNITS=s", + ), + ], + ) + action.logger.marker.assert_called_once_with( + {"case": "RESULT=pass", "type": "test_case"} + ) + action.logger.results.assert_called_once_with( { "definition": None, "case": "case_id", "result": "pass", "measurement": 1234.0, "units": "s", - }, - {}, - ), - ] - - -def test_signal_test_feedback(): - job = Job(1234, {}, None) - - # "signal.TESTFEEDBACK missing ns" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - - data = ("TESTFEEDBACK", "FEED1") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ("debug", "Received signal: FEED1", {}), - ("error", "%s is not a valid namespace", {}), - ] - - -def test_signal_test_reference(): - job = Job(1234, {}, None) - - # "signal.TESTREFERENCE missing parameters" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - - data = ("TESTREFERENCE", "") - with pytest.raises(TestError): - action.check_patterns("signal", MockConnection(data)) - assert action.logger.logs == [("debug", "Received signal: ", {})] - - # "signal.TESTREFERENCE" - action = TestShellAction() - action.job = job - action.logger = RecordingLogger() - - data = ("TESTREFERENCE", "case-id pass http://example.com") - assert action.check_patterns("signal", MockConnection(data)) is True - assert action.logger.logs == [ - ( - "debug", - "Received signal: case-id pass http://example.com", - {}, - ), - ( - "results", + } + ) + + def test_signal_test_feedback(self): + job = self.create_simple_job() + + # "signal.TESTFEEDBACK missing 
ns" + action = TestShellAction() + action.job = job + + data = ("TESTFEEDBACK", "FEED1") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: FEED1"), + ("ERROR", "%s is not a valid namespace"), + ], + ) + + def test_signal_test_reference(self): + job = self.create_simple_job() + + # "signal.TESTREFERENCE missing parameters" + action = TestShellAction() + action.job = job + + data = ("TESTREFERENCE", "") + with self.assertRaises(TestError), self.assertLogs( + action.logger, "DEBUG" + ) as action_logs: + action.check_patterns("signal", MockConnection(data)) + + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ("DEBUG", "Received signal: "), + ], + ) + + # "signal.TESTREFERENCE" + action = TestShellAction() + action.job = job + action.logger.results = MagicMock() + + data = ("TESTREFERENCE", "case-id pass http://example.com") + with self.assertLogs(action.logger, "DEBUG") as action_logs: + self.assertIs(action.check_patterns("signal", MockConnection(data)), True) + self.assertEqual( + [(r.levelname, r.message) for r in action_logs.records], + [ + ( + "DEBUG", + "Received signal: case-id pass http://example.com", + ), + ], + ) + action.logger.results.assert_called_once_with( { "case": "case-id", "definition": None, "result": "pass", "reference": "http://example.com", - }, - {}, - ), - ] + } + ) diff --git a/tests/lava_dispatcher/actions/test_commands.py b/tests/lava_dispatcher/actions/test_commands.py index e6d4861113..a58812ed2e 100644 --- a/tests/lava_dispatcher/actions/test_commands.py +++ b/tests/lava_dispatcher/actions/test_commands.py @@ -4,94 +4,84 @@ # # SPDX-License-Identifier: GPL-2.0-or-later -import pytest +from unittest.mock import MagicMock +from unittest.mock import call as mock_call -from lava_common.timeout import Timeout 
from lava_dispatcher.actions.commands import CommandAction from lava_dispatcher.device import PipelineDevice -from lava_dispatcher.job import Job - -@pytest.fixture -def device(): - return PipelineDevice( - { - "commands": { - "hard_reset": "/path/to/hard-reset", - "power_off": ["something", "something-else"], - "users": {"do_something": {"do": "/bin/do", "undo": "/bin/undo"}}, - } - } - ) - - -@pytest.fixture -def action(device, mocker): - a = CommandAction() - a.job = Job(42, {}, None) - a.job.timeout = Timeout("job", a) - a.job.device = device - a.run_cmd = mocker.MagicMock() - return a - - -@pytest.fixture() -def do_something(action): - action.parameters = {"name": "do_something"} - assert action.validate() - return action - - -@pytest.fixture -def hard_reset(action): - action.parameters = {"name": "hard_reset"} - assert action.validate() - return action - - -def test_run(do_something): - do_something.run(None, 600) - do_something.run_cmd.assert_called_with("/bin/do") - - -def test_cleanup(do_something): - do_something.run(None, 600) - do_something.cleanup(None) - do_something.run_cmd.assert_called_with("/bin/undo") - - -def test_unknown_command(action): - action.parameters = {"name": "unknown_command"} - assert not action.validate() - assert "Unknown user command 'unknown_command'" in action.errors - - -def test_unconfigured_device(action): - action.job.device = PipelineDevice({}) - action.parameters = {"name": "some-action"} - assert not action.validate() # should not crash - - -def test_builtin_command_run(hard_reset): - hard_reset.run(None, 600) - hard_reset.run_cmd.assert_called_with("/path/to/hard-reset") - - -def test_builtin_command_cleanup_is_noop(hard_reset): - hard_reset.run(None, 600) - hard_reset.run_cmd.reset_mock() - hard_reset.cleanup(None) - hard_reset.run_cmd.assert_not_called() - - -def test_builtin_command_not_defined_for_device(action): - action.parameters = {"name": "pre_power_command"} - assert not action.validate() # should not crash - - 
-def test_multiple_commands(action, mocker): - call = mocker.call - action.parameters = {"name": "power_off"} - action.validate() - action.run(None, 600) - action.run_cmd.assert_has_calls([call("something"), call("something-else")]) +from ..test_basic import LavaDispatcherTestCase + + +class TestCommands(LavaDispatcherTestCase): + def setUp(self): + super().setUp() + self.job = self.create_simple_job( + device_dict=PipelineDevice( + { + "commands": { + "hard_reset": "/path/to/hard-reset", + "power_off": ["something", "something-else"], + "users": { + "do_something": {"do": "/bin/do", "undo": "/bin/undo"} + }, + } + } + ) + ) + self.action = CommandAction() + self.action.job = self.job + self.action.run_cmd = MagicMock() + + def do_something(self): + self.action.parameters = {"name": "do_something"} + self.assertTrue(self.action.validate()) + + def hard_reset(self): + self.action.parameters = {"name": "hard_reset"} + self.action.validate() + + def test_run(self): + self.do_something() + self.action.run(None, 600) + self.action.run_cmd.assert_called_with("/bin/do") + + def test_cleanup(self): + self.do_something() + self.action.run(None, 600) + self.action.cleanup(None) + self.action.run_cmd.assert_called_with("/bin/undo") + + def test_unknown_command(self): + self.action.parameters = {"name": "unknown_command"} + self.assertFalse(self.action.validate()) + self.assertIn("Unknown user command 'unknown_command'", self.action.errors) + + def test_unconfigured_device(self): + self.job.device = PipelineDevice({}) + self.action.parameters = {"name": "some-action"} + self.assertFalse(self.action.validate()) + + def test_builtin_command_run(self): + self.hard_reset() + self.action.run(None, 600) + self.action.run_cmd.assert_called_with("/path/to/hard-reset") + + def test_builtin_command_cleanup_is_noop(self): + self.hard_reset() + self.action.run(None, 600) + self.action.run_cmd.reset_mock() + self.action.cleanup(None) + self.action.run_cmd.assert_not_called() + + def 
test_builtin_command_not_defined_for_device(self): + self.action.parameters = {"name": "pre_power_command"} + self.assertFalse(self.action.validate()) + + def test_multiple_commands(self): + self.action.parameters = {"name": "power_off"} + self.action.validate() + self.action.run(None, 600) + self.action.run_cmd.assert_has_calls( + (mock_call("something"), mock_call("something-else")) + ) diff --git a/tests/lava_dispatcher/test_auto_login.py b/tests/lava_dispatcher/test_auto_login.py index ab76cf6225..51e990f7c5 100644 --- a/tests/lava_dispatcher/test_auto_login.py +++ b/tests/lava_dispatcher/test_auto_login.py @@ -7,14 +7,14 @@ from lava_common.exceptions import JobError from lava_dispatcher.action import Pipeline from lava_dispatcher.actions.boot import AutoLoginAction -from lava_dispatcher.job import Job from tests.lava_dispatcher.test_basic import LavaDispatcherTestCase class AutoLoginTestCase(LavaDispatcherTestCase): def _make_pipeline(self, params): - job = Job(1234, {}, None) - job.device = {"actions": {"boot": {"methods": []}}} + job = self.create_simple_job( + device_dict={"actions": {"boot": {"methods": []}}}, + ) pipeline = Pipeline(parent=None, job=job) auto_login = AutoLoginAction() auto_login.section = "internal" diff --git a/tests/lava_dispatcher/test_basic.py b/tests/lava_dispatcher/test_basic.py index 1fa015c4d3..8fc2f55686 100644 --- a/tests/lava_dispatcher/test_basic.py +++ b/tests/lava_dispatcher/test_basic.py @@ -3,11 +3,17 @@ # Author: Neil Williams # # SPDX-License-Identifier: GPL-2.0-or-later +from __future__ import annotations import os import sys import time import unittest +from pathlib import Path +from random import randint +from tempfile import TemporaryDirectory +from time import monotonic +from typing import TYPE_CHECKING import voluptuous from jinja2 import ChoiceLoader, DictLoader, FileSystemLoader @@ -20,20 +26,53 @@ LAVAError, ) from lava_common.jinja import create_device_templates_env +from lava_common.log import YAMLLogger from 
lava_common.schemas import validate as validate_job from lava_common.schemas.device import validate as validate_device +from lava_common.timeout import Timeout from lava_common.yaml import yaml_safe_dump, yaml_safe_load from lava_dispatcher.action import Action, Pipeline from lava_dispatcher.actions.deploy.image import DeployImages -from lava_dispatcher.device import NewDevice +from lava_dispatcher.device import NewDevice, PipelineDevice +from lava_dispatcher.job import Job from lava_dispatcher.parser import JobParser -from tests.utils import DummyLogger + +if TYPE_CHECKING: + from typing import Optional class LavaDispatcherTestCase(unittest.TestCase): # set to True to update pipeline_references automatically. update_ref = False + def create_temporary_directory(self) -> Path: + tmp_dir = TemporaryDirectory(prefix=self.__call__.__name__) + self.addCleanup(tmp_dir.cleanup) + return Path(tmp_dir.name) + + TESTCASE_JOB_LOGGER = YAMLLogger("lava_dispatcher_testcase_job_logger") + + def create_simple_job( + self, device_dict: Optional[dict] = None, job_parameters: Optional[dict] = None + ) -> Job: + if device_dict is None: + device_dict = {} + + if job_parameters is None: + job_parameters = {} + + new_job = Job( + job_id=randint(0, 2**32 - 1), + parameters=job_parameters, + logger=LavaDispatcherTestCase.TESTCASE_JOB_LOGGER, + device=PipelineDevice(device_dict), + timeout=Timeout( + f"unittest-timeout-{self.__class__.__name__}", + None, + ), + ) + return new_job + @classmethod def pipeline_reference(cls, filename, job=None): y_file = os.path.join(os.path.dirname(__file__), "pipeline_refs", filename) @@ -125,7 +164,7 @@ def test_composite_action_aggregates_errors_from_sub_actions(self): sub2.name = "sub2" sub2.__errors__ = [2] - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) sub1.name = "sub1" pipe.add_action(sub1) pipe.add_action(sub2) @@ -216,14 +255,18 @@ def create_custom_job( try: parser = JobParser() job = parser.parse( - yaml_safe_dump(job_data), 
device, "4999", None, dispatcher_config + yaml_safe_dump(job_data), + device, + "4999", + YAMLLogger("lava_dispatcher_testcase_job_logger"), + dispatcher_config, ) except (ConfigurationError, TypeError) as exc: print("####### Parser exception ########") print(device) print("#######") raise ConfigurationError("Invalid device: %s" % exc) - job.logger = DummyLogger() + return job def create_job( @@ -255,8 +298,13 @@ def create_kvm_job(self, filename, validate=False): if validate: validate_job(job_data, strict=False) try: - job = parser.parse(yaml_safe_dump(job_data), device, 4212, None, "") - job.logger = DummyLogger() + job = parser.parse( + yaml_safe_dump(job_data), + device, + 4212, + YAMLLogger("lava_dispatcher_testcase_job_logger"), + "", + ) except LAVAError as exc: print(exc) return None @@ -272,11 +320,11 @@ def __init__(self): super().__init__() def run(self, connection, max_end_time): - time.sleep(1) + time.sleep(0.01) self.ran = True def test_create_empty_pipeline(self): - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) self.assertEqual(pipe.actions, []) def test_add_action_to_pipeline(self): @@ -288,8 +336,8 @@ def test_add_action_to_pipeline(self): self.assertEqual(action.summary, "starter") # action needs to be added to a top level pipe first with self.assertRaises(LAVABug): - Pipeline(action) - pipe = Pipeline() + Pipeline(job=self.create_simple_job(), parent=action) + pipe = Pipeline(job=self.create_simple_job()) with self.assertRaises(LAVABug): pipe.add_action(None) with self.assertRaises(LAVABug): @@ -314,7 +362,7 @@ def test_create_pipeline(self): action.name = "internal_pipe" action.description = "test action only" action.summary = "starter" - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) pipe.add_action(action) self.assertEqual(len(pipe.actions), 1) self.assertEqual(action.level, "1") @@ -323,12 +371,13 @@ def test_create_pipeline(self): action.summary = "child" action.description = "action implementing an 
internal pipe" with self.assertRaises(LAVABug): - Pipeline(action) + Pipeline(job=self.create_simple_job(), parent=action) pipe.add_action(action) self.assertEqual(action.level, "2") self.assertEqual(len(pipe.actions), 2) - # a formal RetryAction would contain a pre-built pipeline which can be inserted directly - retry_pipe = Pipeline(action) + # a formal RetryAction would contain a pre-built pipeline + # which can be inserted directly + retry_pipe = Pipeline(job=self.create_simple_job(), parent=action) action = Action() action.name = "inside_action" action.description = "action inside the internal pipe" @@ -342,7 +391,7 @@ def test_complex_pipeline(self): action.name = "starter_action" action.description = "test action only" action.summary = "starter" - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) pipe.add_action(action) self.assertEqual(action.level, "1") action = Action() @@ -351,8 +400,9 @@ def test_complex_pipeline(self): action.summary = "child" pipe.add_action(action) self.assertEqual(action.level, "2") - # a formal RetryAction would contain a pre-built pipeline which can be inserted directly - retry_pipe = Pipeline(action) + # a formal RetryAction would contain a pre-built pipeline + # which can be inserted directly + retry_pipe = Pipeline(job=self.create_simple_job(), parent=action) action = Action() action.name = "child_action" action.description = "action inside the internal pipe" @@ -371,7 +421,7 @@ def test_complex_pipeline(self): action.summary = "baby" retry_pipe.add_action(action) self.assertEqual(action.level, "2.3") - inner_pipe = Pipeline(action) + inner_pipe = Pipeline(job=self.create_simple_job(), parent=action) action = Action() action.name = "single_action" action.description = "single line action" @@ -491,33 +541,33 @@ def setUp(self): self.sub1 = TestPipeline.FakeAction() def test_list_of_subcommands(self): - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) pipe.add_action(self.sub0) 
pipe.add_action(self.sub1) self.assertIs(pipe.actions[0], self.sub0) self.assertIs(pipe.actions[1], self.sub1) def test_runs_subaction(self): - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) pipe.add_action(self.sub0) pipe.add_action(self.sub1) - pipe.run_actions(None, None) + pipe.run_actions(None, monotonic() + 1.0) self.assertTrue(self.sub0.ran) self.assertTrue(self.sub1.ran) self.assertNotEqual(self.sub0.timeout.elapsed_time, 0) self.assertNotEqual(self.sub1.timeout.elapsed_time, 0) def test_keep_connection(self): - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) pipe.add_action(TestFakeActions.KeepConnection()) conn = object() - self.assertIs(conn, pipe.run_actions(conn, None)) + self.assertIs(conn, pipe.run_actions(conn, monotonic() + 1.0)) def test_change_connection(self): - pipe = Pipeline() + pipe = Pipeline(job=self.create_simple_job()) pipe.add_action(TestFakeActions.MakeNewConnection()) conn = object() - self.assertIsNot(conn, pipe.run_actions(conn, None)) + self.assertIsNot(conn, pipe.run_actions(conn, monotonic() + 1.0)) class TestStrategySelector(LavaDispatcherTestCase): diff --git a/tests/lava_dispatcher/test_flasher.py b/tests/lava_dispatcher/test_flasher.py index 43d4be5c1b..552532cd2d 100644 --- a/tests/lava_dispatcher/test_flasher.py +++ b/tests/lava_dispatcher/test_flasher.py @@ -5,17 +5,11 @@ # SPDX-License-Identifier: GPL-2.0-or-later import shlex +from unittest.mock import patch import pexpect -import lava_dispatcher.actions.deploy.docker # pylint: disable=unused-import - -# This will be monkey patched -import lava_dispatcher.utils.shell # pylint: disable=unused-import -from lava_dispatcher.action import Pipeline from lava_dispatcher.actions.deploy.flasher import Flasher, FlasherAction -from lava_dispatcher.device import PipelineDevice -from lava_dispatcher.job import Job from tests.lava_dispatcher.test_basic import Factory, LavaDispatcherTestCase from tests.utils import DummyLogger @@ -35,79 +29,70 @@ 
def test_pipeline(self): description_ref = self.pipeline_reference("b2260-flasher.yaml", job=job) self.assertEqual(description_ref, job.pipeline.describe()) - -def test_run(monkeypatch): - class Proc: - def wait(self): - return 0 - - def expect(self, arg): - assert arg == pexpect.EOF - - commands = [ - ["/home/lava/bin/PiCtrl.py", "PowerPlug", "0", "off"], - ["touch"], - ] - - def spawn(cmd, cwd, encoding, codec_errors, logfile, timeout, searchwindowsize): - command = commands.pop(0) - assert cmd == shlex.join(command) - assert encoding == "utf-8" - assert codec_errors == "replace" - assert searchwindowsize == 10 - return Proc() - - monkeypatch.setattr(pexpect, "spawn", spawn) - - action = FlasherAction() - device = PipelineDevice( - { - "actions": { - "deploy": { - "methods": { - "flasher": {"commands": ["{HARD_RESET_COMMAND}", "touch"]} + def test_run(self): + class Proc: + # pylint: disable=no-self-argument + def wait(self_): + return 0 + + def expect(self_, arg): + self.assertEqual(arg, pexpect.EOF) + + commands = [ + ["/home/lava/bin/PiCtrl.py", "PowerPlug", "0", "off"], + ["touch"], + ] + + action = FlasherAction() + action.job = self.create_simple_job( + device_dict={ + "actions": { + "deploy": { + "methods": { + "flasher": {"commands": ["{HARD_RESET_COMMAND}", "touch"]} + } } - } - }, - "commands": {"hard_reset": "/home/lava/bin/PiCtrl.py PowerPlug 0 off"}, - } - ) - action.job = Job(1234, {}, None) - action.job.device = device - action.parameters = {"namespace": "common", "images": {}} - action.section = Flasher.section - - # self.commands is populated by validate - action.validate() - assert action.errors == [] # nosec - unit test - - # Run the action - action.run(None, 10) - assert commands == [] # nosec - unit test - - -def test_accepts(): - pipe = Pipeline(job=Job(1234, {}, None)) - pipe.add_action = lambda a, b: None - flasher = Flasher - - # Normal case - device = {"actions": {"deploy": {"methods": "flasher"}}} - params = {"to": "flasher"} - assert 
flasher.accepts(device, params) == (True, "accepted") # nosec - unit test - - # Flasher is not defined - device = {"actions": {"deploy": {"methods": "tftp"}}} - params = {"to": "flasher"} - assert flasher.accepts(device, params) == ( # nosec - unit test - False, - "'flasher' not in the device configuration deploy methods", - ) - - # Flasher is not requested - device = {"actions": {"deploy": {"methods": "flasher"}}} - params = {"to": "tftp"} - assert flasher.accepts(device, params) == ( # nosec - unit test - False, - '"to" parameter is not "flasher"', - ) + }, + "commands": {"hard_reset": "/home/lava/bin/PiCtrl.py PowerPlug 0 off"}, + } + ) + action.parameters = {"namespace": "common", "images": {}} + action.section = Flasher.section + + # self.commands is populated by validate + action.validate() + self.assertFalse(action.errors) + + # Run the action + with patch("pexpect.spawn", return_value=Proc()) as mock_spawn: + action.run(None, 10) + + self.assertEqual(mock_spawn.call_count, 2) + + for i, call in enumerate(mock_spawn.mock_calls): + self.assertEqual(call.args, (shlex.join(commands[i]),)) + + self.assertEqual(call.kwargs["encoding"], "utf-8") + self.assertEqual(call.kwargs["codec_errors"], "replace") + self.assertEqual(call.kwargs["searchwindowsize"], 10) + + def test_accepts(self): + # Normal case + device = {"actions": {"deploy": {"methods": "flasher"}}} + params = {"to": "flasher"} + self.assertEqual(Flasher.accepts(device, params), (True, "accepted")) + + # Flasher is not defined + device = {"actions": {"deploy": {"methods": "tftp"}}} + params = {"to": "flasher"} + self.assertEqual( + Flasher.accepts(device, params), + (False, "'flasher' not in the device configuration deploy methods"), + ) + + # Flasher is not requested + device = {"actions": {"deploy": {"methods": "flasher"}}} + params = {"to": "tftp"} + self.assertEqual( + Flasher.accepts(device, params), (False, '"to" parameter is not "flasher"') + ) diff --git a/tests/lava_dispatcher/test_grub.py 
b/tests/lava_dispatcher/test_grub.py index 4309f1e589..4d4525e6af 100644 --- a/tests/lava_dispatcher/test_grub.py +++ b/tests/lava_dispatcher/test_grub.py @@ -18,7 +18,6 @@ from lava_dispatcher.actions.boot.grub import GrubMainAction from lava_dispatcher.actions.deploy.tftp import TftpAction from lava_dispatcher.device import NewDevice -from lava_dispatcher.job import Job from lava_dispatcher.utils import filesystem from lava_dispatcher.utils.network import dispatcher_ip from lava_dispatcher.utils.strings import substitute @@ -164,8 +163,10 @@ def test_overlay_action(self): } (rendered, _) = self.factory.create_device("d02-01.jinja2") device = NewDevice(yaml_safe_load(rendered)) - job = Job(4212, parameters, None) - job.device = device + job = self.create_simple_job( + device_dict=device, + job_parameters=parameters, + ) pipeline = Pipeline(job=job, parameters=parameters["actions"]["boot"]) job.pipeline = pipeline overlay = BootloaderCommandOverlay() diff --git a/tests/lava_dispatcher/test_ipxe.py b/tests/lava_dispatcher/test_ipxe.py index 5fd96c2b73..fcf521dba5 100644 --- a/tests/lava_dispatcher/test_ipxe.py +++ b/tests/lava_dispatcher/test_ipxe.py @@ -15,7 +15,6 @@ from lava_dispatcher.actions.boot.ipxe import BootloaderAction from lava_dispatcher.actions.deploy.tftp import TftpAction from lava_dispatcher.device import NewDevice -from lava_dispatcher.job import Job from lava_dispatcher.parser import JobParser from lava_dispatcher.utils.network import dispatcher_ip from lava_dispatcher.utils.strings import substitute @@ -151,8 +150,10 @@ def test_overlay_action(self): } (rendered, _) = self.factory.create_device("x86-01.jinja2") device = NewDevice(yaml_safe_load(rendered)) - job = Job(4212, parameters, None) - job.device = device + job = self.create_simple_job( + device_dict=device, + job_parameters=parameters, + ) pipeline = Pipeline(job=job, parameters=parameters["actions"]["boot"]) job.pipeline = pipeline overlay = BootloaderCommandOverlay() @@ -162,17 +163,20 
@@ def test_overlay_action(self): ramdisk = parameters["actions"]["deploy"]["ramdisk"] overlay.validate() - assert overlay.method == "ipxe" - assert overlay.commands == [ - "dhcp net0", - "set console console=ttyS0,115200n8 lava_mac={LAVA_MAC}", - "set extraargs ip=dhcp", - "kernel tftp://{SERVER_IP}/{KERNEL} ${extraargs} ${console}", - "initrd tftp://{SERVER_IP}/{RAMDISK}", - "boot", - ] - assert overlay.use_bootscript is False - assert overlay.lava_mac == "00:90:05:af:00:7d" + self.assertEqual(overlay.method, "ipxe") + self.assertEqual( + overlay.commands, + [ + "dhcp net0", + "set console console=ttyS0,115200n8 lava_mac={LAVA_MAC}", + "set extraargs ip=dhcp", + "kernel tftp://{SERVER_IP}/{KERNEL} ${extraargs} ${console}", + "initrd tftp://{SERVER_IP}/{RAMDISK}", + "boot", + ], + ) + self.assertIs(overlay.use_bootscript, False) + self.assertEqual(overlay.lava_mac, "00:90:05:af:00:7d") substitution_dictionary = { "{SERVER_IP}": ip_addr, diff --git a/tests/lava_dispatcher/test_job.py b/tests/lava_dispatcher/test_job.py index 0c0e179f8c..079a60ff16 100644 --- a/tests/lava_dispatcher/test_job.py +++ b/tests/lava_dispatcher/test_job.py @@ -6,47 +6,42 @@ from pathlib import Path -import pytest - -from lava_dispatcher.job import Job - - -@pytest.fixture -def job(): - return Job(42, {}, None) - - -def test_tmp_dir(job): - assert job.tmp_dir is not None - tmp_dir = Path(job.tmp_dir) - assert not tmp_dir.exists() - assert tmp_dir.name == "42" - - -def test_tmp_dir_with_prefix(job): - job.parameters["dispatcher"] = {"prefix": "FOOBAR-"} - tmp_dir = Path(job.tmp_dir) - assert tmp_dir.name == "FOOBAR-42" - - -def test_mkdtemp(job): - d = job.mkdtemp("my-action") - assert Path(d).exists() - assert "my-action" in d - - -def test_mkdtemp_with_prefix(job): - job.parameters["dispatcher"] = {"prefix": "FOOBAR-"} - d = Path(job.mkdtemp("my-action")) - assert d.parent.name == "FOOBAR-42" - - -def test_mktemp_with_override(job, tmp_path): - override = tmp_path / "override" - first = 
Path(job.mkdtemp("my-action", override=override)) - second = Path(job.mkdtemp("my-assert", override=override)) - assert first.exists() - assert second.exists() - assert first != second - assert first.parent == second.parent - assert first.parent.name == str(job.job_id) +from .test_basic import LavaDispatcherTestCase + + +class TestJob(LavaDispatcherTestCase): + def setUp(self): + super().setUp() + self.job = self.create_simple_job() + + def test_tmp_dir(self): + self.assertIsNotNone(self.job.tmp_dir) + tmp_dir = Path(self.job.tmp_dir) + self.assertFalse(tmp_dir.exists()) + self.assertEqual(tmp_dir.name, str(self.job.job_id)) + + def test_tmp_dir_with_prefix(self): + self.job.parameters["dispatcher"] = {"prefix": "FOOBAR-"} + tmp_dir = Path(self.job.tmp_dir) + self.assertEqual(tmp_dir.name, f"FOOBAR-{self.job.job_id}") + + def test_mkdtemp(self): + d = self.job.mkdtemp("my-action") + self.assertTrue(Path(d).exists()) + self.assertIn("my-action", d) + + def test_mkdtemp_with_prefix(self): + self.job.parameters["dispatcher"] = {"prefix": "FOOBAR-"} + d = Path(self.job.mkdtemp("my-action")) + self.assertEqual(d.parent.name, f"FOOBAR-{self.job.job_id}") + + def test_mktemp_with_override(self): + tmp_dir_path = self.create_temporary_directory() + override = tmp_dir_path / "override" + first = Path(self.job.mkdtemp("my-action", override=override)) + second = Path(self.job.mkdtemp("my-assert", override=override)) + self.assertTrue(first.exists()) + self.assertTrue(second.exists()) + self.assertNotEqual(first, second) + self.assertEqual(first.parent, second.parent) + self.assertEqual(first.parent.name, str(self.job.job_id)) diff --git a/tests/lava_dispatcher/test_kvm.py b/tests/lava_dispatcher/test_kvm.py index 723a1bb297..2dbb784462 100644 --- a/tests/lava_dispatcher/test_kvm.py +++ b/tests/lava_dispatcher/test_kvm.py @@ -36,7 +36,7 @@ def test_kvm_simulation(self): """ factory = Factory() job = factory.create_kvm_job("sample_jobs/kvm.yaml") - pipe = Pipeline() + pipe = 
Pipeline(job=job) action = Action() action.name = "deploy_linaro_image" action.description = "deploy action using preset subactions in an internal pipe" @@ -47,7 +47,7 @@ def test_kvm_simulation(self): action.parameters = {"image": "file:///none/images/bad-kvm-debian-wheezy.img"} pipe.add_action(action) self.assertEqual(action.level, "1") - deploy_pipe = Pipeline(action) + deploy_pipe = Pipeline(job=job, parent=action) action = Action() action.name = "downloader" action.description = "download image wrapper, including an internal retry pipe" @@ -55,8 +55,9 @@ def test_kvm_simulation(self): action.job = job deploy_pipe.add_action(action) self.assertEqual(action.level, "1.1") - # a formal RetryAction would contain a pre-built pipeline which can be inserted directly - retry_pipe = Pipeline(action) + # a formal RetryAction would contain a pre-built pipeline which can be + # inserted directly + retry_pipe = Pipeline(job=job, parent=action) action = Action() action.name = "wget" action.description = "do the download with retries" diff --git a/tests/lava_dispatcher/test_multi.py b/tests/lava_dispatcher/test_multi.py index f5a81dcf85..a5ea10fcdc 100644 --- a/tests/lava_dispatcher/test_multi.py +++ b/tests/lava_dispatcher/test_multi.py @@ -10,13 +10,11 @@ from lava_common.decorators import nottest from lava_common.yaml import yaml_safe_dump, yaml_safe_load -from lava_dispatcher.action import Action, Pipeline, Timeout +from lava_dispatcher.action import Action, Pipeline from lava_dispatcher.device import NewDevice -from lava_dispatcher.job import Job from lava_dispatcher.parser import JobParser from tests.lava_dispatcher.test_basic import Factory, LavaDispatcherTestCase from tests.lava_dispatcher.test_uboot import UBootFactory -from tests.utils import DummyLogger class TestMultiDeploy(LavaDispatcherTestCase): @@ -79,23 +77,16 @@ def run(self, connection, max_end_time): self.data[self.name] = self.parameters return connection # no actual connection during this fake job - 
@nottest - class TestJob(Job): - def __init__(self): - super().__init__(4122, 0, self.parameters) - @patch( "lava_dispatcher.actions.deploy.tftp.which", return_value="/usr/bin/in.tftpd" ) def test_multi_deploy(self, which_mock): self.assertIsNotNone(self.parsed_data) - job = Job(4212, self.parsed_data, None) - job.timeout = Timeout("Job", Timeout.parse({"minutes": 2})) + job = self.create_simple_job( + device_dict=TestMultiDeploy.FakeDevice(), + job_parameters=self.parsed_data, + ) pipeline = Pipeline(job=job) - device = TestMultiDeploy.FakeDevice() - self.assertIsNotNone(device) - job.device = device - job.logger = DummyLogger() job.pipeline = pipeline counts = {} for action_data in self.parsed_data["actions"]: diff --git a/tests/lava_dispatcher/test_repeat.py b/tests/lava_dispatcher/test_repeat.py index 4c4e49281e..894e7bf2d4 100644 --- a/tests/lava_dispatcher/test_repeat.py +++ b/tests/lava_dispatcher/test_repeat.py @@ -4,29 +4,35 @@ # # SPDX-License-Identifier: GPL-2.0-or-later -import time +from unittest.mock import patch from lava_dispatcher.action import Action, Pipeline from lava_dispatcher.logical import RetryAction +from .test_basic import LavaDispatcherTestCase -class DummyAction(Action): - def __init__(self): - super().__init__() - self.ran = 0 - def run(self, connection, max_end_time): - assert connection is None # nosec - unit test support. - assert max_end_time == 1 # nosec - unit test support. 
- self.ran += 1 +class TestRepeatAction(LavaDispatcherTestCase): + def test_repeat_action(self): + class DummyAction(Action): + # pylint: disable=no-self-argument + def __init__(self_): + super().__init__() + self_.ran = 0 + def run(self_, connection, max_end_time): + self.assertIsNone(connection) + self.assertEqual(max_end_time, 1) + self_.ran += 1 -def test_repeat_action(monkeypatch): - monkeypatch.setattr(time, "monotonic", lambda: 0) - ra = RetryAction() - ra.parameters = {"repeat": 5} - ra.level = "1" - ra.pipeline = Pipeline(parent=ra) - ra.pipeline.add_action(DummyAction()) - ra.run(None, 1) - assert ra.pipeline.actions[0].ran == 5 # nosec - unit test support. + ra = RetryAction() + ra.parameters = {"repeat": 5} + ra.level = "1" + ra.pipeline = Pipeline(job=self.create_simple_job(), parent=ra) + ra.pipeline.add_action(DummyAction()) + with patch("time.monotonic", return_value=0.0): + ra.run(None, 1) + self.assertEqual( + ra.pipeline.actions[0].ran, + 5, + ) diff --git a/tests/lava_dispatcher/test_retries.py b/tests/lava_dispatcher/test_retries.py index d42be8e2a7..39d8020ca7 100644 --- a/tests/lava_dispatcher/test_retries.py +++ b/tests/lava_dispatcher/test_retries.py @@ -9,20 +9,13 @@ from lava_common.exceptions import InfrastructureError, JobError, LAVABug from lava_common.timeout import Timeout from lava_dispatcher.action import Action, Pipeline -from lava_dispatcher.job import Job from lava_dispatcher.logical import DiagnosticAction, RetryAction from lava_dispatcher.parser import JobParser from lava_dispatcher.power import FinalizeAction from tests.lava_dispatcher.test_basic import LavaDispatcherTestCase -from tests.utils import DummyLogger class TestAction(LavaDispatcherTestCase): - class FakeJob(Job): - def __init__(self, parameters): - super().__init__(4212, parameters, None) - self.logger = DummyLogger() - class FakeDeploy: """ Derived from object, *not* Deployment as this confuses python -m unittest discover @@ -45,8 +38,8 @@ def __init__(self, 
parent): self.action.job = self.job class FakePipeline(Pipeline): - def __init__(self, parent=None, job=None): - super().__init__(parent, job) + def __init__(self, job): + super().__init__(job) job.pipeline = self class FakeAction(Action): @@ -132,8 +125,8 @@ def setUp(self): } ], } - self.fakejob = TestAction.FakeJob(self.parameters) - JobParser._timeouts(None, self.parameters, self.fakejob) + self.fakejob = self.create_simple_job(job_parameters=self.parameters) + self.fakejob.timeout = JobParser._parse_job_timeout(self.parameters) def lookup_deploy(self, params): actions = iter(params) @@ -324,8 +317,7 @@ def test_failure_retry_specified_interval(self): } ], } - self.fakejob = TestAction.FakeJob(self.parameters) - JobParser._timeouts(None, self.parameters, self.fakejob) + self.fakejob = self.create_simple_job(job_parameters=self.parameters) pipeline = TestAction.FakePipeline(job=self.fakejob) action = TestAction.InternalRetryAction() for actions in self.lookup_deploy(self.parameters["actions"]): @@ -339,14 +331,6 @@ def test_failure_retry_specified_interval(self): class TestTimeout(LavaDispatcherTestCase): - class FakeJob(Job): - def __init__(self, parameters): - super().__init__(4212, parameters, None) - self.logger = DummyLogger() - - def validate(self, simulate=False): - self.pipeline.validate_actions() - class FakeDevice(dict): def __init__(self): self.update({"parameters": {}, "commands": {}}) @@ -354,10 +338,6 @@ def __init__(self): def __get_item__(self): return {} - class FakePipeline(Pipeline): - def __init__(self, parent=None, job=None): - super().__init__(parent, job) - class FakeAction(Action): """ Isolated Action which can be used to generate artificial exceptions. 
@@ -468,8 +448,8 @@ def setUp(self): } ], } - self.fakejob = TestTimeout.FakeJob(self.parameters) - JobParser._timeouts(None, self.parameters, self.fakejob) + self.fakejob = self.create_simple_job(job_parameters=self.parameters) + self.fakejob.timeout = JobParser._parse_job_timeout(self.parameters) def test_action_timeout(self): """ @@ -478,18 +458,17 @@ def test_action_timeout(self): """ self.assertIsNotNone(self.fakejob.timeout) seconds = 2 - pipeline = TestTimeout.FakePipeline(job=self.fakejob) + pipeline = TestAction.FakePipeline(job=self.fakejob) action = TestTimeout.FakeAction() - action.timeout = Timeout(action.name, action=action, duration=seconds) pipeline.add_action(action) - self.fakejob.pipeline = pipeline + action.timeout = Timeout(action.name, action=action, duration=seconds) self.fakejob.device = TestTimeout.FakeDevice() with self.assertRaises(JobError): self.fakejob.run() def test_action_timout_custom_exception(self): seconds = 2 - pipeline = TestTimeout.FakePipeline(job=self.fakejob) + pipeline = TestAction.FakePipeline(job=self.fakejob) action = TestTimeout.FakeAction() action.timeout = Timeout( action.name, action=action, duration=seconds, exception=InfrastructureError @@ -503,7 +482,7 @@ def test_action_timout_custom_exception(self): def test_action_complete(self): self.assertIsNotNone(self.fakejob.timeout) seconds = 2 - pipeline = TestTimeout.FakePipeline(job=self.fakejob) + pipeline = TestAction.FakePipeline(job=self.fakejob) action = TestTimeout.SafeAction() action.timeout = Timeout(action.name, action=action, duration=seconds) pipeline.add_action(action) @@ -513,14 +492,13 @@ def test_action_complete(self): def test_job_timeout(self): self.assertIsNotNone(self.fakejob.timeout) - pipeline = TestTimeout.FakePipeline(job=self.fakejob) + pipeline = TestAction.FakePipeline(job=self.fakejob) action = TestTimeout.LongAction() pipeline.add_action(action) pipeline.add_action(TestTimeout.SafeAction()) finalize = FinalizeAction() 
finalize.parameters["namespace"] = "common" pipeline.add_action(finalize) - self.fakejob.pipeline = pipeline self.fakejob.device = TestTimeout.FakeDevice() with self.assertRaises(JobError): self.fakejob.run() @@ -530,7 +508,7 @@ def test_retry_job_timeout(self): class LongRetryAction(RetryAction): def populate(self, parameters): - self.pipeline = TestTimeout.FakePipeline(job=fakejob) + self.pipeline = TestAction.FakePipeline(job=fakejob) self.pipeline.add_action(TestTimeout.LongAction()) finalize = FinalizeAction() @@ -538,7 +516,7 @@ def populate(self, parameters): self.pipeline.add_action(finalize) self.assertIsNotNone(self.fakejob.timeout) - pipeline = TestTimeout.FakePipeline(job=self.fakejob) + pipeline = TestAction.FakePipeline(job=self.fakejob) action = LongRetryAction() action.max_retries = 10 pipeline.add_action(action) @@ -561,7 +539,7 @@ def populate(self, parameters): def test_job_safe(self): self.assertIsNotNone(self.fakejob.timeout) - pipeline = TestTimeout.FakePipeline(job=self.fakejob) + pipeline = TestAction.FakePipeline(job=self.fakejob) action = TestTimeout.SafeAction() pipeline.add_action(action) pipeline.add_action(TestTimeout.SafeAction()) @@ -576,7 +554,7 @@ def test_job_safe(self): def test_long_job_safe(self): self.fakejob.timeout.duration = 8 self.assertIsNotNone(self.fakejob.timeout) - pipeline = TestTimeout.FakePipeline(job=self.fakejob) + pipeline = TestAction.FakePipeline(job=self.fakejob) self.fakejob.pipeline = pipeline action = TestTimeout.SafeAction() action.timeout.duration = 2 diff --git a/tests/lava_dispatcher/test_test_shell.py b/tests/lava_dispatcher/test_test_shell.py index 73a3d4db04..b69f6e1bb8 100644 --- a/tests/lava_dispatcher/test_test_shell.py +++ b/tests/lava_dispatcher/test_test_shell.py @@ -9,9 +9,9 @@ import re from lava_common.exceptions import JobError, LAVATimeoutError, TestError +from lava_common.log import YAMLLogger from lava_common.yaml import yaml_safe_load from tests.lava_dispatcher.test_basic import 
Factory, LavaDispatcherTestCase -from tests.lava_dispatcher.test_multi import DummyLogger class FakeConnection: @@ -30,7 +30,6 @@ def setUp(self): ) factory = Factory() self.job = factory.create_kvm_job("sample_jobs/qemu-reboot.yaml", validate=True) - self.job.logger = DummyLogger() self.job.validate() self.ret = False test_retry = [ @@ -44,7 +43,7 @@ def setUp(self): if action.name == "lava-test-shell" ][0] print(self.skipped_shell.parameters["timeout"]) - self.skipped_shell.logger = DummyLogger() + self.skipped_shell.logger = YAMLLogger("dispatcher") test_retry = [ action for action in self.job.pipeline.actions @@ -80,7 +79,6 @@ def setUp(self): ) factory = Factory() self.job = factory.create_kvm_job("sample_jobs/kvm.yaml") - self.job.logger = DummyLogger() self.job.validate() self.ret = False test_retry = [ @@ -93,7 +91,7 @@ def setUp(self): for action in test_retry.pipeline.actions if action.name == "lava-test-shell" ][0] - self.test_shell.logger = DummyLogger() + self.test_shell.logger = YAMLLogger("dispatcher") def test_case_result(self): self.assertEqual([], self.job.pipeline.errors) diff --git a/tests/lava_dispatcher/test_uboot.py b/tests/lava_dispatcher/test_uboot.py index 26e892e6e3..5d5f830fe5 100644 --- a/tests/lava_dispatcher/test_uboot.py +++ b/tests/lava_dispatcher/test_uboot.py @@ -20,7 +20,6 @@ from lava_dispatcher.actions.deploy.apply_overlay import CompressRamdisk from lava_dispatcher.actions.deploy.tftp import TftpAction from lava_dispatcher.device import NewDevice -from lava_dispatcher.job import Job from lava_dispatcher.parser import JobParser from lava_dispatcher.power import PDUReboot, ResetDevice from lava_dispatcher.utils import filesystem @@ -291,10 +290,11 @@ def test_overlay_action(self, which_mock): }, }, } - data = yaml_safe_load(Factory().create_device("bbb-01.jinja2")[0]) - device = NewDevice(data) - job = Job(4212, parameters, None) - job.device = device + device = 
NewDevice(yaml_safe_load(Factory().create_device("bbb-01.jinja2")[0])) + job = self.create_simple_job( + device_dict=device, + job_parameters=parameters, + ) pipeline = Pipeline(job=job, parameters=parameters["actions"]["boot"]) job.pipeline = pipeline overlay = BootloaderCommandOverlay()