diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 74b990fd46db0..9ee6ab199934d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,23 +7,12 @@ default_stages: - pre-commit repos: -- repo: https://github.com/pycqa/flake8 - rev: c6e0d27593a45342ffa96a18bba708a5aab32fdf # 3.9.2 should match major Python version +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: 0ccbb5b7942d83fbcf7cb5e0fd99633efd2351d7 # v0.3.5 hooks: - - id: flake8 - additional_dependencies: - - flake8-bugbear==21.11.28 - - flake8-comprehensions==3.7.0 - - flake8-unused-arguments==0.0.6 - - flake8-use-fstring==1.3.0 -- repo: https://github.com/psf/black - rev: ae2c0758c9e61a385df9700dc9c231bf54887041 # 22.3.0 - hooks: - - id: black -- repo: https://github.com/timothycrosley/isort/ - rev: e44834b7b294701f596c9118d6c370f86671a50d # 5.12.0 - hooks: - - id: isort + - id: ruff + args: [ --fix ] + - id: ruff-format - repo: https://github.com/jendrikseipp/vulture rev: 44aed44e226ec0e5660851462f764ec5d5da957c # v2.3 hooks: diff --git a/cmd/agent/dist/checks/libs/wmi/sampler.py b/cmd/agent/dist/checks/libs/wmi/sampler.py index c8e7564cabc64..2073afdf012e1 100644 --- a/cmd/agent/dist/checks/libs/wmi/sampler.py +++ b/cmd/agent/dist/checks/libs/wmi/sampler.py @@ -24,4 +24,5 @@ Original discussion thread: https://github.com/DataDog/dd-agent/issues/1952 Credits to @TheCloudlessSky (https://github.com/TheCloudlessSky) """ + from datadog_checks.base.checks.win.wmi.sampler import WMISampler # noqa: F401 diff --git a/devenv/tasks/__init__.py b/devenv/tasks/__init__.py index 9f7887f5cebfb..686eaa5784b4e 100644 --- a/devenv/tasks/__init__.py +++ b/devenv/tasks/__init__.py @@ -1,6 +1,7 @@ """ Invoke entrypoint, import here all the tasks we want to make available """ + import os from invoke import Collection diff --git a/devenv/tasks/packer.py b/devenv/tasks/packer.py index 3d039d3881bb7..cc5eb06758010 100644 --- a/devenv/tasks/packer.py +++ b/devenv/tasks/packer.py @@ -1,7 +1,6 @@ """ Packer namespaced tasks """ -from __future__ import print_function from invoke import task from invoke.exceptions import Exit diff --git a/docs/dev/linters.md b/docs/dev/linters.md index f3a17b9c981a1..f48734e8f53ed 100644 --- a/docs/dev/linters.md +++ b/docs/dev/linters.md @@ -23,9 +23,7 @@ To run the linters locally, run `inv linter.go`. ## Python For Python, we're using ([see invoke task](https://github.com/DataDog/datadog-agent/blob/dffd3262934a5540b9bf8e4bd3a743732637ef37/tasks/linter_tasks.py/#L17-L33)): -- [flake8](https://flake8.pycqa.org/en/latest), a style linter. -- [black](https://black.readthedocs.io/en/stable/), a code formatter. -- [isort](https://pycqa.github.io/isort/), to sort the imports. +- [ruff](https://github.com/astral-sh/ruff), a style linter and a code formatter. - [vulture](https://github.com/jendrikseipp/vulture), to find unused code. -Their configuration is defined in both the [setup.cfg](https://github.com/DataDog/datadog-agent/blob/dffd3262934a5540b9bf8e4bd3a743732637ef37/setup.cfg) and the [pyproject.toml](https://github.com/DataDog/datadog-agent/blob/dffd3262934a5540b9bf8e4bd3a743732637ef37/pyproject.toml) files. +Their configuration is defined in the [pyproject.toml](https://github.com/DataDog/datadog-agent/blob/dffd3262934a5540b9bf8e4bd3a743732637ef37/pyproject.toml) file.
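The two ruff hooks configured above map to two CLI invocations: `ruff check --fix` (the linter, covering the rules previously enforced by flake8 and its plugins) and `ruff format` (the formatter, replacing black). A minimal sketch of an equivalent invoke wrapper — the task name `lint_python` here is hypothetical, not the repository's actual implementation in tasks/linter_tasks.py:

from invoke import task

@task
def lint_python(ctx, fix=False):
    """Run ruff as linter and formatter, mirroring the two pre-commit hooks."""
    # Lint step: rule selection comes from [tool.ruff.lint] in pyproject.toml.
    ctx.run("ruff check --fix ." if fix else "ruff check .")
    # Format step: the drop-in replacement for black.
    ctx.run("ruff format .")

With pre-commit installed, the same pair also runs automatically against staged files on each commit.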
diff --git a/pkg/config/legacy/tests/config.py b/pkg/config/legacy/tests/config.py index 7f70f6c344005..d08a1226d839c 100644 --- a/pkg/config/legacy/tests/config.py +++ b/pkg/config/legacy/tests/config.py @@ -191,7 +191,7 @@ def get_histogram_aggregates(configstr=None): for val in vals: val = val.strip() if val not in valid_values: - log.warning(f"Ignored histogram aggregate {val}, invalid") + log.warning("Ignored histogram aggregate %s, invalid", val) continue else: result.append(val) @@ -216,10 +216,10 @@ def get_histogram_percentiles(configstr=None): if floatval <= 0 or floatval >= 1: raise ValueError if len(val) > 4: - log.warning(f"Histogram percentiles are rounded to 2 digits: {floatval} rounded") + log.warning("Histogram percentiles are rounded to 2 digits: %s rounded", floatval) result.append(float(val[0:4])) except ValueError: - log.warning(f"Bad histogram percentile value {val}, must be float in ]0;1[, skipping") + log.warning("Bad histogram percentile value %s, must be float in ]0;1[, skipping", val) except Exception: log.exception("Error when parsing histogram percentiles, skipping") return None @@ -287,11 +287,11 @@ def get_config(options=None): # Core config # ap if not config.has_option('Main', 'api_key'): - log.warning(u"No API key was found. Aborting.") + log.warning("No API key was found. Aborting.") sys.exit(2) if not config.has_option('Main', 'dd_url'): - log.warning(u"No dd_url was found. Aborting.") + log.warning("No dd_url was found. Aborting.") sys.exit(2) # Endpoints @@ -531,13 +531,13 @@ def extract_agent_config(config): conf_backend = config.get('Main', 'sd_config_backend') if backend not in SD_BACKENDS: - log.error(f"The backend {backend} is not supported. Service discovery won't be enabled.") + log.error("The backend %s is not supported. Service discovery won't be enabled.", backend) agentConfig['service_discovery'] = False if conf_backend is None: log.warning('No configuration backend provided for service discovery. Only auto config templates will be used.') elif conf_backend not in SD_CONFIG_BACKENDS: - log.error(f"The config backend {conf_backend} is not supported. Only auto config templates will be used.") + log.error("The config backend %s is not supported. Only auto config templates will be used.", conf_backend) conf_backend = None agentConfig['sd_config_backend'] = conf_backend diff --git a/pkg/gohai/cpu/from-lscpu-arm.py b/pkg/gohai/cpu/from-lscpu-arm.py index 70a9b4b322f35..a4fafe1a0fedf 100644 --- a/pkg/gohai/cpu/from-lscpu-arm.py +++ b/pkg/gohai/cpu/from-lscpu-arm.py @@ -10,7 +10,7 @@ def main(): - lines = iter(open(sys.argv[1], "r")) + lines = iter(open(sys.argv[1], "r")) # noqa: UP015 # part_lists are the lists of part numbers, keyed by name part_lists = {} diff --git a/pyproject.toml b/pyproject.toml index d7d1f231e2072..f22d0eafbc1c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,28 +3,74 @@ # verbose regular expressions by Black. Use [ ] to denote a significant space # character. 
-[tool.black] -include = '\.pyi?$' +[tool.ruff] +# TODO: Some images still use python3.8 in the CI, such as datadog-agent-buildimages/docker_x64 or circleci +target-version = "py38" +exclude = [ + ".git", + ".github", + ".circleci", + "chocolatey", + "Dockerfiles", + "docs", + "google-marketplace", + "omnibus", + "pkg-config", + "releasenotes", + "vendor", + "venv", + "dev", +] line-length = 120 -skip-string-normalization = true -exclude = ''' -( - /( - | \.git - | docs - | releasenotes - | releasenotes-installscript - | vendor - | dev - )/ -) -''' -[tool.isort] -default_section = 'THIRDPARTY' -force_grid_wrap = 0 -include_trailing_comma = true -known_first_party = 'datadog_checks' -line_length = 120 -multi_line_output = 3 -use_parentheses = true +[tool.ruff.lint] +# Rules were ported over from the legacy flake8 settings for parity +# All the rules can be found here: https://beta.ruff.rs/docs/rules/ +select = [ + "B", + "C", + "E", + "F", + "G", + "U", + "W", + "B003", + "B006", + "B007", +] +ignore = [ + # From legacy flake8 settings + # Ignore: + # - black-incompatible options: E203 + # - bugbear overlap: E722 + # - style options: W2,W3,W50,E111,E114,E117,E2,E3,E5,E74 + # - Unnecessary dict call: C408 + # - complex-structure : C901 + # - raise-without-from-inside-except: B904 + "E203", + "W2", + "W3", + "W50", + "E111", + "E114", + "E117", + "E2", + "E3", + "E5", + "E74", + "E722", + "C408", + "C901", + "B904", +] +unfixable = [ + # Don't touch unused imports + "F401", +] + +[tool.ruff.lint.flake8-tidy-imports] +ban-relative-imports = "all" + +[tool.ruff.format] +# Enable preview style formatting. +quote-style = "preserve"
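Most of the Python changes in the rest of this diff are mechanical autofixes from the rule groups selected above: in ruff the `"U"` entry selects the pyupgrade (`UP`) rules — hence the `# noqa: UP015` and `# noqa: UP004` suppressions that appear below — and `"G"` selects flake8-logging-format. A made-up example, not code from this PR, showing the typical rewrites these rules produce:

from __future__ import annotations  # allows `str | None` and `list[str]` on Python 3.8

import logging

log = logging.getLogger(__name__)


class Config:  # UP004: the explicit `object` base class is dropped
    def load(self, path: str | None) -> list[str]:  # UP006/UP007 replace List/Optional
        if path is None:
            return []
        with open(path) as f:  # UP015: the redundant "r" mode is removed
            lines = [line.rstrip("\n") for line in f]
        if not lines:
            log.warning("config file %s is empty", path)  # G004: no f-strings in logging calls
        return lines

The `unfixable = ["F401"]` setting keeps `--fix` from deleting unused imports automatically, which lines up with deliberate re-exports such as the WMISampler import above carrying an explicit `# noqa: F401`.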
diff --git a/rtloader/demo/main.py b/rtloader/demo/main.py index c384a663a1940..3e009eb34bbfb 100644 --- a/rtloader/demo/main.py +++ b/rtloader/demo/main.py @@ -2,7 +2,7 @@ # under the Apache License Version 2.0. # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019-present Datadog, Inc. -from __future__ import print_function +from __future__ import print_function # noqa fmt: off import aggregator import tagger diff --git a/rtloader/test/python/datadog_checks/base/checks/__init__.py b/rtloader/test/python/datadog_checks/base/checks/__init__.py index 626ba56fc1db1..83127b99a5bd2 100644 --- a/rtloader/test/python/datadog_checks/base/checks/__init__.py +++ b/rtloader/test/python/datadog_checks/base/checks/__init__.py @@ -1,5 +1,5 @@ # AgentCheck stubs for testing -class AgentCheck(object): +class AgentCheck(object): # noqa: UP004 def __init__(self, *args, **kwargs): # noqa: U100 pass diff --git a/rtloader/test/python/fake_check/__init__.py b/rtloader/test/python/fake_check/__init__.py index 0835196a06b86..7d59359958592 100644 --- a/rtloader/test/python/fake_check/__init__.py +++ b/rtloader/test/python/fake_check/__init__.py @@ -2,6 +2,7 @@ was_canceled = False + # Fake check for testing purposes class FakeCheck(AgentCheck): def cancel(self): diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 2a4b7ce26035b..0000000000000 --- a/setup.cfg +++ /dev/null @@ -1,11 +0,0 @@ -[flake8] -select = B,C,E,F,FS,G,U,W,B001,B003,B006,B007,B301,B305,B306,B902 -# Ignore: -# - black-incompatible options: E203 -# - bugbear overlap: E722 -# - style options: W2,W3,W50,E111,E114,E117,E12,E2,E3,E5,E74 -# - unused argument with underscore: U101 -# - Unnecessary dict call: C408 -# - f-string missing prefix FS003 -ignore = E203,W2,W3,W50,E111,E114,E117,E12,E2,E3,E5,E74,E722,U101,C408,FS003 -exclude = .git,.github,.circleci,chocolatey,docs,google-marketplace,omnibus,pkg-config,releasenotes,vendor,venv,venv3 diff --git a/tasks/agent.py b/tasks/agent.py index 79015cb0b7baf..3c0daea44c79d 100644 --- a/tasks/agent.py +++ b/tasks/agent.py @@ -634,7 +634,7 @@ def check_supports_python_version(_, check_dir, python): project_file = os.path.join(check_dir, 'pyproject.toml') setup_file = os.path.join(check_dir, 'setup.py') if os.path.isfile(project_file): - with open(project_file, 'r') as f: + with open(project_file) as f: data = toml.loads(f.read()) project_metadata = data['project'] @@ -651,7 +651,7 @@ def check_supports_python_version(_, check_dir, python): else: print('False', end='') elif os.path.isfile(setup_file): - with open(setup_file, 'r') as f: + with open(setup_file) as f: tree = ast.parse(f.read(), filename=setup_file) prefix = f'Programming Language :: Python :: {python}' diff --git a/tasks/bench.py b/tasks/bench.py index e344882e7ce78..d8147be7f6a77 100644 --- a/tasks/bench.py +++ b/tasks/bench.py @@ -2,7 +2,6 @@ Benchmarking tasks """ - import os from invoke import task diff --git a/tasks/build_tags.py b/tasks/build_tags.py index 694dfc391d642..34f6d24e24126 100644 --- a/tasks/build_tags.py +++ b/tasks/build_tags.py @@ -1,12 +1,13 @@ """ Utilities to manage build tags """ + # TODO: check if we really need the typing import. # Recent versions of Python should be able to use dict and list directly in type hints, # so we only need to check that we don't run this code with old Python versions.
+from __future__ import annotations import sys -from typing import List from invoke import task @@ -230,8 +231,8 @@ def compute_build_tags_for_flavor( build: str, arch: str, - build_include: List[str], - build_exclude: List[str], + build_include: list[str], + build_exclude: list[str], flavor: AgentFlavor = AgentFlavor.base, include_sds: bool = False, ): diff --git a/tasks/components.py b/tasks/components.py index d4bc718fc51db..63e8adcd2fb0d 100644 --- a/tasks/components.py +++ b/tasks/components.py @@ -1,6 +1,7 @@ """ Invoke entrypoint, import here all the tasks we want to make available """ + import os import pathlib from collections import namedtuple @@ -264,7 +265,7 @@ def lint_components(_, fix=False): with open(filename, "w") as f: f.write(components_md) else: - with open(filename, "r") as f: + with open(filename) as f: current = f.read() if current != components_md: print(f"** {filename} differs") @@ -273,7 +274,7 @@ def lint_components(_, fix=False): # Check .github/CODEOWNERS filename = ".github/CODEOWNERS" - with open(filename, "r") as f: + with open(filename) as f: current = f.read() codeowners = '\n'.join(make_codeowners(current.splitlines(), bundles, components_without_bundle)) if fix: @@ -419,7 +420,7 @@ def read_file_content(template_path): """ Read all lines in files and return them as a single string. """ - with open(template_path, "r") as file: + with open(template_path) as file: return file.read() diff --git a/tasks/devcontainer.py b/tasks/devcontainer.py index dee75d4e56a80..dc9f4bb0332ab 100644 --- a/tasks/devcontainer.py +++ b/tasks/devcontainer.py @@ -3,6 +3,7 @@ Helpers for getting vscode set up nicely """ + import json import os from collections import OrderedDict @@ -53,7 +54,7 @@ def setup( devcontainer = {} fullpath = os.path.join(DEVCONTAINER_DIR, DEVCONTAINER_FILE) if os.path.exists(fullpath): - with open(fullpath, "r") as sf: + with open(fullpath) as sf: devcontainer = json.load(sf, object_pairs_hook=OrderedDict) local_build_tags = ",".join(use_tags) @@ -104,9 +105,9 @@ def setup( "extensions": ["golang.Go"], } } - devcontainer[ - "postStartCommand" - ] = "git config --global --add safe.directory /workspaces/datadog-agent && invoke install-tools && invoke deps" + devcontainer["postStartCommand"] = ( + "git config --global --add safe.directory /workspaces/datadog-agent && invoke install-tools && invoke deps" + ) with open(fullpath, "w") as sf: json.dump(devcontainer, sf, indent=4, sort_keys=False, separators=(',', ': ')) diff --git a/tasks/docker_tasks.py b/tasks/docker_tasks.py index 5017695169040..62b9827b7d99f 100644 --- a/tasks/docker_tasks.py +++ b/tasks/docker_tasks.py @@ -2,7 +2,6 @@ Docker related tasks """ - import os import shutil import sys @@ -121,7 +120,7 @@ def pull_base_images(ctx, dockerfile, signed_pull=True): images = set() stages = set() - with open(dockerfile, "r") as f: + with open(dockerfile) as f: for line in f: words = line.split() # Get source images diff --git a/tasks/ebpf.py b/tasks/ebpf.py index 4ad37a002ef07..8262df9b3bbf6 100644 --- a/tasks/ebpf.py +++ b/tasks/ebpf.py @@ -141,7 +141,7 @@ def print_verification_stats( write_verifier_stats(verifier_stats, f, jsonfmt) return - with open(base, 'r') as f: + with open(base) as f: base_verifier_stats = json.load(f) stats_diff = dict() diff --git a/tasks/emacs.py b/tasks/emacs.py index 64af12aecda48..8ffe64e9510bd 100644 --- a/tasks/emacs.py +++ b/tasks/emacs.py @@ -3,6 +3,7 @@ Helpers for getting Emacs set up nicely """ + from invoke import task from tasks.build_tags import build_tags, 
filter_incompatible_tags, get_build_tags, get_default_build_tags diff --git a/tasks/fuzz.py b/tasks/fuzz.py index 3f7b437890ffe..02ed805547792 100644 --- a/tasks/fuzz.py +++ b/tasks/fuzz.py @@ -1,6 +1,7 @@ """ Helper for running fuzz targets """ + import os from invoke import task @@ -26,8 +27,7 @@ def search_fuzz_tests(directory): for file in os.listdir(directory): path = os.path.join(directory, file) if os.path.isdir(path): - for tuple in search_fuzz_tests(path): - yield tuple + yield from search_fuzz_tests(path) else: if not file.endswith('_test.go'): continue @@ -35,4 +35,4 @@ def search_fuzz_tests(directory): for line in f.readlines(): if line.startswith('func Fuzz'): fuzzfunc = line[5 : line.find('(')] # 5 is len('func ') - yield (directory, fuzzfunc) + yield directory, fuzzfunc diff --git a/tasks/git-hooks/clang-format.py b/tasks/git-hooks/clang-format.py index b29d42683ef1e..9a6cd591a159a 100644 --- a/tasks/git-hooks/clang-format.py +++ b/tasks/git-hooks/clang-format.py @@ -9,14 +9,11 @@ From https://github.com/Sarcasm/run-clang-format """ -from __future__ import print_function, unicode_literals - import argparse import codecs import difflib import errno import fnmatch -import io import multiprocessing import os import signal @@ -44,7 +41,7 @@ class ExitStatus: def excludes_from_file(ignore_file): excludes = [] try: - with io.open(ignore_file, 'r', encoding='utf-8') as f: + with open(ignore_file, encoding='utf-8') as f: for line in f: if line.startswith('#'): # ignore comments @@ -54,7 +51,7 @@ def excludes_from_file(ignore_file): # allow empty lines continue excludes.append(pattern) - except EnvironmentError as e: + except OSError as e: if e.errno != errno.ENOENT: raise return excludes @@ -96,13 +93,13 @@ def make_diff(file, original, reformatted): class DiffError(Exception): def __init__(self, message, errs=None): - super(DiffError, self).__init__(message) + super().__init__(message) self.errs = errs or [] class UnexpectedError(Exception): def __init__(self, message, exc=None): - super(UnexpectedError, self).__init__(message) + super().__init__(message) self.formatted_traceback = traceback.format_exc() self.exc = exc @@ -119,9 +116,9 @@ def run_clang_format_diff_wrapper(args, file): def run_clang_format_diff(args, file): try: - with io.open(file, 'r', encoding='utf-8') as f: + with open(file, encoding='utf-8') as f: original = f.readlines() - except IOError as exc: + except OSError as exc: raise DiffError(str(exc)) if args.in_place: @@ -220,7 +217,7 @@ def print_diff(diff_lines, use_color): if use_color: diff_lines = colorize(diff_lines) if sys.version_info[0] < 3: - sys.stdout.writelines((l.encode('utf-8') for l in diff_lines)) + sys.stdout.writelines(l.encode('utf-8') for l in diff_lines) else: sys.stdout.writelines(diff_lines) @@ -276,7 +273,7 @@ def main(): # https://bugs.python.org/issue14229#msg156446 signal.signal(signal.SIGINT, signal.SIG_DFL) try: - signal.SIGPIPE + signal.SIGPIPE # noqa: B018 except AttributeError: # compatibility, SIGPIPE does not exist on Windows pass @@ -292,7 +289,7 @@ def main(): colored_stdout = sys.stdout.isatty() colored_stderr = sys.stderr.isatty() - version_invocation = [args.clang_format_executable, str("--version")] + version_invocation = [args.clang_format_executable, "--version"] try: subprocess.check_call(version_invocation, stdout=DEVNULL) except subprocess.CalledProcessError as e: diff --git a/tasks/github_tasks.py b/tasks/github_tasks.py index c1fb701768d2f..88c1a8ddafef8 100644 --- a/tasks/github_tasks.py +++ b/tasks/github_tasks.py 
@@ -223,7 +223,7 @@ def _get_teams(changed_files, owners_file='.github/CODEOWNERS') -> List[str]: def _get_team_labels(): import toml - with open('.ddqa/config.toml', 'r') as f: + with open('.ddqa/config.toml') as f: data = toml.loads(f.read()) labels = [] diff --git a/tasks/go.py b/tasks/go.py index a07fcdbab8558..fa4d7f914f349 100644 --- a/tasks/go.py +++ b/tasks/go.py @@ -76,7 +76,13 @@ def run_golangci_lint( @task def golangci_lint( - ctx, targets, rtloader_root=None, build_tags=None, build="test", arch="x64", concurrency=None # noqa: U100 + ctx, + targets, + rtloader_root=None, + build_tags=None, + build="test", + arch="x64", + concurrency=None, # noqa: U100 ): """ Run golangci-lint on targets using .golangci.yml configuration. @@ -149,7 +155,7 @@ def lint_licenses(ctx): licenses = [] file = 'LICENSE-3rdparty.csv' - with open(file, 'r', encoding='utf-8') as f: + with open(file, encoding='utf-8') as f: next(f) for line in f: licenses.append(line.rstrip()) diff --git a/tasks/go_test.py b/tasks/go_test.py index 03756014c5391..1c2ed6a3501f1 100644 --- a/tasks/go_test.py +++ b/tasks/go_test.py @@ -627,7 +627,6 @@ def get_modified_files(ctx): @task(iterable=["extra_tag"]) def send_unit_tests_stats(_, job_name, extra_tag=None): - if extra_tag is None: extra_tag = [] @@ -734,7 +733,7 @@ def send_unit_tests_stats(_, job_name, extra_tag=None): def parse_test_log(log_file): failed_tests = [] n_test_executed = 0 - with open(log_file, "r") as f: + with open(log_file) as f: for line in f: json_line = json.loads(line) if ( diff --git a/tasks/kernel_matrix_testing/ci.py b/tasks/kernel_matrix_testing/ci.py index 8e03b74f5a293..6a33ded4c5e19 100644 --- a/tasks/kernel_matrix_testing/ci.py +++ b/tasks/kernel_matrix_testing/ci.py @@ -6,7 +6,7 @@ import re import tarfile import xml.etree.ElementTree as ET -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union, overload +from typing import TYPE_CHECKING, Any, overload from tasks.libs.ciproviders.gitlab import Gitlab, get_gitlab_token @@ -23,7 +23,7 @@ def get_gitlab() -> Gitlab: class KMTJob: """Abstract class representing a Kernel Matrix Testing job, with common properties and methods for all job types""" - def __init__(self, job_data: Dict[str, Any]): + def __init__(self, job_data: dict[str, Any]): self.gitlab = get_gitlab() self.job_data = job_data @@ -59,14 +59,14 @@ def failure_reason(self) -> str: return self.job_data["failure_reason"] @overload - def artifact_file(self, file: str, ignore_not_found: Literal[True]) -> Optional[str]: # noqa: U100 + def artifact_file(self, file: str, ignore_not_found: Literal[True]) -> str | None: # noqa: U100 ... @overload def artifact_file(self, file: str, ignore_not_found: Literal[False] = False) -> str: # noqa: U100 ... - def artifact_file(self, file: str, ignore_not_found: bool = False) -> Optional[str]: + def artifact_file(self, file: str, ignore_not_found: bool = False) -> str | None: """Download an artifact file from this job, returning its content as a string (decoded UTF-8) file: the path to the file inside the artifact @@ -76,14 +76,14 @@ def artifact_file(self, file: str, ignore_not_found: bool = False) -> Optional[s return data.decode('utf-8') if data is not None else None @overload - def artifact_file_binary(self, file: str, ignore_not_found: Literal[True]) -> Optional[bytes]: # noqa: U100 + def artifact_file_binary(self, file: str, ignore_not_found: Literal[True]) -> bytes | None: # noqa: U100 ... 
@overload def artifact_file_binary(self, file: str, ignore_not_found: Literal[False] = False) -> bytes: # noqa: U100 ... - def artifact_file_binary(self, file: str, ignore_not_found: bool = False) -> Optional[bytes]: + def artifact_file_binary(self, file: str, ignore_not_found: bool = False) -> bytes | None: """Download an artifact file from this job, returning its content as a byte array file: the path to the file inside the artifact @@ -107,9 +107,9 @@ class KMTSetupEnvJob(KMTJob): the job name and output artifacts """ - def __init__(self, job_data: Dict[str, Any]): + def __init__(self, job_data: dict[str, Any]): super().__init__(job_data) - self.associated_test_jobs: List[KMTTestRunJob] = [] + self.associated_test_jobs: list[KMTTestRunJob] = [] @property def stack_output(self) -> StackOutput: @@ -120,8 +120,8 @@ def vmconfig(self) -> VMConfig: return json.loads(self.artifact_file(f"vmconfig-{self.pipeline_id}-{self.arch}.json")) @property - def seen_ips(self) -> Set[str]: - ips: Set[str] = set() + def seen_ips(self) -> set[str]: + ips: set[str] = set() for iface in [0, 1, 2, 3]: virbr_status = self.artifact_file(f"libvirt/dnsmasq/virbr{iface}.status", ignore_not_found=True) @@ -135,7 +135,7 @@ def seen_ips(self) -> Set[str]: return ips - def get_vm(self, distro: str, vmset: str) -> Optional[Tuple[str, str]]: + def get_vm(self, distro: str, vmset: str) -> tuple[str, str] | None: """Return the VM ID and IP that matches a given distro and vmset in this environment job Returns None if they're not found @@ -146,7 +146,7 @@ def get_vm(self, distro: str, vmset: str) -> Optional[Tuple[str, str]]: return microvm['id'], microvm['ip'] return None - def get_vm_boot_log(self, distro: str, vmset: str) -> Optional[str]: + def get_vm_boot_log(self, distro: str, vmset: str) -> str | None: """Return the boot log for a given distro and vmset in this setup-env job""" vmdata = self.get_vm(distro, vmset) if vmdata is None: @@ -165,12 +165,12 @@ class KMTTestRunJob(KMTJob): the job name and output artifacts """ - def __init__(self, job_data: Dict[str, Any]): + def __init__(self, job_data: dict[str, Any]): super().__init__(job_data) - self.setup_job: Optional[KMTSetupEnvJob] = None + self.setup_job: KMTSetupEnvJob | None = None @property - def vars(self) -> List[str]: + def vars(self) -> list[str]: match = re.search(r"\[([^\]]+)\]", self.name) if match is None: raise RuntimeError(f"Invalid job name {self.name}") @@ -184,7 +184,7 @@ def distro(self) -> str: def vmset(self) -> str: return self.vars[1] - def get_junit_reports(self) -> List[ET.ElementTree]: + def get_junit_reports(self) -> list[ET.ElementTree]: """Return the XML data from all JUnit reports in this job. Does not fail if the file is not found.""" junit_archive_name = f"junit-{self.arch}-{self.distro}-{self.vmset}.tar.gz" junit_archive = self.artifact_file_binary(f"test/kitchen/{junit_archive_name}", ignore_not_found=True) @@ -194,7 +194,7 @@ def get_junit_reports(self) -> List[ET.ElementTree]: bytearr = io.BytesIO(junit_archive) tar = tarfile.open(fileobj=bytearr) - reports: List[ET.ElementTree] = [] + reports: list[ET.ElementTree] = [] for member in tar.getmembers(): filename = os.path.basename(member.name) if filename.endswith(".xml"): @@ -204,11 +204,11 @@ def get_junit_reports(self) -> List[ET.ElementTree]: return reports - def get_test_results(self) -> Dict[str, Optional[bool]]: + def get_test_results(self) -> dict[str, bool | None]: """Return a dictionary with the results of all tests in this job, indexed by "package_name:testname". 
The values are True if test passed, False if failed, None if skipped. """ - results: Dict[str, Optional[bool]] = {} + results: dict[str, bool | None] = {} for report in self.get_junit_reports(): for testsuite in report.findall(".//testsuite"): pkgname = testsuite.get("name") @@ -223,13 +223,13 @@ def get_test_results(self) -> Dict[str, Optional[bool]]: return results -def get_all_jobs_for_pipeline(pipeline_id: Union[int, str]) -> Tuple[List[KMTSetupEnvJob], List[KMTTestRunJob]]: +def get_all_jobs_for_pipeline(pipeline_id: int | str) -> tuple[list[KMTSetupEnvJob], list[KMTTestRunJob]]: """Gets all KMT jobs for a given pipeline, separated between setup jobs and test run jobs. Also links the corresponding setup jobs for each test run job """ - setup_jobs: List[KMTSetupEnvJob] = [] - test_jobs: List[KMTTestRunJob] = [] + setup_jobs: list[KMTSetupEnvJob] = [] + test_jobs: list[KMTTestRunJob] = [] gitlab = get_gitlab() for job in gitlab.all_jobs(pipeline_id): diff --git a/tasks/kernel_matrix_testing/compiler.py b/tasks/kernel_matrix_testing/compiler.py index 91ff9d1bfdb68..ca4daabc9939d 100644 --- a/tasks/kernel_matrix_testing/compiler.py +++ b/tasks/kernel_matrix_testing/compiler.py @@ -2,7 +2,7 @@ import sys from pathlib import Path -from typing import TYPE_CHECKING, Optional, cast +from typing import TYPE_CHECKING, cast from invoke.context import Context from invoke.runners import Result @@ -56,7 +56,7 @@ def ensure_running(self): info(f"[*] Compiler for {self.arch} not running, starting it...") self.start() - def exec(self, cmd: str, user="compiler", verbose=True, run_dir: Optional[PathOrStr] = None): + def exec(self, cmd: str, user="compiler", verbose=True, run_dir: PathOrStr | None = None): if run_dir: cmd = f"cd {run_dir} && {cmd}" @@ -85,7 +85,7 @@ def build(self) -> Result: ) # Add build arguments (such as go version) from go.env - with open(buildimages_path / "go.env", "r") as f: + with open(buildimages_path / "go.env") as f: for line in f: docker_build_args += ["--build-arg", line.strip()] diff --git a/tasks/kernel_matrix_testing/config.py b/tasks/kernel_matrix_testing/config.py index a8030362765c5..73e11e58ce0bf 100644 --- a/tasks/kernel_matrix_testing/config.py +++ b/tasks/kernel_matrix_testing/config.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import TYPE_CHECKING, Optional, cast +from typing import TYPE_CHECKING, cast from tasks.kernel_matrix_testing.kmt_os import get_kmt_os from tasks.kernel_matrix_testing.tool import Exit @@ -13,13 +13,13 @@ class ConfigManager: def __init__(self): self._cfg_path = get_kmt_os().kmt_dir / "config.json" - self._config: Optional[KMTConfig] = None + self._config: KMTConfig | None = None def load(self): if not self._cfg_path.is_file(): self._config = cast('KMTConfig', dict()) else: - with open(self._cfg_path, "r") as f: + with open(self._cfg_path) as f: self._config = json.load(f) @property diff --git a/tasks/kernel_matrix_testing/download.py b/tasks/kernel_matrix_testing/download.py index bc36a824da2b4..ff21e0c0c7ee8 100644 --- a/tasks/kernel_matrix_testing/download.py +++ b/tasks/kernel_matrix_testing/download.py @@ -3,7 +3,7 @@ import os import platform import tempfile -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from invoke.context import Context @@ -48,8 +48,8 @@ def download_rootfs(ctx: Context, rootfs_dir: PathOrStr, vmconfig_template_name: url_base = platforms["url_base"] arch = arch_mapping[platform.machine()] - to_download: List[str] = list() - file_ls: List[str] = list() + 
to_download: list[str] = list() + file_ls: list[str] = list() branch_mapping: dict[str, str] = dict() for tag in platforms[arch]: diff --git a/tasks/kernel_matrix_testing/infra.py b/tasks/kernel_matrix_testing/infra.py index 6b5d70c58ba13..da5c6351b6bc2 100644 --- a/tasks/kernel_matrix_testing/infra.py +++ b/tasks/kernel_matrix_testing/infra.py @@ -4,7 +4,7 @@ import json import os from pathlib import Path -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING from invoke.context import Context @@ -26,7 +26,7 @@ } -def ssh_options_command(extra_opts: Optional[Dict[str, str]] = None): +def ssh_options_command(extra_opts: dict[str, str] | None = None): opts = SSH_OPTIONS.copy() if extra_opts is not None: opts.update(extra_opts) @@ -36,7 +36,7 @@ def ssh_options_command(extra_opts: Optional[Dict[str, str]] = None): class LocalCommandRunner: @staticmethod - def run_cmd(ctx: Context, _: 'HostInstance', cmd: str, allow_fail: bool, verbose: bool): + def run_cmd(ctx: Context, _: HostInstance, cmd: str, allow_fail: bool, verbose: bool): res = ctx.run(cmd.format(proxy_cmd=""), hide=(not verbose), warn=allow_fail) if res is not None and res.ok: return True @@ -49,9 +49,7 @@ def run_cmd(ctx: Context, _: 'HostInstance', cmd: str, allow_fail: bool, verbose raise Exit("command failed") @staticmethod - def move_to_shared_directory( - ctx: Context, _: 'HostInstance', source: PathOrStr, subdir: Optional[PathOrStr] = None - ): + def move_to_shared_directory(ctx: Context, _: HostInstance, source: PathOrStr, subdir: PathOrStr | None = None): recursive = "" if os.path.isdir(source): recursive = "-R" @@ -65,7 +63,7 @@ def move_to_shared_directory( class RemoteCommandRunner: @staticmethod - def run_cmd(ctx: Context, instance: 'HostInstance', cmd: str, allow_fail: bool, verbose: bool): + def run_cmd(ctx: Context, instance: HostInstance, cmd: str, allow_fail: bool, verbose: bool): ssh_key_arg = f"-i {instance.ssh_key_path}" if instance.ssh_key_path is not None else "" res = ctx.run( cmd.format( @@ -86,7 +84,7 @@ def run_cmd(ctx: Context, instance: 'HostInstance', cmd: str, allow_fail: bool, @staticmethod def move_to_shared_directory( - ctx: Context, instance: 'HostInstance', source: PathOrStr, subdir: Optional[PathOrStr] = None + ctx: Context, instance: HostInstance, source: PathOrStr, subdir: PathOrStr | None = None ): full_target = get_kmt_os().shared_dir if subdir is not None: @@ -119,9 +117,9 @@ def __init__( ip: str, domain_id: str, tag: str, - vmset_tags: List[str], - ssh_key_path: Optional[str], - instance: 'HostInstance', + vmset_tags: list[str], + ssh_key_path: str | None, + instance: HostInstance, ): self.ip = ip self.name = domain_id @@ -151,35 +149,35 @@ def check_reachable(self, ctx: Context) -> bool: class HostInstance: - def __init__(self, ip: str, arch: ArchOrLocal, ssh_key_path: Optional[str]): + def __init__(self, ip: str, arch: ArchOrLocal, ssh_key_path: str | None): self.ip: str = ip self.arch: ArchOrLocal = arch - self.ssh_key_path: Optional[str] = ssh_key_path - self.microvms: List[LibvirtDomain] = [] + self.ssh_key_path: str | None = ssh_key_path + self.microvms: list[LibvirtDomain] = [] self.runner = get_instance_runner(arch) def add_microvm(self, domain: LibvirtDomain): self.microvms.append(domain) - def copy_to_all_vms(self, ctx: Context, path: PathOrStr, subdir: Optional[PathOrStr] = None): + def copy_to_all_vms(self, ctx: Context, path: PathOrStr, subdir: PathOrStr | None = None): self.runner.move_to_shared_directory(ctx, self, path, subdir) def 
__repr__(self): return f" {self.ip} {self.arch}" -def build_infrastructure(stack: str, ssh_key_obj: Optional[SSHKey] = None): +def build_infrastructure(stack: str, ssh_key_obj: SSHKey | None = None): stack_output = os.path.join(get_kmt_os().stacks_dir, stack, "stack.output") if not os.path.exists(stack_output): raise Exit(f"no stack.output file present at {stack_output}") - with open(stack_output, 'r') as f: + with open(stack_output) as f: try: infra_map: StackOutput = json.load(f) except json.decoder.JSONDecodeError: raise Exit(f"{stack_output} file is not a valid json file") - infra: Dict[ArchOrLocal, HostInstance] = dict() + infra: dict[ArchOrLocal, HostInstance] = dict() for arch in infra_map: key = ssh_key_obj['path'] if ssh_key_obj is not None else None instance = HostInstance(infra_map[arch]["ip"], arch, key) @@ -207,14 +205,14 @@ def ask_for_ssh() -> bool: ) -def get_ssh_key_name(pubkey: Path) -> Optional[str]: +def get_ssh_key_name(pubkey: Path) -> str | None: parts = pubkey.read_text().split() if len(parts) != 3: return None return parts[2] -def get_ssh_agent_key_names(ctx: Context) -> List[str]: +def get_ssh_agent_key_names(ctx: Context) -> list[str]: """Return the key names found in the SSH agent""" agent_output = ctx.run("ssh-add -l") if agent_output is None or not agent_output.ok: @@ -223,14 +221,14 @@ def get_ssh_agent_key_names(ctx: Context) -> List[str]: return [parts[2] for parts in output_parts if len(parts) >= 3] -def try_get_ssh_key(ctx: Context, key_hint: Optional[str]) -> Optional[SSHKey]: +def try_get_ssh_key(ctx: Context, key_hint: str | None) -> SSHKey | None: """Return a SSHKey object, either using the hint provided or using the configuration. The hint can either be a file path, a key name or a name of a file in ~/.ssh """ if key_hint is not None: - checked_paths: List[str] = [] + checked_paths: list[str] = [] possible_paths = map(Path, [key_hint, f"~/.ssh/{key_hint}", f"~/.ssh/{key_hint}.pem"]) for path in possible_paths: checked_paths.append(os.fspath(path)) diff --git a/tasks/kernel_matrix_testing/libvirt.py b/tasks/kernel_matrix_testing/libvirt.py index bb7fbe9e2c2a1..c987599b30ac6 100644 --- a/tasks/kernel_matrix_testing/libvirt.py +++ b/tasks/kernel_matrix_testing/libvirt.py @@ -1,7 +1,7 @@ from __future__ import annotations import sys -from typing import TYPE_CHECKING, Callable, Iterable, List +from typing import TYPE_CHECKING, Callable, Iterable from tasks.kernel_matrix_testing.tool import info @@ -20,7 +20,7 @@ def resource_in_stack(stack: str, resource: str) -> bool: return f"-{stack}" in resource -def get_resources_in_stack(stack: str, list_fn: Callable[[], Iterable[TNamed]]) -> List[TNamed]: +def get_resources_in_stack(stack: str, list_fn: Callable[[], Iterable[TNamed]]) -> list[TNamed]: resources = list_fn() stack_resources = list() for resource in resources: @@ -30,7 +30,7 @@ def get_resources_in_stack(stack: str, list_fn: Callable[[], Iterable[TNamed]]) return stack_resources -def delete_domains(conn: 'libvirt.virConnect', stack: str): +def delete_domains(conn: libvirt.virConnect, stack: str): domains = get_resources_in_stack(stack, conn.listAllDomains) info(f"[*] {len(domains)} VMs running in stack {stack}") @@ -48,11 +48,11 @@ def delete_domains(conn: 'libvirt.virConnect', stack: str): info(f"[+] VM {name} deleted") -def getAllStackVolumesFn(conn: 'libvirt.virConnect', stack: str): - def getAllStackVolumes() -> List['libvirt.virStorageVol']: +def getAllStackVolumesFn(conn: libvirt.virConnect, stack: str): + def getAllStackVolumes() -> 
list[libvirt.virStorageVol]: pools = get_resources_in_stack(stack, conn.listAllStoragePools) - volumes: List['libvirt.virStorageVol'] = list() + volumes: list[libvirt.virStorageVol] = list() for pool in pools: if not pool.isActive(): continue @@ -63,7 +63,7 @@ def getAllStackVolumes() -> List['libvirt.virStorageVol']: return getAllStackVolumes -def delete_volumes(conn: 'libvirt.virConnect', stack: str): +def delete_volumes(conn: libvirt.virConnect, stack: str): volumes = get_resources_in_stack(stack, getAllStackVolumesFn(conn, stack)) info(f"[*] {len(volumes)} storage volumes running in stack {stack}") @@ -74,7 +74,7 @@ def delete_volumes(conn: 'libvirt.virConnect', stack: str): info(f"[+] Storage volume {name} deleted") -def delete_pools(conn: 'libvirt.virConnect', stack: str): +def delete_pools(conn: libvirt.virConnect, stack: str): pools = get_resources_in_stack(stack, conn.listAllStoragePools) info(f"[*] {len(pools)} storage pools running in stack {stack}") @@ -86,7 +86,7 @@ def delete_pools(conn: 'libvirt.virConnect', stack: str): info(f"[+] Storage pool {name} deleted") -def delete_networks(conn: 'libvirt.virConnect', stack: str): +def delete_networks(conn: libvirt.virConnect, stack: str): networks = get_resources_in_stack(stack, conn.listAllNetworks) info(f"[*] {len(networks)} networks running in stack {stack}") @@ -98,7 +98,7 @@ def delete_networks(conn: 'libvirt.virConnect', stack: str): info(f"[+] Network {name} deleted") -def pause_domains(conn: 'libvirt.virConnect', stack: str): +def pause_domains(conn: libvirt.virConnect, stack: str): domains = get_resources_in_stack(stack, conn.listAllDomains) info(f"[*] {len(domains)} VMs running in stack {stack}") @@ -109,7 +109,7 @@ def pause_domains(conn: 'libvirt.virConnect', stack: str): info(f"[+] VM {name} is paused") -def resume_network(conn: 'libvirt.virConnect', stack: str): +def resume_network(conn: libvirt.virConnect, stack: str): networks = get_resources_in_stack(stack, conn.listAllNetworks) info(f"[*] {len(networks)} networks running in stack {stack}") @@ -120,7 +120,7 @@ def resume_network(conn: 'libvirt.virConnect', stack: str): info(f"[+] Network {name} resumed") -def resume_domains(conn: 'libvirt.virConnect', stack: str): +def resume_domains(conn: libvirt.virConnect, stack: str): domains = get_resources_in_stack(stack, conn.listAllDomains) info(f"[*] {len(domains)} VMs running in stack {stack}") diff --git a/tasks/kernel_matrix_testing/platforms.py b/tasks/kernel_matrix_testing/platforms.py index 052b74a3480be..a60942eb0792a 100644 --- a/tasks/kernel_matrix_testing/platforms.py +++ b/tasks/kernel_matrix_testing/platforms.py @@ -2,7 +2,7 @@ import json from pathlib import Path -from typing import TYPE_CHECKING, Dict, List, cast +from typing import TYPE_CHECKING, cast import yaml @@ -22,11 +22,11 @@ def get_platforms(): def filter_by_ci_component(platforms: Platforms, component: Component) -> Platforms: - job_arch_mapping: Dict[Arch, str] = { + job_arch_mapping: dict[Arch, str] = { "x86_64": "x64", "arm64": "arm64", } - job_component_mapping: Dict[Component, str] = { + job_component_mapping: dict[Component, str] = { "system-probe": "sysprobe", "security-agent": "secagent", } @@ -38,7 +38,7 @@ def filter_by_ci_component(platforms: Platforms, component: Component) -> Platfo with open(target_file) as f: ci_config = yaml.load(f, Loader=GitlabYamlLoader()) - arch_ls: List[Arch] = ["x86_64", "arm64"] + arch_ls: list[Arch] = ["x86_64", "arm64"] for arch in arch_ls: job_name = 
f"kmt_run_{job_component_mapping[component]}_tests_{job_arch_mapping[arch]}" if job_name not in ci_config: diff --git a/tasks/kernel_matrix_testing/stacks.py b/tasks/kernel_matrix_testing/stacks.py index bc1ac954a262c..6c305e71c6a98 100644 --- a/tasks/kernel_matrix_testing/stacks.py +++ b/tasks/kernel_matrix_testing/stacks.py @@ -3,7 +3,7 @@ import os import platform from pathlib import Path -from typing import TYPE_CHECKING, List, Optional, cast +from typing import TYPE_CHECKING, cast from invoke.context import Context from invoke.runners import Result @@ -52,7 +52,7 @@ def _get_active_branch_name() -> str: raise Exit("Could not find active branch name") -def check_and_get_stack(stack: Optional[str]) -> str: +def check_and_get_stack(stack: str | None) -> str: if stack is None: stack = _get_active_branch_name() @@ -70,7 +70,7 @@ def vm_config_exists(stack: str): return os.path.exists(f"{get_kmt_os().stacks_dir}/{stack}/{VMCONFIG}") -def create_stack(ctx: Context, stack: Optional[str] = None): +def create_stack(ctx: Context, stack: str | None = None): if not os.path.exists(f"{get_kmt_os().stacks_dir}"): raise Exit("Kernel matrix testing environment not correctly setup. Run 'inv kmt.init'.") @@ -184,7 +184,7 @@ def check_env(ctx: Context): def launch_stack( - ctx: Context, stack: Optional[str], ssh_key: Optional[str], x86_ami: str, arm_ami: str, provision_microvms: bool + ctx: Context, stack: str | None, ssh_key: str | None, x86_ami: str, arm_ami: str, provision_microvms: bool ): stack = check_and_get_stack(stack) if not stack_exists(stack): @@ -245,7 +245,7 @@ def launch_stack( info(f"[+] Stack {stack} successfully setup") -def destroy_stack_pulumi(ctx: Context, stack: str, ssh_key: Optional[str]): +def destroy_stack_pulumi(ctx: Context, stack: str, ssh_key: str | None): ssh_key_obj = try_get_ssh_key(ctx, ssh_key) if ssh_key_obj is not None: ensure_key_in_agent(ctx, ssh_key_obj) @@ -269,7 +269,7 @@ def destroy_stack_pulumi(ctx: Context, stack: str, ssh_key: Optional[str]): ) -def ec2_instance_ids(ctx: Context, ip_list: List[str]) -> List[str]: +def ec2_instance_ids(ctx: Context, ip_list: list[str]) -> list[str]: ip_addresses = ','.join(ip_list) list_instances_cmd = f"aws-vault exec sso-sandbox-account-admin -- aws ec2 describe-instances --filter \"Name=private-ip-address,Values={ip_addresses}\" \"Name=tag:team,Values=ebpf-platform\" --query 'Reservations[].Instances[].InstanceId' --output text" @@ -287,7 +287,7 @@ def destroy_ec2_instances(ctx: Context, stack: str): return infra = build_infrastructure(stack) - ips: List[str] = list() + ips: list[str] = list() for arch, instance in infra.items(): if arch != "local": ips.append(instance.ip) @@ -371,7 +371,7 @@ def destroy_stack_force(ctx: Context, stack: str): ) -def destroy_stack(ctx: Context, stack: Optional[str], pulumi: bool, ssh_key: Optional[str]): +def destroy_stack(ctx: Context, stack: str | None, pulumi: bool, ssh_key: str | None): stack = check_and_get_stack(stack) if not stack_exists(stack): raise Exit(f"Stack {stack} does not exist. Please create with 'inv kmt.stack-create --stack='") @@ -385,7 +385,7 @@ def destroy_stack(ctx: Context, stack: Optional[str], pulumi: bool, ssh_key: Opt ctx.run(f"rm -r {get_kmt_os().stacks_dir}/{stack}") -def pause_stack(stack: Optional[str] = None): +def pause_stack(stack: str | None = None): stack = check_and_get_stack(stack) if not stack_exists(stack): raise Exit(f"Stack {stack} does not exist. 
Please create with 'inv kmt.stack-create --stack='") diff --git a/tasks/kernel_matrix_testing/tool.py b/tasks/kernel_matrix_testing/tool.py index 8019c3cec302f..e3f44ed156191 100644 --- a/tasks/kernel_matrix_testing/tool.py +++ b/tasks/kernel_matrix_testing/tool.py @@ -2,7 +2,7 @@ import os import platform -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import invoke.exceptions as ie from invoke.context import Context @@ -16,7 +16,7 @@ from termcolor import colored except ImportError: - def colored(text: str, color: Optional[str]) -> str: # noqa: U100 + def colored(text: str, color: str | None) -> str: # noqa: U100 return text @@ -60,7 +60,7 @@ def full_arch(arch: str): return arch_mapping[arch] -def get_binary_target_arch(ctx: Context, file: str) -> Optional[Arch]: +def get_binary_target_arch(ctx: Context, file: str) -> Arch | None: res = ctx.run(f"file {file}") if res is None or not res.ok: return None diff --git a/tasks/kernel_matrix_testing/vars.py b/tasks/kernel_matrix_testing/vars.py index 10920a98aef38..406267d3c364c 100644 --- a/tasks/kernel_matrix_testing/vars.py +++ b/tasks/kernel_matrix_testing/vars.py @@ -1,12 +1,12 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Dict +from typing import TYPE_CHECKING if TYPE_CHECKING: from tasks.kernel_matrix_testing.types import Arch -arch_mapping: Dict[str, Arch] = { +arch_mapping: dict[str, Arch] = { "amd64": "x86_64", "x86": "x86_64", "x86_64": "x86_64", diff --git a/tasks/kernel_matrix_testing/vmconfig.py b/tasks/kernel_matrix_testing/vmconfig.py index c4483fc2444ac..728ba4e70c98d 100644 --- a/tasks/kernel_matrix_testing/vmconfig.py +++ b/tasks/kernel_matrix_testing/vmconfig.py @@ -5,7 +5,7 @@ import json import os import platform -from typing import TYPE_CHECKING, Any, List, Optional, Set, Union, cast +from typing import TYPE_CHECKING, Any, List, cast from urllib.parse import urlparse from invoke.context import Context @@ -154,8 +154,8 @@ def lte_414(version: str) -> bool: return (int(major) <= 4) and (int(minor) <= 14) -def get_image_list(distro: bool, custom: bool) -> List[List[str]]: - custom_kernels: List[List[str]] = list() +def get_image_list(distro: bool, custom: bool) -> list[list[str]]: + custom_kernels: list[list[str]] = list() for k in kernels: if lte_414(k): custom_kernels.append([f"custom kernel v{k}", TICK, CROSS]) @@ -172,7 +172,7 @@ def get_image_list(distro: bool, custom: bool) -> List[List[str]]: return [] -def check_memory_and_vcpus(memory: List[Any], vcpus: List[Any]): +def check_memory_and_vcpus(memory: list[Any], vcpus: list[Any]): for mem in memory: if not mem.isnumeric() or int(mem) == 0: raise Exit(f"Invalid values for memory provided {memory}") @@ -188,12 +188,12 @@ def empty_config(file_path: str): f.write(j) -def list_possible() -> List[str]: +def list_possible() -> list[str]: distros = list(distributions.keys()) archs = list(arch_mapping.keys()) archs.append(local_arch) - result: List[str] = list() + result: list[str] = list() possible = list(itertools.product(["custom"], kernels, archs)) + list(itertools.product(["distro"], distros, archs)) for p in possible: result.append(f"{p[0]}-{p[1]}-{p[2]}") @@ -208,7 +208,7 @@ def list_possible() -> List[str]: # arch: [x86_64, amd64] # Each normalized_vm_def output corresponds to each VM # requested by the user -def normalize_vm_def(possible: List[str], vm: str) -> VMDef: +def normalize_vm_def(possible: list[str], vm: str) -> VMDef: if process is None or fuzz is None: raise Exit("thefuzz module is not installed, 
please install it to continue") @@ -266,7 +266,7 @@ def xz_suffix_removed(path: str): # to the micro-vms scenario in test-infra-definitions def get_kernel_config( platforms: Platforms, recipe: Recipe, version: str, arch: ArchOrLocal -) -> Union[DistroKernel, CustomKernel]: +) -> DistroKernel | CustomKernel: if recipe == "custom": return get_custom_kernel_config(version, arch) @@ -280,7 +280,7 @@ def get_kernel_config( return {"tag": version, "image_source": os.path.join(url_base, kernel_path), "dir": kernel_name} -def vmset_exists(vm_config: VMConfig, tags: Set[str]) -> bool: +def vmset_exists(vm_config: VMConfig, tags: set[str]) -> bool: vmsets = vm_config["vmsets"] for vmset in vmsets: @@ -303,7 +303,7 @@ def vmset_name(arch: ArchOrLocal, recipe: Recipe) -> str: return f"{recipe}_{arch}" -def add_custom_vmset(vmset: 'VMSet', vm_config: VMConfig): +def add_custom_vmset(vmset: VMSet, vm_config: VMConfig): arch = vmset.arch if arch == local_arch: arch = arch_mapping[platform.machine()] @@ -338,7 +338,7 @@ def add_custom_vmset(vmset: 'VMSet', vm_config: VMConfig): vm_config["vmsets"].append(new_set) -def add_vmset(vmset: 'VMSet', vm_config: VMConfig): +def add_vmset(vmset: VMSet, vm_config: VMConfig): if vmset_exists(vm_config, vmset.tags): return @@ -352,7 +352,7 @@ def add_vmset(vmset: 'VMSet', vm_config: VMConfig): vm_config["vmsets"].append(new_set) -def add_kernel(vm_config: VMConfig, kernel: Kernel, tags: Set[str]): +def add_kernel(vm_config: VMConfig, kernel: Kernel, tags: set[str]): for vmset in vm_config["vmsets"]: if set(vmset.get("tags", [])) != tags: continue @@ -366,11 +366,11 @@ def add_kernel(vm_config: VMConfig, kernel: Kernel, tags: Set[str]): raise Exit(f"Unable to find vmset with tags {tags}") -def add_vcpu(vmset: VMSetDict, vcpu: List[int]): +def add_vcpu(vmset: VMSetDict, vcpu: list[int]): vmset["vcpu"] = vcpu -def add_memory(vmset: VMSetDict, memory: List[int]): +def add_memory(vmset: VMSetDict, memory: list[int]): vmset["memory"] = memory @@ -449,11 +449,11 @@ def __repr__(self): class VMSet: - def __init__(self, arch: ArchOrLocal, recipe: Recipe, tags: Set[str]): + def __init__(self, arch: ArchOrLocal, recipe: Recipe, tags: set[str]): self.arch: ArchOrLocal = arch self.recipe: Recipe = recipe - self.tags: Set[str] = tags - self.vms: List[VM] = list() + self.tags: set[str] = tags + self.vms: list[VM] = list() def __eq__(self, other: Any): if not isinstance(other, VMSet): @@ -492,8 +492,8 @@ def custom_version_prefix(version: str) -> str: return "lte_414" if lte_414(version) else "gt_414" -def build_vmsets(normalized_vm_defs: List[VMDef], sets: List[str]) -> Set[VMSet]: - vmsets: Set[VMSet] = set() +def build_vmsets(normalized_vm_defs: list[VMDef], sets: list[str]) -> set[VMSet]: + vmsets: set[VMSet] = set() for recipe, version, arch in normalized_vm_defs: if recipe == "custom": sets.append(custom_version_prefix(version)) @@ -515,10 +515,10 @@ def build_vmsets(normalized_vm_defs: List[VMDef], sets: List[str]) -> Set[VMSet] def generate_vmconfig( vm_config: VMConfig, - normalized_vm_defs: List[VMDef], - vcpu: List[int], - memory: List[int], - sets: List[str], + normalized_vm_defs: list[VMDef], + vcpu: list[int], + memory: list[int], + sets: list[str], ci: bool, template: str, ) -> VMConfig: @@ -557,15 +557,15 @@ def generate_vmconfig( return vm_config -def ls_to_int(ls: List[Any]) -> List[int]: - int_ls: List[int] = list() +def ls_to_int(ls: list[Any]) -> list[int]: + int_ls: list[int] = list() for elem in ls: int_ls.append(int(elem)) return int_ls -def 
build_normalized_vm_def_set(vms: str) -> List[VMDef]: +def build_normalized_vm_def_set(vms: str) -> list[VMDef]: vm_types = vms.split(',') if len(vm_types) == 0: raise Exit("No VMs to boot provided") @@ -576,12 +576,12 @@ def build_normalized_vm_def_set(vms: str) -> List[VMDef]: def gen_config_for_stack( ctx: Context, - stack: Optional[str], + stack: str | None, vms: str, - sets: List[str], + sets: list[str], init_stack: bool, - vcpu: List[int], - memory: List[int], + vcpu: list[int], + memory: list[int], new: bool, ci: bool, template: str, @@ -629,12 +629,12 @@ def gen_config_for_stack( info(f"[+] vmconfig @ {vmconfig_file}") -def list_all_distro_normalized_vms(archs: List[Arch], component: Optional[Component] = None): +def list_all_distro_normalized_vms(archs: list[Arch], component: Component | None = None): platforms = get_platforms() if component is not None: platforms = filter_by_ci_component(platforms, component) - vms: List[VMDef] = list() + vms: list[VMDef] = list() for arch in archs: for distro in platforms[arch]: vms.append(("distro", distro, arch)) @@ -644,7 +644,7 @@ def list_all_distro_normalized_vms(archs: List[Arch], component: Optional[Compon def gen_config( ctx: Context, - stack: Optional[str], + stack: str | None, vms: str, sets: str, init_stack: bool, @@ -678,7 +678,7 @@ def gen_config( template, ) - arch_ls: List[Arch] = ["x86_64", "arm64"] + arch_ls: list[Arch] = ["x86_64", "arm64"] if arch != "": arch_ls = [arch_mapping[arch]] diff --git a/tasks/kmt.py b/tasks/kmt.py index d7614ccc73a36..6066b6e428006 100644 --- a/tasks/kmt.py +++ b/tasks/kmt.py @@ -10,7 +10,7 @@ from collections import defaultdict from glob import glob from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, Iterable, cast from invoke.context import Context from invoke.tasks import task @@ -56,7 +56,7 @@ from termcolor import colored except ImportError: - def colored(text: str, color: Optional[str]) -> str: # noqa: U100 + def colored(text: str, color: str | None) -> str: # noqa: U100 return text @@ -87,17 +87,17 @@ def create_stack(ctx, stack=None): ) def gen_config( ctx: Context, - stack: Optional[str] = None, + stack: str | None = None, vms: str = "", sets: str = "", init_stack=False, - vcpu: Optional[str] = None, - memory: Optional[str] = None, + vcpu: str | None = None, + memory: str | None = None, new=False, ci=False, arch: str = "", output_file: str = "vmconfig.json", - from_ci_pipeline: Optional[str] = None, + from_ci_pipeline: str | None = None, use_local_if_possible=False, vmconfig_template: Component = "system-probe", ): @@ -129,11 +129,11 @@ def gen_config( def gen_config_from_ci_pipeline( ctx: Context, - stack: Optional[str] = None, - pipeline: Optional[str] = None, + stack: str | None = None, + pipeline: str | None = None, init_stack=False, - vcpu: Optional[str] = None, - memory: Optional[str] = None, + vcpu: str | None = None, + memory: str | None = None, new=False, ci=False, use_local_if_possible=False, @@ -167,7 +167,7 @@ def gen_config_from_ci_pipeline( vcpu = str(vcpu_list[0]) info(f"[+] setting vcpu to {vcpu}") - failed_packages: Set[str] = set() + failed_packages: set[str] = set() for job in test_jobs: if job.status == "failed" and job.component == vmconfig_template: vm_arch = job.arch @@ -191,8 +191,8 @@ def gen_config_from_ci_pipeline( @task def launch_stack( ctx: Context, - stack: Optional[str] = None, - ssh_key: Optional[str] = None, + stack: str | None = None, + ssh_key: str | 
None = None,
     x86_ami: str = X86_AMI_ID_SANDBOX,
     arm_ami: str = ARM_AMI_ID_SANDBOX,
     provision_microvms: bool = True,
@@ -201,18 +201,18 @@ def launch_stack(


 @task
-def destroy_stack(ctx: Context, stack: Optional[str] = None, pulumi=False, ssh_key: Optional[str] = None):
+def destroy_stack(ctx: Context, stack: str | None = None, pulumi=False, ssh_key: str | None = None):
     clean(ctx, stack)
     stacks.destroy_stack(ctx, stack, pulumi, ssh_key)


 @task
-def pause_stack(_, stack: Optional[str] = None):
+def pause_stack(_, stack: str | None = None):
     stacks.pause_stack(stack)


 @task
-def resume_stack(_, stack: Optional[str] = None):
+def resume_stack(_, stack: str | None = None):
     stacks.resume_stack(stack)
@@ -268,7 +268,7 @@ def config_ssh_key(ctx: Context):
         ssh_key = {'path': ssh_key_path, 'name': name, 'aws_key_name': aws_config_name}
     else:
         info("[+] Finding SSH keys to use...")
-        ssh_keys: List[SSHKey]
+        ssh_keys: list[SSHKey]
         if method == "1password":
             agent_keys = get_ssh_agent_key_names(ctx)
             ssh_keys = [{'path': None, 'name': key, 'aws_key_name': key} for key in agent_keys]
@@ -337,9 +337,9 @@ def start_compiler(ctx: Context):
     cc.start()


-def filter_target_domains(vms: str, infra: Dict[ArchOrLocal, HostInstance], arch: Optional[ArchOrLocal] = None):
+def filter_target_domains(vms: str, infra: dict[ArchOrLocal, HostInstance], arch: ArchOrLocal | None = None):
     vmsets = vmconfig.build_vmsets(vmconfig.build_normalized_vm_def_set(vms), [])
-    domains: List[LibvirtDomain] = list()
+    domains: list[LibvirtDomain] = list()
     for vmset in vmsets:
         if arch is not None and full_arch(vmset.arch) != full_arch(arch):
             warn(f"Ignoring VM {vmset} as it is not of the expected architecture {arch}")
@@ -352,8 +352,8 @@ def filter_target_domains(vms: str, infra: Dict[ArchOrLocal, HostInstance], arch
     return domains


-def get_archs_in_domains(domains: Iterable[LibvirtDomain]) -> Set[Arch]:
-    archs: Set[Arch] = set()
+def get_archs_in_domains(domains: Iterable[LibvirtDomain]) -> set[Arch]:
+    archs: set[Arch] = set()
     for d in domains:
         archs.add(full_arch(d.instance.arch))
     return archs
@@ -390,7 +390,7 @@ def full_arch(arch: ArchOrLocal) -> Arch:


 class KMTPaths:
-    def __init__(self, stack: Optional[str], arch: Arch):
+    def __init__(self, stack: str | None, arch: Arch):
         self.stack = stack
         self.arch = arch

@@ -456,7 +456,7 @@ def build_dependencies(
     layout_file: PathOrStr,
     source_dir: PathOrStr,
     ci=False,
-    stack: Optional[str] = None,
+    stack: str | None = None,
     verbose=True,
 ) -> None:
     if stack is None:
@@ -503,7 +503,7 @@ def is_root():
     return os.getuid() == 0


-def vms_have_correct_deps(ctx: Context, domains: List[LibvirtDomain], depsfile: PathOrStr):
+def vms_have_correct_deps(ctx: Context, domains: list[LibvirtDomain], depsfile: PathOrStr):
     deps_dir = os.path.dirname(depsfile)
     sha256sum = ctx.run(f"cd {deps_dir} && sha256sum {os.path.basename(depsfile)}", warn=True)
     if sha256sum is None or not sha256sum.ok:
@@ -518,7 +518,7 @@ def vms_have_correct_deps(ctx: Context, domains: List[LibvirtDomain], depsfile:
     return True


-def needs_build_from_scratch(ctx: Context, paths: KMTPaths, domains: "list[LibvirtDomain]", full_rebuild: bool):
+def needs_build_from_scratch(ctx: Context, paths: KMTPaths, domains: list[LibvirtDomain], full_rebuild: bool):
     return (
         full_rebuild
         or (not paths.dependencies.exists())
@@ -530,9 +530,9 @@ def needs_build_from_scratch(ctx: Context, paths: KMTPaths, domains: list[Libvi
 def prepare(
     ctx: Context,
     vms: str,
-    stack: Optional[str] = None,
-    arch: Optional[Arch] = None,
-    ssh_key: Optional[str] = None,
+    stack: str | None = None,
+    arch: Arch | None = None,
+    ssh_key: str | None = None,
     full_rebuild=False,
     packages="",
     verbose=True,
@@ -572,7 +572,7 @@ def prepare(
         run_dir=CONTAINER_AGENT_PATH,
     )

-    target_instances: List[HostInstance] = list()
+    target_instances: list[HostInstance] = list()
     for d in domains:
         target_instances.append(d.instance)
@@ -599,8 +599,8 @@ def prepare(
         info(f"[+] Tests packages setup in target VM {d}")


-def build_run_config(run: Optional[str], packages: List[str]):
-    c: Dict[str, Any] = dict()
+def build_run_config(run: str | None, packages: list[str]):
+    c: dict[str, Any] = dict()
     if len(packages) == 0:
         return {"*": {"exclude": False}}
@@ -634,15 +634,15 @@ def build_run_config(run: Optional[str], packages: List[str]):
 )
 def test(
     ctx: Context,
-    vms: Optional[str] = None,
-    stack: Optional[str] = None,
+    vms: str | None = None,
+    stack: str | None = None,
     packages="",
-    run: Optional[str] = None,
+    run: str | None = None,
     quick=False,
     retry=2,
     run_count=1,
     full_rebuild=False,
-    ssh_key: Optional[str] = None,
+    ssh_key: str | None = None,
     verbose=True,
     test_logs=False,
     test_extra_arguments=None,
@@ -704,13 +704,13 @@ def test(
 )
 def build(
     ctx: Context,
-    vms: Optional[str] = None,
-    stack: Optional[str] = None,
-    ssh_key: Optional[str] = None,
+    vms: str | None = None,
+    stack: str | None = None,
+    ssh_key: str | None = None,
     full_rebuild=False,
     verbose=True,
-    arch: Optional[ArchOrLocal] = None,
-    system_probe_yaml: Optional[str] = DEFAULT_CONFIG_PATH,
+    arch: ArchOrLocal | None = None,
+    system_probe_yaml: str | None = DEFAULT_CONFIG_PATH,
 ):
     stack = check_and_get_stack(stack)
     if not stacks.stack_exists(stack):
@@ -739,7 +739,7 @@ def build(
         ctx, arch, "test/new-e2e/system-probe/test-runner/files/system-probe-dependencies.json", "./", stack=stack
     )

-    target_instances: List[HostInstance] = list()
+    target_instances: list[HostInstance] = list()
     for d in domains:
         target_instances.append(d.instance)
@@ -770,7 +770,7 @@ def build(


 @task
-def clean(ctx: Context, stack: Optional[str] = None, container=False, image=False):
+def clean(ctx: Context, stack: str | None = None, container=False, image=False):
     stack = check_and_get_stack(stack)
     if not stacks.stack_exists(stack):
         raise Exit(f"Stack {stack} does not exist. Please create with 'inv kmt.stack-create --stack='")
@@ -795,7 +795,7 @@ def clean(ctx: Context, stack: Optional[str] = None, container=False, image=Fals
 )
 def ssh_config(
     ctx: Context,
-    stacks: Optional[str] = None,
+    stacks: str | None = None,
     ddvm_rsa="tasks/kernel_matrix_testing/ddvm_rsa",
 ):
     """
@@ -871,8 +871,8 @@ def ssh_config(
         "all": "Show status of all stacks. --stack parameter will be ignored",
     }
 )
-def status(ctx: Context, stack: Optional[str] = None, all=False, ssh_key: Optional[str] = None):
-    stacks: List[str]
+def status(ctx: Context, stack: str | None = None, all=False, ssh_key: str | None = None):
+    stacks: list[str]
     if all:
         stacks = [stack.name for stack in Path(get_kmt_os().stacks_dir).iterdir() if stack.is_dir()]
@@ -880,8 +880,8 @@ def status(ctx: Context, stack: Optional[str] = None, all=False, ssh_key: Option
         stacks = [check_and_get_stack(stack)]

     # Dict of status lines for each stack
-    status: Dict[str, List[str]] = defaultdict(list)
-    stack_status: Dict[str, Tuple[int, int, int, int]] = {}
+    status: dict[str, list[str]] = defaultdict(list)
+    stack_status: dict[str, tuple[int, int, int, int]] = {}

     info("[+] Getting status...")
     ssh_key_obj = try_get_ssh_key(ctx, ssh_key)
@@ -964,7 +964,7 @@ def explain_ci_failure(_, pipeline: str):
     failed_setup_jobs = [j for j in setup_jobs if j.status == "failed"]
     failed_jobs = [j for j in test_jobs if j.status == "failed"]

-    failreasons: Dict[str, str] = {}
+    failreasons: dict[str, str] = {}
     ok = "✅"
     testfail = "❌"
     infrafail = "⚙️"
@@ -1007,15 +1007,15 @@
     print(f"Legend: OK {ok} | Test failure {testfail} | Infra failure {infrafail} | Skip ' ' (empty cell)")

-    def groupby_comp_vmset(job: KMTTestRunJob) -> Tuple[str, str]:
+    def groupby_comp_vmset(job: KMTTestRunJob) -> tuple[str, str]:
         return (job.component, job.vmset)

     # Show first a matrix of failed distros and archs for each tuple of component and vmset
     jobs_by_comp_and_vmset = itertools.groupby(sorted(failed_jobs, key=groupby_comp_vmset), groupby_comp_vmset)
     for (component, vmset), group_jobs in jobs_by_comp_and_vmset:
         group_jobs = list(group_jobs)  # Consume the iterator, make a copy

-        distros: Dict[str, Dict[Arch, str]] = defaultdict(lambda: {"x86_64": " ", "arm64": " "})
-        distro_arch_with_test_failures: List[Tuple[str, Arch]] = []
+        distros: dict[str, dict[Arch, str]] = defaultdict(lambda: {"x86_64": " ", "arm64": " "})
+        distro_arch_with_test_failures: list[tuple[str, Arch]] = []

         # Build the distro table with all jobs for this component and vmset, to correctly
         # differentiate between skipped and ok jobs
@@ -1040,7 +1040,7 @@ def groupby_comp_vmset(job: KMTTestRunJob) -> Tuple[str, str]:
         test_results_by_distro_arch = {(j.distro, j.arch): j.get_test_results() for j in jobs_with_failed_tests}
         # Get the names of all tests
         all_tests = set(itertools.chain.from_iterable(d.keys() for d in test_results_by_distro_arch.values()))
-        test_failure_table: List[List[str]] = []
+        test_failure_table: list[list[str]] = []

         for testname in sorted(all_tests):
             test_row = [testname]
@@ -1064,7 +1064,7 @@ def groupby_comp_vmset(job: KMTTestRunJob) -> Tuple[str, str]:
             )
         )

-    def groupby_arch_comp(job: KMTTestRunJob) -> Tuple[str, str]:
+    def groupby_arch_comp(job: KMTTestRunJob) -> tuple[str, str]:
         return (job.arch, job.component)

     # Now get the exact infra failure for each VM
@@ -1079,7 +1079,7 @@ def groupby_arch_comp(job: KMTTestRunJob) -> Tuple[str, str]:
             error("[x] No corresponding setup job found")
             continue

-        infra_fail_table: List[List[str]] = []
+        infra_fail_table: list[list[str]] = []
         for failed_job in group_jobs:
             try:
                 boot_log = setup_job.get_vm_boot_log(failed_job.distro, failed_job.vmset)
@@ -1145,7 +1145,7 @@ def groupby_arch_comp(job: KMTTestRunJob) -> Tuple[str, str]:


 @task()
-def tmux(ctx: Context, stack: Optional[str] = None):
+def tmux(ctx: Context, stack: str | None = None):
     """Create a tmux session with panes for each VM in the stack.

     Note that this task requires the tmux command to be available on the system, and the SSH
diff --git a/tasks/libs/build/ninja.py b/tasks/libs/build/ninja.py
index 3b41c80b36fcd..a2f92654964ba 100644
--- a/tasks/libs/build/ninja.py
+++ b/tasks/libs/build/ninja.py
@@ -32,7 +32,7 @@ def escape_path(word):
     return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')


-class NinjaWriter(object):
+class NinjaWriter:
     def __init__(self, output, width=78):
         self.output = output
         self.width = width
diff --git a/tasks/libs/ciproviders/github_actions_tools.py b/tasks/libs/ciproviders/github_actions_tools.py
index 4fa435db325c0..25ae774571f93 100644
--- a/tasks/libs/ciproviders/github_actions_tools.py
+++ b/tasks/libs/ciproviders/github_actions_tools.py
@@ -109,7 +109,7 @@ def trigger_macos_workflow(
             if recent_run.id in might_be_waiting:
                 might_be_waiting.remove(recent_run.id)
                 for job in jobs:
-                    if any([step.name == workflow_id for step in job.steps]):
+                    if any(step.name == workflow_id for step in job.steps):
                         return recent_run
             else:
                 might_be_waiting.add(recent_run.id)
@@ -232,7 +232,7 @@ def parse_log_file(log_file):

     error_regex = re.compile(r'\[error\]|(Linter|Test) failures|Traceback')

-    with open(log_file, 'r') as f:
+    with open(log_file) as f:
         lines = f.readlines()
         for line_number, line in enumerate(lines):
             if error_regex.search(line):
diff --git a/tasks/libs/ciproviders/github_api.py b/tasks/libs/ciproviders/github_api.py
index 1d0e12f760b08..9cad12d17978b 100644
--- a/tasks/libs/ciproviders/github_api.py
+++ b/tasks/libs/ciproviders/github_api.py
@@ -168,7 +168,6 @@ def download_from_url(self, url, destination_dir, destination_file):
         return zip_target_path

     def download_logs(self, run_id, destination_dir):
-
         run = self._repository.get_workflow_run(run_id)
         logs_url = run.logs_url
         _, headers, _ = run._requester.requestJson("GET", logs_url)
diff --git a/tasks/libs/ciproviders/gitlab.py b/tasks/libs/ciproviders/gitlab.py
index 6e79edca40939..d10737439a1c5 100644
--- a/tasks/libs/ciproviders/gitlab.py
+++ b/tasks/libs/ciproviders/gitlab.py
@@ -21,7 +21,7 @@ class Gitlab(RemoteAPI):
     BASE_URL = "https://gitlab.ddbuild.io/api/v4"

     def __init__(self, project_name="DataDog/datadog-agent", api_token=""):
-        super(Gitlab, self).__init__("Gitlab")
+        super().__init__("Gitlab")
         self.api_token = api_token
         self.project_name = project_name
         self.authorization_error_message = (
@@ -378,7 +378,7 @@ class ReferenceTag(yaml.YAMLObject):
     Custom yaml tag to handle references in gitlab-ci configuration
     """

-    yaml_tag = u'!reference'
+    yaml_tag = '!reference'

     def __init__(self, references):
         self.references = references
@@ -536,10 +536,10 @@ def load_context(context):
                 f"Invalid context file: {context}, missing 'variables' key. Input file must be similar to tasks/unit-tests/testdata/gitlab_main_context_template.yml",
                 1,
             )
-        return [[(k, v) for k, v in y["variables"].items()]]
+        return [list(y["variables"].items())]
     else:
         try:
             j = json.loads(context)
-            return [[(k, v) for k, v in j.items()]]
+            return [list(j.items())]
         except json.JSONDecodeError:
             raise Exit(f"Invalid context: {context}, must be a valid json, or a path to a yaml file", 1)
diff --git a/tasks/libs/common/check_tools_version.py b/tasks/libs/common/check_tools_version.py
index 68d21cc6ba9d0..3872d7f489498 100644
--- a/tasks/libs/common/check_tools_version.py
+++ b/tasks/libs/common/check_tools_version.py
@@ -13,7 +13,7 @@ def expected_go_repo_v() -> str:
     """
     Returns the repository go version by reading the .go-version file.
    """
-    with open(GO_VPATH, 'r', encoding='utf-8') as f:
+    with open(GO_VPATH, encoding='utf-8') as f:
        return f.read().strip()
diff --git a/tasks/libs/common/junit_upload_core.py b/tasks/libs/common/junit_upload_core.py
index 6b592dc08beb6..f2427c61ed8c4 100644
--- a/tasks/libs/common/junit_upload_core.py
+++ b/tasks/libs/common/junit_upload_core.py
@@ -284,7 +284,7 @@ def produce_junit_tar(files, result_path):

     tags_file = io.BytesIO()
     for k, v in tags.items():
-        tags_file.write(f"--tags {k}:{v} ".encode("UTF-8"))
+        tags_file.write(f"--tags {k}:{v} ".encode())
     tags_info = tarfile.TarInfo(TAGS_FILE_NAME)
     tags_info.size = tags_file.getbuffer().nbytes
     tags_file.seek(0)
diff --git a/tasks/libs/common/remote_api.py b/tasks/libs/common/remote_api.py
index 20f4008abed1f..0cbc9e63cc982 100644
--- a/tasks/libs/common/remote_api.py
+++ b/tasks/libs/common/remote_api.py
@@ -9,12 +9,12 @@ class APIError(Exception):
     def __init__(self, request, api_name):
-        super(APIError, self).__init__(f"{api_name} says: {request.content}")
+        super().__init__(f"{api_name} says: {request.content}")
         self.status_code = request.status_code
         self.request = request


-class RemoteAPI(object):
+class RemoteAPI:
     """
     Helper class to perform calls against a given remote API.
     """
diff --git a/tasks/libs/common/utils.py b/tasks/libs/common/utils.py
index 0186235c06565..e34801b1ec40f 100644
--- a/tasks/libs/common/utils.py
+++ b/tasks/libs/common/utils.py
@@ -2,7 +2,6 @@
 Miscellaneous functions, no tasks here
 """

-
 import json
 import os
 import re
@@ -395,14 +394,14 @@ def query_version(ctx, git_sha_length=7, prefix=None, major_version_hint=None):
     # The string that's passed in will look something like this: 6.0.0-beta.0-1-g4f19118
     # if the tag is 6.0.0-beta.0, it has been one commit since the tag and that commit hash is g4f19118
     cmd = "git describe --tags --candidates=50"
-    if prefix and type(prefix) == str:
+    if prefix and isinstance(prefix, str):
         cmd += f" --match \"{prefix}-*\""
     else:
         if major_version_hint:
-            cmd += r' --match "{}\.*"'.format(major_version_hint)  # noqa: FS002
+            cmd += rf' --match "{major_version_hint}\.*"'  # noqa: FS002
         else:
             cmd += " --match \"[0-9]*\""
-    if git_sha_length and type(git_sha_length) == int:
+    if git_sha_length and isinstance(git_sha_length, int):
         cmd += f" --abbrev={git_sha_length}"

     described_version = ctx.run(cmd, hide=True).stdout.strip()
@@ -413,8 +412,8 @@ def query_version(ctx, git_sha_length=7, prefix=None, major_version_hint=None):
     commit_number = int(commit_number_match.group('commit_number'))

     version_re = r"v?(?P<version>\d+\.\d+\.\d+)(?:(?:-|\.)(?P<pre>[0-9A-Za-z.-]+))?"
-    if prefix and type(prefix) == str:
-        version_re = r"^(?:{}-)?".format(prefix) + version_re  # noqa: FS002
+    if prefix and isinstance(prefix, str):
+        version_re = rf"^(?:{prefix}-)?" + version_re  # noqa: FS002
     else:
         version_re = r"^" + version_re
     if commit_number == 0:
@@ -491,7 +490,7 @@ def get_version(
                 agent_version_cache_file_exist = True
 
         if agent_version_cache_file_exist:
-            with open(AGENT_VERSION_CACHE_NAME, "r") as file:
+            with open(AGENT_VERSION_CACHE_NAME) as file:
                 cache_data = json.load(file)
 
             version, pre, commits_since_version, git_sha, pipeline_id = cache_data[major_version]
@@ -500,7 +499,7 @@ def get_version(
 
             if pre and include_pre:
                 version = f"{version}-{pre}"
-    except (IOError, json.JSONDecodeError, IndexError) as e:
+    except (OSError, json.JSONDecodeError, IndexError) as e:
         # If a cache file is found but corrupted we ignore it.
         print(f"Error while recovering the version from {AGENT_VERSION_CACHE_NAME}: {e}", file=sys.stderr)
         version = ""
@@ -551,11 +550,11 @@ def get_version_numeric_only(ctx, major_version='7'):
                     hide="stdout",
                 )
 
-            with open(AGENT_VERSION_CACHE_NAME, "r") as file:
+            with open(AGENT_VERSION_CACHE_NAME) as file:
                 cache_data = json.load(file)
 
             version, *_ = cache_data[major_version]
-        except (IOError, json.JSONDecodeError, IndexError) as e:
+        except (OSError, json.JSONDecodeError, IndexError) as e:
             # If a cache file is found but corrupted we ignore it.
             print(f"Error while recovering the version from {AGENT_VERSION_CACHE_NAME}: {e}")
             version = ""
@@ -565,7 +564,7 @@ def get_version_numeric_only(ctx, major_version='7'):
 
 
 def load_release_versions(_, target_version):
-    with open("release.json", "r") as f:
+    with open("release.json") as f:
         versions = json.load(f)
         if target_version in versions:
             # windows runners don't accepts anything else than strings in the
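Most of the annotation churn in the tasks/kmt.py and tasks/libs/common/utils.py hunks above is mechanical typing modernization: PEP 604 unions (`str | None` instead of `Optional[str]`) and PEP 585 builtin generics (`list[str]` instead of `typing.List[str]`). A minimal sketch of the before/after pattern (the function and parameter names here are illustrative, not taken from the patch):

```
from __future__ import annotations  # lets the new syntax run on Python < 3.10

from typing import Dict, List, Optional


def old_style(stack: Optional[str] = None, tags: Optional[List[str]] = None) -> Dict[str, int]:
    return {}


# Same signature with PEP 604 unions and PEP 585 builtin generics. Without the
# __future__ import, `str | None` needs Python 3.10+ and `list[str]` needs 3.9+.
def new_style(stack: str | None = None, tags: list[str] | None = None) -> dict[str, int]:
    return {}


assert old_style() == new_style() == {}
```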
diff --git a/tasks/libs/owners/parsing.py b/tasks/libs/owners/parsing.py
index a5122a445aa7a..a75ffa511d0b3 100644
--- a/tasks/libs/owners/parsing.py
+++ b/tasks/libs/owners/parsing.py
@@ -4,7 +4,7 @@
 def read_owners(owners_file: str) -> Any:
     from codeowners import CodeOwners
 
-    with open(owners_file, 'r') as f:
+    with open(owners_file) as f:
         return CodeOwners(f.read())
 
 
diff --git a/tasks/libs/pipeline/notifications.py b/tasks/libs/pipeline/notifications.py
index f21bd8cf8e42a..45466efb0cc0f 100644
--- a/tasks/libs/pipeline/notifications.py
+++ b/tasks/libs/pipeline/notifications.py
@@ -20,7 +20,7 @@ def load_and_validate(file_name: str, default_placeholder: str, default_value: s
     result: Dict[str, str] = {}
     with p.open(encoding='utf-8') as file_stream:
         for key, value in yaml.safe_load(file_stream).items():
-            if not (type(key) is str and type(value) is str):
+            if not (isinstance(key, str) and isinstance(value, str)):
                 raise ValueError(f"File {file_name} contains a non-string key or value. Key: {key}, Value: {value}")
             result[key] = default_value if value == default_placeholder else value
     return result
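The `type(x) is str` comparisons rewritten above differ from `isinstance` in one corner case: exact-type checks reject subclasses, which is rarely the intent when validating values. A small sketch of the difference (the `MyStr` subclass is hypothetical):

```
class MyStr(str):
    """Hypothetical str subclass, e.g. a tagged config value."""


s = MyStr("hello")

print(type(s) is str)               # False: exact-type check rejects subclasses
print(isinstance(s, str))           # True: isinstance accepts subclasses
print(isinstance(s, (str, bytes)))  # isinstance can also check several types at once
```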
diff --git a/tasks/libs/types/copyright.py b/tasks/libs/types/copyright.py
index 63cd3485fbd27..fc82b0ee13105 100755
--- a/tasks/libs/types/copyright.py
+++ b/tasks/libs/types/copyright.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 
 import datetime
 import re
@@ -131,7 +130,7 @@ def _get_matching_files(root_dir, glob_pattern, exclude=None):
     @staticmethod
     def _get_header(filepath):
         header = []
-        with open(filepath, "r", encoding="utf-8") as file_obj:
+        with open(filepath, encoding="utf-8") as file_obj:
             # We expect a specific header format which should be 4 lines
             for _ in range(4):
                 header.append(file_obj.readline().strip())
diff --git a/tasks/linter.py b/tasks/linter.py
index 2f43354d300e3..1532fddd5d136 100644
--- a/tasks/linter.py
+++ b/tasks/linter.py
@@ -37,9 +37,7 @@ def python(ctx):
     https://github.com/DataDog/datadog-agent/blob/{DEFAULT_BRANCH}/docs/dev/agent_dev_env.md#pre-commit-hooks"""
     )
 
-    ctx.run("flake8 .")
-    ctx.run("black --check --diff .")
-    ctx.run("isort --check-only --diff .")
+    ctx.run("ruff check --fix .")
     ctx.run("vulture --ignore-decorators @task --ignore-names 'test_*,Test*' tasks")
 
 
diff --git a/tasks/msi.py b/tasks/msi.py
index 0765afefa09ae..0b24fc09b6d96 100644
--- a/tasks/msi.py
+++ b/tasks/msi.py
@@ -2,7 +2,6 @@
 msi namespaced tasks
 """
 
-
 import mmap
 import os
 import shutil
diff --git a/tasks/pipeline.py b/tasks/pipeline.py
index 9a9478e7f9e8e..04857730ef2a2 100644
--- a/tasks/pipeline.py
+++ b/tasks/pipeline.py
@@ -136,7 +136,7 @@ def clean_running_pipelines(ctx, git_ref=DEFAULT_BRANCH, here=False, use_latest_
 
 def workflow_rules(gitlab_file=".gitlab-ci.yml"):
     """Get Gitlab workflow rules list in a YAML-formatted string."""
-    with open(gitlab_file, 'r') as f:
+    with open(gitlab_file) as f:
         return yaml.dump(yaml.safe_load(f.read())["workflow"]["rules"])
 
 
@@ -735,7 +735,7 @@ def update_test_infra_def(file_path, image_tag):
     """
     Override TEST_INFRA_DEFINITIONS_BUILDIMAGES in `.gitlab/common/test_infra_version.yml` file
     """
-    with open(file_path, "r") as gl:
+    with open(file_path) as gl:
         file_content = gl.readlines()
     with open(file_path, "w") as gl:
         for line in file_content:
@@ -750,7 +750,7 @@ def update_gitlab_config(file_path, image_tag, test_version):
     """
     Override variables in .gitlab-ci.yml file
     """
-    with open(file_path, "r") as gl:
+    with open(file_path) as gl:
         file_content = gl.readlines()
     gitlab_ci = yaml.load("".join(file_content), Loader=GitlabYamlLoader())
     # TEST_INFRA_DEFINITION_BUILDIMAGE label format differs from other buildimages
@@ -762,12 +762,12 @@ def update_gitlab_config(file_path, image_tag, test_version):
     images = [name.replace("_SUFFIX", "") for name in suffixes]
     with open(file_path, "w") as gl:
         for line in file_content:
-            if any(re.search(fr"{suffix}:", line) for suffix in suffixes):
+            if any(re.search(rf"{suffix}:", line) for suffix in suffixes):
                 if test_version:
                     gl.write(line.replace('""', '"_test_only"'))
                 else:
                     gl.write(line.replace('"_test_only"', '""'))
-            elif any(re.search(fr"{image}:", line) for image in images):
+            elif any(re.search(rf"{image}:", line) for image in images):
                 current_version = re.search(r"v\d+-\w+", line)
                 if current_version:
                     gl.write(line.replace(current_version.group(0), image_tag))
@@ -784,7 +784,7 @@ def update_circleci_config(file_path, image_tag, test_version):
     Override variables in .gitlab-ci.yml file
     """
     image_name = "gcr.io/datadoghq/agent-circleci-runner"
-    with open(file_path, "r") as circle:
+    with open(file_path) as circle:
         circle_ci = circle.read()
     match = re.search(rf"({image_name}(_test_only)?):([a-zA-Z0-9_-]+)\n", circle_ci)
     if not match:
@@ -853,16 +853,19 @@ def trigger_external(ctx, owner_branch_name: str, no_verify=False):
     ]
 
     # Commands to push the branch
-    commands = [
-        # Fetch
-        f"git remote add {owner} git@github.com:{owner}/datadog-agent.git",
-        f"git fetch '{owner}'",
-        # Create branch
-        f"git checkout '{owner}/{branch}'",  # This first checkout puts us in a detached head state, thus the second checkout below
-        f"git checkout -b '{owner}/{branch}'",
-        # Push
-        f"git push --set-upstream origin '{owner}/{branch}'{no_verify_flag}",
-    ] + restore_commands
+    commands = (
+        [
+            # Fetch
+            f"git remote add {owner} git@github.com:{owner}/datadog-agent.git",
+            f"git fetch '{owner}'",
+            # Create branch
+            f"git checkout '{owner}/{branch}'",  # This first checkout puts us in a detached head state, thus the second checkout below
+            f"git checkout -b '{owner}/{branch}'",
+            # Push
+            f"git push --set-upstream origin '{owner}/{branch}'{no_verify_flag}",
+        ]
+        + restore_commands
+    )
 
     # Run commands then restore commands
     ret_code = 0
diff --git a/tasks/release.py b/tasks/release.py
index 9b882ccabdbb0..00c50db66b66f 100644
--- a/tasks/release.py
+++ b/tasks/release.py
@@ -374,7 +374,7 @@ def list_major_change(_, milestone):
 
 
 def _load_release_json():
-    with open("release.json", "r") as release_json_stream:
+    with open("release.json") as release_json_stream:
         return json.load(release_json_stream, object_pairs_hook=OrderedDict)
 
 
@@ -868,7 +868,6 @@ def __get_force_option(force: bool) -> str:
 def __tag_single_module(ctx, module, agent_version, commit, push, force_option, devel):
     """Tag a given module."""
     for tag in module.tag(agent_version):
-
         if devel:
             tag += "-devel"
 
@@ -1462,7 +1461,7 @@ def create_and_update_release_branch(ctx, repo, release_branch, base_directory="
             _save_release_json(rj)
 
             # Step 1.2 - In datadog-agent repo update gitlab-ci.yaml jobs
-            with open(".gitlab-ci.yml", "r") as gl:
+            with open(".gitlab-ci.yml") as gl:
                 file_content = gl.readlines()
 
             with open(".gitlab-ci.yml", "w") as gl:
diff --git a/tasks/rtloader.py b/tasks/rtloader.py
index 5917a4d73038c..60f9fec88b651 100644
--- a/tasks/rtloader.py
+++ b/tasks/rtloader.py
@@ -1,6 +1,7 @@
 """
 RtLoader namespaced tasks
 """
+
 import errno
 import os
 import shutil
@@ -160,7 +161,8 @@ def generate_doc(ctx):
 
     # doxygen puts both errors and warnings in stderr
     result = ctx.run(
-        "doxygen '{0}/doxygen/Doxyfile' 2>'{0}/doxygen/errors.log'".format(rtloader_path), warn=True  # noqa: FS002
+        "doxygen '{0}/doxygen/Doxyfile' 2>'{0}/doxygen/errors.log'".format(rtloader_path),  # noqa: UP032
+        warn=True,
     )
 
     if result.exited != 0:
diff --git a/tasks/security_agent.py b/tasks/security_agent.py
index 0af5fc538de7e..34999b4a96433 100644
--- a/tasks/security_agent.py
+++ b/tasks/security_agent.py
@@ -940,7 +940,7 @@ def sync_secl_win_pkg(ctx):
     ctx.run("rm -r pkg/security/seclwin/model")
     ctx.run("mkdir -p pkg/security/seclwin/model")
 
-    for (ffrom, fto) in files_to_copy:
+    for ffrom, fto in files_to_copy:
         if not fto:
             fto = ffrom
 
diff --git a/tasks/selinux.py b/tasks/selinux.py
index ed58956e09850..66c3ba7219324 100644
--- a/tasks/selinux.py
+++ b/tasks/selinux.py
@@ -1,6 +1,7 @@
 """
 SELinux namespaced tasks
 """
+
 import os
 
 from invoke import task
diff --git a/tasks/system_probe.py b/tasks/system_probe.py
index a941ebb36a63f..9dfd4c321993a 100644
--- a/tasks/system_probe.py
+++ b/tasks/system_probe.py
@@ -810,7 +810,7 @@ def clean_build(ctx):
         return True
 
     # if this build happens on a new commit do it cleanly
-    with open(BUILD_COMMIT, 'r') as f:
+    with open(BUILD_COMMIT) as f:
         build_commit = f.read().rstrip()
         curr_commit = ctx.run("git rev-parse HEAD", hide=True).stdout.rstrip()
         if curr_commit != build_commit:
@@ -933,13 +933,13 @@ def kitchen_test(ctx, target=None, provider=None):
     # Retrieve a list of all available vagrant images
     images = {}
     platform_file = os.path.join(KITCHEN_DIR, "platforms.json")
-    with open(platform_file, 'r') as f:
+    with open(platform_file) as f:
         for kplatform, by_provider in json.load(f).items():
             if "vagrant" in by_provider and vagrant_arch in by_provider["vagrant"]:
                 for image in by_provider["vagrant"][vagrant_arch]:
                     images[image] = kplatform
 
-    if not (target in images):
+    if target not in images:
         print(
             f"please run inv -e system-probe.kitchen-test --target , where  is one of the following:\n{list(images.keys())}"
         )
@@ -1811,7 +1811,7 @@ def _test_docker_image_list():
 
     images = set()
     for docker_compose_path in docker_compose_paths:
-        with open(docker_compose_path, "r") as f:
+        with open(docker_compose_path) as f:
             docker_compose = yaml.safe_load(f.read())
         for component in docker_compose["services"]:
             images.add(docker_compose["services"][component]["image"])
@@ -1885,7 +1885,7 @@ def save_build_outputs(ctx, destfile):
     absdest = os.path.abspath(destfile)
     count = 0
     with tempfile.TemporaryDirectory() as stagedir:
-        with open("compile_commands.json", "r") as compiledb:
+        with open("compile_commands.json") as compiledb:
             for outputitem in json.load(compiledb):
                 if "output" not in outputitem:
                     continue
diff --git a/tasks/systray.py b/tasks/systray.py
index f3ab9c4dfaf4a..bc1e7039e07e8 100644
--- a/tasks/systray.py
+++ b/tasks/systray.py
@@ -2,7 +2,6 @@
 systray tasks
 """
 
-
 import os
 import sys
 
diff --git a/tasks/unit-tests/pipeline_tests.py b/tasks/unit-tests/pipeline_tests.py
index 64920ff4ff6fd..9d748684c82a4 100644
--- a/tasks/unit-tests/pipeline_tests.py
+++ b/tasks/unit-tests/pipeline_tests.py
@@ -47,7 +47,7 @@ def tearDown(self) -> None:
 
     def test_nominal(self):
         pipeline.update_gitlab_config(self.gitlabci_file, "1mageV3rsi0n", test_version=True)
-        with open(self.gitlabci_file, "r") as gl:
+        with open(self.gitlabci_file) as gl:
             gitlab_ci = yaml.safe_load(gl)
         for variable, value in gitlab_ci["variables"].items():
             # TEST_INFRA_DEFINITION_BUILDIMAGE label format differs from other buildimages
@@ -56,7 +56,7 @@ def test_nominal(self):
 
     def test_update_no_test(self):
         pipeline.update_gitlab_config(self.gitlabci_file, "1mageV3rsi0n", test_version=False)
-        with open(self.gitlabci_file, "r") as gl:
+        with open(self.gitlabci_file) as gl:
             gitlab_ci = yaml.safe_load(gl)
         for variable, value in gitlab_ci["variables"].items():
             if variable.endswith("_SUFFIX"):
@@ -77,7 +77,7 @@ def tearDown(self) -> None:
 
     def test_nominal(self):
         pipeline.update_circleci_config(self.circleci_file, "1m4g3", test_version=True)
-        with open(self.circleci_file, "r") as gl:
+        with open(self.circleci_file) as gl:
             circle_ci = yaml.safe_load(gl)
         full_image = circle_ci['templates']['job_template']['docker'][0]['image']
         image, version = full_image.split(":")
@@ -86,7 +86,7 @@ def test_nominal(self):
 
     def test_update_no_test(self):
         pipeline.update_circleci_config(self.circleci_file, "1m4g3", test_version=False)
-        with open(self.circleci_file, "r") as gl:
+        with open(self.circleci_file) as gl:
             circle_ci = yaml.safe_load(gl)
         full_image = circle_ci['templates']['job_template']['docker'][0]['image']
         image, version = full_image.split(":")
diff --git a/tasks/unit-tests/release_tests.py b/tasks/unit-tests/release_tests.py
index 274b3efd0ec74..f02e89a516d75 100644
--- a/tasks/unit-tests/release_tests.py
+++ b/tasks/unit-tests/release_tests.py
@@ -140,7 +140,7 @@ def test_nonexistant_minor(self, gh_mock):
         )
 
 
-MOCK_JMXFETCH_CONTENT = "jmxfetch content".encode('utf-8')
+MOCK_JMXFETCH_CONTENT = b"jmxfetch content"
 
 
 def mocked_jmxfetch_requests_get(*_args, **_kwargs):
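The rewrite above rests on two equivalences: `str.encode()` already defaults to UTF-8, and a constant ASCII string can be written directly as a bytes literal. For instance:

```
s = "jmxfetch content"

# All three spellings produce the same bytes object.
assert s.encode("UTF-8") == s.encode() == b"jmxfetch content"
```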
diff --git a/tasks/update_go.py b/tasks/update_go.py
index dc385cbcba126..e218c1a0453dc 100644
--- a/tasks/update_go.py
+++ b/tasks/update_go.py
@@ -130,7 +130,7 @@ def _update_file(warn: bool, path: str, pattern: str, replace: str, expected_mat
     # newline='' keeps the file's newline character(s)
     # meaning it keeps '\n' for most files and '\r\n' for windows specific files
 
-    with open(path, "r", newline='', encoding='utf-8') as reader:
+    with open(path, newline='', encoding='utf-8') as reader:
         content = reader.read()
 
     if dry_run:
@@ -153,7 +153,7 @@ def _update_file(warn: bool, path: str, pattern: str, replace: str, expected_mat
 
 # returns the current go version
 def _get_repo_go_version() -> str:
-    with open(GO_VERSION_FILE, "r") as reader:
+    with open(GO_VERSION_FILE) as reader:
         version = reader.read()
     return version.strip()
 
diff --git a/tasks/vscode.py b/tasks/vscode.py
index 35a51e8deb319..f8a0910fd7790 100644
--- a/tasks/vscode.py
+++ b/tasks/vscode.py
@@ -3,6 +3,7 @@
 
 Helpers for getting vscode set up nicely
 """
+
 import json
 import os
 from typing import OrderedDict
@@ -50,7 +51,7 @@ def set_buildtags(
     settings = {}
     fullpath = os.path.join(VSCODE_DIR, VSCODE_FILE)
     if os.path.exists(fullpath):
-        with open(fullpath, "r") as sf:
+        with open(fullpath) as sf:
             settings = json.load(sf, object_pairs_hook=OrderedDict)
 
     settings["go.buildTags"] = ",".join(use_tags)
diff --git a/test/e2e/containers/fake_datadog/app/api.py b/test/e2e/containers/fake_datadog/app/api.py
index efda5874e8c7c..5c401894000e9 100644
--- a/test/e2e/containers/fake_datadog/app/api.py
+++ b/test/e2e/containers/fake_datadog/app/api.py
@@ -280,7 +280,7 @@ def stat_records():
             p = path.join(record_dir, elt)
             st = os.stat(p)
             lines = 0
-            with open(p, 'r') as f:
+            with open(p) as f:
                 for _ in f:
                     lines += 1
             j[elt] = {"size": st.st_size, "lines": lines}
@@ -304,7 +304,7 @@ def get_records(name):
         return Response(status=503)
 
     payloads = list()
-    with open(path.join(record_dir, name), 'r') as f:
+    with open(path.join(record_dir, name)) as f:
         for l in f:
             payloads.append(json.loads(l))
     return json.dumps(payloads), 200
diff --git a/test/e2e/containers/fake_datadog/app/monitoring.py b/test/e2e/containers/fake_datadog/app/monitoring.py
index 36fdf6ef80c8d..15ae0a1a9e3c8 100644
--- a/test/e2e/containers/fake_datadog/app/monitoring.py
+++ b/test/e2e/containers/fake_datadog/app/monitoring.py
@@ -69,6 +69,6 @@ def count_status(response: Response):
     def log_exception(exc_info):
         class_name = extract_exception_name(exc_info)
         exception_latency.labels(request.method, request.url_rule, class_name).observe(time.time() - g._start_time)
-        app.logger.error(f'Exception on {request.path} [{request.method}]', exc_info=exc_info)
+        app.logger.error('Exception on %s [%s]', request.path, request.method, exc_info=exc_info)
 
     app.log_exception = log_exception
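The logging rewrite above passes the values as arguments instead of pre-rendering an f-string: the message is then only interpolated if a handler actually emits the record, and `exc_info` attaches the traceback exactly as before. A minimal standalone sketch:

```
import logging

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger("example")

path, method = "/metrics", "GET"

# Eager: the f-string is always built, even though DEBUG is disabled here.
log.debug(f"Exception on {path} [{method}]")

# Lazy: %-style arguments are only interpolated if the record is emitted.
log.debug("Exception on %s [%s]", path, method)

try:
    raise ValueError("boom")
except ValueError:
    # exc_info still works with lazy formatting.
    log.error("Exception on %s [%s]", path, method, exc_info=True)
```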
diff --git a/test/e2e/cws-tests/tests/test_e2e_cspm_docker.py b/test/e2e/cws-tests/tests/test_e2e_cspm_docker.py
index 224b75eccf001..bbc365807542c 100644
--- a/test/e2e/cws-tests/tests/test_e2e_cspm_docker.py
+++ b/test/e2e/cws-tests/tests/test_e2e_cspm_docker.py
@@ -122,7 +122,7 @@ def test_privileged_container(self):
         with Step(msg="wait for intake (~1m)", emoji=":alarm_clock:"):
             time.sleep(1 * 60)
 
-        with Step(msg="wait for datadog.security_agent.compliance.running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.compliance.running metric", emoji="\N{BEER MUG}"):  # fmt: off
             self.app.wait_for_metric("datadog.security_agent.compliance.running", host=socket.gethostname())
 
         ## Disabled while no CSPM API is available
@@ -132,7 +132,7 @@ def test_privileged_container(self):
         with Step(msg="wait for finding generation (~1m)", emoji=":alarm_clock:"):
             time.sleep(1 * 60)
 
-        with Step(msg="wait for datadog.security_agent.compliance.containers_running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.compliance.containers_running metric", emoji="\N{BEER MUG}"):  # fmt: off
             self.app.wait_for_metric(
                 "datadog.security_agent.compliance.containers_running", container_id=self.container_id
             )
diff --git a/test/e2e/cws-tests/tests/test_e2e_cspm_kubernetes.py b/test/e2e/cws-tests/tests/test_e2e_cspm_kubernetes.py
index 2fea068b6825f..ef0871c049f30 100644
--- a/test/e2e/cws-tests/tests/test_e2e_cspm_kubernetes.py
+++ b/test/e2e/cws-tests/tests/test_e2e_cspm_kubernetes.py
@@ -12,7 +12,6 @@
 
 
 class TestE2EKubernetes(unittest.TestCase):
-
     namespace = "default"
     in_cluster = False
     expectedFindingsMasterEtcdNode = {
@@ -184,7 +183,7 @@ def test_k8s(self):
         with Step(msg="wait for intake (~1m)", emoji=":alarm_clock:"):
             time.sleep(1 * 60)
 
-        with Step(msg="wait for datadog.security_agent.compliance.running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.compliance.running metric", emoji="\N{beer mug}"):  # fmt: off
             self.app.wait_for_metric("datadog.security_agent.compliance.running", host=TestE2EKubernetes.hostname)
 
         ## Disabled while no CSPM API is available
@@ -194,7 +193,7 @@ def test_k8s(self):
         with Step(msg="wait for finding generation (~1m)", emoji=":alarm_clock:"):
             time.sleep(1 * 60)
 
-        with Step(msg="wait for datadog.security_agent.compliance.containers_running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.compliance.containers_running metric", emoji="\N{beer mug}"):  # fmt: off
             self.app.wait_for_metric(
                 "datadog.security_agent.compliance.containers_running", host=TestE2EKubernetes.hostname
             )
diff --git a/test/e2e/cws-tests/tests/test_e2e_cws_docker.py b/test/e2e/cws-tests/tests/test_e2e_cws_docker.py
index f20433088c9ac..25ac1075f6c2d 100644
--- a/test/e2e/cws-tests/tests/test_e2e_cws_docker.py
+++ b/test/e2e/cws-tests/tests/test_e2e_cws_docker.py
@@ -153,7 +153,7 @@ def test_open_signal(self):
         with Step(msg="wait for host tags (3m)", emoji=":alarm_clock:"):
             time.sleep(3 * 60)
 
-        with Step(msg="wait for datadog.security_agent.runtime.running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.runtime.running metric", emoji="\N{BEER MUG}"):  # fmt: off
             self.app.wait_for_metric("datadog.security_agent.runtime.running", host=socket.gethostname())
 
         with Step(msg="check agent event", emoji=":check_mark_button:"):
@@ -184,7 +184,7 @@ def test_open_signal(self):
                 "unable to find rule_id tag attribute",
             )
 
-        with Step(msg="wait for datadog.security_agent.runtime.containers_running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.runtime.containers_running metric", emoji="\N{BEER MUG}"):  # fmt: off
             self.app.wait_for_metric("datadog.security_agent.runtime.containers_running", host=socket.gethostname())
 
 
diff --git a/test/e2e/cws-tests/tests/test_e2e_cws_kubernetes.py b/test/e2e/cws-tests/tests/test_e2e_cws_kubernetes.py
index 1f0451ea6e4ae..54f67e0b21576 100644
--- a/test/e2e/cws-tests/tests/test_e2e_cws_kubernetes.py
+++ b/test/e2e/cws-tests/tests/test_e2e_cws_kubernetes.py
@@ -174,7 +174,7 @@ def test_open_signal(self):
             jsonSchemaValidator = JsonSchemaValidator()
             jsonSchemaValidator.validate_json_data("self_test.json", attributes)
 
-        with Step(msg="wait for datadog.security_agent.runtime.running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.runtime.running metric", emoji="\N{BEER MUG}"):  # fmt: off
             self.app.wait_for_metric("datadog.security_agent.runtime.running", host=TestE2EKubernetes.hostname)
 
         with Step(msg="check agent event", emoji=":check_mark_button:"):
@@ -205,7 +205,7 @@ def test_open_signal(self):
                 "unable to find rule_id tag attribute",
             )
 
-        with Step(msg="wait for datadog.security_agent.runtime.containers_running metric", emoji="\N{beer mug}"):
+        with Step(msg="wait for datadog.security_agent.runtime.containers_running metric", emoji="\N{BEER MUG}"):  # fmt: off
             self.app.wait_for_metric(
                 "datadog.security_agent.runtime.containers_running", host=TestE2EKubernetes.hostname
             )
diff --git a/test/e2e/scripts/run-instance/argo-to-junit/argo_to_junit.py b/test/e2e/scripts/run-instance/argo-to-junit/argo_to_junit.py
index 384ab408ad8b6..e82478be3fe8a 100755
--- a/test/e2e/scripts/run-instance/argo-to-junit/argo_to_junit.py
+++ b/test/e2e/scripts/run-instance/argo-to-junit/argo_to_junit.py
@@ -48,7 +48,7 @@ def main():
     parser.add_argument("-o", "--output-file", default="junit.xml", help="The junit xml file")
     args = parser.parse_args()
 
-    with open(args.input_file, "r") as f:
+    with open(args.input_file) as f:
         crd = json.loads(f.read())
     crd_name = crd.get("metadata", {}).get("name")
     nodes = crd.get("status", {}).get("nodes")
diff --git a/test/integration/serverless/log_normalize.py b/test/integration/serverless/log_normalize.py
index a2ead8c961442..088b7fe46d7eb 100644
--- a/test/integration/serverless/log_normalize.py
+++ b/test/integration/serverless/log_normalize.py
@@ -313,7 +313,7 @@ def parse_args():
         args = parse_args()
 
         if args.logs.startswith('file:'):
-            with open(args.logs[5:], 'r') as f:
+            with open(args.logs[5:]) as f:
                 args.logs = f.read()
 
         print(normalize(args.logs, args.type, args.stage, args.accountid))
diff --git a/test/kitchen/tasks/__init__.py b/test/kitchen/tasks/__init__.py
index ba8e2f0b9c804..7d75e1a988354 100644
--- a/test/kitchen/tasks/__init__.py
+++ b/test/kitchen/tasks/__init__.py
@@ -1,6 +1,7 @@
 """
 Invoke entrypoint, import here all the tasks we want to make available
 """
+
 from invoke import Collection
 
 from . import kitchen
diff --git a/test/kitchen/tasks/kitchen.py b/test/kitchen/tasks/kitchen.py
index 524fa141c46d4..beb345b66d4ad 100644
--- a/test/kitchen/tasks/kitchen.py
+++ b/test/kitchen/tasks/kitchen.py
@@ -44,7 +44,7 @@ def genconfig(
         provider = "azure"
 
     if platformfile:
-        with open(platformfile, "r") as f:
+        with open(platformfile) as f:
             platforms = json.load(f)
     else:
         try:
@@ -60,7 +60,7 @@ def genconfig(
         except Exception:
             traceback.print_exc()
             print("Warning: Could not fetch the latest kitchen platforms.json from Github, using local version.")
-            with open("platforms.json", "r") as f:
+            with open("platforms.json") as f:
                 platforms = json.load(f)
 
     # create the TEST_PLATFORMS environment variable
@@ -132,18 +132,18 @@ def genconfig(
         # first read the correct driver
         print(f"Adding driver file drivers/{provider}-driver.yml\n")
 
-        with open(f"drivers/{provider}-driver.yml", 'r') as driverfile:
+        with open(f"drivers/{provider}-driver.yml") as driverfile:
             kitchenyml.write(driverfile.read())
 
         # read the generic contents
-        with open("test-definitions/platforms-common.yml", 'r') as commonfile:
+        with open("test-definitions/platforms-common.yml") as commonfile:
             kitchenyml.write(commonfile.read())
 
         # now open the requested test files
         for f in glob.glob(f"test-definitions/{testfiles}.yml"):
             if f.lower().endswith("platforms-common.yml"):
                 print("Skipping common file\n")
-            with open(f, 'r') as infile:
+            with open(f) as infile:
                 print(f"Adding file {f}\n")
                 kitchenyml.write(infile.read())
 
@@ -152,7 +152,7 @@ def genconfig(
         env = load_user_env(ctx, provider, uservars)
 
     # set KITCHEN_ARCH if it's not set in the user env
-    if 'KITCHEN_ARCH' not in env and not ('KITCHEN_ARCH' in os.environ.keys()):
+    if 'KITCHEN_ARCH' not in env and 'KITCHEN_ARCH' not in os.environ.keys():
         env['KITCHEN_ARCH'] = arch
 
     env['TEST_PLATFORMS'] = testplatforms
@@ -175,7 +175,7 @@ def should_rerun_failed(_, runlog):
     test_result_re_gotest = re.compile(r'--- FAIL: (?P<test_name>[A-Z].*) \(.*\)')
     test_result_re_rspec = re.compile(r'\d+\s+examples?,\s+(?P<failures>\d+)\s+failures?')
 
-    with open(runlog, 'r', encoding='utf-8') as f:
+    with open(runlog, encoding='utf-8') as f:
         text = f.read()
         result_rspec = set(test_result_re_rspec.findall(text))
         result_gotest = set(test_result_re_gotest.findall(text))
@@ -240,7 +240,7 @@ def load_user_env(_, provider, varsfile):
     env = {}
     commentpattern = re.compile("^comment")
     if os.path.exists(varsfile):
-        with open(varsfile, "r") as f:
+        with open(varsfile) as f:
             vars = json.load(f)
             for key, val in vars.get("global", {}).items():
                 if commentpattern.match(key):
diff --git a/tools/agent_QA/test_cases/containers.py b/tools/agent_QA/test_cases/containers.py
index 91c5acf614c09..fe3562fb6ef1a 100644
--- a/tools/agent_QA/test_cases/containers.py
+++ b/tools/agent_QA/test_cases/containers.py
@@ -90,7 +90,7 @@ def build(self, config):  # noqa: U100
 ------
 # Test
 
-- Collect all activated => Source and service are properly set 
+- Collect all activated => Source and service are properly set
 - Collect all disabled => Source and service are properly set and only this container is collected
 - Check that processing rules are working in AD labels:  `com.datadoghq.ad.logs: '[{"source": "java", "service": "myapp", "log_processing_rules": [{"type": "multi_line", "name": "log_start_with_date", "pattern" : "\\d{4}\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])"}]}]'``
 - `DD_LOGS_CONFIG_DOCKER_CONTAINER_USE_FILE=false` uses docker socket to collect logs
@@ -128,7 +128,7 @@ def build(self, config):  # noqa: U100
 Run a container with
 
 ```
-docker run --log-driver json-file -d centos bash -c "echo '1'; echo '2'; sleep 99999999" 
+docker run --log-driver json-file -d centos bash -c "echo '1'; echo '2'; sleep 99999999"
 ```
 
 ## Test
@@ -193,7 +193,7 @@ def build(self, config):  # noqa: U100
 ---
 # Test
 
-- All logs from all containers are collected from file and not from the docker socket (see `agent status` that will now show whether a container is tailed from the docker socket or it's log file) 
+- All logs from all containers are collected from file and not from the docker socket (see `agent status` that will now show whether a container is tailed from the docker socket or it's log file)
 - All logs are properly tagged with container metadata
 - When the agent cannot reach /var/lib/docker/containers it should fallback on tailing from the docker socket
 - Logs are properly tagged with container metadata
@@ -247,7 +247,7 @@ def build(self, config):  # noqa: U100
 ---
 # Test
 
-- All logs from podman containers are collected from file and not from the docker socket (see `agent status` that will now show whether a container is tailed from the docker socket or it's log file) 
+- All logs from podman containers are collected from file and not from the docker socket (see `agent status` that will now show whether a container is tailed from the docker socket or it's log file)
 - All logs are properly tagged with container metadata
 
 """
@@ -297,7 +297,7 @@ def build(self, config):  # noqa: U100
 ---
 To check:
 
-- All logs from podman containers are collected from the docker socket (see `agent status` that will now show whether a container is tailed from the docker socket or it's log file) 
+- All logs from podman containers are collected from the docker socket (see `agent status` that will now show whether a container is tailed from the docker socket or it's log file)
 - All logs are properly tagged with container metadata
 """
         )
@@ -305,7 +305,7 @@ def build(self, config):  # noqa: U100
 
 class ContainerScenario(TestCase):
     def __init__(self, k8s, cfgsource, cca, kcuf, dcuf):
-        super(ContainerScenario, self).__init__()
+        super().__init__()
         self.k8s = k8s
         self.cfgsource = cfgsource
         self.cca = cca
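The `super(ContainerScenario, self).__init__()` → `super().__init__()` change above (and the same pattern in the Gitlab and APIError hunks earlier) uses the zero-argument form of `super()`, which Python 3 resolves from the enclosing class automatically. A sketch with hypothetical classes:

```
class TestCase:
    def __init__(self):
        self.steps = []


class ContainerScenarioSketch(TestCase):
    def __init__(self, k8s):
        # Zero-argument form; equivalent to super(ContainerScenarioSketch, self)
        # but doesn't repeat the class name.
        super().__init__()
        self.k8s = k8s


assert ContainerScenarioSketch(k8s=True).steps == []
```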
diff --git a/tools/agent_QA/test_cases/linux.py b/tools/agent_QA/test_cases/linux.py
index f7170579a67b2..f383e24b4a73e 100644
--- a/tools/agent_QA/test_cases/linux.py
+++ b/tools/agent_QA/test_cases/linux.py
@@ -6,7 +6,6 @@ class TailJounald(TestCase):
     name = "[Journald] Agent collect logs from journald"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -15,7 +14,7 @@ def build(self, config):
 ```
 logs:
   -type: journald
-``` 
+```
 """
         )
 
@@ -26,7 +25,7 @@ def build(self, config):
             """# Test
 - check that the `hello world` log shows up in app
 
-update the config: 
+update the config:
 
 ```
 logs:
@@ -49,7 +48,6 @@ class TailJournaldStartPosition(TestCase):
     name = "[Files] `start_position` defines where to tail from"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -59,14 +57,14 @@ def build(self, config):
 logs:
     - type: journald
       start_position: beginning
-``` 
+```
 
 # Test
 
 1. start the agent
 2. generate some logs like `echo 'test message' | systemd-cat`
 3. check the logs show up in app
-4. stop the agent. 
+4. stop the agent.
 """
         )
         self.append(filePositionSharedSteps())
@@ -76,7 +74,6 @@ class SNMPTraps(TestCase):
     name = "[SNMP traps] Check that traps are working"
 
     def build(self, config):  # noqa: U100
-
         self.append(
             """
 # Setup
diff --git a/tools/agent_QA/test_cases/windows.py b/tools/agent_QA/test_cases/windows.py
index 4951b81a01eb5..f9ae61f8ce2a8 100644
--- a/tools/agent_QA/test_cases/windows.py
+++ b/tools/agent_QA/test_cases/windows.py
@@ -6,7 +6,6 @@ class TestEventLog(TestCase):
     name = "[Windows Event] Agent collect windows event as logs"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -32,6 +31,6 @@ def build(self, config):
 
 # Test
 
-- check that the emitted logs show up in app. Only the `Testing123` should appear. 
+- check that the emitted logs show up in app. Only the `Testing123` should appear.
 """
         )
diff --git a/tools/agent_QA/test_cases/xplat/config.py b/tools/agent_QA/test_cases/xplat/config.py
index 13209f2835b77..c273a33e494cf 100644
--- a/tools/agent_QA/test_cases/xplat/config.py
+++ b/tools/agent_QA/test_cases/xplat/config.py
@@ -6,25 +6,24 @@ class EndpointTests(TestCase):
     name = "[Endpoints] Test endpoint configs"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
         path = "/var/log/hello-world.log" if config.platform != Platform.windows else "C:\\tmp\\hello-world.log"
         self.append(
             f"""
 ```
-logs: 
+logs:
 - type: file
     path: {path}
     service: test-file-tailing
     source: hello-world
-``` 
+```
 """
         )
 
         self.append(
             """
-in your `datadog.yaml`: 
+in your `datadog.yaml`:
 
 ```
 logs_config:
@@ -37,7 +36,7 @@ def build(self, config):
 - Generate some logs ``docker run -it bfloerschddog/flog -l > hello-world.log`
 
 # Test
-- validate logs are flowing to the intake 
+- validate logs are flowing to the intake
 
 *TIP*: Open Live tail and filter by your host. After each test, refresh the page to clear the live tail. You can leave the log producer running between tests
 
@@ -99,21 +98,21 @@ def build(self, config):  # noqa: U100
    
 ```
 
-3. get some logs flowing through the agent. Warning if you use `DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL` you will create a feedback loop from the mock intake containers. I'd recommend spawning a dedicated logging container. 
+3. get some logs flowing through the agent. Warning if you use `DD_LOGS_CONFIG_CONTAINER_COLLECT_ALL` you will create a feedback loop from the mock intake containers. I'd recommend spawning a dedicated logging container.
 
-4. Watch the logs flow in both intakes. 
+4. Watch the logs flow in both intakes.
 5. Kill one of the intakes
  - make sure logs are still flowing to the other one
 
 6. Kill the second intake (both are now dead)
- - check `agent status` to see that # of bytes read is not increasing (pipeline should be blocked) 
+ - check `agent status` to see that # of bytes read is not increasing (pipeline should be blocked)
 
 7. Restart one of the intakes - logs should start flowing
-8. restart the other intake - logs should start flowing. 
+8. restart the other intake - logs should start flowing.
 
 ### App QA
 
-Now instead of using mock intakes - use two real Datadog intakes. You will need 2 API keys each from different orgs. 
+Now instead of using mock intakes - use two real Datadog intakes. You will need 2 API keys each from different orgs.
 
 ```
 docker run -d --name dd-agent \
@@ -129,6 +128,6 @@ def build(self, config):  # noqa: U100
    
  ```
 
-Stream some logs and watch the livetail in both orgs for the logs. 
+Stream some logs and watch the livetail in both orgs for the logs.
 """
         )
diff --git a/tools/agent_QA/test_cases/xplat/file_tests.py b/tools/agent_QA/test_cases/xplat/file_tests.py
index 500f4f8b8e38e..387fdf543ed8d 100644
--- a/tools/agent_QA/test_cases/xplat/file_tests.py
+++ b/tools/agent_QA/test_cases/xplat/file_tests.py
@@ -6,7 +6,6 @@ class TailFile(TestCase):
     name = "[Files] Agent can tail a file"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -26,21 +25,21 @@ def build(self, config):
         self.append(
             f"""
 ```
-logs: 
+logs:
   - type: file
     path: {path}
     service: test-file-tailing
     source: hello-world
-``` 
+```
 - Start the agent
 - generate some logs ({genlogs})
 
 # Test
 - Validate the logs show up in app with the correct `source` and `service` tags
-- Block permission to the file ({blockPermissions}) and check that the Agent status shows that it is inaccessible. 
-- Change the permissions back ({restorePermissions}) so it is accessible again. 
+- Block permission to the file ({blockPermissions}) and check that the Agent status shows that it is inaccessible.
+- Change the permissions back ({restorePermissions}) so it is accessible again.
 - Stop the agent, generate new logs, start the agent and make sure those are sent.
-- Rotate the log file ({rotate}), ensure that logs continue to send after rotation. 
+- Rotate the log file ({rotate}), ensure that logs continue to send after rotation.
 """
         )
 
@@ -49,7 +48,6 @@ class TailFileMultiLine(TestCase):
     name = "[Files] Agent can tail multi line logs"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -57,7 +55,7 @@ def build(self, config):
         self.append(
             f"""
 ```
-logs: 
+logs:
   - type: file
     path: {path}
     service: test-file-tailing
@@ -66,7 +64,7 @@ def build(self, config):
       - type: multi_line
         name: new_log_start_with_date
         pattern: \\d{{4}}\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])
-``` 
+```
 """
         )
 
@@ -76,7 +74,7 @@ def build(self, config):
 - generate some multi-line logs `docker run -it bfloerschddog/java-excepton-logger > {path}`
 
 # Test
-- Validate that the logs show up in app correctly. Look for the multi-line exception logs and ensure they are combined into a single log line. 
+- Validate that the logs show up in app correctly. Look for the multi-line exception logs and ensure they are combined into a single log line.
 """
         )
 
@@ -85,7 +83,6 @@ class TailFileUTF16(TestCase):
     name = "[Files] Agent can tail UTF16 files"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -93,13 +90,13 @@ def build(self, config):
         self.append(
             f"""
 ```
-logs: 
+logs:
   - type: file
     path: {path}
     service: test-file-tailing
     source: hello-world
     encoding: utf-16-le
-``` 
+```
 """
         )
 
@@ -121,7 +118,6 @@ class TailFileWildcard(TestCase):
     name = "[Files] Agent can use wildcards to tail a file"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -134,7 +130,7 @@ def build(self, config):
     path: {path}
     service: test-wildcard
     source: wildcard
-``` 
+```
 """
         )
 
@@ -157,7 +153,6 @@ class TailFileStartPosition(TestCase):
     name = "[Files] `start_position` defines where to tail from"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
 
@@ -171,7 +166,7 @@ def build(self, config):
     service: test-file-tailing
     source: hello-world
     start_position: beginning
-``` 
+```
 """
         )
 
@@ -180,7 +175,7 @@ def build(self, config):
 1. start the agent
 2. generate some logs like `docker run -it bfloerschddog/flog -l > hello-world.log`
 3. check the logs show up in app
-4. stop the agent. 
+4. stop the agent.
 """
         )
         self.append(filePositionSharedSteps())
diff --git a/tools/agent_QA/test_cases/xplat/network.py b/tools/agent_QA/test_cases/xplat/network.py
index d4766016e85b8..1397f0ee1ab67 100644
--- a/tools/agent_QA/test_cases/xplat/network.py
+++ b/tools/agent_QA/test_cases/xplat/network.py
@@ -7,7 +7,6 @@ class TailTCPUDP(TestCase):
     name = "[TCP/UDP] Agent can collect logs from TCP or UDP"
 
     def build(self, config):
-
         self.append("# Setup")
         self.append(confDir(config))
         self.append(
@@ -30,11 +29,11 @@ def build(self, config):
 ```
 echo '154.87.78.229 - - [02/Feb/2021:10:54:52 +0100] "PATCH /facilitate HTTP/1.0" 504 23565' > /dev/udp/localhost/10515
 ```
-- validate UDP logs show in app 
+- validate UDP logs show in app
 
 ```
 echo '95.154.19.214 - ratke2841 [02/Feb/2021:10:57:48 +0100] "GET /real-time/disintermediate HTTP/1.0" 504 5641'  > /dev/tcp/localhost/10514
 ```
-- validate TCP logs show in app 
+- validate TCP logs show in app
 """
         )