diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 2f4eb4391a..e3651e6e15 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -4,6 +4,7 @@ ARG USERNAME=vscode
 ARG USER_UID=1000
 ARG USER_GID=$USER_UID
 
+ARG AWS_CLI_VERSION=2.0.30
 ARG HELM_VERSION=3.3.1
 ARG KUBECTL_VERSION=1.22.4
 ARG TERRAFORM_VERSION=1.1.3
@@ -38,7 +39,13 @@ RUN : INSTALL HELM BINARY \
     && curl -fsSLO https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip \
     && unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin \
     && rm terraform_${TERRAFORM_VERSION}_linux_amd64.zip \
-    && terraform version
+    && terraform version \
+    && : INSTALL AWS CLI BINARY \
+    && curl -fsSLO https://awscli.amazonaws.com/awscli-exe-linux-x86_64-${AWS_CLI_VERSION}.zip \
+    && unzip awscli-exe-linux-x86_64-${AWS_CLI_VERSION}.zip \
+    && ./aws/install -i /usr/local/aws-cli -b /usr/local/bin \
+    && rm -rf awscli-exe-linux-x86_64-${AWS_CLI_VERSION}.zip ./aws \
+    && aws --version
 
 RUN : INSTALL GEM REQUIREMENTS \
     && gem install \
diff --git a/.devcontainer/python.env b/.devcontainer/python.env
index ed94d63408..729b2ec774 100644
--- a/.devcontainer/python.env
+++ b/.devcontainer/python.env
@@ -2,4 +2,4 @@
 # PYTHONPATH can contain multiple locations separated by os.pathsep: semicolon (;) on Windows and colon (:) on Linux/macOS.
 # Invalid paths are ignored. To verify use "python.analysis.logLevel": "Trace".
 
-PYTHONPATH=ansible/playbooks/roles/repository/files/download-requirements
+PYTHONPATH=ansible/playbooks/roles/repository/library:ansible/playbooks/roles/repository/files/download-requirements
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index 78c92161f4..13d0ad2938 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -30,6 +30,16 @@
             "command": "pylint --rcfile .pylintrc ./cli ./tests --output-format text",
             "group": "test",
         },
+        {
+            "label": "Pylint repository modules",
+            "command": "pylint",
+            "args": [
+                "--rcfile", ".pylintrc",
+                "--output-format", "text",
+                "./ansible/playbooks/roles/repository/library/tests",
+            ],
+            "group": "test",
+        },
         {
             "label": "Pylint download-requirements",
             "command": "pylint",
diff --git a/Dockerfile b/Dockerfile
index a99d54cac2..990f1983a4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,6 +4,7 @@ ARG USERNAME=epiuser
 ARG USER_UID=1000
 ARG USER_GID=$USER_UID
 
+ARG AWS_CLI_VERSION=2.0.30
 ARG HELM_VERSION=3.3.1
 ARG KUBECTL_VERSION=1.22.4
 ARG TERRAFORM_VERSION=1.1.3
@@ -15,7 +16,7 @@ COPY . /epicli
 RUN : INSTALL APT REQUIREMENTS \
     && apt-get update \
     && apt-get install --no-install-recommends -y \
-       autossh curl gcc jq libcap2-bin libc6-dev libffi-dev make musl-dev openssh-client procps psmisc rsync ruby-full sudo tar unzip vim \
+       autossh curl gcc git jq libcap2-bin libc6-dev libffi-dev make musl-dev openssh-client procps psmisc rsync ruby-full sudo tar unzip vim \
 \
     && : INSTALL HELM BINARY \
     && curl -fsSLO https://get.helm.sh/helm-v${HELM_VERSION}-linux-amd64.tar.gz \
@@ -32,6 +33,13 @@ RUN : INSTALL APT REQUIREMENTS \
     && unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin \
     && rm terraform_${TERRAFORM_VERSION}_linux_amd64.zip \
     && terraform version \
+\
+    && : INSTALL AWS CLI BINARY \
+    && curl -fsSLO https://awscli.amazonaws.com/awscli-exe-linux-x86_64-${AWS_CLI_VERSION}.zip \
+    && unzip awscli-exe-linux-x86_64-${AWS_CLI_VERSION}.zip \
+    && ./aws/install -i /usr/local/aws-cli -b /usr/local/bin \
+    && rm -rf awscli-exe-linux-x86_64-${AWS_CLI_VERSION}.zip ./aws \
+    && aws --version \
 \
     && : INSTALL GEM REQUIREMENTS \
     && gem install \
diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/debian/apt_cache.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/debian/apt_cache.py
index 1d9ebae3bd..e0b3f7b173 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/src/command/debian/apt_cache.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/debian/apt_cache.py
@@ -66,6 +66,38 @@ def get_package_info(self, package: str, version: str = '') -> Dict[str, str]:
 
         return info
 
+    def __parse_apt_cache_depends(self, stdout: str) -> List[str]:
+        """
+        Parse output from `apt-cache depends`.
+        For deps with alternatives, only the first package is chosen.
+        Virtual packages are replaced by the first candidate.
+
+        :param stdout: output from `apt-cache depends` command
+        :returns: required dependencies
+        """
+        alternative_found: bool = False
+        is_alternative: bool = False
+        virt_pkg_found: bool = False
+        deps: List[str] = []
+        for dep in stdout.strip().splitlines():
+
+            dep = dep.replace(' ', '')  # remove white spaces
+
+            if virt_pkg_found and not is_alternative:
+                deps.append(dep)  # pick first from the list
+                virt_pkg_found = False
+
+            if 'Depends:' in dep:  # dependency found
+                is_alternative = alternative_found
+                alternative_found = dep.startswith('|Depends:')
+                virt_pkg_found = '<' in dep and '>' in dep
+
+                if not virt_pkg_found and not is_alternative:
+                    dep = dep.split('Depends:')[-1]  # remove "Depends:"
+                    deps.append(dep)
+
+        return deps
+
     def get_package_dependencies(self, package: str, version: str = '') -> List[str]:
         """
         Interface for `apt-cache depends`
@@ -85,38 +117,4 @@ def get_package_dependencies(self, package: str, version: str = '') -> List[str]:
                 f'{package}={version}' if version else package]
 
         raw_output = self.run(args).stdout
-
-        virt_pkg: bool = False  # True - virtual package detected, False - otherwise
-        virt_pkgs: List[str] = []  # cached virtual packages options
-        deps: List[str] = []
-        for dep in raw_output.split('\n'):
-            if not dep:  # skip empty lines
-                continue
-
-            dep = dep.replace(' ', '')  # remove white spaces
-
-            if virt_pkg:
-                virt_pkgs.append(dep)  # cache virtual package option
-
-            if '<' in dep and '>' in dep:  # virtual package, more than one dependency to choose
-                virt_pkg = True
-                continue
-
-            if 'Depends:' in dep:  # new dependency found
-                virt_pkg = False
-
-                if virt_pkgs:  # previous choices cached
-                    # avoid conflicts by choosing only non-cached dependency:
-                    if not any(item in deps for item in virt_pkgs):
-                        deps.append(virt_pkgs[0].split('Depends:')[-1])  # pick first from the list
-                    virt_pkgs.clear()
-
-                dep = dep.split('Depends:')[-1]  # remove "Depends:
-
-            if not virt_pkg and dep != package:  # avoid adding package itself
-                # workaround for https://elixirforum.com/t/installing-erlang-elixir-on-ubuntu-20-04-is-failing-esl-erlang-25-0-2-1-ubuntu-focal-amd64-deb-file-has-unexpected-size/48754
-                # TODO: verify after issues 3210 and 3209 are fixed  # pylint: disable=fixme
-                if dep != 'esl-erlang':  # for rabbitmq-server
-                    deps.append(dep)
-
-        return list(set(deps))
+        return self.__parse_apt_cache_depends(raw_output)
diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/debian/test_apt_cache.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/debian/test_apt_cache.py
index 783eedded6..8f3459f680 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/debian/test_apt_cache.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/debian/test_apt_cache.py
@@ -1,5 +1,10 @@
+import subprocess
+from unittest.mock import Mock, patch
+
+import pytest
 
 from src.command.debian.apt_cache import AptCache
+from tests.data.apt_cache import APT_CACHE_DEPENDS_RABBITMQ_STDOUT, APT_CACHE_DEPENDS_SOLR_STDOUT
 
 from tests.mocks.command_run_mock import CommandRunMock
 
@@ -16,3 +21,20 @@ def test_interface_get_package_dependencies(mocker):
                 '--no-enhances',
                 '--no-pre-depends',
                 'vim']
+
+
+APT_CACHE_DEPENDS_DATA = [
+    ('tar', 'tar\n', []),
+    ('rabbitmq-server', APT_CACHE_DEPENDS_RABBITMQ_STDOUT, ['adduser', 'erlang-base', 'erlang-crypto', 'python3']),
+    ('solr-common', APT_CACHE_DEPENDS_SOLR_STDOUT, ['curl', 'debconf', 'default-jre-headless', 'libjs-jquery'])]
+
+
+@pytest.mark.parametrize('PACKAGE_NAME, CMD_STDOUT, EXPECTED_DEPS', APT_CACHE_DEPENDS_DATA)
+def test_get_package_dependencies_return_value(PACKAGE_NAME, CMD_STDOUT, EXPECTED_DEPS):
+    mock_completed_proc = Mock(spec=subprocess.CompletedProcess)
+    mock_completed_proc.returncode = 0
+    mock_completed_proc.stdout = CMD_STDOUT
+
+    with patch('src.command.command.subprocess.run') as mock_run:
+        mock_run.return_value = mock_completed_proc
+        return_value = AptCache(1).get_package_dependencies(package=PACKAGE_NAME)
+        assert return_value == EXPECTED_DEPS
diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/data/apt_cache.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/data/apt_cache.py
new file mode 100644
index 0000000000..39f2b59d6d
--- /dev/null
+++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/data/apt_cache.py
@@ -0,0 +1,32 @@
+APT_CACHE_DEPENDS_RABBITMQ_STDOUT = '''
+rabbitmq-server
+  Depends: adduser
+ |Depends: erlang-base
+  Depends: erlang-base-hipe
+  Depends: erlang-crypto
+  Depends:
+    python3
+    dummy
+'''
+
+APT_CACHE_DEPENDS_SOLR_STDOUT = '''
+solr-common
+  Depends: curl
+  Depends: debconf
+ |Depends: default-jre-headless
+ |Depends:
+    default-jre-headless
+    openjdk-11-jre-headless
+    openjdk-13-jre-headless
+    openjdk-16-jre-headless
+    openjdk-17-jre-headless
+    openjdk-8-jre-headless
+  Depends:
+    default-jre-headless
+    openjdk-11-jre-headless
+    openjdk-13-jre-headless
+    openjdk-16-jre-headless
+    openjdk-17-jre-headless
+    openjdk-8-jre-headless
+  Depends: libjs-jquery
+'''
diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/mocks/command_run_mock.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/mocks/command_run_mock.py
index 7922980232..a4cf5193f1 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/tests/mocks/command_run_mock.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/mocks/command_run_mock.py
@@ -27,22 +27,20 @@ def __enter__(self) -> List[str]:
         """
         :return: list of arguments passed to the subprocess.run() function
         """
-        mock = Mock()
-        mock.returncode = 0
+        mock_completed_proc = Mock(spec=subprocess.CompletedProcess)
+        mock_completed_proc.returncode = 0
 
-        self.__mocker.patch('src.command.command.subprocess.run', side_effect=lambda args, encoding, stdout, stderr: mock)
-
-        spy = self.__mocker.spy(subprocess, 'run')
+        mock_run = self.__mocker.patch('src.command.command.subprocess.run', return_value=mock_completed_proc)
 
         try:
             if self.__args:
                 self.__func(**self.__args)
             else:
                 self.__func()
-        except Exception:
+        except Exception:  # pylint: disable=broad-except
            pass
 
-        return spy.call_args[0][0]
+        return mock_run.call_args[0][0]
 
     def __exit__(self, *args):
         pass
diff --git a/ansible/playbooks/roles/repository/library/__init__.py b/ansible/playbooks/roles/repository/library/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/ansible/playbooks/roles/repository/library/filter_credentials.py b/ansible/playbooks/roles/repository/library/filter_credentials.py
new file mode 100644
index 0000000000..dced8574db
--- /dev/null
+++ b/ansible/playbooks/roles/repository/library/filter_credentials.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python
+
+from __future__ import (absolute_import, division, print_function)
+
+from hashlib import sha256
+from pathlib import Path
+from typing import Callable
+import yaml
+
+__metaclass__ = type
+
+
+DOCUMENTATION = r"""
+---
+module: filter_credentials
+
+short_description: Module for filtering sensitive data stored in manifest.yml file
+
+options:
+    src:
+        description: Path to the manifest file that will be filtered
+        required: true
+        type: str
+    dest:
+        description: Path to the newly created, filtered manifest
+        required: false
+        type: str
+"""
+
+EXAMPLES = r"""
+# Pass in a manifest file without modifying the original file
+- name: Filter out manifest file and set result to stdout
+  filter_credentials:
+    src: /some/where/manifest.yml
+
+# Pass in a manifest file and save it to `dest` location
+- name: Filter out manifest file and save it as a new file
+  filter_credentials:
+    src: /some/where/manifest.yml
+    dest: /some/other/place/manifest.yml
+"""
+
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def _get_hash(filepath: Path) -> str:
+    # calculate sha256 for `filepath`
+    with filepath.open(mode='rb') as file_handler:
+        hashgen = sha256()
+        hashgen.update(file_handler.read())
+        return hashgen.hexdigest()
+
+
+def _filter_common(docs: list[dict]):
+    # remove admin user info from epiphany-cluster doc:
+    try:
+        del next(filter(lambda doc: doc['kind'] == 'epiphany-cluster', docs))['specification']['admin_user']
+    except KeyError:
+        pass  # ok, key already doesn't exist
+
+
+def _filter_aws(docs: list[dict]):
+    _filter_common(docs)
+
+    # filter epiphany-cluster doc
+    epiphany_cluster = next(filter(lambda doc: doc['kind'] == 'epiphany-cluster', docs))
+
+    try:
+        del epiphany_cluster['specification']['cloud']['credentials']
+    except KeyError:
+        pass  # ok, key already doesn't exist
+
+
+def _filter_azure(docs: list[dict]):
+    _filter_common(docs)
+
+    # filter epiphany-cluster doc
+    epiphany_cluster = next(filter(lambda doc: doc['kind'] == 'epiphany-cluster', docs))
+    try:
+        del epiphany_cluster['specification']['cloud']['subscription_name']
+    except KeyError:
+        pass  # ok, key already doesn't exist
+
+
+def _get_filtered_manifest(manifest_path: Path) -> str:
+    """
+    Load the manifest file and remove any sensitive data.
+
+    :param manifest_path: manifest file which will be loaded
+    :returns: filtered manifest
+    """
+    docs = yaml.safe_load_all(manifest_path.open())
+    filtered_docs = [doc for doc in docs if doc['kind'] in ['epiphany-cluster',
+                                                            'configuration/feature-mappings',
+                                                            'configuration/features',
+                                                            'configuration/image-registry']]
+
+    FILTER_DATA: dict[str, Callable] = {
+        'any': _filter_common,
+        'azure': _filter_azure,
+        'aws': _filter_aws
+    }
+
+    FILTER_DATA[filtered_docs[0]['provider']](filtered_docs)
+
+    return yaml.dump_all(filtered_docs)
+
+def run_module():
+    # define available arguments/parameters a user can pass to the module
+    module_args = dict(
+        src=dict(type=str, required=True),
+        dest=dict(type=str, required=False, default=None),
+    )
+
+    # seed the result dict in the object
+    result = dict(
+        changed=False,
+        manifest=''
+    )
+
+    # create ansible module
+    module = AnsibleModule(
+        argument_spec=module_args,
+        supports_check_mode=True
+    )
+
+    input_manifest = Path(module.params['src'])
+    output_manifest = Path(module.params['dest']) if module.params['dest'] else None
+
+    manifest = _get_filtered_manifest(input_manifest)
+
+    if not module.params['dest']:  # to stdout
+        result['manifest'] = manifest
+    else:  # write to a new location
+        orig_hash_value = _get_hash(input_manifest)  # hash value prior to change
+
+        with output_manifest.open(mode='w', encoding='utf-8') as output_manifest_file:
+            output_manifest_file.write(manifest)
+
+        new_hash_value = _get_hash(output_manifest)  # hash value post change
+
+        if orig_hash_value != new_hash_value:
+            result['changed'] = True
+
+    module.exit_json(**result)
+
+
+def main():
+    run_module()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/ansible/playbooks/roles/repository/library/tests/__init__.py b/ansible/playbooks/roles/repository/library/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/ansible/playbooks/roles/repository/library/tests/data/filter_credentials_data.py b/ansible/playbooks/roles/repository/library/tests/data/filter_credentials_data.py
new file mode 100644
index 0000000000..84c9c5864b
--- /dev/null
+++ b/ansible/playbooks/roles/repository/library/tests/data/filter_credentials_data.py
@@ -0,0 +1,267 @@
+CLUSTER_DOC_ANY = {
+    'kind': 'epiphany-cluster',
+    'title': 'Epiphany cluster Config',
+    'provider': 'any',
+    'name': 'default',
+    'specification': {
+        'name': 'test',
+        'admin_user': {
+            'name': 'operations',
+            'key_path': '/shared/.ssh/epiphany-operations/id_rsa'},
+        'components': {
+            'repository': {
+                'count': 1,
+                'machines': ['default-repository']},
+            'kubernetes_master': {
+                'count': 1,
+                'machines': ['default-k8s-master1']},
+            'kubernetes_node': {
+                'count': 2,
+                'machines': ['default-k8s-node1', 'default-k8s-node2']},
+            'logging': {
+                'count': 1,
+                'machines': ['default-logging']},
+            'monitoring': {
+                'count': 1,
+                'machines': ['default-monitoring']},
+            'kafka': {
+                'count': 2,
+                'machines': ['default-kafka1', 'default-kafka2']},
+            'postgresql': {
+                'count': 1,
+                'machines': ['default-postgresql']},
+            'load_balancer': {
+                'count': 1,
+                'machines': ['default-loadbalancer']},
+            'rabbitmq': {
+                'count': 1,
+                'machines': ['default-rabbitmq']},
+            'opensearch': {
+                'count': 1,
+                'machines': ['default-opensearch']}
+        }
+    },
+    'version': '2.0.1dev'
+}
+
+
+EXPECTED_CLUSTER_DOC_ANY = {
+    'kind': 'epiphany-cluster',
+    'title': 'Epiphany cluster Config',
+    'provider': 'any',
+    'name': 'default',
+    'specification': {
+        'name': 'test',
+        'components': {
+            'repository': {
+                'count': 1,
+                'machines': ['default-repository']},
+            'kubernetes_master': {
+                'count': 1,
+                'machines': ['default-k8s-master1']},
+            'kubernetes_node': {
+                'count': 2,
+                'machines': ['default-k8s-node1', 'default-k8s-node2']},
+            'logging': {
+                'count': 1,
+                'machines': ['default-logging']},
+            'monitoring': {
+                'count': 1,
+                'machines': ['default-monitoring']},
+            'kafka': {
+                'count': 2,
+                'machines': ['default-kafka1', 'default-kafka2']},
+            'postgresql': {
+                'count': 1,
+                'machines': ['default-postgresql']},
+            'load_balancer': {
+                'count': 1,
+                'machines': ['default-loadbalancer']},
+            'rabbitmq': {
+                'count': 1,
+                'machines': ['default-rabbitmq']},
+            'opensearch': {
+                'count': 1,
+                'machines': ['default-opensearch']}
+        }
+    },
+    'version': '2.0.1dev'
+}
+
+
+CLUSTER_DOC_AZURE = {
+    'kind': 'epiphany-cluster',
+    'title': 'Epiphany cluster Config',
+    'provider': 'azure',
+    'name': 'default',
+    'specification': {
+        'name': 'test',
+        'prefix': 'prefix',
+        'admin_user': {
+            'name': 'operations',
+            'key_path': '/shared/.ssh/epiphany-operations/id_rsa'},
+        'cloud': {
+            'subscription_name': 'YOUR-SUB-NAME',
+            'k8s_as_cloud_service': False,
+            'use_public_ips': False,
+            'default_os_image': 'default'},
+        'components': {
+            'repository': {'count': 1},
+            'kubernetes_master': {'count': 1},
+            'kubernetes_node': {'count': 2},
+            'logging': {'count': 1},
+            'monitoring': {'count': 1},
+            'kafka': {'count': 2},
+            'postgresql': {'count': 1},
+            'load_balancer': {'count': 1},
+            'rabbitmq': {'count': 1},
+            'opensearch': {'count': 1}
+        }
+    },
+    'version': '2.0.1dev'
+}
+
+
+EXPECTED_CLUSTER_DOC_AZURE = {
+    'kind': 'epiphany-cluster',
+    'title': 'Epiphany cluster Config',
+    'provider': 'azure',
+    'name': 'default',
+    'specification': {
+        'name': 'test',
+        'prefix': 'prefix',
+        'cloud': {
+            'k8s_as_cloud_service': False,
+            'use_public_ips': False,
+            'default_os_image': 'default'},
+        'components': {
+            'repository': {'count': 1},
+            'kubernetes_master': {'count': 1},
+            'kubernetes_node': {'count': 2},
+            'logging': {'count': 1},
+            'monitoring': {'count': 1},
+            'kafka': {'count': 2},
+            'postgresql': {'count': 1},
+            'load_balancer': {'count': 1},
+            'rabbitmq': {'count': 1},
+            'opensearch': {'count': 1}
+        }
+    },
+    'version': '2.0.1dev'
+}
+
+CLUSTER_DOC_AWS = {
+    'kind': 'epiphany-cluster',
+    'title': 'Epiphany cluster Config',
+    'provider': 'aws',
+    'name': 'default',
+    'specification': {
+        'name': 'test',
+        'prefix': 'prefix',
+        'admin_user': {
+            'name': 'ubuntu',
+            'key_path': '/shared/.ssh/epiphany-operations/id_rsa'},
+        'cloud': {
+            'k8s_as_cloud_service': False,
+            'use_public_ips': False,
+            'credentials': {
+                'access_key_id': 'XXXX-XXXX-XXXX',
+                'secret_access_key': 'XXXXXXXXXXXXXXXX'},
+            'default_os_image': 'default'
+        },
+        'components': {
+            'repository': {'count': 1},
+            'kubernetes_master': {'count': 1},
+            'kubernetes_node': {'count': 2},
+            'logging': {'count': 1},
+            'monitoring': {'count': 1},
+            'kafka': {'count': 2},
+            'postgresql': {'count': 1},
+            'load_balancer': {'count': 1},
+            'rabbitmq': {'count': 1},
+            'opensearch': {'count': 1}
+        }
+    },
+    'version': '2.0.1dev'
+}
+
+
+EXPECTED_CLUSTER_DOC_AWS = {
+    'kind': 'epiphany-cluster',
+    'title': 'Epiphany cluster Config',
+    'provider': 'aws',
+    'name': 'default',
+    'specification': {
+        'name': 'test',
+        'prefix': 'prefix',
+        'cloud': {
+            'k8s_as_cloud_service': False,
+            'use_public_ips': False,
+            'default_os_image': 'default'
+        },
+        'components': {
+            'repository': {'count': 1},
+            'kubernetes_master': {'count': 1},
+            'kubernetes_node': {'count': 2},
+            'logging': {'count': 1},
+            'monitoring': {'count': 1},
+            'kafka': {'count': 2},
+            'postgresql': {'count': 1},
+            'load_balancer': {'count': 1},
+            'rabbitmq': {'count': 1},
+            'opensearch': {'count': 1}
+        }
+    },
+    'version': '2.0.1dev'
+}
+
+
+COMMON_DOCS = [
+    {
+        'kind': 'configuration/feature-mappings',
+        'title': 'Feature mapping to components',
+        'name': 'default'
+    },
+    {
+        'kind': 'configuration/image-registry',
+        'title': 'Epiphany image registry',
+        'name': 'default'
+    }
+]
+
+
+NOT_NEEDED_DOCS = [
+    {
+        'kind': 'infrastructure/machine',
+        'provider': 'any',
+        'name': 'default-loadbalancer',
+        'specification': {
+            'hostname': 'loadbalancer',
+            'ip': '192.168.100.110'
+        },
+        'version': '2.0.1dev'
+    },
+    {
+        'kind': 'infrastructure/machine',
+        'provider': 'any',
+        'name': 'default-rabbitmq',
+        'specification': {
+            'hostname': 'rabbitmq',
+            'ip': '192.168.100.111'
+        },
+        'version': '2.0.1dev'
+    },
+    {
+        'kind': 'infrastructure/machine',
+        'provider': 'any',
+        'name': 'default-opensearch',
+        'specification': {
+            'hostname': 'opensearch',
+            'ip': '192.168.100.112'
+        },
+        'version': '2.0.1dev'
+    }
+]
+
+
+MANIFEST_WITH_ADDITIONAL_DOCS = [ CLUSTER_DOC_ANY ] + COMMON_DOCS + NOT_NEEDED_DOCS
diff --git a/ansible/playbooks/roles/repository/library/tests/test_filter_credentials.py b/ansible/playbooks/roles/repository/library/tests/test_filter_credentials.py
new file mode 100644
index 0000000000..2d4c350a4a
--- /dev/null
+++ b/ansible/playbooks/roles/repository/library/tests/test_filter_credentials.py
@@ -0,0 +1,41 @@
+from copy import deepcopy  # make sure that objects used in tests don't get damaged in between test cases
+from pathlib import Path
+
+import pytest
+
+from library.filter_credentials import _get_filtered_manifest
+
+from library.tests.data.filter_credentials_data import (
+    CLUSTER_DOC_ANY,
+    CLUSTER_DOC_AWS,
+    CLUSTER_DOC_AZURE,
+    EXPECTED_CLUSTER_DOC_ANY,
+    EXPECTED_CLUSTER_DOC_AWS,
+    EXPECTED_CLUSTER_DOC_AZURE,
+    MANIFEST_WITH_ADDITIONAL_DOCS
+)
+
+
+@pytest.mark.parametrize('CLUSTER_DOC, EXPECTED_OUTPUT_DOC',
+                         [(CLUSTER_DOC_ANY, EXPECTED_CLUSTER_DOC_ANY),
+                          (CLUSTER_DOC_AZURE, EXPECTED_CLUSTER_DOC_AZURE),
+                          (CLUSTER_DOC_AWS, EXPECTED_CLUSTER_DOC_AWS)])
+def test_epiphany_cluster_doc_filtering(CLUSTER_DOC, EXPECTED_OUTPUT_DOC, mocker):
+    # Ignore yaml parsing, work on python objects:
+    mocker.patch('library.filter_credentials.yaml.safe_load_all', return_value=[deepcopy(CLUSTER_DOC)])
+    mocker.patch('library.filter_credentials.yaml.dump_all', side_effect=lambda docs: docs)
+    mocker.patch('library.filter_credentials.Path.open')
+
+    assert _get_filtered_manifest(Path('')) == [EXPECTED_OUTPUT_DOC]
+
+
+def test_not_needed_docs_filtering(mocker):
+    # Ignore yaml parsing, work on python objects:
+    mocker.patch('library.filter_credentials.yaml.safe_load_all', return_value=deepcopy(MANIFEST_WITH_ADDITIONAL_DOCS))
+    mocker.patch('library.filter_credentials.yaml.dump_all', side_effect=lambda docs: docs)
+    mocker.patch('library.filter_credentials.Path.open')
+
+    EXPECTED_DOCS = ['epiphany-cluster', 'configuration/feature-mappings', 'configuration/image-registry']
+    FILTERED_DOCS = [doc['kind'] for doc in _get_filtered_manifest(Path(''))]
+
+    assert FILTERED_DOCS == EXPECTED_DOCS
diff --git a/ansible/playbooks/roles/repository/tasks/copy-download-requirements.yml b/ansible/playbooks/roles/repository/tasks/copy-download-requirements.yml
index 16b72da464..43650fd901 100644
--- a/ansible/playbooks/roles/repository/tasks/copy-download-requirements.yml
+++ b/ansible/playbooks/roles/repository/tasks/copy-download-requirements.yml
@@ -55,11 +55,19 @@
         dest: "{{ download_requirements_dir }}/{{ item }}"
       loop: "{{ _files }}"
 
-    - name: Copy the manifest file
-      synchronize:
-        src: "{{ input_manifest_path }}"
-        dest: "{{ download_requirements_dir }}/manifest.yml"
+    - name: Manifest handling
       when: not full_download and input_manifest_path
+      block:
+        - name: Filter sensitive data from the manifest
+          filter_credentials:
+            src: "{{ input_manifest_path }}"
+            dest: /tmp/filtered_manifest.yml
+          delegate_to: localhost
+
+        - name: Copy the manifest file
+          synchronize:
+            src: /tmp/filtered_manifest.yml
+            dest: "{{ download_requirements_dir }}/manifest.yml"
 
     - name: Copy RedHat family specific download requirements file
       synchronize:
diff --git a/docs/changelogs/CHANGELOG-2.0.md b/docs/changelogs/CHANGELOG-2.0.md
index b68859b330..aaf6345fc6 100644
--- a/docs/changelogs/CHANGELOG-2.0.md
+++ b/docs/changelogs/CHANGELOG-2.0.md
@@ -15,6 +15,7 @@
 - [#3140](https://github.com/epiphany-platform/epiphany/issues/3140) - Allow to disable OpenSearch audit logs
 - [#3218](https://github.com/epiphany-platform/epiphany/issues/3218) - Add support for original output coloring
 - [#3079](https://github.com/epiphany-platform/epiphany/issues/3079) - OpenSearch improvement - add dedicated user for Filebeat
+- [#3207](https://github.com/epiphany-platform/epiphany/issues/3207) - Add filtering mechanism for the sensitive data
 
 ### Fixed
 
@@ -25,6 +26,7 @@
 - [#3152](https://github.com/epiphany-platform/epiphany/issues/3152) - Use a stable tag for the quay.io/ceph/ceph:v16.2.7 image
 - [#3209](https://github.com/epiphany-platform/epiphany/issues/3209) - [Ubuntu] download-requirements.py ignores package version when resolving dependencies
 - [#3231](https://github.com/epiphany-platform/epiphany/issues/3231) - epicli may fail on "Download image haproxy-2.2.2-alpine.tar" task
+- [#3210](https://github.com/epiphany-platform/epiphany/issues/3210) - [Ubuntu] download-requirements.py downloads redundant package dependencies
 
 ### Updated
diff --git a/docs/home/COMPONENTS.md b/docs/home/COMPONENTS.md
index 76517bbd0e..a5e7693f35 100644
--- a/docs/home/COMPONENTS.md
+++ b/docs/home/COMPONENTS.md
@@ -43,8 +43,10 @@ Note that versions are default versions and can be changed in certain cases thro
 | ------------------------- | ------- | ----------------------------------------------------- | ----------------------------------------------------------------- |
 | Terraform | 1.1.3 | https://www.terraform.io/ | [Mozilla Public License 2.0](https://github.com/hashicorp/terraform/blob/master/LICENSE) |
 | Terraform AzureRM provider | 2.91.0 | https://github.com/terraform-providers/terraform-provider-azurerm | [Mozilla Public License 2.0](https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/LICENSE) |
-| Terraform AWS provider | 3.71.0 | https://github.com/terraform-providers/terraform-provider-aws | [Mozilla Public License 2.0](https://github.com/terraform-providers/terraform-provider-aws/blob/master/LICENSE) |
-| Crane | 0.11.0 | https://github.com/google/go-containerregistry/tree/main/cmd/crane | [Apache License 2.0](https://github.com/google/go-containerregistry/blob/main/LICENSE) |
+| Terraform AWS provider | 3.71.0 | https://github.com/terraform-providers/terraform-provider-aws | [Mozilla Public License 2.0](https://github.com/terraform-providers/terraform-provider-aws/blob/master/LICENSE) |
+| Crane | 0.11.0 | https://github.com/google/go-containerregistry/tree/main/cmd/crane | [Apache License 2.0](https://github.com/google/go-containerregistry/blob/main/LICENSE) |
+| Git | latest | https://github.com/git/git | [GNU GENERAL PUBLIC LICENSE Version 2](https://github.com/git/git/blob/master/COPYING) |
+| aws-cli | 2.0.30 | https://github.com/aws/aws-cli | [Apache License 2.0](https://github.com/aws/aws-cli/blob/develop/LICENSE.txt) |
 
 ## Epicli Python dependencies
diff --git a/pytest.ini b/pytest.ini
index 042744a4e9..0d7b30de6e 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -5,4 +5,5 @@ filterwarnings =
     ignore:The distutils package is deprecated:DeprecationWarning:packaging.tags
 testpaths =
     tests/unit/
+    ansible/playbooks/roles/repository/library/tests/
    ansible/playbooks/roles/repository/files/download-requirements/tests/