diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index e05f6f82a3..445bc83c74 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -17,7 +17,7 @@ RUN : INSTALL APT REQUIREMENTS \ jq libc6-dev libcap2-bin libffi-dev lsb-release \ make musl-dev openssh-client procps \ psmisc ruby-full sudo tar \ - unzip vim \ + unzip vim rsync \ && apt-get -q autoremove -y \ && apt-get -q clean -y \ && rm -rf /var/lib/apt/lists/* diff --git a/Dockerfile b/Dockerfile index d9a7272aea..e0651329ae 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ COPY . /epicli RUN : INSTALL APT REQUIREMENTS \ && apt-get update \ && apt-get install --no-install-recommends -y \ - autossh curl gcc jq libcap2-bin libc6-dev libffi-dev make musl-dev openssh-client procps psmisc ruby-full sudo tar unzip vim \ + autossh curl gcc jq libcap2-bin libc6-dev libffi-dev make musl-dev openssh-client procps psmisc ruby-full sudo tar unzip vim rsync \ \ && : INSTALL HELM BINARY \ && curl -fsSLO https://get.helm.sh/helm-v${HELM_VERSION}-linux-amd64.tar.gz \ diff --git a/ansible/playbooks/roles/repository/files/client/Debian/enable-system-repos.sh b/ansible/playbooks/roles/repository/files/client/Debian/enable-system-repos.sh deleted file mode 100644 index 12fa5c1aba..0000000000 --- a/ansible/playbooks/roles/repository/files/client/Debian/enable-system-repos.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -eu - -REPOS_BACKUP_FILE=/var/tmp/enabled-system-repos.tar - -tar -C / --absolute-name -xvf ${REPOS_BACKUP_FILE} 2>&1 \ No newline at end of file diff --git a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.aarch64.sh b/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.aarch64.sh deleted file mode 100644 index 637aadfc83..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.aarch64.sh +++ /dev/null @@ -1 +0,0 @@ -#!/usr/bin/env bash -eu diff --git 
a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.multiarch.sh b/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.multiarch.sh deleted file mode 100644 index c59a6a3e74..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.multiarch.sh +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env bash -eu - -DOCKER_CE_PATCHED_REPO_CONF=$(cat <<'EOF' -[docker-ce-stable-patched] -name=Docker CE Stable - patched centos/7/$basearch/stable -baseurl=https://download.docker.com/linux/centos/7/$basearch/stable -enabled=1 -gpgcheck=1 -gpgkey=https://download.docker.com/linux/centos/gpg -EOF -) - -ELASTIC_6_REPO_CONF=$(cat <<'EOF' -[elastic-6] -name=Elastic repository for 6.x packages -baseurl=https://artifacts.elastic.co/packages/oss-6.x/yum -gpgcheck=1 -gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch -enabled=1 -autorefresh=1 -type=rpm-md -EOF -) - -ELASTICSEARCH_7_REPO_CONF=$(cat <<'EOF' -[elasticsearch-7.x] -name=Elasticsearch repository for 7.x packages -baseurl=https://artifacts.elastic.co/packages/oss-7.x/yum -gpgcheck=1 -gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch -enabled=1 -autorefresh=1 -type=rpm-md -EOF -) - -ELASTICSEARCH_CURATOR_REPO_CONF=$(cat <<'EOF' -[curator-5] -name=CentOS/RHEL 7 repository for Elasticsearch Curator 5.x packages -baseurl=https://packages.elastic.co/curator/5/centos/7 -gpgcheck=1 -gpgkey=https://packages.elastic.co/GPG-KEY-elasticsearch -enabled=1 -EOF -) - -KUBERNETES_REPO_CONF=$(cat <<'EOF' -[kubernetes] -name=Kubernetes -baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-$basearch -enabled=1 -gpgcheck=1 -repo_gpgcheck=1 -gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg -EOF -) - -OPENDISTRO_REPO_CONF=$(cat <<'EOF' -[opendistroforelasticsearch-artifacts-repo] -name=Release RPM artifacts of 
OpenDistroForElasticsearch -baseurl=https://d3g5vo6xdbdb9a.cloudfront.net/yum/noarch/ -enabled=1 -gpgkey=https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch -gpgcheck=1 -repo_gpgcheck=1 -autorefresh=1 -type=rpm-md -EOF -) - -POSTGRESQL_REPO_CONF=$(cat <<'EOF' -[pgdg13] -name=PostgreSQL 13 for RHEL/CentOS $releasever - $basearch -baseurl=https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-$releasever-$basearch -enabled=1 -gpgcheck=1 -gpgkey=https://download.postgresql.org/pub/repos/yum/RPM-GPG-KEY-PGDG -EOF -) - -POSTGRESQL_COMMON_REPO_CONF=$(cat <<'EOF' -[pgdg-common] -name=PostgreSQL common for RHEL/CentOS $releasever - $basearch -baseurl=https://download.postgresql.org/pub/repos/yum/common/redhat/rhel-$releasever-$basearch -enabled=1 -gpgcheck=1 -gpgkey=https://download.postgresql.org/pub/repos/yum/RPM-GPG-KEY-PGDG -EOF -) - -RABBITMQ_SERVER_REPO_CONF=$(cat <<'EOF' -[rabbitmq-server] -name=rabbitmq-rpm -baseurl=https://packagecloud.io/rabbitmq/rabbitmq-server/el/7/$basearch -gpgcheck=1 -gpgkey=https://packagecloud.io/rabbitmq/rabbitmq-server/gpgkey -repo_gpgcheck=1 -sslcacert=/etc/pki/tls/certs/ca-bundle.crt -enabled=1 -EOF -) - -# Official Docker CE repository, added with https://download.docker.com/linux/centos/docker-ce.repo, -# has broken URL (https://download.docker.com/linux/centos/7Server/x86_64/stable) for longer time. -# So direct (patched) link is used first if available. -add_repo_as_file 'docker-ce-stable-patched' "$DOCKER_CE_PATCHED_REPO_CONF" -if ! 
is_repo_available "docker-ce-stable-patched"; then - disable_repo "docker-ce-stable-patched" - add_repo 'docker-ce' 'https://download.docker.com/linux/centos/docker-ce.repo' -fi -add_repo_as_file 'elastic-6' "$ELASTIC_6_REPO_CONF" -add_repo_as_file 'elasticsearch-7' "$ELASTICSEARCH_7_REPO_CONF" -add_repo_as_file 'elasticsearch-curator-5' "$ELASTICSEARCH_CURATOR_REPO_CONF" -add_repo_as_file 'kubernetes' "$KUBERNETES_REPO_CONF" -add_repo_as_file 'opendistroforelasticsearch' "$OPENDISTRO_REPO_CONF" -add_repo_as_file 'postgresql-13' "$POSTGRESQL_REPO_CONF" -add_repo_as_file 'rabbitmq' "$RABBITMQ_SERVER_REPO_CONF" diff --git a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.x86_64.sh b/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.x86_64.sh deleted file mode 100644 index 20bed5ddf5..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/add-repositories.x86_64.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash -eu - -add_repo_from_script 'https://dl.2ndquadrant.com/default/release/get/10/rpm' # for repmgr -add_repo_from_script 'https://dl.2ndquadrant.com/default/release/get/13/rpm' - -disable_repo '2ndquadrant-dl-default-release-pg10-debug' # script adds 2 repositories, only 1 is required -disable_repo '2ndquadrant-dl-default-release-pg13-debug' diff --git a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/download-requirements.sh b/ansible/playbooks/roles/repository/files/download-requirements/centos-7/download-requirements.sh deleted file mode 100644 index 07665aeec8..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/download-requirements.sh +++ /dev/null @@ -1,799 +0,0 @@ -#!/usr/bin/env bash - -# VERSION 1.0.5 - -# NOTE: You can run only one instance of this script, new instance kills the previous one -# This limitation is for Ansible - -set -euo pipefail - -# set variables needed by 
common_functions -readonly internet_access_checks_enabled="yes" -readonly script_path="$(readlink -f $(dirname $0))" -. "${script_path}/common/common_functions.sh" - -# === Functions (in alphabetical order) === - -# params: -add_repo() { - local repo_id="$1" - local repo_url="$2" - - if ! is_repo_enabled "$repo_id"; then - echol "Adding repository: $repo_id" - yum-config-manager --add-repo "$repo_url" || - exit_with_error "Command failed: yum-config-manager --add-repo \"$repo_url\"" - # to accept import of GPG keys - yum -y repolist > /dev/null || - exit_with_error "Command failed: yum -y repolist" - fi -} - -# params: -add_repo_as_file() { - local repo_id="$1" - local config_file_content="$2" - local config_file_name="$repo_id.repo" - - if ! is_repo_enabled "$repo_id"; then - echol "Adding repository: $repo_id" - cat <<< "$config_file_content" > "/etc/yum.repos.d/$config_file_name" || - exit_with_error "Function add_repo_as_file failed for repo: $repo_id" - local -a gpg_key_urls - IFS=" " read -r -a gpg_key_urls \ - <<< "$(grep -i --only-matching --perl-regexp '(?<=^gpgkey=)http[^#\n]+' <<< "$config_file_content")" - if (( ${#gpg_key_urls[@]} > 0 )); then - import_repo_gpg_keys "${gpg_key_urls[@]}" 3 - fi - # to accept import of repo's GPG key (for repo_gpgcheck=1) - yum -y repolist > /dev/null || exit_with_error "Command failed: yum -y repolist" - fi -} - -# params: -add_repo_from_script() { - local script_url="$1" - - echol "Running: curl $script_url | bash" - curl "$script_url" | bash -} - -# params: ... 
[path_N_to_backup] -backup_files() { - local backup_file_path="$1" - shift - local paths_to_backup=("$@") - - # --directory='/' is for tar --verify - tar --create --verbose --verify --directory="/" --file="$backup_file_path" "${paths_to_backup[@]}" -} - -# params: -create_directory() { - local dir_path="$1" - - if [[ -d "$dir_path" ]]; then - echol "Directory $dir_path already exists" - else - echol "Creating directory: $dir_path" - mkdir -p "$dir_path" || exit_with_error "Command failed: mkdir -p \"$dir_path\"" - fi -} - -# params: -disable_repo() { - local repo_id="$1" - - if yum repolist enabled | grep --quiet "$repo_id"; then - echol "Disabling repository: $repo_id" - yum-config-manager --disable "$repo_id" || - exit_with_error "Command failed: yum-config-manager --disable \"$repo_id\"" - fi -} - -# params: [new_filename] -download_file() { - local file_url="$1" - local dest_dir="$2" - - if [[ ${3-} ]]; then - local file_name=$3 - else - local file_name - file_name=$(basename "$file_url") - fi - - local dest_path="${dest_dir}/${file_name}" - local retries=3 - - if [[ ${3-} ]]; then - echol "Downloading file: $file_url as $file_name" - run_cmd_with_retries wget --quiet --directory-prefix="$dest_dir" "$file_url" -O "$dest_path" $retries || \ - exit_with_error "Command failed: wget --no-verbose --directory-prefix=$dest_dir $file_url $retries" - else - echol "Downloading file: $file_url" - run_cmd_with_retries wget --quiet --directory-prefix="$dest_dir" "$file_url" $retries || \ - exit_with_error "Command failed: wget --no-verbose --directory-prefix=$dest_dir $file_url $retries" - fi -} - -# params: -download_image() { - local image_name="$1" - local dest_dir="$2" - - local splited_image=(${image_name//:/ }) - local repository=${splited_image[0]} - local tag=${splited_image[1]} - local repo_basename=$(basename -- "$repository") - local dest_path="${dest_dir}/${repo_basename}-${tag}.tar" - local retries=3 - - if [[ -f $dest_path ]]; then - echol "Image file: 
$dest_path already exists. Skipping..." - else - # use temporary file for downloading to be safe from sudden interruptions (network, ctrl+c) - local tmp_file_path=$(mktemp) - local crane_cmd="$CRANE_BIN pull --insecure --platform=${DOCKER_PLATFORM} --format=legacy ${image_name} ${tmp_file_path}" - echol "Downloading image: $image" - { run_cmd_with_retries $crane_cmd $retries && chmod 644 $tmp_file_path && mv $tmp_file_path $dest_path; } || - exit_with_error "crane failed, command was: $crane_cmd && chmod 644 $tmp_file_path && mv $tmp_file_path $dest_path" - fi -} - -# params: ... [package_N] -download_packages() { - local dest_dir="$1" - shift - local packages="$@" - local retries=3 - - if [[ -n $packages ]]; then - # when using --archlist=x86_64 yumdownloader (yum-utils-1.1.31-52) also downloads i686 packages - run_cmd_with_retries yumdownloader --quiet --archlist="$ARCH" --exclude='*i686' --destdir="$dest_dir" $packages $retries - fi -} - -# params: -enable_repo() { - local repo_id="$1" - - if ! yum repolist enabled | grep --quiet "$repo_id"; then - echol "Enabling repository: $repo_id" - yum-config-manager --enable "$repo_id" || - exit_with_error "Command failed: yum-config-manager --enable \"$repo_id\"" - fi -} - -# params: -get_package_dependencies_with_arch() { - # $1 reserved for result - local package="$2" - - local query_output=$(repoquery --requires --resolve --queryformat '%{name}.%{arch}' --archlist=$ARCH,noarch "$package") || - exit_with_error "repoquery failed for dependencies of package: $package with exit code: $?, output was: $query_output" - - if [[ -z $query_output ]]; then - echol "No dependencies found for package: $package" - elif grep --ignore-case --perl-regexp '\b(? 
-get_package_with_version_arch() { - # $1 reserved for result - local package="$2" - - local query_output=$(repoquery --queryformat '%{ui_nevra}' --archlist=$ARCH,noarch "$package") || - exit_with_error "repoquery failed for package: $package with exit code: $?, output was: $query_output" - - # yumdownloader doesn't set error code if repoquery returns empty output - [[ -n $query_output ]] || exit_with_error "repoquery failed: package $package not found" - if grep --ignore-case --perl-regexp '\b(? -get_packages_with_version_arch() { - local result_var_name="$1" - shift - local packages=("$@") - local packages_with_version_arch=() - - for package in "${packages[@]}"; do - get_package_with_version_arch 'QUERY_OUTPUT' "$package" - packages_with_version_arch+=("$QUERY_OUTPUT") - done - - eval $result_var_name='("${packages_with_version_arch[@]}")' -} - -# params: -get_requirements_from_group() { - # $1 reserved for result - local group_name="$2" - local requirements_file_path="$3" - local all_requirements=$(grep --only-matching '^[^#]*' "$requirements_file_path" | sed -e 's/[[:space:]]*$//') - - if [[ $group_name == "files" ]]; then - local requirements_from_group=$(awk "/^$/ {next}; /\[${group_name}\]/ {f=1; f=2; next}; /^\[/ {f=0}; f {print \$0}" <<< "$all_requirements") || - exit_with_error "Function get_requirements_from_group failed for group: $group_name" - else - local requirements_from_group=$(awk "/^$/ {next}; /\[${group_name}\]/ {f=1; next}; /^\[/ {f=0}; f {print \$0}" <<< "$all_requirements") || - exit_with_error "Function get_requirements_from_group failed for group: $group_name" - fi - - [[ -n $requirements_from_group ]] || echol "No requirements found for group: $group_name" - - eval $1='$requirements_from_group' -} - -# params: -get_unique_array() { - local result_var_name="$1" - shift - local array=("$@") - - # filter out duplicates - array=($(echo "${array[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' ')) - - eval $result_var_name='("${array[@]}")' -} - -# 
params: -import_repo_gpg_keys() { - local retries=${!#} # get last arg - local urls=( "${@:1:$# - 1}" ) # remove last arg - - for url in "${urls[@]}"; do - run_cmd_with_retries rpm --import "$url" "$retries" - done -} - -# params: [package_name] -install_package() { - local package_name_or_url="$1" - local package_name="$1" - - [ $# -gt 1 ] && package_name="$2" - - echol "Installing package: $package_name" - if yum install -y "$package_name_or_url"; then - echo "$package_name" >> "$INSTALLED_PACKAGES_FILE_PATH" - else - exit_with_error "Command failed: yum install -y \"$package_name_or_url\"" - fi -} - -# params: -is_package_installed() { - local package="$1" - - if rpm --query --quiet "$package"; then - echol "Package $package already installed" - return 0 - else - return 1 - fi -} - -# params: -is_repo_available() { - local repo_id="$1" - - echol "Checking if '$repo_id' repo is available" - yum -q --disablerepo=* --enablerepo="$repo_id" repoinfo > /dev/null # returns 1 when 'Error 404 - Not Found' -} - -# params: -is_repo_enabled() { - local repo_id="$1" - - if yum repolist | grep --quiet "$repo_id"; then - echol "Repository $repo_id already enabled" - return 0 - else - return 1 - fi -} - -# params: -remove_package() { - local package="$1" - - if rpm --query --quiet "$package"; then - echol "Removing package: $package" - yum remove -y "$package" || exit_with_error "Command failed: yum remove -y \"$package\"" - fi -} - -# params: -remove_added_repos() { - local yum_repos_backup_tar_file_path="$1" - - declare -A initial_yum_repo_files - for repo_config_file in $(tar -tf "$yum_repos_backup_tar_file_path" | grep '.repo$' | xargs -L 1 --no-run-if-empty basename); do - initial_yum_repo_files["$repo_config_file"]=1 - done - - for repo_config_file in $(find /etc/yum.repos.d/ -maxdepth 1 -type f -name '*.repo' -printf "%f\n"); do - if (( ${initial_yum_repo_files["$repo_config_file"]:-0} == 0)); then - # remove only if not owned by a package - if ! 
rpm --quiet --query --file "/etc/yum.repos.d/$repo_config_file"; then - remove_file "/etc/yum.repos.d/$repo_config_file" - fi - fi - done -} - -# params: -remove_file() { - local file_path="$1" - - echol "Removing file: $file_path" - rm -f "$file_path" || exit_with_error "Command failed: rm -f \"$file_path\"" -} - -# params: -remove_installed_packages() { - local installed_packages_list_file="$1" - - if [ -f "$installed_packages_list_file" ]; then - for package in $(cat $installed_packages_list_file | sort --unique); do - remove_package "$package" - done - remove_file "$installed_packages_list_file" - fi -} - -remove_yum_cache_for_untracked_repos() { - local basearch releasever - basearch=$(uname --machine) - releasever=$(rpm -q --provides "$(rpm -q --whatprovides 'system-release(releasever)')" | grep "system-release(releasever)" | cut -d ' ' -f 3) - local cachedir find_output - cachedir=$(grep --only-matching --perl-regexp '(?<=^cachedir=)[^#\n]+' /etc/yum.conf) - cachedir="${cachedir/\$basearch/$basearch}" - cachedir="${cachedir/\$releasever/$releasever}" - find_output=$(find "$cachedir" -mindepth 1 -maxdepth 1 -type d -exec basename '{}' ';') - local -a repos_with_cache=() - if [ -n "$find_output" ]; then - readarray -t repos_with_cache <<< "$find_output" - fi - local all_repos_output - all_repos_output=$(yum repolist -v all | grep --only-matching --perl-regexp '(?<=^Repo-id)[^/]+' | sed -e 's/^[[:space:]:]*//') - local -a all_repos=() - readarray -t all_repos <<< "$all_repos_output" - if (( ${#repos_with_cache[@]} > 0 )); then - for cached_repo in "${repos_with_cache[@]}"; do - if ! 
_in_array "$cached_repo" "${all_repos[@]}"; then - run_cmd rm -rf "$cachedir/$cached_repo" - fi - done - fi -} - -# Runs command as array with printing it, doesn't support commands with shell operators (such as pipe or redirection) -# params: [--no-exit-on-error] -run_cmd() { - local cmd_arr=("$@") - - local exit_on_error=1 - if [[ ${cmd_arr[-1]} == '--no-exit-on-error' ]]; then - exit_on_error=0 - cmd_arr=( "${cmd_arr[@]:0:$# - 1}" ) # remove last item - fi - - local escaped_string return_code - escaped_string=$(_print_array_as_shell_escaped_string "${cmd_arr[@]}") - echol "Executing: ${escaped_string}" - "${cmd_arr[@]}"; return_code=$? - if (( return_code != 0 )) && (( exit_on_error )); then - exit_with_error "Command failed: ${escaped_string}" - else - return $return_code - fi -} - -# Runs command with retries, doesn't support commands with shell operators (such as pipe or redirection) -# params: -run_cmd_with_retries() { - # pop 'retries' argument - local retries=${!#} # get last arg (indirect expansion) - set -- "${@:1:$#-1}" # set new "$@" - - local cmd_arr=("$@") - ( # sub-shell is used to limit scope for 'set +e' - set +e - trap - ERR # disable global trap locally - for ((i=0; i <= retries; i++)); do - run_cmd "${cmd_arr[@]}" '--no-exit-on-error' - return_code=$? 
- if (( return_code == 0 )); then - break - elif (( i < retries )); then - sleep 1 - echol "retrying ($(( i+1 ))/${retries})" - else - echol "ERROR: all attempts failed" - local escaped_string - escaped_string=$(_print_array_as_shell_escaped_string "${cmd_arr[@]}") - exit_with_error "Command failed: ${escaped_string}" - fi - done - return $return_code - ) -} - -usage() { - echo "usage: ./$(basename $0) [--no-logfile]" - echo "example: ./$(basename $0) /tmp/downloads" - exit 1 -} - -validate_bash_version() { - local major_version=${BASH_VERSINFO[0]} - local minor_version=${BASH_VERSINFO[1]} - local required_version=(4 2) # (minor major) - if (( major_version < ${required_version[0]} )) || (( minor_version < ${required_version[1]} )); then - exit_with_error "This script requires Bash version ${required_version[0]}.${required_version[1]} or higher." - fi -} - -# === Helper functions (in alphabetical order) === - -_get_shell_escaped_array() { - if (( $# > 0 )); then - printf '%q\n' "$@" - fi -} - -# params: -_in_array() { - local value=${1} - shift - local array=( "$@" ) - - (( ${#array[@]} > 0 )) && printf '%s\n' "${array[@]}" | grep -q -Fx "$value" -} - -# Prints string in format that can be reused as shell input (escapes non-printable characters) -_print_array_as_shell_escaped_string() { - local output - output=$(_get_shell_escaped_array "$@") - local escaped=() - if [ -n "$output" ]; then - readarray -t escaped <<< "$output" - fi - if (( ${#escaped[@]} > 0 )); then - printf '%s\n' "${escaped[*]}" - fi -} - -# === Start === - -validate_bash_version - -if [[ $# -lt 1 ]]; then - usage >&2 -fi - -readonly START_TIME=$(date +%s) - -# --- Parse arguments --- - -POSITIONAL_ARGS=() -CREATE_LOGFILE='yes' -while [[ $# -gt 0 ]]; do - case $1 in - --no-logfile) - CREATE_LOGFILE='no' - shift # past argument - ;; - *) # unknown option - POSITIONAL_ARGS+=("$1") # save it in an array for later - shift - ;; - esac -done -set -- "${POSITIONAL_ARGS[@]}" # restore positional arguments 
- -# --- Global variables --- - -# dirs -readonly DOWNLOADS_DIR="$1" # root directory for downloads -readonly FILES_DIR="${DOWNLOADS_DIR}/files" -readonly PACKAGES_DIR="${DOWNLOADS_DIR}/packages" -readonly IMAGES_DIR="${DOWNLOADS_DIR}/images" -readonly REPO_PREREQ_PACKAGES_DIR="${PACKAGES_DIR}/repo-prereqs" -readonly SCRIPT_DIR="$(dirname $(readlink -f $0))" # want absolute path - -# files -readonly SCRIPT_FILE_NAME=$(basename "$0") -readonly LOG_FILE_NAME="${SCRIPT_FILE_NAME}.log" -readonly LOG_FILE_PATH="${SCRIPT_DIR}/${LOG_FILE_NAME}" -readonly YUM_CONFIG_BACKUP_FILE_PATH="${SCRIPT_DIR}/${SCRIPT_FILE_NAME}-yum-repos-backup-tmp-do-not-remove.tar" -readonly CRANE_BIN="${SCRIPT_DIR}/crane" -readonly INSTALLED_PACKAGES_FILE_PATH="${SCRIPT_DIR}/${SCRIPT_FILE_NAME}-installed-packages-list-do-not-remove.tmp" -readonly PID_FILE_PATH="/var/run/${SCRIPT_FILE_NAME}.pid" -readonly ADD_MULTIARCH_REPOSITORIES_SCRIPT="${SCRIPT_DIR}/add-repositories.multiarch.sh" - -#arch -readonly ARCH=$(uname -m) -echol "Detected arch: ${ARCH}" -readonly REQUIREMENTS_FILE_PATH="${SCRIPT_DIR}/requirements.${ARCH}.txt" -readonly ADD_ARCH_REPOSITORIES_SCRIPT="${SCRIPT_DIR}/add-repositories.${ARCH}.sh" -case $ARCH in -x86_64) - readonly DOCKER_PLATFORM="linux/amd64" - ;; - -aarch64) - readonly DOCKER_PLATFORM="linux/arm64" - ;; - -*) - exit_with_error "Arch ${ARCH} unsupported" - ;; -esac -echol "Docker platform: ${DOCKER_PLATFORM}" - -# --- Checks --- - -[ $EUID -eq 0 ] || { echo "You have to run as root" && exit 1; } - -[[ -f $REQUIREMENTS_FILE_PATH ]] || exit_with_error "File not found: $REQUIREMENTS_FILE_PATH" - -# --- Want to have only one instance for Ansible --- - -if [ -f $PID_FILE_PATH ]; then - readonly PID_FROM_FILE=$(cat $PID_FILE_PATH 2> /dev/null) - if [[ -n $PID_FROM_FILE ]] && kill -0 $PID_FROM_FILE > /dev/null 2>&1; then - echol "Found running process with pid: $PID_FROM_FILE, cmd: $(ps -p $PID_FROM_FILE -o cmd=)" - if ps -p $PID_FROM_FILE -o cmd= | grep --quiet 
$SCRIPT_FILE_NAME; then - echol "Killing old instance using SIGTERM" - kill -s SIGTERM $PID_FROM_FILE # try gracefully - if sleep 3 && kill -0 $PID_FROM_FILE > /dev/null 2>&1; then - echol "Still running, killing old instance using SIGKILL" - kill -s SIGKILL $PID_FROM_FILE # forcefully - fi - else - remove_file $PID_FILE_PATH - exit_with_error "Process with pid: $PID_FILE_PATH seems to be not an instance of this script" - fi - else - echol "Process with pid: $PID_FROM_FILE not found" - fi - remove_file $PID_FILE_PATH -fi - -echol "PID is: $$, creating file: $PID_FILE_PATH" -echo $$ > $PID_FILE_PATH || exit_with_error "Command failed: echo $$ > $PID_FILE_PATH" - -# --- Parse requirements file --- - -# Requirements are grouped using sections: [packages-repo-prereqs], [packages], [files], [images] -get_requirements_from_group 'REPO_PREREQ_PACKAGES' 'packages-repo-prereqs' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'CRANE' 'crane' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'PACKAGES' 'packages' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'FILES' 'files' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'IMAGES' 'images' "$REQUIREMENTS_FILE_PATH" - -# === Packages === - -check_connection yum $(yum repolist --quiet | tail -n +2 | cut -d' ' -f1 | cut -d'/' -f1 | sed 's/^!//') - -# --- Backup yum repositories --- - -if [ -f "$YUM_CONFIG_BACKUP_FILE_PATH" ]; then - echol "Backup aleady exists: $YUM_CONFIG_BACKUP_FILE_PATH" -else - echol "Backuping /etc/yum.repos.d/ to $YUM_CONFIG_BACKUP_FILE_PATH" - if backup_files "$YUM_CONFIG_BACKUP_FILE_PATH" '/etc/yum.repos.d/'; then - echol "Backup done" - else - if [ -f "$YUM_CONFIG_BACKUP_FILE_PATH" ]; then - remove_file "$YUM_CONFIG_BACKUP_FILE_PATH" - fi - exit_with_error "Backup of yum repositories failed" - fi -fi - -# --- Restore system repositories in case epirepo is enabled - -enable_system_repos_script="/var/tmp/epi-repository-setup-scripts/enable-system-repos.sh" 
-disable_epirepo_client_script="/var/tmp/epi-repository-setup-scripts/disable-epirepo-client.sh" - -if [[ -f /etc/yum.repos.d/epirepo.repo ]]; then - if [[ -f /var/tmp/enabled-system-repos.txt && -f $enable_system_repos_script ]]; then - echol "OS repositories seems missing, restoring..." - $enable_system_repos_script || exit_with_error "Could not restore system repositories" - $disable_epirepo_client_script || exit_with_error "Could not disable epirepo" - else - echol "/var/tmp/enabled-system-repos.txt or $enable_system_repos_script seems missing, you either know what you're doing or you need to fix your repositories" - fi -fi - -# --- Install required packages unless present --- - -# repos can be enabled or disabled using the yum-config-manager command, which is provided by yum-utils package -for package in 'yum-utils' 'wget' 'curl' 'tar'; do - if ! is_package_installed "$package"; then - install_package "$package" - fi -done - -# --- Download and setup Crane for downloading images --- - -if [[ -z "${CRANE}" ]] || [ $(wc -l <<< "${CRANE}") -ne 1 ] ; then - exit_with_error "Crane binary download path undefined or more than one download path defined" -else - if [[ -x $CRANE_BIN ]]; then - echol "Crane binary already exists" - else - file_url=$(head -n 1 <<< "${CRANE}") - - check_connection wget $file_url - - echol "Downloading crane from: ${file_url}" - download_file "${file_url}" "${SCRIPT_DIR}" - tar_path="${SCRIPT_DIR}/${file_url##*/}" - echol "Unpacking crane from ${tar_path} to ${CRANE_BIN}" - run_cmd tar -xzf "${tar_path}" --directory "${SCRIPT_DIR}" "crane" --overwrite - [[ -x "${CRANE_BIN}" ]] || run_cmd chmod +x "${CRANE_BIN}" - remove_file "${tar_path}" - fi -fi - -# --- Enable CentOS repos --- - -# -> CentOS-7 - Extras # for container-selinux and centos-release-scl packages -enable_repo 'extras' -# -> CentOS-7 - Base # for python dependencies -enable_repo 'base' - -# --- Add repos --- - -# noarch repositories -. 
${ADD_MULTIARCH_REPOSITORIES_SCRIPT} - -# arch specific repositories -. ${ADD_ARCH_REPOSITORIES_SCRIPT} -# -> Software Collections (SCL) https://wiki.centos.org/AdditionalResources/Repositories/SCL -if ! is_package_installed 'centos-release-scl'; then - # from extras repo - install_package 'centos-release-scl-rh' - install_package 'centos-release-scl' -fi - -# some packages are from EPEL repo -if ! is_package_installed 'epel-release'; then - install_package 'https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm' 'epel-release' -fi - -# clean metadata for upgrades (when the same package can be downloaded from changed repo) -run_cmd remove_yum_cache_for_untracked_repos - -run_cmd_with_retries yum -y makecache fast 3 - -# --- Download packages --- - -# 1) packages required to create repository - -create_directory "$REPO_PREREQ_PACKAGES_DIR" - -# prepare lists -PREREQ_PACKAGES=() -for package in $REPO_PREREQ_PACKAGES; do - echol "Processing package: $package" - get_package_with_version_arch 'QUERY_OUTPUT' "$package" - PREREQ_PACKAGES+=("$QUERY_OUTPUT") -done - -# download requirements (fixed versions) -if [[ ${#PREREQ_PACKAGES[@]} -gt 0 ]]; then - echol "Downloading repository prerequisite packages (${#PREREQ_PACKAGES[@]})..." 
- download_packages "$REPO_PREREQ_PACKAGES_DIR" "${PREREQ_PACKAGES[@]}" -fi - -# 2) non-prerequisite packages - -create_directory "$PACKAGES_DIR" - -# prepare lists -NON_PREREQ_PACKAGES=() -DEPENDENCIES_OF_NON_PREREQ_PACKAGES=() -for package in $PACKAGES; do - echol "Processing package: $package" - get_package_with_version_arch 'QUERY_OUTPUT' "$package" - NON_PREREQ_PACKAGES+=("$QUERY_OUTPUT") - get_package_dependencies_with_arch 'DEPENDENCIES' "$package" - if [[ ${#DEPENDENCIES[@]} -gt 0 ]]; then - for dependency in "${DEPENDENCIES[@]}"; do - DEPENDENCIES_OF_NON_PREREQ_PACKAGES+=("$dependency") - done - fi -done - -if [[ ${#NON_PREREQ_PACKAGES[@]} -gt 0 ]]; then - # download requirements (fixed versions) - echol "Downloading packages (${#NON_PREREQ_PACKAGES[@]})..." - download_packages "$PACKAGES_DIR" "${NON_PREREQ_PACKAGES[@]}" - # download dependencies (latest versions) - get_unique_array 'DEPENDENCIES' "${DEPENDENCIES_OF_NON_PREREQ_PACKAGES[@]}" - get_packages_with_version_arch 'DEPENDENCIES' "${DEPENDENCIES[@]}" - echol "Downloading dependencies of packages (${#DEPENDENCIES[@]})..." 
- download_packages "$PACKAGES_DIR" "${DEPENDENCIES[@]}" -fi - -# --- Clean up yum repos --- - -remove_added_repos "$YUM_CONFIG_BACKUP_FILE_PATH" - -# --- Restore yum repos --- - -echol "Restoring /etc/yum.repos.d/*.repo from: $YUM_CONFIG_BACKUP_FILE_PATH" -echol "Executing: tar --extract --verbose --file $YUM_CONFIG_BACKUP_FILE_PATH" -if tar --extract --verbose --file "$YUM_CONFIG_BACKUP_FILE_PATH" --directory /etc/yum.repos.d \ - --strip-components=2 'etc/yum.repos.d/*.repo'; then - echol "Restored: yum repositories" -else - exit_with_error "Extracting tar failed: $YUM_CONFIG_BACKUP_FILE_PATH" -fi - -# === Files === - -check_connection wget $FILES - -create_directory "$FILES_DIR" - -if [[ -z "$FILES" ]]; then - echol "No files to download" -else - # list of all files that will be downloaded - echol "Files to be downloaded:" - cat -n <<< "${FILES}" - - printf "\n" - - while IFS=' ' read -r url new_filename; do - # download files, check if new filename is provided - if [[ -z $new_filename ]]; then - download_file "$url" "$FILES_DIR" - elif [[ $new_filename = *" "* ]]; then - exit_with_error "wrong new filename for file: $url" - else - download_file "$url" "$FILES_DIR" "$new_filename" - fi - done <<< "$FILES" -fi - -# === Images === - -check_connection crane $(for image in $IMAGES; do splitted=(${image//:/ }); echo "${splitted[0]}"; done) - -create_directory "$IMAGES_DIR" - -for image in $IMAGES; do - download_image "$image" "$IMAGES_DIR" -done - -# --- Clean up --- - -remove_installed_packages "$INSTALLED_PACKAGES_FILE_PATH" - -remove_file "$YUM_CONFIG_BACKUP_FILE_PATH" - -remove_file "$PID_FILE_PATH" - -readonly END_TIME=$(date +%s) - -echol "$SCRIPT_FILE_NAME finished, execution time: $(date -u -d @$((END_TIME-START_TIME)) +'%Hh:%Mm:%Ss')" diff --git a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.aarch64.txt b/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.aarch64.txt deleted file mode 
100644 index 7ecc3fe7f3..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.aarch64.txt +++ /dev/null @@ -1,253 +0,0 @@ -# Put this file in the same directory as download script - -[packages-repo-prereqs] -apr # for httpd -apr-util # for httpd -centos-logos -createrepo -deltarpm # for createrepo -httpd -httpd-tools # for httpd -libxml2 # for libxml2-python -libxml2-python # for createrepo -mailcap # for httpd -mod_ssl # for httpd -python-chardet # for createrepo -python-deltarpm # for createrepo -python-kitchen # for createrepo -yum-utils - -[crane] -https://github.com/google/go-containerregistry/releases/download/v0.4.1/go-containerregistry_Linux_arm64.tar.gz - -[packages] -audit # for docker-ce -bash-completion -ca-certificates -cifs-utils -conntrack-tools # for kubelet -containerd.io -container-selinux -#cri-tools-1.13.0 -curl -dejavu-sans-fonts # for grafana -docker-ce-20.10.8 -docker-ce-cli-20.10.8 -docker-ce-rootless-extras-20.10.8 -ebtables -elasticsearch-oss-7.10.2 # for opendistroforelasticsearch & logging roles -ethtool -filebeat-7.9.2 -firewalld -fontconfig # for grafana -fping -fuse-overlayfs # for docker-ce-rootless-extras -gnutls # for cifs-utils -gssproxy # for nfs-utils -htop -iftop -ipset # for firewalld -java-1.8.0-openjdk-headless -javapackages-tools # for java-1.8.0-openjdk-headless -jq -libini_config # for nfs-utils -libselinux-python -libsemanage-python -libX11 # for grafana -libxcb # for grafana -libXcursor # for grafana -libXt # for grafana -logrotate -net-tools -nfs-utils -nmap-ncat -# Open Distro for Elasticsearch plugins are installed individually to not download them twice in different versions (as dependencies of opendistroforelasticsearch package) -opendistro-alerting-1.13.1.* -opendistro-index-management-1.13.1.* -opendistro-job-scheduler-1.13.0.* -opendistro-performance-analyzer-1.13.0.* -opendistro-security-1.13.1.* -opendistro-sql-1.13.0.* -opendistroforelasticsearch-kibana-1.13.1 
# kibana has shorter version -openssl -unixODBC # for erlang -perl # for vim -perl-Getopt-Long # for vim -perl-libs # for vim -perl-Pod-Perldoc # for vim -perl-Pod-Simple # for vim -perl-Pod-Usage # for vim -pgaudit15_13-1.5.0 -policycoreutils-python # for container-selinux -pyldb # for cifs-utils -python-cffi # for python2-cryptography -python-firewall # for firewalld -python-kitchen # for yum-utils -python-lxml # for java-1.8.0-openjdk-headless -python-psycopg2 -python-pycparser # for python2-cryptography -python-setuptools -python-slip-dbus # for firewalld -python-ipaddress -python-backports -python2-cryptography # for Ansible (certificate modules) -python3-3.6.8 -quota # for nfs-utils -rabbitmq-server-3.8.9 -postgresql13-server -samba-client -samba-client-libs # for samba-client -samba-common -samba-libs # for cifs-utils -sysstat -tar -telnet -tmux -urw-base35-fonts # for grafana -unzip -vim-common # for vim -vim-enhanced -wget -xorg-x11-font-utils # for grafana -xorg-x11-server-utils # for grafana -yum-plugin-versionlock -yum-utils - -# to make remote-to-remote "synchronize" work in ansible -rsync - -# K8s v1.18.6 (Epiphany >= v0.7.1) -kubeadm-1.18.6 -kubectl-1.18.6 -kubelet-1.18.6 - -# K8s v1.19.15 (Epiphany >= v1.3, transitional version) -kubeadm-1.19.15 -kubectl-1.19.15 -kubelet-1.19.15 - -# K8s v1.20.12 (Epiphany >= v1.3, transitional version) -kubeadm-1.20.12 -kubectl-1.20.12 -kubelet-1.20.12 - -# K8s v1.21.7 (Epiphany >= v1.3, transitional version) -kubeadm-1.21.7 -kubectl-1.21.7 -kubelet-1.21.7 - -# K8s v1.22.4 -kubeadm-1.22.4 -kubectl-1.22.4 -kubelet-1.22.4 - -# Kubernetes Generic -kubernetes-cni-0.8.6-0 # since K8s v1.18.6 -# https://github.com/kubernetes/kubernetes/blob/v1.19.15/build/dependencies.yaml -kubernetes-cni-0.8.7-0 # since K8s v1.19.15 - -[files] -# --- Packages --- -# Github repository for erlang rpm is used since packagecloud repository is limited to a certain number of versions and erlang package from erlang-solutions repository is much 
more complex and bigger -https://packages.erlang-solutions.com/erlang/rpm/centos/7/aarch64/esl-erlang_23.1.5-1~centos~7_arm64.rpm -# Grafana package is not downloaded from repository since it was not reliable (issue #2449) -https://dl.grafana.com/oss/release/grafana-8.3.2-1.aarch64.rpm -# --- Exporters --- -https://github.com/danielqsj/kafka_exporter/releases/download/v1.4.0/kafka_exporter-1.4.0.linux-arm64.tar.gz -https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar -https://github.com/prometheus/node_exporter/releases/download/v1.3.1/node_exporter-1.3.1.linux-arm64.tar.gz -https://github.com/prometheus-community/postgres_exporter/releases/download/v0.10.0/postgres_exporter-0.10.0.linux-arm64.tar.gz -# --- Misc --- -https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz -https://github.com/prometheus/prometheus/releases/download/v2.31.1/prometheus-2.31.1.linux-arm64.tar.gz -https://github.com/prometheus/alertmanager/releases/download/v0.23.0/alertmanager-0.23.0.linux-arm64.tar.gz -https://archive.apache.org/dist/zookeeper/zookeeper-3.5.8/apache-zookeeper-3.5.8-bin.tar.gz -https://get.helm.sh/helm-v3.2.0-linux-arm64.tar.gz -https://archive.apache.org/dist/logging/log4j/2.17.1/apache-log4j-2.17.1-bin.tar.gz -# --- Helm charts --- -https://charts.bitnami.com/bitnami/node-exporter-2.3.17.tgz -https://helm.elastic.co/helm/filebeat/filebeat-7.9.2.tgz -# --- Grafana Dashboards --- -# Kubernetes Cluster -https://grafana.com/api/dashboards/7249/revisions/1/download grafana_dashboard_7249.json -# Kubernetes cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/315/revisions/3/download grafana_dashboard_315.json -# Node Exporter for Prometheus -https://grafana.com/api/dashboards/11074/revisions/9/download grafana_dashboard_11074.json -# Node Exporter Server Metrics -https://grafana.com/api/dashboards/405/revisions/8/download grafana_dashboard_405.json -# Postgres Overview 
-https://grafana.com/api/dashboards/455/revisions/2/download grafana_dashboard_455.json -# PostgreSQL Database -https://grafana.com/api/dashboards/9628/revisions/7/download grafana_dashboard_9628.json -# RabbitMQ Monitoring -https://grafana.com/api/dashboards/4279/revisions/4/download grafana_dashboard_4279.json -# Node Exporter Full -https://grafana.com/api/dashboards/1860/revisions/23/download grafana_dashboard_1860.json -# Kafka Exporter Overview -https://grafana.com/api/dashboards/7589/revisions/5/download grafana_dashboard_7589.json -# HaProxy backend (or frontend/servers) -https://grafana.com/api/dashboards/789/revisions/1/download grafana_dashboard_789.json -# Docker and Host Monitoring w/ Prometheus -https://grafana.com/api/dashboards/179/revisions/7/download grafana_dashboard_179.json -# Kubernetes pod and cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/6663/revisions/1/download grafana_dashboard_6663.json -# RabbitMQ cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/10991/revisions/11/download grafana_dashboard_10991.json - -[images] -haproxy:2.2.2-alpine -kubernetesui/dashboard:v2.3.1 -kubernetesui/metrics-scraper:v1.0.7 -registry:2 -# applications -epiphanyplatform/keycloak:14.0.0 -rabbitmq:3.8.9 -# K8s -## v1.18.6 -k8s.gcr.io/kube-apiserver:v1.18.6 -k8s.gcr.io/kube-controller-manager:v1.18.6 -k8s.gcr.io/kube-scheduler:v1.18.6 -k8s.gcr.io/kube-proxy:v1.18.6 -k8s.gcr.io/coredns:1.6.7 -k8s.gcr.io/etcd:3.4.3-0 -quay.io/coreos/flannel:v0.12.0-arm64 -quay.io/coreos/flannel:v0.12.0 -calico/cni:v3.15.0 -calico/kube-controllers:v3.15.0 -calico/node:v3.15.0 -calico/pod2daemon-flexvol:v3.15.0 -## v1.19.15 -k8s.gcr.io/kube-apiserver:v1.19.15 -k8s.gcr.io/kube-controller-manager:v1.19.15 -k8s.gcr.io/kube-scheduler:v1.19.15 -k8s.gcr.io/kube-proxy:v1.19.15 -## v1.20.12 -k8s.gcr.io/kube-apiserver:v1.20.12 -k8s.gcr.io/kube-controller-manager:v1.20.12 -k8s.gcr.io/kube-scheduler:v1.20.12 -k8s.gcr.io/kube-proxy:v1.20.12 
-k8s.gcr.io/coredns:1.7.0 -k8s.gcr.io/pause:3.2 -## v1.21.7 -k8s.gcr.io/kube-apiserver:v1.21.7 -k8s.gcr.io/kube-controller-manager:v1.21.7 -k8s.gcr.io/kube-scheduler:v1.21.7 -k8s.gcr.io/kube-proxy:v1.21.7 -k8s.gcr.io/coredns/coredns:v1.8.0 -k8s.gcr.io/etcd:3.4.13-0 -k8s.gcr.io/pause:3.4.1 -## v1.22.4 -k8s.gcr.io/kube-apiserver:v1.22.4 -k8s.gcr.io/kube-controller-manager:v1.22.4 -k8s.gcr.io/kube-scheduler:v1.22.4 -k8s.gcr.io/kube-proxy:v1.22.4 -k8s.gcr.io/coredns/coredns:v1.8.4 -k8s.gcr.io/etcd:3.5.0-0 -k8s.gcr.io/pause:3.5 -quay.io/coreos/flannel:v0.14.0-arm64 -quay.io/coreos/flannel:v0.14.0 -calico/cni:v3.20.3 -calico/kube-controllers:v3.20.3 -calico/node:v3.20.3 -calico/pod2daemon-flexvol:v3.20.3 diff --git a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.x86_64.txt b/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.x86_64.txt deleted file mode 100644 index fe4ac78f84..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/centos-7/requirements.x86_64.txt +++ /dev/null @@ -1,256 +0,0 @@ -# Put this file in the same directory as download script - -[packages-repo-prereqs] -apr # for httpd -apr-util # for httpd -centos-logos -createrepo -deltarpm # for createrepo -httpd -httpd-tools # for httpd -libxml2 # for libxml2-python -libxml2-python # for createrepo -mailcap # for httpd -mod_ssl # for httpd -python-chardet # for createrepo -python-deltarpm # for createrepo -python-kitchen # for createrepo -yum-utils - -[crane] -https://github.com/google/go-containerregistry/releases/download/v0.4.1/go-containerregistry_Linux_x86_64.tar.gz - -[packages] -audit # for docker-ce -bash-completion -ca-certificates -cifs-utils -conntrack-tools # for kubelet -containerd.io -container-selinux -cri-tools-1.13.0 -curl -dejavu-sans-fonts # for grafana -docker-ce-20.10.8 -docker-ce-cli-20.10.8 -docker-ce-rootless-extras-20.10.8 -ebtables -elasticsearch-curator-5.8.3 -elasticsearch-oss-7.10.2 
# for opendistroforelasticsearch & logging roles -ethtool -filebeat-7.9.2 -firewalld -fontconfig # for grafana -fping -fuse-overlayfs # for docker-ce-rootless-extras -gnutls # for cifs-utils -gssproxy # for nfs-utils -htop -iftop -ipset # for firewalld -java-1.8.0-openjdk-headless -javapackages-tools # for java-1.8.0-openjdk-headless -jq -libini_config # for nfs-utils -libselinux-python -libsemanage-python -libX11 # for grafana -libxcb # for grafana -libXcursor # for grafana -libXt # for grafana -logrotate -net-tools -nfs-utils -nmap-ncat -# Open Distro for Elasticsearch plugins are installed individually to not download them twice in different versions (as dependencies of opendistroforelasticsearch package) -opendistro-alerting-1.13.1.* -opendistro-index-management-1.13.1.* -opendistro-job-scheduler-1.13.0.* -opendistro-performance-analyzer-1.13.0.* -opendistro-security-1.13.1.* -opendistro-sql-1.13.0.* -opendistroforelasticsearch-kibana-1.13.1 # kibana has shorter version -openssl -perl # for vim -perl-Getopt-Long # for vim -perl-libs # for vim -perl-Pod-Perldoc # for vim -perl-Pod-Simple # for vim -perl-Pod-Usage # for vim -pgaudit15_13-1.5.0 -policycoreutils-python # for container-selinux -pyldb # for cifs-utils -python-cffi # for python2-cryptography -python-firewall # for firewalld -python-kitchen # for yum-utils -python-lxml # for java-1.8.0-openjdk-headless -python-psycopg2 -python-pycparser # for python2-cryptography -python-setuptools -python-slip-dbus # for firewalld -python-ipaddress -python-backports -python2-cryptography # for Ansible (certificate modules) -python3-3.6.8 -quota # for nfs-utils -rabbitmq-server-3.8.9 -postgresql13-server -repmgr10-5.2.1 # used to upgrade only repmgr -repmgr13-5.2.1 -samba-client -samba-client-libs # for samba-client -samba-common -samba-libs # for cifs-utils -sysstat -tar -telnet -tmux -urw-base35-fonts # for grafana -unzip -vim-common # for vim -vim-enhanced -wget -xorg-x11-font-utils # for grafana 
-xorg-x11-server-utils # for grafana -yum-plugin-versionlock -yum-utils - -# to make remote-to-remote "synchronize" work in ansible -rsync - -# K8s v1.18.6 (Epiphany >= v0.7.1) -kubeadm-1.18.6 -kubectl-1.18.6 -kubelet-1.18.6 - -# K8s v1.19.15 (Epiphany >= v1.3, transitional version) -kubeadm-1.19.15 -kubectl-1.19.15 -kubelet-1.19.15 - -# K8s v1.20.12 (Epiphany >= v1.3, transitional version) -kubeadm-1.20.12 -kubectl-1.20.12 -kubelet-1.20.12 - -# K8s v1.21.7 (Epiphany >= v1.3, transitional version) -kubeadm-1.21.7 -kubectl-1.21.7 -kubelet-1.21.7 - -# K8s v1.22.4 -kubeadm-1.22.4 -kubectl-1.22.4 -kubelet-1.22.4 - -# Kubernetes Generic -kubernetes-cni-0.8.6-0 # since K8s v1.18.6 -kubernetes-cni-0.8.7-0 # since K8s v1.19.15 - -[files] -# --- Packages --- -# Github repository for erlang rpm is used since packagecloud repository is limited to a certain number of versions and erlang package from erlang-solutions repository is much more complex and bigger -https://github.com/rabbitmq/erlang-rpm/releases/download/v23.1.5/erlang-23.1.5-1.el7.x86_64.rpm -# Grafana package is not downloaded from repository since it was not reliable (issue #2449) -https://dl.grafana.com/oss/release/grafana-8.3.2-1.x86_64.rpm -# --- Exporters --- -https://github.com/danielqsj/kafka_exporter/releases/download/v1.4.0/kafka_exporter-1.4.0.linux-amd64.tar.gz -https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar -https://github.com/prometheus/node_exporter/releases/download/v1.3.1/node_exporter-1.3.1.linux-amd64.tar.gz -https://github.com/prometheus-community/postgres_exporter/releases/download/v0.10.0/postgres_exporter-0.10.0.linux-amd64.tar.gz -# --- Misc --- -https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz -https://github.com/prometheus/prometheus/releases/download/v2.31.1/prometheus-2.31.1.linux-amd64.tar.gz -https://github.com/prometheus/alertmanager/releases/download/v0.23.0/alertmanager-0.23.0.linux-amd64.tar.gz 
-https://archive.apache.org/dist/zookeeper/zookeeper-3.5.8/apache-zookeeper-3.5.8-bin.tar.gz -https://get.helm.sh/helm-v3.2.0-linux-amd64.tar.gz -https://archive.apache.org/dist/logging/log4j/2.17.1/apache-log4j-2.17.1-bin.tar.gz -# --- Helm charts --- -https://helm.elastic.co/helm/filebeat/filebeat-7.9.2.tgz -https://charts.bitnami.com/bitnami/node-exporter-2.3.17.tgz -# --- Grafana Dashboards --- -# Kubernetes Cluster -https://grafana.com/api/dashboards/7249/revisions/1/download grafana_dashboard_7249.json -# Kubernetes cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/315/revisions/3/download grafana_dashboard_315.json -# Node Exporter for Prometheus -https://grafana.com/api/dashboards/11074/revisions/9/download grafana_dashboard_11074.json -# Node Exporter Server Metrics -https://grafana.com/api/dashboards/405/revisions/8/download grafana_dashboard_405.json -# Postgres Overview -https://grafana.com/api/dashboards/455/revisions/2/download grafana_dashboard_455.json -# PostgreSQL Database -https://grafana.com/api/dashboards/9628/revisions/7/download grafana_dashboard_9628.json -# RabbitMQ Monitoring -https://grafana.com/api/dashboards/4279/revisions/4/download grafana_dashboard_4279.json -# Node Exporter Full -https://grafana.com/api/dashboards/1860/revisions/23/download grafana_dashboard_1860.json -# Kafka Exporter Overview -https://grafana.com/api/dashboards/7589/revisions/5/download grafana_dashboard_7589.json -# HaProxy backend (or frontend/servers) -https://grafana.com/api/dashboards/789/revisions/1/download grafana_dashboard_789.json -# Docker and Host Monitoring w/ Prometheus -https://grafana.com/api/dashboards/179/revisions/7/download grafana_dashboard_179.json -# Kubernetes pod and cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/6663/revisions/1/download grafana_dashboard_6663.json -# RabbitMQ cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/10991/revisions/11/download 
grafana_dashboard_10991.json - -[images] -haproxy:2.2.2-alpine -kubernetesui/dashboard:v2.3.1 -kubernetesui/metrics-scraper:v1.0.7 -registry:2 -# applications -bitnami/pgpool:4.2.4 -bitnami/pgbouncer:1.16.0 -epiphanyplatform/keycloak:14.0.0 -rabbitmq:3.8.9 -# K8s -## v1.18.6 -k8s.gcr.io/kube-apiserver:v1.18.6 -k8s.gcr.io/kube-controller-manager:v1.18.6 -k8s.gcr.io/kube-scheduler:v1.18.6 -k8s.gcr.io/kube-proxy:v1.18.6 -k8s.gcr.io/coredns:1.6.7 -k8s.gcr.io/etcd:3.4.3-0 -quay.io/coreos/flannel:v0.12.0-amd64 -quay.io/coreos/flannel:v0.12.0 -calico/cni:v3.15.0 -calico/kube-controllers:v3.15.0 -calico/node:v3.15.0 -calico/pod2daemon-flexvol:v3.15.0 -## v1.19.15 -k8s.gcr.io/kube-apiserver:v1.19.15 -k8s.gcr.io/kube-controller-manager:v1.19.15 -k8s.gcr.io/kube-scheduler:v1.19.15 -k8s.gcr.io/kube-proxy:v1.19.15 -## v1.20.12 -k8s.gcr.io/kube-apiserver:v1.20.12 -k8s.gcr.io/kube-controller-manager:v1.20.12 -k8s.gcr.io/kube-scheduler:v1.20.12 -k8s.gcr.io/kube-proxy:v1.20.12 -k8s.gcr.io/coredns:1.7.0 -k8s.gcr.io/pause:3.2 -## v1.21.7 -k8s.gcr.io/kube-apiserver:v1.21.7 -k8s.gcr.io/kube-controller-manager:v1.21.7 -k8s.gcr.io/kube-scheduler:v1.21.7 -k8s.gcr.io/kube-proxy:v1.21.7 -k8s.gcr.io/coredns/coredns:v1.8.0 -k8s.gcr.io/etcd:3.4.13-0 -k8s.gcr.io/pause:3.4.1 -## v1.22.4 -k8s.gcr.io/kube-apiserver:v1.22.4 -k8s.gcr.io/kube-controller-manager:v1.22.4 -k8s.gcr.io/kube-scheduler:v1.22.4 -k8s.gcr.io/kube-proxy:v1.22.4 -k8s.gcr.io/coredns/coredns:v1.8.4 -k8s.gcr.io/etcd:3.5.0-0 -k8s.gcr.io/pause:3.5 -quay.io/coreos/flannel:v0.14.0-amd64 -quay.io/coreos/flannel:v0.14.0 -calico/cni:v3.20.3 -calico/kube-controllers:v3.20.3 -calico/node:v3.20.3 -calico/pod2daemon-flexvol:v3.20.3 diff --git a/ansible/playbooks/roles/repository/files/download-requirements/common/common_functions.sh b/ansible/playbooks/roles/repository/files/download-requirements/common/common_functions.sh deleted file mode 100644 index b9c7534993..0000000000 --- 
a/ansible/playbooks/roles/repository/files/download-requirements/common/common_functions.sh +++ /dev/null @@ -1,246 +0,0 @@ -last_error='' # holds last occurred error msg - - -echol() { -# -# Print to stdout, optionally to a log file. -# Requires $CREATE_LOGFILE and $LOG_FILE_PATH to be defined. -# -# :param $@: args to be printed -# - echo -e "$@" - if [[ $CREATE_LOGFILE == "yes" ]]; then - local timestamp=$(date +"%b %e %H:%M:%S") - echo -e "${timestamp}: $@" >> "$LOG_FILE_PATH" - fi -} - - -exit_with_error() { - echol $@ - exit 1 -} - - -__at_least_one_test_pass() { -# -# Iterate over all arguments each time call test $function and check result. -# If at least one call passes, function will yield success. -# -# :param $1: test function -# :param $@: arguments to be tested -# :return: 0 - success, 1 - failure - local function=$1 - shift - - local args=$@ - local total_count=$# - local failed_count=0 - - for arg in $args; do - echol "- $arg..." - $function $arg - - (( $? == 0 )) && return 0 - - failed_count=$(( $failed_count + 1 )) - done - - (( $total_count != $failed_count )) || return 1 - return 0 -} - - -__test_address_curl() { -# -# Test address connection without downloading any resource. -# -# :param $1: url to be tested -# :return: curl exit value -# - last_error=$(curl --show-error --silent $1 2>&1 >/dev/null) - return $? -} - - -__check_curl() { -# -# Use curl in silent mode to check if target `url` is available. -# -# :param $@: urls to be tested -# :return: 0 - success, 1 - failure -# - echol "Testing curl connection:" - - (( $# > 0 )) || exit_with_error "_check_curl: no url provided" - - __at_least_one_test_pass __test_address_curl $@ -} - - -__test_address_wget() { -# -# Test address connection without downloading any resource. -# -# :param $1: url to be tested -# :return: wget exit value -# - last_error=$(wget --spider $1 2>&1 >/dev/null) - return $? 
-} - - -__check_wget() { -# -# Use wget in spider mode (without downloading resources) to check if target `url` -# is available. -# -# :param $@: urls to be tested -# :return: 0 - success, 1 - failure -# - echol "Testing wget connection:" - - (( $# > 0 )) || exit_with_error "_check_wget: no url provided" - - __at_least_one_test_pass __test_address_wget $@ -} - - -__test_apt_repo() { -# -# Update a single repository. -# -# :param $1: repository to be updated -# :return: apt return value -# - last_error=$(apt update -o Dir::Etc::sourcelist=$1 2>&1 >/dev/null) - local ret_val=$? - - (( $ret_val == 0 )) || return $ret_val - - # make sure that there were no error messages: - [[ -z $(echo "$last_error" | tr '[:upper:]' '[:lower:]' | grep 'err\|fail') ]] || return 1 - - return 0 -} - - -__check_apt() { -# -# Use `apt update` to make sure that there is connection to repositories. -# -# :param $@: repos to be tested -# :return: 0 - success, 1 - failure -# - echol "Testing apt connection:" - - (( $# > 0 )) || exit_with_error "_check_apt: no repositories provided" - local repos=$@ - - (( $UID == 0 )) || exit_with_error "apt needs to be run as a root" - - __at_least_one_test_pass __test_apt_repo $repos - return $? -} - - -__test_yum_repo() { -# -# List packages from a single repository. -# -# :param $1: repository to be listed -# :return: yum return value -# - last_error=$(yum --quiet --disablerepo=* --enablerepo=$1 list available 2>&1 >/dev/null) - return $? -} - - -__check_yum() { -# -# Use `yum list` to make sure that there is connection to repositories. -# Query available packages for each repository. -# -# :param $@: repositories to be tested -# :return: 0 - success, 1 - failure -# - echol "Testing yum connection:" - - (( $# > 0 )) || exit_with_error "_check_yum: no repositories provided" - local repos=$@ - - __at_least_one_test_pass __test_yum_repo $repos - return $? -} - - -__test_crane_repo() { -# -# List packages from a single repository. 
-# Requires $CRANE_BIN to be defined -# -# :param $1: repository to be listed -# :return: crane return value -# - last_error=$($CRANE_BIN ls $1 2>&1 >/dev/null) - return $? -} - - -__check_crane() { -# -# Use `crane ls` to make sure that there is connection to repositories. -# Query available packages for each repository. -# -# :param $@: repositories to be tested -# :return: 0 - success, 1 - failure -# - echol "Testing crane connection:" - - (( $# > 0 )) || exit_with_error "_check_crane: no repository provided" - local repos=$@ - - __at_least_one_test_pass __test_crane_repo $repos - return $? -} - - -# Tools which can be tested: -declare -A tools=( -[curl]=__check_curl -[wget]=__check_wget -[apt]=__check_apt -[yum]=__check_yum -[crane]=__check_crane -) - - -check_connection() { -# -# Run connection test for target `tool` with optional `url` parameter. -# Requires $internet_access_checks_enabled to be defined. -# -# :param $1: which `tool` to test -# :param $@: optional parameters used by some tools such as `url` -# - [[ $internet_access_checks_enabled == "no" ]] && return 0 - - [[ $# -lt 1 ]] && exit_with_error '"tool" argument not provided' - local tool=$1 - - shift # discard tool variable - - [[ ! -n ${tools[$tool]} ]] && exit_with_error "no such tool: \"$tool\"" - - ( # disable -e in order to handle non-zero return values - set +e - - ${tools[$tool]} $@ - - if (( $? == 0 )); then - echol "Connection successful." 
- else - exit_with_error "Connection failure, reason: [$last_error]" - fi - ) -} diff --git a/ansible/playbooks/roles/repository/files/download-requirements/download-requirements.py b/ansible/playbooks/roles/repository/files/download-requirements/download-requirements.py new file mode 100644 index 0000000000..cbf24ed459 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/download-requirements.py @@ -0,0 +1,41 @@ +#!/usr/bin/python3 +import datetime +import logging +import sys +from os import getuid +from typing import Dict, List + +from src.config import Config, OSType +from src.error import CriticalError +from src.mode.base_mode import BaseMode +from src.mode.debian_family_mode import DebianFamilyMode +from src.mode.red_hat_family_mode import RedHatFamilyMode + + +MODES: Dict[OSType, BaseMode] = { + OSType.Ubuntu: DebianFamilyMode, + OSType.RedHat: RedHatFamilyMode, + OSType.CentOS: RedHatFamilyMode +} + + +def main(argv: List[str]) -> int: + try: + time_begin = datetime.datetime.now() + if getuid() != 0: + print('Error: Needs to be run as root!') + return 1 + + config = Config(argv) + + MODES[config.os_type](config).run() + + time_end = datetime.datetime.now() - time_begin + logging.info(f'Total execution time: {str(time_end).split(".")[0]}') + except CriticalError: + return 1 + + return 0 + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.aarch64.sh b/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.aarch64.sh deleted file mode 100644 index 637aadfc83..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.aarch64.sh +++ /dev/null @@ -1 +0,0 @@ -#!/usr/bin/env bash -eu diff --git a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.multiarch.sh 
b/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.multiarch.sh deleted file mode 100644 index c59a6a3e74..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.multiarch.sh +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env bash -eu - -DOCKER_CE_PATCHED_REPO_CONF=$(cat <<'EOF' -[docker-ce-stable-patched] -name=Docker CE Stable - patched centos/7/$basearch/stable -baseurl=https://download.docker.com/linux/centos/7/$basearch/stable -enabled=1 -gpgcheck=1 -gpgkey=https://download.docker.com/linux/centos/gpg -EOF -) - -ELASTIC_6_REPO_CONF=$(cat <<'EOF' -[elastic-6] -name=Elastic repository for 6.x packages -baseurl=https://artifacts.elastic.co/packages/oss-6.x/yum -gpgcheck=1 -gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch -enabled=1 -autorefresh=1 -type=rpm-md -EOF -) - -ELASTICSEARCH_7_REPO_CONF=$(cat <<'EOF' -[elasticsearch-7.x] -name=Elasticsearch repository for 7.x packages -baseurl=https://artifacts.elastic.co/packages/oss-7.x/yum -gpgcheck=1 -gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch -enabled=1 -autorefresh=1 -type=rpm-md -EOF -) - -ELASTICSEARCH_CURATOR_REPO_CONF=$(cat <<'EOF' -[curator-5] -name=CentOS/RHEL 7 repository for Elasticsearch Curator 5.x packages -baseurl=https://packages.elastic.co/curator/5/centos/7 -gpgcheck=1 -gpgkey=https://packages.elastic.co/GPG-KEY-elasticsearch -enabled=1 -EOF -) - -KUBERNETES_REPO_CONF=$(cat <<'EOF' -[kubernetes] -name=Kubernetes -baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-$basearch -enabled=1 -gpgcheck=1 -repo_gpgcheck=1 -gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg -EOF -) - -OPENDISTRO_REPO_CONF=$(cat <<'EOF' -[opendistroforelasticsearch-artifacts-repo] -name=Release RPM artifacts of OpenDistroForElasticsearch -baseurl=https://d3g5vo6xdbdb9a.cloudfront.net/yum/noarch/ -enabled=1 
-gpgkey=https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch -gpgcheck=1 -repo_gpgcheck=1 -autorefresh=1 -type=rpm-md -EOF -) - -POSTGRESQL_REPO_CONF=$(cat <<'EOF' -[pgdg13] -name=PostgreSQL 13 for RHEL/CentOS $releasever - $basearch -baseurl=https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-$releasever-$basearch -enabled=1 -gpgcheck=1 -gpgkey=https://download.postgresql.org/pub/repos/yum/RPM-GPG-KEY-PGDG -EOF -) - -POSTGRESQL_COMMON_REPO_CONF=$(cat <<'EOF' -[pgdg-common] -name=PostgreSQL common for RHEL/CentOS $releasever - $basearch -baseurl=https://download.postgresql.org/pub/repos/yum/common/redhat/rhel-$releasever-$basearch -enabled=1 -gpgcheck=1 -gpgkey=https://download.postgresql.org/pub/repos/yum/RPM-GPG-KEY-PGDG -EOF -) - -RABBITMQ_SERVER_REPO_CONF=$(cat <<'EOF' -[rabbitmq-server] -name=rabbitmq-rpm -baseurl=https://packagecloud.io/rabbitmq/rabbitmq-server/el/7/$basearch -gpgcheck=1 -gpgkey=https://packagecloud.io/rabbitmq/rabbitmq-server/gpgkey -repo_gpgcheck=1 -sslcacert=/etc/pki/tls/certs/ca-bundle.crt -enabled=1 -EOF -) - -# Official Docker CE repository, added with https://download.docker.com/linux/centos/docker-ce.repo, -# has broken URL (https://download.docker.com/linux/centos/7Server/x86_64/stable) for longer time. -# So direct (patched) link is used first if available. -add_repo_as_file 'docker-ce-stable-patched' "$DOCKER_CE_PATCHED_REPO_CONF" -if ! 
is_repo_available "docker-ce-stable-patched"; then - disable_repo "docker-ce-stable-patched" - add_repo 'docker-ce' 'https://download.docker.com/linux/centos/docker-ce.repo' -fi -add_repo_as_file 'elastic-6' "$ELASTIC_6_REPO_CONF" -add_repo_as_file 'elasticsearch-7' "$ELASTICSEARCH_7_REPO_CONF" -add_repo_as_file 'elasticsearch-curator-5' "$ELASTICSEARCH_CURATOR_REPO_CONF" -add_repo_as_file 'kubernetes' "$KUBERNETES_REPO_CONF" -add_repo_as_file 'opendistroforelasticsearch' "$OPENDISTRO_REPO_CONF" -add_repo_as_file 'postgresql-13' "$POSTGRESQL_REPO_CONF" -add_repo_as_file 'rabbitmq' "$RABBITMQ_SERVER_REPO_CONF" diff --git a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.x86_64.sh b/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.x86_64.sh deleted file mode 100644 index 20bed5ddf5..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/add-repositories.x86_64.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash -eu - -add_repo_from_script 'https://dl.2ndquadrant.com/default/release/get/10/rpm' # for repmgr -add_repo_from_script 'https://dl.2ndquadrant.com/default/release/get/13/rpm' - -disable_repo '2ndquadrant-dl-default-release-pg10-debug' # script adds 2 repositories, only 1 is required -disable_repo '2ndquadrant-dl-default-release-pg13-debug' diff --git a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/download-requirements.sh b/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/download-requirements.sh deleted file mode 100644 index 7688fd061a..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/download-requirements.sh +++ /dev/null @@ -1,827 +0,0 @@ -#!/usr/bin/env bash - -# VERSION 1.0.5 - -# NOTE: You can run only one instance of this script, new instance kills the previous one -# This limitation is for Ansible - -set -euo pipefail - -# set variables needed by 
common_functions -readonly internet_access_checks_enabled="yes" -readonly script_path="$(readlink -f $(dirname $0))" -. "${script_path}/common/common_functions.sh" - -# === Functions (in alphabetical order) === - -# params: -add_repo() { - local repo_id="$1" - local repo_url="$2" - - if ! is_repo_enabled "$repo_id"; then - echol "Adding repository: $repo_id" - yum-config-manager --add-repo "$repo_url" || - exit_with_error "Command failed: yum-config-manager --add-repo \"$repo_url\"" - # to accept import of GPG keys - yum -y repolist > /dev/null || - exit_with_error "Command failed: yum -y repolist" - fi -} - -# params: -add_repo_as_file() { - local repo_id="$1" - local config_file_content="$2" - local config_file_name="$repo_id.repo" - - if ! is_repo_enabled "$repo_id"; then - echol "Adding repository: $repo_id" - cat <<< "$config_file_content" > "/etc/yum.repos.d/$config_file_name" || - exit_with_error "Function add_repo_as_file failed for repo: $repo_id" - local -a gpg_key_urls - IFS=" " read -r -a gpg_key_urls \ - <<< "$(grep -i --only-matching --perl-regexp '(?<=^gpgkey=)http[^#\n]+' <<< "$config_file_content")" - if (( ${#gpg_key_urls[@]} > 0 )); then - import_repo_gpg_keys "${gpg_key_urls[@]}" 3 - fi - # to accept import of repo's GPG key (for repo_gpgcheck=1) - yum -y repolist > /dev/null || exit_with_error "Command failed: yum -y repolist" - fi -} - -# params: -add_repo_from_script() { - local script_url="$1" - - echol "Running: curl $script_url | bash" - curl "$script_url" | bash -} - -# params: ... 
[path_N_to_backup] -backup_files() { - local backup_file_path="$1" - shift - local paths_to_backup=("$@") - - # --directory='/' is for tar --verify - tar --create --verbose --verify --directory="/" --file="$backup_file_path" "${paths_to_backup[@]}" -} - -# params: -create_directory() { - local dir_path="$1" - - if [[ -d "$dir_path" ]]; then - echol "Directory $dir_path already exists" - else - echol "Creating directory: $dir_path" - mkdir -p "$dir_path" || exit_with_error "Command failed: mkdir -p \"$dir_path\"" - fi -} - -# params: -disable_repo() { - local repo_id="$1" - - if yum repolist enabled | grep --quiet "$repo_id"; then - echol "Disabling repository: $repo_id" - yum-config-manager --disable "$repo_id" || - exit_with_error "Command failed: yum-config-manager --disable \"$repo_id\"" - fi -} - -# params: [new_filename] -download_file() { - local file_url="$1" - local dest_dir="$2" - - if [[ ${3-} ]]; then - local file_name=$3 - else - local file_name - file_name=$(basename "$file_url") - fi - - local dest_path="${dest_dir}/${file_name}" - local retries=3 - - if [[ ${3-} ]]; then - echol "Downloading file: $file_url as $file_name" - run_cmd_with_retries wget --quiet --directory-prefix="$dest_dir" "$file_url" -O "$dest_path" $retries || \ - exit_with_error "Command failed: wget --no-verbose --directory-prefix=$dest_dir $file_url $retries" - else - echol "Downloading file: $file_url" - run_cmd_with_retries wget --quiet --directory-prefix="$dest_dir" "$file_url" $retries || \ - exit_with_error "Command failed: wget --no-verbose --directory-prefix=$dest_dir $file_url $retries" - fi -} - -# params: -download_image() { - local image_name="$1" - local dest_dir="$2" - - local splited_image=(${image_name//:/ }) - local repository=${splited_image[0]} - local tag=${splited_image[1]} - local repo_basename=$(basename -- "$repository") - local dest_path="${dest_dir}/${repo_basename}-${tag}.tar" - local retries=3 - - if [[ -f $dest_path ]]; then - echol "Image file: 
$dest_path already exists. Skipping..." - else - # use temporary file for downloading to be safe from sudden interruptions (network, ctrl+c) - local tmp_file_path=$(mktemp) - local crane_cmd="$CRANE_BIN pull --insecure --platform=${DOCKER_PLATFORM} --format=legacy ${image_name} ${tmp_file_path}" - echol "Downloading image: $image" - { run_cmd_with_retries $crane_cmd $retries && chmod 644 $tmp_file_path && mv $tmp_file_path $dest_path; } || - exit_with_error "crane failed, command was: $crane_cmd && chmod 644 $tmp_file_path && mv $tmp_file_path $dest_path" - fi -} - -# params: ... [package_N] -download_packages() { - local dest_dir="$1" - shift - local packages="$@" - local retries=3 - - if [[ -n $packages ]]; then - # when using --archlist=x86_64 yumdownloader (yum-utils-1.1.31-52) also downloads i686 packages - run_cmd_with_retries yumdownloader --quiet --archlist="$ARCH" --exclude='*i686' --destdir="$dest_dir" $packages $retries - fi -} - -# params: -enable_repo() { - local repo_id="$1" - - if ! 
yum repolist enabled | grep --quiet "$repo_id"; then - echol "Enabling repository: $repo_id" - yum-config-manager --enable "$repo_id" || - exit_with_error "Command failed: yum-config-manager --enable \"$repo_id\"" - fi -} - -# desc: find repo id (set $1) based on given pattern -# params: -find_rhel_repo_id() { - # $1 reserved for result - local rhel_on_prem_repo_id="$2" - local pattern="$3" - local repo_id - - if yum repolist all | egrep --quiet "$pattern"; then - repo_id=$(yum repolist all | egrep --only-matching "$pattern") - else - exit_with_error "RHEL yum repository not found, pattern was: $pattern" - fi - - eval $1='$repo_id' -} - -# params: -get_package_dependencies_with_arch() { - # $1 reserved for result - local package="$2" - - local query_output=$(repoquery --requires --resolve --queryformat '%{name}.%{arch}' --archlist=$ARCH,noarch "$package") || - exit_with_error "repoquery failed for dependencies of package: $package with exit code: $?, output was: $query_output" - - if [[ -z $query_output ]]; then - echol "No dependencies found for package: $package" - elif grep --ignore-case --perl-regexp '\b(? -get_package_with_version_arch() { - # $1 reserved for result - local package="$2" - - local query_output=$(repoquery --queryformat '%{ui_nevra}' --archlist=$ARCH,noarch "$package") || - exit_with_error "repoquery failed for package: $package with exit code: $?, output was: $query_output" - - # yumdownloader doesn't set error code if repoquery returns empty output - [[ -n $query_output ]] || exit_with_error "repoquery failed: package $package not found" - if grep --ignore-case --perl-regexp '\b(? 
-get_packages_with_version_arch() { - local result_var_name="$1" - shift - local packages=("$@") - local packages_with_version_arch=() - - for package in "${packages[@]}"; do - get_package_with_version_arch 'QUERY_OUTPUT' "$package" - packages_with_version_arch+=("$QUERY_OUTPUT") - done - - eval $result_var_name='("${packages_with_version_arch[@]}")' -} - -# params: -get_requirements_from_group() { - # $1 reserved for result - local group_name="$2" - local requirements_file_path="$3" - local all_requirements=$(grep --only-matching '^[^#]*' "$requirements_file_path" | sed -e 's/[[:space:]]*$//') - - if [[ $group_name == "files" ]]; then - local requirements_from_group=$(awk "/^$/ {next}; /\[${group_name}\]/ {f=1; f=2; next}; /^\[/ {f=0}; f {print \$0}" <<< "$all_requirements") || - exit_with_error "Function get_requirements_from_group failed for group: $group_name" - else - local requirements_from_group=$(awk "/^$/ {next}; /\[${group_name}\]/ {f=1; next}; /^\[/ {f=0}; f {print \$0}" <<< "$all_requirements") || - exit_with_error "Function get_requirements_from_group failed for group: $group_name" - fi - - [[ -n $requirements_from_group ]] || echol "No requirements found for group: $group_name" - - eval $1='$requirements_from_group' -} - -# params: -get_unique_array() { - local result_var_name="$1" - shift - local array=("$@") - - # filter out duplicates - array=($(echo "${array[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' ')) - - eval $result_var_name='("${array[@]}")' -} - -# params: -import_repo_gpg_keys() { - local retries=${!#} # get last arg - local urls=( "${@:1:$# - 1}" ) # remove last arg - - for url in "${urls[@]}"; do - run_cmd_with_retries rpm --import "$url" "$retries" - done -} - -# params: [package_name] -install_package() { - local package_name_or_url="$1" - local package_name="$1" - - [ $# -gt 1 ] && package_name="$2" - - echol "Installing package: $package_name" - if yum install -y "$package_name_or_url"; then - echo "$package_name" >> 
"$INSTALLED_PACKAGES_FILE_PATH" - else - exit_with_error "Command failed: yum install -y \"$package_name_or_url\"" - fi -} - -# params: -is_package_installed() { - local package="$1" - - if rpm --query --quiet "$package"; then - echol "Package $package already installed" - return 0 - else - return 1 - fi -} - -# params: -is_repo_available() { - local repo_id="$1" - - echol "Checking if '$repo_id' repo is available" - yum -q --disablerepo=* --enablerepo="$repo_id" repoinfo > /dev/null # returns 1 when 'Error 404 - Not Found' -} - -# params: -is_repo_enabled() { - local repo_id="$1" - - if yum repolist | grep --quiet "$repo_id"; then - echol "Repository $repo_id already enabled" - return 0 - else - return 1 - fi -} - -# params: -remove_package() { - local package="$1" - - if rpm --query --quiet "$package"; then - echol "Removing package: $package" - yum remove -y "$package" || exit_with_error "Command failed: yum remove -y \"$package\"" - fi -} - -# params: -remove_added_repos() { - local yum_repos_backup_tar_file_path="$1" - - declare -A initial_yum_repo_files - for repo_config_file in $(tar -tf "$yum_repos_backup_tar_file_path" | grep '.repo$' | xargs -L 1 --no-run-if-empty basename); do - initial_yum_repo_files["$repo_config_file"]=1 - done - - for repo_config_file in $(find /etc/yum.repos.d/ -maxdepth 1 -type f -name '*.repo' -printf "%f\n"); do - if (( ${initial_yum_repo_files["$repo_config_file"]:-0} == 0)); then - # remove only if not owned by a package - if ! 
rpm --quiet --query --file "/etc/yum.repos.d/$repo_config_file"; then - remove_file "/etc/yum.repos.d/$repo_config_file" - fi - fi - done -} - -# params: -remove_file() { - local file_path="$1" - - echol "Removing file: $file_path" - rm -f "$file_path" || exit_with_error "Command failed: rm -f \"$file_path\"" -} - -# params: -remove_installed_packages() { - local installed_packages_list_file="$1" - - if [ -f "$installed_packages_list_file" ]; then - for package in $(cat $installed_packages_list_file | sort --unique); do - remove_package "$package" - done - remove_file "$installed_packages_list_file" - fi -} - -remove_yum_cache_for_untracked_repos() { - local basearch releasever - basearch=$(uname --machine) - releasever=$(rpm -q --provides "$(rpm -q --whatprovides 'system-release(releasever)')" | grep "system-release(releasever)" | cut -d ' ' -f 3) - local cachedir find_output - cachedir=$(grep --only-matching --perl-regexp '(?<=^cachedir=)[^#\n]+' /etc/yum.conf) - cachedir="${cachedir/\$basearch/$basearch}" - cachedir="${cachedir/\$releasever/$releasever}" - find_output=$(find "$cachedir" -mindepth 1 -maxdepth 1 -type d -exec basename '{}' ';') - local -a repos_with_cache=() - if [ -n "$find_output" ]; then - readarray -t repos_with_cache <<< "$find_output" - fi - local all_repos_output - all_repos_output=$(yum repolist -v all | grep --only-matching --perl-regexp '(?<=^Repo-id)[^/]+' | sed -e 's/^[[:space:]:]*//') - local -a all_repos=() - readarray -t all_repos <<< "$all_repos_output" - if (( ${#repos_with_cache[@]} > 0 )); then - for cached_repo in "${repos_with_cache[@]}"; do - if ! 
_in_array "$cached_repo" "${all_repos[@]}"; then - run_cmd rm -rf "$cachedir/$cached_repo" - fi - done - fi -} - -# Runs command as array with printing it, doesn't support commands with shell operators (such as pipe or redirection) -# params: [--no-exit-on-error] -run_cmd() { - local cmd_arr=("$@") - - local exit_on_error=1 - if [[ ${cmd_arr[-1]} == '--no-exit-on-error' ]]; then - exit_on_error=0 - cmd_arr=( "${cmd_arr[@]:0:$# - 1}" ) # remove last item - fi - - local escaped_string return_code - escaped_string=$(_print_array_as_shell_escaped_string "${cmd_arr[@]}") - echol "Executing: ${escaped_string}" - "${cmd_arr[@]}"; return_code=$? - if (( return_code != 0 )) && (( exit_on_error )); then - exit_with_error "Command failed: ${escaped_string}" - else - return $return_code - fi -} - -# Runs command with retries, doesn't support commands with shell operators (such as pipe or redirection) -# params: -run_cmd_with_retries() { - # pop 'retries' argument - local retries=${!#} # get last arg (indirect expansion) - set -- "${@:1:$#-1}" # set new "$@" - - local cmd_arr=("$@") - ( # sub-shell is used to limit scope for 'set +e' - set +e - trap - ERR # disable global trap locally - for ((i=0; i <= retries; i++)); do - run_cmd "${cmd_arr[@]}" '--no-exit-on-error' - return_code=$? 
- if (( return_code == 0 )); then - break - elif (( i < retries )); then - sleep 1 - echol "retrying ($(( i+1 ))/${retries})" - else - echol "ERROR: all attempts failed" - local escaped_string - escaped_string=$(_print_array_as_shell_escaped_string "${cmd_arr[@]}") - exit_with_error "Command failed: ${escaped_string}" - fi - done - return $return_code - ) -} - -usage() { - echo "usage: ./$(basename $0) [--no-logfile]" - echo "example: ./$(basename $0) /tmp/downloads" - exit 1 -} - -validate_bash_version() { - local major_version=${BASH_VERSINFO[0]} - local minor_version=${BASH_VERSINFO[1]} - local required_version=(4 2) # (minor major) - if (( major_version < ${required_version[0]} )) || (( minor_version < ${required_version[1]} )); then - exit_with_error "This script requires Bash version ${required_version[0]}.${required_version[1]} or higher." - fi -} - -# === Helper functions (in alphabetical order) === - -_get_shell_escaped_array() { - if (( $# > 0 )); then - printf '%q\n' "$@" - fi -} - -# params: -_in_array() { - local value=${1} - shift - local array=( "$@" ) - - (( ${#array[@]} > 0 )) && printf '%s\n' "${array[@]}" | grep -q -Fx "$value" -} - -# Prints string in format that can be reused as shell input (escapes non-printable characters) -_print_array_as_shell_escaped_string() { - local output - output=$(_get_shell_escaped_array "$@") - local escaped=() - if [ -n "$output" ]; then - readarray -t escaped <<< "$output" - fi - if (( ${#escaped[@]} > 0 )); then - printf '%s\n' "${escaped[*]}" - fi -} - -# === Start === - -validate_bash_version - -if [[ $# -lt 1 ]]; then - usage >&2 -fi - -readonly START_TIME=$(date +%s) - -# --- Parse arguments --- - -POSITIONAL_ARGS=() -CREATE_LOGFILE='yes' -while [[ $# -gt 0 ]]; do - case $1 in - --no-logfile) - CREATE_LOGFILE='no' - shift # past argument - ;; - *) # unknown option - POSITIONAL_ARGS+=("$1") # save it in an array for later - shift - ;; - esac -done -set -- "${POSITIONAL_ARGS[@]}" # restore positional arguments 
- -# --- Global variables --- - -# dirs -readonly DOWNLOADS_DIR="$1" # root directory for downloads -readonly FILES_DIR="${DOWNLOADS_DIR}/files" -readonly PACKAGES_DIR="${DOWNLOADS_DIR}/packages" -readonly IMAGES_DIR="${DOWNLOADS_DIR}/images" -readonly REPO_PREREQ_PACKAGES_DIR="${PACKAGES_DIR}/repo-prereqs" -readonly SCRIPT_DIR="$(dirname $(readlink -f $0))" # want absolute path - -# files -readonly SCRIPT_FILE_NAME=$(basename "$0") -readonly LOG_FILE_NAME="${SCRIPT_FILE_NAME}.log" -readonly LOG_FILE_PATH="${SCRIPT_DIR}/${LOG_FILE_NAME}" -readonly YUM_CONFIG_BACKUP_FILE_PATH="${SCRIPT_DIR}/${SCRIPT_FILE_NAME}-yum-repos-backup-tmp-do-not-remove.tar" -readonly CRANE_BIN="${SCRIPT_DIR}/crane" -readonly INSTALLED_PACKAGES_FILE_PATH="${SCRIPT_DIR}/${SCRIPT_FILE_NAME}-installed-packages-list-do-not-remove.tmp" -readonly PID_FILE_PATH="/var/run/${SCRIPT_FILE_NAME}.pid" -readonly ADD_MULTIARCH_REPOSITORIES_SCRIPT="${SCRIPT_DIR}/add-repositories.multiarch.sh" - -#arch -readonly ARCH=$(uname -m) -echol "Detected arch: ${ARCH}" -readonly REQUIREMENTS_FILE_PATH="${SCRIPT_DIR}/requirements.${ARCH}.txt" -readonly ADD_ARCH_REPOSITORIES_SCRIPT="${SCRIPT_DIR}/add-repositories.${ARCH}.sh" -case $ARCH in -x86_64) - readonly DOCKER_PLATFORM="linux/amd64" - ;; - -aarch64) - readonly DOCKER_PLATFORM="linux/arm64" - ;; - -*) - exit_with_error "Arch ${ARCH} unsupported" - ;; -esac -echol "Docker platform: ${DOCKER_PLATFORM}" - -# --- Checks --- - -[ $EUID -eq 0 ] || { echo "You have to run as root" && exit 1; } - -[[ -f $REQUIREMENTS_FILE_PATH ]] || exit_with_error "File not found: $REQUIREMENTS_FILE_PATH" - -# --- Want to have only one instance for Ansible --- - -if [ -f $PID_FILE_PATH ]; then - readonly PID_FROM_FILE=$(cat $PID_FILE_PATH 2> /dev/null) - if [[ -n $PID_FROM_FILE ]] && kill -0 $PID_FROM_FILE > /dev/null 2>&1; then - echol "Found running process with pid: $PID_FROM_FILE, cmd: $(ps -p $PID_FROM_FILE -o cmd=)" - if ps -p $PID_FROM_FILE -o cmd= | grep --quiet 
$SCRIPT_FILE_NAME; then - echol "Killing old instance using SIGTERM" - kill -s SIGTERM $PID_FROM_FILE # try gracefully - if sleep 3 && kill -0 $PID_FROM_FILE > /dev/null 2>&1; then - echol "Still running, killing old instance using SIGKILL" - kill -s SIGKILL $PID_FROM_FILE # forcefully - fi - else - remove_file $PID_FILE_PATH - exit_with_error "Process with pid: $PID_FILE_PATH seems to be not an instance of this script" - fi - else - echol "Process with pid: $PID_FROM_FILE not found" - fi - remove_file $PID_FILE_PATH -fi - -echol "PID is: $$, creating file: $PID_FILE_PATH" -echo $$ > $PID_FILE_PATH || exit_with_error "Command failed: echo $$ > $PID_FILE_PATH" - -# --- Parse requirements file --- - -# Requirements are grouped using sections: [packages-repo-prereqs], [packages], [files], [images] -get_requirements_from_group 'REPO_PREREQ_PACKAGES' 'packages-repo-prereqs' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'CRANE' 'crane' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'PACKAGES' 'packages' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'FILES' 'files' "$REQUIREMENTS_FILE_PATH" -get_requirements_from_group 'IMAGES' 'images' "$REQUIREMENTS_FILE_PATH" - -# === Packages === - -# --- Backup yum repositories --- - -check_connection yum $(yum repolist --quiet | tail -n +2 | cut -d' ' -f1 | cut -d'/' -f1 | sed 's/^!//') - -if [ -f "$YUM_CONFIG_BACKUP_FILE_PATH" ]; then - echol "Backup aleady exists: $YUM_CONFIG_BACKUP_FILE_PATH" -else - echol "Backuping /etc/yum.repos.d/ to $YUM_CONFIG_BACKUP_FILE_PATH" - if backup_files "$YUM_CONFIG_BACKUP_FILE_PATH" '/etc/yum.repos.d/'; then - echol "Backup done" - else - if [ -f "$YUM_CONFIG_BACKUP_FILE_PATH" ]; then - remove_file "$YUM_CONFIG_BACKUP_FILE_PATH" - fi - exit_with_error "Backup of yum repositories failed" - fi -fi - -# --- Restore system repositories in case epirepo is enabled - -enable_system_repos_script="/var/tmp/epi-repository-setup-scripts/enable-system-repos.sh" 
-disable_epirepo_client_script="/var/tmp/epi-repository-setup-scripts/disable-epirepo-client.sh" - -if [[ -f /etc/yum.repos.d/epirepo.repo ]]; then - if [[ -f /var/tmp/enabled-system-repos.txt && -f $enable_system_repos_script ]]; then - echol "OS repositories seems missing, restoring..." - $enable_system_repos_script || exit_with_error "Could not restore system repositories" - $disable_epirepo_client_script || exit_with_error "Could not disable epirepo" - else - echol "/var/tmp/enabled-system-repos.txt or $enable_system_repos_script seems missing, you either know what you're doing or you need to fix your repositories" - fi -fi - -# Fix for RHUI client certificate expiration [#2318] -if is_repo_enabled "rhui-microsoft-azure-rhel"; then - run_cmd_with_retries yum update -y --disablerepo='*' --enablerepo='rhui-microsoft-azure-rhel*' 3 -fi - -# --- Install required packages unless present --- - -# repos can be enabled or disabled using the yum-config-manager command, which is provided by yum-utils package -for package in 'yum-utils' 'wget' 'curl' 'tar'; do - if ! 
is_package_installed "$package"; then - install_package "$package" - fi -done - -# --- Download and setup Crane for downloading images --- - -if [[ -z "${CRANE}" ]] || [ $(wc -l <<< "${CRANE}") -ne 1 ] ; then - exit_with_error "Crane binary download path undefined or more than one download path defined" -else - if [[ -x $CRANE_BIN ]]; then - echol "Crane binary already exists" - else - file_url=$(head -n 1 <<< "${CRANE}") - - check_connection wget $file_url - - echol "Downloading crane from: ${file_url}" - download_file "${file_url}" "${SCRIPT_DIR}" - tar_path="${SCRIPT_DIR}/${file_url##*/}" - echol "Unpacking crane from ${tar_path} to ${CRANE_BIN}" - run_cmd tar -xzf "${tar_path}" --directory "${SCRIPT_DIR}" "crane" --overwrite - [[ -x "${CRANE_BIN}" ]] || run_cmd chmod +x "${CRANE_BIN}" - remove_file "${tar_path}" - fi -fi - -# --- Enable RHEL repos --- - -# -> rhel-7-server-extras-rpms # for container-selinux package, this repo has different id names on clouds -# About rhel-7-server-extras-rpms: https://access.redhat.com/solutions/3418891 - -ON_PREM_REPO_ID='rhel-7-server-extras-rpms' -REPO_ID_PATTERN="$ON_PREM_REPO_ID|rhui-rhel-7-server-rhui-extras-rpms|rhui-REGION-rhel-server-extras|rhel-7-server-rhui-extras-rpms" # on-prem|Azure|AWS7.8|AWS7.9 -find_rhel_repo_id 'REPO_ID' "$ON_PREM_REPO_ID" "$REPO_ID_PATTERN" -enable_repo "$REPO_ID" - -# -> rhel-server-rhscl-7-rpms # for Red Hat Software Collections (RHSCL), this repo has different id names on clouds -# About rhel-server-rhscl-7-rpms: https://access.redhat.com/solutions/472793 - -ON_PREM_REPO_ID='rhel-server-rhscl-7-rpms' -REPO_ID_PATTERN="$ON_PREM_REPO_ID|rhui-rhel-server-rhui-rhscl-7-rpms|rhui-REGION-rhel-server-rhscl|rhel-server-rhui-rhscl-7-rpms" # on-prem|Azure|AWS7.8|AWS7.9 -find_rhel_repo_id 'REPO_ID' "$ON_PREM_REPO_ID" "$REPO_ID_PATTERN" -enable_repo "$REPO_ID" - -# --- Add repos --- - -# noarch repositories -. ${ADD_MULTIARCH_REPOSITORIES_SCRIPT} - -# arch specific repositories -. 
${ADD_ARCH_REPOSITORIES_SCRIPT} - -# some packages are from EPEL repo -if ! is_package_installed 'epel-release'; then - install_package 'https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm' 'epel-release' -fi - -# clean metadata for upgrades (when the same package can be downloaded from changed repo) -run_cmd remove_yum_cache_for_untracked_repos - -run_cmd_with_retries yum -y makecache fast 3 - -# --- Download packages --- - -# 1) packages required to create repository - -create_directory "$REPO_PREREQ_PACKAGES_DIR" - -# prepare lists -PREREQ_PACKAGES=() -for package in $REPO_PREREQ_PACKAGES; do - echol "Processing package: $package" - get_package_with_version_arch 'QUERY_OUTPUT' "$package" - PREREQ_PACKAGES+=("$QUERY_OUTPUT") -done - -# download requirements (fixed versions) -if [[ ${#PREREQ_PACKAGES[@]} -gt 0 ]]; then - echol "Downloading repository prerequisite packages (${#PREREQ_PACKAGES[@]})..." - download_packages "$REPO_PREREQ_PACKAGES_DIR" "${PREREQ_PACKAGES[@]}" -fi - -# 2) non-prerequisite packages - -create_directory "$PACKAGES_DIR" - -# prepare lists -NON_PREREQ_PACKAGES=() -DEPENDENCIES_OF_NON_PREREQ_PACKAGES=() -for package in $PACKAGES; do - echol "Processing package: $package" - get_package_with_version_arch 'QUERY_OUTPUT' "$package" - NON_PREREQ_PACKAGES+=("$QUERY_OUTPUT") - get_package_dependencies_with_arch 'DEPENDENCIES' "$package" - if [[ ${#DEPENDENCIES[@]} -gt 0 ]]; then - for dependency in "${DEPENDENCIES[@]}"; do - DEPENDENCIES_OF_NON_PREREQ_PACKAGES+=("$dependency") - done - fi -done - -if [[ ${#NON_PREREQ_PACKAGES[@]} -gt 0 ]]; then - # download requirements (fixed versions) - echol "Downloading packages (${#NON_PREREQ_PACKAGES[@]})..." 
- download_packages "$PACKAGES_DIR" "${NON_PREREQ_PACKAGES[@]}" - # download dependencies (latest versions) - get_unique_array 'DEPENDENCIES' "${DEPENDENCIES_OF_NON_PREREQ_PACKAGES[@]}" - get_packages_with_version_arch 'DEPENDENCIES' "${DEPENDENCIES[@]}" - echol "Downloading dependencies of packages (${#DEPENDENCIES[@]})..." - download_packages "$PACKAGES_DIR" "${DEPENDENCIES[@]}" -fi - -# --- Clean up yum repos --- - -remove_added_repos "$YUM_CONFIG_BACKUP_FILE_PATH" - -# --- Restore yum repos --- - -echol "Restoring /etc/yum.repos.d/*.repo from: $YUM_CONFIG_BACKUP_FILE_PATH" -echol "Executing: tar --extract --verbose --file $YUM_CONFIG_BACKUP_FILE_PATH" -if tar --extract --verbose --file "$YUM_CONFIG_BACKUP_FILE_PATH" --directory /etc/yum.repos.d \ - --strip-components=2 'etc/yum.repos.d/*.repo'; then - echol "Restored: yum repositories" -else - exit_with_error "Extracting tar failed: $YUM_CONFIG_BACKUP_FILE_PATH" -fi - -# === Files === - -create_directory "$FILES_DIR" - -check_connection wget $FILES - -if [[ -z "$FILES" ]]; then - echol "No files to download" -else - - # list of all files that will be downloaded - echol "Files to be downloaded:" - cat -n <<< "${FILES}" - - printf "\n" - - while IFS=' ' read -r url new_filename; do - # download files, check if new filename is provided - if [[ -z $new_filename ]]; then - download_file "$url" "$FILES_DIR" - elif [[ $new_filename = *" "* ]]; then - exit_with_error "wrong new filename for file: $url" - else - download_file "$url" "$FILES_DIR" "$new_filename" - fi - done <<< "$FILES" -fi - -# === Images === - -check_connection crane $(for image in $IMAGES; do splitted=(${image//:/ }); echo "${splitted[0]}"; done) - -create_directory "$IMAGES_DIR" - -for image in $IMAGES; do - download_image "$image" "$IMAGES_DIR" -done - -# --- Clean up --- - -remove_installed_packages "$INSTALLED_PACKAGES_FILE_PATH" - -remove_file "$YUM_CONFIG_BACKUP_FILE_PATH" - -remove_file "$PID_FILE_PATH" - -readonly END_TIME=$(date +%s) - -echol 
"$SCRIPT_FILE_NAME finished, execution time: $(date -u -d @$((END_TIME-START_TIME)) +'%Hh:%Mm:%Ss')" diff --git a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/requirements.x86_64.txt b/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/requirements.x86_64.txt deleted file mode 100644 index 86698c69fa..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/redhat-7/requirements.x86_64.txt +++ /dev/null @@ -1,254 +0,0 @@ -# Put this file in the same directory as download script - -[packages-repo-prereqs] -apr # for httpd -apr-util # for httpd -createrepo -deltarpm # for createrepo -httpd -httpd-tools # for httpd -mailcap # for httpd -mod_ssl # for httpd -python-chardet # for createrepo -python-deltarpm # for createrepo -python-kitchen # for createrepo -redhat-logos # for httpd -yum-utils - -[crane] -https://github.com/google/go-containerregistry/releases/download/v0.4.1/go-containerregistry_Linux_x86_64.tar.gz - -[packages] -audit # for docker-ce -bash-completion -ca-certificates -cifs-utils -conntrack-tools # for kubelet -containerd.io -container-selinux -cri-tools-1.13.0 -curl -dejavu-sans-fonts # for grafana -docker-ce-20.10.8 -docker-ce-cli-20.10.8 -docker-ce-rootless-extras-20.10.8 -ebtables -elasticsearch-curator-5.8.3 -elasticsearch-oss-7.10.2 # for opendistroforelasticsearch & logging roles -ethtool -filebeat-7.9.2 -firewalld -fontconfig # for grafana -fping -fuse-overlayfs # for docker-ce-rootless-extras -gnutls # for cifs-utils -gssproxy # for nfs-utils -htop -iftop -ipset # for firewalld -java-1.8.0-openjdk-headless -javapackages-tools # for java-1.8.0-openjdk-headless -jq -libini_config # for nfs-utils -libselinux-python -libsemanage-python -libX11 # for grafana -libxcb # for grafana -libXcursor # for grafana -libXt # for grafana -logrotate -net-tools -nfs-utils -nmap-ncat -# Open Distro for Elasticsearch plugins are installed individually to not download them twice in different 
versions (as dependencies of opendistroforelasticsearch package) -opendistro-alerting-1.13.1.* -opendistro-index-management-1.13.1.* -opendistro-job-scheduler-1.13.0.* -opendistro-performance-analyzer-1.13.0.* -opendistro-security-1.13.1.* -opendistro-sql-1.13.0.* -opendistroforelasticsearch-kibana-1.13.1 # kibana has shorter version -openssl -perl # for vim -perl-Getopt-Long # for vim -perl-libs # for vim -perl-Pod-Perldoc # for vim -perl-Pod-Simple # for vim -perl-Pod-Usage # for vim -pgaudit15_13-1.5.0 -policycoreutils-python # for container-selinux -pyldb # for cifs-utils -python-cffi # for python2-cryptography -python-firewall # for firewalld -python-kitchen # for yum-utils -python-lxml # for java-1.8.0-openjdk-headless -python-psycopg2 -python-pycparser # for python2-cryptography -python-setuptools -python-slip-dbus # for firewalld -python2-cryptography # for Ansible (certificate modules) -python3-3.6.8 -quota # for nfs-utils -rabbitmq-server-3.8.9 -rh-haproxy18 -rh-haproxy18-haproxy-syspaths -postgresql13-server -repmgr10-5.2.1 # used to upgrade repmgr first -repmgr13-5.2.1 -samba-client -samba-client-libs # for samba-client -samba-common -samba-libs # for cifs-utils -sysstat -tar -telnet -tmux -urw-base35-fonts # for grafana -unzip -vim-common # for vim -vim-enhanced -wget -xorg-x11-font-utils # for grafana -xorg-x11-server-utils # for grafana -yum-plugin-versionlock -yum-utils - -# to make remote-to-remote "synchronize" work in ansible -rsync - -# K8s v1.18.6 (Epiphany >= v0.7.1) -kubeadm-1.18.6 -kubectl-1.18.6 -kubelet-1.18.6 - -# K8s v1.19.15 (Epiphany >= v1.3, transitional version) -kubeadm-1.19.15 -kubectl-1.19.15 -kubelet-1.19.15 - -# K8s v1.20.12 (Epiphany >= v1.3, transitional version) -kubeadm-1.20.12 -kubectl-1.20.12 -kubelet-1.20.12 - -# K8s v1.21.7 (Epiphany >= v1.3, transitional version) -kubeadm-1.21.7 -kubectl-1.21.7 -kubelet-1.21.7 - -# K8s v1.22.4 -kubeadm-1.22.4 -kubectl-1.22.4 -kubelet-1.22.4 - -# Kubernetes Generic 
-kubernetes-cni-0.8.6-0 # since K8s v1.18.6 -kubernetes-cni-0.8.7-0 # since K8s v1.19.15 - -[files] -# --- Packages --- -# Github repository for erlang rpm is used since packagecloud repository is limited to a certain number of versions and erlang package from erlang-solutions repository is much more complex and bigger -https://github.com/rabbitmq/erlang-rpm/releases/download/v23.1.5/erlang-23.1.5-1.el7.x86_64.rpm -# Grafana package is not downloaded from repository since it was not reliable (issue #2449) -https://dl.grafana.com/oss/release/grafana-8.3.2-1.x86_64.rpm -# --- Exporters --- -https://github.com/danielqsj/kafka_exporter/releases/download/v1.4.0/kafka_exporter-1.4.0.linux-amd64.tar.gz -https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar -https://github.com/prometheus/node_exporter/releases/download/v1.3.1/node_exporter-1.3.1.linux-amd64.tar.gz -https://github.com/prometheus-community/postgres_exporter/releases/download/v0.10.0/postgres_exporter-0.10.0.linux-amd64.tar.gz -# --- Misc --- -https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz -https://github.com/prometheus/prometheus/releases/download/v2.31.1/prometheus-2.31.1.linux-amd64.tar.gz -https://github.com/prometheus/alertmanager/releases/download/v0.23.0/alertmanager-0.23.0.linux-amd64.tar.gz -https://archive.apache.org/dist/zookeeper/zookeeper-3.5.8/apache-zookeeper-3.5.8-bin.tar.gz -https://get.helm.sh/helm-v3.2.0-linux-amd64.tar.gz -https://archive.apache.org/dist/logging/log4j/2.17.1/apache-log4j-2.17.1-bin.tar.gz -# --- Helm charts --- -https://helm.elastic.co/helm/filebeat/filebeat-7.9.2.tgz -https://charts.bitnami.com/bitnami/node-exporter-2.3.17.tgz -# --- Grafana Dashboards --- -# Kubernetes Cluster -https://grafana.com/api/dashboards/7249/revisions/1/download grafana_dashboard_7249.json -# Kubernetes cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/315/revisions/3/download 
grafana_dashboard_315.json -# Node Exporter for Prometheus -https://grafana.com/api/dashboards/11074/revisions/9/download grafana_dashboard_11074.json -# Node Exporter Server Metrics -https://grafana.com/api/dashboards/405/revisions/8/download grafana_dashboard_405.json -# Postgres Overview -https://grafana.com/api/dashboards/455/revisions/2/download grafana_dashboard_455.json -# PostgreSQL Database -https://grafana.com/api/dashboards/9628/revisions/7/download grafana_dashboard_9628.json -# RabbitMQ Monitoring -https://grafana.com/api/dashboards/4279/revisions/4/download grafana_dashboard_4279.json -# Node Exporter Full -https://grafana.com/api/dashboards/1860/revisions/23/download grafana_dashboard_1860.json -# Kafka Exporter Overview -https://grafana.com/api/dashboards/7589/revisions/5/download grafana_dashboard_7589.json -# HaProxy backend (or frontend/servers) -https://grafana.com/api/dashboards/789/revisions/1/download grafana_dashboard_789.json -# Docker and Host Monitoring w/ Prometheus -https://grafana.com/api/dashboards/179/revisions/7/download grafana_dashboard_179.json -# Kubernetes pod and cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/6663/revisions/1/download grafana_dashboard_6663.json -# RabbitMQ cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/10991/revisions/11/download grafana_dashboard_10991.json - -[images] -haproxy:2.2.2-alpine -kubernetesui/dashboard:v2.3.1 -kubernetesui/metrics-scraper:v1.0.7 -registry:2 -# applications -bitnami/pgpool:4.2.4 -bitnami/pgbouncer:1.16.0 -epiphanyplatform/keycloak:14.0.0 -rabbitmq:3.8.9 -# K8s -## v1.18.6 -k8s.gcr.io/kube-apiserver:v1.18.6 -k8s.gcr.io/kube-controller-manager:v1.18.6 -k8s.gcr.io/kube-scheduler:v1.18.6 -k8s.gcr.io/kube-proxy:v1.18.6 -k8s.gcr.io/coredns:1.6.7 -k8s.gcr.io/etcd:3.4.3-0 -quay.io/coreos/flannel:v0.12.0-amd64 -quay.io/coreos/flannel:v0.12.0 -calico/cni:v3.15.0 -calico/kube-controllers:v3.15.0 -calico/node:v3.15.0 
-calico/pod2daemon-flexvol:v3.15.0 -## v1.19.15 -k8s.gcr.io/kube-apiserver:v1.19.15 -k8s.gcr.io/kube-controller-manager:v1.19.15 -k8s.gcr.io/kube-scheduler:v1.19.15 -k8s.gcr.io/kube-proxy:v1.19.15 -## v1.20.12 -k8s.gcr.io/kube-apiserver:v1.20.12 -k8s.gcr.io/kube-controller-manager:v1.20.12 -k8s.gcr.io/kube-scheduler:v1.20.12 -k8s.gcr.io/kube-proxy:v1.20.12 -k8s.gcr.io/coredns:1.7.0 -k8s.gcr.io/pause:3.2 -## v1.21.7 -k8s.gcr.io/kube-apiserver:v1.21.7 -k8s.gcr.io/kube-controller-manager:v1.21.7 -k8s.gcr.io/kube-scheduler:v1.21.7 -k8s.gcr.io/kube-proxy:v1.21.7 -k8s.gcr.io/coredns/coredns:v1.8.0 -k8s.gcr.io/etcd:3.4.13-0 -k8s.gcr.io/pause:3.4.1 -## v1.22.4 -k8s.gcr.io/kube-apiserver:v1.22.4 -k8s.gcr.io/kube-controller-manager:v1.22.4 -k8s.gcr.io/kube-scheduler:v1.22.4 -k8s.gcr.io/kube-proxy:v1.22.4 -k8s.gcr.io/coredns/coredns:v1.8.4 -k8s.gcr.io/etcd:3.5.0-0 -k8s.gcr.io/pause:3.5 -quay.io/coreos/flannel:v0.14.0-amd64 -quay.io/coreos/flannel:v0.14.0 -calico/cni:v3.20.3 -calico/kube-controllers:v3.20.3 -calico/node:v3.20.3 -calico/pod2daemon-flexvol:v3.20.3 diff --git a/ansible/playbooks/roles/repository/files/download-requirements/repositories/x86_64/redhat.yml b/ansible/playbooks/roles/repository/files/download-requirements/repositories/x86_64/redhat.yml new file mode 100644 index 0000000000..2963821a67 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/repositories/x86_64/redhat.yml @@ -0,0 +1,102 @@ +--- +repositories: + docker-ce-stable-patched: + gpgkeys: + - https://download.docker.com/linux/centos/gpg + data: | + [docker-ce-stable-patched] + name=Docker CE Stable - patched centos/7/$basearch/stable + baseurl=https://download.docker.com/linux/centos/7/$basearch/stable + enabled=1 + gpgcheck=1 + + elastic-6: + gpgkeys: + - https://artifacts.elastic.co/GPG-KEY-elasticsearch + data: | + [elastic-6] + name=Elastic repository for 6.x packages + baseurl=https://artifacts.elastic.co/packages/oss-6.x/yum + gpgcheck=1 + enabled=1 + 
autorefresh=1 + type=rpm-md + + elasticsearch-7: + gpgkeys: + - https://artifacts.elastic.co/GPG-KEY-elasticsearch + data: | + [elasticsearch-7.x] + name=Elasticsearch repository for 7.x packages + baseurl=https://artifacts.elastic.co/packages/oss-7.x/yum + gpgcheck=1 + enabled=1 + autorefresh=1 + type=rpm-md + + elasticsearch-curator-5: + gpgkeys: + - https://packages.elastic.co/GPG-KEY-elasticsearch + data: | + [curator-5] + name=CentOS/RHEL 7 repository for Elasticsearch Curator 5.x packages + baseurl=https://packages.elastic.co/curator/5/centos/7 + gpgcheck=1 + enabled=1 + + kubernetes: + gpgkeys: + - https://packages.cloud.google.com/yum/doc/yum-key.gpg + - https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg + data: | + [kubernetes] + name=Kubernetes + baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-$basearch + enabled=1 + gpgcheck=1 + repo_gpgcheck=1 + + opendistroforelasticsearch: + gpgkeys: + - https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch + data: | + [opendistroforelasticsearch-artifacts-repo] + name=Release RPM artifacts of OpenDistroForElasticsearch + baseurl=https://d3g5vo6xdbdb9a.cloudfront.net/yum/noarch/ + enabled=1 + gpgcheck=1 + repo_gpgcheck=1 + autorefresh=1 + type=rpm-md + + postgresql-13: + gpgkeys: + - https://download.postgresql.org/pub/repos/yum/RPM-GPG-KEY-PGDG + data: | + [pgdg13] + name=PostgreSQL 13 for RHEL/CentOS $releasever - $basearch + baseurl=https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-$releasever-$basearch + enabled=1 + gpgcheck=1 + + #postgresql_common: + #gpgkeys: + #- https://download.postgresql.org/pub/repos/yum/RPM-GPG-KEY-PGDG + #data: | + #[pgdg-common] + #name=PostgreSQL common for RHEL/CentOS $releasever - $basearch + #baseurl=https://download.postgresql.org/pub/repos/yum/common/redhat/rhel-$releasever-$basearch + #enabled=1 + #gpgcheck=1 + + rabbitmq: + gpgkeys: + - https://packagecloud.io/rabbitmq/rabbitmq-server/gpgkey + data: | + 
[rabbitmq-server] + name=rabbitmq-rpm + baseurl=https://packagecloud.io/rabbitmq/rabbitmq-server/el/7/$basearch + gpgcheck=1 + repo_gpgcheck=1 + sslcacert=/etc/pki/tls/certs/ca-bundle.crt + enabled=1 diff --git a/ansible/playbooks/roles/repository/files/download-requirements/repositories/x86_64/ubuntu.yml b/ansible/playbooks/roles/repository/files/download-requirements/repositories/x86_64/ubuntu.yml new file mode 100644 index 0000000000..bd21e8d6fa --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/repositories/x86_64/ubuntu.yml @@ -0,0 +1,53 @@ +--- +repositories: + elastic_6: + key: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch' + content: 'deb https://artifacts.elastic.co/packages/oss-6.x/apt stable main' + path: '/etc/apt/sources.list.d/elastic-6.x.list' + + kubernetes: + key: 'https://packages.cloud.google.com/apt/doc/apt-key.gpg' + content: 'deb http://apt.kubernetes.io/ kubernetes-xenial main' + path: '/etc/apt/sources.list.d/kubernetes.list' + + erlang_solutions: + key: 'https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc' + content: 'deb https://packages.erlang-solutions.com/ubuntu focal contrib' + path: '/etc/apt/sources.list.d/erlang-23.x.list' + + rabbitmq_server: + key: 'https://packagecloud.io/rabbitmq/rabbitmq-server/gpgkey' + content: 'deb https://packagecloud.io/rabbitmq/rabbitmq-server/ubuntu bionic main' + path: '/etc/apt/sources.list.d/rabbitmq.list' + + docker_ce: + key: 'https://download.docker.com/linux/ubuntu/gpg' + content: 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' + path: '/etc/apt/sources.list.d/docker-ce.list' + + elastic_7: + key: 'https://artifacts.elastic.co/GPG-KEY-elasticsearch' + content: 'deb https://artifacts.elastic.co/packages/oss-7.x/apt stable main' + path: '/etc/apt/sources.list.d/elastic-7.x.list' + + opendistroforelasticsearch: + key: 'https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch' + content: 'deb 
https://d3g5vo6xdbdb9a.cloudfront.net/apt stable main' + path: '/etc/apt/sources.list.d/opendistroforelasticsearch.list' + + postgresql: + key: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' + content: 'deb http://apt.postgresql.org/pub/repos/apt focal-pgdg main' + path: '/etc/apt/sources.list.d/pgdg.list' + + # Historical packages from apt.postgresql.org + postgresql-archive: + key: 'https://www.postgresql.org/media/keys/ACCC4CF8.asc' + content: 'deb http://apt-archive.postgresql.org/pub/repos/apt focal-pgdg-archive main' + path: '/etc/apt/sources.list.d/pgdg-archive.list' + + # Provides repmgr + 2ndquadrant: + key: 'https://dl.2ndquadrant.com/gpg-key.asc' + content: 'deb https://dl.2ndquadrant.com/default/release/apt focal-2ndquadrant main' + path: '/etc/apt/sources.list.d/2ndquadrant-dl-default-release.list' diff --git a/ansible/playbooks/roles/repository/files/download-requirements/requirements/crane.yml b/ansible/playbooks/roles/repository/files/download-requirements/requirements/crane.yml new file mode 100644 index 0000000000..ae1b001ffe --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/requirements/crane.yml @@ -0,0 +1,4 @@ +--- +crane: + - url: 'https://github.com/google/go-containerregistry/releases/download/v0.4.1/go-containerregistry_Linux_x86_64.tar.gz' + sha256: def1364f9483d133ccc6b1c4876f59a653d024c8866d96ecda026561d38c349b diff --git a/ansible/playbooks/roles/repository/files/download-requirements/requirements/dashboards.yml b/ansible/playbooks/roles/repository/files/download-requirements/requirements/dashboards.yml new file mode 100644 index 0000000000..444f770583 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/requirements/dashboards.yml @@ -0,0 +1,66 @@ +--- +dashboards: + # Kubernetes Cluster + - name: grafana_dashboard_7249 + url: 'https://grafana.com/api/dashboards/7249/revisions/1/download' + sha256: 41cc2794b1cc9fc537baf045fee12d086d23632b4c8b2e88985274bb9862e731 + 
+ # Kubernetes cluster monitoring (via Prometheus) + - name: grafana_dashboard_315 + url: 'https://grafana.com/api/dashboards/315/revisions/3/download' + sha256: ee46dd6e68a9950aa78e8c88ae5e565c8ebde6cbdbe08972a70f06c5486618fb + + # Node Exporter for Prometheus + - name: grafana_dashboard_11074 + url: 'https://grafana.com/api/dashboards/11074/revisions/9/download' + sha256: 151b23305da46eab84930e99175e1c07e375af73dbbb4b8f501ca25f5ac62785 + + # Node Exporter Server Metrics + - name: grafana_dashboard_405 + url: 'https://grafana.com/api/dashboards/405/revisions/8/download' + sha256: 97675027cbd5b7241e93a2b598654c4b466bc909eeb6358ba123d500094d913c + + # Postgres Overview + - name: grafana_dashboard_455 + url: 'https://grafana.com/api/dashboards/455/revisions/2/download' + sha256: c66b91ab8d258b0dc005d3ee4dac3a5634a627c79cc8053875f76ab1e369a362 + + # PostgreSQL Database + - name: grafana_dashboard_9628 + url: 'https://grafana.com/api/dashboards/9628/revisions/7/download' + sha256: c64cc38ad9ebd7af09551ee83e669a38f62a76e7c80929af5668a5852732b376 + + # RabbitMQ Monitoring + - name: grafana_dashboard_4279 + url: 'https://grafana.com/api/dashboards/4279/revisions/4/download' + sha256: 74d47be868da52c145240ab5586d91ace9e9218ca775af988f9d60e501907a25 + + # Node Exporter Full + - name: grafana_dashboard_1860 + url: 'https://grafana.com/api/dashboards/1860/revisions/23/download' + sha256: 225faab8bf35c1723af14d4c069882ccb92b455d1941c6b1cf3d95a1576c13d7 + + # Kafka Exporter Overview + - name: grafana_dashboard_7589 + url: 'https://grafana.com/api/dashboards/7589/revisions/5/download' + sha256: cf020e14465626360418e8b5746818c80d77c0301422f3060879fddc099c2151 + + # HaProxy backend (or frontend/servers) + - name: grafana_dashboard_789 + url: 'https://grafana.com/api/dashboards/789/revisions/1/download' + sha256: 6a9b4bdc386062287af4f7d56781103a2e45a51813596a65f03c1ae1d4d3e919 + + # Docker and Host Monitoring w/ Prometheus + - name: grafana_dashboard_179 + url: 
'https://grafana.com/api/dashboards/179/revisions/7/download' + sha256: 8d67350ff74e715fb1463f2406f24a73377357d90344f8200dad9d1b2a8133c2 + + # Kubernetes pod and cluster monitoring (via Prometheus) + - name: grafana_dashboard_6663 + url: 'https://grafana.com/api/dashboards/6663/revisions/1/download' + sha256: d544d88069e1b793ff3d8f6970df641ad9a66217e69b629621e1ecbb2f06aa05 + + # RabbitMQ cluster monitoring (via Prometheus) + - name: grafana_dashboard_10991 + url: 'https://grafana.com/api/dashboards/10991/revisions/11/download' + sha256: 66340fa3256d432287cba75ab5177eb058c77afa7d521a75d58099f95b1bff50 diff --git a/ansible/playbooks/roles/repository/files/download-requirements/requirements/files.yml b/ansible/playbooks/roles/repository/files/download-requirements/requirements/files.yml new file mode 100644 index 0000000000..68d7b1390a --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/requirements/files.yml @@ -0,0 +1,40 @@ +--- +files: + # --- Exporters --- + - url: 'https://github.com/danielqsj/kafka_exporter/releases/download/v1.4.0/kafka_exporter-1.4.0.linux-amd64.tar.gz' + sha256: ffda682e82daede726da8719257a088f8e23dcaa4e2ac8b2b2748a129aea85f0 + + - url: 'https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar' + sha256: 0ddc6834f854c03d5795305193c1d33132a24fbd406b4b52828602f5bc30777e + + - url: 'https://github.com/prometheus/node_exporter/releases/download/v1.3.1/node_exporter-1.3.1.linux-amd64.tar.gz' + sha256: 68f3802c2dd3980667e4ba65ea2e1fb03f4a4ba026cca375f15a0390ff850949 + + - url: 'https://github.com/prometheus-community/postgres_exporter/releases/download/v0.10.0/postgres_exporter-0.10.0.linux-amd64.tar.gz' + sha256: 1d1a008c5e29673b404a9ce119b7516fa59974aeda2f47d4a0446d102abce8a1 + + # --- Misc --- + - url: 'https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz' + sha256: 086bf9ca1fcbe2abe5c62e73d6f172adb1ee5a5b42732e153fb4d4ec82dab69f + + - url: 
'https://archive.apache.org/dist/zookeeper/zookeeper-3.5.8/apache-zookeeper-3.5.8-bin.tar.gz' + sha256: c35ed6786d59b73920243f1a324d24c2ddfafb379041d7a350cc9a341c52caf3 + + - url: 'https://github.com/prometheus/alertmanager/releases/download/v0.23.0/alertmanager-0.23.0.linux-amd64.tar.gz' + sha256: 77793c4d9bb92be98f7525f8bc50cb8adb8c5de2e944d5500e90ab13918771fc + + - url: 'https://github.com/prometheus/prometheus/releases/download/v2.31.1/prometheus-2.31.1.linux-amd64.tar.gz' + sha256: 7852dc11cfaa039577c1804fe6f082a07c5eb06be50babcffe29214aedf318b3 + + - url: 'https://get.helm.sh/helm-v3.2.0-linux-amd64.tar.gz' + sha256: 4c3fd562e64005786ac8f18e7334054a24da34ec04bbd769c206b03b8ed6e457 + + - url: 'https://archive.apache.org/dist/logging/log4j/2.17.1/apache-log4j-2.17.1-bin.tar.gz' + sha256: b876c20c9d318d77a39c0c2e095897b2bb1cd100c7859643f8c7c8b0fc6d5961 + + # --- Helm charts --- + - url: 'https://charts.bitnami.com/bitnami/node-exporter-2.3.17.tgz' + sha256: ec586fabb775a4f05510386899cf348391523c89ff5a1d4097b0592e675ade7f + + - url: 'https://helm.elastic.co/helm/filebeat/filebeat-7.9.2.tgz' + sha256: 5140b4c4473ca33a0af4c3f70545dcc89735c0a179d974ebc150f1f28ac229ab diff --git a/ansible/playbooks/roles/repository/files/download-requirements/requirements/images.yml b/ansible/playbooks/roles/repository/files/download-requirements/requirements/images.yml new file mode 100644 index 0000000000..72c3ad4dc8 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/requirements/images.yml @@ -0,0 +1,158 @@ +--- +images: + - name: 'haproxy:2.2.2-alpine' + sha256: f42019aaa7b8d10a7818a72ecde001b91e83df1d9ad0d1e38805968791880a75 + + - name: 'kubernetesui/dashboard:v2.3.1' + sha256: 1351ee7605362e7fcb84a645cf81c7ae915ce1034de9d9cf00f1246882976f59 + + - name: 'kubernetesui/metrics-scraper:v1.0.7' + sha256: 4fde72c37aa57868f19a5552b89eab71226eccec95fa33801146577880739050 + + - name: 'registry:2' + sha256: 
bdd3efacf7f73edbca2da6fae2cb46adc0a48999dbbb9f362e56f43187a3e34c + + # applications + - name: 'bitnami/pgpool:4.2.4' + sha256: 3388e83ef861545f363c674b516e58c67dda0e15adf208ed5dee627e4a9a6453 + + - name: 'bitnami/pgbouncer:1.16.0' + sha256: f66d8c29402f03180393cea7bc1a75b708807113a77877e85405fcca1f3f2b0e + + - name: 'epiphanyplatform/keycloak:14.0.0' + sha256: a6c143ab8f2e53f0af73f068cda0d8bd50aa792a6729f901367ace44a6bedf58 + + - name: 'rabbitmq:3.8.9' + sha256: 7f28ecc2db56fb02fa237adc8cef811352ebde18365741b0be6da8d850465e59 + + # K8s + # v1.18.6 + - name: 'k8s.gcr.io/kube-apiserver:v1.18.6' + sha256: 46a6a5db199cf175ddf83dc4f8c4bca9ef852b36ffd8929dbc793a9a5ac7f42e + + - name: 'k8s.gcr.io/kube-controller-manager:v1.18.6' + sha256: 066153685539eb43de5702d8924ed9c7c449a1a6b4557b0aead5e8c80850f5f7 + + - name: 'k8s.gcr.io/kube-scheduler:v1.18.6' + sha256: 257187a3ff641276ad174ef2a01176f4655997e235c83ae5f27a96a3ed5ff01b + + - name: 'k8s.gcr.io/kube-proxy:v1.18.6' + sha256: 6fa027ea9c16e230384617fbfd3b6ba0d70c6f4783fe8b23af03d226ebcb444b + + - name: 'k8s.gcr.io/coredns:1.6.7' + sha256: bc895982dfec04707553353fef5e6f35b8dfc2ec58a4bc67d234eee5dbed814f + + - name: 'k8s.gcr.io/etcd:3.4.3-0' + sha256: 2770a10d5b98167c07b9326167a52b3d58bf9e9241d813ed9ec88472e958e12a + + - name: 'quay.io/coreos/flannel:v0.12.0-amd64' + sha256: 0fd9251d0bed2e9fba12ef21c3443acf461fb7d0d9e469ff7f0ed51f68848bec + + - name: 'quay.io/coreos/flannel:v0.12.0' + sha256: 14c9a68e0038d764992cf06119e67f86e1d1e4489d1c0bac0ee2a69ff484ab44 + + - name: 'calico/cni:v3.15.0' + sha256: 1007ddea91576ff50325e6a8dad83ba7596e7369c5c5f2318118438e029a348a + + - name: 'calico/kube-controllers:v3.15.0' + sha256: 4e13dd977d99a9c846b740531a9e4210ddb7cb5be4b8c05264aad2f541702e28 + + - name: 'calico/node:v3.15.0' + sha256: 8aedb15067f694aa1220589c1cb3e51a3a652d127143a55a41b9be28b0ea7da7 + + - name: 'calico/pod2daemon-flexvol:v3.15.0' + sha256: 4cf6c67db34bdc5598e28fc54429afa0362b299fdc6da064335c6bff8e01ca1a + + # v1.19.15 + 
- name: 'k8s.gcr.io/kube-apiserver:v1.19.15' + sha256: 5a128204ea5aeae8eb84cfc7cf759b6e31c97eb0fc57652d10199d6406ab7333 + + - name: 'k8s.gcr.io/kube-controller-manager:v1.19.15' + sha256: abdd9350d664f4a235d3a300fa55f9a4a6c5a26d299d7e0ec0e11476be589aea + + - name: 'k8s.gcr.io/kube-scheduler:v1.19.15' + sha256: 08732fd863f4127dd34692fabc2f9e023f6d6d2c238eb34e928cf3ef9c16f0cf + + - name: 'k8s.gcr.io/kube-proxy:v1.19.15' + sha256: 131b72a5d153c153bbc85e5cfbef7fbdc0ff7f43ffb4e9555b1a95b8625f0226 + + # v1.20.12 + - name: 'k8s.gcr.io/kube-apiserver:v1.20.12' + sha256: 618e808f0b2cc5203c4105f417817a8a5cf10a6829493fdc1a5f9c7d3bd45520 + + - name: 'k8s.gcr.io/kube-controller-manager:v1.20.12' + sha256: 30c5431215d61593b0debf5c2a2e3b818f8af83689c55a06817f4f23cf950f70 + + - name: 'k8s.gcr.io/kube-scheduler:v1.20.12' + sha256: f31e3de525f75953ab3494df4879352c4aaf0f7bd95e3960c29e52dfd81783d0 + + - name: 'k8s.gcr.io/kube-proxy:v1.20.12' + sha256: 40bb2f9e229e846034dde56e41a125b14c697be96102dc3b02f60d2b16b5fb4c + + - name: 'k8s.gcr.io/coredns:1.7.0' + sha256: 08368ce06f22002639ffe095a2b2a0d16da50b2a7e821206cb97957d183dde92 + + - name: 'k8s.gcr.io/pause:3.2' + sha256: 64bb6a32f297d03e8b570e58d7dd00a171ed5686295d966b2fd583165fb93f9e + + # v1.21.7 + - name: 'k8s.gcr.io/kube-apiserver:v1.21.7' + sha256: 903a9db6809afa0fd6a35109102916c5648518de931b424e87173dfcfbc98f24 + + - name: 'k8s.gcr.io/kube-controller-manager:v1.21.7' + sha256: e7b9752b044582dc841f7f0ef538835712ca1e5311a1ee667464be2c361c768d + + - name: 'k8s.gcr.io/kube-scheduler:v1.21.7' + sha256: 3c3585da6d26cffa725b40b8c49b4d0720485320144bcdc51013f546cc781f12 + + - name: 'k8s.gcr.io/kube-proxy:v1.21.7' + sha256: 31b1f8a34852b9a66a174df742346927d2ffcf16366ebbc22e71d2fcb01b1b42 + + - name: 'k8s.gcr.io/coredns/coredns:v1.8.0' + sha256: 6bf705594c7676ab6f8fcf22876fe5d33eb71018928e73fff90255b460aded6a + + - name: 'k8s.gcr.io/etcd:3.4.13-0' + sha256: 377554d89698f6c4e7a1aafaa756ad87bcefdc45ea33123fb9945501623aa501 + + - name: 
'k8s.gcr.io/pause:3.4.1' + sha256: d8f6de1afe05ef0de2f25368599e9b540da19bbf7a83746d6fbed06bfef4edac + + # v1.22.4 + - name: 'k8s.gcr.io/kube-apiserver:v1.22.4' + sha256: 86c7114f56570eccdf1542eb08b1c17e1d453e56c028795a3751ee39d780d2e7 + + - name: 'k8s.gcr.io/kube-controller-manager:v1.22.4' + sha256: 3651fbadbbd1785244862f6f0ee9e29d267676e1536db3dd89bae3be5a25145e + + - name: 'k8s.gcr.io/kube-scheduler:v1.22.4' + sha256: 129e8dada97246067eede38a2f0634428ee77d05d7a2c38c84258e1977e7a7e6 + + - name: 'k8s.gcr.io/kube-proxy:v1.22.4' + sha256: a5331a2d1b22487cad455d2e6e82e107ecd7e5124a5bf9c6d9044c99231d7c55 + + - name: 'k8s.gcr.io/coredns/coredns:v1.8.4' + sha256: 0db432fb9f3de461a3fbc463628aaef4609f179a55a9ae487b2f1f1af7673939 + + - name: 'k8s.gcr.io/etcd:3.5.0-0' + sha256: b56aff902afec396f75c9f7e8db6b47e1da041730848cd38e69db4ff74b72dfc + + - name: 'k8s.gcr.io/pause:3.5' + sha256: 44269aea9e3efb259c9bd305569e7bd25805958ebb6f1c7f758101d311b9493a + + - name: 'quay.io/coreos/flannel:v0.14.0-amd64' + sha256: bb95008a5b6af496db5ebd120542035952c559eec5b7414ce635aea5cc8592c1 + + - name: 'quay.io/coreos/flannel:v0.14.0' + sha256: d4c7f16b7f39dc4cfa4226a04514808c279aa01863886df0f32d27a93fef76c2 + + - name: 'calico/cni:v3.20.3' + sha256: 289009d818d6d676ea509f0baf717dacddec81d2b64cb0c55450f6e881f90432 + + - name: 'calico/kube-controllers:v3.20.3' + sha256: 2c4146dd95d62924b727d84a62e9ec306647a702da7715d6889021c2c9e07a2d + + - name: 'calico/node:v3.20.3' + sha256: 267511105a133492f845dd94a520b319003ca26d404cabe251a3256d11dfd84c + + - name: 'calico/pod2daemon-flexvol:v3.20.3' + sha256: 5b9e837c43575b4bde353f1e9c1e2ce45c399a192b8e72a622dace55567feee7 diff --git a/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/redhat.yml b/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/redhat.yml new file mode 100644 index 0000000000..7a102fef35 --- /dev/null +++ 
b/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/redhat.yml @@ -0,0 +1,276 @@ +--- +prereq-packages: + # packages-repo-prereqs + - name: 'apr' # for httpd + sha256: abc + - name: 'apr-util' # for httpd + sha256: abc + - name: 'createrepo' + sha256: abc + - name: 'deltarpm' # for createrepo + sha256: abc + - name: 'httpd' + sha256: abc + - name: 'httpd-tools' # for httpd + sha256: abc + - name: 'mailcap' # for httpd + sha256: abc + - name: 'mod_ssl' # for httpd + sha256: abc + - name: 'python-chardet' # for createrepo + sha256: abc + - name: 'python-deltarpm' # for createrepo + sha256: abc + - name: 'python-kitchen' # for createrepo + sha256: abc + - name: 'redhat-logos' # for httpd + sha256: abc + - name: 'yum-utils' + sha256: abc + +packages: + - name: 'audit' # for docker-ce + sha256: abc + - name: 'bash-completion' + sha256: abc + - name: 'ca-certificates' + sha256: abc + - name: 'cifs-utils' + sha256: abc + - name: 'conntrack-tools' # for kubelet + sha256: abc + - name: 'containerd.io' + sha256: abc + - name: 'container-selinux' + sha256: abc + - name: 'cri-tools-1.13.0' + sha256: abc + - name: 'curl' + sha256: abc + - name: 'dejavu-sans-fonts' # for grafana + sha256: abc + - name: 'docker-ce-20.10.8' + sha256: abc + - name: 'docker-ce-cli-20.10.8' + sha256: abc + - name: 'docker-ce-rootless-extras-20.10.8' + sha256: abc + - name: 'ebtables' + sha256: abc + - name: 'elasticsearch-curator-5.8.3' + sha256: abc + - name: 'elasticsearch-oss-7.10.2' # for opendistroforelasticsearch & logging roles + sha256: abc + - name: 'ethtool' + sha256: abc + - name: 'filebeat-7.9.2' + sha256: abc + - name: 'firewalld' + sha256: abc + - name: 'fontconfig' # for grafana + sha256: abc + - name: 'fping' + sha256: abc + - name: 'fuse-overlayfs' # for docker-ce-rootless-extras + sha256: abc + - name: 'gnutls' # for cifs-utils + sha256: abc + - name: 'gssproxy' # for nfs-utils + sha256: abc + - name: 'htop' + sha256: abc + - name: 'iftop' + sha256: 
abc + - name: 'ipset' # for firewalld + sha256: abc + - name: 'java-1.8.0-openjdk-headless' + sha256: abc + - name: 'javapackages-tools' # for java-1.8.0-openjdk-headless + sha256: abc + - name: 'jq' + sha256: abc + - name: 'libini_config' # for nfs-utils + sha256: abc + - name: 'libselinux-python' + sha256: abc + - name: 'libsemanage-python' + sha256: abc + - name: 'libX11' # for grafana + sha256: abc + - name: 'libxcb' # for grafana + sha256: abc + - name: 'libXcursor' # for grafana + sha256: abc + - name: 'libXt' # for grafana + sha256: abc + - name: 'logrotate' + sha256: abc + - name: 'net-tools' + sha256: abc + - name: 'nfs-utils' + sha256: abc + - name: 'nmap-ncat' + sha256: abc + + # Open Distro for Elasticsearch plugins are installed individually to not download them twice in different versions (as dependencies of opendistroforelasticsearch package) + - name: 'opendistro-alerting-1.13.1.*' + sha256: abc + - name: 'opendistro-index-management-1.13.1.*' + sha256: abc + - name: 'opendistro-job-scheduler-1.13.0.*' + sha256: abc + - name: 'opendistro-performance-analyzer-1.13.0.*' + sha256: abc + - name: 'opendistro-security-1.13.1.*' + sha256: abc + - name: 'opendistro-sql-1.13.0.*' + sha256: abc + - name: 'opendistroforelasticsearch-kibana-1.13.1' # kibana has shorter version + sha256: abc + - name: 'openssl' + sha256: abc + - name: 'perl' # for vim + sha256: abc + - name: 'perl-Getopt-Long' # for vim + sha256: abc + - name: 'perl-libs' # for vim + sha256: abc + - name: 'perl-Pod-Perldoc' # for vim + sha256: abc + - name: 'perl-Pod-Simple' # for vim + sha256: abc + - name: 'perl-Pod-Usage' # for vim + sha256: abc + - name: 'pgaudit15_13-1.5.0' + sha256: abc + - name: 'policycoreutils-python' # for container-selinux + sha256: abc + - name: 'pyldb' # for cifs-utils + sha256: abc + - name: 'python-cffi' # for python2-cryptography + sha256: abc + - name: 'python-firewall' # for firewalld + sha256: abc + - name: 'python-kitchen' # for yum-utils + sha256: abc + - 
name: 'python-lxml' # for java-1.8.0-openjdk-headless + sha256: abc + - name: 'python-psycopg2' + sha256: abc + - name: 'python-pycparser' # for python2-cryptography + sha256: abc + - name: 'python-setuptools' + sha256: abc + - name: 'python-slip-dbus' # for firewalld + sha256: abc + - name: 'python2-cryptography' # for Ansible (certificate modules) + sha256: abc + - name: 'python3-3.6.8' + sha256: abc + - name: 'quota' # for nfs-utils + sha256: abc + - name: 'rabbitmq-server-3.8.9' + sha256: abc + - name: 'rh-haproxy18' + sha256: abc + - name: 'rh-haproxy18-haproxy-syspaths' + sha256: abc + - name: 'postgresql13-server' + sha256: abc + - name: 'repmgr10-5.2.1' # used to upgrade repmgr first + sha256: abc + - name: 'repmgr13-5.2.1' + sha256: abc + - name: 'samba-client' + sha256: abc + - name: 'samba-client-libs' # for samba-client + sha256: abc + - name: 'samba-common' + sha256: abc + - name: 'samba-libs' # for cifs-utils + sha256: abc + - name: 'sysstat' + sha256: abc + - name: 'tar' + sha256: abc + - name: 'telnet' + sha256: abc + - name: 'tmux' + sha256: abc + - name: 'urw-base35-fonts' # for grafana + sha256: abc + - name: 'unzip' + sha256: abc + - name: 'vim-common' # for vim + sha256: abc + - name: 'vim-enhanced' + sha256: abc + - name: 'wget' + sha256: abc + - name: 'xorg-x11-font-utils' # for grafana + sha256: abc + - name: 'xorg-x11-server-utils' # for grafana + sha256: abc + - name: 'yum-plugin-versionlock' + sha256: abc + - name: 'yum-utils' + sha256: abc + + # to make remote-to-remote "synchronize" work in ansible + - name: 'rsync' + sha256: abc + + # K8s v1.18.6 (Epiphany >= v0.7.1) + - name: 'kubeadm-1.18.6' + sha256: abc + - name: 'kubectl-1.18.6' + sha256: abc + - name: 'kubelet-1.18.6' + sha256: abc + + # K8s v1.19.15 (Epiphany >= v1.3 transitional version) + - name: 'kubeadm-1.19.15' + sha256: abc + - name: 'kubectl-1.19.15' + sha256: abc + - name: 'kubelet-1.19.15' + sha256: abc + + # K8s v1.20.12 + - name: 'kubeadm-1.20.12' + sha256: abc + - 
name: 'kubectl-1.20.12' + sha256: abc + - name: 'kubelet-1.20.12' + sha256: abc + + # K8s v1.21.7 (Epiphany >= v1.3, transitional version) + - name: 'kubeadm-1.21.7' + sha256: abc + - name: 'kubectl-1.21.7' + sha256: abc + - name: 'kubelet-1.21.7' + sha256: abc + + # K8s v1.22.4 + - name: 'kubeadm-1.22.4' + sha256: abc + - name: 'kubectl-1.22.4' + sha256: abc + - name: 'kubelet-1.22.4' + sha256: abc + + # Kubernetes Generic + - name: 'kubernetes-cni-0.8.6-0' # since K8s v1.18.6 + sha256: abc + - name: 'kubernetes-cni-0.8.7-0' # since K8s v1.19.15 + sha256: abc + +files: + # Github repository for erlang rpm is used since packagecloud repository is limited to a certain number of versions and erlang package from erlang-solutions repository is much more complex and bigger + - url: 'https://github.com/rabbitmq/erlang-rpm/releases/download/v23.1.5/erlang-23.1.5-1.el7.x86_64.rpm' + sha256: abc + + # Grafana package is not downloaded from repository since it was not reliable (issue #2449) + - url: 'https://dl.grafana.com/oss/release/grafana-7.3.5-1.x86_64.rpm' + sha256: abc diff --git a/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/ubuntu.yml b/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/ubuntu.yml new file mode 100644 index 0000000000..a9979a8eab --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/ubuntu.yml @@ -0,0 +1,420 @@ +--- +packages: + - name: 'adduser' + sha256: 5f7ea9d1d52a2a9c349468f89d160230e21c8542faed1b1a97c23bce873e17b4 + + - name: 'apt-transport-https' + sha256: 1c9d16d9ba2aeb4dbbc5c709d01ac215acf78418a6a52ebccb46cb51173db975 + + - name: 'auditd' + sha256: 3ddfcbb88001810307903781d7310e002fe0521eee43f088b90b4230e955da54 + + - name: 'bash-completion' + sha256: 65b7b824b86e6c270b134959de41c6328caf0c4378e34a65eaaf33fe72c3bbb6 + + - name: 'ca-certificates' + sha256: 8179442c9c582fd71fd3817a579bf5fe9503412c1e879d3ba4f0ed9a761e54f4 + + - 
name: 'cifs-utils' + sha256: 0498cb78158f0c64cb70063f2fc9a5322d7abc422a043718a4f63aa18c3d259b + + - name: 'containerd.io' + sha256: 5730728b1e82003eb32edc5462d46dfa9f443a4425410cf37094607ecfeb6662 + + - name: 'cri-tools=1.13.0*' + sha256: 4ff4588f5589826775f4a3bebd95aec5b9fb591ba8fb89a62845ffa8efe8cf22 + + - name: 'curl' + sha256: 648169e7ef8aef23a8b5c6880cb6a96f51a00ecc81d8e92a3988a8a0e54870a5 + + - name: 'docker-ce=5:20.10.8*' + sha256: d76024725ece03ee4b675e074b58998a63b3f0b773fd18fcf682e3fe4c08b07c + + - name: 'docker-ce-cli=5:20.10.8*' + sha256: d76024725ece03ee4b675e074b58998a63b3f0b773fd18fcf682e3fe4c08b07c + + - name: 'docker-ce-rootless-extras=5:20.10.8*' + sha256: 0cf15642ffe305a3abc8876973ca7290b9758cee895cad8316980e9633c5c597 + + - name: 'ebtables' + sha256: 7f880e5b09ca162900160dbd7d15739780b4c65427e9cabb30f41e48a2cef3ae + + # for opendistroforelasticsearch & logging roles + - name: 'elasticsearch-oss=7.10.2*' + sha256: ab68bdad718d7c3d5c206328f3301493016dc85dcbb39aca9de83dfadffcc25e + + # Erlang packages must be compatible with RabbitMQ version. + # Metapackages such as erlang and erlang-nox must only be used + # with apt version pinning. They do not pin their dependency versions. 
+ # List based on: https://www.rabbitmq.com/install-debian.html#installing-erlang-package + - name: 'erlang-asn1=1:23.1.5*' + sha256: bce3b3db06d4f3ae3487ea539fea39d77f11cefe510ac6642a718acee725989a + + - name: 'erlang-base=1:23.1.5*' + sha256: 943d8f7f299dd27a2b91993a26cc18988039d83c504934d18fe6dbde16f7bd6e + + - name: 'erlang-crypto=1:23.1.5*' + sha256: d00e867f3d4508d78fbffab226c8247af937a9ffde46c2e77257ce1f258144f9 + + - name: 'erlang-eldap=1:23.1.5*' + sha256: 2b982eb5da6adbd450f2f1791349f983fd2fc47bcb3b1c8a1f80412001e3261f + + - name: 'erlang-ftp=1:23.1.5*' + sha256: 59653ae7cdc921ba66fd0cdbdda9730bccdab84f92b78bf2b6bafdfc580abada + + - name: 'erlang-inets=1:23.1.5*' + sha256: 7b54b1e86e7c20c0fde33a10d3fb448baba05bf5afc27c56ed8b9d7f31f934c7 + + - name: 'erlang-mnesia=1:23.1.5*' + sha256: 4060bac983ca1975a6f8701e3bfcb9044a642dcd64a52f0e2873e5e758306924 + + - name: 'erlang-os-mon=1:23.1.5*' + sha256: 9deee80e525470ddc1aeddd84e16726440dc9d66d174ce72535fcd56c39e10ab + + - name: 'erlang-parsetools=1:23.1.5*' + sha256: 4ccd3a68fa3e3511b23f86e139b27ba0d3c3fa6c99a6ae2f0bf4334d28a9f432 + + - name: 'erlang-public-key=1:23.1.5*' + sha256: d2117428607738461cafc450c8711deb1d0afeb9c7144746007a032e66e98983 + + - name: 'erlang-runtime-tools=1:23.1.5*' + sha256: cf86b47be619c29180349473956f248ba2827b5433316e6fca97d512cee40c27 + + - name: 'erlang-snmp=1:23.1.5*' + sha256: f5f3165e2d2ac31b005fe3f422d59c09fb34a226679b5217f9c2e302d380106b + + - name: 'erlang-ssl=1:23.1.5*' + sha256: a15ffb59182926d0e84465a0bd18f133d45293abcc0f848fd52014d749c3defe + + - name: 'erlang-syntax-tools=1:23.1.5*' + sha256: eea9b02271461289ca339087f551dd326fa68dc67929998ac8370e7b44d4046f + + - name: 'erlang-tftp=1:23.1.5*' + sha256: 1c97ead185ffaf838bbcfd9a25f2a7c334496a63b4e597dbd682efad7537e457 + + - name: 'erlang-tools=1:23.1.5*' + sha256: 96ba6054e5e7a64fe88c621f560270c8cc1967e8b10ff8a040e6460d37c215ae + + - name: 'erlang-xmerl=1:23.1.5*' + sha256: 
393aab19e8e43b047230dcf8d83310e1da2bd45d9bb2646eb0b2eeb9b2b703cd + + - name: 'ethtool' + sha256: 886ac450953213f5e557c26c35449c2781020615c545af050c13aa71a870ed55 + + - name: 'filebeat=7.9.2*' + sha256: aed2a5be60bcbfdba3cc816e936793adf8e4a04c08498817c4ee57cdedfbe114 + + - name: 'firewalld' + sha256: fce416886c3b303412d2fddb75002bf174a4167a5fc2deb79d918d6d14389132 + + - name: 'fping' + sha256: 1af2189ef67e6d83da5446758f51429ecb1ed0d61bfda947b0b2a7808bbecd47 + + - name: 'gnupg2' + sha256: 5306077067dc9acb3070dffb24c56ea53c12cf75434068709d3a42a07ed20cfa + + - name: 'htop' + sha256: c803ab48c0f2e097283a2e0673914a758f10779a26a947a77d36a1e4353e85d6 + + - name: 'iftop' + sha256: 710571e93c98c1624f705358da70863953f438fd7983f662ba8e121f91270681 + + - name: 'jq' + sha256: 8e4c8223f6ec158dc6c2a0d065b76c337bb7664e35cbbecd2ad02142d3b83470 + + # for dpkg-scanpackages + - name: 'libdpkg-perl' + sha256: b7f34bb3574e2fae9054649d87a69e8d28a0f2c6a616d07a4f294d5462bb161c + + - name: 'libfontconfig1' + sha256: 7ca5523936a9b8e42f60c0f01c232145809387e5680b05c7bce0bea31844a522 + + - name: 'logrotate' + sha256: ba2b0ec4c730524deede0ff929fa66283f4bf6d76ea1de9d8fc48fa17a2309f4 + + - name: 'netcat' + sha256: f43ecd8753da0ddd048ea49c45c81a7bd7dbbe80b144ea9764cdae34a8472362 + + - name: 'net-tools' + sha256: 591808d272508bcafb28ab0b151e0d03a0c922a7f7f7e6469b41ec64c46b4a90 + + - name: 'nfs-common' + sha256: a3bc69c0b628784ad21278c2be271299351d992b165d87990b57aee7d38a5e84 + + # for nfs-common + - name: 'libtirpc3' + sha256: f82219f75dd5b498b65a3251557d77eed386248c6c2832b7d253b6797993e432 + + - name: 'opendistro-alerting=1.13.1*' + sha256: 291ae3f318dc1bc7a03467ac5e5c1d89cf8f8154a4263e8c6a5948961353a431 + + - name: 'opendistro-index-management=1.13.1*' + sha256: 409d8608a21c35fcc150356bb6777eac9630f4d97621cf903adbbb08db915e1a + + - name: 'opendistro-job-scheduler=1.13.0*' + sha256: 807b441968b70fef36b0e7cf24214803222bf61b3ec61ae62d77793a13c68a9a + + - name: 'opendistro-performance-analyzer=1.13.0*' 
+ sha256: f25b6f78bda5e1a4db36daa9f2e37ed405fe50b4b2fdf491fc5cca0ba227bc6b + + - name: 'opendistro-security=1.13.1*' + sha256: bb378292cdce7c4c6efe15edf3dbf6afc045f17ce6412b124a32b5fb3c92847a + + - name: 'opendistro-sql=1.13.0*' + sha256: 99c340d6e292fbf69e4660ca7f0751afeef29468dd4c271b8a19d15a28c71aa6 + + - name: 'opendistroforelasticsearch-kibana=1.13.1*' + sha256: 1f5becee8cd5456b4f02b8a02c90f3edc1143d8f22423306170f39996390312a + + - name: 'openjdk-8-jre-headless' + sha256: c64dc41216d3f6ab34053e2134558c9071a16bbe417ccd91818edef7783aa065 + + - name: 'openssl' + sha256: 89e000938ab7145862ee476223c8344163083187457dc10f647a73c7b9463e33 + + - name: 'postgresql-13' + sha256: 115898f868fef4d90e76e203e6824ba72e4f7f2c8c599cffc6c2857592ace0bf + + - name: 'python3-psycopg2' + sha256: b2b4e219d8c30ca172d9275582cb57587948d0a9397274b6eea36add6f9e4ee1 + + - name: 'python3-selinux' + sha256: 1fae8d1c046067356d67fa556d7129edf8dc1748ad91db0dbfd4bf64373d654d + + - name: 'rabbitmq-server=3.8.9*' + sha256: 7cec0bf07a56480c51cf8cc920fc9a9747d304f0d533b28b1bbc5ea2a30f52da + + - name: 'smbclient' + sha256: 99cca29bb05d23ca8f03ffe8d83adbce8557d998e2dbe49fff01ce97db3590d1 + + - name: 'samba-common' + sha256: 9a2245a5b9476a06378b59a681846158544dd9e357c46f27497a34dae45a1fd8 + + - name: 'smbclient' + sha256: 99cca29bb05d23ca8f03ffe8d83adbce8557d998e2dbe49fff01ce97db3590d1 + + - name: 'software-properties-common' + sha256: acd6bfc18262a8e99fa6da4fbcf4f6d35225c93dfa04f7cf20c1f41774f3c1d0 + + - name: 'sshpass' + sha256: b31772d638e8debf2c7422bb6e2531da14964e378e31d9c46cba372bc4bd2e0a + + - name: 'sysstat' + sha256: b6a002446c2d339c1d52be3359666b91e13f7cc78fa716e031b942b8e328b8ee + + # for sysstat + - name: 'libsensors5' + sha256: 80a057d35c1b60717368325aacca6f61b009c8314ec8d96de415eba099c2d52f + + - name: 'tar' + sha256: 95de9ec03ea555bb836a6f4a98680773ca5175dd4d8175fba9e6cd60f10e51b0 + + - name: 'telnet' + sha256: 65e6bf503f79c257f2e8d9111b12ec16725f7810424334f83dcb0cdc1f7d9d09 + + - name: 
'tmux' + sha256: a290fc9e6979cd1d755559e3a85cf2675bb5f697a4fae453c46f52bf4993eeb0 + + - name: 'unzip' + sha256: b1e596aec8566f86b4fc36e626a06938d95ff524caecd97dcf9d1951eec87aea + + - name: 'vim' + sha256: c0a2d158eb8dcadef082755ed41203568c86189b8510714d7cb2144fdb09159d + + # to make remote-to-remote "synchronize" work in ansible + - name: 'rsync' + sha256: f3f3edc2c9460a56c45ceaa3e8ee825d4f97a8e7d6e8cf7a68f74e9c87f0566e + + # for curl, issue #869 + - name: 'libcurl4' + sha256: 7c293bf1948c545ba659eae20ae6bc68c5bbf3f3ca9fbf2612dd6027064b54b3 + + # for openjdk-8-jre-headless + - name: 'libnss3' + sha256: ca624bc066d67f4f0aa71ed32abe507f62d02eb001dd3cc840d96270a3572e48 + + - name: 'libcups2' + sha256: 7b8d329aab2d1a10af0e2dc3877dbb5588b16afdb409354a78673bc207bfd223 + + - name: 'libavahi-client3' + sha256: d2fda1d966f1970b05773e497c870b1ebee3044c34db4decfa9052ba73395ed5 + + - name: 'libavahi-common3' + sha256: c830b27d2c9a2b74731909108108c715199cf5cbdc70eb33a000121e9f688008 + + - name: 'libjpeg8' + sha256: baaecbc8e7ef55fc1887365721a7771f7d533fabca38fca878668b9c8f7ee13f + + - name: 'libfontconfig1' + sha256: 7ca5523936a9b8e42f60c0f01c232145809387e5680b05c7bce0bea31844a522 + + - name: 'libxtst6' + sha256: 0e74134a90e54827e14324ae8e44abd057f0b0dd3d3bc15ac616a9b0215f3be9 + + - name: 'fontconfig-config' + sha256: 80b090925c52aff3f4681b361c9823b041c42c57cca58b5baf2541bafc25fed9 + + # for rabbit/erlang + - name: 'libodbc1' + sha256: 59ac60f7462e019ec19e615133fd8393853747bda3c1620e922a1d1ba49a4bf8 + + # for air-gap repo installation + - name: 'apache2' + sha256: c6509e10f14ce311c77a43c8a731d0ab4114b63f35cf7cf40fb9349986a1a388 + + - name: 'apache2-bin' + sha256: 233d6fbe9b17e4b762f3e2ce15ae3a707f27504453da72f211027e62e1f0067b + + - name: 'apache2-utils' + sha256: c6509e10f14ce311c77a43c8a731d0ab4114b63f35cf7cf40fb9349986a1a388 + + # for jq + - name: 'libjq1' + sha256: 5aafb335442d3a694b28204e390c831de9efc3f3a18245328840d406edc8a163 + + # for gnupg2 + - name: 'gnupg' + sha256: 
868e3e9430202263f95fc8b6c92e62bd6cb44158aa5b095d9766f1bfa3794d29 + + - name: 'gpg' + sha256: 3df951e429460cf5837c3f387f014e6440efc9002839327f1b6e43e6d0d9f195 + + - name: 'gpg-agent' + sha256: 8f76c2622351bc2d1bd2c3f2628a328d262c2c04bac7bad8d7044c44d262c562 + + # for azure + - name: 'smbclient' + sha256: 99cca29bb05d23ca8f03ffe8d83adbce8557d998e2dbe49fff01ce97db3590d1 + + - name: 'samba-libs' + sha256: e170dfbf741570e4aae97f4b040106d51ae3ae89a52006663328acf56129404a + + - name: 'libsmbclient' + sha256: 4677d358e332e61015ed7c3d49c774bea963963d3fb6f4a176464f9c90d52d54 + + # postgres related packages + # if version is not specified, it's not related to postgres version and the latest is used + - name: 'pgdg-keyring' + sha256: 48c435aeff4a9a9949bcfac639505a5ba050fd1439e2ee5de3af1a942dd97c74 + + - name: 'postgresql-13-pgaudit=1.5.0*' + sha256: 862c53510538812e3a77201c7b6588539a82f969fbedf7b3f6eb4772adcb0e16 + + - name: 'postgresql-10-repmgr=5.2.1*' + sha256: 84d4fa03bd5035492fdda1ec4af942b1a6fab5232614c28b6de8630c44b0fe1d + + - name: 'postgresql-13-repmgr=5.2.1*' + sha256: 8764b52c72a2eb389266a4e6482dba47dfbc609537ca6991a452f838596b6b7e + + - name: 'postgresql-client-13' + sha256: dd58155a6f371e4ff2d2813f80f109dd675b653d12c4c939f10d6794d07a0cad + + - name: 'postgresql-client-common' + sha256: 2088256e1d84f3efe39c45d71a92e743dad2b24f76b062eb96683b34ba6cae29 + + - name: 'postgresql-common' + sha256: 0793d00857f1b28f29fdfa50bc2119b15327d4d701d2bac9481fa143c410e0da + + - name: 'repmgr-common=5.2.1*' + sha256: c0ab02a35d4f6c2fd7b4f5a632fe12fc69214f443a511427e383d51826e861d9 + + # for firewalld + - name: 'ipset' + sha256: 74369990cfd5b2f6fcfa13b7b9d084c930bd9a60644f7be845d978cda648b57d + + - name: 'python3-decorator' + sha256: bde6c1bc1bf6d26999b7b2e569888a29babd5d6a31c4ad6cb81633ed9f97204b + + - name: 'python3-selinux' + sha256: 1fae8d1c046067356d67fa556d7129edf8dc1748ad91db0dbfd4bf64373d654d + + - name: 'python3-slip' + sha256: 
3b6a5bcead4ea193a0a7921a7268274a012d8ba455e7cb99393059bffa13da14 + + - name: 'python3-slip-dbus' + sha256: c183f91d0d79e8c605cc5695fe6bb1197017ab37549cc5652dbb1c547c75881d + + # for ansible module postgresql_query in role postgres-exporter + - name: 'libpq5' + sha256: 6e171f03d74fbe062abdcac330d10b9e3cb6d5e70849e8d8d92af11dc7ca9822 + + - name: 'python3-psycopg2' + sha256: b2b4e219d8c30ca172d9275582cb57587948d0a9397274b6eea36add6f9e4ee1 + + - name: 'python3-jmespath' + sha256: ef40ebbbc21db690f9a68177d9ab734e402f39236f2bc46ea3d75baa48d75e3a + + # for Ansible (certificate modules) + - name: 'python3-cryptography' + sha256: df7fc94dc9cd20da991147f56c7e2e2d54e1c27f10018aad8096b0c74a6b4550 + + # for python3-cryptography + - name: 'python3-cffi-backend' + sha256: b32482717cd8712359c58a796f76b4984598bb588d9635a1097b7a6ff391c334 + + # K8s v1.18.6 (Epiphany >= v0.7.1) + - name: 'kubeadm=1.18.6*' + sha256: a93baa2c0b6220df4fd3e1c54d0e93ff34a5281b9205720d1f10e3a96498bf9e + + - name: 'kubectl=1.18.6*' + sha256: 637b120b480b3ede53b93a140bc9d40df648e0ae1415da4a4d2584e1989fee92 + + - name: 'kubelet=1.18.6*' + sha256: 104709951795724cd57228d458da3adc3746c77447132f2e1317666b321eebbb + + # K8s v1.19.15 (Epiphany >= v1.3, transitional version) + - name: 'kubeadm=1.19.15*' + sha256: a93baa2c0b6220df4fd3e1c54d0e93ff34a5281b9205720d1f10e3a96498bf9e + + - name: 'kubectl=1.19.15*' + sha256: 637b120b480b3ede53b93a140bc9d40df648e0ae1415da4a4d2584e1989fee92 + + - name: 'kubelet=1.19.15*' + sha256: 104709951795724cd57228d458da3adc3746c77447132f2e1317666b321eebbb + + # K8s v1.20.12 (Epiphany >= v1.3, transitional version) + - name: 'kubeadm=1.20.12*' + sha256: a93baa2c0b6220df4fd3e1c54d0e93ff34a5281b9205720d1f10e3a96498bf9e + + - name: 'kubectl=1.20.12*' + sha256: 637b120b480b3ede53b93a140bc9d40df648e0ae1415da4a4d2584e1989fee92 + + - name: 'kubelet=1.20.12*' + sha256: 104709951795724cd57228d458da3adc3746c77447132f2e1317666b321eebbb + + # K8s v1.21.7 (Epiphany >= v1.3, transitional version) + - 
name: 'kubeadm=1.21.7*' + sha256: a93baa2c0b6220df4fd3e1c54d0e93ff34a5281b9205720d1f10e3a96498bf9e + + - name: 'kubectl=1.21.7*' + sha256: 637b120b480b3ede53b93a140bc9d40df648e0ae1415da4a4d2584e1989fee92 + + - name: 'kubelet=1.21.7*' + sha256: 104709951795724cd57228d458da3adc3746c77447132f2e1317666b321eebbb + + # K8s v1.22.4 + - name: 'kubeadm=1.22.4*' + sha256: a93baa2c0b6220df4fd3e1c54d0e93ff34a5281b9205720d1f10e3a96498bf9e + + - name: 'kubectl=1.22.4*' + sha256: 637b120b480b3ede53b93a140bc9d40df648e0ae1415da4a4d2584e1989fee92 + + - name: 'kubelet=1.22.4*' + sha256: 104709951795724cd57228d458da3adc3746c77447132f2e1317666b321eebbb + + # Kubernetes Generic + # kubernetes-cni-0.8.6 since K8s v1.18.6 + - name: 'kubernetes-cni=0.8.6-00*' + sha256: ca2303ea0eecadf379c65bad855f9ad7c95c16502c0e7b3d50edcb53403c500f + + # kubernetes-cni-0.8.7 since K8s v1.19.15 + - name: 'kubernetes-cni=0.8.7-00*' + sha256: ca2303ea0eecadf379c65bad855f9ad7c95c16502c0e7b3d50edcb53403c500f + +files: + # Switched from APT repo because there was only one (the latest) version available (issue #2262) + - url: 'https://packages.elastic.co/curator/5/debian9/pool/main/e/elasticsearch-curator/elasticsearch-curator_5.8.3_amd64.deb' + sha256: 575a41184899678d9769a8ea120134ec329c41967c94586c1aa6439aa68d4829 + # Grafana package is not downloaded from repository since it was not reliable (issue #2449) + - url: 'https://dl.grafana.com/oss/release/grafana_8.3.2_amd64.deb' + sha256: 3f5ecf5726223314aa3147a24c732cc9ccede86b7d703d4835a6fc69d0fffff8 diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt.py new file mode 100644 index 0000000000..73cc4ef219 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt.py @@ -0,0 +1,35 @@ +from src.command.command import Command + + +class Apt(Command): + """ + Interface for `apt` tool. 
+ """ + + def __init__(self, retries: int): + super().__init__('apt', retries) + + def update(self): + """ + Interface for `apt-get update` + """ + self.run(['update']) + + + def download(self, package: str): + """ + Interface for `apt download package` + + :param package: package to be downloaded + """ + self.run(['download', package]) + + def install(self, package: str, assume_yes: bool = True): + """ + Interface for `apt install package` + + :param package: package to be installed + :param assume_yes: if set to True `-y` flag will be added + """ + no_ask: str = '-y' if assume_yes else '' + self.run(['install', no_ask, package]) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt_cache.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt_cache.py new file mode 100644 index 0000000000..bfff74ffb9 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt_cache.py @@ -0,0 +1,62 @@ +from typing import List + +from src.command.command import Command + + +class AptCache(Command): + """ + Interface for `apt-cache` tool. 
+ """ + def __init__(self, retries: int): + super().__init__('apt-cache', retries) + + def get_package_dependencies(self, package: str) -> List[str]: + """ + Interface for `apt-cache depends` + + :param package: for which dependencies will be gathered + :returns: all required dependencies for `package` + """ + args: List[str] = ['depends', + '--no-recommends', + '--no-suggests', + '--no-conflicts', + '--no-breaks', + '--no-replaces', + '--no-enhances', + '--no-pre-depends', + package] + + raw_output = self.run(args).stdout + + virt_pkg: bool = False # True - virtual package detected, False - otherwise + virt_pkgs: List[str] = [] # cached virtual packages options + deps: List[str] = [] + for dep in raw_output.split('\n'): + if not dep: # skip empty lines + continue + + dep = dep.replace(' ', '') # remove white spaces + + if virt_pkg: + virt_pkgs.append(dep) # cache virtual package option + + if '<' in dep and '>' in dep: # virtual package, more than one dependency to choose + virt_pkg = True + continue + + if 'Depends:' in dep: # new dependency found + virt_pkg = False + + if virt_pkgs: # previous choices cached + # avoid conflicts by choosing only non-cached dependency: + if not any(map(lambda elem: elem in deps, virt_pkgs)): + deps.append(virt_pkgs[0].split('Depends:')[-1]) # pick first from the list + virt_pkgs.clear() + + dep = dep.split('Depends:')[-1] # remove "Depends: + + if not virt_pkg and dep != package: # avoid adding package itself + deps.append(dep) + + return list(set(deps)) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt_key.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt_key.py new file mode 100644 index 0000000000..bfc079da68 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/apt_key.py @@ -0,0 +1,20 @@ +from pathlib import Path + +from src.command.command import Command + + +class AptKey(Command): + """ + Interface for 
`apt-key` tool. + """ + + def __init__(self, retries: int): + super().__init__('apt-key', retries) + + def add(self, key: Path): + """ + Interface for `apt-key add` + + :key: key as file to be added + """ + self.run(['add', str(key)]) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/command.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/command.py new file mode 100644 index 0000000000..993146c8c5 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/command.py @@ -0,0 +1,83 @@ +import logging +import subprocess +from typing import List + +from src.error import CriticalError + + +class Command: + """ + Interface for running subprocesses + """ + + def __init__(self, process_name: str, retries: int, pipe_args: List[str] = None): + self.__proc_name: str = process_name + self.__retries: int = retries + self.__pipe_args: List[str] = pipe_args # used for __pipe__ + + def name(self) -> str: + return self.__proc_name + + def pipe_args(self) -> List[str]: + return self.__pipe_args or [] + + def run(self, args: List[str], + capture_output: bool = True, + accept_nonzero_returncode: bool = False) -> subprocess.CompletedProcess: + """ + Run subprocess with provided arguments + + :param args: additional args which will be used with __proc_name + :capture_output: save stdout/stderr to completed process object + :raises: :class:`CriticalError`: when number of retries exceeded + :returns: completed process object + """ + process_args = [self.__proc_name] + process_args.extend(args) + + additional_args = {'encoding': 'utf-8'} + if capture_output: + additional_args['stdout'] = subprocess.PIPE + additional_args['stderr'] = subprocess.PIPE + + for count in range(self.__retries): + logging.debug(f'[{count + 1}/{self.__retries}] Running: {self.__proc_name} {" ".join(args)} ') + + process = subprocess.run(process_args, **additional_args) + + if accept_nonzero_returncode: + 
return process + + if process.returncode == 0: + return process + + logging.warn(process.stderr) + + raise CriticalError('Retries count reached maximum!') + + def __or__(self, command) -> str: + """ + Run two subprocesses by piping output from the first process to the second process. + + :param command: process onto which output from the first process will be passed + :raises: :class:`CriticalError`: when number of retries exceeded + :returns: final stdout + """ + lproc_name = f'{self.__proc_name} {" ".join(self.__pipe_args)}' + rproc_name = f'{command.name()} {" ".join(command.pipe_args())}' + whole_process_name = f'{lproc_name} | {rproc_name}' + + for count in range(self.__retries): + logging.debug(f'[{count + 1}/{self.__retries}] Running: {whole_process_name}') + + lproc = subprocess.Popen([self.__proc_name] + self.__pipe_args, stdout=subprocess.PIPE) + rproc = subprocess.Popen([command.name()] + command.pipe_args(), stdin=lproc.stdout, stdout=subprocess.PIPE) + lproc.stdout.close() # Allow proc1 to receive a SIGPIPE if proc2 exits. 
+ + output = rproc.communicate()[0].decode() + if rproc.returncode == 0: + return output + + logging.warn(lproc.stderr if not lproc.returncode == 0 else rproc.stderr) + + raise CriticalError('Retries count reached maximum!') diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/crane.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/crane.py new file mode 100644 index 0000000000..80b824e10b --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/crane.py @@ -0,0 +1,52 @@ +from os import chmod +from pathlib import Path +from shutil import move +from tempfile import mkstemp +from typing import List + +from src.command.command import Command + + +class Crane(Command): + """ + Interface for Crane + """ + + def __init__(self, retries: int): + super().__init__('crane', retries) + + def pull(self, image_name: str, + destination: Path, + platform: str, + legacy_format: bool = True, + insecure: bool = True): + """ + Download target image file + + :param image_name: address to the image + :param destination: where to store the downloaded image + :param platform: for which platform file will be downloaded + :param legacy_format: use legacy format + :param insecure: allow image references to be fetched without TLS + """ + crane_params: List[str] = ['pull'] + + if insecure: + crane_params.append('--insecure') + + crane_params.append(f'--platform={platform}') + + if legacy_format: + crane_params.append('--format=legacy') + + crane_params.append(image_name) + + tmpfile = mkstemp() + + crane_params.append(tmpfile[1]) + + self.run(crane_params) + + chmod(tmpfile[1], 0o0644) + + move(tmpfile[1], str(destination)) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/dpkg.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/dpkg.py new file mode 100644 index 0000000000..19f1d5f87a --- /dev/null +++ 
b/ansible/playbooks/roles/repository/files/download-requirements/src/command/dpkg.py @@ -0,0 +1,23 @@ +from typing import List + +from src.command.command import Command + + +class Dpkg(Command): + """ + Interface for `dpkg` + """ + + def __init__(self, retries: int): + super().__init__('dpkg', retries) + + + def list_installed_packages(self) -> List[str]: + """ + List all installed packages on the current OS. + + :returns: packages installed on the machine + """ + + proc = self.run(['-l']) + return proc.stdout.split('\n') diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/repoquery.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/repoquery.py new file mode 100644 index 0000000000..d9d9f1ba50 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/repoquery.py @@ -0,0 +1,45 @@ +from typing import List + +from src.command.command import Command +from src.error import CriticalError + + +class Repoquery(Command): + """ + Interface for `repoquery` + """ + + def __init__(self, retries: int): + super().__init__('repoquery', retries) + + def query(self, package: str, + queryformat: str, + arch: str, + requires: bool = False, + resolve: bool = False) -> List[str]: + args: List[str] = [] + + if requires: + args.append('--requires') + + if resolve: + args.append('--resolve') + + args.extend(['--queryformat', queryformat]) + args.append(f'--archlist={arch},noarch') + args.append(package) + + output = self.run(args).stdout + # yumdownloader doesn't set error code if repoquery returns empty output + if not output: + raise CriticalError(f'repquery failed for package `{package}`, reason: package not found') + elif 'error' in output: + raise CriticalError(f'repquery failed for package `{package}`, reason: `{output}`') + + packages: List[str] = [] + for line in output.split('\n'): + if line: + packages.append(line) + + return packages + diff --git 
a/ansible/playbooks/roles/repository/files/download-requirements/src/command/rpm.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/rpm.py new file mode 100644 index 0000000000..6028b3834a --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/rpm.py @@ -0,0 +1,36 @@ +from typing import List + +from src.command.command import Command + + +class Rpm(Command): + """ + Interface for `rpm` + """ + + def __init__(self, retries: int): + super().__init__('rpm', retries) + + def is_package_installed(self, package: str) -> bool: + """ + Check if `package` is installed on the OS. + + :param package: to be checked if installed + :returns: True - package installed, False - otherwise + """ + args: List[str] = ['--query', + '--quiet', + f'{package}'] + + if self.run(args).returncode == 0: + return True + + return False + + def import_key(self, key: str): + """ + Import pgp key by the `rpm` + + :key: key to be added + """ + self.run(['--import', key]) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/tar.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/tar.py new file mode 100644 index 0000000000..4ce7f92464 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/tar.py @@ -0,0 +1,99 @@ +from pathlib import Path +from typing import List + +from src.command.command import Command + + +class Tar(Command): + """ + Interface for `tar` + """ + + def __init__(self): + super().__init__('tar', 1) + + def pack(self, filename: Path, + target: str, + directory: Path = None, + verbose: bool = False, + compress: bool = False, + verify: bool = False): + """ + Create a tar archive + + :param filename: name for the archive to be created + :param target: files to be archived + :param directory: change directory before doing any actions + :param verbose: use verbose mode + :param uncompress: use zlib compression + :param 
verify: check file integrity + """ + short_flags: List[str] = ['-c'] # -czvf flags + tar_params: List[str] = [str(filename)] # all the other params + + if compress: + short_flags.append('z') + + if verbose: + short_flags.append('v') + + short_flags.append('f') + + if verify: + tar_params.append('--verify') + + if directory is not None: + tar_params.extend(['--directory', str(directory)]) + + if target: + tar_params.append(target) + + self.run([''.join(short_flags)] + tar_params) + + def unpack(self, filename: Path, + target: str = '', + absolute_name: bool = False, + directory: Path = None, + overwrite: bool = True, + strip_components: int = 0, + uncompress: bool = True, + verbose: bool = False): + """ + Unpack a tar archive + + :param filename: file to be extracted + :param target: name for the output file + :param absolute_name: use abs path names + :param directory: change directory before doing any actions + :param overwrite: overwrite existing files when extracting + :param strip_components: strip leading components from file names on extraction + :param uncompress: use zlib compression + :param verbose: use verbose mode + """ + short_flags: List[str] = ['-x'] # -xzvf flags + tar_params: List[str] = [str(filename)] # all the other params + + if uncompress: + short_flags.append('z') + + if verbose: + short_flags.append('v') + + short_flags.append('f') + + if absolute_name == True: + tar_params.append('--absolute-names') + + if directory is not None: + tar_params.extend(['--directory', str(directory)]) + + if strip_components: + tar_params.append(f'--strip-components={str(strip_components)}') + + if target: + tar_params.append(target) + + if overwrite: + tar_params.append('--overwrite') + + self.run([''.join(short_flags)] + tar_params) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/toolchain.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/toolchain.py new file mode 100644 index 
0000000000..f674806961 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/toolchain.py @@ -0,0 +1,52 @@ +from src.command.apt import Apt +from src.command.apt_cache import AptCache +from src.command.apt_key import AptKey +from src.command.crane import Crane +from src.command.dpkg import Dpkg +from src.command.repoquery import Repoquery +from src.command.rpm import Rpm +from src.command.tar import Tar +from src.command.wget import Wget +from src.command.yum import Yum +from src.command.yum_config_manager import YumConfigManager +from src.command.yumdownloader import Yumdownloader + + +class Toolchain: + """ + Common tools used across all distributions + """ + + def __init__(self, retries: int): + self.crane = Crane(retries) + self.tar = Tar() + self.wget = Wget(retries) + + +class RedHatFamilyToolchain(Toolchain): + """ + Specific tools used by RedHat based distributions + """ + + def __init__(self, retries: int): + super().__init__(retries) + + self.repoquery = Repoquery(retries) + self.rpm = Rpm(retries) + self.yum = Yum(retries) + self.yumdownloader = Yumdownloader(retries) + self.yum_config_manager = YumConfigManager(retries) + + +class DebianFamilyToolchain(Toolchain): + """ + Specific tools used by Debian based distributions + """ + + def __init__(self, retries: int): + super().__init__(retries) + + self.apt = Apt(retries) + self.apt_cache = AptCache(retries) + self.apt_key = AptKey(retries) + self.dpkg = Dpkg(retries) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/wget.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/wget.py new file mode 100644 index 0000000000..8faba3790c --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/wget.py @@ -0,0 +1,52 @@ +from enum import Enum +from pathlib import Path +from typing import List + +from src.command.command import Command + + +class IPFamily(Enum): + IPV4 = 
'IPv4' + IPV6 = 'IPv6' + + +class Wget(Command): + """ + Interface for `wget` + """ + + def __init__(self, retries: int): + super().__init__('wget', retries) + self.__download_params: List[str] = [ + '--no-use-server-timestamps', + '--continue', + '--show-progress' + ] + + def download(self, url: str, + output_document: Path = None, + directory_prefix: Path = None, + ip_family: IPFamily = IPFamily.IPV4, + additional_params: bool = True): + """ + Download target file + + :param url: file to be downloaded + :param output_document: downloaded file will be stored under this path + :param directory_prefix: downloaded file will be stored under this path, keep original filename + :param ip_family: which IP version to be used + """ + output_params: List[str] = [] + if additional_params: + output_params.extend(self.__download_params) + + if output_document is not None: + output_params.append('-O') + output_params.append(str(output_document)) + + if directory_prefix is not None: + output_params.append(f'--directory-prefix={str(directory_prefix)}') + + output_params.append(f'--prefer-family={ip_family.value}') + + self.run(output_params + [url]) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/yum.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/yum.py new file mode 100644 index 0000000000..bbd7dff694 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/yum.py @@ -0,0 +1,93 @@ +from typing import List + +from src.command.command import Command + + +class Yum(Command): + """ + Interface for `yum` + """ + + def __init__(self, retries: int): + super().__init__('yum', retries) + + def update(self, enablerepo: str, + package: str = None, + disablerepo: str = '*', + assume_yes: bool = True): + """ + Interface for `yum update` + + :param enablerepo: + :param package: + :param disablerepo: + :param assume_yes: if set to True, -y flag will be used + """ + update_parameters: 
List[str] = ['update'] + + update_parameters.append('-y' if assume_yes else '') + + if package is not None: + update_parameters.append(package) + + update_parameters.append(f'--disablerepo={disablerepo}') + update_parameters.append(f'--enablerepo={enablerepo}') + + self.run(update_parameters) + + def install(self, package: str, + assume_yes: bool = True): + """ + Interface for `yum install -y` + + :param package: packaged to be installed + :param assume_yes: if set to True, -y flag will be used + """ + no_ask: str = '-y' if assume_yes else '' + self.run(['install', no_ask, package]) + + def is_repo_enabled(self, repo: str) -> bool: + output = self.run(['repolist', + 'enabled']).stdout + if repo in output: + return True + + return False + + def find_rhel_repo_id(self, patterns: List[str]) -> List[str]: + output = self.run(['repolist', + 'all']).stdout + + repos: List[str] = [] + for line in output.split('\n'): + for pattern in patterns: + if pattern in line: + repos.append(pattern) + + return repos + + def accept_keys(self): + # to accept import of repo's GPG key (for repo_gpgcheck=1) + self.run(['-y', 'repolist']) + + def is_repo_available(self, repo: str) -> bool: + retval = self.run(['-q', + '--disablerepo=*', + f'--enablerepo={repo}', + 'repoinfo']).returncode + + if retval == 0: + return True + + return False + + def makecache(self, fast: bool = True, + assume_yes: bool = True): + args: List[str] = ['makecache'] + + args.append('-y' if assume_yes else '') + + if fast: + args.append('fast') + + self.run(args) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/yum_config_manager.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/yum_config_manager.py new file mode 100644 index 0000000000..1815c79ec6 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/yum_config_manager.py @@ -0,0 +1,19 @@ +from src.command.command import Command + + +class 
YumConfigManager(Command): + """ + Interface for `yum-config-manager` + """ + + def __init__(self, retries: int): + super().__init__('yum-config-manager', retries) + + def enable_repo(self, repo: str): + self.run(['--enable', repo]) + + def add_repo(self, repo: str): + self.run(['--add-repo', repo]) + + def disable_repo(self, repo: str): + self.run(['--disable', repo]) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/yumdownloader.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/yumdownloader.py new file mode 100644 index 0000000000..444310c1b5 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/yumdownloader.py @@ -0,0 +1,29 @@ +from pathlib import Path +from typing import List + +from src.command.command import Command + + +class Yumdownloader(Command): + """ + Interface for `yumdownloader` + """ + + def __init__(self, retries: int): + super().__init__('yumdownloader', retries) + + def download_packages(self, packages: List[str], + arch: str, + exclude: str, + destdir: Path, + quiet: bool = True): + args: List[str] = [] + + if quiet: + args.append('--quiet') + + args.append(f'--archlist={arch}') + args.append(f'--exclude={exclude}') + args.append(f'--destdir={str(destdir)}') + + self.run(args + packages) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/config.py b/ansible/playbooks/roles/repository/files/download-requirements/src/config.py new file mode 100644 index 0000000000..6359d53360 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/config.py @@ -0,0 +1,191 @@ +import logging +import os +import sys +from argparse import ArgumentParser, RawTextHelpFormatter +from enum import Enum +from pathlib import Path +from typing import List + +from src.error import CriticalError + + +class OSType(Enum): + """ Supported distribution types """ + Ubuntu='ubuntu' + RedHat='redhat' + CentOS='centos' + + 
+class OSArch(Enum): + """ Supported architecture types """ + X86_64='x86_64' + ARM64='arm64' + + +class Config: + def __init__(self, argv: List[str]): + self.dest_dashboards: Path + self.dest_dir: Path + self.dest_files: Path + self.dest_images: Path + self.dest_packages: Path + self.distro_subdir: Path + self.enable_backup: bool + self.log_file: Path + self.os_arch: OSArch + self.os_type: OSType + self.repo_path: Path + self.repos_backup_file: Path + self.reqs_path: Path + self.retries: int + self.script_path: Path + + self.__add_args(argv) + self.__log_info_summary() + + def __log_info_summary(self): + """ + Helper function for printing all parsed arguments + """ + + lines: List[str] = ['Info summary:'] + LINE_SIZE: int = 50 + lines.append('-' * LINE_SIZE) + + lines.append(f'OS Arch: {self.os_arch.value}') + lines.append(f'OS Type: {self.os_type.value}') + lines.append(f'Script location: {str(self.script_path.absolute())}') + lines.append(f'Directories used:') + lines.append(f'- dashboards: {str(self.dest_dashboards)}') + lines.append(f'- files: {str(self.dest_files)}') + lines.append(f'- images: {str(self.dest_images)}') + lines.append(f'- packages: {str(self.dest_packages)}') + + lines.append(f'Enable repos backup: {"Yes" if self.enable_backup else "No"}') + if self.enable_backup: + lines.append(f'Repos backup file: {str(self.repos_backup_file)}') + + lines.append(f'Log file location: {str(self.log_file.absolute())}') + lines.append(f'Retries count: {self.retries}') + + lines.append('-' * LINE_SIZE) + + logging.info('\n'.join(lines)) + + def __create_parser(self) -> ArgumentParser: + parser = ArgumentParser(description='Download Requirements', formatter_class=RawTextHelpFormatter) + + # required arguments: + parser.add_argument('destination_dir', metavar='DEST_DIR', type=Path, action='store', nargs='+', + help='requirements will be downloaded to this directory') + parser.add_argument('os_type', metavar='OS_TYPE', type=str, action='store', nargs='+', + 
help='which of the supported OS will be used: (`centos`, `debian`, `redhat`, `detect`)\n' + 'when using `detect`, script will try to find out which OS is being used') + + # optional arguments: + parser.add_argument('--enable-repos-backup', '-b', action='store_true', dest='enable_backup', default=False, + help=('when used, backup archive for packages will be created and used')), + parser.add_argument('--repos-backup-file', metavar='BACKUP_FILE', action='store', + dest='repos_backup_file', default='/var/tmp/enabled-system-repos.tar', + help='path to a backup file') + parser.add_argument('--retries-count', '-r', metavar='COUNT', type=int, action='store', dest='retries', + default=3, help='how many retries before stopping operation') + + + parser.add_argument('--log-file', '-l', metavar='LOG_FILE', type=Path, action='store', dest='log_file', + default=Path('./download-requirements.log'), + help='logs will be saved to this file') + parser.add_argument('--log-level', metavar='LOG_LEVEL', type=str, action='store', dest='log_level', + default='info', help='set up log level, available levels: (`error`, `warn`, `info`, `debug`)') + parser.add_argument('--no-logfile', action='store_true', dest='no_logfile', + help='no logfile will be created') + + return parser + + def __get_matching_os_type(self, os_type: str) -> OSType: + """ + Check if the parsed OS type fits supported distributons. + + :param os_type: distro type to be checked + :raise: on failure - CriticalError + """ + + for os in OSType: + if os.value.upper() in os_type.upper(): + logging.info(f'Found Matching OS: `{os.value}`') + return os + + raise CriticalError('Could not detect OS type') + + def __detect_os_type(self) -> OSType: + """ + On most modern GNU/Linux OSs info about current distribution + can be found at /etc/os-release. + Check this file to find out on which distro this script is ran. 
+ """ + + os_release = Path('/etc/os-release') + + if os_release.exists(): + with open(os_release) as os_release_handler: + for line in os_release_handler.readlines(): + if 'NAME' in line: + return self.__get_matching_os_type(line.split('=')[1]) + + raise CriticalError('Could not detect OS type') + + def __setup_logger(self, log_level: str, log_file: Path, no_logfile: bool): + # setup the logger: + log_levels = { + # map input log level to Python's logging library + 'error': logging.ERROR, + 'warn': logging.WARN, + 'info': logging.INFO, + 'debug': logging.DEBUG + } + + log_format = '%(asctime)s [%(levelname)s]: %(message)s' + + # add stdout logger: + logging.basicConfig(stream=sys.stdout, level=log_levels[log_level.lower()], + format=log_format) + + # add log file: + if not no_logfile: + root_logger = logging.getLogger() + file_handler = logging.FileHandler(log_file) + file_handler.setLevel(log_levels[log_level.lower()]) + file_handler.setFormatter(logging.Formatter(fmt=log_format)) + root_logger.addHandler(file_handler) + + def __add_args(self, argv: List[str]): + """ + Run the parser and add all of the arguments to the Config object. 
+ + :param argv: input arguments to be parsed + """ + + self.script_path = Path(argv[0]).absolute().parents[0] + self.repo_path = self.script_path / 'repositories' + self.reqs_path = self.script_path / 'requirements' + + args = self.__create_parser().parse_args(argv[1:]).__dict__ + + self.log_file = args['log_file'] + self.__setup_logger(args['log_level'], self.log_file, args['no_logfile']) + + # add required arguments: + self.os_type = self.__detect_os_type() if args['os_type'][0] == 'detect' else self.__get_matching_os_type(args['os_type'][0]) + self.dest_dir = args['destination_dir'][0] + self.dest_dashboards = self.dest_dir / 'dashboards' + self.dest_files = self.dest_dir / 'files' + self.dest_images = self.dest_dir / 'images' + self.dest_packages = self.dest_dir / 'packages' + + # add optional arguments + self.enable_backup = args['enable_backup'] + self.os_arch = OSArch(os.uname().machine) + self.repos_backup_file = Path(args['repos_backup_file']) + self.retries = args['retries'] + + self.distro_subdir = Path(f'{self.os_arch.value}/{self.os_type.value}') diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/error.py b/ansible/playbooks/roles/repository/files/download-requirements/src/error.py new file mode 100644 index 0000000000..0555839ee0 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/error.py @@ -0,0 +1,12 @@ +import logging + + +class CriticalError(Exception): + """ + Raised when there was an error that could not be fixed by + download-requirements script. 
+ """ + + def __init__(self, msg: str): + super().__init__() + logging.error(msg) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/base_mode.py b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/base_mode.py new file mode 100644 index 0000000000..cb7e5c292a --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/base_mode.py @@ -0,0 +1,250 @@ +import logging +from collections import defaultdict +from os import chmod +from pathlib import Path +from typing import Dict, List +from hashlib import sha256 + +import yaml + +from src.command.toolchain import Toolchain +from src.config import Config +from src.error import CriticalError + + +def get_sha256(req_path: Path) -> str: + """ + Calculate sha256 value for `req_path` file. + + :param req_path: of which file to calculate sha256 + :returns: calculated sha256 value, "-1" if file not found + """ + try: + with open(req_path, mode='rb') as req_file: + shagen = sha256() + shagen.update(req_file.read()) + return shagen.hexdigest() + except FileNotFoundError: + return "-1" + + +class BaseMode: + """ + An abstract class for running specific operations on target OS. + Main running method is :func:`~base_mode.BaseMode.run` + """ + + def __init__(self, config: Config): + self._cfg = config + + self._repositories: Dict[str, Dict] = self.__parse_repositories() + self._requirements: Dict[str, List[Dict]] = self.__parse_requirements() + + self._tools: Toolchain = self._construct_toolchain() + + def __parse_repositories(self) -> Dict[str, Dict]: + """ + Load repositories for target architecture/distro from a yaml file. + + :returns: parsed repositories data + """ + stream = open(self._cfg.repo_path / f'{self._cfg.distro_subdir}.yml') + return yaml.safe_load(stream)['repositories'] + + def __parse_requirements(self) -> Dict[str, List[Dict]]: + """ + Load requirements for target architecture/distro from a yaml file. 
+ + :returns: parsed requirements data + """ + reqs = defaultdict(list) + + # target distro requirements + stream = open(self._cfg.reqs_path / f'{self._cfg.distro_subdir}.yml') + content = yaml.safe_load(stream) + for key in content.keys(): + reqs[key].extend(content[key]) + + for common_reqs in ['crane', 'files', 'images', 'dashboards']: + stream = open(self._cfg.reqs_path / f'{common_reqs}.yml') + content = yaml.safe_load(stream) + reqs[common_reqs].extend(content[common_reqs]) + + return reqs + + def _construct_toolchain(self) -> Toolchain: + """ + Setup suitable toolchain for target OS. + + :returns: newly constructed toolchain object + """ + raise NotImplementedError + + def _use_backup_repositories(self): + """ + Check if there were any critical issues and if so, try to restore the state using backup + """ + raise NotImplementedError + + def _add_third_party_repositories(self): + """ + Add third party repositories for target OS's package manager + """ + raise NotImplementedError + + def _install_base_packages(self): + """ + Ensure that packages for file downloading are installed on the OS. + """ + raise NotImplementedError + + def _download_packages(self): + """ + Download packages under `self._requirements['packages']` using target OS's package manager + """ + raise NotImplementedError + + def _download_file(self, file: str): + """ + Run command for downloading `file` on target OS. + + :param file: to be downloaded + """ + raise NotImplementedError + + def _download_dashboard(self, dashboard: str, output_file: Path): + """ + Run command for downloading `dashboard` on target OS. 
+ + :param dashboard: to be downloded + :param output_file: under which filename dashboard will be saved + """ + raise NotImplementedError + + def __download_files(self): + """ + Download files under `self._requirements['files']` + """ + for file in self._requirements['files']: + try: + filepath = self._cfg.dest_files / file['url'].split('/')[-1] + if file['sha256'] == get_sha256(filepath): + logging.debug(f'- {file["url"]} - checksum ok, skipped') + continue + + logging.info(f'- {file["url"]}') + self._download_file(file['url']) + except CriticalError: + logging.warn(f'Could not download file: {file["url"]}') + + def _download_dashboards(self): + """ + Download dashboards under `self._requirements['dashboards']` + """ + for dashboard in self._requirements['dashboards']: + try: + output_file = self._cfg.dest_dashboards / f'{dashboard["name"]}.json' + + if dashboard['sha256'] == get_sha256(output_file): + logging.debug(f'- {dashboard["name"]} - checksum ok, skipped') + continue + + logging.info(f'- {dashboard["name"]}') + self._download_dashboard(dashboard['url'], output_file) + except CriticalError: + logging.warn(f'Could not download file: {dashboard["name"]}') + + def _download_crane(self): + """ + Download Crane package if needed and setup it's environment + """ + crane_path = self._cfg.dest_dir / 'crane' + crane_package_path = Path(f'{crane_path}.tar.gz') + + if self._requirements['crane'][0]['sha256'] == get_sha256(crane_package_path): + logging.debug(f'crane - checksum ok, skipped') + return + + self._tools.wget.download(self._requirements['crane'][0]['url'], crane_package_path) + self._tools.tar.unpack(crane_package_path, 'crane', directory=self._cfg.dest_dir) + chmod(crane_path, 0o0755) + + # create symlink to the crane file so that it'll be visible in shell + crane_symlink = Path('/usr/bin/crane') + if not crane_symlink.exists(): + crane_symlink.symlink_to(crane_path) + + def _download_images(self): + """ + Download images under 
`self._requirements['images']` using Crane
+        """
+        platform: str = 'linux/amd64' if self._cfg.os_arch.value == 'x86_64' else 'linux/arm64'
+        for image in self._requirements['images']:
+            try:
+                url, version = image['name'].split(':')
+                filename = Path(f'{url.split("/")[-1]}_{version}.tar')  # format: image_version.tar
+
+                if image['sha256'] == get_sha256(self._cfg.dest_images / filename):
+                    logging.debug(f'- {image["name"]} - checksum ok, skipped')
+                    continue
+
+                logging.info(f'- {image["name"]}')
+                self._tools.crane.pull(image['name'], self._cfg.dest_images / filename, platform)
+            except CriticalError:
+                logging.warn(f'Could not download image: `{image["name"]}`')
+
+    def _cleanup(self):
+        """
+        Optional step for cleanup routines
+        """
+        pass
+
+    def run(self):
+        """
+        Run target mode.
+
+        :raises:
+            :class:`CriticalError`: can be raised on exceeding retries
+            :class:`Exception`: on I/O OS failures
+        """
+        # add required directories
+        self._cfg.dest_dashboards.mkdir(exist_ok=True, parents=True)
+        self._cfg.dest_files.mkdir(exist_ok=True, parents=True)
+        self._cfg.dest_images.mkdir(exist_ok=True, parents=True)
+        self._cfg.dest_packages.mkdir(exist_ok=True, parents=True)
+
+        logging.info('Checking backup repositories...')
+        self._use_backup_repositories()
+        logging.info('Done checking backup repositories.')
+
+        logging.info('Adding third party repositories...')
+        self._add_third_party_repositories()
+        logging.info('Done adding third party repositories.')
+
+        logging.info('Installing base packages...')
+        self._install_base_packages()
+        logging.info('Done installing base packages.')
+
+        logging.info('Downloading packages...')
+        self._download_packages()
+        logging.info('Done downloading packages.')
+
+        logging.info('Downloading files...')
+        self.__download_files()
+        logging.info('Done downloading files.')
+
+        logging.info('Downloading dashboards...')
+        self._download_dashboards()
+        logging.info('Done downloading dashboards.')
+
+        logging.info('Downloading Crane...')
+
self._download_crane() + logging.info('Done downloading Crane.') + + logging.info('Downloading images...') + self._download_images() + logging.info('Done downloading images.') + + logging.info('Running cleanup...') + self._cleanup() + logging.info('Done running cleanup.') diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/debian_family_mode.py b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/debian_family_mode.py new file mode 100644 index 0000000000..650c5fd1ae --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/debian_family_mode.py @@ -0,0 +1,130 @@ +from pathlib import Path +from shutil import move +from typing import List +import logging +import os + +from src.command.toolchain import DebianFamilyToolchain, Toolchain +from src.config import Config +from src.mode.base_mode import BaseMode, get_sha256 + + +def checksum_matching(sha256sum: str, matching_pkgs: List[Path]) -> bool: + # check whether any of the matching file has valid checksum + for pkg in matching_pkgs: + if sha256sum == get_sha256(pkg): + return True + + return False + + +class DebianFamilyMode(BaseMode): + """ + Used by distros based of Debian GNU/Linux + """ + + def __init__(self, config: Config): + super().__init__(config) + self.__create_repo_paths() + + def _construct_toolchain(self) -> Toolchain: + return DebianFamilyToolchain(self._cfg.retries) + + def __create_repo_paths(self): + for repo in self._repositories.keys(): + self._repositories[repo]['path'] = Path(self._repositories[repo]['path']) + + def _use_backup_repositories(self): + sources = Path('/etc/apt/sources.list') + if not sources.exists() or not sources.stat().st_size: + if self._cfg.repos_backup_file.exists() and self._cfg.enable_backup: + logging.warn('OS repositories seems missing, restoring...') + self._tools.tar.unpack(filename=self._cfg.repos_backup_file, + target='.', + directory=Path('/'), + absolute_name=True, + 
verbose=True) + else: + logging.warn(f'{str(sources)} seems to be missing, you either know what you are doing or ' + 'you need to fix your repositories') + + def _add_third_party_repositories(self): + # backup custom repositories to avoid possible conflicts + for repo_file in Path('/etc/apt/sources.list.d').iterdir(): + if repo_file.name.endswith('.list'): + repo_file.rename(f'{repo_file}.bak') + + # add third party keys + for repo in self._repositories: + data = self._repositories[repo] + key_file = Path(f'/tmp/{repo}') + self._tools.wget.download(data['key'], key_file) + self._tools.apt_key.add(key_file) + + # create repo files + for repo in self._repositories: + data = self._repositories[repo] + with data['path'].open(mode='a') as repo_handler: + repo_handler.write(data['content']) + + self._tools.apt.update() + + def _install_base_packages(self): + # install prerequisites which might be missing + installed_packages = self._tools.dpkg.list_installed_packages() + + logging.info('Installing base packages:') + for package in ['wget', 'gpg', 'curl', 'tar']: + if package not in installed_packages: + self._tools.apt.install(package, assume_yes=True) + logging.info(f'- {package}') + + def _download_packages(self): + for package in self._requirements['packages']: + pkg_base_name = package['name'].split('=')[0] + pkg_dir = self._cfg.dest_packages / pkg_base_name + pkg_dir.mkdir(exist_ok=True, parents=True) # make sure that the dir exists + + # Files downloaded by `apt download` cannot have custom names + # and they always starts with a package name + versioning and other info. 
+ # Find if there is a file corresponding with it's package name + matching_pkgs: List[Path] = [pkg_file for pkg_file in pkg_dir.iterdir() if + pkg_file.name.startswith(pkg_base_name)] + + if checksum_matching(package['sha256'], matching_pkgs): + logging.debug(f'- {package["name"]} - checksum ok, skipped') + continue + + logging.info(f'- {package["name"]}') + + # path needs to be changed since `apt download` does not allow to set target dir + os.chdir(pkg_dir) + + # resolve dependencies for target package and if needed, download them first + deps: List[str] = self._tools.apt_cache.get_package_dependencies(package['name']) + + for dep in deps: + logging.info(f'-- {dep}') + self._tools.apt.download(dep) + + # finally download target package + self._tools.apt.download(package['name']) + + os.chdir(self._cfg.script_path) + + def _download_file(self, file: str): + self._tools.wget.download(file, directory_prefix=self._cfg.dest_files) + + def _download_dashboard(self, dashboard: str, output_file: Path): + self._tools.wget.download(dashboard, output_document=output_file) + + def _cleanup(self): + # cleaning up 3rd party repositories + for data in self._repositories.values(): + if data['path'].exists(): + data['path'].unlink() + + # restore masked custom repositories to their original names + for repo_file in Path('/etc/apt/sources.list.d').iterdir(): + if repo_file.name.endswith('.bak'): + move(str(repo_file.absolute()), str(repo_file.with_suffix('').absolute())) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/red_hat_family_mode.py b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/red_hat_family_mode.py new file mode 100644 index 0000000000..ce5ad6735a --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/red_hat_family_mode.py @@ -0,0 +1,138 @@ +import logging +from pathlib import Path +from typing import List + +from src.command.command import Command +from 
src.command.toolchain import RedHatFamilyToolchain, Toolchain +from src.config import Config +from src.mode.base_mode import BaseMode + + +class RedHatFamilyMode(BaseMode): + """ + Used by distros based of RedHat GNU/Linux + """ + + def __init__(self, config: Config): + super().__init__(config) + + def _construct_toolchain(self) -> Toolchain: + return RedHatFamilyToolchain(self._cfg.retries) + + def _use_backup_repositories(self): + sources = Path('/etc/yum.repos.d/epirepo.repo') + if not sources.exists() or not sources.stat().st_size: + if self._cfg.repos_backup_file.exists() and self._cfg.enable_backup: + logging.warn('OS repositories seems missing, restoring...') + self._tools.tar.unpack(filename=self._cfg.repos_backup_file, + target='.', + directory=Path('/'), + absolute_name=True, + verbose=True) + else: + logging.warn(f'{str(sources)} seems to be missing, you either know what you are doing or ' + 'you need to fix your repositories') + + def __enable_repos(self, repo_id_patterns: List[str]): + """ + :param repo_id_patterns: + """ + for repo in self._tools.yum.find_rhel_repo_id(repo_id_patterns): + if not self._tools.yum.is_repo_enabled(repo): + self._tools.yum_config_manager.enable_repo(repo) + + def _add_third_party_repositories(self): + # backup custom repositories to avoid possible conflicts + for repo_file in Path('/etc/yum.repos.d/').iterdir(): + if repo_file.name.endswith('.repo'): + repo_file.rename(f'{repo_file}.bak') + + # Fix for RHUI client certificate expiration [#2318] + if self._tools.yum.is_repo_enabled('rhui-microsoft-azure-rhel'): + self._tools.yum.update('rhui-microsoft-azure-rhel') + + # -> rhel-7-server-extras-rpms # for container-selinux package, this repo has different id names on clouds + # About rhel-7-server-extras-rpms: https://access.redhat.com/solutions/3418891 + repo_id_patterns = ['rhel-7-server-extras-rpms', + 'rhui-rhel-7-server-rhui-extras-rpms', + 'rhui-REGION-rhel-server-extras', + 'rhel-7-server-rhui-extras-rpms'] # 
on-prem|Azure|AWS7.8|AWS7.9 + self.__enable_repos(repo_id_patterns) + + # -> rhel-server-rhscl-7-rpms # for Red Hat Software Collections (RHSCL), this repo has different id names on clouds + # About rhel-server-rhscl-7-rpms: https://access.redhat.com/solutions/472793 + repo_id_patterns = ['rhel-server-rhscl-7-rpms', + 'rhui-rhel-server-rhui-rhscl-7-rpms', + 'rhui-REGION-rhel-server-rhscl', + 'rhel-server-rhui-rhscl-7-rpms'] # on-prem|Azure|AWS7.8|AWS7.9 + self.__enable_repos(repo_id_patterns) + + for repo in self._repositories: + repo_filepath = Path('/etc/yum.repos.d') / f'{repo}.repo' + content = self._repositories[repo]['data'] + content = content + f'\ngpgkey={" ".join(self._repositories[repo]["gpgkeys"])}' + + if not self._tools.yum.is_repo_enabled(repo): + with open(repo_filepath, mode='w') as repo_handler: + repo_handler.write(content) + + for key in self._repositories[repo]['gpgkeys']: + self._tools.rpm.import_key(key) + + self._tools.yum.accept_keys() + + # Official Docker CE repository, added with https://download.docker.com/linux/centos/docker-ce.repo, + # has broken URL (https://download.docker.com/linux/centos/7Server/x86_64/stable) for longer time. + # So direct (patched) link is used first if available. 
+ if self._tools.yum.is_repo_available('docker-ce-stable-patched'): + self._tools.yum_config_manager.disable_repo('docker-ce-stable-patched') + + if not self._tools.yum.is_repo_enabled('docker-ce'): + self._tools.yum_config_manager.add_repo('https://download.docker.com/linux/centos/docker-ce.repo') + self._tools.yum.accept_keys() + + for repo in ['https://dl.2ndquadrant.com/default/release/get/10/rpm', # for repmgr + 'https://dl.2ndquadrant.com/default/release/get/13/rpm']: + Command('curl', self._cfg.retries, [repo]) | Command('bash', self._cfg.retries) # curl {repo} | bash + + # script adds 2 repositories, only 1 is required + for repo in ['2ndquadrant-dl-default-release-pg10-debug', + '2ndquadrant-dl-default-release-pg13-debug']: + self._tools.yum_config_manager.disable_repo(repo) + + def _install_base_packages(self): + # some packages are from EPEL repo + if not self._tools.rpm.is_package_installed('epel-release'): + self._tools.yum.install('https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm') + + # clean metadata for upgrades (when the same package can be downloaded from changed repo) + # TODO remove_yum_cache_for_untracked_repos + + self._tools.yum.makecache(True) + + for package in ['yum-utils', 'wget', 'curl', 'tar']: + if not self._tools.rpm.is_package_installed(package): + self._tools.yum.install(package) + + def _download_packages(self): + prereqs_dir = self._cfg.dest_packages / 'repo-prereqs' + prereqs_dir.mkdir(exist_ok=True, parents=True) + + collected_prereqs: List[str] = [] + for prereq_pkg in self._requirements['prereq-packages']: + collected_prereqs.append(self._tools.repoquery.query(prereq_pkg['name'], + queryformat='%{ui_nevra}', + arch=self._cfg.os_arch.value)) + + # download requirements (fixed versions) + if collected_prereqs: + self._tools.yumdownloader.download_packages(collected_prereqs, + arch=self._cfg.os_arch.value, + exclude='*i686', + destdir=prereqs_dir) + + def _download_file(self, file: str): + 
self._tools.wget.download(file, directory_prefix=self._cfg.dest_files, additional_params=False) + + def _download_dashboard(self, dashboard: str, output_file: Path): + self._tools.wget.download(dashboard, output_document=output_file, additional_params=False) diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_apt.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_apt.py new file mode 100644 index 0000000000..bc6b2dab35 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_apt.py @@ -0,0 +1,21 @@ +from tests.mocks.command_run_mock import CommandRunMock + +from src.command.apt import Apt + + +def test_interface_update(mocker): + ''' Check argument construction for `apt update` ''' + with CommandRunMock(mocker, Apt(1).update) as call_args: + assert call_args == ['apt', 'update'] + + +def test_interface_download(mocker): + ''' Check argument construction for `apt download package` ''' + with CommandRunMock(mocker, Apt(1).download, {'package': 'vim'}) as call_args: + assert call_args == ['apt', 'download', 'vim'] + + +def test_interface_install(mocker): + ''' Check argument construction for `apt install -y package` ''' + with CommandRunMock(mocker, Apt(1).install, {'package': 'vim', 'assume_yes': True}) as call_args: + assert call_args == ['apt', 'install', '-y', 'vim'] diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_crane.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_crane.py new file mode 100644 index 0000000000..4722563aa8 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_crane.py @@ -0,0 +1,19 @@ +from pathlib import Path +from tests.mocks.command_run_mock import CommandRunMock + +from src.command.crane import Crane + + +def test_interface_pull(mocker): + ''' Check argument construction for crane pull 
''' + mocker.patch('src.command.crane.chmod', return_value=None) + mocker.patch('src.command.crane.mkstemp', return_value=[None, '/tmp/tmpfile']) + mocker.patch('src.command.crane.move', return_value=None) + + with CommandRunMock(mocker, Crane(1).pull, {'image_name': 'image', + 'destination': Path('/some/place'), + 'platform': 'platform', + 'legacy_format': True, + 'insecure': True}) as call_args: + assert call_args == ['crane', 'pull', '--insecure', '--platform=platform', '--format=legacy', + 'image', '/tmp/tmpfile'] diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_tar.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_tar.py new file mode 100644 index 0000000000..1c8bca198f --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_tar.py @@ -0,0 +1,34 @@ +from tests.mocks.command_run_mock import CommandRunMock + +from src.command.tar import Tar + + +def test_interface_list_items(mocker): + ''' Check argument construction for `tar -tf` ''' + with CommandRunMock(mocker, Tar().list_items, {'filename': '/var/log/file.tar.gz'}) as call_args: + assert call_args == ['tar', '-tf', '/var/log/file.tar.gz'] + + +def test_interface_pack(mocker): + ''' Check argument construction for `tar -cf` ''' + with CommandRunMock(mocker, Tar().pack, {'filename': '/tmp/package.tar.gz', + 'target': '*', + 'directory': '/some/directory', + 'verbose': True, + 'compress': True, + 'verify': True}) as call_args: + assert call_args == ['tar', '-czvf', '/tmp/package.tar.gz', '--verify', '--directory', '/some/directory', '*'] + + +def test_interface_unpack(mocker): + ''' Check argument construction for `tar -xf` ''' + with CommandRunMock(mocker, Tar().unpack, {'filename': '/tmp/package.tar.gz', + 'target': 'some_target', + 'directory': '/some/directory', + 'overwrite': True, + 'verbose': True, + 'uncompress': True, + 'strip_components': 2}) as call_args: + assert call_args 
== ['tar', '-xzvf', '/tmp/package.tar.gz', '--directory', '/some/directory', + '--strip-components=2', 'some_target', '--overwrite'] + diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_wget.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_wget.py new file mode 100644 index 0000000000..9849435f63 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_wget.py @@ -0,0 +1,15 @@ +from pathlib import Path + +from src.command.wget import IPFamily, Wget +from tests.mocks.command_run_mock import CommandRunMock + + +def test_builder_download(mocker): + ''' Check argument construction for `wget` ''' + with CommandRunMock(mocker, Wget(1).download, {'url': 'http://some.url.com', + 'output_document': Path('/var/log/output_name'), + 'directory_prefix': Path('/custom/prefix'), + 'ip_family': IPFamily.IPV4}) as call_args: + assert call_args == ['wget', '--no-use-server-timestamps', '--continue', '--show-progress', + '-O', '/var/log/output_name', '--directory-prefix=/custom/prefix', + '--prefer-family=IPv4', 'http://some.url.com'] diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_yum.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_yum.py new file mode 100644 index 0000000000..c4963dab07 --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_yum.py @@ -0,0 +1,10 @@ +from tests.mocks.command_run_mock import CommandRunMock + +from src.command.yum import Yum + + +def test_builder_install(mocker): + ''' Check argument construction for `yum install -y` ''' + with CommandRunMock(mocker, Yum(1).install, {'package': 'vim', 'assume_yes': True}) as call_args: + assert call_args == ['yum', 'install', '-y', 'vim'] + diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/mocks/command_run_mock.py 
b/ansible/playbooks/roles/repository/files/download-requirements/tests/mocks/command_run_mock.py new file mode 100644 index 0000000000..ac181dd3ec --- /dev/null +++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/mocks/command_run_mock.py @@ -0,0 +1,45 @@ +import subprocess +from typing import Any, Callable, Dict, List +from unittest.mock import Mock + +from pytest_mock.plugin import MockerFixture + + +class CommandRunMock: + """ + Mock class for Command.run() calls. + Usage: + + with CommandRunMock(mocker, function_to_test, function_args) as call_args: + assert call_args == [expected_arg1, ...] + """ + def __init__(self, mocker: MockerFixture, func: Callable, args: Dict[str, Any] = None): + """ + :param mocker: mocker object provided by pytest + :param func: function which will be tested + :param args: parameters that will be passed to `__func` + """ + self.__mocker = mocker + self.__func = func + self.__args = args + + def __enter__(self) -> List[str]: + """ + :return: list of arguments passed to the subprocess.run() function + """ + mock = Mock() + mock.returncode = 0 + + self.__mocker.patch('src.command.command.subprocess.run', side_effect=lambda args, encoding, stdout, stderr: mock) + + spy = self.__mocker.spy(subprocess, 'run') + + if self.__args: + self.__func(**self.__args) + else: + self.__func() + + return spy.call_args[0][0] + + def __exit__(self, *args): + pass diff --git a/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/add-repositories.sh b/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/add-repositories.sh deleted file mode 100644 index 48b788a6cb..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/add-repositories.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -eu - -wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add - -echo "deb https://artifacts.elastic.co/packages/oss-6.x/apt stable main" | tee 
/etc/apt/sources.list.d/elastic-6.x.list - -wget -qO - https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - -echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list - -wget -qO - https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc | apt-key add - -echo "deb https://packages.erlang-solutions.com/ubuntu focal contrib" | tee /etc/apt/sources.list.d/erlang-23.x.list - -# bionic is used since focal contains versions 3.8.11+ but we use 3.8.9 -wget -qO - https://packagecloud.io/rabbitmq/rabbitmq-server/gpgkey | apt-key add - -echo "deb https://packagecloud.io/rabbitmq/rabbitmq-server/ubuntu bionic main" | tee /etc/apt/sources.list.d/rabbitmq.list - -wget -qO - https://download.docker.com/linux/ubuntu/gpg | apt-key add - -echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable" | tee /etc/apt/sources.list.d/docker-ce.list - -wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add - -echo "deb https://artifacts.elastic.co/packages/oss-7.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-7.x.list - -wget -qO - https://d3g5vo6xdbdb9a.cloudfront.net/GPG-KEY-opendistroforelasticsearch | apt-key add - -echo "deb https://d3g5vo6xdbdb9a.cloudfront.net/apt stable main" | tee -a /etc/apt/sources.list.d/opendistroforelasticsearch.list - -wget -qO - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - -echo "deb http://apt.postgresql.org/pub/repos/apt focal-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list - -# Historical packages from apt.postgresql.org -wget -qO - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - -echo "deb http://apt-archive.postgresql.org/pub/repos/apt focal-pgdg-archive main" | tee /etc/apt/sources.list.d/pgdg-archive.list - -# Provides repmgr -wget -qO - https://dl.2ndquadrant.com/gpg-key.asc | apt-key add - -echo "deb https://dl.2ndquadrant.com/default/release/apt focal-2ndquadrant main" | tee 
-a /etc/apt/sources.list.d/2ndquadrant-dl-default-release.list diff --git a/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/common.sh b/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/common.sh deleted file mode 100644 index c21d72a7fc..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/common.sh +++ /dev/null @@ -1,137 +0,0 @@ -#!/bin/bash -# this file is just a bunch of functions meant to be called from other scripts - - -usage() { - echo "usage: ./$(basename "$0") " - echo "example: ./$(basename "$0") /tmp/downloads" - exit 1 -} - -# params: -remove_file() { - local file_path="$1" - - echol "Removing file: $file_path" - rm -f "$file_path" || exit_with_error "Command failed: rm -f \"$file_path\"" -} - -# params: -create_directory() { - local dir_path="$1" - - if [[ ! -d "$dir_path" ]]; then - mkdir -p $dir_path - fi -} - -# params: -download_image() { - local image_name="$1" - local dest_dir="$2" - - local splited_image=(${image_name//:/ }) - local repository=${splited_image[0]} - local tag=${splited_image[1]} - local repo_basename=$(basename -- "$repository") - local dst_image="${dest_dir}/${repo_basename}-${tag}.tar" - local retries=3 - - if [[ -f ${dst_image} ]]; then - echo "Image: "${dst_image}" already exists. Skipping..." 
- else - local tmp_file=$(mktemp) - echo "Downloading image: $1" - echo "Crane command is: ${CRANE_BIN} pull --insecure --format=legacy ${image_name} ${dst_image}" - # use temporary file for downloading to be safe from sudden interruptions (network, ctrl+c) - run_cmd_with_retries $retries ${CRANE_BIN} pull --insecure --platform=${docker_platform} --format=legacy ${image_name} ${tmp_file} && chmod 644 ${tmp_file} && mv ${tmp_file} ${dst_image} - fi -} - -# params: [new_filename] -download_file() { - local file_url="$1" - local dest_dir="$2" - if [[ ${3-} ]]; then - local new_filename="$3" - fi - - local file_name - file_name=$(basename "$file_url") - local dest_path="${dest_dir}/${file_name}" - local retries=3 - - # wget with --timestamping sometimes failes on AWS with ERROR 403: Forbidden - # so we remove existing file to overwrite it - - # remove old files to force redownload after a while - # just a precaution so --continue won't append and corrupt files localy if file is updated on server without name change - if [[ -f $dest_path && $(find "$dest_path" -mmin +60 -print) ]]; then - echol "File $dest_path older than 1h, redownloading..." - remove_file "$dest_path" - fi - - # --no-use-server-timestamps - we don't use --timestamping and we need to expire files somehow - # --continue - don't download the same file multiple times, gracefully skip if file is fully downloaded - if [[ ${new_filename-} ]]; then - echol "Downloading file: $file_url as $new_filename" - run_cmd_with_retries $retries wget --no-use-server-timestamps --continue --show-progress --prefer-family=IPv4 "${file_url}" -O "${dest_dir}/${new_filename}" - else - echol "Downloading file: $file_url" - run_cmd_with_retries $retries wget --no-use-server-timestamps --continue --show-progress --prefer-family=IPv4 --directory-prefix="${dest_dir}" "${file_url}" - fi -} - -# to download everything, add "--recurse" flag but then you will get much more packages (e.g. 
596 vs 319) -deplist_cmd() { - apt-cache depends --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends $1 -} - -get_shell_escaped_array() { - if (( $# > 0 )); then - printf '%q\n' "$@" - fi -} - -print_array_as_shell_escaped_string() { - local output - output=$(get_shell_escaped_array "$@") - local -a escaped=() - if [ -n "$output" ]; then - readarray -t escaped <<< "$output" - fi - if (( ${#escaped[@]} > 0 )); then - printf '%s\n' "${escaped[*]}" - fi -} - -run_cmd() { - local -a cmd_arr=("$@") - local output - output=$(print_array_as_shell_escaped_string "${cmd_arr[@]}") - echo "Running command:" "$output" - "${cmd_arr[@]}" -} - -run_cmd_with_retries() { - local retries=${1} - shift - local -a cmd_arr=("$@") - ( # sub-shell is used to limit scope for 'set +e' - set +e - trap - ERR # disable global trap locally - for ((i=0; i <= retries; i++)); do - run_cmd "${cmd_arr[@]}" - return_code=$? - if (( return_code == 0 )); then - break - elif (( i < retries )); then - sleep 1 - echo "retrying ($(( i+1 ))/${retries})" - else - echo "ERROR: all attempts failed" - fi - done - return $return_code - ) -} diff --git a/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/download-requirements.sh b/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/download-requirements.sh deleted file mode 100644 index 22e6b597f7..0000000000 --- a/ansible/playbooks/roles/repository/files/download-requirements/ubuntu-20.04/download-requirements.sh +++ /dev/null @@ -1,248 +0,0 @@ -#!/bin/bash - -set -euo pipefail -export DEBIAN_FRONTEND=noninteractive - -script_path="$( cd "$(dirname "$0")" ; pwd -P )" - -# source common functions -. "${script_path}/common.sh" - -internet_access_checks_enabled="yes" -CREATE_LOGFILE="yes" -LOG_FILE_PATH="${script_path}/log" - -. 
"${script_path}/common/common_functions.sh" - -if [[ $# -lt 1 ]]; then - usage -fi - -readonly START_TIME=$(date +%s) - -script_file_name=$(basename "$0") -dst_dir=$(readlink -m "$1") # beautify input path - remove double slashes if occurs -dst_dir_packages="${dst_dir}/packages" -dst_dir_files="${dst_dir}/files" -dst_dir_images="${dst_dir}/images" -deplist="${script_path}/.dependencies" -retries="3" -download_cmd="run_cmd_with_retries $retries apt-get download" -add_repos="${script_path}/add-repositories.sh" -CRANE_BIN="${script_path}/crane" - -# arch -arch=$(uname -m) -echol "Detected arch: $arch" -input_file="${script_path}/requirements.${arch}.txt" -case $arch in -x86_64) - docker_platform="linux/amd64" - ;; - -*) - exit_with_error "Arch $arch unsupported" - ;; -esac -echol "Docker platform: $docker_platform" - -# checks - -[[ $EUID -eq 0 ]] || { echo "You have to run as root"; exit 1; } - -[[ -f $input_file ]] || exit_with_error "File not found: $input_file" - -# restore system repositories in case they're missing if ansible role gets interrupted - -enable_system_repos_script="/var/tmp/epi-repository-setup-scripts/enable-system-repos.sh" -disable_epirepo_client_script="/var/tmp/epi-repository-setup-scripts/disable-epirepo-client.sh" -apt_sources_list="/etc/apt/sources.list" - -if [[ ! -f $apt_sources_list || ! -s $apt_sources_list ]]; then - if [[ -f /var/tmp/enabled-system-repos.tar && -f $enable_system_repos_script ]]; then - echol "OS repositories seems missing, restoring..." 
- $enable_system_repos_script || exit_with_error "Could not restore system repositories" - $disable_epirepo_client_script || exit_with_error "Could not disable epirepo" - else - echol "$apt_sources_list seems missing or is empty, you either know what you're doing or you need to fix your repositories" - fi -fi - -check_connection apt $apt_sources_list - -# install prerequisites which might be missing -prerequisites=(wget gpg curl tar) -for i in ${prerequisites[@]}; do - dpkg -l | grep "^ii $i\s" &>/dev/null || apt install -y $i -done - -# some quick sanity check -echol "Dependency list: $deplist" -echol "Command used to download packages: $download_cmd" -echol "Destination directory for packages: $dst_dir_packages" - -# make sure destination dir exists -mkdir -p "$dst_dir_packages" -mkdir -p "$dst_dir_files" -mkdir -p "$dst_dir_images" - -# mask custom repositories to avoid possible conflicts -shopt -s nullglob -for i in /etc/apt/sources.list.d/*.list; do - mv "$i" "${i}.bak" -done -shopt -u nullglob - -# add 3rd party repositories -# TODO: See if we need to split this up to support different architectures -. 
"${add_repos}" - -check_connection apt $(ls /etc/apt/sources.list.d) -apt update - -# parse the input file, separete by tags: [crane], [packages], [files], [images] -crane=$(awk '/^$/ || /^#/ {next}; /\[crane\]/ {f=1; next}; /^\[/ {f=0}; f {print $0}' "${input_file}") -packages=$(awk '/^$/ || /^#/ {next}; /\[packages\]/ {f=1; next}; /^\[/ {f=0}; f {print $0}' "${input_file}") -files=$(awk '/^$/ || /^#/ {next}; /\[files\]/ {f=1; f=2; next}; /^\[/ {f=0}; f {print $0}' "${input_file}") -images=$(awk '/^$/ || /^#/ {next}; /\[images\]/ {f=1; next}; /^\[/ {f=0}; f {print $0}' "${input_file}") - -printf "\n" - -if [[ -e $deplist ]]; then - # clear list of cached dependencies if .dependencies is older than 15 minutes - find "$script_path" -type f -wholename "$deplist" -mmin +15 -exec rm "$deplist" \; - # clear list of cached dependencies if requirements.txt was recently edited - find "$script_path" -type f -wholename "$input_file" -mmin -1 -exec rm "$deplist" \; -fi - -# CRANE -if [[ -z "${crane}" || $(wc -l <<< "${crane}") -ne 1 ]] ; then - exit_with_error "Crane binary download path undefined or more than one download path defined" -else - if [[ -x $CRANE_BIN ]]; then - echol "Crane binary already exists" - else - file_url=$(head -n 1 <<< "${crane}") - - check_connection wget $file_url - - echol "Downloading crane from: $file_url" - download_file "$file_url" "$script_path" - tar_path="${script_path}/${file_url##*/}" - echol "Unpacking crane from $tar_path to $CRANE_BIN" - tar -xzf "$tar_path" --directory "$script_path" "crane" --overwrite - chmod +x "$CRANE_BIN" - remove_file "$tar_path" - [[ -f $CRANE_BIN ]] || exit_with_error "File not found: $CRANE_BIN" - [[ -x $CRANE_BIN ]] || exit_with_error "$CRANE_BIN has to be executable" - fi -fi - -printf "\n" - -check_connection crane $(for image in $images; do splitted=(${image//:/ }); echo "${splitted[0]}"; done) - -# PACKAGES -# if dependency list doesn't exist or is zero size then resolve dependency and store them in a 
deplist file -if [[ ! -f $deplist || ! -s $deplist ]]; then - # clean dependency list if process gets interrupted - trap "rm -f $deplist; echol 'Dependency resolution interrupted, cleaning cache file'" SIGINT SIGTERM - echo Resolving dependencies to download. This might take a while and will be cached in $deplist - while IFS= read -r package; do - echol "Package read from requirements file: $package" - # if package has a specified version e.g. "name 1.0" store it as "name=1.0*" for compatibility with "apt-get download" - package=$(echo "$package" | awk '{if($2 != "") {print $1 "=" $2 "*"} else {print $1}}') - echol "Package to download: $package" - # store package itself in the list of dependencies... - echol "$package" >> "$deplist" - # .. and create depency list for the package - # (names only for dependencies, no version check here, not necessary as most dependencies are backward-compatible) - dependencies=$(deplist_cmd "$package" | awk '/Depends/ && !/= v0.7.1) -kubeadm 1.18.6 -kubectl 1.18.6 -kubelet 1.18.6 - -# K8s v1.19.15 (Epiphany >= v1.3, transitional version) -kubeadm 1.19.15 -kubectl 1.19.15 -kubelet 1.19.15 - -# K8s v1.20.12 (Epiphany >= v1.3, transitional version) -kubeadm 1.20.12 -kubectl 1.20.12 -kubelet 1.20.12 - -# K8s v1.21.7 (Epiphany >= v1.3, transitional version) -kubeadm 1.21.7 -kubectl 1.21.7 -kubelet 1.21.7 - -# K8s v1.22.4 -kubeadm 1.22.4 -kubectl 1.22.4 -kubelet 1.22.4 - -# Kubernetes Generic -# kubernetes-cni-0.8.6 since K8s v1.18.6 -kubernetes-cni 0.8.6-00 -# kubernetes-cni-0.8.7 since K8s v1.19.15 -kubernetes-cni 0.8.7-00 - -[files] -# --- Packages --- -# Switched from APT repo because there was only one (the latest) version available (issue #2262) -https://packages.elastic.co/curator/5/debian9/pool/main/e/elasticsearch-curator/elasticsearch-curator_5.8.3_amd64.deb -# Grafana package is not downloaded from repository since it was not reliable (issue #2449) -https://dl.grafana.com/oss/release/grafana_8.3.2_amd64.deb -# --- Exporters --- 
-https://github.com/danielqsj/kafka_exporter/releases/download/v1.4.0/kafka_exporter-1.4.0.linux-amd64.tar.gz -https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar -https://github.com/prometheus/node_exporter/releases/download/v1.3.1/node_exporter-1.3.1.linux-amd64.tar.gz -https://github.com/prometheus-community/postgres_exporter/releases/download/v0.10.0/postgres_exporter-0.10.0.linux-amd64.tar.gz -# --- Misc --- -https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz -https://archive.apache.org/dist/zookeeper/zookeeper-3.5.8/apache-zookeeper-3.5.8-bin.tar.gz -https://github.com/prometheus/alertmanager/releases/download/v0.23.0/alertmanager-0.23.0.linux-amd64.tar.gz -https://github.com/prometheus/prometheus/releases/download/v2.31.1/prometheus-2.31.1.linux-amd64.tar.gz -https://get.helm.sh/helm-v3.2.0-linux-amd64.tar.gz -https://archive.apache.org/dist/logging/log4j/2.17.1/apache-log4j-2.17.1-bin.tar.gz -# --- Helm charts --- -https://charts.bitnami.com/bitnami/node-exporter-2.3.17.tgz -https://helm.elastic.co/helm/filebeat/filebeat-7.9.2.tgz -# --- Grafana Dashboards --- -# Kubernetes Cluster -https://grafana.com/api/dashboards/7249/revisions/1/download grafana_dashboard_7249.json -# Kubernetes cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/315/revisions/3/download grafana_dashboard_315.json -# Node Exporter for Prometheus -https://grafana.com/api/dashboards/11074/revisions/9/download grafana_dashboard_11074.json -# Node Exporter Server Metrics -https://grafana.com/api/dashboards/405/revisions/8/download grafana_dashboard_405.json -# Postgres Overview -https://grafana.com/api/dashboards/455/revisions/2/download grafana_dashboard_455.json -# PostgreSQL Database -https://grafana.com/api/dashboards/9628/revisions/7/download grafana_dashboard_9628.json -# RabbitMQ Monitoring -https://grafana.com/api/dashboards/4279/revisions/4/download grafana_dashboard_4279.json -# 
Node Exporter Full -https://grafana.com/api/dashboards/1860/revisions/23/download grafana_dashboard_1860.json -# Kafka Exporter Overview -https://grafana.com/api/dashboards/7589/revisions/5/download grafana_dashboard_7589.json -# HaProxy backend (or frontend/servers) -https://grafana.com/api/dashboards/789/revisions/1/download grafana_dashboard_789.json -# Docker and Host Monitoring w/ Prometheus -https://grafana.com/api/dashboards/179/revisions/7/download grafana_dashboard_179.json -# Kubernetes pod and cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/6663/revisions/1/download grafana_dashboard_6663.json -# RabbitMQ cluster monitoring (via Prometheus) -https://grafana.com/api/dashboards/10991/revisions/11/download grafana_dashboard_10991.json - -[images] -haproxy:2.2.2-alpine -kubernetesui/dashboard:v2.3.1 -kubernetesui/metrics-scraper:v1.0.7 -registry:2 -# applications -bitnami/pgpool:4.2.4 -bitnami/pgbouncer:1.16.0 - -epiphanyplatform/keycloak:14.0.0 -rabbitmq:3.8.9 -# K8s -## v1.18.6 -k8s.gcr.io/kube-apiserver:v1.18.6 -k8s.gcr.io/kube-controller-manager:v1.18.6 -k8s.gcr.io/kube-scheduler:v1.18.6 -k8s.gcr.io/kube-proxy:v1.18.6 -k8s.gcr.io/coredns:1.6.7 -k8s.gcr.io/etcd:3.4.3-0 -quay.io/coreos/flannel:v0.12.0-amd64 -quay.io/coreos/flannel:v0.12.0 -calico/cni:v3.15.0 -calico/kube-controllers:v3.15.0 -calico/node:v3.15.0 -calico/pod2daemon-flexvol:v3.15.0 -## v1.19.15 -k8s.gcr.io/kube-apiserver:v1.19.15 -k8s.gcr.io/kube-controller-manager:v1.19.15 -k8s.gcr.io/kube-scheduler:v1.19.15 -k8s.gcr.io/kube-proxy:v1.19.15 -## v1.20.12 -k8s.gcr.io/kube-apiserver:v1.20.12 -k8s.gcr.io/kube-controller-manager:v1.20.12 -k8s.gcr.io/kube-scheduler:v1.20.12 -k8s.gcr.io/kube-proxy:v1.20.12 -k8s.gcr.io/coredns:1.7.0 -k8s.gcr.io/pause:3.2 -## v1.21.7 -k8s.gcr.io/kube-apiserver:v1.21.7 -k8s.gcr.io/kube-controller-manager:v1.21.7 -k8s.gcr.io/kube-scheduler:v1.21.7 -k8s.gcr.io/kube-proxy:v1.21.7 -k8s.gcr.io/coredns/coredns:v1.8.0 -k8s.gcr.io/etcd:3.4.13-0 
-k8s.gcr.io/pause:3.4.1 -## v1.22.4 -k8s.gcr.io/kube-apiserver:v1.22.4 -k8s.gcr.io/kube-controller-manager:v1.22.4 -k8s.gcr.io/kube-scheduler:v1.22.4 -k8s.gcr.io/kube-proxy:v1.22.4 -k8s.gcr.io/coredns/coredns:v1.8.4 -k8s.gcr.io/etcd:3.5.0-0 -k8s.gcr.io/pause:3.5 -quay.io/coreos/flannel:v0.14.0-amd64 -quay.io/coreos/flannel:v0.14.0 -calico/cni:v3.20.3 -calico/kube-controllers:v3.20.3 -calico/node:v3.20.3 -calico/pod2daemon-flexvol:v3.20.3 diff --git a/ansible/playbooks/roles/repository/tasks/Debian/install-packages.yml b/ansible/playbooks/roles/repository/tasks/Debian/install-packages.yml new file mode 100644 index 0000000000..7849a475d0 --- /dev/null +++ b/ansible/playbooks/roles/repository/tasks/Debian/install-packages.yml @@ -0,0 +1,10 @@ +--- +- name: Install Debian family packages essential for repository to work + apt: + name: + - python3-yaml + state: present + register: result + retries: 3 + delay: 1 + until: result is succeeded diff --git a/ansible/playbooks/roles/repository/tasks/RedHat/install-packages.yml b/ansible/playbooks/roles/repository/tasks/RedHat/install-packages.yml new file mode 100644 index 0000000000..f7f4b381a9 --- /dev/null +++ b/ansible/playbooks/roles/repository/tasks/RedHat/install-packages.yml @@ -0,0 +1,17 @@ +--- +- name: Install RedHat family packages for repository to work + yum: + name: + - python3 # python3.6 by default on RHEL7 + - rh-python36-PyYAML + state: present + register: result + retries: 3 + delay: 1 + until: result is succeeded + +- name: Create symlink to PyYAML for python3.6 # Needed on RHEL7 + file: + src: /opt/rh/rh-python36/root/usr/lib64/python3.6/site-packages/yaml + dest: /usr/lib64/python3.6/site-packages/yaml + state: link diff --git a/ansible/playbooks/roles/repository/tasks/check-whether-to-run-download.yml b/ansible/playbooks/roles/repository/tasks/check-whether-to-run-download.yml index 7e5f504a64..f0d1fc4452 100644 --- a/ansible/playbooks/roles/repository/tasks/check-whether-to-run-download.yml +++ 
b/ansible/playbooks/roles/repository/tasks/check-whether-to-run-download.yml @@ -4,12 +4,12 @@ - name: Check if flag file exists stat: - path: /var/tmp/epi-download-requirements/download-requirements-done.flag + path: "{{ download_requirements_flag }}" register: stat_flag_file - name: Remove download-requirements-done.flag file if expired file: - path: /var/tmp/epi-download-requirements/download-requirements-done.flag + path: "{{ download_requirements_flag }}" state: absent when: - stat_flag_file.stat.exists @@ -17,5 +17,5 @@ - name: Check whether to run download script stat: - path: /var/tmp/epi-download-requirements/download-requirements-done.flag + path: "{{ download_requirements_flag }}" register: stat_flag_file diff --git a/ansible/playbooks/roles/repository/tasks/download-requirements.yml b/ansible/playbooks/roles/repository/tasks/download-requirements.yml index 61392a5195..ecc669a286 100644 --- a/ansible/playbooks/roles/repository/tasks/download-requirements.yml +++ b/ansible/playbooks/roles/repository/tasks/download-requirements.yml @@ -4,14 +4,19 @@ - name: |- Run download-requirements script, this can take a long time - You can check progress on repository host with: journalctl -f -t download-requirements.sh + You can check progress on repository host with: journalctl -f -t download-requirements.py shell: >- - set -o pipefail && /var/tmp/epi-download-requirements/download-requirements.sh /var/www/html/epirepo --no-logfile |& - tee >(systemd-cat --identifier=download-requirements.sh) + set -o pipefail && "{{ download_requirements_script }}" /var/www/html/epirepo "{{ ansible_distribution }}" --no-logfile |& + tee >(systemd-cat --identifier=download-requirements.py) args: executable: /bin/bash - name: Create flag file to not re-download requirements next time file: - path: /var/tmp/epi-download-requirements/download-requirements-done.flag + path: "{{ download_requirements_flag }}" state: touch + +- name: STOP HERE + fail: + msg: GESZTOPT DANKE! 
+ when: true diff --git a/ansible/playbooks/roles/repository/tasks/setup.yml b/ansible/playbooks/roles/repository/tasks/setup.yml index 273e443ae3..eec65a151f 100644 --- a/ansible/playbooks/roles/repository/tasks/setup.yml +++ b/ansible/playbooks/roles/repository/tasks/setup.yml @@ -16,28 +16,38 @@ 'redhat-7' if (ansible_distribution == 'RedHat' and ansible_distribution_major_version == '7') else 'ubuntu-20.04' if (ansible_distribution == 'Ubuntu' and ansible_distribution_version == '20.04') else None }} -- name: Download Epiphany requirements on repository host # online mode +- name: Set paths for download-requirements script + set_fact: + download_requirements_dir: "{{ _download_requirements_dir }}" + download_requirements_script: "{{ _download_requirements_dir }}/download-requirements.py" + download_requirements_flag: "{{ _download_requirements_dir }}/download-requirements-done.flag" + vars: + _download_requirements_dir: "/var/tmp/epi-download-requirements" + +- name: Setup venv and download Epiphany requirements on repository host # online mode block: - - name: Check if OS distribution is supported - assert: - that: download_script_subdir | length > 0 - fail_msg: "Your OS distribution is not supported" + - name: Install required packages for repository + include_tasks: "{{ ansible_os_family }}/install-packages.yml" + + - name: Create download-requirements directory + file: + path: "{{ download_requirements_dir }}" + state: directory - name: Copy files for downloading requirements to repository host - copy: - src: download-requirements/{{ download_script_subdir }}/ - dest: /var/tmp/epi-download-requirements + synchronize: + src: download-requirements/ + dest: "{{ download_requirements_dir }}" + recursive: true + rsync_opts: + - "--exclude=tests" # tests not needed + - "--exclude=__pycache__" - name: Make download script executable file: - dest: /var/tmp/epi-download-requirements/download-requirements.sh + dest: "{{ download_requirements_script }}" mode: a+x - - 
name: Copy common files for downloading requirements to repository host - copy: - src: download-requirements/common - dest: /var/tmp/epi-download-requirements - when: - not offline_mode - not custom_repository_url