Skip to content

Commit

Permalink
WIP: Feature/offline Ubuntu part (hitachienergy#580)
Browse files Browse the repository at this point in the history
* offline repo for ubuntu - merge scripts into repository role - initial support

* requirements and repo creation fixes

* ubuntu offline-online installation works

* apt-cache policy not needed here

* cleanups

* added missing bash error handling

* added removal of 3rd party repos, other cleanups, install wget and gpg for minimal OS
  • Loading branch information
lukurde authored Oct 10, 2019
1 parent 827b1ec commit ec1abf0
Show file tree
Hide file tree
Showing 19 changed files with 441 additions and 887 deletions.
11 changes: 6 additions & 5 deletions core/src/epicli/cli/engine/ansible/AnsibleRunner.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,11 +50,12 @@ def run(self):
if not Config().offline_requirements:
shutil.copy(os.path.join(dirname(dirname(inspect.getfile(os))), 'skopeo_linux'), '/tmp')

# todo: install packages to run ansible on Red Hat hosts
self.ansible_command.run_task_with_retries(hosts='all', inventory=inventory_path, module='raw',
args='cat /etc/lsb-release | grep -i DISTRIB_ID | grep -i ubuntu && '
'sudo apt-get update && sudo apt-get install -y python-simplejson '
'|| echo \'Cannot find information about Ubuntu distribution\'', retries=5)
# ~~this shouldn't be needed at all~~
# # todo: install packages to run ansible on Red Hat hosts
# self.ansible_command.run_task_with_retries(hosts='all', inventory=inventory_path, module='raw',
# args='cat /etc/lsb-release | grep -i DISTRIB_ID | grep -i ubuntu && '
# 'sudo apt-get update && sudo apt-get install -y python-simplejson '
# '|| echo \'Cannot find information about Ubuntu distribution\'', retries=5)

self.ansible_vars_generator.run()

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,14 @@
# split into separate tasks to make it work on Debian/Ubuntu:
# https://github.com/elastic/elasticsearch/issues/33607#issue-359124678

- name: Install prerequisites for Elasticsearch
apt:
name:
- openjdk-8-jre-headless
state: present

- name: Install Elasticsearch package
apt:
name:
- openjdk-8-jre
- elasticsearch-oss
state: present
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,15 @@
- gnupg2
when: ansible_os_family == "Debian"

#TODO: this is a quick workaround, we should tackle versioning in a smarter way
- name: Install Grafana
package:
name: "grafana-{{ grafana_version }}"
state: present
state: present
when: ansible_os_family == "RedHat"

- name: Install Grafana
package:
name: "grafana={{ grafana_version }}"
state: present
when: ansible_os_family == "Debian"
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@

- name: Install Java package
package:
name: "openjdk-8-jre"
name: "openjdk-8-jre-headless"
state: present
when: ansible_os_family == "Debian"

Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
#!/bin/bash
#!/bin/bash -eu

SERVER_IP=$1;
REPOSITORY_URL=$1

echo "deb [trusted=yes] http://$SERVER_IP/epirepo/ packages/" > /etc/apt/sources.list.d/epirepo.list;
echo "deb [trusted=yes] $REPOSITORY_URL/packages ./" > /etc/apt/sources.list.d/epirepo.list

apt-cache policy;

apt update;
apt update
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
#!/bin/bash -eu

# Back up the currently enabled APT repository definitions so they can be
# restored after the offline installation replaces them with the local epirepo.
REPOS_BACKUP_FILE=/var/tmp/enabled-system-repos.tar

# Only create the archive once: a second run must not overwrite the original
# backup with the already-modified repository configuration.
if [ ! -f "$REPOS_BACKUP_FILE" ]; then
  # --ignore-failed-read: tolerate a missing sources.list or empty sources.list.d
  # --absolute-names: keep the leading '/' so the restore script can unpack from /
  tar --ignore-failed-read --absolute-names -cvpf "$REPOS_BACKUP_FILE" /etc/apt/sources.list /etc/apt/sources.list.d/ 2>&1
fi
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash -eu

# Disable the local "epirepo" offline repository and refresh the package index
# so the system's regular repositories take effect again.
rm -f /etc/apt/sources.list.d/epirepo.list
apt-get clean
# apt-get rather than apt: plain `apt` warns that its CLI is not stable
# enough for use in scripts
apt-get update
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#!/bin/bash -eu

# Remove the system's APT repository definitions — but only when a backup
# created by the backup script exists, so they can be restored later.
REPOS_BACKUP_FILE=/var/tmp/enabled-system-repos.tar

if [ -f "$REPOS_BACKUP_FILE" ]; then
  rm -f /etc/apt/sources.list /etc/apt/sources.list.d/*
else
  # Fail loudly on stderr with a non-zero status: deleting the repository
  # configuration without a backup would be unrecoverable, and callers must
  # be able to detect that nothing was removed.
  echo "${REPOS_BACKUP_FILE} file not found. You don't seem to have a backup of the repositories. Cowardly refusing to delete system files." >&2
  exit 1
fi
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash -eu

# Restore the APT repository definitions saved by the backup script.
REPOS_BACKUP_FILE=/var/tmp/enabled-system-repos.tar

# --absolute-names spelled out in full (the original `--absolute-name` only
# worked as a GNU tar abbreviation) and matching the flag used when the
# backup was created; it lets tar unpack the archived absolute paths from /.
tar -C / --absolute-names -xvf "$REPOS_BACKUP_FILE" 2>&1
Original file line number Diff line number Diff line change
@@ -1,19 +1,22 @@
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
echo "deb https://artifacts.elastic.co/packages/oss-6.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-6.x.list
#!/bin/bash -eu

wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
echo "deb [arch=amd64] https://packages.elastic.co/curator/5/debian stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-curator-6.x.list
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
echo "deb https://artifacts.elastic.co/packages/oss-6.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-6.x.list

wget -qO - https://packages.grafana.com/gpg.key | sudo apt-key add -
echo "deb https://packages.grafana.com/oss/deb stable main" | sudo tee -a /etc/apt/sources.list.d/grafana.list
wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | apt-key add -
echo "deb [arch=amd64] https://packages.elastic.co/curator/5/debian stable main" | tee /etc/apt/sources.list.d/elastic-curator-6.x.list

wget -qO - https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee -a /etc/apt/sources.list.d/kubernetes.list
wget -qO - https://packages.grafana.com/gpg.key | apt-key add -
echo "deb https://packages.grafana.com/oss/deb stable main" | tee /etc/apt/sources.list.d/grafana.list

wget -qO - https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc | sudo apt-key add -
echo "deb http://dl.bintray.com/rabbitmq-erlang/debian bionic erlang-21.x" | sudo tee -a /etc/apt/sources.list.d/erlang-21.x.list
wget -qO - https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list

wget -qO - https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable" | sudo tee -a /etc/apt/sources.list.d/docker-ce.list
wget -qO - https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc | apt-key add -
echo "deb http://dl.bintray.com/rabbitmq-erlang/debian bionic erlang-21.x" | tee /etc/apt/sources.list.d/erlang-21.x.list
echo "deb https://dl.bintray.com/rabbitmq/debian bionic main" | tee /etc/apt/sources.list.d/rabbitmq.list

apt update
wget -qO - https://download.docker.com/linux/ubuntu/gpg | apt-key add -
echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable" | tee /etc/apt/sources.list.d/docker-ce.list

apt update
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
# this file is just a bunch of functions meant to be called from other scripts


# Print usage help; if an exit code is given as $1, exit with it.
usage() {
  echo "usage: ./$(basename "$0") <download_dir>"
  echo "       ./$(basename "$0") /tmp/downloads"
  # ${1:-}: callers run under `set -u`, so a bare $1 would abort the script
  # with an "unbound variable" error when no exit code is passed
  [ -z "${1:-}" ] || exit "$1"
}

# Log helper: print the message (interpreting backslash escapes) to stdout
# and append it to the global $logfile.
echol() {
  local message="$1"
  echo -e "$message" | tee -a $logfile
}

# params: <file_path>
# Delete a single file, logging the action via echol (writes to $logfile).
# NOTE(review): exit_with_error is not defined anywhere in this file —
# presumably provided by another sourced script; verify it exists, otherwise
# the error path itself fails with "command not found".
remove_file() {
local file_path="$1"

echol "Removing file: $file_path"
rm -f "$file_path" || exit_with_error "Command failed: rm -f \"$file_path\""
}

# params: <dir_path>
# Create a directory (and any missing parents) if it does not exist yet.
create_directory() {
  local dir_path="$1"

  if [[ ! -d "$dir_path" ]]; then
    # quoted so a path containing spaces is created as a single directory
    # instead of being word-split into several mkdir arguments
    mkdir -p "$dir_path"
  fi
}

# params: <image_name> <dest_dir>
# Download a docker image as a docker-archive tarball using the bundled
# ./skopeo_linux binary. Skips the download if the target tarball exists.
# Depends on the ./skopeo_linux binary being present in the CWD and on
# network access to the registry.
# todo: skip on existing (maybe when checksum is correct?)
download_image() {
local image_name="$1"
local dest_dir="$2"

# split "repository:tag" on ':' into an array (relies on word-splitting)
local splited_image=(${image_name//:/ })
local repository=${splited_image[0]}
local tag=${splited_image[1]}
local repo_basename=$(basename -- "$repository")
local dst_image="${dest_dir}/${repo_basename}-${tag}.tar"

#[[ ! -f $dst_image ]] || remove_file "$dst_image"
if [[ -f $dst_image ]]; then
echo "Image: "$dst_image" already exists. Skipping..."
else
local tmp_file=$(mktemp)
echo "Downloading image: $1"
echo "Skopeo command is: ./skopeo_linux --insecure-policy copy docker://$image_name docker-archive:$dst_image:$repository:$tag"
# use temporary file for downloading to be safe from sudden interruptions (network, ctrl+c)
./skopeo_linux --insecure-policy copy docker://$image_name docker-archive:$tmp_file:$repository:$tag && mv $tmp_file $dst_image
fi
}

# params: <file_url> <dest_dir>
# Download a single file into dest_dir with wget; files that are already
# fully downloaded are skipped, files older than 1h are re-downloaded.
# Relies on globals: logfile (via echol).
download_file() {
  local file_url="$1"
  local dest_dir="$2"

  local file_name
  file_name=$(basename "$file_url")
  local dest_path="$dest_dir/$file_name"

  # wget with --timestamping sometimes fails on AWS with ERROR 403: Forbidden,
  # so instead of relying on it we expire files ourselves.
  #
  # Remove old files to force a re-download after a while — a precaution so
  # --continue won't append to (and corrupt) a local file if the file was
  # updated on the server without a name change.
  # The -f guard avoids a find error when the file was never downloaded.
  if [[ -f "$dest_path" && $(find "$dest_path" -mmin +60 -print) ]]; then
    echol "File ${dest_path} older than 1h, redownloading..."
    remove_file "$dest_path"
  fi

  # was: "Downloading file: $file" — $file is not defined in this function
  # and only worked because the caller's loop variable leaked into scope
  echol "Downloading file: $file_url"

  # --no-use-server-timestamps - we don't use --timestamping and we need to expire files somehow
  # --continue - don't download the same file multiple times, gracefully skip if file is fully downloaded
  wget --no-use-server-timestamps --continue --show-progress --directory-prefix="${dest_dir}" "${file_url}"
}
Original file line number Diff line number Diff line change
@@ -1,10 +1,142 @@
#!/bin/bash

./skopeo_linux --version
set -euo pipefail

./add-repositories.sh
# source common functions
. common.sh

PACKAGE_LIST=$(cat ./requirements.txt)
for package in $PACKAGE_LIST ; do
echo $package
apt-get install -y --download-only $package
done
if [[ $# -lt 1 ]]; then
usage
exit 1 # was: bare `exit` — a usage error must not report success
fi

if [[ "$EUID" -ne 0 ]]; then
echo "err: this script must be run as root" >&2
exit 1 # was: bare `exit` — a privilege error must not report success
fi

script_path="$( cd "$(dirname "$0")" ; pwd -P )"
input_file="${script_path}/requirements.txt"
dst_dir=$(readlink -m "$1") # beautify input path - remove double slashes if occurs
dst_dir_packages="${dst_dir}/packages"
dst_dir_files="${dst_dir}/files"
dst_dir_images="${dst_dir}/images"
deplist="${script_path}/.dependencies"
logfile="${script_path}/log"
download_cmd="apt-get download"
add_repos="${script_path}/add-repositories.sh"

# to download everything add "--recurse" here:
deplist_cmd() {
apt-cache depends --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends "$1"
}

# install prerequisites which might be missing on a minimal OS
# -y added so the script cannot hang on an interactive confirmation prompt
apt-get install -y wget gpg

# some quick sanity check
echo "dependency list: ${deplist}"
echo "command used to download packages: ${download_cmd}"
echo "destination directory for packages: ${dst_dir_packages}"

# make sure destination dir exists
mkdir -p "${dst_dir_packages}"
mkdir -p "${dst_dir_files}"
mkdir -p "${dst_dir_images}"

# add 3rd party repositories
. ${add_repos}

# parse the input file, separate by tags: [packages], [files], [images]
packages=$(awk '/^$/ || /^#/ {next}; /\[packages\]/ {f=1; next}; /^\[/ {f=0}; f {print $0}' "${input_file}")
files=$(awk '/^$/ || /^#/ {next}; /\[files\]/ {f=1; next}; /^\[/ {f=0}; f {print $0}' "${input_file}")
images=$(awk '/^$/ || /^#/ {next}; /\[images\]/ {f=1; next}; /^\[/ {f=0}; f {print $0}' "${input_file}")

printf "\n"

# clear list of cached dependencies if .dependencies is older than 15 minutes
find "$script_path" -type f -wholename "${deplist}" -mmin +15 -exec rm "${deplist}" \;
# clear list of cached dependencies if requirements.txt was recently edited
find "$script_path" -type f -wholename "$input_file" -mmin -1 -exec rm "${deplist}" \;

# PACKAGES
# if dependency list doesn't exist or is zero size then resolve dependency and store them in a deplist file
if [[ ! -f ${deplist} ]] || [[ ! -s ${deplist} ]] ; then
# clean dependency list if process gets interrupted
trap "rm -f ${deplist}; echo 'dependency resolution interrupted, cleaning cache file'" SIGINT SIGTERM
echo Resolving dependencies to download. This might take a while and will be cached in ${deplist}
while IFS= read -r package; do
echo "package read from requirements file: $package" | tee -a ${logfile}
# if package has a specified version e.g. "name 1.0" store it as "name=1.0*" for compatibility with "apt-get download"
package=$(echo ${package} | awk '{if($2 != "") {print $1 "=" $2 "*"} else {print $1}}')
echo "package to download: $package" | tee -a ${logfile}
# store package itself in the list of dependencies...
echo "${package}" >> "${deplist}"
# .. and create dependency list for the package
# (names only for dependencies, no version check here, not necessary as most dependencies are backward-compatible)
dependencies=$(deplist_cmd "${package}" | awk '/Depends/ && !/</ {print$2}' | tee -a "${deplist}")
done <<< "${packages}"
fi

# sort and uniq dependencies
sort -u -o ${deplist} ${deplist}

# be verbose, show what will be downloaded
echo "packages to be downloaded:"
cat -n "${deplist}"

# download dependencies (apt-get sandboxing warning when running as root are harmless)
cd $dst_dir_packages && xargs --no-run-if-empty --arg-file=${deplist} --delimiter='\n' ${download_cmd} | tee -a ${logfile}
cd $script_path

printf "\n"

# FILES
# process files
if [[ -z "${files}" ]]; then
echo "no files to download"
else
# be verbose, show what will be downloaded
# TODO: this is the list of all files shows on every run, not only the files that will be downloaded this run
echo "files to be downloaded:"
cat -n <<< "${files}"

printf "\n"
# download files using wget
while IFS= read -r file; do
# download files, skip if exists
download_file "${file}" "${dst_dir_files}"
done <<< "${files}"
fi

printf "\n"

# IMAGES
# process images

create_directory "${dst_dir_images}"

if [[ -z "${images}" ]]; then
echo "No images to download"
else
# be verbose, show what will be downloaded
echo "Images to be downloaded:"
cat -n <<< "${images}"

printf "\n"
# download images using skopeo
while IFS= read -r image_name; do
download_image "${image_name}" "${dst_dir_images}"
done <<< "${images}"
fi

# CLEANUP
# remove the 3rd party repo definitions that add-repositories.sh created,
# so the build host is left in its original repository configuration
for i in $(grep -o '[[:blank:]]/etc/apt/sources.list.d/.*list' ${add_repos}); do
if [[ -f ${i} ]]; then
echol "Cleaning up 3rd party repository: rm ${i}"
rm -f ${i}
#TODO: remove apt keys
fi
done
Loading

0 comments on commit ec1abf0

Please sign in to comment.