From 287ed6e1776f694acb23325714937d026b53919e Mon Sep 17 00:00:00 2001 From: cicharka Date: Mon, 21 Feb 2022 19:28:01 +0100 Subject: [PATCH 01/21] Zookeeper: move upgrade tasks to Zookeeper role * upgrade playbook updated in order to use upgrade zookeeper tasks from a role --- .../tasks/upgrade}/install-upgrade.yml | 0 .../zookeeper.yml => zookeeper/tasks/upgrade/main.yml} | 0 .../tasks/upgrade}/preflight-check.yml | 0 ansible/playbooks/upgrade.yml | 8 +++++--- 4 files changed, 5 insertions(+), 3 deletions(-) rename ansible/playbooks/roles/{upgrade/tasks/zookeeper => zookeeper/tasks/upgrade}/install-upgrade.yml (100%) rename ansible/playbooks/roles/{upgrade/tasks/zookeeper.yml => zookeeper/tasks/upgrade/main.yml} (100%) rename ansible/playbooks/roles/{upgrade/tasks/zookeeper => zookeeper/tasks/upgrade}/preflight-check.yml (100%) diff --git a/ansible/playbooks/roles/upgrade/tasks/zookeeper/install-upgrade.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/zookeeper/install-upgrade.yml rename to ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml diff --git a/ansible/playbooks/roles/upgrade/tasks/zookeeper.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/main.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/zookeeper.yml rename to ansible/playbooks/roles/zookeeper/tasks/upgrade/main.yml diff --git a/ansible/playbooks/roles/upgrade/tasks/zookeeper/preflight-check.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/preflight-check.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/zookeeper/preflight-check.yml rename to ansible/playbooks/roles/zookeeper/tasks/upgrade/preflight-check.yml diff --git a/ansible/playbooks/upgrade.yml b/ansible/playbooks/upgrade.yml index 81cb260ea1..a378398b0b 100644 --- a/ansible/playbooks/upgrade.yml +++ b/ansible/playbooks/upgrade.yml @@ -218,14 +218,16 @@ tasks_from: upgrade/main when: "'grafana' in upgrade_components or upgrade_components|length == 0" +# === kafka === + - hosts: zookeeper serial: 1 become: true become_method: sudo tasks: - - import_role: - name: upgrade - tasks_from: zookeeper + - include_role: + name: zookeeper + tasks_from: upgrade/main vars: { lock_file: /var/tmp/zookeeper-upgrade-in-progress.flag } when: "'zookeeper' in upgrade_components or upgrade_components|length == 0" From 169390184c8ef7addf27f4b9529a678ccc92109c Mon Sep 17 00:00:00 2001 From: cicharka Date: Tue, 22 Feb 2022 09:29:06 +0100 Subject: [PATCH 02/21] Zookeeper: refactor installation and upgrade --- .../roles/zookeeper/defaults/main.yml | 1 + .../common/download_and_unpack_binary.yml | 30 ++++++++ .../playbooks/roles/zookeeper/tasks/main.yml | 69 ++++--------------- .../tasks/upgrade/install-upgrade.yml | 47 +++---------- .../roles/zookeeper/tasks/upgrade/main.yml | 12 ++-- 5 files changed, 58 insertions(+), 101 deletions(-) create mode 100644 ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml diff --git a/ansible/playbooks/roles/zookeeper/defaults/main.yml b/ansible/playbooks/roles/zookeeper/defaults/main.yml index d74ce924bb..bb7d4fe902 100644 --- a/ansible/playbooks/roles/zookeeper/defaults/main.yml +++ b/ansible/playbooks/roles/zookeeper/defaults/main.yml @@ -5,6 +5,7 @@ zookeeper_bin_filename: "apache-zookeeper-3.5.8-bin.tar.gz" zookeeper_hosts: "{{ groups['zookeeper'] }}" +zookeeper_install_dir: "/opt/zookeeper-{{ zookeeper_version }}" zookeeper_data_dir: /var/lib/zookeeper 
zookeeper_log_dir: /var/log/zookeeper diff --git a/ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml new file mode 100644 index 0000000000..5fea6a0a12 --- /dev/null +++ b/ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml @@ -0,0 +1,30 @@ +--- +- name: Download Zookeeper binary + include_role: + name: download + tasks_from: download_file + vars: + file_name: "{{ zookeeper_bin_filename }}" + +- name: Create {{ zookeeper_install_dir }} directory + become: yes + file: + path: "{{ zookeeper_install_dir }}" + recurse: yes + owner: "{{ zookeeper_user }}" + group: "{{ zookeeper_group }}" + mode: u=rwx,g=rx,o=rx + state: directory + +- name: Unpack Zookeeper-{{ zookeeper_version }} binary + become: true + unarchive: + remote_src: yes + src: "{{ download_directory }}/{{ zookeeper_bin_filename }}" + dest: "{{ zookeeper_install_dir }}" + creates: "{{ zookeeper_install_dir }}/bin" + extra_opts: [--strip-components=1] + mode: u=rwx,g=rx,o=rx + owner: "{{ zookeeper_user }}" + group: "{{ zookeeper_group }}" + check_mode: false diff --git a/ansible/playbooks/roles/zookeeper/tasks/main.yml b/ansible/playbooks/roles/zookeeper/tasks/main.yml index ba5a093080..790fabb510 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/main.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/main.yml @@ -24,55 +24,8 @@ module_defaults: yum: { lock_timeout: "{{ yum_lock_timeout }}" } -- name: Check if jmx exporter is available - stat: - path: "{{ prometheus_jmx_exporter_path }}" - register: exporter - -- name: Set Zookeeper variable with version name - set_fact: - zookeeper_name: "zookeeper-{{ zookeeper_version }}" - changed_when: false - -- name: Set Zookeeper install dir for {{ zookeeper_name }} - set_fact: - zookeeper_install_dir: "/opt/{{ zookeeper_name }}" - -- name: Set Zookeeper file name to install - set_fact: - zookeeper_file_name: "{{ zookeeper_bin_filename }}" - -- name: Download Zookeeper binaries - include_role: - name: download - tasks_from: download_file - vars: - file_name: "{{ zookeeper_file_name }}" - -- name: Create {{ zookeeper_install_dir }} directories - become: yes - file: - path: "{{ item }}" - recurse: yes - owner: "{{ zookeeper_user }}" - group: "{{ zookeeper_group }}" - mode: u=rwx,g=rx,o= - state: directory - with_items: - - "{{ zookeeper_install_dir }}" - -- name: Unpack {{ zookeeper_name }} binary - become: true - unarchive: - remote_src: yes - src: "{{ download_directory }}/{{ zookeeper_file_name }}" - dest: "{{ zookeeper_install_dir }}" - creates: "{{ zookeeper_install_dir }}/bin" - extra_opts: [--strip-components=1] - mode: u=rwx,g=rx,o=rx - owner: "{{ zookeeper_user }}" - group: "{{ zookeeper_group }}" - check_mode: false +- name: Download and unpack Zookeeper's binary + include_tasks: common/download_and_unpack_binary.yml - name: Create directories file: @@ -86,6 +39,11 @@ - "{{ zookeeper_install_dir }}/conf" - "/etc/zookeeper/conf" +- name: Check if jmx exporter is available + stat: + path: "{{ prometheus_jmx_exporter_path }}" + register: exporter + - name: Create Zookeeper service template: src: zookeeper.service.j2 @@ -112,7 +70,11 @@ notify: Restart zookeeper - name: Link /opt/zookeeper to the right version - file: path=/opt/zookeeper state=link src="{{ zookeeper_install_dir }}" + file: + path: /opt/zookeeper + state: link + src: "{{ zookeeper_install_dir }}" + # force: ? 
- name: Add Zookeeper's bin dir to the PATH copy: @@ -127,16 +89,11 @@ notify: - Restart zookeeper -- name: Enable Zookeeper service +- name: Enable and start Zookeeper service service: name: zookeeper enabled: yes - -- name: Start Zookeeper - service: - name: zookeeper state: started - enabled: yes - include_tasks: metrics.yml when: exporter.stat.exists diff --git a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml index 7f43d185af..a8a78487e0 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml @@ -1,46 +1,19 @@ --- - -# Some tasks in this file are the same as in zookeeper role. It should be refactored (with splitting code into separate files) in order to reuse common tasks here. -- name: Download Zookeeper binaries - include_role: - name: download - tasks_from: download_file - vars: - file_name: "{{ zookeeper_defaults.zookeeper_bin_filename }}" - -- name: Create Zookeeper directories - become: yes - file: - path: "/opt/zookeeper-{{ zookeeper_defaults.zookeeper_version }}" - recurse: yes - owner: "{{ zookeeper_defaults.zookeeper_user }}" - group: "{{ zookeeper_defaults.zookeeper_group }}" - mode: u=rwx,g=rx,o=rx - state: directory - -- name: Uncompress Zookeeper installation file - unarchive: - remote_src: yes - src: "{{ download_directory }}/{{ zookeeper_defaults.zookeeper_bin_filename }}" - dest: /opt/zookeeper-{{ zookeeper_defaults.zookeeper_version }} - creates: "/opt/zookeeper-{{ zookeeper_defaults.zookeeper_version }}/bin" - extra_opts: [--strip-components=1] - mode: u=rwx,g=rx,o=rx - owner: "{{ zookeeper_defaults.zookeeper_user }}" - group: "{{ zookeeper_defaults.zookeeper_group }}" +- name: Download and unpack Zookeeper's binary + include_tasks: common/download_and_unpack_binary.yml - name: Copy configuration from previous version copy: remote_src: yes src: /opt/zookeeper/conf/ - dest: /opt/zookeeper-{{ zookeeper_defaults.zookeeper_version }}/conf + dest: /opt/zookeeper-{{ zookeeper_version }}/conf mode: preserve - name: Link /opt/zookeeper to recently installed version file: - dest: /opt/zookeeper + path: /opt/zookeeper state: link - src: /opt/zookeeper-{{ zookeeper_defaults.zookeeper_version }} + src: "{{ zookeeper_install_dir }}" force: yes - name: Reconfigure Zookeeper service to use symbolic link @@ -52,18 +25,18 @@ - name: Check if any snapshots exists in data dir what is necessary in order to run zookeeper after upgrade find: - paths: "{{ zookeeper_defaults.zookeeper_data_dir }}/version-2" + paths: "{{ zookeeper_data_dir }}/version-2" patterns: "snapshot.*" register: snapshot_exists # From 3.5.5 version, ZooKeeper is not able to start when no snapshot files present, what is valid scenario in 3.4.X version. Empty snapshot downloaded from Zookeeper's Jira ticket. 
- name: Copy empty snapshot if not exists copy: - dest: "{{ zookeeper_defaults.zookeeper_data_dir }}/version-2" + dest: "{{ zookeeper_data_dir }}/version-2" src: roles/zookeeper/files/snapshot.0 mode: u=rw,g=r,o=r - owner: "{{ zookeeper_defaults.zookeeper_user }}" - group: "{{ zookeeper_defaults.zookeeper_group }}" + owner: "{{ zookeeper_user }}" + group: "{{ zookeeper_group }}" when: snapshot_exists.matched == 0 - name: Start Zookeeper service @@ -76,4 +49,4 @@ file: path: /opt/zookeeper-{{ before_upgrade_zookeeper_version }} state: absent - when: before_upgrade_zookeeper_version != zookeeper_defaults.zookeeper_version + when: before_upgrade_zookeeper_version != zookeeper_version diff --git a/ansible/playbooks/roles/zookeeper/tasks/upgrade/main.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/main.yml index 8e4778e0b9..18621892d7 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/upgrade/main.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/upgrade/main.yml @@ -1,11 +1,6 @@ --- -- name: Include defaults from zookeeper role - include_vars: - file: roles/zookeeper/defaults/main.yml - name: zookeeper_defaults - - name: Include pre-flight checks - include_tasks: zookeeper/preflight-check.yml + include_tasks: upgrade/preflight-check.yml - name: Get installed Zookeeper's version stat: @@ -41,9 +36,10 @@ state: stopped - name: Include upgrade Zookeeper task - include_tasks: zookeeper/install-upgrade.yml + include_tasks: upgrade/install-upgrade.yml when: - - lock_file_status.stat.exists or before_upgrade_zookeeper_version is version( zookeeper_defaults.zookeeper_version, '<' ) + - lock_file_status.stat.exists + or before_upgrade_zookeeper_version is version( zookeeper_version, '<' ) - name: Remove Zookeeper upgrade flag file file: From 553b4e4eadbde298599df2f6cc7a9f34c85138a7 Mon Sep 17 00:00:00 2001 From: cicharka Date: Tue, 22 Feb 2022 11:01:50 +0100 Subject: [PATCH 03/21] Zookeeper: small fixes Signed-off-by: cicharka --- ansible/playbooks/roles/zookeeper/tasks/metrics.yml | 2 +- .../playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ansible/playbooks/roles/zookeeper/tasks/metrics.yml b/ansible/playbooks/roles/zookeeper/tasks/metrics.yml index 7e5e838131..4a78350d9f 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/metrics.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/metrics.yml @@ -5,7 +5,7 @@ append: yes - name: prometheus jmx | configuration file - become: yes + become: true copy: dest: "{{ prometheus_jmx_config }}" src: jmx-zookeeper-config.yml diff --git a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml index a8a78487e0..8f0ff60d41 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml @@ -6,7 +6,7 @@ copy: remote_src: yes src: /opt/zookeeper/conf/ - dest: /opt/zookeeper-{{ zookeeper_version }}/conf + dest: "{{ zookeeper_install_dir }}/conf" mode: preserve - name: Link /opt/zookeeper to recently installed version From 45faecd7b9e35697e56c9a6e538cc1b70bc4a0d7 Mon Sep 17 00:00:00 2001 From: cicharka Date: Tue, 22 Feb 2022 11:07:01 +0100 Subject: [PATCH 04/21] Kafka: move upgrade tasks to Kafka role --- .../tasks/kafka => kafka/tasks/upgrade}/install-upgrade.yml | 0 .../tasks/kafka.yml => kafka/tasks/upgrade/main.yml} | 0 .../tasks/kafka => kafka/tasks/upgrade}/preflight-check.yml | 0 .../kafka => 
kafka/tasks/upgrade}/set-updated-version.yml | 0 .../kafka => kafka/tasks/upgrade}/update-properties.yml | 0 ansible/playbooks/upgrade.yml | 6 +++--- 6 files changed, 3 insertions(+), 3 deletions(-) rename ansible/playbooks/roles/{upgrade/tasks/kafka => kafka/tasks/upgrade}/install-upgrade.yml (100%) rename ansible/playbooks/roles/{upgrade/tasks/kafka.yml => kafka/tasks/upgrade/main.yml} (100%) rename ansible/playbooks/roles/{upgrade/tasks/kafka => kafka/tasks/upgrade}/preflight-check.yml (100%) rename ansible/playbooks/roles/{upgrade/tasks/kafka => kafka/tasks/upgrade}/set-updated-version.yml (100%) rename ansible/playbooks/roles/{upgrade/tasks/kafka => kafka/tasks/upgrade}/update-properties.yml (100%) diff --git a/ansible/playbooks/roles/upgrade/tasks/kafka/install-upgrade.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/kafka/install-upgrade.yml rename to ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml diff --git a/ansible/playbooks/roles/upgrade/tasks/kafka.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/kafka.yml rename to ansible/playbooks/roles/kafka/tasks/upgrade/main.yml diff --git a/ansible/playbooks/roles/upgrade/tasks/kafka/preflight-check.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/kafka/preflight-check.yml rename to ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml diff --git a/ansible/playbooks/roles/upgrade/tasks/kafka/set-updated-version.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/kafka/set-updated-version.yml rename to ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml diff --git a/ansible/playbooks/roles/upgrade/tasks/kafka/update-properties.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml similarity index 100% rename from ansible/playbooks/roles/upgrade/tasks/kafka/update-properties.yml rename to ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml diff --git a/ansible/playbooks/upgrade.yml b/ansible/playbooks/upgrade.yml index a378398b0b..50521c275d 100644 --- a/ansible/playbooks/upgrade.yml +++ b/ansible/playbooks/upgrade.yml @@ -239,9 +239,9 @@ shell: executable: /bin/bash tasks: - - import_role: - name: upgrade - tasks_from: kafka + - include_role: + name: kafka + tasks_from: upgrade/main vars: { lock_file: /var/tmp/kafka-upgrade-in-progress.flag } when: "'kafka' in upgrade_components or upgrade_components|length == 0" From 77770b51f44ecc599de70406bd88efa0e649d488 Mon Sep 17 00:00:00 2001 From: cicharka Date: Tue, 22 Feb 2022 15:01:33 +0100 Subject: [PATCH 05/21] Kafka: Refactor installation and upgrade tasks --- .../playbooks/roles/kafka/defaults/main.yml | 2 + .../common/download_and_unpack_binary.yml | 22 ++++++++++ .../roles/kafka/tasks/{ => common}/start.yml | 1 + .../roles/kafka/tasks/common/stop.yml | 6 +++ ansible/playbooks/roles/kafka/tasks/main.yml | 3 +- .../playbooks/roles/kafka/tasks/metrics.yml | 2 +- .../roles/kafka/tasks/setup-kafka.yml | 41 ++++--------------- ansible/playbooks/roles/kafka/tasks/stop.yml | 4 -- .../kafka/tasks/upgrade/install-upgrade.yml | 31 +++----------- .../roles/kafka/tasks/upgrade/main.yml | 27 ++++-------- .../tasks/upgrade/set-updated-version.yml | 5 +-- 11 files changed, 54 
insertions(+), 90 deletions(-) create mode 100644 ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml rename ansible/playbooks/roles/kafka/tasks/{ => common}/start.yml (92%) create mode 100644 ansible/playbooks/roles/kafka/tasks/common/stop.yml delete mode 100644 ansible/playbooks/roles/kafka/tasks/stop.yml diff --git a/ansible/playbooks/roles/kafka/defaults/main.yml b/ansible/playbooks/roles/kafka/defaults/main.yml index 0abffa7915..418416f82f 100644 --- a/ansible/playbooks/roles/kafka/defaults/main.yml +++ b/ansible/playbooks/roles/kafka/defaults/main.yml @@ -1,4 +1,6 @@ kafka_version: 2.6.0 scala_version: 2.12 kafka_bin_filename: "kafka_2.12-2.6.0.tgz" +kafka_install_dir: "/opt/kafka_{{ scala_version }}-{{ kafka_version }}" + prometheus_jmx_exporter_path: /opt/jmx-exporter/jmx_prometheus_javaagent.jar diff --git a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml new file mode 100644 index 0000000000..3f7450c9a9 --- /dev/null +++ b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml @@ -0,0 +1,22 @@ +--- + +- name: Download Kafka binaries + include_role: + name: download + tasks_from: download_file + vars: + file_name: "{{ kafka_bin_filename }}" + +- name: Uncompress the Kafka tar + unarchive: + remote_src: yes + creates: "{{ kafka_install_dir }}" + src: "{{ download_directory }}/{{ kafka_bin_filename }}" + dest: /opt + +- name: Change ownership on Kafka directory + file: + path: "{{ kafka_install_dir }}" + state: directory + owner: kafka + group: kafka \ No newline at end of file diff --git a/ansible/playbooks/roles/kafka/tasks/start.yml b/ansible/playbooks/roles/kafka/tasks/common/start.yml similarity index 92% rename from ansible/playbooks/roles/kafka/tasks/start.yml rename to ansible/playbooks/roles/kafka/tasks/common/start.yml index 47a87e4181..7ea0224c90 100644 --- a/ansible/playbooks/roles/kafka/tasks/start.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/start.yml @@ -5,6 +5,7 @@ name: kafka state: started enabled: yes + daemon-reload: yes # - name: wait for kafka port # wait_for: host={{kafka.listen_address| default('localhost')}} port={{kafka.port}} state=started timeout={{ kafka.wait_for_period }} diff --git a/ansible/playbooks/roles/kafka/tasks/common/stop.yml b/ansible/playbooks/roles/kafka/tasks/common/stop.yml new file mode 100644 index 0000000000..27b4ab806e --- /dev/null +++ b/ansible/playbooks/roles/kafka/tasks/common/stop.yml @@ -0,0 +1,6 @@ +--- + +- name: Stop Kafka + systemd: + name: kafka + state: stopped diff --git a/ansible/playbooks/roles/kafka/tasks/main.yml b/ansible/playbooks/roles/kafka/tasks/main.yml index 30df870093..d9bfcf2938 100644 --- a/ansible/playbooks/roles/kafka/tasks/main.yml +++ b/ansible/playbooks/roles/kafka/tasks/main.yml @@ -13,5 +13,4 @@ - include_tasks: metrics.yml when: exporter.stat.exists -- include_tasks: start.yml - +- include_tasks: common/start.yml diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 519d2476ff..5860540c83 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -7,7 +7,7 @@ append: yes - name: prometheus jmx | configuration file - become: yes + become: true copy: dest: "{{ specification.prometheus_jmx_config }}" src: jmx-kafka-config.yml diff --git a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml 
b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml index 7b793c8191..4db5b1fc6c 100644 --- a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml +++ b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml @@ -24,16 +24,8 @@ module_defaults: yum: { lock_timeout: "{{ yum_lock_timeout }}" } -- name: Set Kafka file name to install - set_fact: - kafka_file_name: "{{ kafka_bin_filename }}" - -- name: Download Kafka binaries - include_role: - name: download - tasks_from: download_file - vars: - file_name: "{{ kafka_file_name }}" +- name: Download and unpack Kafka's binary + include_tasks: common/download_and_unpack_binary.yml - name: Add Kafka's bin dir to the PATH copy: @@ -41,31 +33,11 @@ dest: "/etc/profile.d/kafka_path.sh" mode: 0755 -- name: Check for Kafka package - stat: - path: /opt/kafka_{{ scala_version }}-{{ kafka_version }}/bin/kafka-server-start.sh - register: kafka_package - -- name: Uncompress the Kafka tar - unarchive: - remote_src: yes - creates: /opt/kafka_{{ scala_version }}-{{ kafka_version }} - src: "{{ download_directory }}/{{ kafka_file_name }}" - dest: /opt - when: not kafka_package.stat.exists - -- name: Change ownership on kafka directory. - file: - path: /opt/kafka_{{ scala_version }}-{{ kafka_version }} - state: directory - owner: kafka - group: kafka - - name: Link /opt/kafka to the right version file: - dest: /opt/kafka + path: /opt/kafka state: link - src: /opt/kafka_{{ scala_version }}-{{ kafka_version }} + src: "{{ kafka_install_dir }}" - name: Create systemd config template: @@ -78,7 +50,8 @@ - restart kafka - name: Reload daemon - command: systemctl daemon-reload + systemd: + daemon-reload: yes - name: Create data_dir file: @@ -114,7 +87,7 @@ # Setup log4j.properties - name: Create log4j.properties file: - dest: "{{ specification.kafka_var.conf_dir }}/log4j.properties" + path: "{{ specification.kafka_var.conf_dir }}/log4j.properties" owner: "{{ specification.kafka_var.user }}" group: "{{ specification.kafka_var.group }}" mode: 0644 diff --git a/ansible/playbooks/roles/kafka/tasks/stop.yml b/ansible/playbooks/roles/kafka/tasks/stop.yml deleted file mode 100644 index d8416fb302..0000000000 --- a/ansible/playbooks/roles/kafka/tasks/stop.yml +++ /dev/null @@ -1,4 +0,0 @@ ---- - -- name: Stop Kafka - service: name=kafka state=stopped diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml index eb2448e81d..68ead8df3c 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml @@ -1,45 +1,24 @@ --- -- name: Set Kafka installation file name as fact - set_fact: - kafka_installation_file_name: "kafka_{{ scala_version }}-{{ kafka_version.new }}.tgz" - -- name: Download Kafka binaries - include_role: - name: download - tasks_from: download_file - vars: - file_name: "{{ kafka_installation_file_name }}" - -- name: Uncompress Kafka installation file - unarchive: - remote_src: yes - src: "{{ download_directory }}/{{ kafka_installation_file_name }}" - dest: /opt - -- name: Change ownership on kafka directory - file: - path: /opt/kafka_{{ scala_version }}-{{ kafka_version.new }} - state: directory - owner: kafka - group: kafka +- name: Download and unpack Kafka's binary + include_tasks: common/download_and_unpack_binary.yml - name: Copy configuration from previous version copy: remote_src: yes src: /opt/kafka/config/ - dest: /opt/kafka_{{ scala_version }}-{{ kafka_version.new }}/config + dest: "{{ 
kafka_install_dir}}/config" mode: preserve - name: Link /opt/kafka to recently installed version file: dest: /opt/kafka state: link - src: /opt/kafka_{{ scala_version }}-{{ kafka_version.new }} + src: "{{ kafka_install_dir}}" force: yes - name: Remove previous version binaries file: - path: /opt/kafka_{{ scala_version }}-{{ kafka_version.old }} + path: /opt/kafka_{{ scala_version }}-{{ old_kafka_version }} state: absent - name: Get log.dirs property diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml index 68798a0953..d79968d856 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml @@ -1,11 +1,6 @@ --- -- name: Include defaults from kafka role - include_vars: - file: roles/kafka/defaults/main.yml - name: kafka_defaults - - name: Include pre-flight checks - include_tasks: kafka/preflight-check.yml + include_tasks: upgrade/preflight-check.yml - name: Get installed Kafka version shell: >- @@ -13,13 +8,9 @@ /opt/kafka/bin/kafka-server-start.sh --version | grep Commit | grep -oP '^\d+\.\d+\.\d+' register: result -- name: Set common facts +- name: Set old Kafka version fact set_fact: - kafka_version: - old: "{{ result.stdout }}" - new: "{{ kafka_defaults.kafka_version }}" - scala_version: "{{ kafka_defaults.scala_version }}" - kafka_bin_filename: "{{ kafka_defaults.kafka_bin_filename }}" + old_kafka_version: "{{ result.stdout }}" - name: Check for upgrade flag file stat: @@ -34,20 +25,18 @@ state: touch - name: Stop Kafka service - service: - name: kafka - state: stopped + include_tasks: common/stop.yml - name: Include update Kafka properties tasks - include_tasks: kafka/update-properties.yml + include_tasks: upgrade/update-properties.yml - name: Include Kafka upgrade tasks - include_tasks: kafka/install-upgrade.yml + include_tasks: upgrade/install-upgrade.yml - name: Include set Kafka version tasks - include_tasks: kafka/set-updated-version.yml + include_tasks: upgrade/set-updated-version.yml when: - - lock_file_status.stat.exists or kafka_version.old is version( kafka_version.new, '<' ) + - lock_file_status.stat.exists or old_kafka_version is version( kafka_version, '<' ) - name: Remove Kafka upgrade flag file file: diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml index 0eda1ed76c..6014cb41f6 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml @@ -10,7 +10,4 @@ line: "inter.broker.protocol.version={{ kafka_version.new }}" - name: Start kafka service - systemd: - name: kafka - state: started - daemon-reload: yes + include_tasks: common/start.yml From d360f2fe22be4b43d2dac28dc88e60a31250a282 Mon Sep 17 00:00:00 2001 From: cicharka Date: Wed, 23 Feb 2022 16:27:39 +0100 Subject: [PATCH 06/21] Kafka: upgrade to version 2.8.1 --- ansible/playbooks/roles/kafka/defaults/main.yml | 4 ++-- .../roles/kafka/tasks/common/download_and_unpack_binary.yml | 2 +- .../files/download-requirements/requirements/x86_64/files.yml | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ansible/playbooks/roles/kafka/defaults/main.yml b/ansible/playbooks/roles/kafka/defaults/main.yml index 418416f82f..d20bad6641 100644 --- a/ansible/playbooks/roles/kafka/defaults/main.yml +++ b/ansible/playbooks/roles/kafka/defaults/main.yml @@ -1,6 +1,6 @@ -kafka_version: 2.6.0 
+kafka_version: 2.8.1 scala_version: 2.12 -kafka_bin_filename: "kafka_2.12-2.6.0.tgz" +kafka_bin_filename: "kafka_2.12-2.8.1.tgz" kafka_install_dir: "/opt/kafka_{{ scala_version }}-{{ kafka_version }}" prometheus_jmx_exporter_path: /opt/jmx-exporter/jmx_prometheus_javaagent.jar diff --git a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml index 3f7450c9a9..59d7974b2a 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml @@ -19,4 +19,4 @@ path: "{{ kafka_install_dir }}" state: directory owner: kafka - group: kafka \ No newline at end of file + group: kafka diff --git a/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/files.yml b/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/files.yml index 27734b80ec..cf1c6844dc 100644 --- a/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/files.yml +++ b/ansible/playbooks/roles/repository/files/download-requirements/requirements/x86_64/files.yml @@ -14,8 +14,8 @@ files: sha256: 1d1a008c5e29673b404a9ce119b7516fa59974aeda2f47d4a0446d102abce8a1 # --- Misc --- - 'https://archive.apache.org/dist/kafka/2.6.0/kafka_2.12-2.6.0.tgz': - sha256: 086bf9ca1fcbe2abe5c62e73d6f172adb1ee5a5b42732e153fb4d4ec82dab69f + 'https://archive.apache.org/dist/kafka/2.8.1/kafka_2.12-2.8.1.tgz': + sha256: 175a4134efc569a586d58916cd16ce70f868b13dea2b5a3d12a67b1395d59f98 'https://archive.apache.org/dist/zookeeper/zookeeper-3.5.8/apache-zookeeper-3.5.8-bin.tar.gz': sha256: c35ed6786d59b73920243f1a324d24c2ddfafb379041d7a350cc9a341c52caf3 From 50421b7b29008608de06165e4d7c79395cfbf57a Mon Sep 17 00:00:00 2001 From: cicharka Date: Thu, 24 Feb 2022 09:12:46 +0100 Subject: [PATCH 07/21] Remove kafka_var #2803 --- .../kafka/tasks/generate-certificates.yml | 88 +++---- .../playbooks/roles/kafka/tasks/metrics.yml | 6 +- .../roles/kafka/tasks/setup-kafka.yml | 44 ++-- .../kafka/templates/client-ssl.properties.j2 | 4 +- .../roles/kafka/templates/kafka.service.j2 | 10 +- .../templates/kafka_producer_consumer.py.j2 | 6 +- .../kafka/templates/kafka_server_jaas.conf.j2 | 6 +- .../roles/kafka/templates/logrotate.conf.j2 | 2 +- .../kafka/templates/server.properties.j2 | 86 +++---- docs/home/howto/RETENTION.md | 13 +- .../common/defaults/configuration/kafka.yml | 131 +++++----- .../common/validation/configuration/kafka.yml | 239 +++++++++--------- 12 files changed, 315 insertions(+), 320 deletions(-) diff --git a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml index 18ef907cd2..04184fe72e 100644 --- a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml +++ b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml @@ -1,40 +1,40 @@ - name: Create stores directory file: - path: "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}" + path: "{{ specification.security.ssl.server.keystore_location | dirname }}" state: directory - owner: "{{ specification.kafka_var.user }}" - group: "{{ specification.kafka_var.group }}" + owner: "{{ specification.user }}" + group: "{{ specification.group }}" mode: "0755" - name: Check if keystore exists on broker stat: - path: "{{ specification.kafka_var.security.ssl.server.keystore_location }}" + path: "{{ 
specification.security.ssl.server.keystore_location }}" changed_when: false register: keystore_exists - name: Generate keystore for each server - shell: keytool -keystore {{ specification.kafka_var.security.ssl.server.keystore_location }} \ - -alias localhost -validity {{ specification.kafka_var.security.ssl.server.cert_validity }} -genkey -keyalg RSA \ - -noprompt -storepass {{ specification.kafka_var.security.ssl.server.passwords.keystore }} \ - -keypass {{ specification.kafka_var.security.ssl.server.passwords.key }} \ + shell: keytool -keystore {{ specification.security.ssl.server.keystore_location }} \ + -alias localhost -validity {{ specification.security.ssl.server.cert_validity }} -genkey -keyalg RSA \ + -noprompt -storepass {{ specification.security.ssl.server.passwords.keystore }} \ + -keypass {{ specification.security.ssl.server.passwords.key }} \ -dname "CN={{ inventory_hostname }}" -ext SAN="DNS:{{ inventory_hostname }}" when: - not keystore_exists.stat.exists - name: Check if signing certificate exists stat: - path: "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/ca-cert" + path: "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" register: signing_certificate_exists changed_when: false when: - groups['kafka'][0] == inventory_hostname - name: Generate signing certificate - shell: openssl req -new -x509 -keyout {{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/ca-key \ - -out {{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/ca-cert \ - -days {{ specification.kafka_var.security.ssl.server.cert_validity }} \ + shell: openssl req -new -x509 -keyout {{ specification.security.ssl.server.keystore_location | dirname }}/ca-key \ + -out {{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert \ + -days {{ specification.security.ssl.server.cert_validity }} \ -subj "/CN={{ inventory_hostname }}" \ - --passout pass:{{ specification.kafka_var.security.ssl.server.passwords.key }} + --passout pass:{{ specification.security.ssl.server.passwords.key }} when: - groups['kafka'][0] == inventory_hostname - not signing_certificate_exists.stat.exists @@ -42,14 +42,14 @@ - name: Create kafka certificates directory on Epiphany host become: false file: - path: "{{ specification.kafka_var.security.ssl.server.local_cert_download_path }}" + path: "{{ specification.security.ssl.server.local_cert_download_path }}" state: directory delegate_to: localhost - name: Fetching files fetch: - src: "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/{{ item }}" - dest: "{{ specification.kafka_var.security.ssl.server.local_cert_download_path }}/{{ item }}" + src: "{{ specification.security.ssl.server.keystore_location | dirname }}/{{ item }}" + dest: "{{ specification.security.ssl.server.local_cert_download_path }}/{{ item }}" flat: yes loop: - "ca-cert" @@ -59,8 +59,8 @@ - name: Copy signing certificate and key to brokers copy: - src: "{{ specification.kafka_var.security.ssl.server.local_cert_download_path }}/{{ item }}" - dest: "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/" + src: "{{ specification.security.ssl.server.local_cert_download_path }}/{{ item }}" + dest: "{{ specification.security.ssl.server.keystore_location | dirname }}/" loop: - "ca-cert" - "ca-key" @@ -69,20 +69,20 @@ - name: Check if trustore exists stat: - path: "{{ specification.kafka_var.security.ssl.server.truststore_location }}" + path: "{{ 
specification.security.ssl.server.truststore_location }}" register: trustore_exists - name: Create trustore - shell: keytool -noprompt -keystore "{{ specification.kafka_var.security.ssl.server.truststore_location }}" -alias CARoot \ - -import -file "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/ca-cert" \ - -storepass {{ specification.kafka_var.security.ssl.server.passwords.keystore }} \ - -keypass {{ specification.kafka_var.security.ssl.server.passwords.key }} + shell: keytool -noprompt -keystore "{{ specification.security.ssl.server.truststore_location }}" -alias CARoot \ + -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} \ + -keypass {{ specification.security.ssl.server.passwords.key }} when: - not trustore_exists.stat.exists - name: Check if CA certificate is already imported - shell: keytool -list -v -keystore {{ specification.kafka_var.security.ssl.server.keystore_location }} \ - -storepass {{ specification.kafka_var.security.ssl.server.passwords.keystore }} \ + shell: keytool -list -v -keystore {{ specification.security.ssl.server.keystore_location }} \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} \ | grep -i "Alias name" | grep -i "caroot" failed_when: "caroot_exists.rc == 2" changed_when: false @@ -90,8 +90,8 @@ - name: Check if certificate signed by CA is already imported shell: |- - keytool -list -v -keystore {{ specification.kafka_var.security.ssl.server.keystore_location }} \ - -storepass {{ specification.kafka_var.security.ssl.server.passwords.keystore }} \ + keytool -list -v -keystore {{ specification.security.ssl.server.keystore_location }} \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} \ -alias localhost \ | grep -i 'Certificate chain length: 2' failed_when: "signed_cert_exists.rc == 2" @@ -99,41 +99,41 @@ register: signed_cert_exists - name: Export certificate to sign certificate with CA - shell: keytool -noprompt -keystore {{ specification.kafka_var.security.ssl.server.keystore_location }} \ + shell: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} \ -alias localhost -certreq \ - -file "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/cert-file" \ - -storepass {{ specification.kafka_var.security.ssl.server.passwords.keystore }} \ - -keypass {{ specification.kafka_var.security.ssl.server.passwords.key }} + -file "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-file" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} \ + -keypass {{ specification.security.ssl.server.passwords.key }} when: - signed_cert_exists.rc == 1 - name: Signing certificate with CA - shell: openssl x509 -req -CA "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/ca-cert" \ - -CAkey "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/ca-key" \ - -in "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/cert-file" \ - -out "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/cert-signed" \ - -days {{ specification.kafka_var.security.ssl.server.cert_validity }} -CAcreateserial \ - -passin pass:{{ specification.kafka_var.security.ssl.server.passwords.key }} + shell: openssl x509 -req -CA "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ + -CAkey "{{ 
specification.security.ssl.server.keystore_location | dirname }}/ca-key" \ + -in "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-file" \ + -out "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-signed" \ + -days {{ specification.security.ssl.server.cert_validity }} -CAcreateserial \ + -passin pass:{{ specification.security.ssl.server.passwords.key }} when: - signed_cert_exists.rc == 1 - name: Import certificate CA - shell: keytool -noprompt -keystore {{ specification.kafka_var.security.ssl.server.keystore_location }} -alias CARoot \ - -import -file "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/ca-cert" \ - -storepass {{ specification.kafka_var.security.ssl.server.passwords.keystore }} + shell: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} -alias CARoot \ + -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} when: - caroot_exists.rc == 1 - name: Import certificate signed by CA - shell: keytool -noprompt -keystore {{ specification.kafka_var.security.ssl.server.keystore_location }} -alias localhost \ - -import -file "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/cert-signed" \ - -storepass {{ specification.kafka_var.security.ssl.server.passwords.keystore }} + shell: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} -alias localhost \ + -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-signed" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} when: - signed_cert_exists.rc == 1 - name: Remove extracted key and cert from others than root node file: - path: "{{ specification.kafka_var.security.ssl.server.keystore_location | dirname }}/{{ item }}" + path: "{{ specification.security.ssl.server.keystore_location | dirname }}/{{ item }}" state: absent loop: - "ca-cert" diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 5860540c83..6b2c3a760b 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -2,7 +2,7 @@ - name: prometheus jmx | add kafka user to correct jmx exporter user user: - name: "{{ specification.kafka_var.user }}" + name: "{{ specification.user }}" groups: "{{ specification.jmx_exporter_group }}" append: yes @@ -11,8 +11,8 @@ copy: dest: "{{ specification.prometheus_jmx_config }}" src: jmx-kafka-config.yml - owner: "{{ specification.kafka_var.user }}" - group: "{{ specification.kafka_var.group }}" + owner: "{{ specification.user }}" + group: "{{ specification.group }}" mode: 0644 - name: delegated | create prometheus system group diff --git a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml index 4db5b1fc6c..7fc630d28f 100644 --- a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml +++ b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml @@ -1,14 +1,14 @@ --- - name: Setup group group: - name: "{{ specification.kafka_var.group }}" + name: "{{ specification.group }}" system: yes - name: Setup user user: - name: "{{ specification.kafka_var.user }}" + name: "{{ specification.user }}" system: yes - group: "{{ specification.kafka_var.group }}" + group: "{{ specification.group }}" shell: "/usr/sbin/nologin" - name: Install Java package @@ -55,31 
+55,31 @@ - name: Create data_dir file: - path: "{{ specification.kafka_var.data_dir }}" + path: "{{ specification.data_dir }}" state: directory - owner: "{{ specification.kafka_var.user }}" - group: "{{ specification.kafka_var.group }}" + owner: "{{ specification.user }}" + group: "{{ specification.group }}" mode: 0755 - name: Remove lost+found in the datadir file: - path: "{{ specification.kafka_var.data_dir }}/lost+found" + path: "{{ specification.data_dir }}/lost+found" state: absent - name: Create log_dir file: - path: "{{ specification.kafka_var.log_dir }}" + path: "{{ specification.log_dir }}" state: directory - owner: "{{ specification.kafka_var.user }}" - group: "{{ specification.kafka_var.group }}" + owner: "{{ specification.user }}" + group: "{{ specification.group }}" mode: 0755 - name: Create /etc/kafka directory file: path: /etc/kafka state: directory - owner: "{{ specification.kafka_var.user }}" - group: "{{ specification.kafka_var.group }}" + owner: "{{ specification.user }}" + group: "{{ specification.group }}" # - name: link conf_dir to /opt/kafka/config # file: dest=/etc/kafka owner=kafka group=kafka state=link src=/opt/kafka/config @@ -87,23 +87,23 @@ # Setup log4j.properties - name: Create log4j.properties file: - path: "{{ specification.kafka_var.conf_dir }}/log4j.properties" - owner: "{{ specification.kafka_var.user }}" - group: "{{ specification.kafka_var.group }}" + path: "{{ specification.conf_dir }}/log4j.properties" + owner: "{{ specification.user }}" + group: "{{ specification.group }}" mode: 0644 - name: Generate certificate include_tasks: generate-certificates.yml when: - - specification.kafka_var.security.ssl.enabled is defined - - specification.kafka_var.security.ssl.enabled + - specification.security.ssl.enabled is defined + - specification.security.ssl.enabled # Setup server.properties - name: Create server.properties template: - dest: "{{ specification.kafka_var.conf_dir }}/server.properties" - owner: "{{ specification.kafka_var.user }}" - group: "{{ specification.kafka_var.group }}" + dest: "{{ specification.conf_dir }}/server.properties" + owner: "{{ specification.user }}" + group: "{{ specification.group }}" # Was 0640 mode: 0644 src: server.properties.j2 @@ -114,7 +114,7 @@ - name: Delete meta.properties become: true file: - path: "{{ specification.kafka_var.data_dir }}/meta.properties" + path: "{{ specification.data_dir }}/meta.properties" state: absent when: create_server_properties.changed @@ -128,7 +128,7 @@ - name: configure system settings, file descriptors and number of threads for kafka pam_limits: - domain: "{{ specification.kafka_var.user }}" + domain: "{{ specification.user }}" limit_type: "{{ item.limit_type }}" limit_item: "{{ item.limit_item }}" value: "{{item.value}}" diff --git a/ansible/playbooks/roles/kafka/templates/client-ssl.properties.j2 b/ansible/playbooks/roles/kafka/templates/client-ssl.properties.j2 index 81b69ab3d6..c4807c9863 100644 --- a/ansible/playbooks/roles/kafka/templates/client-ssl.properties.j2 +++ b/ansible/playbooks/roles/kafka/templates/client-ssl.properties.j2 @@ -1,4 +1,4 @@ -bootstrap.servers={{ kafka_hosts }}:{{ specification.kafka_var.security.ssl.port }} +bootstrap.servers={{ kafka_hosts }}:{{ specification.security.ssl.port }} security.protocol=SSL ssl.truststore.location=/var/private/ssl/kafka.client.truststore.jks -ssl.truststore.password={{ specification.kafka_var.security.ssl.client.passwords.truststore }} +ssl.truststore.password={{ specification.security.ssl.client.passwords.truststore }} diff --git 
a/ansible/playbooks/roles/kafka/templates/kafka.service.j2 b/ansible/playbooks/roles/kafka/templates/kafka.service.j2 index 18c69243ee..71d6815140 100644 --- a/ansible/playbooks/roles/kafka/templates/kafka.service.j2 +++ b/ansible/playbooks/roles/kafka/templates/kafka.service.j2 @@ -2,8 +2,8 @@ Description=Kafka Daemon After=zookeeper.service -{% if specification.kafka_var.javax_net_debug is defined %} -{% set javax_debug = '-Djavax.net.debug=' ~ specification.kafka_var.javax_net_debug %} +{% if specification.javax_net_debug is defined %} +{% set javax_debug = '-Djavax.net.debug=' ~ specification.javax_net_debug %} {% else %} {% set javax_debug = '' %} {% endif %} @@ -14,14 +14,14 @@ User=kafka Group=kafka LimitNOFILE=32768 Restart=on-failure -Environment="KAFKA_HEAP_OPTS={{ specification.kafka_var.heap_opts }}" -Environment="LOG_DIR={{ specification.kafka_var.log_dir }}" +Environment="KAFKA_HEAP_OPTS={{ specification.heap_opts }}" +Environment="LOG_DIR={{ specification.log_dir }}" {% if exporter.stat.exists %} Environment="KAFKA_OPTS={{ javax_debug }} -javaagent:{{ prometheus_jmx_exporter_path }}={{ specification.prometheus_jmx_exporter_web_listen_port }}:{{ specification.prometheus_jmx_config }}" {% else %} Environment="KAFKA_OPTS={{ javax_debug }}" {% endif %} -Environment="KAFKA_JMX_OPTS={{ specification.kafka_var.jmx_opts }}" +Environment="KAFKA_JMX_OPTS={{ specification.jmx_opts }}" ExecStart=/opt/kafka/bin/kafka-server-start.sh /opt/kafka/config/server.properties [Install] diff --git a/ansible/playbooks/roles/kafka/templates/kafka_producer_consumer.py.j2 b/ansible/playbooks/roles/kafka/templates/kafka_producer_consumer.py.j2 index 0f798a4480..b3bf8e47a3 100644 --- a/ansible/playbooks/roles/kafka/templates/kafka_producer_consumer.py.j2 +++ b/ansible/playbooks/roles/kafka/templates/kafka_producer_consumer.py.j2 @@ -20,8 +20,8 @@ class Producer(threading.Thread): producer = KafkaProducer(bootstrap_servers='{{ kafka_hosts }}') while not self.stop_event.is_set(): - {% for msg in specification.kafka_var.tests.epiphany_topic_test_msgs %} - producer.send('{{ specification.kafka_var.tests.epiphany_topic_test }}', b"{{ msg }}") + {% for msg in specification.tests.epiphany_topic_test_msgs %} + producer.send('{{ specification.tests.epiphany_topic_test }}', b"{{ msg }}") {% endfor %} time.sleep(1) @@ -40,7 +40,7 @@ class Consumer(multiprocessing.Process): consumer = KafkaConsumer(bootstrap_servers='{{ kafka_hosts }}', auto_offset_reset='earliest', consumer_timeout_ms=1000) - consumer.subscribe(['{{ specification.kafka_var.tests.epiphany_topic_test }}']) + consumer.subscribe(['{{ specification.tests.epiphany_topic_test }}']) while not self.stop_event.is_set(): for message in consumer: diff --git a/ansible/playbooks/roles/kafka/templates/kafka_server_jaas.conf.j2 b/ansible/playbooks/roles/kafka/templates/kafka_server_jaas.conf.j2 index ca801c1b8b..176c77ffe7 100644 --- a/ansible/playbooks/roles/kafka/templates/kafka_server_jaas.conf.j2 +++ b/ansible/playbooks/roles/kafka/templates/kafka_server_jaas.conf.j2 @@ -1,8 +1,8 @@ KafkaServer { org.apache.kafka.common.security.plain.PlainLoginModule required - username="{{ specification.kafka_var.admin }}" - password="{{ specification.kafka_var.admin_pwd }}" - user_admin="{{ specification.kafka_var.admin_pwd }}" + username="{{ specification.admin }}" + password="{{ specification.admin_pwd }}" + user_admin="{{ specification.admin_pwd }}" {%- for host in kafka_hosts %} user_{{host}}="kafkabroker1-secret"; {%- endfor %} diff --git 
a/ansible/playbooks/roles/kafka/templates/logrotate.conf.j2 b/ansible/playbooks/roles/kafka/templates/logrotate.conf.j2 index 3c0d37d799..ad2b60e6d1 100644 --- a/ansible/playbooks/roles/kafka/templates/logrotate.conf.j2 +++ b/ansible/playbooks/roles/kafka/templates/logrotate.conf.j2 @@ -1,4 +1,4 @@ -{{ specification.kafka_var.log_dir }}/*.log { +{{ specification.log_dir }}/*.log { rotate 5 daily compress diff --git a/ansible/playbooks/roles/kafka/templates/server.properties.j2 b/ansible/playbooks/roles/kafka/templates/server.properties.j2 index 6f2643beac..63ec2cd81b 100644 --- a/ansible/playbooks/roles/kafka/templates/server.properties.j2 +++ b/ansible/playbooks/roles/kafka/templates/server.properties.j2 @@ -11,23 +11,23 @@ auto.create.topics.enable=true delete.topic.enable=true #default replication factors for automatically created topics -default.replication.factor={{ specification.kafka_var.default_replication_factor }} +default.replication.factor={{ specification.default_replication_factor }} #The replication factor for the offsets topic (set higher to ensure availability). Internal topic creation will fail until the cluster size meets this replication factor requirement. -offsets.topic.replication.factor={{ specification.kafka_var.offsets_topic_replication_factor }} +offsets.topic.replication.factor={{ specification.offsets_topic_replication_factor }} #Offsets older than this retention period will be discarded -offsets.retention.minutes={{ specification.kafka_var.offset_retention_minutes }} +offsets.retention.minutes={{ specification.offset_retention_minutes }} #The maximum number of incremental fetch sessions that we will maintain -max.incremental.fetch.session.cache.slots={{ specification.kafka_var.max_incremental_fetch_session_cache_slots }} +max.incremental.fetch.session.cache.slots={{ specification.max_incremental_fetch_session_cache_slots }} #Enable controlled shutdown of the server -controlled.shutdown.enable={{ specification.kafka_var.controlled_shutdown_enable | lower }} +controlled.shutdown.enable={{ specification.controlled_shutdown_enable | lower }} #Number of fetcher threads used to replicate messages from a source broker. #Increasing this value can increase the degree of I/O parallelism in the follower broker. -num.replica.fetchers={{ specification.kafka_var.num_replica_fetchers }} +num.replica.fetchers={{ specification.num_replica_fetchers }} #The number of bytes of messages to attempt to fetch for each partition. #This is not an absolute maximum, if the first record batch in the first @@ -35,10 +35,10 @@ num.replica.fetchers={{ specification.kafka_var.num_replica_fetchers }} #the record batch will still be returned to ensure that progress can be made. #The maximum record batch size accepted by the broker is defined #via message.max.bytes (broker config) or max.message.bytes (topic config). 
-replica.fetch.max.bytes={{ specification.kafka_var.replica_fetch_max_bytes }} +replica.fetch.max.bytes={{ specification.replica_fetch_max_bytes }} #The socket receive buffer for network requests -replica.socket.receive.buffer.bytes={{ specification.kafka_var.replica_socket_receive_buffer_bytes }} +replica.socket.receive.buffer.bytes={{ specification.replica_socket_receive_buffer_bytes }} ############################# Socket Server Settings ############################# {%- endif %} @@ -50,10 +50,10 @@ replica.socket.receive.buffer.bytes={{ specification.kafka_var.replica_socket_re # listeners = listener_name://host_name:port # EXAMPLE: # listeners = PLAINTEXT://your.host.name:9092 -{% if specification.kafka_var.security.ssl.enabled -%} -listeners=SSL://{{ inventory_hostname }}:{{ specification.kafka_var.security.ssl.port }} +{% if specification.security.ssl.enabled -%} +listeners=SSL://{{ inventory_hostname }}:{{ specification.security.ssl.port }} {% else %} -listeners=PLAINTEXT://{{ ansible_default_ipv4.address }}:{{ specification.kafka_var.port }} +listeners=PLAINTEXT://{{ ansible_default_ipv4.address }}:{{ specification.port }} {%- endif %} # Hostname and port the broker will advertise to producers and consumers. If not set, @@ -65,47 +65,47 @@ listeners=PLAINTEXT://{{ ansible_default_ipv4.address }}:{{ specification.kafka_ #listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL # The number of threads handling network requests -num.network.threads={{ specification.kafka_var.socket_settings.network_threads }} +num.network.threads={{ specification.socket_settings.network_threads }} # The number of threads doing disk I/O -num.io.threads={{ specification.kafka_var.socket_settings.io_threads }} +num.io.threads={{ specification.socket_settings.io_threads }} # The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes={{ specification.kafka_var.socket_settings.send_buffer_bytes }} +socket.send.buffer.bytes={{ specification.socket_settings.send_buffer_bytes }} # The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes={{ specification.kafka_var.socket_settings.receive_buffer_bytes }} +socket.receive.buffer.bytes={{ specification.socket_settings.receive_buffer_bytes }} # The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes={{ specification.kafka_var.socket_settings.request_max_bytes }} +socket.request.max.bytes={{ specification.socket_settings.request_max_bytes }} ############################# Security ######################################### # Security protocol used to communicate between brokers -{% if specification.kafka_var.security.ssl.enabled -%} +{% if specification.security.ssl.enabled -%} # If not enabled it should default to PLAINTEXT -security.inter.broker.protocol={{ specification.kafka_var.security.inter_broker_protocol }} +security.inter.broker.protocol={{ specification.security.inter_broker_protocol }} #### Encryption Settings #### -ssl.endpoint.identification.algorithm={{ specification.kafka_var.security.ssl.endpoint_identification_algorithm }} +ssl.endpoint.identification.algorithm={{ specification.security.ssl.endpoint_identification_algorithm }} -ssl.keystore.location={{ specification.kafka_var.security.ssl.server.keystore_location }} -ssl.keystore.password={{ specification.kafka_var.security.ssl.server.passwords.keystore }} -ssl.truststore.location={{ 
specification.kafka_var.security.ssl.server.truststore_location }} -ssl.truststore.password={{ specification.kafka_var.security.ssl.server.passwords.truststore }} -ssl.key.password={{ specification.kafka_var.security.ssl.server.passwords.key }} +ssl.keystore.location={{ specification.security.ssl.server.keystore_location }} +ssl.keystore.password={{ specification.security.ssl.server.passwords.keystore }} +ssl.truststore.location={{ specification.security.ssl.server.truststore_location }} +ssl.truststore.password={{ specification.security.ssl.server.passwords.truststore }} +ssl.key.password={{ specification.security.ssl.server.passwords.key }} -ssl.client.auth={{ specification.kafka_var.security.ssl.client_auth }} +ssl.client.auth={{ specification.security.ssl.client_auth }} {%- endif %} -{% if specification.kafka_var.security.authentication.enabled %} -{% if specification.kafka_var.security.authentication.authentication_method == "sasl" -%} +{% if specification.security.authentication.enabled %} +{% if specification.security.authentication.authentication_method == "sasl" -%} #### Authentication Settings #### # SASL mechanism used for inter-broker communication. -sasl.mechanism.inter.broker.protocol={{ specification.kafka_var.security.authentication.sasl_mechanism_inter_broker_protocol }} +sasl.mechanism.inter.broker.protocol={{ specification.security.authentication.sasl_mechanism_inter_broker_protocol }} -sasl.enabled.mechanisms={{ specification.kafka_var.security.sasl_authentication.enabled_mechanisms }} +sasl.enabled.mechanisms={{ specification.security.sasl_authentication.enabled_mechanisms }} # The list of SASL mechanisms enabled in the Kafka server. The list may contain any mechanism # for which a security provider is available. Only GSSAPI is enabled by default. @@ -113,23 +113,23 @@ sasl.enabled.mechanisms={{ specification.kafka_var.security.sasl_authentication. ############################# ACLs ############################################# # The authorizer class that should be used for authorization -authorizer.class.name={{ specification.kafka_var.security.authorization.authorizer_class_name }} +authorizer.class.name={{ specification.security.authorization.authorizer_class_name }} # If a Resource R has no associated ACLs, no one other than super users is allowed to access R. 
If you want to change that behavior, set this property to true -allow.everyone.if.no.acl.found={{ specification.kafka_var.security.authorization.allow_everyone_if_no_acl_found }} +allow.everyone.if.no.acl.found={{ specification.security.authorization.allow_everyone_if_no_acl_found }} -{% if specification.kafka_var.security.authentication.enabled and specification.kafka_var.security.authorization.enabled -%} +{% if specification.security.authentication.enabled and specification.security.authorization.enabled -%} -{% if specification.kafka_var.security.authentication.authentication_method == "certificates" -%} +{% if specification.security.authentication.authentication_method == "certificates" -%} {% set super_users = groups['kafka'] %} -{% if specification.kafka_var.security.authorization.super_users is defined -%} -{% set super_users = super_users + specification.kafka_var.security.authorization.super_users %} +{% if specification.security.authorization.super_users is defined -%} +{% set super_users = super_users + specification.security.authorization.super_users %} {%- endif %} super.users=User:CN={{ super_users | list | join(';User:CN=') }}; {%- endif %} -{% if specification.kafka_var.security.authentication.authentication_method == "sasl" and specification.kafka_var.security.authorization.super_users is defined -%} -super.users={{ specification.kafka_var.security.authorization.super_users }} +{% if specification.security.authentication.authentication_method == "sasl" and specification.security.authorization.super_users is defined -%} +super.users={{ specification.security.authorization.super_users }} {%- endif %} {%- endif %} @@ -138,20 +138,20 @@ super.users={{ specification.kafka_var.security.authorization.super_users }} ############################# Log Basics ############################# # A comma seperated list of directories under which to store log files -log.dirs={{ specification.kafka_var.data_dir }} +log.dirs={{ specification.data_dir }} # The default number of log partitions per topic. More partitions allow greater # parallelism for consumption, but this will also result in more files across # the brokers. -num.partitions={{ specification.kafka_var.partitions }} +num.partitions={{ specification.partitions }} # The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. # This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir={{ specification.kafka_var.num_recovery_threads_per_data_dir }} +num.recovery.threads.per.data.dir={{ specification.num_recovery_threads_per_data_dir }} # When a producer sets acks to "all" (or "-1"), this configuration specifies the minimum number # of replicas that must acknowledge a write for the write to be considered successful. -min.insync.replicas={{ specification.kafka_var.min_insync_replicas }} +min.insync.replicas={{ specification.min_insync_replicas }} ############################# Log Flush Policy ############################# @@ -178,11 +178,11 @@ min.insync.replicas={{ specification.kafka_var.min_insync_replicas }} # from the end of the log. # The minimum age of a log file to be eligible for deletion due to age -log.retention.hours={{ specification.kafka_var.log_retention_hours }} +log.retention.hours={{ specification.log_retention_hours }} # A size-based retention policy for logs. Segments are pruned from the log as long as the remaining # segments don't drop below log.retention.bytes. 
Functions independently of log.retention.hours. -log.retention.bytes={{ specification.kafka_var.log_retention_bytes }} +log.retention.bytes={{ specification.log_retention_bytes }} # The maximum size of a log segment file. When this size is reached a new log segment will be created. log.segment.bytes=1073741824 diff --git a/docs/home/howto/RETENTION.md b/docs/home/howto/RETENTION.md index 3fa6ccdb9c..6ae5b8d87f 100644 --- a/docs/home/howto/RETENTION.md +++ b/docs/home/howto/RETENTION.md @@ -22,23 +22,22 @@ kind: configuration/kafka title: "Kafka" name: default specification: - kafka_var: - partitions: 8 - log_retention_hours: 168 - log_retention_bytes: -1 + partitions: 8 + log_retention_hours: 168 + log_retention_bytes: -1 ``` ### Configuration parameters -#### specification.kafka_var.partitions +#### specification.partitions Sets [num.partitions](https://kafka.apache.org/documentation/#brokerconfigs_num.partitions) parameter -#### specification.kafka_var.log_retention_hours +#### specification.log_retention_hours Sets [log.retention.hours](https://kafka.apache.org/documentation/#brokerconfigs_log.retention.bytes) parameter -#### specification.kafka_var.log_retention_bytes +#### specification.log_retention_bytes Sets [log.retention.bytes](https://kafka.apache.org/documentation/#brokerconfigs_log.retention.bytes) parameter diff --git a/schema/common/defaults/configuration/kafka.yml b/schema/common/defaults/configuration/kafka.yml index 1da6a76f96..30feadfdf9 100644 --- a/schema/common/defaults/configuration/kafka.yml +++ b/schema/common/defaults/configuration/kafka.yml @@ -2,72 +2,71 @@ kind: configuration/kafka title: "Kafka" name: default specification: - kafka_var: - enabled: True - admin: kafka - admin_pwd: epiphany - # javax_net_debug: all # uncomment to activate debugging, other debug options: https://colinpaice.blog/2020/04/05/using-java-djavax-net-debug-to-examine-data-flows-including-tls/ - security: - ssl: - enabled: False - port: 9093 - server: - local_cert_download_path: kafka-certs - keystore_location: /var/private/ssl/kafka.server.keystore.jks - truststore_location: /var/private/ssl/kafka.server.truststore.jks - cert_validity: 365 - passwords: - keystore: PasswordToChange - truststore: PasswordToChange - key: PasswordToChange - endpoint_identification_algorithm: HTTPS - client_auth: required - encrypt_at_rest: False - inter_broker_protocol: PLAINTEXT - authorization: - enabled: False - authorizer_class_name: kafka.security.auth.SimpleAclAuthorizer - allow_everyone_if_no_acl_found: False - super_users: - - tester01 - - tester02 - users: - - name: test_user - topic: test_topic - authentication: - enabled: False - authentication_method: certificates - sasl_mechanism_inter_broker_protocol: - sasl_enabled_mechanisms: PLAIN - sha: "b28e81705e30528f1abb6766e22dfe9dae50b1e1e93330c880928ff7a08e6b38ee71cbfc96ec14369b2dfd24293938702cab422173c8e01955a9d1746ae43f98" - port: 9092 - min_insync_replicas: 1 # Minimum number of replicas (ack write) - default_replication_factor: 1 # Minimum number of automatically created topics - offsets_topic_replication_factor: 1 # Minimum number of offsets topic (consider higher value for HA) - num_recovery_threads_per_data_dir: 1 # Minimum number of recovery threads per data dir - num_replica_fetchers: 1 # Minimum number of replica fetchers - replica_fetch_max_bytes: 1048576 - replica_socket_receive_buffer_bytes: 65536 - partitions: 8 # 100 x brokers x replicas for reasonable size cluster. 
Small clusters can be less - log_retention_hours: 168 # The minimum age of a log file to be eligible for deletion due to age - log_retention_bytes: -1 # -1 is no size limit only a time limit (log_retention_hours). This limit is enforced at the partition level, multiply it by the number of partitions to compute the topic retention in bytes. - offset_retention_minutes: 10080 # Offsets older than this retention period will be discarded - heap_opts: "-Xmx2G -Xms2G" - opts: "-Djavax.net.debug=all" - jmx_opts: - max_incremental_fetch_session_cache_slots: 1000 - controlled_shutdown_enable: true - group: kafka - user: kafka - conf_dir: /opt/kafka/config - data_dir: /var/lib/kafka - log_dir: /var/log/kafka - socket_settings: - network_threads: 3 # The number of threads handling network requests - io_threads: 8 # The number of threads doing disk I/O - send_buffer_bytes: 102400 # The send buffer (SO_SNDBUF) used by the socket server - receive_buffer_bytes: 102400 # The receive buffer (SO_RCVBUF) used by the socket server - request_max_bytes: 104857600 # The maximum size of a request that the socket server will accept (protection against OOM) + enabled: True + admin: kafka + admin_pwd: epiphany + # javax_net_debug: all # uncomment to activate debugging, other debug options: https://colinpaice.blog/2020/04/05/using-java-djavax-net-debug-to-examine-data-flows-including-tls/ + security: + ssl: + enabled: False + port: 9093 + server: + local_cert_download_path: kafka-certs + keystore_location: /var/private/ssl/kafka.server.keystore.jks + truststore_location: /var/private/ssl/kafka.server.truststore.jks + cert_validity: 365 + passwords: + keystore: PasswordToChange + truststore: PasswordToChange + key: PasswordToChange + endpoint_identification_algorithm: HTTPS + client_auth: required + encrypt_at_rest: False + inter_broker_protocol: PLAINTEXT + authorization: + enabled: False + authorizer_class_name: kafka.security.auth.SimpleAclAuthorizer + allow_everyone_if_no_acl_found: False + super_users: + - tester01 + - tester02 + users: + - name: test_user + topic: test_topic + authentication: + enabled: False + authentication_method: certificates + sasl_mechanism_inter_broker_protocol: + sasl_enabled_mechanisms: PLAIN + sha: "b28e81705e30528f1abb6766e22dfe9dae50b1e1e93330c880928ff7a08e6b38ee71cbfc96ec14369b2dfd24293938702cab422173c8e01955a9d1746ae43f98" + port: 9092 + min_insync_replicas: 1 # Minimum number of replicas (ack write) + default_replication_factor: 1 # Minimum number of automatically created topics + offsets_topic_replication_factor: 1 # Minimum number of offsets topic (consider higher value for HA) + num_recovery_threads_per_data_dir: 1 # Minimum number of recovery threads per data dir + num_replica_fetchers: 1 # Minimum number of replica fetchers + replica_fetch_max_bytes: 1048576 + replica_socket_receive_buffer_bytes: 65536 + partitions: 8 # 100 x brokers x replicas for reasonable size cluster. Small clusters can be less + log_retention_hours: 168 # The minimum age of a log file to be eligible for deletion due to age + log_retention_bytes: -1 # -1 is no size limit only a time limit (log_retention_hours). This limit is enforced at the partition level, multiply it by the number of partitions to compute the topic retention in bytes. 
+ offset_retention_minutes: 10080 # Offsets older than this retention period will be discarded + heap_opts: "-Xmx2G -Xms2G" + opts: "-Djavax.net.debug=all" + jmx_opts: + max_incremental_fetch_session_cache_slots: 1000 + controlled_shutdown_enable: true + group: kafka + user: kafka + conf_dir: /opt/kafka/config + data_dir: /var/lib/kafka + log_dir: /var/log/kafka + socket_settings: + network_threads: 3 # The number of threads handling network requests + io_threads: 8 # The number of threads doing disk I/O + send_buffer_bytes: 102400 # The send buffer (SO_SNDBUF) used by the socket server + receive_buffer_bytes: 102400 # The receive buffer (SO_RCVBUF) used by the socket server + request_max_bytes: 104857600 # The maximum size of a request that the socket server will accept (protection against OOM) zookeeper_set_acl: false zookeeper_hosts: "{{ groups['zookeeper']|join(':2181,') }}:2181" jmx_exporter_user: jmx-exporter diff --git a/schema/common/validation/configuration/kafka.yml b/schema/common/validation/configuration/kafka.yml index 32b7cb5b98..ebbd14ba68 100644 --- a/schema/common/validation/configuration/kafka.yml +++ b/schema/common/validation/configuration/kafka.yml @@ -3,147 +3,144 @@ title: "Kafka specification schema" description: "Kafka specification schema" type: object properties: - kafka_var: + enabled: + type: boolean + admin: + type: string + admin_pwd: + type: string + javax_net_debug: + type: string + security: type: object properties: - enabled: - type: boolean - admin: - type: string - admin_pwd: - type: string - javax_net_debug: - type: string - security: + ssl: type: object properties: - ssl: + enabled: + type: boolean + port: + type: integer + server: type: object properties: - enabled: - type: boolean - port: + local_cert_download_path: + type: string + keystore_location: + type: string + truststore_location: + type: string + cert_validity: type: integer - server: + passwords: type: object properties: - local_cert_download_path: + keystore: type: string - keystore_location: + truststore: type: string - truststore_location: + key: type: string - cert_validity: - type: integer - passwords: - type: object - properties: - keystore: - type: string - truststore: - type: string - key: - type: string - endpoint_identification_algorithm: - type: string - client_auth: - type: string - encrypt_at_rest: + endpoint_identification_algorithm: + type: string + client_auth: + type: string + encrypt_at_rest: + type: boolean + inter_broker_protocol: + type: string + authorization: + type: object + properties: + enabled: type: boolean - inter_broker_protocol: + authorizer_class_name: type: string - authorization: - type: object - properties: - enabled: - type: boolean - authorizer_class_name: - type: string - allow_everyone_if_no_acl_found: - type: boolean - super_users: - type: array - items: + allow_everyone_if_no_acl_found: + type: boolean + super_users: + type: array + items: + type: string + users: + type: array + items: + type: object + properties: + name: type: string - users: - type: array - items: - type: object - properties: - name: - type: string - topic: - type: string - authentication: - type: object - properties: - enabled: - type: boolean - authentication_method: - type: string - sasl_mechanism_inter_broker_protocol: - type: 'null' - sasl_enabled_mechanisms: - type: string - sha: - type: string - port: - type: integer - min_insync_replicas: - type: integer - default_replication_factor: - type: integer - offsets_topic_replication_factor: - type: integer - 
num_recovery_threads_per_data_dir: - type: integer - num_replica_fetchers: - type: integer - replica_fetch_max_bytes: - type: integer - replica_socket_receive_buffer_bytes: - type: integer - partitions: + topic: + type: string + authentication: + type: object + properties: + enabled: + type: boolean + authentication_method: + type: string + sasl_mechanism_inter_broker_protocol: + type: 'null' + sasl_enabled_mechanisms: + type: string + sha: + type: string + port: + type: integer + min_insync_replicas: + type: integer + default_replication_factor: + type: integer + offsets_topic_replication_factor: + type: integer + num_recovery_threads_per_data_dir: + type: integer + num_replica_fetchers: + type: integer + replica_fetch_max_bytes: + type: integer + replica_socket_receive_buffer_bytes: + type: integer + partitions: + type: integer + log_retention_hours: + type: integer + log_retention_bytes: + type: integer + offset_retention_minutes: + type: integer + heap_opts: + type: string + opts: + type: string + jmx_opts: + type: 'null' + max_incremental_fetch_session_cache_slots: + type: integer + controlled_shutdown_enable: + type: boolean + group: + type: string + user: + type: string + conf_dir: + type: string + data_dir: + type: string + log_dir: + type: string + socket_settings: + type: object + properties: + network_threads: type: integer - log_retention_hours: + io_threads: type: integer - log_retention_bytes: + send_buffer_bytes: type: integer - offset_retention_minutes: + receive_buffer_bytes: type: integer - heap_opts: - type: string - opts: - type: string - jmx_opts: - type: 'null' - max_incremental_fetch_session_cache_slots: + request_max_bytes: type: integer - controlled_shutdown_enable: - type: boolean - group: - type: string - user: - type: string - conf_dir: - type: string - data_dir: - type: string - log_dir: - type: string - socket_settings: - type: object - properties: - network_threads: - type: integer - io_threads: - type: integer - send_buffer_bytes: - type: integer - receive_buffer_bytes: - type: integer - request_max_bytes: - type: integer zookeeper_set_acl: type: boolean zookeeper_hosts: From 92417ced5d04a3cdce4d9b55c48138a4ad96fd39 Mon Sep 17 00:00:00 2001 From: cicharka Date: Thu, 24 Feb 2022 09:29:56 +0100 Subject: [PATCH 08/21] Changelog updated --- docs/changelogs/CHANGELOG-2.0.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/changelogs/CHANGELOG-2.0.md b/docs/changelogs/CHANGELOG-2.0.md index f797ddeae4..a06a389520 100644 --- a/docs/changelogs/CHANGELOG-2.0.md +++ b/docs/changelogs/CHANGELOG-2.0.md @@ -35,11 +35,16 @@ - [#2966](https://github.com/epiphany-platform/epiphany/issues/2966) - `epicli init --full` does not generate configuration for OpenDistro - [#2942](https://github.com/epiphany-platform/epiphany/issues/2942) - rsync command fails trying to copy artifacts - [#2930](https://github.com/epiphany-platform/epiphany/issues/2930) - Backup/recovery commands fail when default configuration for backup attached to cluster-config.yml +<<<<<<< HEAD - [#2989](https://github.com/epiphany-platform/epiphany/issues/2989) - Task `Remove swap from /etc/fstab` does not remove swap entry from file +<<<<<<< HEAD - [#2907](https://github.com/epiphany-platform/epiphany/issues/2907) - Backup/recovery commands fail when executed directly after upgrade +<<<<<<< HEAD - [#3025](https://github.com/epiphany-platform/epiphany/issues/3025) - Running yum commands may hang waiting for user input - [#2728](https://github.com/epiphany-platform/epiphany/issues/2728) - 
PostgreSQL's configuration files located outside the data directory are not copied by repmgr - [#3029](https://github.com/epiphany-platform/epiphany/issues/3029) - [RHEL] Single machine upgrade fails on preflight check: 'dict object' has no attribute 'size_available' +- [#2803](https://github.com/epiphany-platform/epiphany/issues/2803) - Refactor: rename 'kafka_var' setting + ### Updated @@ -55,6 +60,7 @@ - [#2847](https://github.com/epiphany-platform/epiphany/issues/2847) - Upgrade Ansible to 5.2.0 - Ansible 2.10.15 to 5.2.0 - Python 3.7 to 3.10 +- [#2871](https://github.com/epiphany-platform/epiphany/issues/2871) - Upgrade Kafka to 2.8.1 ### Removed From c66c694e2cbfdb74577e681772650cb77cf7eb4c Mon Sep 17 00:00:00 2001 From: cicharka Date: Mon, 28 Feb 2022 23:09:38 +0100 Subject: [PATCH 09/21] Kafka: fix upgrade and update components --- .../roles/kafka/tasks/upgrade/set-updated-version.yml | 2 +- .../playbooks/roles/kafka/tasks/upgrade/update-properties.yml | 4 ++-- docs/changelogs/CHANGELOG-2.0.md | 4 ---- docs/home/COMPONENTS.md | 2 +- 4 files changed, 4 insertions(+), 8 deletions(-) diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml index 6014cb41f6..c6c064cc69 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml @@ -7,7 +7,7 @@ lineinfile: path: /opt/kafka/config/server.properties regexp: "^inter.broker.protocol.version" - line: "inter.broker.protocol.version={{ kafka_version.new }}" + line: "inter.broker.protocol.version={{ kafka_version }}" - name: Start kafka service include_tasks: common/start.yml diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml index 7d4b1923e9..f16499aa7a 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml @@ -11,7 +11,7 @@ - name: Add current_kafka_version property lineinfile: path: /opt/kafka/config/server.properties - line: "CURRENT_KAFKA_VERSION={{ kafka_version.old }}" + line: "CURRENT_KAFKA_VERSION={{ old_kafka_version }}" when: current_kafka_version_property.stdout == "" - name: Check if inter.broker.protocol.version property is defined @@ -26,5 +26,5 @@ - name: Add inter.broker.protocol.version property lineinfile: path: /opt/kafka/config/server.properties - line: "inter.broker.protocol.version={{ kafka_version.old }}" + line: "inter.broker.protocol.version={{ old_kafka_version }}" when: inter_broker_protocol_version_property.stdout == "" diff --git a/docs/changelogs/CHANGELOG-2.0.md b/docs/changelogs/CHANGELOG-2.0.md index a06a389520..238967c2b1 100644 --- a/docs/changelogs/CHANGELOG-2.0.md +++ b/docs/changelogs/CHANGELOG-2.0.md @@ -35,17 +35,13 @@ - [#2966](https://github.com/epiphany-platform/epiphany/issues/2966) - `epicli init --full` does not generate configuration for OpenDistro - [#2942](https://github.com/epiphany-platform/epiphany/issues/2942) - rsync command fails trying to copy artifacts - [#2930](https://github.com/epiphany-platform/epiphany/issues/2930) - Backup/recovery commands fail when default configuration for backup attached to cluster-config.yml -<<<<<<< HEAD - [#2989](https://github.com/epiphany-platform/epiphany/issues/2989) - Task `Remove swap from /etc/fstab` does not remove swap entry from file -<<<<<<< HEAD - 
[#2907](https://github.com/epiphany-platform/epiphany/issues/2907) - Backup/recovery commands fail when executed directly after upgrade -<<<<<<< HEAD - [#3025](https://github.com/epiphany-platform/epiphany/issues/3025) - Running yum commands may hang waiting for user input - [#2728](https://github.com/epiphany-platform/epiphany/issues/2728) - PostgreSQL's configuration files located outside the data directory are not copied by repmgr - [#3029](https://github.com/epiphany-platform/epiphany/issues/3029) - [RHEL] Single machine upgrade fails on preflight check: 'dict object' has no attribute 'size_available' - [#2803](https://github.com/epiphany-platform/epiphany/issues/2803) - Refactor: rename 'kafka_var' setting - ### Updated - [#2828](https://github.com/epiphany-platform/epiphany/issues/2828) - K8s improvements diff --git a/docs/home/COMPONENTS.md b/docs/home/COMPONENTS.md index 23f77bb4c0..c255092445 100644 --- a/docs/home/COMPONENTS.md +++ b/docs/home/COMPONENTS.md @@ -14,7 +14,7 @@ Note that versions are default versions and can be changed in certain cases thro | Flannel | 0.14.0 | https://github.com/coreos/flannel/ | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | | Canal | 3.20.3 | https://github.com/projectcalico/calico | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | | Coredns | 1.8.4 | https://github.com/coredns/coredns | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | -| Kafka | 2.6.0 | https://github.com/apache/kafka | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | +| Kafka | 2.8.1 | https://github.com/apache/kafka | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | | Zookeeper | 3.5.8 | https://github.com/apache/zookeeper | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | | RabbitMQ | 3.8.9 | https://github.com/rabbitmq/rabbitmq-server | [Mozilla Public License](https://www.mozilla.org/en-US/MPL/) | | Docker CE | 20.10.8 | https://docs.docker.com/engine/release-notes/ | [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) | From 0e19ad3c8f895b9fc639048b20eac673defcc9c3 Mon Sep 17 00:00:00 2001 From: cicharka Date: Tue, 1 Mar 2022 15:54:49 +0100 Subject: [PATCH 10/21] kafka and zookeeper: fixes after review --- .../tasks/common/download_and_unpack_binary.yml | 1 - .../playbooks/roles/kafka/tasks/common/start.yml | 1 - .../playbooks/roles/kafka/tasks/common/stop.yml | 1 - .../roles/kafka/tasks/generate-certificates.yml | 14 +++++++------- ansible/playbooks/roles/kafka/tasks/metrics.yml | 4 ++-- .../playbooks/roles/kafka/tasks/setup-kafka.yml | 14 +++++++------- ansible/playbooks/roles/zookeeper/tasks/main.yml | 1 - 7 files changed, 16 insertions(+), 20 deletions(-) diff --git a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml index 59d7974b2a..a21fbb925a 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml @@ -1,5 +1,4 @@ --- - - name: Download Kafka binaries include_role: name: download diff --git a/ansible/playbooks/roles/kafka/tasks/common/start.yml b/ansible/playbooks/roles/kafka/tasks/common/start.yml index 7ea0224c90..06462f582f 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/start.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/start.yml @@ -1,5 +1,4 @@ --- - - name: Enable and Start Kafka service: name: kafka diff --git 
a/ansible/playbooks/roles/kafka/tasks/common/stop.yml b/ansible/playbooks/roles/kafka/tasks/common/stop.yml index 27b4ab806e..492ed55ac1 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/stop.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/stop.yml @@ -1,5 +1,4 @@ --- - - name: Stop Kafka systemd: name: kafka diff --git a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml index 04184fe72e..f4000c8f8a 100644 --- a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml +++ b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml @@ -4,7 +4,7 @@ state: directory owner: "{{ specification.user }}" group: "{{ specification.group }}" - mode: "0755" + mode: u=rwx,go=rx - name: Check if keystore exists on broker stat: @@ -13,11 +13,11 @@ register: keystore_exists - name: Generate keystore for each server - shell: keytool -keystore {{ specification.security.ssl.server.keystore_location }} \ - -alias localhost -validity {{ specification.security.ssl.server.cert_validity }} -genkey -keyalg RSA \ - -noprompt -storepass {{ specification.security.ssl.server.passwords.keystore }} \ - -keypass {{ specification.security.ssl.server.passwords.key }} \ - -dname "CN={{ inventory_hostname }}" -ext SAN="DNS:{{ inventory_hostname }}" + command: keytool -keystore {{ specification.security.ssl.server.keystore_location }} \ + -alias localhost -validity {{ specification.security.ssl.server.cert_validity }} -genkey -keyalg RSA \ + -noprompt -storepass {{ specification.security.ssl.server.passwords.keystore }} \ + -keypass {{ specification.security.ssl.server.passwords.key }} \ + -dname "CN={{ inventory_hostname }}" -ext SAN="DNS:{{ inventory_hostname }}" when: - not keystore_exists.stat.exists @@ -81,7 +81,7 @@ - not trustore_exists.stat.exists - name: Check if CA certificate is already imported - shell: keytool -list -v -keystore {{ specification.security.ssl.server.keystore_location }} \ + shell: set -o pipefail && keytool -list -v -keystore {{ specification.security.ssl.server.keystore_location }} \ -storepass {{ specification.security.ssl.server.passwords.keystore }} \ | grep -i "Alias name" | grep -i "caroot" failed_when: "caroot_exists.rc == 2" diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 6b2c3a760b..6b7127949d 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -13,7 +13,7 @@ src: jmx-kafka-config.yml owner: "{{ specification.user }}" group: "{{ specification.group }}" - mode: 0644 + mode: u=rx,go=r - name: delegated | create prometheus system group group: @@ -52,7 +52,7 @@ src: file_sd_config.yml.j2 owner: root group: root - mode: 0644 + mode: u=rx,go=r delegate_to: "{{ item }}" notify: restart prometheus with_inventory_hostnames: diff --git a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml index 7fc630d28f..b1f060937e 100644 --- a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml +++ b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml @@ -31,7 +31,7 @@ copy: content: "export PATH=$PATH:/opt/kafka/bin" dest: "/etc/profile.d/kafka_path.sh" - mode: 0755 + mode: u=rwx,go=rx - name: Link /opt/kafka to the right version file: @@ -44,7 +44,7 @@ dest: /etc/systemd/system/kafka.service owner: root group: root - mode: 0644 + mode: u=rw,go=r src: kafka.service.j2 notify: - restart kafka @@ -59,7 +59,7 @@ state: directory 
owner: "{{ specification.user }}" group: "{{ specification.group }}" - mode: 0755 + mode: u=rwx,go=rx - name: Remove lost+found in the datadir file: @@ -72,7 +72,7 @@ state: directory owner: "{{ specification.user }}" group: "{{ specification.group }}" - mode: 0755 + mode: u=rwx,go=rx - name: Create /etc/kafka directory file: @@ -90,7 +90,7 @@ path: "{{ specification.conf_dir }}/log4j.properties" owner: "{{ specification.user }}" group: "{{ specification.group }}" - mode: 0644 + mode: u=rw,go=r - name: Generate certificate include_tasks: generate-certificates.yml @@ -105,7 +105,7 @@ owner: "{{ specification.user }}" group: "{{ specification.group }}" # Was 0640 - mode: 0644 + mode: u=rw,go=r src: server.properties.j2 register: create_server_properties notify: @@ -123,7 +123,7 @@ dest: /etc/logrotate.d/kafka owner: root group: root - mode: 0644 + mode: u=rw,go=r src: logrotate.conf.j2 - name: configure system settings, file descriptors and number of threads for kafka diff --git a/ansible/playbooks/roles/zookeeper/tasks/main.yml b/ansible/playbooks/roles/zookeeper/tasks/main.yml index 790fabb510..5c7766f162 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/main.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/main.yml @@ -74,7 +74,6 @@ path: /opt/zookeeper state: link src: "{{ zookeeper_install_dir }}" - # force: ? - name: Add Zookeeper's bin dir to the PATH copy: From ae45c71887353e65c7c2d690bc32f7c8ff53fc3c Mon Sep 17 00:00:00 2001 From: cicharka Date: Fri, 4 Mar 2022 08:39:04 +0100 Subject: [PATCH 11/21] Kafka: use specification to set directory permissions --- .../roles/kafka/tasks/common/download_and_unpack_binary.yml | 4 ++-- cli/src/ansible/AnsibleVarsGenerator.py | 2 +- docs/home/howto/UPGRADE.md | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml index a21fbb925a..72af8cef1d 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml @@ -17,5 +17,5 @@ file: path: "{{ kafka_install_dir }}" state: directory - owner: kafka - group: kafka + owner: "{{ specification.user }}" + group: "{{ specification.group }}" diff --git a/cli/src/ansible/AnsibleVarsGenerator.py b/cli/src/ansible/AnsibleVarsGenerator.py index 7353cd0647..d44fd8086a 100644 --- a/cli/src/ansible/AnsibleVarsGenerator.py +++ b/cli/src/ansible/AnsibleVarsGenerator.py @@ -72,7 +72,7 @@ def generate(self): # is changed between versions (e.g. wal_keep_segments -> wal_keep_size) and sometimes previous parameters # are not compatible with the new ones, defaults are used for template processing roles_with_defaults = [ - 'grafana', 'haproxy', 'image_registry', 'jmx_exporter', 'kafka_exporter', + 'grafana', 'haproxy', 'image_registry', 'jmx_exporter', 'kafka', 'kafka_exporter', 'kibana', 'logging', 'node_exporter', 'postgres_exporter', 'postgresql', 'prometheus', 'rabbitmq', 'repository' ] diff --git a/docs/home/howto/UPGRADE.md b/docs/home/howto/UPGRADE.md index 0086bbb0b1..04aaaa20da 100644 --- a/docs/home/howto/UPGRADE.md +++ b/docs/home/howto/UPGRADE.md @@ -249,6 +249,7 @@ Kafka will be automatically updated to the latest version supported by Epiphany. version [here](../COMPONENTS.md#epiphany-cluster-components). 
Kafka brokers are updated one by one - but the update procedure does not guarantee "zero downtime" because it depends on the number of available brokers, topics, and partitioning configuration. +Note that old Kafka binaries are removed during upgrade. ### ZooKeeper upgrade From 3df6ea2162724b1d2143be06636ca9c2af546afd Mon Sep 17 00:00:00 2001 From: cicharka Date: Mon, 7 Mar 2022 07:52:37 +0100 Subject: [PATCH 12/21] * refactor more tasks --- .../playbooks/roles/kafka/handlers/main.yml | 11 ++-- .../common/download_and_unpack_binary.yml | 2 +- .../kafka/tasks/generate-certificates.yml | 63 +++++++++++-------- ansible/playbooks/roles/kafka/tasks/main.yml | 3 + .../playbooks/roles/kafka/tasks/metrics.yml | 32 ++++++---- .../roles/kafka/tasks/setup-kafka.yml | 6 +- .../kafka/tasks/upgrade/install-upgrade.yml | 2 +- .../roles/kafka/tasks/upgrade/main.yml | 5 +- .../kafka/tasks/upgrade/preflight-check.yml | 3 + .../tasks/upgrade/set-updated-version.yml | 3 + 10 files changed, 79 insertions(+), 51 deletions(-) diff --git a/ansible/playbooks/roles/kafka/handlers/main.yml b/ansible/playbooks/roles/kafka/handlers/main.yml index b7668bd059..542c3cb116 100644 --- a/ansible/playbooks/roles/kafka/handlers/main.yml +++ b/ansible/playbooks/roles/kafka/handlers/main.yml @@ -1,7 +1,7 @@ --- # Handlers for Kafka -- name: restart kafka +- name: Restart kafka service: name: kafka state: restarted @@ -9,12 +9,13 @@ retries: 10 delay: 10 -- name: restart prometheus +- name: Restart prometheus become: true systemd: daemon_reload: true name: prometheus state: restarted - delegate_to: "{{ item }}" - with_inventory_hostnames: - - prometheus + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" diff --git a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml index 72af8cef1d..ef42aa6201 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml @@ -8,7 +8,7 @@ - name: Uncompress the Kafka tar unarchive: - remote_src: yes + remote_src: true creates: "{{ kafka_install_dir }}" src: "{{ download_directory }}/{{ kafka_bin_filename }}" dest: /opt diff --git a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml index f4000c8f8a..154e47a042 100644 --- a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml +++ b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml @@ -9,6 +9,9 @@ - name: Check if keystore exists on broker stat: path: "{{ specification.security.ssl.server.keystore_location }}" + get_attributes: false + get_checksum: false + get_mime: false changed_when: false register: keystore_exists @@ -24,17 +27,20 @@ - name: Check if signing certificate exists stat: path: "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" + get_attributes: false + get_checksum: false + get_mime: false register: signing_certificate_exists changed_when: false when: - groups['kafka'][0] == inventory_hostname - name: Generate signing certificate - shell: openssl req -new -x509 -keyout {{ specification.security.ssl.server.keystore_location | dirname }}/ca-key \ - -out {{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert \ - -days {{ specification.security.ssl.server.cert_validity }} \ - -subj "/CN={{ inventory_hostname }}" \ - --passout pass:{{ 
specification.security.ssl.server.passwords.key }} + command: openssl req -new -x509 -keyout {{ specification.security.ssl.server.keystore_location | dirname }}/ca-key \ + -out {{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert \ + -days {{ specification.security.ssl.server.cert_validity }} \ + -subj "/CN={{ inventory_hostname }}" \ + --passout pass:{{ specification.security.ssl.server.passwords.key }} when: - groups['kafka'][0] == inventory_hostname - not signing_certificate_exists.stat.exists @@ -47,7 +53,7 @@ delegate_to: localhost - name: Fetching files - fetch: + slurp: src: "{{ specification.security.ssl.server.keystore_location | dirname }}/{{ item }}" dest: "{{ specification.security.ssl.server.local_cert_download_path }}/{{ item }}" flat: yes @@ -70,13 +76,16 @@ - name: Check if trustore exists stat: path: "{{ specification.security.ssl.server.truststore_location }}" + get_attributes: false + get_checksum: false + get_mime: false register: trustore_exists - name: Create trustore - shell: keytool -noprompt -keystore "{{ specification.security.ssl.server.truststore_location }}" -alias CARoot \ - -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ - -storepass {{ specification.security.ssl.server.passwords.keystore }} \ - -keypass {{ specification.security.ssl.server.passwords.key }} + command: keytool -noprompt -keystore "{{ specification.security.ssl.server.truststore_location }}" -alias CARoot \ + -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} \ + -keypass {{ specification.security.ssl.server.passwords.key }} when: - not trustore_exists.stat.exists @@ -99,35 +108,35 @@ register: signed_cert_exists - name: Export certificate to sign certificate with CA - shell: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} \ - -alias localhost -certreq \ - -file "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-file" \ - -storepass {{ specification.security.ssl.server.passwords.keystore }} \ - -keypass {{ specification.security.ssl.server.passwords.key }} + command: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} \ + -alias localhost -certreq \ + -file "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-file" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} \ + -keypass {{ specification.security.ssl.server.passwords.key }} when: - signed_cert_exists.rc == 1 - name: Signing certificate with CA - shell: openssl x509 -req -CA "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ - -CAkey "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-key" \ - -in "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-file" \ - -out "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-signed" \ - -days {{ specification.security.ssl.server.cert_validity }} -CAcreateserial \ - -passin pass:{{ specification.security.ssl.server.passwords.key }} + command: openssl x509 -req -CA "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ + -CAkey "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-key" \ + -in "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-file" \ + -out "{{ specification.security.ssl.server.keystore_location | dirname 
}}/cert-signed" \ + -days {{ specification.security.ssl.server.cert_validity }} -CAcreateserial \ + -passin pass:{{ specification.security.ssl.server.passwords.key }} when: - signed_cert_exists.rc == 1 - name: Import certificate CA - shell: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} -alias CARoot \ - -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ - -storepass {{ specification.security.ssl.server.passwords.keystore }} + command: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} -alias CARoot \ + -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/ca-cert" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} when: - caroot_exists.rc == 1 - name: Import certificate signed by CA - shell: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} -alias localhost \ - -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-signed" \ - -storepass {{ specification.security.ssl.server.passwords.keystore }} + command: keytool -noprompt -keystore {{ specification.security.ssl.server.keystore_location }} -alias localhost \ + -import -file "{{ specification.security.ssl.server.keystore_location | dirname }}/cert-signed" \ + -storepass {{ specification.security.ssl.server.passwords.keystore }} when: - signed_cert_exists.rc == 1 diff --git a/ansible/playbooks/roles/kafka/tasks/main.yml b/ansible/playbooks/roles/kafka/tasks/main.yml index d9bfcf2938..1c29043b55 100644 --- a/ansible/playbooks/roles/kafka/tasks/main.yml +++ b/ansible/playbooks/roles/kafka/tasks/main.yml @@ -3,6 +3,9 @@ - name: Check if jmx exporter is available stat: + get_attributes: false + get_checksum: false + get_mime: false path: "{{ prometheus_jmx_exporter_path }}" register: exporter diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 6b7127949d..3621d963f6 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -20,9 +20,10 @@ name: prometheus system: true state: present - delegate_to: "{{ item }}" - with_inventory_hostnames: - - prometheus + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" - name: delegated | create prometheus system user user: @@ -31,9 +32,11 @@ shell: "/usr/sbin/nologin" group: prometheus createhome: false - delegate_to: "{{ item }}" - with_inventory_hostnames: - - prometheus + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" + - name: delegated | create file_sd for service discovery configs file: @@ -42,9 +45,10 @@ owner: root group: prometheus mode: 0750 - delegate_to: "{{ item }}" - with_inventory_hostnames: - - prometheus + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" - name: delegated | copy file_sd_config to prometheus hosts template: @@ -53,7 +57,9 @@ owner: root group: root mode: u=rx,go=r - delegate_to: "{{ item }}" - notify: restart prometheus - with_inventory_hostnames: - - prometheus + notify: Restart prometheus + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" + diff --git a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml index b1f060937e..a3110f89e9 100644 --- 
a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml +++ b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml @@ -47,7 +47,7 @@ mode: u=rw,go=r src: kafka.service.j2 notify: - - restart kafka + - Restart kafka - name: Reload daemon systemd: @@ -109,7 +109,7 @@ src: server.properties.j2 register: create_server_properties notify: - - restart kafka + - Restart kafka - name: Delete meta.properties become: true @@ -139,7 +139,7 @@ - { limit_type: 'hard', limit_item: 'memlock', value: unlimited } - name: reload settings from all system configuration files - shell: sysctl --system + command: sysctl --system # SASL Setup # - name: copy SASL config file diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml index 68ead8df3c..5b1b05938e 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml @@ -4,7 +4,7 @@ - name: Copy configuration from previous version copy: - remote_src: yes + remote_src: true src: /opt/kafka/config/ dest: "{{ kafka_install_dir}}/config" mode: preserve diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml index d79968d856..512f666379 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml @@ -8,13 +8,16 @@ /opt/kafka/bin/kafka-server-start.sh --version | grep Commit | grep -oP '^\d+\.\d+\.\d+' register: result -- name: Set old Kafka version fact +- name: Set old_kafka_version fact set_fact: old_kafka_version: "{{ result.stdout }}" - name: Check for upgrade flag file stat: path: "{{ lock_file }}" + get_attributes: false + get_checksum: false + get_mime: false register: lock_file_status - name: Include upgrade tasks diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml index ec4283ddb9..2d5635606b 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml @@ -2,6 +2,9 @@ - name: Check if Kafka is installed in default location stat: path: /opt/kafka/bin/kafka-server-start.sh + get_attributes: false + get_checksum: false + get_mime: false register: kafka_exec_file - name: Assert Kafka location diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml index c6c064cc69..4058df7a55 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/set-updated-version.yml @@ -1,6 +1,9 @@ --- - name: Check if server.properties file exists stat: + get_attributes: false + get_checksum: false + get_mime: false path: /opt/kafka/config/server.properties - name: Modify inter.broker.protocol.version property From 4eccaaf763e4ffc6d9bc723edb38d82886b627bb Mon Sep 17 00:00:00 2001 From: cicharka Date: Mon, 7 Mar 2022 09:50:53 +0100 Subject: [PATCH 13/21] * change 'yes' to 'true' in tasks --- ansible/playbooks/roles/kafka/handlers/main.yml | 2 +- ansible/playbooks/roles/kafka/tasks/common/start.yml | 4 ++-- .../playbooks/roles/kafka/tasks/generate-certificates.yml | 2 +- ansible/playbooks/roles/kafka/tasks/metrics.yml | 2 +- ansible/playbooks/roles/kafka/tasks/setup-kafka.yml | 6 +++--- .../playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml | 2 +- 
.../zookeeper/tasks/common/download_and_unpack_binary.yml | 6 +++--- ansible/playbooks/roles/zookeeper/tasks/main.yml | 6 +++--- ansible/playbooks/roles/zookeeper/tasks/metrics.yml | 2 +- .../roles/zookeeper/tasks/upgrade/install-upgrade.yml | 6 +++--- 10 files changed, 19 insertions(+), 19 deletions(-) diff --git a/ansible/playbooks/roles/kafka/handlers/main.yml b/ansible/playbooks/roles/kafka/handlers/main.yml index 542c3cb116..bf812de251 100644 --- a/ansible/playbooks/roles/kafka/handlers/main.yml +++ b/ansible/playbooks/roles/kafka/handlers/main.yml @@ -5,7 +5,7 @@ service: name: kafka state: restarted - enabled: yes + enabled: true retries: 10 delay: 10 diff --git a/ansible/playbooks/roles/kafka/tasks/common/start.yml b/ansible/playbooks/roles/kafka/tasks/common/start.yml index 06462f582f..1f033ccfdd 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/start.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/start.yml @@ -3,8 +3,8 @@ service: name: kafka state: started - enabled: yes - daemon-reload: yes + enabled: true + daemon-reload: true # - name: wait for kafka port # wait_for: host={{kafka.listen_address| default('localhost')}} port={{kafka.port}} state=started timeout={{ kafka.wait_for_period }} diff --git a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml index 154e47a042..9a96fc4d8f 100644 --- a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml +++ b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml @@ -56,7 +56,7 @@ slurp: src: "{{ specification.security.ssl.server.keystore_location | dirname }}/{{ item }}" dest: "{{ specification.security.ssl.server.local_cert_download_path }}/{{ item }}" - flat: yes + flat: true loop: - "ca-cert" - "ca-key" diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 3621d963f6..678f47c84a 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -4,7 +4,7 @@ user: name: "{{ specification.user }}" groups: "{{ specification.jmx_exporter_group }}" - append: yes + append: true - name: prometheus jmx | configuration file become: true diff --git a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml index a3110f89e9..19c4c75d8b 100644 --- a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml +++ b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml @@ -2,12 +2,12 @@ - name: Setup group group: name: "{{ specification.group }}" - system: yes + system: true - name: Setup user user: name: "{{ specification.user }}" - system: yes + system: true group: "{{ specification.group }}" shell: "/usr/sbin/nologin" @@ -51,7 +51,7 @@ - name: Reload daemon systemd: - daemon-reload: yes + daemon-reload: true - name: Create data_dir file: diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml index 5b1b05938e..bdff295c4c 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml @@ -14,7 +14,7 @@ dest: /opt/kafka state: link src: "{{ kafka_install_dir}}" - force: yes + force: true - name: Remove previous version binaries file: diff --git a/ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml index 
5fea6a0a12..a07982e145 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/common/download_and_unpack_binary.yml @@ -7,10 +7,10 @@ file_name: "{{ zookeeper_bin_filename }}" - name: Create {{ zookeeper_install_dir }} directory - become: yes + become: true file: path: "{{ zookeeper_install_dir }}" - recurse: yes + recurse: true owner: "{{ zookeeper_user }}" group: "{{ zookeeper_group }}" mode: u=rwx,g=rx,o=rx @@ -19,7 +19,7 @@ - name: Unpack Zookeeper-{{ zookeeper_version }} binary become: true unarchive: - remote_src: yes + remote_src: true src: "{{ download_directory }}/{{ zookeeper_bin_filename }}" dest: "{{ zookeeper_install_dir }}" creates: "{{ zookeeper_install_dir }}/bin" diff --git a/ansible/playbooks/roles/zookeeper/tasks/main.yml b/ansible/playbooks/roles/zookeeper/tasks/main.yml index 5c7766f162..c804420fa8 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/main.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/main.yml @@ -2,13 +2,13 @@ - name: Create Zookeeper group group: name: "{{ zookeeper_group }}" - system: yes + system: true - name: Create Zookeeper user user: name: "{{ zookeeper_user }}" group: "{{ zookeeper_group }}" - system: yes + system: true shell: "/usr/sbin/nologin" - name: Install Java package @@ -91,7 +91,7 @@ - name: Enable and start Zookeeper service service: name: zookeeper - enabled: yes + enabled: true state: started - include_tasks: metrics.yml diff --git a/ansible/playbooks/roles/zookeeper/tasks/metrics.yml b/ansible/playbooks/roles/zookeeper/tasks/metrics.yml index 4a78350d9f..99aba036ae 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/metrics.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/metrics.yml @@ -2,7 +2,7 @@ user: name: "{{ zookeeper_user }}" groups: "{{ jmx_exporter_group }}" - append: yes + append: true - name: prometheus jmx | configuration file become: true diff --git a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml index 8f0ff60d41..4619dcd8e2 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml @@ -4,7 +4,7 @@ - name: Copy configuration from previous version copy: - remote_src: yes + remote_src: true src: /opt/zookeeper/conf/ dest: "{{ zookeeper_install_dir }}}/conf" mode: preserve @@ -14,7 +14,7 @@ path: /opt/zookeeper state: link src: "{{ zookeeper_install_dir }}" - force: yes + force: true - name: Reconfigure Zookeeper service to use symbolic link lineinfile: @@ -43,7 +43,7 @@ systemd: name: zookeeper state: started - daemon-reload: yes + daemon-reload: true - name: Remove previous version binaries file: From 9dc66c1ba3f32e1197658b1ea2a8d6ded21c4dbe Mon Sep 17 00:00:00 2001 From: cicharka Date: Mon, 7 Mar 2022 16:37:51 +0100 Subject: [PATCH 14/21] fix permission --- ansible/playbooks/roles/kafka/tasks/metrics.yml | 2 +- ansible/playbooks/roles/kafka/tasks/verify-kafka.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 678f47c84a..fb7f53c268 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -44,7 +44,7 @@ state: directory owner: root group: prometheus - mode: 0750 + mode: u=rwx,g=rx,o= delegate_to: "{{ node }}" loop_control: loop_var: node diff --git 
a/ansible/playbooks/roles/kafka/tasks/verify-kafka.yml b/ansible/playbooks/roles/kafka/tasks/verify-kafka.yml index dde93e7e33..6d45957bed 100644 --- a/ansible/playbooks/roles/kafka/tasks/verify-kafka.yml +++ b/ansible/playbooks/roles/kafka/tasks/verify-kafka.yml @@ -10,5 +10,5 @@ dest: "/home/{{ admin_user.name }}/kafka_producer_consumer.py" owner: "{{ admin_user.name }}" group: "{{ admin_user.name }}" - mode: 0755 + mode: u=rwx,go=rx src: kafka_producer_consumer.py.j2 From 878275cfca7c1f46bbc1a9e5c6ff7db250dab15c Mon Sep 17 00:00:00 2001 From: cicharka Date: Tue, 8 Mar 2022 14:45:55 +0100 Subject: [PATCH 15/21] fix ansible-lint error for kafka role --- ansible/playbooks/kafka.yml | 4 +-- .../common/download_and_unpack_binary.yml | 1 + .../kafka/tasks/generate-certificates.yml | 8 +++-- .../playbooks/roles/kafka/tasks/metrics.yml | 1 - .../roles/kafka/tasks/setup-kafka.yml | 29 ++++++++++++++----- .../kafka/tasks/upgrade/install-upgrade.yml | 6 ++-- .../roles/kafka/tasks/upgrade/main.yml | 1 + .../kafka/tasks/upgrade/preflight-check.yml | 24 +++++++-------- .../kafka/tasks/upgrade/update-properties.yml | 4 +-- 9 files changed, 47 insertions(+), 31 deletions(-) diff --git a/ansible/playbooks/kafka.yml b/ansible/playbooks/kafka.yml index 85609a5aa1..78d3762f30 100644 --- a/ansible/playbooks/kafka.yml +++ b/ansible/playbooks/kafka.yml @@ -2,8 +2,8 @@ # Ansible playbook that makes sure the base items for all nodes are installed - hosts: all - gather_facts: yes - tasks: [ ] + gather_facts: true + tasks: [] - hosts: kafka become: true diff --git a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml index ef42aa6201..7596f736bf 100644 --- a/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml +++ b/ansible/playbooks/roles/kafka/tasks/common/download_and_unpack_binary.yml @@ -17,5 +17,6 @@ file: path: "{{ kafka_install_dir }}" state: directory + mode: u=rwx,go=rx owner: "{{ specification.user }}" group: "{{ specification.group }}" diff --git a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml index 9a96fc4d8f..ed6e216943 100644 --- a/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml +++ b/ansible/playbooks/roles/kafka/tasks/generate-certificates.yml @@ -50,6 +50,7 @@ file: path: "{{ specification.security.ssl.server.local_cert_download_path }}" state: directory + mode: u=rwx,go= delegate_to: localhost - name: Fetching files @@ -65,8 +66,9 @@ - name: Copy signing certificate and key to brokers copy: - src: "{{ specification.security.ssl.server.local_cert_download_path }}/{{ item }}" - dest: "{{ specification.security.ssl.server.keystore_location | dirname }}/" + src: "{{ specification.security.ssl.server.local_cert_download_path }}/{{ item }}" + dest: "{{ specification.security.ssl.server.keystore_location | dirname }}/" + mode: preserve loop: - "ca-cert" - "ca-key" @@ -99,7 +101,7 @@ - name: Check if certificate signed by CA is already imported shell: |- - keytool -list -v -keystore {{ specification.security.ssl.server.keystore_location }} \ + set -o pipefail && keytool -list -v -keystore {{ specification.security.ssl.server.keystore_location }} \ -storepass {{ specification.security.ssl.server.passwords.keystore }} \ -alias localhost \ | grep -i 'Certificate chain length: 2' diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 
fb7f53c268..1476221db8 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -62,4 +62,3 @@ loop_control: loop_var: node loop: "{{ groups.prometheus }}" - diff --git a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml index 19c4c75d8b..52d5371b0e 100644 --- a/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml +++ b/ansible/playbooks/roles/kafka/tasks/setup-kafka.yml @@ -22,7 +22,8 @@ RedHat: - java-1.8.0-openjdk-headless module_defaults: - yum: { lock_timeout: "{{ yum_lock_timeout }}" } + yum: + lock_timeout: "{{ yum_lock_timeout }}" - name: Download and unpack Kafka's binary include_tasks: common/download_and_unpack_binary.yml @@ -74,7 +75,7 @@ group: "{{ specification.group }}" mode: u=rwx,go=rx -- name: Create /etc/kafka directory +- name: Create /etc/kafka directory # noqa risky-file-permissions file: path: /etc/kafka state: directory @@ -131,12 +132,24 @@ domain: "{{ specification.user }}" limit_type: "{{ item.limit_type }}" limit_item: "{{ item.limit_item }}" - value: "{{item.value}}" - with_items: - - { limit_type: '-', limit_item: 'nofile', value: 128000 } - - { limit_type: '-', limit_item: 'nproc', value: 128000 } - - { limit_type: 'soft', limit_item: 'memlock', value: unlimited } - - { limit_type: 'hard', limit_item: 'memlock', value: unlimited } + value: "{{ item.value }}" + loop: + - + limit_type: '-' + limit_item: 'nofile' + value: 128000 + - + limit_type: '-' + limit_item: 'nproc' + value: 128000 + - + limit_type: 'soft' + limit_item: 'memlock' + value: unlimited + - + limit_type: 'hard' + limit_item: 'memlock' + value: unlimited - name: reload settings from all system configuration files command: sysctl --system diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml index bdff295c4c..6e2dc9201f 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/install-upgrade.yml @@ -6,14 +6,14 @@ copy: remote_src: true src: /opt/kafka/config/ - dest: "{{ kafka_install_dir}}/config" + dest: "{{ kafka_install_dir }}/config" mode: preserve - name: Link /opt/kafka to recently installed version file: dest: /opt/kafka state: link - src: "{{ kafka_install_dir}}" + src: "{{ kafka_install_dir }}" force: true - name: Remove previous version binaries @@ -26,7 +26,7 @@ set -o pipefail && grep log.dirs /opt/kafka/config/server.properties | awk -F'=' '{print $2}' register: log_dirs - changed_when: False + changed_when: false - name: Remove lost+found directory from log.dirs file: diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml index 512f666379..b7245a8d4f 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/main.yml @@ -25,6 +25,7 @@ - name: Create upgrade flag file file: path: "{{ lock_file }}" + mode: u=rw,g=r,o= state: touch - name: Stop Kafka service diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml index 2d5635606b..97aa26ac87 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/preflight-check.yml @@ -1,14 +1,14 @@ --- - - name: Check if Kafka is installed in default location - stat: - path: 
/opt/kafka/bin/kafka-server-start.sh - get_attributes: false - get_checksum: false - get_mime: false - register: kafka_exec_file +- name: Check if Kafka is installed in default location + stat: + path: /opt/kafka/bin/kafka-server-start.sh + get_attributes: false + get_checksum: false + get_mime: false + register: kafka_exec_file - - name: Assert Kafka location - assert: - that: - - kafka_exec_file.stat.exists - fail_msg: Kafka not found in /opt/kafka (Epiphany default) - check your configuration \ No newline at end of file +- name: Assert Kafka location + assert: + that: + - kafka_exec_file.stat.exists + fail_msg: Kafka not found in /opt/kafka (Epiphany default) - check your configuration diff --git a/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml b/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml index f16499aa7a..584f96f9ba 100644 --- a/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml +++ b/ansible/playbooks/roles/kafka/tasks/upgrade/update-properties.yml @@ -6,7 +6,7 @@ register: current_kafka_version_property failed_when: - result.rc == 2 - changed_when: False + changed_when: false - name: Add current_kafka_version property lineinfile: @@ -21,7 +21,7 @@ register: inter_broker_protocol_version_property failed_when: - result.rc == 2 - changed_when: False + changed_when: false - name: Add inter.broker.protocol.version property lineinfile: From bad7418930d26454e07cf0a29f252ef30ded799e Mon Sep 17 00:00:00 2001 From: cicharka Date: Tue, 8 Mar 2022 15:46:28 +0100 Subject: [PATCH 16/21] fix ansible-lint errors for zookeeper role --- .../roles/zookeeper/handlers/main.yml | 2 +- .../playbooks/roles/zookeeper/tasks/main.yml | 14 +++++++---- .../tasks/upgrade/install-upgrade.yml | 3 ++- .../tasks/upgrade/preflight-check.yml | 24 +++++++++---------- ansible/playbooks/zookeeper.yml | 4 ++-- 5 files changed, 27 insertions(+), 20 deletions(-) diff --git a/ansible/playbooks/roles/zookeeper/handlers/main.yml b/ansible/playbooks/roles/zookeeper/handlers/main.yml index 1dd6e5db57..f4c3fd1480 100644 --- a/ansible/playbooks/roles/zookeeper/handlers/main.yml +++ b/ansible/playbooks/roles/zookeeper/handlers/main.yml @@ -16,4 +16,4 @@ state: restarted delegate_to: "{{ item }}" with_inventory_hostnames: - - prometheus \ No newline at end of file + - prometheus diff --git a/ansible/playbooks/roles/zookeeper/tasks/main.yml b/ansible/playbooks/roles/zookeeper/tasks/main.yml index c804420fa8..6ed5cb8ae9 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/main.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/main.yml @@ -22,7 +22,8 @@ RedHat: - java-1.8.0-openjdk-headless module_defaults: - yum: { lock_timeout: "{{ yum_lock_timeout }}" } + yum: + lock_timeout: "{{ yum_lock_timeout }}" - name: Download and unpack Zookeeper's binary include_tasks: common/download_and_unpack_binary.yml @@ -31,6 +32,7 @@ file: path: "{{ item }}" state: directory + mode: u=rwx,go=rx owner: "{{ zookeeper_user }}" group: "{{ zookeeper_group }}" with_items: @@ -42,9 +44,12 @@ - name: Check if jmx exporter is available stat: path: "{{ prometheus_jmx_exporter_path }}" + get_attributes: false + get_checksum: false + get_mime: false register: exporter -- name: Create Zookeeper service +- name: Create Zookeeper service # noqa risky-file-permissions (https://github.com/ansible-community/ansible-lint/pull/1030) template: src: zookeeper.service.j2 dest: /lib/systemd/system/zookeeper.service @@ -59,9 +64,10 @@ dest: /var/lib/zookeeper/myid owner: "{{ zookeeper_user }}" group: "{{ zookeeper_group 
}}" + mode: preserve notify: Restart zookeeper -- name: Configure Zookeeper +- name: Configure Zookeeper # noqa risky-file-permissions (https://github.com/ansible-community/ansible-lint/pull/1030) template: src: zoo.cfg.j2 dest: "{{ zookeeper_install_dir }}/conf/zoo.cfg" @@ -81,7 +87,7 @@ dest: "/etc/profile.d/zookeeper_path.sh" mode: u=rwx,g=rx,o=rx -- name: Update the log4j config with saner production values +- name: Update the log4j config with saner production values # noqa risky-file-permissions (https://github.com/ansible-community/ansible-lint/pull/1030) template: src: log4j.properties.j2 dest: "{{ zookeeper_install_dir }}/conf/log4j.properties" diff --git a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml index 4619dcd8e2..c3fbd77e17 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/upgrade/install-upgrade.yml @@ -29,7 +29,8 @@ patterns: "snapshot.*" register: snapshot_exists -# From 3.5.5 version, ZooKeeper is not able to start when no snapshot files present, what is valid scenario in 3.4.X version. Empty snapshot downloaded from Zookeeper's Jira ticket. +# From 3.5.5 version, ZooKeeper is not able to start when no snapshot files present, what is valid scenario in 3.4.X version. +# Empty snapshot downloaded from Zookeeper's Jira ticket. - name: Copy empty snapshot if not exists copy: dest: "{{ zookeeper_data_dir }}/version-2" diff --git a/ansible/playbooks/roles/zookeeper/tasks/upgrade/preflight-check.yml b/ansible/playbooks/roles/zookeeper/tasks/upgrade/preflight-check.yml index b5c9cb6cc6..b13fb74d86 100644 --- a/ansible/playbooks/roles/zookeeper/tasks/upgrade/preflight-check.yml +++ b/ansible/playbooks/roles/zookeeper/tasks/upgrade/preflight-check.yml @@ -1,14 +1,14 @@ --- - - name: Check if Zookeeper is installed in default location - stat: - path: /opt/zookeeper/bin/zkServer.sh - get_attributes: false - get_checksum: false - get_mime: false - register: zookeeper_exec_file +- name: Check if Zookeeper is installed in default location + stat: + path: /opt/zookeeper/bin/zkServer.sh + get_attributes: false + get_checksum: false + get_mime: false + register: zookeeper_exec_file - - name: Assert Zookeeper location - assert: - that: - - zookeeper_exec_file.stat.exists - fail_msg: Zookeeper not found in /opt/zookeeper (Epiphany default) - check your configuration +- name: Assert Zookeeper location + assert: + that: + - zookeeper_exec_file.stat.exists + fail_msg: Zookeeper not found in /opt/zookeeper (Epiphany default) - check your configuration diff --git a/ansible/playbooks/zookeeper.yml b/ansible/playbooks/zookeeper.yml index d681feaa53..a8c0d8366f 100644 --- a/ansible/playbooks/zookeeper.yml +++ b/ansible/playbooks/zookeeper.yml @@ -2,8 +2,8 @@ # Ansible playbook that makes sure the base items for all nodes are installed - hosts: all - gather_facts: yes - tasks: [ ] + gather_facts: true + tasks: [] - hosts: zookeeper become: true From b1a080247debe0efc379159f916ab98722d54ecb Mon Sep 17 00:00:00 2001 From: cicharka Date: Wed, 23 Mar 2022 13:40:46 +0100 Subject: [PATCH 17/21] Add verification for prometheus groups existence --- .../playbooks/roles/kafka/handlers/main.yml | 1 + .../playbooks/roles/kafka/tasks/metrics.yml | 90 ++++++++++--------- 2 files changed, 47 insertions(+), 44 deletions(-) diff --git a/ansible/playbooks/roles/kafka/handlers/main.yml b/ansible/playbooks/roles/kafka/handlers/main.yml index 
bf812de251..36fbfe6c82 100644 --- a/ansible/playbooks/roles/kafka/handlers/main.yml +++ b/ansible/playbooks/roles/kafka/handlers/main.yml @@ -19,3 +19,4 @@ loop_control: loop_var: node loop: "{{ groups.prometheus }}" + when: groups.prometheus is defined diff --git a/ansible/playbooks/roles/kafka/tasks/metrics.yml b/ansible/playbooks/roles/kafka/tasks/metrics.yml index 1476221db8..4c33a5ba76 100644 --- a/ansible/playbooks/roles/kafka/tasks/metrics.yml +++ b/ansible/playbooks/roles/kafka/tasks/metrics.yml @@ -15,50 +15,52 @@ group: "{{ specification.group }}" mode: u=rx,go=r -- name: delegated | create prometheus system group - group: - name: prometheus - system: true - state: present - delegate_to: "{{ node }}" - loop_control: - loop_var: node - loop: "{{ groups.prometheus }}" - -- name: delegated | create prometheus system user - user: - name: prometheus - system: true - shell: "/usr/sbin/nologin" - group: prometheus - createhome: false - delegate_to: "{{ node }}" - loop_control: - loop_var: node - loop: "{{ groups.prometheus }}" +- name: Configure metrics on prometheus machines + when: groups.prometheus is defined + block: + - name: delegated | create prometheus system group + group: + name: prometheus + system: true + state: present + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" + - name: delegated | create prometheus system user + user: + name: prometheus + system: true + shell: "/usr/sbin/nologin" + group: prometheus + createhome: false + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" -- name: delegated | create file_sd for service discovery configs - file: - dest: "{{ specification.prometheus_config_dir }}/file_sd" - state: directory - owner: root - group: prometheus - mode: u=rwx,g=rx,o= - delegate_to: "{{ node }}" - loop_control: - loop_var: node - loop: "{{ groups.prometheus }}" + - name: delegated | create file_sd for service discovery configs + file: + dest: "{{ specification.prometheus_config_dir }}/file_sd" + state: directory + owner: root + group: prometheus + mode: u=rwx,g=rx,o= + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" -- name: delegated | copy file_sd_config to prometheus hosts - template: - dest: "{{ specification.prometheus_config_dir }}/file_sd/kafka-jmx-{{ inventory_hostname }}.yml" - src: file_sd_config.yml.j2 - owner: root - group: root - mode: u=rx,go=r - notify: Restart prometheus - delegate_to: "{{ node }}" - loop_control: - loop_var: node - loop: "{{ groups.prometheus }}" + - name: delegated | copy file_sd_config to prometheus hosts + template: + dest: "{{ specification.prometheus_config_dir }}/file_sd/kafka-jmx-{{ inventory_hostname }}.yml" + src: file_sd_config.yml.j2 + owner: root + group: root + mode: u=rx,go=r + notify: Restart prometheus + delegate_to: "{{ node }}" + loop_control: + loop_var: node + loop: "{{ groups.prometheus }}" From b67c405e97f699e2bc6f554e9f1e129100ebede4 Mon Sep 17 00:00:00 2001 From: cicharka Date: Wed, 23 Mar 2022 15:31:03 +0100 Subject: [PATCH 18/21] kafka: adjust kafka.version in kafka exporter schema --- schema/common/defaults/configuration/kafka-exporter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schema/common/defaults/configuration/kafka-exporter.yml b/schema/common/defaults/configuration/kafka-exporter.yml index 334bc124e4..de44bac6c8 100644 --- a/schema/common/defaults/configuration/kafka-exporter.yml +++ 
b/schema/common/defaults/configuration/kafka-exporter.yml @@ -12,7 +12,7 @@ specification: - '--group.filter=.*' # Regex that determines which consumer groups to collect. #- '--tls.insecure-skip-tls-verify' # If true, the server's certificate will not be checked for validity. This will make your HTTPS connections insecure. #- '--log.enable-sarama' # Turn on Sarama logging - - '--kafka.version=2.6.0' + - '--kafka.version=2.8.1' #- '--sasl.enabled' # Connect using SASL/PLAIN. #- '--sasl.handshake' # Only set this to false if using a non-Kafka SASL proxy #- '--sasl.username=""' From 5c1a3eeefcf067e95ba0d5208539b83a4a0ea56e Mon Sep 17 00:00:00 2001 From: cicharka Date: Fri, 25 Mar 2022 13:41:03 +0100 Subject: [PATCH 19/21] ensure that kafka_exporter definition is up to date with currently used kafka version --- .../kafka_exporter/tasks/upgrade/main.yml | 39 +++++++++++++++++-- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml b/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml index b309f8fa55..c2b9692829 100644 --- a/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml +++ b/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml @@ -30,10 +30,41 @@ set_fact: kafka_exporter_version_old: "{{ kafka_exporter_installed_version.stdout }}" - - name: Avoiding risk of downgrade - when: kafka_exporter_version_old is not version( kafka_exporter.version, '<' ) - debug: - msg: "Skipping upgrade: Kafka Exporter in the same or newer version already installed!" + # Between versions v1.3 and v2.0 there is no kafka_exporter version upgrade, but there is kafka upgrade + # For this reason kafka_exporter service definition needs to be updated - by simply enforing kafka_exporter upgrade + - name: Verify kafka version used in kafka_exporter service definition + block: + - name: Get installed Kafka version + shell: >- + set -o pipefail && + /opt/kafka/bin/kafka-server-start.sh --version | grep Commit | grep -oP '^\d+\.\d+\.\d+' + register: result + + - name: Set current_kafka_version fact + set_fact: + current_kafka_version: "{{ result.stdout }}" + + - name: Fetch kafka-exporter.service file from the remote + slurp: + src: /etc/systemd/system/kafka-exporter.service + register: kafka_exporter_service_definition + + - name: Parse kafka-exporter.service content + set_fact: + _exporter_service_definition_content: "{{ kafka_exporter_service_definition['content'] | b64decode | from_ini }}" + + - name: Get kafka version used in service definition + set_fact: + kafka_version_in_definition: "{{ _kafka_version_in_definition[0].split('=')[-1] }}" + vars: + _kafka_version_in_definition: + "{{ _exporter_service_definition_content['Service']['execstart'].split(' ') | select('search', 'kafka.version') }}" + + - name: Avoiding risk of downgrade + when: kafka_exporter_version_old is not version( kafka_exporter.version, '<' ) or + current_kafka_version is not version( kafka_version_in_definition, '!=') + debug: + msg: "Skipping upgrade: Kafka Exporter in the same or newer version already installed!" 
- name: Run upgrade tasks when: lock_file_status.stat.exists or kafka_exporter_version_old is version( kafka_exporter.version, '<' ) From 58174b6cf7d7425861d1d044f5f4a38249f98248 Mon Sep 17 00:00:00 2001 From: cicharka Date: Fri, 25 Mar 2022 13:59:51 +0100 Subject: [PATCH 20/21] remove kafka.version configuration flag from schema * We have specified version of kafka used in epiphany, therefore it should not be possible for user to adjust its version in kafka_exporter flags used in service definition --- .../roles/kafka_exporter/templates/kafka-exporter.service.j2 | 2 +- schema/common/defaults/configuration/kafka-exporter.yml | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/ansible/playbooks/roles/kafka_exporter/templates/kafka-exporter.service.j2 b/ansible/playbooks/roles/kafka_exporter/templates/kafka-exporter.service.j2 index 9ed7ebbe63..b961102ce8 100644 --- a/ansible/playbooks/roles/kafka_exporter/templates/kafka-exporter.service.j2 +++ b/ansible/playbooks/roles/kafka_exporter/templates/kafka-exporter.service.j2 @@ -7,7 +7,7 @@ After=kafka.service User=kafka_exporter Group=kafka_exporter ExecStartPre=/bin/bash -c '(while ! ss -H -t -l -n sport = :9092 | grep -q "^LISTEN.*:9092"; do echo "Waiting for Kafka Broker port to be listening..."; sleep 2; done)' -ExecStart=/opt/kafka_exporter/kafka_exporter {{ kafka_instances }} {% for flag in specification.config_flags %} {{ flag }} {% endfor %} +ExecStart=/opt/kafka_exporter/kafka_exporter {{ kafka_instances }} {% for flag in specification.config_flags %} {{ flag }} {% endfor %} --kafka.version=2.8.1 SyslogIdentifier=kafka_exporter Restart=always diff --git a/schema/common/defaults/configuration/kafka-exporter.yml b/schema/common/defaults/configuration/kafka-exporter.yml index de44bac6c8..720b9c9c85 100644 --- a/schema/common/defaults/configuration/kafka-exporter.yml +++ b/schema/common/defaults/configuration/kafka-exporter.yml @@ -12,7 +12,6 @@ specification: - '--group.filter=.*' # Regex that determines which consumer groups to collect. #- '--tls.insecure-skip-tls-verify' # If true, the server's certificate will not be checked for validity. This will make your HTTPS connections insecure. #- '--log.enable-sarama' # Turn on Sarama logging - - '--kafka.version=2.8.1' #- '--sasl.enabled' # Connect using SASL/PLAIN. 
#- '--sasl.handshake' # Only set this to false if using a non-Kafka SASL proxy #- '--sasl.username=""' From 7616a9aecb9de458887d556c298a639a80ccc1bb Mon Sep 17 00:00:00 2001 From: cicharka Date: Fri, 25 Mar 2022 15:48:34 +0100 Subject: [PATCH 21/21] * adjust service definition verification --- .../roles/kafka/files/jmx-kafka-config.yml | 2 +- .../kafka_exporter/tasks/upgrade/main.yml | 49 ++++++------------- .../upgrade/verify-service-definition.yml | 46 +++++++++++++++++ 3 files changed, 61 insertions(+), 36 deletions(-) create mode 100644 ansible/playbooks/roles/kafka_exporter/tasks/upgrade/verify-service-definition.yml diff --git a/ansible/playbooks/roles/kafka/files/jmx-kafka-config.yml b/ansible/playbooks/roles/kafka/files/jmx-kafka-config.yml index 6a394fd80a..21b32cb214 100644 --- a/ansible/playbooks/roles/kafka/files/jmx-kafka-config.yml +++ b/ansible/playbooks/roles/kafka/files/jmx-kafka-config.yml @@ -85,4 +85,4 @@ rules: name: kafka_$1_$2_$3 type: GAUGE labels: - quantile: "0.$4" \ No newline at end of file + quantile: "0.$4" diff --git a/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml b/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml index c2b9692829..d262b42f5c 100644 --- a/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml +++ b/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/main.yml @@ -30,41 +30,10 @@ set_fact: kafka_exporter_version_old: "{{ kafka_exporter_installed_version.stdout }}" - # Between versions v1.3 and v2.0 there is no kafka_exporter version upgrade, but there is kafka upgrade - # For this reason kafka_exporter service definition needs to be updated - by simply enforing kafka_exporter upgrade - - name: Verify kafka version used in kafka_exporter service definition - block: - - name: Get installed Kafka version - shell: >- - set -o pipefail && - /opt/kafka/bin/kafka-server-start.sh --version | grep Commit | grep -oP '^\d+\.\d+\.\d+' - register: result - - - name: Set current_kafka_version fact - set_fact: - current_kafka_version: "{{ result.stdout }}" - - - name: Fetch kafka-exporter.service file from the remote - slurp: - src: /etc/systemd/system/kafka-exporter.service - register: kafka_exporter_service_definition - - - name: Parse kafka-exporter.service content - set_fact: - _exporter_service_definition_content: "{{ kafka_exporter_service_definition['content'] | b64decode | from_ini }}" - - - name: Get kafka version used in service definition - set_fact: - kafka_version_in_definition: "{{ _kafka_version_in_definition[0].split('=')[-1] }}" - vars: - _kafka_version_in_definition: - "{{ _exporter_service_definition_content['Service']['execstart'].split(' ') | select('search', 'kafka.version') }}" - - - name: Avoiding risk of downgrade - when: kafka_exporter_version_old is not version( kafka_exporter.version, '<' ) or - current_kafka_version is not version( kafka_version_in_definition, '!=') - debug: - msg: "Skipping upgrade: Kafka Exporter in the same or newer version already installed!" + - name: Avoiding risk of downgrade + when: kafka_exporter_version_old is not version( kafka_exporter.version, '<' ) + debug: + msg: "Skipping upgrade: Kafka Exporter in the same or newer version already installed!" 
- name: Run upgrade tasks when: lock_file_status.stat.exists or kafka_exporter_version_old is version( kafka_exporter.version, '<' ) @@ -132,3 +101,13 @@ file: path: "{{ lock_file }}" state: absent + + - name: Set kafka_exporter_upgraded flag + set_fact: + kafka_exporter_upgraded: True + +# Between versions v1.3 and v2.0 there is no kafka-exporter version upgrade, but there is kafka upgrade +# For this reason kafka-exporter service definition needs to be updated +- name: Verify if kafka-exporter service definition needs to be updated + include_tasks: upgrade/verify-service-definition.yml + when: kafka_exporter_upgraded is not defined diff --git a/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/verify-service-definition.yml b/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/verify-service-definition.yml new file mode 100644 index 0000000000..27ee46e2c1 --- /dev/null +++ b/ansible/playbooks/roles/kafka_exporter/tasks/upgrade/verify-service-definition.yml @@ -0,0 +1,46 @@ +--- +- name: Get installed Kafka version + shell: >- + set -o pipefail && + /opt/kafka/bin/kafka-server-start.sh --version | grep Commit | grep -oP '^\d+\.\d+\.\d+' + register: result + +- name: Set current_kafka_version fact + set_fact: + current_kafka_version: "{{ result.stdout }}" + +- name: Fetch kafka-exporter.service file from the remote + slurp: + src: /etc/systemd/system/kafka-exporter.service + register: kafka_exporter_service_definition + +- name: Parse kafka-exporter.service content + set_fact: + _exporter_service_definition_content: "{{ kafka_exporter_service_definition['content'] | b64decode | from_ini }}" + +- name: Get kafka version used in service definition + set_fact: + kafka_version_in_definition: "{{ _kafka_version_in_definition[0].split('=')[-1] }}" + vars: + _kafka_version_in_definition: + "{{ _exporter_service_definition_content['Service']['execstart'].split(' ') | select('search', 'kafka.version') }}" + +- name: Update service definition + when: current_kafka_version is not version( kafka_version_in_definition, '==') + block: + - name: Update service + template: + src: kafka-exporter.service.j2 + dest: /etc/systemd/system/kafka-exporter.service + owner: root + group: root + mode: u=rw,go=r + + - name: Restart kafka-exporter service + service: + name: kafka-exporter + state: restarted + + - name: Reload systemd daemons + systemd: + daemon_reload: true
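
The check introduced in upgrade/verify-service-definition.yml can also be reproduced by hand when
debugging an already-upgraded broker. The snippet below is a minimal sketch only, assuming the
Epiphany defaults used throughout these patches (Kafka installed in /opt/kafka and the exporter unit
at /etc/systemd/system/kafka-exporter.service); it is not part of the role itself.

#!/usr/bin/env bash
# Sketch: compare the Kafka version reported by the broker with the
# --kafka.version flag pinned in the kafka-exporter systemd unit, mirroring
# the comparison done by upgrade/verify-service-definition.yml.
set -euo pipefail

# Same command the role uses to read the installed broker version (e.g. 2.8.1).
installed_version=$(/opt/kafka/bin/kafka-server-start.sh --version \
  | grep Commit | grep -oP '^\d+\.\d+\.\d+')

# Version baked into the ExecStart line by kafka-exporter.service.j2.
unit_version=$(grep -oP 'kafka\.version=\K[0-9.]+' \
  /etc/systemd/system/kafka-exporter.service)

if [ "${installed_version}" != "${unit_version}" ]; then
  echo "Unit pins Kafka ${unit_version} but broker runs ${installed_version}: regenerate the unit and restart kafka-exporter."
else
  echo "kafka-exporter unit matches installed Kafka ${installed_version}."
fi

This is the same comparison the role performs with slurp and from_ini; using grep here is only a
quick manual spot check, not a replacement for the upgrade tasks.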