From 267be30e214c6e56a818a3e84dd2726064765213 Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:40:04 +0100 Subject: [PATCH 01/19] Update helk-kibana-analysis-basic.yml Mapped ES port to host system --- docker/helk-kibana-analysis-basic.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/helk-kibana-analysis-basic.yml b/docker/helk-kibana-analysis-basic.yml index 4e9fbba8..08cd651d 100644 --- a/docker/helk-kibana-analysis-basic.yml +++ b/docker/helk-kibana-analysis-basic.yml @@ -25,6 +25,8 @@ services: soft: 160000 hard: 160000 restart: always + ports: + - "9200:9200" networks: helk: helk-logstash: @@ -157,4 +159,4 @@ secrets: kibana.yml: file: ./helk-kibana/config/kibana.yml htpasswd.users: - file: ./helk-nginx/htpasswd.users \ No newline at end of file + file: ./helk-nginx/htpasswd.users From 2fca6733185e05d2dc7baa8c91dbb26ce5f8f3d1 Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:41:29 +0100 Subject: [PATCH 02/19] Update helk-kibana-analysis-alert-basic.yml --- docker/helk-kibana-analysis-alert-basic.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/helk-kibana-analysis-alert-basic.yml b/docker/helk-kibana-analysis-alert-basic.yml index bfec88a3..5eb6a849 100644 --- a/docker/helk-kibana-analysis-alert-basic.yml +++ b/docker/helk-kibana-analysis-alert-basic.yml @@ -25,6 +25,8 @@ services: soft: 160000 hard: 160000 restart: always + ports: + - "9200:9200" networks: helk: helk-logstash: From f71965b70cdde5a7e2a2bfdd4957ae74c510b757 Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:42:10 +0100 Subject: [PATCH 03/19] Update helk-kibana-analysis-alert-trial.yml --- docker/helk-kibana-analysis-alert-trial.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/helk-kibana-analysis-alert-trial.yml b/docker/helk-kibana-analysis-alert-trial.yml index 027ab206..60daf4b5 100644 --- a/docker/helk-kibana-analysis-alert-trial.yml +++ b/docker/helk-kibana-analysis-alert-trial.yml @@ -26,6 +26,8 @@ services: soft: 160000 hard: 160000 restart: always + ports: + - "9200:9200" networks: helk: helk-logstash: From a794662b5a6462ce3a24cf26acaa197f8ede3503 Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:42:44 +0100 Subject: [PATCH 04/19] Update helk-kibana-notebook-analysis-alert-basic.yml --- docker/helk-kibana-notebook-analysis-alert-basic.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/helk-kibana-notebook-analysis-alert-basic.yml b/docker/helk-kibana-notebook-analysis-alert-basic.yml index 090cd305..7168100f 100644 --- a/docker/helk-kibana-notebook-analysis-alert-basic.yml +++ b/docker/helk-kibana-notebook-analysis-alert-basic.yml @@ -25,6 +25,8 @@ services: soft: 160000 hard: 160000 restart: always + ports: + - "9200:9200" networks: helk: helk-logstash: From 49553308b7bf7a106c823fa951843a1b986eff20 Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:43:18 +0100 Subject: [PATCH 05/19] Update helk-kibana-analysis-trial.yml --- docker/helk-kibana-analysis-trial.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/helk-kibana-analysis-trial.yml b/docker/helk-kibana-analysis-trial.yml index 3b50ee57..cdf72d38 100644 --- a/docker/helk-kibana-analysis-trial.yml +++ b/docker/helk-kibana-analysis-trial.yml @@ -26,6 +26,8 @@ services: soft: 160000 hard: 160000 restart: always + ports: + - "9200:9200" networks: helk: helk-logstash: @@ -154,4 +156,4 @@ secrets: elasticsearch.yml: file: 
./helk-elasticsearch/config/elasticsearch.yml kibana.yml: - file: ./helk-kibana/config/kibana.yml \ No newline at end of file + file: ./helk-kibana/config/kibana.yml From a9efc6035b92fd55ef7ff7ae1940873b4bac62b5 Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:43:47 +0100 Subject: [PATCH 06/19] Update helk-kibana-notebook-analysis-basic.yml --- docker/helk-kibana-notebook-analysis-basic.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/helk-kibana-notebook-analysis-basic.yml b/docker/helk-kibana-notebook-analysis-basic.yml index b1032f74..b6be805a 100644 --- a/docker/helk-kibana-notebook-analysis-basic.yml +++ b/docker/helk-kibana-notebook-analysis-basic.yml @@ -25,6 +25,8 @@ services: soft: 160000 hard: 160000 restart: always + ports: + - "9200:9200" networks: helk: helk-logstash: @@ -198,4 +200,4 @@ secrets: kibana.yml: file: ./helk-kibana/config/kibana.yml htpasswd.users: - file: ./helk-nginx/htpasswd.users \ No newline at end of file + file: ./helk-nginx/htpasswd.users From 0d5dbb92a9c1990f7ce6e25e085071fd52c99a6a Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:44:09 +0100 Subject: [PATCH 07/19] Update helk-kibana-notebook-analysis-trial.yml --- docker/helk-kibana-notebook-analysis-trial.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/helk-kibana-notebook-analysis-trial.yml b/docker/helk-kibana-notebook-analysis-trial.yml index f058e7b3..246ed623 100644 --- a/docker/helk-kibana-notebook-analysis-trial.yml +++ b/docker/helk-kibana-notebook-analysis-trial.yml @@ -26,6 +26,8 @@ services: soft: 160000 hard: 160000 restart: always + ports: + - "9200:9200" networks: helk: helk-logstash: From 4dea56defd698ef802d05caa4e1ae98f67adce63 Mon Sep 17 00:00:00 2001 From: Ashlee Jones Date: Fri, 27 Sep 2019 11:49:56 +0100 Subject: [PATCH 08/19] Update helk_powershell_susp_ps_commands.yml --- .../rules/helk_powershell_susp_ps_commands.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml b/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml index 30d52d21..e33de006 100644 --- a/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml +++ b/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml @@ -1,5 +1,12 @@ alert: -- debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "smtp_auth_file.yaml" description: Detects potential suspicious powershell parameters filter: - query: @@ -10,4 +17,4 @@ name: Windows-Suspicious-Powershell-commands_0 priority: 2 realert: minutes: 0 -type: any \ No newline at end of file +type: any From 5c7cbd363aca4c879de35cee7f819e119164aca4 Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Wed, 2 Oct 2019 08:15:47 +0100 Subject: [PATCH 09/19] Merged HELK-CUSTOM with HELK --- README.md | 17 +- docker/helk-curator/Dockerfile | 56 ++++ docker/helk-curator/actions.yaml | 123 ++++++++ docker/helk-curator/curator.yml | 22 ++ docker/helk-curator/helk-curator-cron | 2 + docker/helk-elastalert/Dockerfile | 2 + docker/helk-elastalert/config.yaml | 2 +- .../helk_powershell_susp_ps_commands.yml | 3 +- .../helk_security_dcsync_backdoor_user.yml | 10 +- .../rules/helk_security_dcsync_non_dc.yml | 10 +- .../helk_security_rubes_logon_process.yml | 10 +- .../rules/helk_security_share_chrome_pipe.yml | 8 + 
.../rules/helk_sysmon_bits.yml | 8 + .../helk_sysmon_cmdline_file_creation.yml | 8 + .../helk_sysmon_cobalt_strike_msagent.yml | 8 + .../rules/helk_sysmon_cs_rundll32_network.yml | 8 + .../rules/helk_sysmon_csharp_compile.yml | 8 + .../rules/helk_sysmon_dcom_lm.yml | 8 + .../rules/helk_sysmon_internal_monologue.yml | 8 + .../rules/helk_sysmon_kerb_no_lsass.yml | 8 + .../rules/helk_sysmon_logonscripts_lm.yml | 8 + .../rules/helk_sysmon_net_administrators.yml | 8 + .../rules/helk_sysmon_net_group_domains.yml | 8 + ...elk_sysmon_sc_query_securitydescriptor.yml | 8 + ...lk_sysmon_scriptable_protocol_handlers.yml | 8 + .../rules/helk_sysmon_services_rare_child.yml | 8 + .../rules/helk_sysmon_wevtutil.yml | 8 + .../rules/helk_sysmon_wsmprovhost_winrm.yml | 8 + .../rules/helk_system_psexec_psh.yml | 8 + docker/helk-elastalert/smtp_auth_file.yaml | 2 + docker/helk-kibana-analysis-alert-basic.yml | 11 + docker/helk-kibana-analysis-alert-trial.yml | 11 + docker/helk-kibana-analysis-basic.yml | 11 + docker/helk-kibana-analysis-trial.yml | 11 + ...k-kibana-notebook-analysis-alert-basic.yml | 11 + ...k-kibana-notebook-analysis-alert-trial.yml | 11 + .../helk-kibana-notebook-analysis-basic.yml | 11 + .../helk-kibana-notebook-analysis-trial.yml | 11 + docker/helk_install.sh | 5 +- wiki/Architecture-Overview.md | 36 +++ wiki/Check-Kafka-topic-ingestion.md | 29 ++ wiki/Check-Winlogbeat-shipping.md | 6 + wiki/Create-Plugins-Offline-Package.md | 44 +++ wiki/Curator.md | 0 wiki/Deploy-KSQL-CLI-Locally.md | 161 +++++++++++ wiki/Elasticsearch.md | 100 +++++++ wiki/Export-Docker-Images-locally.md | 80 ++++++ wiki/Home.md | 14 + wiki/Installation.md | 272 ++++++++++++++++++ wiki/Kafka.md | 73 +++++ wiki/Kibana.md | 55 ++++ wiki/Load-Local-Docker-Images.md | 79 +++++ wiki/Logstash.md | 2 + wiki/Spark.md | 107 +++++++ wiki/Update-Kafka-Broker-IP.md | 33 +++ wiki/_Sidebar.md | 22 ++ 56 files changed, 1594 insertions(+), 15 deletions(-) create mode 100644 docker/helk-curator/Dockerfile create mode 100644 docker/helk-curator/actions.yaml create mode 100644 docker/helk-curator/curator.yml create mode 100644 docker/helk-curator/helk-curator-cron create mode 100644 docker/helk-elastalert/smtp_auth_file.yaml create mode 100644 wiki/Architecture-Overview.md create mode 100644 wiki/Check-Kafka-topic-ingestion.md create mode 100644 wiki/Check-Winlogbeat-shipping.md create mode 100644 wiki/Create-Plugins-Offline-Package.md create mode 100644 wiki/Curator.md create mode 100644 wiki/Deploy-KSQL-CLI-Locally.md create mode 100644 wiki/Elasticsearch.md create mode 100644 wiki/Export-Docker-Images-locally.md create mode 100644 wiki/Home.md create mode 100644 wiki/Installation.md create mode 100644 wiki/Kafka.md create mode 100644 wiki/Kibana.md create mode 100644 wiki/Load-Local-Docker-Images.md create mode 100644 wiki/Logstash.md create mode 100644 wiki/Spark.md create mode 100644 wiki/Update-Kafka-Broker-IP.md create mode 100644 wiki/_Sidebar.md diff --git a/README.md b/README.md index 82aa7d50..1c8ffb94 100644 --- a/README.md +++ b/README.md @@ -40,14 +40,14 @@ The project is currently in an alpha stage, which means that the code and the fu ## WIKI -* [Introduction](https://github.com/Cyb3rWard0g/HELK/wiki) -* [Architecture Overview](https://github.com/Cyb3rWard0g/HELK/wiki/Architecture-Overview) - * [Kafka](https://github.com/Cyb3rWard0g/HELK/wiki/Kafka) - * [Logstash](https://github.com/Cyb3rWard0g/HELK/wiki/Logstash) - * [Elasticsearch](https://github.com/Cyb3rWard0g/HELK/wiki/Elasticsearch) - * 
[Kibana](https://github.com/Cyb3rWard0g/HELK/wiki/Kibana) - * [Spark](https://github.com/Cyb3rWard0g/HELK/wiki/Spark) -* [Installation](https://github.com/Cyb3rWard0g/HELK/wiki/Installation) +* [Introduction](https://github.com/AlfieJ04/HELK-CUSTOM/wiki) +* [Architecture Overview](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Architecture-Overview) + * [Kafka](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Kafka) + * [Logstash](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Logstash) + * [Elasticsearch](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Elasticsearch) + * [Kibana](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Kibana) + * [Spark](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Spark) +* [Installation](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Installation) ## (Docker) Accessing the HELK's Images @@ -95,6 +95,7 @@ root@ede2a2503030:/opt/helk/scripts# # Contributors +* Ashlee Jones [@AshleeJones04](https://twitter.com/AshleeJones04) * Jose Luis Rodriguez [@Cyb3rPandaH](https://twitter.com/Cyb3rPandaH) * Robby Winchester [@robwinchester3](https://twitter.com/robwinchester3) * Jared Atkinson [@jaredatkinson](https://twitter.com/jaredcatkinson) diff --git a/docker/helk-curator/Dockerfile b/docker/helk-curator/Dockerfile new file mode 100644 index 00000000..66583f12 --- /dev/null +++ b/docker/helk-curator/Dockerfile @@ -0,0 +1,56 @@ +# HELK script: HELK Curator Dockerfile +# HELK build Stage: Alpha +# Author: Ashlee Jones (@AshleeJones04) +# License: Apache 2.0 + +# References: +# https://github.com/elastic/curator + +FROM cyb3rward0g/helk-base:0.0.3 +LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" +LABEL description="Dockerfile base for the HELK Curator." + +ENV CURATOR_GID=934 +ENV CURATOR_UID=934 +ENV CURATOR_USER=curatoruser +ENV CURATOR_HOME=/usr/share/curator +ENV DEBIAN_FRONTEND noninteractive + +# *********** Installing Prerequisites *************** +# -qq : No output except for errors +RUN apt-get update -qq && apt-get install -qqy --no-install-recommends \ + libmagic-dev \ + build-essential \ + python3-setuptools \ + git \ + python3-pip \ + python3-dev \ + tzdata \ + nano \ + cron \ + # ********* Clean **************************** + && apt-get -qy clean \ + autoremove \ + && rm -rf /var/lib/apt/lists/* \ + # ********* Install Curator ************** + && bash -c 'mkdir -pv /usr/share/curator' \ + && cd ${CURATOR_HOME} \ + && wget https://raw.githubusercontent.com/elastic/curator/master/requirements.txt \ + && pip3 install wheel \ + && pip3 install -r requirements.txt \ + && pip3 install elasticsearch-curator + +# ********* Copy Curator files and setup cron ************** +COPY actions.yaml ${CURATOR_HOME}/ +COPY curator.yml ${CURATOR_HOME}/ +COPY helk-curator-cron /etc/cron.d/helk-curator-cron +RUN chmod 0644 /etc/cron.d/helk-curator-cron +RUN crontab /etc/cron.d/helk-curator-cron +RUN touch /var/log/helk-curator-cron.log + + +# *********** RUN Curator *************** +WORKDIR ${CURATOR_HOME} +#ENTRYPOINT ["./curator-entrypoint.sh"] +#CMD ["curator","--config","/usr/share/curator/curator.yml","/usr/share/curator/actions.yaml"] +CMD cron && tail -f /var/log/helk-curator-cron.log \ No newline at end of file diff --git a/docker/helk-curator/actions.yaml b/docker/helk-curator/actions.yaml new file mode 100644 index 00000000..00f03a12 --- /dev/null +++ b/docker/helk-curator/actions.yaml @@ -0,0 +1,123 @@ +--- +# Remember, leave a key empty if there is no value. None will be a string, +# not a Python "NoneType" +# +# Also remember that all examples have 'disable_action' set to True. 
If you
+# want to use this action as a template, be sure to set this to False after
+# copying it.
+actions:
+  1:
+    action: delete_indices
+    description: >-
+      Delete indices older than 7 days (based on index name), for logs-endpoint-winevent-
+      prefixed indices. Ignore the error if the filter does not result in an
+      actionable list of indices (ignore_empty_list) and exit cleanly.
+    options:
+      ignore_empty_list: True
+      timeout_override:
+      continue_if_exception: False
+      disable_action: False
+    filters:
+    - filtertype: pattern
+      kind: prefix
+      value: logs-endpoint-winevent-
+      exclude:
+    - filtertype: age
+      source: name
+      direction: older
+      timestring: '%Y.%m.%d'
+      unit: days
+      unit_count: 7
+      exclude:
+  2:
+    action: delete_indices
+    description: >-
+      Delete indices older than 7 days (based on index name), for winlogbeat-
+      prefixed indices. Ignore the error if the filter does not result in an
+      actionable list of indices (ignore_empty_list) and exit cleanly.
+    options:
+      ignore_empty_list: True
+      timeout_override:
+      continue_if_exception: False
+      disable_action: False
+    filters:
+    - filtertype: pattern
+      kind: prefix
+      value: winlogbeat-
+      exclude:
+    - filtertype: age
+      source: name
+      direction: older
+      timestring: '%Y.%m.%d'
+      unit: days
+      unit_count: 7
+      exclude:
+  3:
+    action: delete_indices
+    description: >-
+      Delete indices older than 7 days (based on index name), for .monitoring-kibana-
+      prefixed indices. Ignore the error if the filter does not result in an
+      actionable list of indices (ignore_empty_list) and exit cleanly.
+    options:
+      ignore_empty_list: True
+      timeout_override:
+      continue_if_exception: False
+      disable_action: False
+    filters:
+    - filtertype: pattern
+      kind: prefix
+      value: .monitoring-kibana-
+      exclude:
+    - filtertype: age
+      source: name
+      direction: older
+      timestring: '%Y.%m.%d'
+      unit: days
+      unit_count: 7
+      exclude:
+  4:
+    action: delete_indices
+    description: >-
+      Delete indices older than 7 days (based on index name), for mitre-attack-
+      prefixed indices. Ignore the error if the filter does not result in an
+      actionable list of indices (ignore_empty_list) and exit cleanly.
+    options:
+      ignore_empty_list: True
+      timeout_override:
+      continue_if_exception: False
+      disable_action: False
+    filters:
+    - filtertype: pattern
+      kind: prefix
+      value: mitre-attack-
+      exclude:
+    - filtertype: age
+      source: name
+      direction: older
+      timestring: '%Y.%m.%d'
+      unit: days
+      unit_count: 7
+      exclude:
+  5:
+    action: delete_indices
+    description: >-
+      Delete indices older than 7 days (based on index name), for .monitoring-logstash-
+      prefixed indices. Ignore the error if the filter does not result in an
+      actionable list of indices (ignore_empty_list) and exit cleanly.
+    options:
+      ignore_empty_list: True
+      timeout_override:
+      continue_if_exception: False
+      disable_action: False
+    filters:
+    - filtertype: pattern
+      kind: prefix
+      value: .monitoring-logstash-
+      exclude:
+    - filtertype: age
+      source: name
+      direction: older
+      timestring: '%Y.%m.%d'
+      unit: days
+      unit_count: 7
+      exclude:
\ No newline at end of file
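Every action above is a destructive `delete_indices`, so it can be worth previewing what would match before the nightly cron job runs for real. A quick sketch, using the container name and file paths defined in this patch (Curator's `--dry-run` flag only logs what it would delete, without deleting anything):
```
sudo docker exec -ti helk-curator \
    curator --dry-run --config /usr/share/curator/curator.yml /usr/share/curator/actions.yaml
```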
diff --git a/docker/helk-curator/curator.yml b/docker/helk-curator/curator.yml
new file mode 100644
index 00000000..9c047b8d
--- /dev/null
+++ b/docker/helk-curator/curator.yml
@@ -0,0 +1,22 @@
+
+# Remember, leave a key empty if there is no value. None will be a string,
+# not a Python "NoneType"
+client:
+  hosts:
+    - helk-elasticsearch
+  port: 9200
+  url_prefix:
+  use_ssl: False
+  certificate:
+  client_cert:
+  client_key:
+  ssl_no_validate: False
+  http_auth:
+  timeout: 30
+  master_only: False
+
+logging:
+  loglevel: INFO
+  logfile:
+  logformat: default
+  blacklist: ['elasticsearch', 'urllib3']
diff --git a/docker/helk-curator/helk-curator-cron b/docker/helk-curator/helk-curator-cron
new file mode 100644
index 00000000..a33a003f
--- /dev/null
+++ b/docker/helk-curator/helk-curator-cron
@@ -0,0 +1,2 @@
+0 0 * * * curator --config /usr/share/curator/curator.yml /usr/share/curator/actions.yaml >> /var/log/helk-curator-cron.log 2>&1
+# An empty line is required at the end of this file for a valid cron file.
diff --git a/docker/helk-elastalert/Dockerfile b/docker/helk-elastalert/Dockerfile
index f4d39b89..1b685488 100644
--- a/docker/helk-elastalert/Dockerfile
+++ b/docker/helk-elastalert/Dockerfile
@@ -28,6 +28,7 @@ RUN apt-get update -qq && apt-get install -qqy --no-install-recommends \
     python3-dev \
     python3-setuptools \
     tzdata \
+    nano \
     # ********* Clean ****************************
     && apt-get -qy clean \
     autoremove \
@@ -47,6 +48,7 @@ RUN apt-get update -qq && apt-get install -qqy --no-install-recommends \
 # ********* Copy Elastalert files **************
 COPY scripts/* ${ESALERT_HOME}/
 COPY config.yaml ${ESALERT_HOME}/
+COPY smtp_auth_file.yaml ${ESALERT_HOME}/
 COPY rules/* ${ESALERT_HOME}/rules/
 COPY sigmac/sigmac-config.yml ${ESALERT_SIGMA_HOME}/sigmac-config.yml
diff --git a/docker/helk-elastalert/config.yaml b/docker/helk-elastalert/config.yaml
index 77c26acf..efcb4a7d 100644
--- a/docker/helk-elastalert/config.yaml
+++ b/docker/helk-elastalert/config.yaml
@@ -13,7 +13,7 @@ run_every:
   seconds: 30
 buffer_time:
   seconds: 45
-es_host: helk-elasticsearch
+es_host: HELKIP
 es_port: 9200
 alert_time_limit:
   days: 1
diff --git a/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml b/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml
index e33de006..ee705930 100644
--- a/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml
+++ b/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml
@@ -1,4 +1,5 @@
 alert:
+- debug
 - email
 email:
   - "support@infinitehosting.uk"
 smtp_host: "smtp.outlook.com" #for O365 email addresses
 smtp_port: 587 #for O365 email addresses
 smtp_ssl: true
 from_addr: "support@infinitehosting.uk"
-smtp_auth_file: "smtp_auth_file.yaml"
+smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml"
 description: Detects potential suspicious powershell parameters
 filter:
 - query:
diff --git a/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml b/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml
index beabda3c..bacce58a 100644
--- a/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml
+++ b/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml
@@ -1,5 +1,13 @@
 alert:
 - debug
+- email
+email:
+  - "support@infinitehosting.uk"
+smtp_host: "smtp.outlook.com" #for O365 email addresses
+smtp_port: 587 #for O365 email addresses
+smtp_ssl: true
+from_addr: "support@infinitehosting.uk"
+smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml"
 description: Detects potential DCSync backdoor user
 filter:
 - query:
@@ -10,4 +18,4 @@ name: Windows-DCSync-backdoor-user_0
 priority: 2
 realert:
   minutes: 0
-type: any
\ No newline at end of file
+type: any
diff --git a/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml
b/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml index 85a8ad0b..99903a65 100644 --- a/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml +++ b/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential DCSync from non-dcs filter: - query: @@ -10,4 +18,4 @@ name: Windows-dcsync-non-dc_0 priority: 2 realert: minutes: 0 -type: any \ No newline at end of file +type: any diff --git a/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml b/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml index cdf9b538..233a94b3 100644 --- a/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml +++ b/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential use of rubeus via the trusted logon process filter: - query: @@ -10,4 +18,4 @@ name: Windows-Rubeus-logon-process_0 priority: 2 realert: minutes: 0 -type: any \ No newline at end of file +type: any diff --git a/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml b/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml index 5264be09..f9f24ce1 100644 --- a/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml +++ b/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects suspicious use the common chrome named pipe filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_bits.yml b/docker/helk-elastalert/rules/helk_sysmon_bits.yml index a00b6bba..5c40e568 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_bits.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_bits.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects suspicious bits activity filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml b/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml index 520c3110..6784bdbd 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects cmdline files being created on 
victim filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml b/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml index c832f9ad..c317b52a 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential use of named pipes by Cobalt Strike agents filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml b/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml index 4b569c91..c46507c0 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential use of CS with rundll32 calling out filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml b/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml index 16849603..bea19984 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential Csharp compiling filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml b/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml index 240a9ff3..70f222cc 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential lateral movement via dcom filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml b/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml index bf20d2ea..f3a70383 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential internal monologue filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml b/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml index 
887d6c6f..c9989204 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential calls to DCs via Kerberos port from non lsass filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml b/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml index a4dfd09d..8f1f5aa8 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential lateral movement via logon scripts filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml b/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml index 5f9f23ee..c7f70c96 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential recon for admininstrators via net samrp filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml b/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml index 640d6d51..3e882333 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential recon for domain admins via net samrp filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml b/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml index 618b051a..82df4c53 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects adversaries accessing services security descriptors filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml b/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml index 98617065..30642636 100644 --- 
a/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential scriptable handlers filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml b/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml index f7f3a8db..754e9c2a 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects rare childs from services.exe (gold image based) filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml b/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml index 98b86264..88f955ef 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects adversaries clearing logs via wevtutil filter: - query: diff --git a/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml b/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml index 38276880..6f9b2705 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects use of winrm over the network filter: - query: diff --git a/docker/helk-elastalert/rules/helk_system_psexec_psh.yml b/docker/helk-elastalert/rules/helk_system_psexec_psh.yml index b2c589a8..57f194b2 100644 --- a/docker/helk-elastalert/rules/helk_system_psexec_psh.yml +++ b/docker/helk-elastalert/rules/helk_system_psexec_psh.yml @@ -1,5 +1,13 @@ alert: - debug +- email +email: + - "support@infinitehosting.uk" +smtp_host: "smtp.outlook.com" #for O365 email addresses +smtp_port: 587 #for O365 email addresses +smtp_ssl: true +from_addr: "support@infinitehosting.uk" +smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential psexec via powershell module filter: - query: diff --git a/docker/helk-elastalert/smtp_auth_file.yaml b/docker/helk-elastalert/smtp_auth_file.yaml new file mode 100644 index 00000000..2ff9bde7 --- /dev/null +++ b/docker/helk-elastalert/smtp_auth_file.yaml @@ -0,0 +1,2 @@ +user: "" +password: "" \ No newline at end of file diff --git a/docker/helk-kibana-analysis-alert-basic.yml 
b/docker/helk-kibana-analysis-alert-basic.yml index 5eb6a849..b4f3efda 100644 --- a/docker/helk-kibana-analysis-alert-basic.yml +++ b/docker/helk-kibana-analysis-alert-basic.yml @@ -153,6 +153,17 @@ services: ES_PORT: 9200 networks: helk: + helk-curator: + image: alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk-kibana-analysis-alert-trial.yml b/docker/helk-kibana-analysis-alert-trial.yml index 60daf4b5..2e66244b 100644 --- a/docker/helk-kibana-analysis-alert-trial.yml +++ b/docker/helk-kibana-analysis-alert-trial.yml @@ -155,6 +155,17 @@ services: ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} networks: helk: + helk-curator: + image: alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk-kibana-analysis-basic.yml b/docker/helk-kibana-analysis-basic.yml index 08cd651d..92a16a98 100644 --- a/docker/helk-kibana-analysis-basic.yml +++ b/docker/helk-kibana-analysis-basic.yml @@ -142,6 +142,17 @@ services: tty: true networks: helk: + helk-curator: + image: alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk-kibana-analysis-trial.yml b/docker/helk-kibana-analysis-trial.yml index cdf72d38..5364106e 100644 --- a/docker/helk-kibana-analysis-trial.yml +++ b/docker/helk-kibana-analysis-trial.yml @@ -143,6 +143,17 @@ services: tty: true networks: helk: + helk-curator: + image: alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk-kibana-notebook-analysis-alert-basic.yml b/docker/helk-kibana-notebook-analysis-alert-basic.yml index 7168100f..c359c9f0 100644 --- a/docker/helk-kibana-notebook-analysis-alert-basic.yml +++ b/docker/helk-kibana-notebook-analysis-alert-basic.yml @@ -192,6 +192,17 @@ services: ES_PORT: 9200 networks: helk: + helk-curator: + image: alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk-kibana-notebook-analysis-alert-trial.yml b/docker/helk-kibana-notebook-analysis-alert-trial.yml index 9baedfa9..b4882881 100644 --- a/docker/helk-kibana-notebook-analysis-alert-trial.yml +++ b/docker/helk-kibana-notebook-analysis-alert-trial.yml @@ -193,6 +193,17 @@ services: ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} networks: helk: + helk-curator: + image: alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk-kibana-notebook-analysis-basic.yml b/docker/helk-kibana-notebook-analysis-basic.yml index b6be805a..627ffc6b 100644 --- a/docker/helk-kibana-notebook-analysis-basic.yml +++ b/docker/helk-kibana-notebook-analysis-basic.yml @@ -181,6 +181,17 @@ services: - helk-spark-master networks: helk: + helk-curator: + image: 
alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk-kibana-notebook-analysis-trial.yml b/docker/helk-kibana-notebook-analysis-trial.yml index 246ed623..68514486 100644 --- a/docker/helk-kibana-notebook-analysis-trial.yml +++ b/docker/helk-kibana-notebook-analysis-trial.yml @@ -183,6 +183,17 @@ services: - helk-spark-master networks: helk: + helk-curator: + image: alfiej04/helk-curator:0.0.2 + container_name: helk-curator + restart: always + depends_on: + - helk-elasticsearch + environment: + ES_HOST: helk-elasticsearch + ES_PORT: 9200 + networks: + helk: networks: helk: diff --git a/docker/helk_install.sh b/docker/helk_install.sh index 726e6559..c8f130b5 100755 --- a/docker/helk_install.sh +++ b/docker/helk_install.sh @@ -3,7 +3,7 @@ # HELK script: helk_install.sh # HELK script description: HELK installation # HELK build Stage: Alpha -# Author: Roberto Rodriguez (@Cyb3rWard0g) +# Author: Ashlee Jones (@AshleeJones04) # License: GPL-3.0 # *********** Helk log tagging variables *************** @@ -12,7 +12,7 @@ HELK_INFO_TAG="[HELK-INSTALLATION-INFO]" HELK_ERROR_TAG="[HELK-INSTALLATION-ERROR]" # Make sure to use "echo -e" with this variable INSTALL_ERROR_CHECK_WIKI="$HELK_ERROR_TAG Check the requirements section in our installation Wiki\ -\n$HELK_ERROR_TAG Installation Wiki: https://github.com/Cyb3rWard0g/HELK/wiki/Installation" +\n$HELK_ERROR_TAG Installation Wiki: https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Installation" # *********** Variables for user modification *************** # Careful editing unless you know what you are doing :) @@ -499,6 +499,7 @@ show_final_information(){ echo "HELK KIBANA URL: https://${HOST_IP}" echo "HELK KIBANA USER: helk" echo "HELK KIBANA PASSWORD: ${KIBANA_UI_PASSWORD_INPUT}" + echo "HELK Elasticsearch URL: http://${HOST_IP}:9200" echo "HELK SPARK MASTER UI: http://${HOST_IP}:8080" echo "HELK JUPYTER SERVER URL: http://${HOST_IP}/jupyter" get_jupyter_credentials diff --git a/wiki/Architecture-Overview.md b/wiki/Architecture-Overview.md new file mode 100644 index 00000000..bd7c3ddf --- /dev/null +++ b/wiki/Architecture-Overview.md @@ -0,0 +1,36 @@ +# Design +[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/HELK_Design.png]] + +The HELK follows the native flow of an ELK stack with events being sent (preferably from Winlogbeat for now) to Kafka brokers. Next, they get filtered by Logstash and sent over to an Elasticsearch database. Then, they can be visualized in a Kibana instance. However, what sets the HELK apart from other ELK builds is the extra analytic capabilities provided by Apache Spark, GraphFrames and Jupyter. More soon.... + +# Core Components Definitions +## Kafka +"Kafka is a distributed publish-subscribe messaging system used for building real-time data pipelines and streaming apps. It is horizontally scalable, fault-tolerant, wicked fast, and runs in production in thousands of companies." [Kafka](https://kafka.apache.org/) + +## Elasticsearch +"Elasticsearch is a highly scalable open-source full-text search and analytics engine. It allows you to store, search, and analyze big volumes of data quickly and in near real time. It is generally used as the underlying engine/technology that powers applications that have complex search features and requirements." 
[Elastic Reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/getting-started.html)
+
+## Logstash
+"Logstash is an open source data collection engine with real-time pipelining capabilities. Logstash can dynamically unify data from disparate sources and normalize the data into destinations of your choice. Cleanse and democratize all your data for diverse advanced downstream analytics and visualization use cases." [Elastic Reference](https://www.elastic.co/guide/en/logstash/current/introduction.html)
+
+## Kibana
+"Kibana is an open source analytics and visualization platform designed to work with Elasticsearch. You use Kibana to search, view, and interact with data stored in Elasticsearch indices. You can easily perform advanced data analysis and visualize your data in a variety of charts, tables, and maps.
+Kibana makes it easy to understand large volumes of data. Its simple, browser-based interface enables you to quickly create and share dynamic dashboards that display changes to Elasticsearch queries in real time." [Elastic Reference](https://www.elastic.co/guide/en/kibana/current/introduction.html)
+
+## ES-Hadoop
+"Elasticsearch for Apache Hadoop is an open-source, stand-alone, self-contained, small library that allows Hadoop jobs (whether using Map/Reduce or libraries built upon it such as Hive, Pig or Cascading or new upcoming libraries like Apache Spark) to interact with Elasticsearch. One can think of it as a connector that allows data to flow bi-directionally so that applications can leverage transparently the Elasticsearch engine capabilities to significantly enrich their capabilities and increase the performance." [Elastic Reference](https://www.elastic.co/guide/en/elasticsearch/hadoop/current/reference.html)
+
+## Apache Spark
+"Apache Spark is a fast and general-purpose cluster computing system. It provides high-level APIs in Java, Scala, Python and R, and an optimized engine that supports general execution graphs. It also supports a rich set of higher-level tools including Spark SQL for SQL and structured data processing, MLlib for machine learning, GraphX for graph processing, and Spark Streaming." [Apache Spark Reference](https://spark.apache.org/docs/latest/)
+
+## GraphFrames
+"GraphFrames is a package for Apache Spark which provides DataFrame-based Graphs. It provides high-level APIs in Scala, Java, and Python. It aims to provide both the functionality of GraphX and extended functionality taking advantage of Spark DataFrames. This extended functionality includes motif finding, DataFrame-based serialization, and highly expressive graph queries." [Graphframes Reference](https://graphframes.github.io/)
+
+## Jupyter Notebook
+"The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more." [Jupyter Reference](http://jupyter.org/)
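With the components above wired together, a quick way to confirm that the core services respond is to probe the endpoints exposed by this patch series; a rough sanity check assuming the default ports printed by `helk_install.sh` plus the 9200 mapping added at the start of the series (run it from the HELK host itself, or swap in its IP):
```
# Elasticsearch (9200 is mapped to the host earlier in this patch series)
curl -s http://localhost:9200/_cluster/health?pretty

# Kibana, served through NGINX
curl -sk -o /dev/null -w '%{http_code}\n' https://localhost/

# Spark Master UI
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8080
```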
+
+# Enrichments
+Another component that sets the HELK apart from other ELK builds is the different enrichments applied to the data it collects.
+## AlienVault OTX
+"The AlienVault Open Threat Exchange (OTX) is the world’s most authoritative open threat information sharing and analysis network. OTX provides access to a global community of threat researchers and security professionals, with more than 50,000 participants in 140 countries, who contribute over four million threat indicators daily." [AlienVault OTX Reference](https://www.alienvault.com/documentation/otx/about-otx.htm)
\ No newline at end of file
diff --git a/wiki/Check-Kafka-topic-ingestion.md b/wiki/Check-Kafka-topic-ingestion.md
new file mode 100644
index 00000000..1d0589ff
--- /dev/null
+++ b/wiki/Check-Kafka-topic-ingestion.md
@@ -0,0 +1,29 @@
+There are a few ways that you can accomplish this.
+
+# HELK's Kafka broker container
+
+Access your kafka broker container by running the following command:
+```
+sudo docker exec -ti helk-kafka-broker bash
+```
+
+Run the `kafka-console-consumer.sh` script available in the container:
+```
+/opt/helk/kafka/bin/kafka-console-consumer.sh --bootstrap-server helk-kafka-broker:9092 --topic winlogbeat --from-beginning
+```
+
+Or simply run the script without an interactive shell:
+```
+sudo docker exec -ti helk-kafka-broker /opt/helk/kafka/bin/kafka-console-consumer.sh --bootstrap-server helk-kafka-broker:9092 --topic winlogbeat --from-beginning
+```
+
+# Kafkacat
+It is a generic non-JVM producer and consumer for Apache Kafka >=0.8; think of it as a netcat for Kafka. You can install it by following the [instructions](https://github.com/edenhill/kafkacat#install) from the Kafkacat repo.
+```
+kafkacat -b 10.0.10.100:9092 -t winlogbeat -C
+```
+
+# References
+* [Kafka Consumer Example](https://kafka.apache.org/quickstart#quickstart_consume)
+* [Kafkacat](https://github.com/edenhill/kafkacat)
+
diff --git a/wiki/Check-Winlogbeat-shipping.md b/wiki/Check-Winlogbeat-shipping.md
new file mode 100644
index 00000000..2a443771
--- /dev/null
+++ b/wiki/Check-Winlogbeat-shipping.md
@@ -0,0 +1,6 @@
+You can check how your logs are being sent to the HELK by running the following command on your systems (producers):
+```
+winlogbeat.exe -e
+```
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KAFKA-producer1.png]]
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KAFKA-producer2.png]]
\ No newline at end of file
diff --git a/wiki/Create-Plugins-Offline-Package.md b/wiki/Create-Plugins-Offline-Package.md
new file mode 100644
index 00000000..85f86728
--- /dev/null
+++ b/wiki/Create-Plugins-Offline-Package.md
@@ -0,0 +1,44 @@
+If you are installing HELK and the `helk-logstash` extra plugins are still being installed over the Internet, you can use the following steps to export them in a zipped offline package that can then be loaded on the system that has no Internet access and is stuck installing plugins.
+
+Remember that you will need to do this on a system where HELK is already installed and the plugins were installed successfully. A condensed sketch of the whole flow is shown next; the detailed steps follow.
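A condensed sketch, assuming the same Logstash 6.6.1 paths and plugin list used in the detailed steps below (wrapping the final install command in `docker exec` is an assumption; the command itself is the one `logstash-plugin` prints):
```
# On the Internet-connected HELK host: build the offline package inside helk-logstash
sudo docker exec -ti helk-logstash bin/logstash-plugin prepare-offline-pack \
    logstash-filter-translate logstash-filter-dns logstash-filter-cidr  # ...rest of the plugin list shown below

# Copy the package out of the container and ship it to the isolated host
sudo docker cp helk-logstash:/usr/share/logstash/logstash-offline-plugins-6.6.1.zip .
scp logstash-offline-plugins-6.6.1.zip helk@10.0.10.102:/home/helk/

# On the isolated host: install it from inside its helk-logstash container
sudo docker exec -ti helk-logstash bin/logstash-plugin install \
    file:///usr/share/logstash/logstash-offline-plugins-6.6.1.zip
```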
+ +* Access your helk-logstash docker container in the system where HELK was successfully installed already: + +``` +helk@ONLINE-HELK:~$ sudo docker exec -ti helk-logstash bash + +bash-4.2$ + +``` + +* Using the `logstash-plugin` script prepare and export the plugins offline package + +``` +bash-4.2$ bin/logstash-plugin prepare-offline-pack logstash-filter-translate logstash-filter-dns logstash-filter-cidr logstash-filter-geoip logstash-filter-dissect logstash-output-kafka logstash-input-kafka logstash-filter-alter logstash-filter-fingerprint logstash-filter-prune logstash-codec-gzip_lines logstash-codec-netflow logstash-filter-i18n logstash-filter-environment logstash-filter-de_dot logstash-input-wmi logstash-filter-clone +Offline package created at: /usr/share/logstash/logstash-offline-plugins-6.6.1.zip + +You can install it with this command `bin/logstash-plugin install file:///usr/share/logstash/logstash-offline-plugins-6.6.1.zip` + +bash-4.2$ ls /usr/share/logstash/ +bin CONTRIBUTORS data Gemfile.lock LICENSE.txt logstash-core-plugin-api modules output_templates scripts vendor +config cti Gemfile lib logstash-core logstash-offline-plugins-6.6.1.zip NOTICE.TXT pipeline tools x-pack + +bash-4.2$ exit +exit +``` + +* Copy the offline package from your helk-logstash container to your local system + +``` +helk@ONLINE-HELK:~$ sudo docker cp helk-logstash:/usr/share/logstash/logstash-offline-plugins-6.6.1.zip . +helk@ONLINE-HELK:~$ ls +logstash-offline-plugins-6.6.1.zip +``` + +* Copy the `logstash-offline-plugins-6.6.1.zip` to the OFFLINE-ISOLATED (10.0.10.102) system. You should be able to ssh to it. + +``` +helk@ONLINE-HELK:~$ scp logstash-offline-plugins-6.6.1.zip helk@10.0.10.102:/home/helk/ +``` + +Now you should be able to use it in the offline-isolated HELK system \ No newline at end of file diff --git a/wiki/Curator.md b/wiki/Curator.md new file mode 100644 index 00000000..e69de29b diff --git a/wiki/Deploy-KSQL-CLI-Locally.md b/wiki/Deploy-KSQL-CLI-Locally.md new file mode 100644 index 00000000..35928735 --- /dev/null +++ b/wiki/Deploy-KSQL-CLI-Locally.md @@ -0,0 +1,161 @@ +You can use KSQL CLI to connect to the HELK's KSQL Server from a different system. 
You will have to download the self-managed software Confluent platform and then run `KSQL` + +* Download the self-managed software Confluent platform in a `.tar.gz` format from: https://www.confluent.io/download/#popup_form_3109 +* Decompress the folder: +``` +Robertos-MBP:~ wardog$ +Robertos-MBP:~ wardog$ cd Downloads/ +Robertos-MBP:Downloads wardog$ tar -xvzf confluent-5.1.2-2.11.tar.gz +x confluent-5.1.2/ +x confluent-5.1.2/src/ +x confluent-5.1.2/src/avro-cpp-1.8.0-confluent5.1.2.tar.gz +x confluent-5.1.2/src/librdkafka-0.11.6-confluent5.1.2.tar.gz +x confluent-5.1.2/src/confluent-libserdes-5.1.2.tar.gz +x confluent-5.1.2/src/avro-c-1.8.0-confluent5.1.2.tar.gz +x confluent-5.1.2/lib/ +``` +* Access the KSQL scripts: + +``` +Robertos-MBP:Downloads wardog$ +Robertos-MBP:Downloads wardog$ cd confluent-5.1.2 +Robertos-MBP:confluent-5.1.2 wardog$ +Robertos-MBP:confluent-5.1.2 wardog$ ls +README bin etc lib logs share src +Robertos-MBP:confluent-5.1.2 wardog$ +Robertos-MBP:confluent-5.1.2 wardog$ cd bin/ +Robertos-MBP:bin wardog$ +Robertos-MBP:bin wardog$ ls +confluent kafka-acls kafka-mirror-maker kafka-server-stop schema-registry-start +confluent-hub kafka-api-start kafka-mqtt-run-class kafka-streams-application-reset schema-registry-stop +confluent-rebalancer kafka-avro-console-consumer kafka-mqtt-start kafka-topics schema-registry-stop-service +connect-distributed kafka-avro-console-producer kafka-mqtt-stop kafka-verifiable-consumer security-plugins-run-class +connect-standalone kafka-broker-api-versions kafka-preferred-replica-election kafka-verifiable-producer sr-acl-cli +control-center-3_0_0-reset kafka-configs kafka-producer-perf-test ksql support-metrics-bundle +control-center-3_0_1-reset kafka-console-consumer kafka-reassign-partitions ksql-datagen windows +control-center-console-consumer kafka-console-producer kafka-replica-verification ksql-print-metrics zookeeper-security-migration +control-center-export kafka-consumer-groups kafka-rest-run-class ksql-run-class zookeeper-server-start +control-center-reset kafka-consumer-perf-test kafka-rest-start ksql-server-start zookeeper-server-stop +control-center-run-class kafka-delegation-tokens kafka-rest-stop ksql-server-stop zookeeper-shell +control-center-set-acls kafka-delete-records kafka-rest-stop-service ksql-stop +control-center-start kafka-dump-log kafka-run-class replicator +control-center-stop kafka-log-dirs kafka-server-start schema-registry-run-class +Robertos-MBP:bin wardog$ + +``` + +* Check the options for KSQL: + +``` +Robertos-MBP:bin wardog$ +Robertos-MBP:bin wardog$ ./ksql --help +NAME + ksql - KSQL CLI + +SYNOPSIS + ksql [ --config-file ] [ {-h | --help} ] + [ --output ] + [ --query-row-limit ] + [ --query-timeout ] [--] + +OPTIONS + --config-file + A file specifying configs for Ksql and its underlying Kafka Streams + instance(s). Refer to KSQL documentation for a list of available + configs. 
+ + -h, --help + Display help information + + --output + The output format to use (either 'JSON' or 'TABULAR'; can be + changed during REPL as well; defaults to TABULAR) + + --query-row-limit + An optional maximum number of rows to read from streamed queries + + This options value must fall in the following range: value >= 1 + + + --query-timeout + An optional time limit (in milliseconds) for streamed queries + + This options value must fall in the following range: value >= 1 + + + -- + This option can be used to separate command-line options from the + list of arguments (useful when arguments might be mistaken for + command-line options) + + + The address of the Ksql server to connect to (ex: + http://confluent.io:9098) + + This option may occur a maximum of 1 times + + +Robertos-MBP:bin wardog$ +``` + +* Connect to the HELK KSQL Server. You will just need to point to the IP address of your HELK Docker environment over port 8088 + +``` +Robertos-MBP:bin wardog$ +Robertos-MBP:bin wardog$ ./ksql http://192.168.64.138:8088 + + =========================================== + = _ __ _____ ____ _ = + = | |/ // ____|/ __ \| | = + = | ' /| (___ | | | | | = + = | < \___ \| | | | | = + = | . \ ____) | |__| | |____ = + = |_|\_\_____/ \___\_\______| = + = = + = Streaming SQL Engine for Apache Kafka® = + =========================================== + +Copyright 2017-2018 Confluent Inc. + +CLI v5.1.2, Server v5.1.0 located at http://192.168.64.138:8088 + +Having trouble? Type 'help' (case-insensitive) for a rundown of how things work! + +ksql> +``` + +* Verify that you can see the topics available in the HELK Kafka broker + +``` +Robertos-MBP:bin wardog$ +Robertos-MBP:bin wardog$ ./ksql http://192.168.64.138:8088 + + =========================================== + = _ __ _____ ____ _ = + = | |/ // ____|/ __ \| | = + = | ' /| (___ | | | | | = + = | < \___ \| | | | | = + = | . \ ____) | |__| | |____ = + = |_|\_\_____/ \___\_\______| = + = = + = Streaming SQL Engine for Apache Kafka® = + =========================================== + +Copyright 2017-2018 Confluent Inc. + +CLI v5.1.2, Server v5.1.0 located at http://192.168.64.138:8088 + +Having trouble? Type 'help' (case-insensitive) for a rundown of how things work! + +ksql> SHOW TOPICS; + + Kafka Topic | Registered | Partitions | Partition Replicas | Consumers | ConsumerGroups +----------------------------------------------------------------------------------------- + filebeat | false | 1 | 1 | 0 | 0 + SYSMON_JOIN | false | 1 | 1 | 0 | 0 + winlogbeat | false | 1 | 1 | 0 | 0 + winsecurity | false | 1 | 1 | 0 | 0 + winsysmon | false | 1 | 1 | 0 | 0 +----------------------------------------------------------------------------------------- +ksql> +``` \ No newline at end of file diff --git a/wiki/Elasticsearch.md b/wiki/Elasticsearch.md new file mode 100644 index 00000000..90b7f828 --- /dev/null +++ b/wiki/Elasticsearch.md @@ -0,0 +1,100 @@ +# Design +[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/ELASTICSEARCH-Design.png]] + +# Settings +## HELK's Heap Size +Heap can be set one of four ways, as detailed below. + +#### 1) Allow HELK to calculate how much to assign. +This is based on the available memory and variables shown in the code block below. +It’s very important to note `available memory`, not the amount of memory the host has. +An example to show why this is critical to understand.. If you have a 100GB RAM server, but the server is actively using 90GBs of RAM - then you will NOT get the max 31GB heap/memory for elasticsearch. 
In this example you would actually end up getting roughly 3 GBs for the heap, because with only 10 GBs of available/free memory, locking up all of the remaining memory could cause drastic issues!
+```
+if available memory >= 1000 MBs and <= 5999 MBs:
+  then set to 1400 MBs
+else if available memory >= 6000 MBs and <= 12999 MBs:
+  then set to 3200 MBs
+else if available memory >= 13000 MBs and <= 16000 MBs:
+  then set to 6500 MBs
+else:
+  if available memory >= 31 GBs:
+    then set to 31 GBs
+  else:
+    set to available memory in GBs
+```
+
+#### 2) Set your own heap
+In order to define your own heap settings, edit the following two lines in the file `HELK/docker/helk-elasticsearch/config/jvm.options`, which begin with:
+`#-Xms`
+`#-Xmx`
+Then make sure to restart elasticsearch.
+**Always set the min and max JVM heap size to the same value.
+Also, you will be restarting elasticsearch, so your cluster will temporarily be down while the elasticsearch service/database comes back online.**
+Here is an example of how to perform the above:
+```
+# Edit the jvm.options file
+sudo nano HELK/docker/helk-elasticsearch/config/jvm.options
+# The resulting lines (the two mentioned above)
+# should look something like the following if you wanted to set the heap to 16GBs
+-Xms16g
+-Xmx16g
+# Restart elasticsearch
+docker restart helk-elasticsearch
+```
+
+#### 3) Add `ES_JAVA_OPTS` to the docker config file
+Which docker config file to use is shown later.
+You will add this value under `services.helk-elasticsearch.environment`.
+For example, if I used the option for ELK + Kafka with no license and no alerting, and I wanted to set the heap to 16GBs,
+then I would edit `HELK/docker/helk-kibana-analysis-basic.yml` and add the following line under the environment section:
+`- "ES_JAVA_OPTS=-Xms16g -Xmx16g"`
+Then make sure to rebuild the elasticsearch docker container.
+**Always set the min and max JVM heap size to the same value.
+Also, you will be restarting elasticsearch, so your cluster will temporarily be down while the elasticsearch service/database comes back online.**
+**Note: if you are using an (elastic) license you will need to set your ELASTIC_PASSWORD and KIBANA_UI_PASSWORD variables (and the logstash password if applicable)**
+Here is how to perform the above:
+```
+# Example config (only showing the beginning lines). Note that these settings may not
+# match your config exactly; the important thing is to have the value under the environment section
+version: '3.5'
+
+services:
+  helk-elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:7.3.1
+    container_name: helk-elasticsearch
+    secrets:
+      - source: elasticsearch.yml
+        target: /usr/share/elasticsearch/config/elasticsearch.yml
+    volumes:
+      - esdata:/usr/share/elasticsearch/data
+      - ./helk-elasticsearch/scripts:/usr/share/elasticsearch/scripts
+      - ./helk-elasticsearch/config/jvm.options:/usr/share/elasticsearch/config/jvm.options
+    entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh
+    environment:
+      - cluster.name=helk-cluster
+      - node.name=helk-1
+      - xpack.license.self_generated.type=basic
+      - xpack.security.enabled=false
+      - "ES_JAVA_OPTS= -Xms16g -Xmx16g"
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+      nproc: 20480
+      nofile:
+        soft: 160000
+        hard: 160000
+    restart: always
+    networks:
+      helk:
+# Rebuild the elasticsearch docker container
+docker-compose -f HELK/docker/helk-kibana-analysis-basic.yml up --build -d
+```
+
+#### 4) Set at run time using a custom bash variable
+For example, export a bash variable such as:
+`export ES_JAVA_OPTS="-Xms16g -Xmx16g"`
+Then run the following using your own docker config file:
+`docker-compose -f $PlaceDockerConfigFileNameHere up --build -d`
+**Only use this option if you explicitly need to. Please know what you're getting into ;)**
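+
+Whichever of the four options you use, a quick way to confirm the heap that Elasticsearch actually picked up is to query the `_cat/nodes` API. This is only a sketch: it assumes the default `helk-elasticsearch` container name and that security is disabled (add `-u elastic:<your-password>` to the curl command if you enabled a license with authentication):
+```
+sudo docker exec helk-elasticsearch curl -s 'localhost:9200/_cat/nodes?h=name,heap.max'
+```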
\ No newline at end of file
diff --git a/wiki/Export-Docker-Images-locally.md b/wiki/Export-Docker-Images-locally.md
new file mode 100644
index 00000000..61c02788
--- /dev/null
+++ b/wiki/Export-Docker-Images-locally.md
@@ -0,0 +1,80 @@
+If the system where you are planning to install HELK is isolated from the Internet, you can run HELK on another system that has access to the Internet and then export the built/downloaded images to `.tar` files. You can then load those image files on the system that is isolated from the Internet.
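+
+The rest of this page walks through each step by hand. If you would rather script the export, a loop like the following (illustrative only; it derives file names from the image names instead of the friendlier `helk-*.tar` names used below) combines the list-and-save steps:
+```
+# Save every local image to /home/helk, one tar per image
+for img in $(sudo docker images --format '{{.Repository}}:{{.Tag}}'); do
+  sudo docker save -o "/home/helk/$(echo "$img" | tr '/:' '_').tar" "$img"
+done
+```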
+ +* List all the images available in the non-isolated system via the docker `images` command + +``` +helk@ubuntu:~$ sudo docker images + +REPOSITORY TAG IMAGE ID CREATED SIZE +cyb3rward0g/helk-jupyter 0.1.1 efa46ecc8d32 2 days ago 2.18GB +confluentinc/cp-ksql-server 5.1.2 f57298019757 6 days ago 514MB +confluentinc/cp-ksql-cli 5.1.2 bd411ce0ba9f 6 days ago 510MB +docker.elastic.co/logstash/logstash 6.6.1 3e7fbb7964ee 11 days ago 786MB +docker.elastic.co/kibana/kibana 6.6.1 b94222148a00 11 days ago 710MB +docker.elastic.co/elasticsearch/elasticsearch 6.6.1 c6ffcb0ee97e 11 days ago 842MB +cyb3rward0g/helk-elastalert 0.2.1 569f588a22fc 3 weeks ago 758MB +cyb3rward0g/helk-kafka-broker 2.1.0 7b3e7f9ce732 2 months ago 388MB +cyb3rward0g/helk-zookeeper 2.1.0 abb732da3e50 2 months ago 388MB +cyb3rward0g/helk-spark-worker 2.4.0 b1545b0582db 2 months ago 579MB +cyb3rward0g/helk-spark-master 2.4.0 70fc61de3445 2 months ago 579MB +cyb3rward0g/helk-nginx 0.0.7 280d044b6719 6 months ago 329MB +``` + +* List all the containers running in the non-isolated system via the docker `ps` command + +``` +helk@ubuntu:~$ sudo docker ps + +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +de048c88dc7f confluentinc/cp-ksql-cli:5.1.2 "/bin/sh" 6 hours ago Up 6 hours helk-ksql-cli +69e06070c14c confluentinc/cp-ksql-server:5.1.2 "/etc/confluent/dock…" 6 hours ago Up 6 hours 0.0.0.0:8088->8088/tcp helk-ksql-server +d57967977c9c cyb3rward0g/helk-kafka-broker:2.1.0 "./kafka-entrypoint.…" 6 hours ago Up 6 hours 0.0.0.0:9092->9092/tcp helk-kafka-broker +4889e917d76d cyb3rward0g/helk-spark-worker:2.4.0 "./spark-worker-entr…" 6 hours ago Up 6 hours helk-spark-worker +c0a29d8b18a7 cyb3rward0g/helk-nginx:0.0.7 "/opt/helk/scripts/n…" 6 hours ago Up 6 hours 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp helk-nginx +6a887d693a31 cyb3rward0g/helk-elastalert:0.2.1 "./elastalert-entryp…" 6 hours ago Up 6 hours helk-elastalert +a32be7a399c7 cyb3rward0g/helk-zookeeper:2.1.0 "./zookeeper-entrypo…" 6 hours ago Up 6 hours 2181/tcp, 2888/tcp, 3888/tcp helk-zookeeper +c636a8a1e8f7 cyb3rward0g/helk-spark-master:2.4.0 "./spark-master-entr…" 6 hours ago Up 6 hours 7077/tcp, 0.0.0.0:8080->8080/tcp helk-spark-master +ef1b8d8015ab cyb3rward0g/helk-jupyter:0.1.1 "./jupyter-entrypoin…" 6 hours ago Up 6 hours 8000/tcp helk-jupyter +bafeeb1587cf docker.elastic.co/logstash/logstash:6.6.1 "/usr/share/logstash…" 6 hours ago Up 6 hours 0.0.0.0:5044->5044/tcp, 0.0.0.0:8531->8531/tcp, 9600/tcp helk-logstash +29b57e5c71e5 docker.elastic.co/kibana/kibana:6.6.1 "/usr/share/kibana/s…" 6 hours ago Up 6 hours 5601/tcp helk-kibana +48499aa83917 docker.elastic.co/elasticsearch/elasticsearch:6.6.1 "/usr/share/elastics…" 6 hours ago Up 6 hours 9200/tcp, 9300/tcp helk-elasticsearch +``` + +* Export images as `tar` files: + +``` +helk@ubuntu:~$ sudo docker save -o /home/helk/helk-ksql-cli.tar confluentinc/cp-ksql-cli:5.1.2 +helk@ubuntu:~$ sudo docker save -o /home/helk/helk-ksql-server.tar confluentinc/cp-ksql-server:5.1.2 +helk@ubuntu:~$ sudo docker save -o /home/helk/helk-kafka-broker.tar cyb3rward0g/helk-kafka-broker:2.1.0 +helk@ubuntu:~$ sudo docker save -o /home/helk/helk-spark-worker.tar cyb3rward0g/helk-spark-worker:2.4.0 +helk@ubuntu:~$ sudo docker save -o /home/helk/helk-nginx.tar cyb3rward0g/helk-nginx:0.0.7 +helk@ubuntu:~$ sudo docker save -o /home/helk/helk-elastalert.tar cyb3rward0g/helk-elastalert:0.2.1 +helk@ubuntu:~$ sudo docker save -o /home/helk/helk-zookeeper.tar cyb3rward0g/helk-zookeeper:2.1.0 +helk@ubuntu:~$ sudo docker save -o 
/home/helk/helk-spark-master.tar cyb3rward0g/helk-spark-master:2.4.0
+helk@ubuntu:~$ sudo docker save -o /home/helk/helk-logstash.tar docker.elastic.co/logstash/logstash:6.6.1
+helk@ubuntu:~$ sudo docker save -o /home/helk/helk-kibana.tar docker.elastic.co/kibana/kibana:6.6.1
+helk@ubuntu:~$ sudo docker save -o /home/helk/helk-elasticsearch.tar docker.elastic.co/elasticsearch/elasticsearch:6.6.1
+helk@ubuntu:~$ sudo docker save -o /home/helk/helk-jupyter.tar cyb3rward0g/helk-jupyter:0.1.1
+```
+* Check images
+
+```
+helk@ubuntu:~$ ls -l
+
+total 10810584
+drwxrwxr-x 9 helk helk       4096 Feb 24 21:01 HELK
+-rw------- 1 root root  778629632 Feb 25 03:07 helk-elastalert.tar
+-rw------- 1 root root  854236160 Feb 25 03:12 helk-elasticsearch.tar
+-rw------- 1 root root 2254629888 Feb 25 03:14 helk-jupyter.tar
+-rw------- 1 root root  395871744 Feb 25 03:04 helk-kafka-broker.tar
+-rw------- 1 root root  767277568 Feb 25 03:11 helk-kibana.tar
+-rw------- 1 root root  521177600 Feb 25 03:00 helk-ksql-cli.tar
+-rw------- 1 root root  525901824 Feb 25 03:02 helk-ksql-server.tar
+-rw------- 1 root root  810578944 Feb 25 03:09 helk-logstash.tar
+-rw------- 1 root root  335945728 Feb 25 03:06 helk-nginx.tar
+-rw------- 1 root root  587616768 Feb 25 03:08 helk-spark-master.tar
+-rw------- 1 root root  587616768 Feb 25 03:05 helk-spark-worker.tar
+-rw------- 1 root root  395854848 Feb 25 03:08 helk-zookeeper.tar
+
+helk@ubuntu:~$
+```
\ No newline at end of file
diff --git a/wiki/Home.md b/wiki/Home.md
new file mode 100644
index 00000000..02c846e2
--- /dev/null
+++ b/wiki/Home.md
@@ -0,0 +1,14 @@
+
+# What is HELK?
+HELK is an ELK (Elasticsearch, Logstash & Kibana) stack with advanced hunting analytic capabilities provided by the implementation of Spark & Graphframes technologies. The Hunting ELK, or simply the HELK, is one of the first public builds that enables data science features on top of an ELK stack for free. In addition, it comes with a Jupyter Notebook integration for prototyping Big Data/Machine Learning use cases. This stack provides a full-text search engine mixed with great visualizations, graph relational queries and advanced analytics.
+
+# Why HELK?
+Nowadays, enabling the right event logging and centralizing the collection of different data sources is finally becoming a basic security standard. This allows organizations not just to increase their level of visibility from an endpoint and network perspective, but also to adopt new concepts within their security teams, such as threat hunting. Even though it might seem that collecting a lot of data is all a hunt team needs to be successful, there are several challenges that hunters face when using large, unstructured and sometimes incomplete data. One of these challenges is making sense of the disparate data sources in an easy and consistent way when trying to effectively detect adversarial techniques.
+
+ELK stacks have already been adopted considerably by small and large organizations for data ingestion, storage and visualization. Therefore, using one as the main structure, with Spark and GraphFrames on top of it, allows hunt teams to effectively take their hunt skills and hunt program to the next level. This approach is affordable, scalable, and can be used during research or any other engagement where blue and red teams meet.
+
+# When and where do I use HELK?
+HELK was built primarily for research, but due to its flexible design, it can be deployed in larger environments with the right configurations and scalable infrastructure.
You can go from simply searching for a specific string to creating advanced graph queries and applying algorithms to the data stored in an Elasticsearch database. Therefore, there are a variety of use cases that can be prototyped with the HELK. The main implementation of this project is Threat Hunting (Active Defense).
+
+# How do I use HELK?
+If you have used an ELK stack before or followed any of the ["Chronicles of a Threat Hunter"](https://cyberwardog.blogspot.com/) series by [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g), you will find the HELK pretty easy to follow. The new data science features will be explained in more detail in the HOW TO section of this wiki. Also, stay tuned for future blog posts on how to use the new HELK capabilities. Follow [@THE_HELK](https://twitter.com/THE_HELK) & [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g) for any updates.
\ No newline at end of file
diff --git a/wiki/Installation.md b/wiki/Installation.md
new file mode 100644
index 00000000..ad7c35ce
--- /dev/null
+++ b/wiki/Installation.md
@@ -0,0 +1,272 @@
+# Requirements (Please Read Carefully)
+* **OS Name:**
+  * Ubuntu 18.04 (preferred)
+  * HELK uses the official Docker Community Edition (CE) bash script (Edge Version) to install Docker for you. The Docker CE Edge script supports the following distros: **ubuntu**, **debian**, **raspbian**, **centos**, and **fedora**.
+  * You can see the specific distro versions supported in the script [here](https://get.docker.com/).
+  * If you have Docker & Docker-Compose already installed on your system, make sure you uninstall them to avoid old, incompatible versions. Let HELK use the official Docker CE Edge script execution to install Docker.
+* **Processor/OS Architecture:**
+  * 64-bit, also known as x64, x86_64, AMD64 and Intel 64.
+  * FYI: old processors that don't support SSE4.2 instructions cannot start ML on elasticsearch. Since version 6.1, Elastic has been compiling the ML programs on the assumption that SSE4.2 instructions are available (See: https://github.com/Cyb3rWard0g/HELK/issues/321 and https://discuss.elastic.co/t/failed-to-start-machine-learning-on-elasticsearch-7-0-0/178216/7)
+* **Network Connection:** NAT or Bridge
+* **RAM:** There are four options, and the following are minimum requirements (include more if you are able).
+  * **Option 1: 5GB** includes `KAFKA + KSQL + ELK + NGNIX.`
+  * **Option 2: 5GB** includes `KAFKA + KSQL + ELK + NGNIX + ELASTALERT`
+  * **Option 3: 7GB** includes `KAFKA + KSQL + ELK + NGNIX + SPARK + JUPYTER`.
+  * **Option 4: 8GB** includes `KAFKA + KSQL + ELK + NGNIX + SPARK + JUPYTER + ELASTALERT`.
+* **Cores:** 4 (minimum)
+* **Disk:** 25GB for testing purposes and 100GB+ for production (minimum)
+* **Applications:**
+  * Docker: 18.06.1-ce+ & Docker-Compose (HELK INSTALLS THIS FOR YOU)
+  * [Winlogbeat](https://www.elastic.co/downloads/beats/winlogbeat) running on your endpoints or on a centralized WEF server (that your endpoints are forwarding to).
+    * You can install Winlogbeat by following one of [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g)'s posts [here](https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_87.html).
+    * The [Winlogbeat config](https://github.com/Cyb3rWard0g/HELK/blob/master/winlogbeat/winlogbeat.yml) recommended by the HELK uses the [Kafka output plugin](https://www.elastic.co/guide/en/beats/winlogbeat/current/kafka-output.html) and already points to the right ports with the recommended options. You will just have to add your HELK's IP address (see the example right after this list).
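+
+For example, assuming your HELK host were at 192.168.64.138 (replace this with your own IP), the only change you would make to the recommended Winlogbeat config is the `hosts` line of the Kafka output:
+```
+output.kafka:
+  hosts: ["192.168.64.138:9092"]
+```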
+
+# HELK Download
+Run the following command to clone the HELK repo via git.
+```
+git clone https://github.com/Cyb3rWard0g/HELK.git
+```
+Change your current directory location to the new HELK directory, and run the **helk_install.sh** bash script as root.
+```
+cd HELK/docker
+sudo ./helk_install.sh
+```
+# HELK Install
+In order to make the installation of the HELK easy for everyone, the project comes with an install script named **helk_install.sh**. This script builds and runs everything you need for HELK automatically. During the installation process, the script will allow you to set up the following:
+* Set the HELK's option. For this document we are going to use option 2 (KAFKA + KSQL + ELK + NGNIX + ELASTALERT + SPARK + JUPYTER)
+* Set the Kibana user's password. The default user is **helk**
+* Set the HELK's IP. By default you can confirm that you want to use your HOST IP address for the HELK, unless you want to use a different one. Press \[Return\] or let the script continue on its own (it sleeps for 30 seconds).
+* Set the HELK's license subscription. By default the HELK has the **basic** subscription selected. You can set it to **trial** if you want. If you want to learn more about subscriptions go [here](https://www.elastic.co/subscriptions)
+  * If the license is set to **trial**, HELK asks you to set the password for the **elastic** account.
+```
+helk@ubuntu:~$
+helk@ubuntu:~$ ls
+HELK
+helk@ubuntu:~$ cd HELK/docker/
+helk@ubuntu:~/HELK/docker$ sudo ./helk_install.sh
+
+**********************************************
+**          HELK - THE HUNTING ELK          **
+**                                          **
+** Author: Roberto Rodriguez (@Cyb3rWard0g) **
+** HELK build version: v0.1.7-alpha02262019 **
+** HELK ELK version: 6.6.1                  **
+** License: GPL-3.0                         **
+**********************************************
+
+[HELK-INSTALLATION-INFO] HELK being hosted on a Linux box
+[HELK-INSTALLATION-INFO] Available Memory: 12463 MBs
+[HELK-INSTALLATION-INFO] You're using ubuntu version xenial
+
+*****************************************************
+*      HELK - Docker Compose Build Choices          *
+*****************************************************
+
+1. KAFKA + KSQL + ELK + NGNIX + ELASTALERT
+2. KAFKA + KSQL + ELK + NGNIX + ELASTALERT + SPARK + JUPYTER
+
+Enter build choice [ 1 - 2]: 2
+[HELK-INSTALLATION-INFO] HELK build set to 2
+[HELK-INSTALLATION-INFO] Set HELK elastic subscription (basic or trial): basic
+[HELK-INSTALLATION-INFO] Set HELK IP. Default value is your current IP: 192.168.64.138
+[HELK-INSTALLATION-INFO] Set HELK Kibana UI Password: hunting
+[HELK-INSTALLATION-INFO] Verify HELK Kibana UI Password: hunting
+[HELK-INSTALLATION-INFO] Docker already installed
+[HELK-INSTALLATION-INFO] Making sure you assigned enough disk space to the current Docker base directory
+[HELK-INSTALLATION-INFO] Available Docker Disk: 67 GBs
+[HELK-INSTALLATION-INFO] Installing docker-compose..
+[HELK-INSTALLATION-INFO] Checking local vm.max_map_count variable and setting it to 4120294
+[HELK-INSTALLATION-INFO] Building & running HELK from helk-kibana-notebook-analysis-basic.yml file..
+[HELK-INSTALLATION-INFO] Waiting for some services to be up .....
+....
+......
+```
+# Monitor HELK Installation Logs (Always)
+Once the installation kicks in, it will start showing you pre-defined messages about the installation, but not all the details of what is actually happening in the background. It is designed that way to keep your main screen clean and let you know where it is in the installation process.
+
+I recommend always opening another shell and monitoring the HELK installation logs with the **tail** command, pointing it at the **/var/log/helk-install.log** file that the **helk_install** script creates as soon as it runs. This log file is available on your local host even if you are deploying the HELK via Docker (I want to make sure it is clear that it is a local file).
+```
+helk@HELK:~$ tail -f /var/log/helk-install.log
+
+Creating network "docker_helk" with driver "bridge"
+Creating volume "docker_esdata" with local driver
+Pulling helk-elasticsearch (docker.elastic.co/elasticsearch/elasticsearch:6.6.1)...
+6.6.1: Pulling from elasticsearch/elasticsearch
+Pulling helk-kibana (docker.elastic.co/kibana/kibana:6.6.1)...
+6.6.1: Pulling from kibana/kibana
+Pulling helk-logstash (docker.elastic.co/logstash/logstash:6.6.1)...
+6.6.1: Pulling from logstash/logstash
+Pulling helk-jupyter (cyb3rward0g/helk-jupyter:0.1.2)...
+0.1.2: Pulling from cyb3rward0g/helk-jupyter
+Pulling helk-nginx (cyb3rward0g/helk-nginx:0.0.7)...
+0.0.7: Pulling from cyb3rward0g/helk-nginx
+Pulling helk-spark-master (cyb3rward0g/helk-spark-master:2.4.0-a)...
+2.4.0-a: Pulling from cyb3rward0g/helk-spark-master
+Pulling helk-spark-worker (cyb3rward0g/helk-spark-worker:2.4.0-a)...
+2.4.0-a: Pulling from cyb3rward0g/helk-spark-worker
+Pulling helk-zookeeper (cyb3rward0g/helk-zookeeper:2.1.0)...
+2.1.0: Pulling from cyb3rward0g/helk-zookeeper
+Pulling helk-kafka-broker (cyb3rward0g/helk-kafka-broker:2.1.0)...
+2.1.0: Pulling from cyb3rward0g/helk-kafka-broker
+Pulling helk-ksql-server (confluentinc/cp-ksql-server:5.1.2)...
+5.1.2: Pulling from confluentinc/cp-ksql-server
+Pulling helk-ksql-cli (confluentinc/cp-ksql-cli:5.1.2)...
+5.1.2: Pulling from confluentinc/cp-ksql-cli
+Pulling helk-elastalert (cyb3rward0g/helk-elastalert:0.2.1)...
+0.2.1: Pulling from cyb3rward0g/helk-elastalert
+Creating helk-elasticsearch ... done
+Creating helk-kibana ... done
+Creating helk-logstash ... done
+Creating helk-spark-master ... done
+Creating helk-elastalert ... done
+Creating helk-zookeeper ... done
+Creating helk-jupyter ... done
+Creating helk-spark-worker ... done
+Creating helk-kafka-broker ... done
+Creating helk-nginx ... done
+Creating helk-ksql-server ... done
+Creating helk-ksql-cli ...
done +``` +Once you see that the containers have been created you can check all the containers running by executing the following: + +``` +helk@HELK:~$ sudo docker ps + +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +968576241e9c confluentinc/cp-ksql-server:5.1.2 "/etc/confluent/dock…" 28 minutes ago Up 26 minutes 0.0.0.0:8088->8088/tcp helk-ksql-server +154593559d13 cyb3rward0g/helk-kafka-broker:2.1.0 "./kafka-entrypoint.…" 28 minutes ago Up 26 minutes 0.0.0.0:9092->9092/tcp helk-kafka-broker +d883541a64f1 cyb3rward0g/helk-nginx:0.0.7 "/opt/helk/scripts/n…" About an hour ago Up 26 minutes 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp helk-nginx +527ef236543a cyb3rward0g/helk-spark-worker:2.4.0-a "./spark-worker-entr…" About an hour ago Up 26 minutes helk-spark-worker +27cfaf7a8e84 cyb3rward0g/helk-jupyter:0.1.2 "./jupyter-entrypoin…" About an hour ago Up 26 minutes 8000/tcp, 8888/tcp helk-jupyter +75002248e916 cyb3rward0g/helk-zookeeper:2.1.0 "./zookeeper-entrypo…" About an hour ago Up 26 minutes 2181/tcp, 2888/tcp, 3888/tcp helk-zookeeper +ee0120167ffa cyb3rward0g/helk-elastalert:0.2.1 "./elastalert-entryp…" About an hour ago Up 26 minutes helk-elastalert +4dc2722cdd53 cyb3rward0g/helk-spark-master:2.4.0-a "./spark-master-entr…" About an hour ago Up 26 minutes 7077/tcp, 0.0.0.0:8080->8080/tcp helk-spark-master +9c1eb230b0ff docker.elastic.co/logstash/logstash:6.6.1 "/usr/share/logstash…" About an hour ago Up 26 minutes 0.0.0.0:5044->5044/tcp, 0.0.0.0:8531->8531/tcp, 9600/tcp helk-logstash +f018f16d9792 docker.elastic.co/kibana/kibana:6.6.1 "/usr/share/kibana/s…" About an hour ago Up 26 minutes 5601/tcp helk-kibana +6ec5779e9e01 docker.elastic.co/elasticsearch/elasticsearch:6.6.1 "/usr/share/elastics…" About an hour ago Up 26 minutes 9200/tcp, 9300/tcp helk-elasticsearch + +``` + +If you want to monitor the resources being utilized (Memory, CPU, etc), you can run the following: +``` +helk@HELK:~$ sudo docker stats --all + +CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS +ba46d256ee18 helk-ksql-cli 0.00% 0B / 0B 0.00% 0B / 0B 0B / 0B 0 +968576241e9c helk-ksql-server 1.43% 242MiB / 12.62GiB 1.87% 667kB / 584kB 96.1MB / 73.7kB 29 +154593559d13 helk-kafka-broker 2.83% 318.7MiB / 12.62GiB 2.47% 1.47MB / 1.6MB 50.7MB / 2.01MB 67 +d883541a64f1 helk-nginx 0.10% 3.223MiB / 12.62GiB 0.02% 14.7MB / 14.8MB 9.35MB / 12.3kB 5 +527ef236543a helk-spark-worker 0.43% 177.7MiB / 12.62GiB 1.38% 19.5kB / 147kB 37.1MB / 32.8kB 28 +27cfaf7a8e84 helk-jupyter 0.12% 45.42MiB / 12.62GiB 0.35% 1.64kB / 0B 66.3MB / 733kB 9 +75002248e916 helk-zookeeper 0.26% 62.6MiB / 12.62GiB 0.48% 150kB / 118kB 2.75MB / 172kB 23 +ee0120167ffa helk-elastalert 2.60% 40.97MiB / 12.62GiB 0.32% 12MB / 17.4MB 38.3MB / 8.19kB 1 +4dc2722cdd53 helk-spark-master 0.50% 187.2MiB / 12.62GiB 1.45% 148kB / 17.8kB 52.3MB / 32.8kB 28 +9c1eb230b0ff helk-logstash 15.96% 1.807GiB / 12.62GiB 14.32% 871kB / 110MB 165MB / 2.95MB 62 +f018f16d9792 helk-kibana 2.73% 179.1MiB / 12.62GiB 1.39% 3.71MB / 17.6MB 250MB / 4.1kB 13 +6ec5779e9e01 helk-elasticsearch 12.56% 2.46GiB / 12.62GiB 19.50% 130MB / 15.8MB 293MB / 226MB 61 +``` + +You should also monitor the logs of each container while they are being initialized: + +Just run the following: + +``` +helk@HELK:~$ sudo docker logs --follow helk-elasticsearch + +[HELK-ES-DOCKER-INSTALLATION-INFO] Setting ES_JAVA_OPTS to -Xms1200m -Xmx1200m -XX:-UseConcMarkSweepGC -XX:-UseCMSInitiatingOccupancyOnly -XX:+UseG1GC +[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elastic license to basic 
+[HELK-ES-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.. +OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release. +OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release. +[2019-03-16T17:13:58,710][INFO ][o.e.e.NodeEnvironment ] [helk-1] using [1] data paths, mounts [[/usr/share/elasticsearch/data (/dev/sda1)]], net usable_space [60.7gb], net total_space [72.7gb], types [ext4] +[2019-03-16T17:13:58,722][INFO ][o.e.e.NodeEnvironment ] [helk-1] heap size [1.1gb], compressed ordinary object pointers [true] +[2019-03-16T17:13:58,728][INFO ][o.e.n.Node ] [helk-1] node name [helk-1], node ID [En7HptZKTNmv4R6-Qb99UA] +[2019-03-16T17:13:58,729][INFO ][o.e.n.Node ] [helk-1] version[6.6.1], pid[12], build[default/tar/1fd8f69/2019-02-13T17:10:04.160291Z], OS[Linux/4.4.0-116-generic/amd64], JVM[Oracle Corporation/OpenJDK 64-Bit Server VM/11.0.1/11.0.1+13] +[2019-03-16T17:13:58,734][INFO ][o.e.n.Node ] [helk-1] JVM arguments [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Djava.io.tmpdir=/tmp/elasticsearch-7720073513605769733, -XX:+HeapDumpOnOutOfMemoryError, -XX:HeapDumpPath=data, -XX:ErrorFile=logs/hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m, -Djava.locale.providers=COMPAT, -XX:UseAVX=2, -Des.cgroups.hierarchy.override=/, -Xms1200m, -Xmx1200m, -XX:-UseConcMarkSweepGC, -XX:-UseCMSInitiatingOccupancyOnly, -XX:+UseG1GC, -Des.path.home=/usr/share/elasticsearch, -Des.path.conf=/usr/share/elasticsearch/config, -Des.distribution.flavor=default, -Des.distribution.type=tar] +[2019-03-16T17:14:03,510][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [aggs-matrix-stats] +[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [analysis-common] +[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [ingest-common] +[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [lang-expression] +[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [lang-mustache] +[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [lang-painless] +[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [mapper-extras] +[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [parent-join] +[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [percolator] +[2019-03-16T17:14:03,519][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [rank-eval] +[2019-03-16T17:14:03,519][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [reindex] +.. +.... 
+
+```
+
+For the other containers, all you need to do is replace `helk-elasticsearch` with the specific container's name:
+```
+sudo docker logs --follow <container-name>
+```
+
+Remember that you can also get a shell inside your docker containers by running the following commands:
+```
+sudo docker exec -ti helk-elasticsearch bash
+root@7a9d6443a4bf:/opt/helk/scripts#
+```
+
+# Final Details
+Once your HELK installation ends, you will be presented with information that you will need to access the HELK and all its other components.
+
+You will get the following information:
+
+```
+***********************************************************************************
+** [HELK-INSTALLATION-INFO] HELK WAS INSTALLED SUCCESSFULLY                      **
+** [HELK-INSTALLATION-INFO] USE THE FOLLOWING SETTINGS TO INTERACT WITH THE HELK **
+***********************************************************************************
+
+HELK KIBANA URL: https://192.168.64.138
+HELK KIBANA USER: helk
+HELK KIBANA PASSWORD: hunting
+HELK SPARK MASTER UI: http://192.168.64.138:8080
+HELK JUPYTER SERVER URL: http://192.168.64.138/jupyter
+HELK JUPYTER CURRENT TOKEN: e8e83f5c9fe93882a970ce352d566adfb032b0975549449c
+HELK ZOOKEEPER: 192.168.64.138:2181
+HELK KSQL SERVER: 192.168.64.138:8088
+
+IT IS HUNTING SEASON!!!!!
+```
+| Type | Description |
+|--------|---------|
+| HELK KIBANA URL | URL to access the Kibana server. You will need to copy it and paste it in your browser to access Kibana. Make sure you use **https**, since Kibana is running behind NGINX via port 443 with a self-signed certificate |
+| HELK KIBANA USER & PASSWORD | Credentials used to access Kibana |
+| HELK SPARK MASTER UI | URL to access the Spark Master server (Spark Standalone). That server manages the Spark Workers used during execution of code by Jupyter Notebooks. Spark Master acts as a proxy to the Spark Workers and running applications |
+| HELK JUPYTER SERVER URL | URL to access the Jupyter notebook server |
+| HELK JUPYTER CURRENT TOKEN | Jupyter token to log in with instead of providing a password |
+| ZOOKEEPER | URL for the Kafka cluster zookeeper |
+| KSQL SERVER | URL to access the KSQL server and send SQL queries to the data in the Kafka brokers |
+
+
+
+# Access HELK Web Interface
+Open your preferred browser, go to your HELK's IP address, and enter the HELK credentials **(helk:hunting)**. By default, you will be presented with Kibana's Home page. Once there, you can explore the different features that Kibana provides. I personally like to check the **Index Patterns** first and then **Discover**
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Home.png]]
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-IndexPatterns.png]]
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Discovery.png]]
+
+# Access Jupyter Notebook Interface
+HELK now comes with a Jupyter notebook server that spawns a JupyterLab extension.
+
+Use the HELK JUPYTER SERVER URL and you will get the following prompt:
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/JUPYTER-login.png]]
+
+You will then be sent to the JupyterLab menu:
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/JUPYTER-menu.png]]
+
+You can double-click on one of the notebooks and start playing with them:
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/JUPYTER-notebook.png]]
+
+
+I hope this document was helpful in deploying your own HELK.
Let us know if you have any questions or if you think this document can be improved. Feel free to create an **issue** for updates to this procedure. A more detailed **HOW-TO** will be developed soon to go into more detail on how to use all the HELK components.
+
+IT IS HUNTING SEASON!!
\ No newline at end of file
diff --git a/wiki/Kafka.md b/wiki/Kafka.md
new file mode 100644
index 00000000..c53f4ae5
--- /dev/null
+++ b/wiki/Kafka.md
@@ -0,0 +1,73 @@
+# Design
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KAFKA-Design.png]]
+
+# Kafka Ecosystem
+## Producers
+Producers publish data to the topics of their choice. The producer is responsible for choosing which record to assign to which partition within the topic.
+
+HELK currently accepts data sent to a few topics, such as `winlogbeat` for Windows systems and `filebeat` for Linux or OSX systems. From a Windows perspective, it is common to have **Winlogbeat** (log shipper/producer) installed on all the endpoints. However, it is recommended to use solutions such as [Windows Event Forwarding (WEF)](https://docs.microsoft.com/en-us/windows/security/threat-protection/use-windows-event-forwarding-to-assist-in-intrusion-detection) servers to collect and centralize your logs, and then have Winlogbeat or NXLog installed on them to ship the logs to your HELK Kafka broker.
+
+When using **Winlogbeat** you can use the following config:
+```
+winlogbeat.event_logs:
+  - name: Application
+    ignore_older: 30m
+  - name: Security
+    ignore_older: 30m
+  - name: System
+    ignore_older: 30m
+  - name: Microsoft-windows-sysmon/operational
+    ignore_older: 30m
+  - name: Microsoft-windows-PowerShell/Operational
+    ignore_older: 30m
+    event_id: 4103, 4104
+  - name: Windows PowerShell
+    event_id: 400,600
+    ignore_older: 30m
+  - name: Microsoft-Windows-WMI-Activity/Operational
+    event_id: 5857,5858,5859,5860,5861
+
+output.kafka:
+  hosts: ["<HELK-IP>:9092"]
+  topic: "winlogbeat"
+  max_retries: 2
+  max_message_bytes: 1000000
+```
+You can check the how-to section of this wiki to learn how to verify that your Winlogbeat log shipper is sending data to a Kafka broker; a quick check is also shown right below.
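+
+If you want to verify from the broker side that producers are actually delivering events, one quick check is to read a few messages straight off the `winlogbeat` topic with the console consumer that ships with Kafka. This is only an illustrative sketch: the container name is the one HELK uses, but the path to the Kafka scripts inside the container is an assumption and may differ in your image (the [Check Kafka topic ingestion](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Kafka-topic-ingestion) how-to covers this in more depth):
+```
+sudo docker exec -ti helk-kafka-broker /opt/helk/kafka/bin/kafka-console-consumer.sh \
+  --bootstrap-server helk-kafka-broker:9092 --topic winlogbeat --from-beginning --max-messages 5
+```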
+## Kafka Broker
+HELK uses a Kafka cluster composed of 1 broker (not really a cluster, but it is a good start for hosting it in a lab environment). If you add more brokers to the cluster, each broker will have its own ID number and topic log partitions. Connecting to one broker bootstraps a client to the entire Kafka cluster.
+
+The HELK broker has its own `server.properties` file. You can find it [here](https://github.com/Cyb3rWard0g/HELK/blob/master/docker/helk-kafka-broker/server.properties). Some of the basic settings that you need to understand are the following:
+
+| Name | Description | Type | Value |
+|--------|---------|-------|-------|
+| broker.id | The broker id for this server. If unset, a unique broker id will be generated. To avoid conflicts between zookeeper generated broker id's and user configured broker id's, generated broker ids start from reserved.broker.max.id + 1. | int | 1 |
+| listeners | Listener List - Comma-separated list of URIs we will listen on and the listener names. Specify hostname as 0.0.0.0 to bind to all interfaces. For the docker deployment, it is set to the kafka broker container name and used to communicate with other containers inside of the docker environment ONLY | string | PLAINTEXT://helk-kafka-broker:9092 |
+| advertised.listeners | Listeners to publish to ZooKeeper for clients to use, if different than the `listeners` config property. In IaaS environments, this may need to be different from the interface to which the broker binds. For the docker deployment, this is the IP address of the machine hosting your docker containers. This will be the IP address that your producers can talk to from outside of the docker environment. When the broker starts, the current value is updated automatically by the environment variable ADVERTISED_LISTENER | string | PLAINTEXT://HELKIP:9092 |
+| log.dirs | The directories in which the log data is kept. If not set, the value in log.dir is used | string | /tmp/kafka-logs |
+| auto.create.topics.enable | Enable auto creation of topics on the server. This is disabled in HELK to avoid producers creating new topics | boolean | false |
+| log.retention.hours | The minimum age of a log file to be eligible for deletion due to age | int | 4 |
+
+## Zookeeper
+Kafka needs ZooKeeper to work efficiently in the cluster. Kafka uses Zookeeper for leadership election of Kafka Broker and Topic Partition pairs, and to manage service discovery for the Kafka Brokers that form the cluster. Zookeeper sends changes of the topology to Kafka, so each node in the cluster knows when a new broker joins, a broker dies, a topic is removed, a topic is added, etc. Zookeeper provides an in-sync view of the Kafka Cluster configuration.
+
+## HELK Kafka Topics
+Kafka automatically creates the following topics:
+
+| Topic | Description |
+|--------|---------|
+| winlogbeat | Main topic that stores raw event log data sent from endpoints with Winlogbeat installed. |
+| SYSMON_JOIN | Topic that stores Windows Sysmon events that have been enriched by KSQL commands to join **ProcessCreate** (event 1) and **NetworkConnect** (event 3) by their `ProcessGUID` values. |
+| winsysmon | Topic used by Logstash to send transformed/parsed Windows Sysmon event data back. |
+| winsecurity | Topic used by Logstash to send transformed/parsed Windows Security event data back. |
+| filebeat | Topic that stores OSQuery data |
+
+# How-To
+* [Check Kafka topic ingestion](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Kafka-topic-ingestion)
+* [Check Winlogbeat shipping](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Winlogbeat-shipping)
+* [Update Kafka Broker IP](https://github.com/Cyb3rWard0g/HELK/wiki/Update-Kafka-Broker-IP)
+
+# References
+* [Kafka Producer API](http://kafka.apache.org/documentation.html#producerapi)
+* [Kafka Architecture](http://cloudurable.com/blog/kafka-architecture/index.html)
\ No newline at end of file
diff --git a/wiki/Kibana.md b/wiki/Kibana.md
new file mode 100644
index 00000000..8262f85a
--- /dev/null
+++ b/wiki/Kibana.md
@@ -0,0 +1,55 @@
+# Design
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Design.png]]
+
+# Visualize your logs
+## Discover
+Make sure you have logs being sent to your HELK first (at least Windows Security and Sysmon events). Then, go to your HELK's IP address (https://<HELK-IP>) in your preferred browser. If you don't have logs being sent to your HELK pipe (Kafka) yet, or they are just starting to get processed by Kafka and Logstash, you might get the message **"No matching indices found: No indices match pattern 'logs-endpoint-winevent-sysmon-*'"**
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-NoIndicesPattern.png]]
+
+That is normal at the beginning. Refresh your screen a couple of times in order to start visualizing your logs.
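+
+If you would rather confirm from the command line whether any `logs-*` indices exist yet, a quick illustrative check is to query Elasticsearch's `_cat/indices` API from inside the elasticsearch container (assuming the default `helk-elasticsearch` container name; add credentials to the curl command if you enabled a trial license with security):
+```
+sudo docker exec helk-elasticsearch curl -s 'localhost:9200/_cat/indices/logs-*?v'
+```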
+
+Currently, HELK automatically creates 7 index patterns for you and sets **logs-endpoint-winevent-sysmon-*** as your default one:
+* "logs-*"
+* "logs-endpoint-winevent-sysmon-*"
+* "logs-endpoint-winevent-security-*"
+* "logs-endpoint-winevent-application-*"
+* "logs-endpoint-winevent-system-*"
+* "logs-endpoint-winevent-powershell-*"
+* "logs-endpoint-winevent-wmiactivity-*"
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Discovery.png]]
+
+# Dashboards
+Currently, the HELK comes with 3 dashboards:
+
+## Global_Dashboard
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-GlobalDashboard.png]]
+
+## Network_Dashboard
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-NetworkDashboard.png]]
+
+## Sysmon_Dashboard
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-SysmonDashboard.png]]
+
+# Monitoring Views (X-Pack Basic Free License)
+
+## Kibana Initial Overview
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Kibana-Overview.png]]
+
+## Elasticsearch Overview
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Elasticsearch-Overview.png]]
+
+## Logstash Overview
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Logstash-Overview.png]]
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Logstash-Nodes-Overview.png]]
+
+
diff --git a/wiki/Load-Local-Docker-Images.md b/wiki/Load-Local-Docker-Images.md
new file mode 100644
index 00000000..fdf6e948
--- /dev/null
+++ b/wiki/Load-Local-Docker-Images.md
@@ -0,0 +1,79 @@
+If you followed [this document](https://github.com/Cyb3rWard0g/HELK/wiki/Export-Docker-Images-locally) to export your docker images locally, you should be ready to load them into an isolated system that cannot access the Docker Hub registry.
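+
+Because the exported `.tar` files are several hundred megabytes each, it can be worth recording checksums on the connected host before the transfer and verifying them on the isolated side. This is only a sketch; adjust the paths to wherever you stored the tars:
+```
+# On the non-isolated system, before copying
+cd /home/helk && sha256sum *.tar > helk-images.sha256
+# On the isolated system, after copying the tars and the checksum file
+cd /tmp && sha256sum -c helk-images.sha256
+```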
+ +Copy images to the isolated (10.0.10.102) system + +``` +NO-NISOLATED@helk:~$ for f in /home/helk/*.tar; do scp $f helk@10.0.10.102:/tmp/; done + +helk-spark-worker.tar 100% 560MB 24.4MB/s 00:23 +helk-ksql-server.tar 100% 502MB 29.5MB/s 00:17 +helk-logstash.tar 100% 773MB 28.6MB/s 00:27 +helk-ksql-cli.tar 100% 497MB 21.6MB/s 00:23 +helk-elasticsearch.tar 100% 815MB 29.1MB/s 00:28 +``` + +Check if images exist in the isolated system + +``` +ISOLATED@helk:~$ ls /tmp/ +helk-elastalert.tar helk-jupyter.tar helk-kibana.tar helk-ksql-server.tar helk-nginx.tar helk-spark-worker.tar helk-elasticsearch.tar helk-kafka-broker.tar helk-ksql-cli.tar helk-logstash.tar helk-spark-master.tar helk-zookeeper.tar +``` +Load images with the `docker load` commands: + +``` +ISOLATED@helk:~$ for i in /tmp/*.tar; do sudo docker load --input $i; done + +f49017d4d5ce: Loading layer [==================================================>] 85.96MB/85.96MB +8f2b771487e9: Loading layer [==================================================>] 15.87kB/15.87kB +ccd4d61916aa: Loading layer [==================================================>] 10.24kB/10.24kB +c01d74f99de4: Loading layer [==================================================>] 5.632kB/5.632kB +268a067217b5: Loading layer [==================================================>] 3.072kB/3.072kB +831fff32e4f2: Loading layer [==================================================>] 65.02kB/65.02kB +c89f4fbc01f8: Loading layer [==================================================>] 103.4MB/103.4MB +adfd094c5517: Loading layer [==================================================>] 3.245MB/3.245MB +c73538215c3e: Loading layer [==================================================>] 567.6MB/567.6MB +080f01d1ecbc: Loading layer [==================================================>] 13.31kB/13.31kB +60bbd38a907e: Loading layer [==================================================>] 3.584kB/3.584kB +9affd17eb100: Loading layer [==================================================>] 5.632kB/5.632kB +0561c04cbf7e: Loading layer [==================================================>] 7.168kB/7.168kB +ba0201512417: Loading layer [==================================================>] 18.29MB/18.29MB +Loaded image: cyb3rward0g/helk-elastalert:0.2.1 +071d8bd76517: Loading layer [==================================================>] 210.2MB/210.2MB +a175339dcf83: Loading layer [==================================================>] 310.5MB/310.5MB +9a70a6f483f7: Loading layer [==================================================>] 95.68MB/95.68MB +f4db77828c81: Loading layer [==================================================>] 311.3kB/311.3kB +be48c67e9d13: Loading layer [==================================================>] 237.5MB/237.5MB +432cb712190e: Loading layer [==================================================>] 7.68kB/7.68kB +a512981fd597: Loading layer [==================================================>] 9.728kB/9.728kB +Loaded image: docker.elastic.co/elasticsearch/elasticsearch:6.6.1 +49778752e7ec: Loading layer [==================================================>] 394.9MB/394.9MB +5f3913b1d541: Loading layer [==================================================>] 1.667GB/1.667GB +77fa3a9c5ff6: Loading layer [==================================================>] 7.168kB/7.168kB +cbc15b984e03: Loading layer [==================================================>] 10.24kB/10.24kB +38c44d7a52f6: Loading layer [==================================================>] 5.12kB/5.12kB +0ec2dbbfd6c7: Loading 
layer [==================================================>] 3.584kB/3.584kB
+Loaded image: cyb3rward0g/helk-jupyter:0.1.1
+4e31d8c1cf96: Loading layer [==================================================>] 203.1MB/203.1MB
+efb23c49455d: Loading layer [==================================================>] 11.26kB/11.26kB
+```
+
+Check that the images were loaded via the `docker images` command:
+
+```
+ISOLATED@helk:~$ sudo docker images
+
+REPOSITORY                                      TAG      IMAGE ID       CREATED        SIZE
+cyb3rward0g/helk-jupyter                        0.1.1    efa46ecc8d32   2 days ago     2.18GB
+confluentinc/cp-ksql-server                     5.1.2    f57298019757   6 days ago     514MB
+confluentinc/cp-ksql-cli                        5.1.2    bd411ce0ba9f   6 days ago     510MB
+docker.elastic.co/logstash/logstash             6.6.1    3e7fbb7964ee   11 days ago    786MB
+docker.elastic.co/kibana/kibana                 6.6.1    b94222148a00   11 days ago    710MB
+docker.elastic.co/elasticsearch/elasticsearch   6.6.1    c6ffcb0ee97e   11 days ago    842MB
+cyb3rward0g/helk-elastalert                     0.2.1    569f588a22fc   3 weeks ago    758MB
+cyb3rward0g/helk-kafka-broker                   2.1.0    7b3e7f9ce732   2 months ago   388MB
+cyb3rward0g/helk-zookeeper                      2.1.0    abb732da3e50   2 months ago   388MB
+cyb3rward0g/helk-spark-worker                   2.4.0    b1545b0582db   2 months ago   579MB
+cyb3rward0g/helk-spark-master                   2.4.0    70fc61de3445   2 months ago   579MB
+cyb3rward0g/helk-nginx                          0.0.7    280d044b6719   6 months ago   329MB
+helk@helk:~$
+```
\ No newline at end of file
diff --git a/wiki/Logstash.md b/wiki/Logstash.md
new file mode 100644
index 00000000..7e9f6b5c
--- /dev/null
+++ b/wiki/Logstash.md
@@ -0,0 +1,2 @@
+# Design
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/LOGSTASH-Design.png]]
\ No newline at end of file
diff --git a/wiki/Spark.md b/wiki/Spark.md
new file mode 100644
index 00000000..9ca5c2ec
--- /dev/null
+++ b/wiki/Spark.md
@@ -0,0 +1,107 @@
+# Design
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Design.png]]
+
+# Spark Cluster Standalone Mode
+Spark's standalone cluster manager is a lightweight platform built specifically for Apache Spark workloads. Using it, you can run multiple Spark Applications on the same cluster. It also provides simple interfaces for doing so but can scale to large Spark workloads. The main disadvantage of the standalone mode is that it's more limited than the other cluster managers: in particular, your cluster can only run Spark.
+
+Chambers, Bill; Zaharia, Matei. Spark: The Definitive Guide: Big Data Processing Made Simple (Kindle Locations 9911-9914). O'Reilly Media. Kindle Edition.
+
+* **Spark Cluster Master:** (often written standalone Master) is the cluster manager for a Spark Standalone cluster
+* **Spark Cluster Worker:** (aka standalone slave) is a logical node in a Spark Standalone cluster
+[Source](https://jaceklaskowski.gitbooks.io/mastering-apache-spark/content/spark-standalone.html)
+
+## Spark Pyspark UI
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Pyspark-UI.png]]
+
+## Spark Cluster Master UI
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Cluster-Manager.png]]
+
+## Spark Cluster Worker UI
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Cluster-Worker.png]]
+
+# Jupyter Integration
+"The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more."
[Jupyter](http://jupyter.org/)
+
+HELK integrates the Jupyter Notebook project with Spark via the **PYSPARK_DRIVER_PYTHON** environment variable. Basically, when the HELK runs **/bin/pyspark**, the Jupyter notebook is executed as PySpark's Python driver. The **PYSPARK_DRIVER_PYTHON_OPTS** value is the following:
+```
+"notebook --NotebookApp.open_browser=False --NotebookApp.ip='*' --NotebookApp.port=8880 --allow-root"
+```
+# Test Spark, GraphFrames & Jupyter Integration
+By default, the Jupyter server gets started automatically after installing the HELK.
+* Access the Jupyter server:
+  * Go to your `<HELK-IP>:8880` in your preferred browser
+  * Enter the token provided after installing the HELK
+* Go to the training/jupyter_notebooks/getting_started/ folder
+* Open the Check_Spark_Graphframes_Integrations notebook
+  * Check the saved output (make sure that you have Sysmon & Windows Security event logs being sent to your HELK; otherwise you will get errors in your Jupyter Notebook when trying to replicate the basic commands)
+  * Clear the output from the notebook and run everything again
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/HELK_checking_integrations.png]]
+
+# Apache Arrow Integration (Convert to Pandas - Optimization)
+Apache Arrow is an in-memory columnar data format that is used in Spark to efficiently transfer data between JVM and Python processes. This currently is most beneficial to Python users that work with Pandas/NumPy data. [Apache Spark](https://spark.apache.org/docs/latest/sql-programming-guide.html#pyspark-usage-guide-for-pandas-with-apache-arrow)
+
+[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-ApacheArrow.png]]
+
+Example from [Apache Arrow](https://arrow.apache.org/blog/2017/07/26/spark-arrow/)
+
+# Spark Packages
+## elasticsearch-hadoop-6.2.4
+"Elasticsearch for Apache Hadoop is an open-source, stand-alone, self-contained, small library that allows Hadoop jobs (whether using Map/Reduce or libraries built upon it such as Hive, Pig or Cascading or new upcoming libraries like Apache Spark) to interact with Elasticsearch. One can think of it as a connector that allows data to flow bi-directionally so that applications can leverage transparently the Elasticsearch engine capabilities to significantly enrich their capabilities and increase the performance.
+Elasticsearch-hadoop provides native integration between Elasticsearch and Apache Spark, in the form of an RDD (Resilient Distributed Dataset) (or Pair RDD to be precise) that can read data from Elasticsearch. The RDD is offered in two flavors: one for Scala (which returns the data as Tuple2 with Scala collections) and one for Java (which returns the data as Tuple2 containing java.util collections). Just like other libraries, elasticsearch-hadoop needs to be available in Spark's classpath. As Spark has multiple deployment modes, this can translate to the target classpath, whether it is on only one node (as is the case with the local mode - which will be used throughout the documentation) or per-node depending on the desired infrastructure." [Elastic](https://www.elastic.co/guide/en/elasticsearch/hadoop/current/spark.html)
+
+## graphframes:graphframes:0.5.0-spark2.1-s_2.11
+"This is a prototype package for DataFrame-based graphs in Spark. Users can write highly expressive queries by leveraging the DataFrame API, combined with a new API for motif finding. The user also benefits from DataFrame performance optimizations within the Spark SQL engine."
[SparkPackages](https://spark-packages.org/package/graphframes/graphframes) +"It aims to provide both the functionality of GraphX and extended functionality taking advantage of Spark DataFrames. This extended functionality includes motif finding, DataFrame-based serialization, and highly expressive graph queries." [Graphframes](https://graphframes.github.io/) + +## org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.0 +"Structured Streaming integration for Kafka 0.10 to poll data from Kafka" [Structured Streaming Kafka](https://spark.apache.org/docs/latest/structured-streaming-kafka-integration.html) + +## databricks:spark-sklearn:0.2.3 +"This package contains some tools to integrate the Spark computing framework with the popular scikit-learn machine library. Among other tools: 1) train and evaluate multiple scikit-learn models in parallel. It is a distributed analog to the multicore implementation included by default in scikit-learn. 2) convert Spark's Dataframes seamlessly into numpy ndarrays or sparse matrices. 3) (experimental) distribute Scipy's sparse matrices as a dataset of sparse vectors." [SparkPackages](https://spark-packages.org/package/databricks/spark-sklearn) + +# Other Python Packages + +## Pandas +"Pandas is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language." [Pandas Pydata](https://pandas.pydata.org/pandas-docs/stable/overview.html) + +## Scipy +"It is a Python-based ecosystem of open-source software for mathematics, science, and engineering." [Scipy Org.](https://www.scipy.org/) + +## Scikit-learn +"Simple and efficient tools for data mining and data analysis. Built on NumPy, SciPy, and matplotlib." [Scikit-Learn Org.](http://scikit-learn.org/stable/index.html) + +## Nltk +"NLTK is a leading platform for building Python programs to work with human language data. It provides easy-to-use interfaces to over 50 corpora and lexical resources such as WordNet, along with a suite of text processing libraries for classification, tokenization, stemming, tagging, parsing, and semantic reasoning, wrappers for industrial-strength NLP libraries, and an active discussion forum." [Ntlk Org.](http://www.nltk.org/) + +## Matplotlib +"Matplotlib is a Python 2D plotting library which produces publication quality figures in a variety of hardcopy formats and interactive environments across platforms. Matplotlib can be used in Python scripts, the Python and IPython shell, the jupyter notebook, web application servers, and four graphical user interface toolkits." [Matplotlib](https://matplotlib.org/index.html) + +## Seaborn +"Seaborn is a Python visualization library based on matplotlib. It provides a high-level interface for drawing attractive statistical graphics." [Seaborn Pydata](https://seaborn.pydata.org/index.html) + +## Datasketch +"Datasketch gives you probabilistic data structures that can process and search very large amount of data super fast, with little loss of accuracy." [Datasketch Github](https://github.com/ekzhu/datasketch) + +## Keras +"Keras is a high-level neural networks API, written in Python and capable of running on top of TensorFlow, CNTK, or Theano. It was developed with a focus on enabling fast experimentation. Being able to go from idea to result with the least possible delay is key to doing good research." [Keras](https://keras.io/) + +## Pyflux +"PyFlux is an open source time series library for Python. 
The library has a good array of modern time series models, as well as a flexible array of inference options (frequentist and Bayesian) that can be applied to these models. By combining breadth of models with breadth of inference, PyFlux allows for a probabilistic approach to time series modelling." [Pyflux Github](https://github.com/RJT1990/pyflux)
+
+## Imbalanced-learn
+"imbalanced-learn is a python package offering a number of re-sampling techniques commonly used in datasets showing strong between-class imbalance. It is compatible with scikit-learn and is part of scikit-learn-contrib projects." [Imbalanced Learn](https://github.com/scikit-learn-contrib/imbalanced-learn)
+
+## Lime
+"This project is about explaining what machine learning classifiers (or models) are doing. Lime is able to explain any black box classifier, with two or more classes. All we require is that the classifier implements a function that takes in raw text or a numpy array and outputs a probability for each class. Support for scikit-learn classifiers is built-in." [Lime](https://github.com/marcotcr/lime)
+
+## Pyarrow
+Apache Arrow is a cross-language development platform for in-memory data. It specifies a standardized language-independent columnar memory format for flat and hierarchical data, organized for efficient analytic operations on modern hardware. It also provides computational libraries and zero-copy streaming messaging and interprocess communication. [Apache Arrow](https://arrow.apache.org/docs/python/)
+
+## NetworkX
+NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. [NetworkX](https://networkx.github.io/)
+
+## Nxviz
+nxviz is a graph visualization package for NetworkX. With nxviz, you can create beautiful graph visualizations by a declarative API. [Nxviz](https://github.com/ericmjl/nxviz)
\ No newline at end of file
diff --git a/wiki/Update-Kafka-Broker-IP.md b/wiki/Update-Kafka-Broker-IP.md
new file mode 100644
index 00000000..68cf7489
--- /dev/null
+++ b/wiki/Update-Kafka-Broker-IP.md
@@ -0,0 +1,33 @@
+# Docker Deployment
+For the docker deployment, you will have to update the environment variable `ADVERTISED_LISTENER` first. You can do this on the system hosting the entire HELK, or on the Kafka broker itself if you distributed your docker containers across other systems.
+
+```
+export ADVERTISED_LISTENER=10.0.10.104
+```
+
+Then, you can simply run `docker-compose` the same way it was used to build the HELK. This will re-create the containers with the new value assigned to the environment variable `ADVERTISED_LISTENER`.
+```
+sudo -E docker-compose -f helk-kibana-notebook-analysis-basic.yml up -d
+```
+
+If you just restart your containers, the environment variable in the Kafka broker will not be updated; you have to re-create the container. If you don't re-create the broker, you will keep seeing messages like the ones below:
+
+```
+[2019-01-25 05:35:21,026] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
+[2019-01-25 05:35:24,194] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
+[2019-01-25 05:35:27,362] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available.
(org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:30,530] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:33,698] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:36,866] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:40,034] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:43,238] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:46,306] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:49,382] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:52,450] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:55,522] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:35:58,594] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:36:01,714] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:36:04,770] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:36:08,450] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) +[2019-01-25 05:36:11,650] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. 
(org.apache.kafka.clients.NetworkClient) +``` \ No newline at end of file diff --git a/wiki/_Sidebar.md b/wiki/_Sidebar.md new file mode 100644 index 00000000..3e2676fb --- /dev/null +++ b/wiki/_Sidebar.md @@ -0,0 +1,22 @@ +## [Introduction](https://github.com/Cyb3rWard0g/HELK/wiki) + +## [Architecture](https://github.com/Cyb3rWard0g/HELK/wiki/Architecture-Overview) + * [Kafka](https://github.com/Cyb3rWard0g/HELK/wiki/Kafka) + * [Logstash](https://github.com/Cyb3rWard0g/HELK/wiki/Logstash) + * [Elasticsearch](https://github.com/Cyb3rWard0g/HELK/wiki/Elasticsearch) + * [Kibana](https://github.com/Cyb3rWard0g/HELK/wiki/Kibana) + * [Spark](https://github.com/Cyb3rWard0g/HELK/wiki/Spark) + +## [Installation](https://github.com/Cyb3rWard0g/HELK/wiki/Installation) + +## How-To +### Kafka +* [Check Kafka topic ingestion](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Kafka-topic-ingestion) +* [Check Winlogbeat shipping](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Winlogbeat-shipping) +* [Update Kafka Broker IP](https://github.com/Cyb3rWard0g/HELK/wiki/Update-Kafka-Broker-IP) +* [Deploy KSQL CLI Locally](https://github.com/Cyb3rWard0g/HELK/wiki/Deploy-KSQL-CLI-Locally) +* [Create Logstash Plugins Offline Package](https://github.com/Cyb3rWard0g/HELK/wiki/Create-Plugins-Offline-Package) + +### Docker +* [Export Docker Images Locally](https://github.com/Cyb3rWard0g/HELK/wiki/Export-Docker-Images-locally) +* [Load Local Docker Images](https://github.com/Cyb3rWard0g/HELK/wiki/Load-Local-Docker-Images) \ No newline at end of file From 69a70fad9592be859d47c17cdf451d93839f0d8c Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Wed, 2 Oct 2019 08:43:46 +0100 Subject: [PATCH 10/19] Added defaults for PR --- README.md | 17 ++++++++--------- .../rules/helk_powershell_susp_ps_commands.yml | 4 ++-- .../helk_security_dcsync_backdoor_user.yml | 4 ++-- .../rules/helk_security_dcsync_non_dc.yml | 4 ++-- .../rules/helk_security_rubes_logon_process.yml | 4 ++-- .../rules/helk_security_share_chrome_pipe.yml | 4 ++-- .../helk-elastalert/rules/helk_sysmon_bits.yml | 4 ++-- .../rules/helk_sysmon_cmdline_file_creation.yml | 4 ++-- .../rules/helk_sysmon_cobalt_strike_msagent.yml | 4 ++-- .../rules/helk_sysmon_cs_rundll32_network.yml | 4 ++-- .../rules/helk_sysmon_csharp_compile.yml | 4 ++-- .../rules/helk_sysmon_dcom_lm.yml | 4 ++-- .../rules/helk_sysmon_internal_monologue.yml | 4 ++-- .../rules/helk_sysmon_kerb_no_lsass.yml | 4 ++-- .../rules/helk_sysmon_logonscripts_lm.yml | 4 ++-- .../rules/helk_sysmon_net_administrators.yml | 4 ++-- .../rules/helk_sysmon_net_group_domains.yml | 4 ++-- .../helk_sysmon_sc_query_securitydescriptor.yml | 4 ++-- ...helk_sysmon_scriptable_protocol_handlers.yml | 4 ++-- .../rules/helk_sysmon_services_rare_child.yml | 4 ++-- .../rules/helk_sysmon_wevtutil.yml | 4 ++-- .../rules/helk_sysmon_wsmprovhost_winrm.yml | 4 ++-- .../rules/helk_system_psexec_psh.yml | 4 ++-- docker/helk_install.sh | 4 ++-- 24 files changed, 54 insertions(+), 55 deletions(-) diff --git a/README.md b/README.md index 1c8ffb94..d1624c43 100644 --- a/README.md +++ b/README.md @@ -40,14 +40,14 @@ The project is currently in an alpha stage, which means that the code and the fu ## WIKI -* [Introduction](https://github.com/AlfieJ04/HELK-CUSTOM/wiki) -* [Architecture Overview](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Architecture-Overview) - * [Kafka](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Kafka) - * [Logstash](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Logstash) - * 
[Elasticsearch](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Elasticsearch) - * [Kibana](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Kibana) - * [Spark](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Spark) -* [Installation](https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Installation) +* [Introduction](https://github.com/Cyb3rWard0g/HELK/wiki) +* [Architecture Overview](https://github.com/Cyb3rWard0g/HELK/wiki/Installation/Architecture-Overview) + * [Kafka](https://github.com/Cyb3rWard0g/HELK/wiki/Kafka) + * [Logstash](https://github.com/Cyb3rWard0g/HELK/wiki/Logstash) + * [Elasticsearch](https://github.com/Cyb3rWard0g/HELK/wiki/Elasticsearch) + * [Kibana](https://github.com/Cyb3rWard0g/HELK/wiki/Kibana) + * [Spark](https://github.com/Cyb3rWard0g/HELK/wiki/Spark) +* [Installation](https://github.com/Cyb3rWard0g/HELK/wiki/Installation) ## (Docker) Accessing the HELK's Images @@ -95,7 +95,6 @@ root@ede2a2503030:/opt/helk/scripts# # Contributors -* Ashlee Jones [@AshleeJones04](https://twitter.com/AshleeJones04) * Jose Luis Rodriguez [@Cyb3rPandaH](https://twitter.com/Cyb3rPandaH) * Robby Winchester [@robwinchester3](https://twitter.com/robwinchester3) * Jared Atkinson [@jaredatkinson](https://twitter.com/jaredcatkinson) diff --git a/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml b/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml index ee705930..8143ca4c 100644 --- a/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml +++ b/docker/helk-elastalert/rules/helk_powershell_susp_ps_commands.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential suspicious powershell parameters filter: diff --git a/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml b/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml index bacce58a..9335e453 100644 --- a/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml +++ b/docker/helk-elastalert/rules/helk_security_dcsync_backdoor_user.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential DCSync backdoor user filter: diff --git a/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml b/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml index 99903a65..bb182191 100644 --- a/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml +++ b/docker/helk-elastalert/rules/helk_security_dcsync_non_dc.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential DCSync from non-dcs filter: diff --git a/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml 
b/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml index 233a94b3..ca7fbd97 100644 --- a/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml +++ b/docker/helk-elastalert/rules/helk_security_rubes_logon_process.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential use of rubeus via the trusted logon process filter: diff --git a/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml b/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml index f9f24ce1..d96bced0 100644 --- a/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml +++ b/docker/helk-elastalert/rules/helk_security_share_chrome_pipe.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects suspicious use the common chrome named pipe filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_bits.yml b/docker/helk-elastalert/rules/helk_sysmon_bits.yml index 5c40e568..81efc25c 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_bits.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_bits.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects suspicious bits activity filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml b/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml index 6784bdbd..59bc2d73 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_cmdline_file_creation.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects cmdline files being created on victim filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml b/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml index c317b52a..c2e91962 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_cobalt_strike_msagent.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential use of 
named pipes by Cobalt Strike agents filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml b/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml index c46507c0..42c228a5 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_cs_rundll32_network.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential use of CS with rundll32 calling out filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml b/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml index bea19984..7e56cce6 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_csharp_compile.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential Csharp compiling filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml b/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml index 70f222cc..f7313f31 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_dcom_lm.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential lateral movement via dcom filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml b/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml index f3a70383..9f42cdfc 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_internal_monologue.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential internal monologue filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml b/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml index c9989204..b7710a2f 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_kerb_no_lsass.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: 
"/etc/elastalert/smtp_auth_file.yaml" description: Detects potential calls to DCs via Kerberos port from non lsass filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml b/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml index 8f1f5aa8..beeecd73 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_logonscripts_lm.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential lateral movement via logon scripts filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml b/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml index c7f70c96..50eb97ca 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_net_administrators.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential recon for admininstrators via net samrp filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml b/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml index 3e882333..a4bce450 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_net_group_domains.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential recon for domain admins via net samrp filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml b/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml index 82df4c53..641d24c2 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_sc_query_securitydescriptor.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects adversaries accessing services security descriptors filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml b/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml index 30642636..6af07db1 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_scriptable_protocol_handlers.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - 
"support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential scriptable handlers filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml b/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml index 754e9c2a..0859eaae 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_services_rare_child.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects rare childs from services.exe (gold image based) filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml b/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml index 88f955ef..f1cd3b34 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_wevtutil.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects adversaries clearing logs via wevtutil filter: diff --git a/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml b/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml index 6f9b2705..e8f4114b 100644 --- a/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml +++ b/docker/helk-elastalert/rules/helk_sysmon_wsmprovhost_winrm.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects use of winrm over the network filter: diff --git a/docker/helk-elastalert/rules/helk_system_psexec_psh.yml b/docker/helk-elastalert/rules/helk_system_psexec_psh.yml index 57f194b2..f4249e7f 100644 --- a/docker/helk-elastalert/rules/helk_system_psexec_psh.yml +++ b/docker/helk-elastalert/rules/helk_system_psexec_psh.yml @@ -2,11 +2,11 @@ alert: - debug - email email: - - "support@infinitehosting.uk" + - "your@emailaddress.com" smtp_host: "smtp.outlook.com" #for O365 email addresses smtp_port: 587 #for O365 email addresses smtp_ssl: true -from_addr: "support@infinitehosting.uk" +from_addr: "your@emailaddress.com" smtp_auth_file: "/etc/elastalert/smtp_auth_file.yaml" description: Detects potential psexec via powershell module filter: diff --git a/docker/helk_install.sh b/docker/helk_install.sh index c8f130b5..f1fc59b9 100755 --- a/docker/helk_install.sh +++ b/docker/helk_install.sh @@ -3,7 +3,7 @@ # HELK script: helk_install.sh # HELK script description: HELK installation # HELK build Stage: Alpha -# Author: Ashlee Jones (@AshleeJones04) +# Author: 
Roberto Rodriguez (@Cyb3rWard0g) # License: GPL-3.0 # *********** Helk log tagging variables *************** @@ -12,7 +12,7 @@ HELK_INFO_TAG="[HELK-INSTALLATION-INFO]" HELK_ERROR_TAG="[HELK-INSTALLATION-ERROR]" # Make sure to use "echo -e" with this variable INSTALL_ERROR_CHECK_WIKI="$HELK_ERROR_TAG Check the requirements section in our installation Wiki\ -\n$HELK_ERROR_TAG Installation Wiki: https://github.com/AlfieJ04/HELK-CUSTOM/wiki/Installation" +\n$HELK_ERROR_TAG Installation Wiki: https://github.com/Cyb3rWard0g/HELK/wiki/Installation" # *********** Variables for user modification *************** # Careful editing unless you know what you are doing :) From dfb5d443bd1d037dd7606d6b6d471e3cef28c6ea Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Wed, 2 Oct 2019 08:49:54 +0100 Subject: [PATCH 11/19] Fixed Typo in Readme, Changed license in Curator --- README.md | 2 +- docker/helk-curator/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index d1624c43..82aa7d50 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ The project is currently in an alpha stage, which means that the code and the fu ## WIKI * [Introduction](https://github.com/Cyb3rWard0g/HELK/wiki) -* [Architecture Overview](https://github.com/Cyb3rWard0g/HELK/wiki/Installation/Architecture-Overview) +* [Architecture Overview](https://github.com/Cyb3rWard0g/HELK/wiki/Architecture-Overview) * [Kafka](https://github.com/Cyb3rWard0g/HELK/wiki/Kafka) * [Logstash](https://github.com/Cyb3rWard0g/HELK/wiki/Logstash) * [Elasticsearch](https://github.com/Cyb3rWard0g/HELK/wiki/Elasticsearch) diff --git a/docker/helk-curator/Dockerfile b/docker/helk-curator/Dockerfile index 66583f12..964d0dd4 100644 --- a/docker/helk-curator/Dockerfile +++ b/docker/helk-curator/Dockerfile @@ -1,7 +1,7 @@ # HELK script: HELK Curator Dockerfile # HELK build Stage: Alpha # Author: Ashlee Jones (@AshleeJones04) -# License: Apache 2.0 +# License: GPL-3.0 # References: # https://github.com/elastic/curator From 19b4513a210f5b0a023c4a0f53a6c377995d813e Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Wed, 2 Oct 2019 08:57:18 +0100 Subject: [PATCH 12/19] Added ES URL to final information --- docker/helk_install.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/helk_install.sh b/docker/helk_install.sh index f1fc59b9..141eacfe 100755 --- a/docker/helk_install.sh +++ b/docker/helk_install.sh @@ -499,12 +499,13 @@ show_final_information(){ echo "HELK KIBANA URL: https://${HOST_IP}" echo "HELK KIBANA USER: helk" echo "HELK KIBANA PASSWORD: ${KIBANA_UI_PASSWORD_INPUT}" - echo "HELK Elasticsearch URL: http://${HOST_IP}:9200" + echo "HELK ELASTICSEARCH URL: http://${HOST_IP}:9200" echo "HELK SPARK MASTER UI: http://${HOST_IP}:8080" echo "HELK JUPYTER SERVER URL: http://${HOST_IP}/jupyter" get_jupyter_credentials elif [[ ${HELK_BUILD} == "helk-kibana-analysis" ]] || [[ ${HELK_BUILD} == "helk-kibana-analysis-alert" ]]; then echo "HELK KIBANA URL: https://${HOST_IP}" + echo "HELK ELASTICSEARCH URL: http://${HOST_IP}:9200" echo "HELK KIBANA USER: helk" echo "HELK KIBANA PASSWORD: ${KIBANA_UI_PASSWORD_INPUT}" fi From 9d85a1e0ade98b14730a57098be669215af046c3 Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Wed, 2 Oct 2019 09:54:17 +0100 Subject: [PATCH 13/19] Added Curator to Architecture-Overview --- wiki/Architecture-Overview.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/wiki/Architecture-Overview.md b/wiki/Architecture-Overview.md index bd7c3ddf..5ebbbb58 100644 --- 
a/wiki/Architecture-Overview.md +++ b/wiki/Architecture-Overview.md @@ -30,6 +30,9 @@ Kibana makes it easy to understand large volumes of data. Its simple, browser-ba ## Jupyter Notebook "The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more."[Jupyter Reference](http://jupyter.org/) +## Curator +"Curator now performs many operations on your Elasticsearch indices, from delete to snapshot to shard allocation routing." + # Enrichments Another component that sets the HELK apart form other ELK builds is the different enrichments applied to the data it collects. ## AlienVault OTX From 5326d4ebd41de4aba5f647fe243a11c4fb5d5732 Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Wed, 2 Oct 2019 09:56:19 +0100 Subject: [PATCH 14/19] Added Curator Reference --- wiki/Architecture-Overview.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wiki/Architecture-Overview.md b/wiki/Architecture-Overview.md index 5ebbbb58..1262d8bb 100644 --- a/wiki/Architecture-Overview.md +++ b/wiki/Architecture-Overview.md @@ -31,7 +31,7 @@ Kibana makes it easy to understand large volumes of data. Its simple, browser-ba "The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more."[Jupyter Reference](http://jupyter.org/) ## Curator -"Curator now performs many operations on your Elasticsearch indices, from delete to snapshot to shard allocation routing." +"Curator now performs many operations on your Elasticsearch indices, from delete to snapshot to shard allocation routing." [Curator Reference](https://www.elastic.co/guide/en/elasticsearch/client/curator/current/command-line.html) # Enrichments Another component that sets the HELK apart form other ELK builds is the different enrichments applied to the data it collects. 
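
For context on what that Curator component is typically configured to do, a minimal Curator action file looks roughly like the following. This is an illustrative sketch only: the index prefix and the 30-day retention window are assumptions for the example, not values shipped with HELK.

```
actions:
  1:
    action: delete_indices
    description: Delete indices older than 30 days (based on the date in the index name)
    options:
      ignore_empty_list: True
    filters:
      - filtertype: pattern
        kind: prefix
        value: logs-endpoint-
      - filtertype: age
        source: name
        direction: older
        timestring: '%Y.%m.%d'
        unit: days
        unit_count: 30
```

Curator applies the filters in order, so the pattern filter narrows the candidate indices before the age filter decides which of them are old enough to delete.
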
From 10f36e01668a2cc04bed64cfdd6275a59177afec Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Wed, 2 Oct 2019 09:59:25 +0100 Subject: [PATCH 15/19] Removed local Wiki from fork --- wiki/Architecture-Overview.md | 39 ---- wiki/Check-Kafka-topic-ingestion.md | 29 --- wiki/Check-Winlogbeat-shipping.md | 6 - wiki/Create-Plugins-Offline-Package.md | 44 ---- wiki/Curator.md | 0 wiki/Deploy-KSQL-CLI-Locally.md | 161 --------------- wiki/Elasticsearch.md | 100 --------- wiki/Export-Docker-Images-locally.md | 80 -------- wiki/Home.md | 14 -- wiki/Installation.md | 272 ------------------------- wiki/Kafka.md | 73 ------- wiki/Kibana.md | 55 ----- wiki/Load-Local-Docker-Images.md | 79 ------- wiki/Logstash.md | 2 - wiki/Spark.md | 107 ---------- wiki/Update-Kafka-Broker-IP.md | 33 --- wiki/_Sidebar.md | 22 -- 17 files changed, 1116 deletions(-) delete mode 100644 wiki/Architecture-Overview.md delete mode 100644 wiki/Check-Kafka-topic-ingestion.md delete mode 100644 wiki/Check-Winlogbeat-shipping.md delete mode 100644 wiki/Create-Plugins-Offline-Package.md delete mode 100644 wiki/Curator.md delete mode 100644 wiki/Deploy-KSQL-CLI-Locally.md delete mode 100644 wiki/Elasticsearch.md delete mode 100644 wiki/Export-Docker-Images-locally.md delete mode 100644 wiki/Home.md delete mode 100644 wiki/Installation.md delete mode 100644 wiki/Kafka.md delete mode 100644 wiki/Kibana.md delete mode 100644 wiki/Load-Local-Docker-Images.md delete mode 100644 wiki/Logstash.md delete mode 100644 wiki/Spark.md delete mode 100644 wiki/Update-Kafka-Broker-IP.md delete mode 100644 wiki/_Sidebar.md diff --git a/wiki/Architecture-Overview.md b/wiki/Architecture-Overview.md deleted file mode 100644 index 1262d8bb..00000000 --- a/wiki/Architecture-Overview.md +++ /dev/null @@ -1,39 +0,0 @@ -# Design -[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/HELK_Design.png]] - -The HELK follows the native flow of an ELK stack with events being sent (preferably from Winlogbeat for now) to Kafka brokers. Next, they get filtered by Logstash and sent over to an Elasticsearch database. Then, they can be visualized in a Kibana instance. However, what sets the HELK apart from other ELK builds is the extra analytic capabilities provided by Apache Spark, GraphFrames and Jupyter. More soon.... - -# Core Components Definitions -## Kafka -"Kafka is a distributed publish-subscribe messaging system used for building real-time data pipelines and streaming apps. It is horizontally scalable, fault-tolerant, wicked fast, and runs in production in thousands of companies." [Kafka](https://kafka.apache.org/) - -## Elasticsearch -"Elasticsearch is a highly scalable open-source full-text search and analytics engine. It allows you to store, search, and analyze big volumes of data quickly and in near real time. It is generally used as the underlying engine/technology that powers applications that have complex search features and requirements." [Elastic Reference](https://www.elastic.co/guide/en/elasticsearch/reference/current/getting-started.html) - -## Logstash -"Logstash is an open source data collection engine with real-time pipelining capabilities. Logstash can dynamically unify data from disparate sources and normalize the data into destinations of your choice. Cleanse and democratize all your data for diverse advanced downstream analytics and visualization use cases. 
- [Elastic Reference](https://www.elastic.co/guide/en/logstash/current/introduction.html)
-
-## Kibana
-"Kibana is an open source analytics and visualization platform designed to work with Elasticsearch. You use Kibana to search, view, and interact with data stored in Elasticsearch indices. You can easily perform advanced data analysis and visualize your data in a variety of charts, tables, and maps.
-Kibana makes it easy to understand large volumes of data. Its simple, browser-based interface enables you to quickly create and share dynamic dashboards that display changes to Elasticsearch queries in real time." [Elastic Reference](https://www.elastic.co/guide/en/kibana/current/introduction.html)
-
-## ES-Hadoop
-"Elasticsearch for Apache Hadoop is an open-source, stand-alone, self-contained, small library that allows Hadoop jobs (whether using Map/Reduce or libraries built upon it such as Hive, Pig or Cascading or new upcoming libraries like Apache Spark) to interact with Elasticsearch. One can think of it as a connector that allows data to flow bi-directionally so that applications can leverage transparently the Elasticsearch engine capabilities to significantly enrich their capabilities and increase the performance." [Elastic Reference](https://www.elastic.co/guide/en/elasticsearch/hadoop/current/reference.html)
-
-## Apache Spark
-"Apache Spark is a fast and general-purpose cluster computing system. It provides high-level APIs in Java, Scala, Python and R, and an optimized engine that supports general execution graphs. It also supports a rich set of higher-level tools including Spark SQL for SQL and structured data processing, MLlib for machine learning, GraphX for graph processing, and Spark Streaming." [Apache Spark Reference](https://spark.apache.org/docs/latest/)
-
-## GraphFrames
-"GraphFrames is a package for Apache Spark which provides DataFrame-based Graphs. It provides high-level APIs in Scala, Java, and Python. It aims to provide both the functionality of GraphX and extended functionality taking advantage of Spark DataFrames. This extended functionality includes motif finding, DataFrame-based serialization, and highly expressive graph queries." [Graphframes Reference](https://graphframes.github.io/)
-
-## Jupyter Notebook
-"The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more." [Jupyter Reference](http://jupyter.org/)
-
-## Curator
-"Curator now performs many operations on your Elasticsearch indices, from delete to snapshot to shard allocation routing." [Curator Reference](https://www.elastic.co/guide/en/elasticsearch/client/curator/current/command-line.html)
-
-# Enrichments
-Another component that sets the HELK apart from other ELK builds is the different enrichments applied to the data it collects.
-## AlienVault OTX
-"The AlienVault Open Threat Exchange (OTX) is the world’s most authoritative open threat information sharing and analysis network. 
OTX provides access to a global community of threat researchers and security professionals, with more than 50,000 participants in 140 countries, who contribute over four million threat indicators daily." [AlienVault OTX Reference](https://www.alienvault.com/documentation/otx/about-otx.htm)
\ No newline at end of file
diff --git a/wiki/Check-Kafka-topic-ingestion.md b/wiki/Check-Kafka-topic-ingestion.md
deleted file mode 100644
index 1d0589ff..00000000
--- a/wiki/Check-Kafka-topic-ingestion.md
+++ /dev/null
@@ -1,29 +0,0 @@
-There are a few ways that you can accomplish this.
-
-# HELK's Kafka broker container
-
-Access your Kafka broker container by running the following command:
-```
-sudo docker exec -ti helk-kafka-broker bash
-```
-
-Run the `kafka-console-consumer.sh` script available in the container:
-```
-/opt/helk/kafka/bin/kafka-console-consumer.sh --bootstrap-server helk-kafka-broker:9092 --topic winlogbeat --from-beginning
-```
-
-or simply run the script without an interactive shell:
-```
-sudo docker exec -ti helk-kafka-broker /opt/helk/kafka/bin/kafka-console-consumer.sh --bootstrap-server helk-kafka-broker:9092 --topic winlogbeat --from-beginning
-```
-
-# Kafkacat
-It is a generic non-JVM producer and consumer for Apache Kafka >=0.8; think of it as a netcat for Kafka. You can install it by following the [instructions](https://github.com/edenhill/kafkacat#install) from the Kafkacat repo.
-```
-kafkacat -b 10.0.10.100:9092 -t winlogbeat -C
-```
-
-# References
-* [Kafka Consumer Example](https://kafka.apache.org/quickstart#quickstart_consume)
-* [Kafkacat](https://github.com/edenhill/kafkacat)
-
diff --git a/wiki/Check-Winlogbeat-shipping.md b/wiki/Check-Winlogbeat-shipping.md
deleted file mode 100644
index 2a443771..00000000
--- a/wiki/Check-Winlogbeat-shipping.md
+++ /dev/null
@@ -1,6 +0,0 @@
-You can check how your logs are being sent to the HELK by running the following command in your systems (producers):
-```
-winlogbeat.exe -e
-```
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KAFKA-producer1.png]]
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KAFKA-producer2.png]]
\ No newline at end of file
diff --git a/wiki/Create-Plugins-Offline-Package.md b/wiki/Create-Plugins-Offline-Package.md
deleted file mode 100644
index 85f86728..00000000
--- a/wiki/Create-Plugins-Offline-Package.md
+++ /dev/null
@@ -1,44 +0,0 @@
-If you are installing HELK, and the `helk-logstash` extra plugins are still being installed over the Internet, you can use the following steps to export them in a zipped offline package to then be loaded on the system that does not have access to the Internet and is stuck installing plugins.
-
-Remember that you will need to do this in a system where HELK is already installed and the plugins were installed successfully.
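-
-If you want to double-check which plugins that online build actually ended up with before packaging them, you can list them first (a generic example; `logstash-plugin list` is a standard Logstash command):
-
-```
-sudo docker exec -ti helk-logstash bin/logstash-plugin list --verbose
-```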
- -* Access your helk-logstash docker container in the system where HELK was successfully installed already: - -``` -helk@ONLINE-HELK:~$ sudo docker exec -ti helk-logstash bash - -bash-4.2$ - -``` - -* Using the `logstash-plugin` script prepare and export the plugins offline package - -``` -bash-4.2$ bin/logstash-plugin prepare-offline-pack logstash-filter-translate logstash-filter-dns logstash-filter-cidr logstash-filter-geoip logstash-filter-dissect logstash-output-kafka logstash-input-kafka logstash-filter-alter logstash-filter-fingerprint logstash-filter-prune logstash-codec-gzip_lines logstash-codec-netflow logstash-filter-i18n logstash-filter-environment logstash-filter-de_dot logstash-input-wmi logstash-filter-clone -Offline package created at: /usr/share/logstash/logstash-offline-plugins-6.6.1.zip - -You can install it with this command `bin/logstash-plugin install file:///usr/share/logstash/logstash-offline-plugins-6.6.1.zip` - -bash-4.2$ ls /usr/share/logstash/ -bin CONTRIBUTORS data Gemfile.lock LICENSE.txt logstash-core-plugin-api modules output_templates scripts vendor -config cti Gemfile lib logstash-core logstash-offline-plugins-6.6.1.zip NOTICE.TXT pipeline tools x-pack - -bash-4.2$ exit -exit -``` - -* Copy the offline package from your helk-logstash container to your local system - -``` -helk@ONLINE-HELK:~$ sudo docker cp helk-logstash:/usr/share/logstash/logstash-offline-plugins-6.6.1.zip . -helk@ONLINE-HELK:~$ ls -logstash-offline-plugins-6.6.1.zip -``` - -* Copy the `logstash-offline-plugins-6.6.1.zip` to the OFFLINE-ISOLATED (10.0.10.102) system. You should be able to ssh to it. - -``` -helk@ONLINE-HELK:~$ scp logstash-offline-plugins-6.6.1.zip helk@10.0.10.102:/home/helk/ -``` - -Now you should be able to use it in the offline-isolated HELK system \ No newline at end of file diff --git a/wiki/Curator.md b/wiki/Curator.md deleted file mode 100644 index e69de29b..00000000 diff --git a/wiki/Deploy-KSQL-CLI-Locally.md b/wiki/Deploy-KSQL-CLI-Locally.md deleted file mode 100644 index 35928735..00000000 --- a/wiki/Deploy-KSQL-CLI-Locally.md +++ /dev/null @@ -1,161 +0,0 @@ -You can use KSQL CLI to connect to the HELK's KSQL Server from a different system. 
You will have to download the self-managed software Confluent platform and then run `KSQL` - -* Download the self-managed software Confluent platform in a `.tar.gz` format from: https://www.confluent.io/download/#popup_form_3109 -* Decompress the folder: -``` -Robertos-MBP:~ wardog$ -Robertos-MBP:~ wardog$ cd Downloads/ -Robertos-MBP:Downloads wardog$ tar -xvzf confluent-5.1.2-2.11.tar.gz -x confluent-5.1.2/ -x confluent-5.1.2/src/ -x confluent-5.1.2/src/avro-cpp-1.8.0-confluent5.1.2.tar.gz -x confluent-5.1.2/src/librdkafka-0.11.6-confluent5.1.2.tar.gz -x confluent-5.1.2/src/confluent-libserdes-5.1.2.tar.gz -x confluent-5.1.2/src/avro-c-1.8.0-confluent5.1.2.tar.gz -x confluent-5.1.2/lib/ -``` -* Access the KSQL scripts: - -``` -Robertos-MBP:Downloads wardog$ -Robertos-MBP:Downloads wardog$ cd confluent-5.1.2 -Robertos-MBP:confluent-5.1.2 wardog$ -Robertos-MBP:confluent-5.1.2 wardog$ ls -README bin etc lib logs share src -Robertos-MBP:confluent-5.1.2 wardog$ -Robertos-MBP:confluent-5.1.2 wardog$ cd bin/ -Robertos-MBP:bin wardog$ -Robertos-MBP:bin wardog$ ls -confluent kafka-acls kafka-mirror-maker kafka-server-stop schema-registry-start -confluent-hub kafka-api-start kafka-mqtt-run-class kafka-streams-application-reset schema-registry-stop -confluent-rebalancer kafka-avro-console-consumer kafka-mqtt-start kafka-topics schema-registry-stop-service -connect-distributed kafka-avro-console-producer kafka-mqtt-stop kafka-verifiable-consumer security-plugins-run-class -connect-standalone kafka-broker-api-versions kafka-preferred-replica-election kafka-verifiable-producer sr-acl-cli -control-center-3_0_0-reset kafka-configs kafka-producer-perf-test ksql support-metrics-bundle -control-center-3_0_1-reset kafka-console-consumer kafka-reassign-partitions ksql-datagen windows -control-center-console-consumer kafka-console-producer kafka-replica-verification ksql-print-metrics zookeeper-security-migration -control-center-export kafka-consumer-groups kafka-rest-run-class ksql-run-class zookeeper-server-start -control-center-reset kafka-consumer-perf-test kafka-rest-start ksql-server-start zookeeper-server-stop -control-center-run-class kafka-delegation-tokens kafka-rest-stop ksql-server-stop zookeeper-shell -control-center-set-acls kafka-delete-records kafka-rest-stop-service ksql-stop -control-center-start kafka-dump-log kafka-run-class replicator -control-center-stop kafka-log-dirs kafka-server-start schema-registry-run-class -Robertos-MBP:bin wardog$ - -``` - -* Check the options for KSQL: - -``` -Robertos-MBP:bin wardog$ -Robertos-MBP:bin wardog$ ./ksql --help -NAME - ksql - KSQL CLI - -SYNOPSIS - ksql [ --config-file ] [ {-h | --help} ] - [ --output ] - [ --query-row-limit ] - [ --query-timeout ] [--] - -OPTIONS - --config-file - A file specifying configs for Ksql and its underlying Kafka Streams - instance(s). Refer to KSQL documentation for a list of available - configs. 
- - -h, --help - Display help information - - --output - The output format to use (either 'JSON' or 'TABULAR'; can be - changed during REPL as well; defaults to TABULAR) - - --query-row-limit - An optional maximum number of rows to read from streamed queries - - This options value must fall in the following range: value >= 1 - - - --query-timeout - An optional time limit (in milliseconds) for streamed queries - - This options value must fall in the following range: value >= 1 - - - -- - This option can be used to separate command-line options from the - list of arguments (useful when arguments might be mistaken for - command-line options) - - - The address of the Ksql server to connect to (ex: - http://confluent.io:9098) - - This option may occur a maximum of 1 times - - -Robertos-MBP:bin wardog$ -``` - -* Connect to the HELK KSQL Server. You will just need to point to the IP address of your HELK Docker environment over port 8088 - -``` -Robertos-MBP:bin wardog$ -Robertos-MBP:bin wardog$ ./ksql http://192.168.64.138:8088 - - =========================================== - = _ __ _____ ____ _ = - = | |/ // ____|/ __ \| | = - = | ' /| (___ | | | | | = - = | < \___ \| | | | | = - = | . \ ____) | |__| | |____ = - = |_|\_\_____/ \___\_\______| = - = = - = Streaming SQL Engine for Apache Kafka® = - =========================================== - -Copyright 2017-2018 Confluent Inc. - -CLI v5.1.2, Server v5.1.0 located at http://192.168.64.138:8088 - -Having trouble? Type 'help' (case-insensitive) for a rundown of how things work! - -ksql> -``` - -* Verify that you can see the topics available in the HELK Kafka broker - -``` -Robertos-MBP:bin wardog$ -Robertos-MBP:bin wardog$ ./ksql http://192.168.64.138:8088 - - =========================================== - = _ __ _____ ____ _ = - = | |/ // ____|/ __ \| | = - = | ' /| (___ | | | | | = - = | < \___ \| | | | | = - = | . \ ____) | |__| | |____ = - = |_|\_\_____/ \___\_\______| = - = = - = Streaming SQL Engine for Apache Kafka® = - =========================================== - -Copyright 2017-2018 Confluent Inc. - -CLI v5.1.2, Server v5.1.0 located at http://192.168.64.138:8088 - -Having trouble? Type 'help' (case-insensitive) for a rundown of how things work! - -ksql> SHOW TOPICS; - - Kafka Topic | Registered | Partitions | Partition Replicas | Consumers | ConsumerGroups ------------------------------------------------------------------------------------------ - filebeat | false | 1 | 1 | 0 | 0 - SYSMON_JOIN | false | 1 | 1 | 0 | 0 - winlogbeat | false | 1 | 1 | 0 | 0 - winsecurity | false | 1 | 1 | 0 | 0 - winsysmon | false | 1 | 1 | 0 | 0 ------------------------------------------------------------------------------------------ -ksql> -``` \ No newline at end of file diff --git a/wiki/Elasticsearch.md b/wiki/Elasticsearch.md deleted file mode 100644 index 90b7f828..00000000 --- a/wiki/Elasticsearch.md +++ /dev/null @@ -1,100 +0,0 @@ -# Design -[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/ELASTICSEARCH-Design.png]] - -# Settings -## HELK's Heap Size -Heap can be set one of four ways, as detailed below. - -#### 1) Allow HELK to calculate how much to assign. -This is based on the available memory and variables shown in the code block below. -It’s very important to note `available memory`, not the amount of memory the host has. -An example to show why this is critical to understand.. If you have a 100GB RAM server, but the server is actively using 90GBs of RAM - then you will NOT get the max 31GB heap/memory for elasticsearch. 
In this example you would actually end up getting roughly 3 GBs for the heap, because with only 10 GBs of available/free memory, locking up all of the remaining memory could cause drastic issues!
-```
-if available memory >= 1000 MBs and <= 5999 MBs:
-    then set to 1400 MBs
-else if available memory >= 6000 MBs and <= 12999 MBs:
-    then set to 3200 MBs
-else if available memory >= 13000 MBs and <= 16000 MBs:
-    then set to 6500 MBs
-else:
-    if available memory >= 31 GBs:
-        then set to 31 GBs
-    else:
-        set to available memory in GBs
-```
-
-#### 2) Set your own heap
-In order to define your own heap settings, edit, in the file `HELK/docker/helk-elasticsearch/config/jvm.options`, the two lines that begin with
-`#-Xms`
-`#-Xmx`
-Then make sure to restart elasticsearch.
-**Always set the min and max JVM heap size to the same value.
-Also, you will be restarting elasticsearch. 
Therefore your cluster will temporarily be down as the elasticsearch service/database is coming back online**
-**Note: if you are using an (elastic) license you will need to set your ELASTIC_PASSWORD and KIBANA_UI_PASSWORD variables (and logstash password if applicable)**
-Here is how to perform the above:
-```
-# Example config (only showing the beginning lines). Note that these settings may not match your config exactly; the important thing is to have the value under the environment section.
-version: '3.5'
-
-services:
-  helk-elasticsearch:
-    image: docker.elastic.co/elasticsearch/elasticsearch:7.3.1
-    container_name: helk-elasticsearch
-    secrets:
-      - source: elasticsearch.yml
-        target: /usr/share/elasticsearch/config/elasticsearch.yml
-    volumes:
-      - esdata:/usr/share/elasticsearch/data
-      - ./helk-elasticsearch/scripts:/usr/share/elasticsearch/scripts
-      - ./helk-elasticsearch/config/jvm.options:/usr/share/elasticsearch/config/jvm.options
-    entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh
-    environment:
-      - cluster.name=helk-cluster
-      - node.name=helk-1
-      - xpack.license.self_generated.type=basic
-      - xpack.security.enabled=false
-      - "ES_JAVA_OPTS= -Xms16g -Xmx16g"
-    ulimits:
-      memlock:
-        soft: -1
-        hard: -1
-      nproc: 20480
-      nofile:
-        soft: 160000
-        hard: 160000
-    restart: always
-    networks:
-      helk:
-# Rebuild the elasticsearch docker container
-`docker-compose -f HELK/docker/helk-kibana-analysis-basic.yml up --build -d`
-
-```
-
-#### 4) Set at run time using a custom bash variable
-Set a bash variable such as:
-`export ES_JAVA_OPTS="-Xms16g -Xmx16g"`
-Then run the following using your own docker config file:
-`docker-compose -f $PlaceDockerConfigFileNameHere up --build -d`
-**Only use this option if you explicitly need to. Please know what you're getting into ;)**
\ No newline at end of file
diff --git a/wiki/Export-Docker-Images-locally.md b/wiki/Export-Docker-Images-locally.md
deleted file mode 100644
index 61c02788..00000000
--- a/wiki/Export-Docker-Images-locally.md
+++ /dev/null
@@ -1,80 +0,0 @@
-If the system where you are planning to install HELK is isolated from the Internet, you can run HELK on another system that has access to the Internet and then export the built/downloaded images to `.tar` files. You can then load (via `docker load`) those image files on the system that is isolated from the Internet.
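-
-As a rough shortcut (a sketch only; the output path is an assumption, and the per-image commands below remain the canonical way), you could also save every tagged image into one archive:
-
-```
-sudo docker save -o /home/helk/helk-images.tar $(sudo docker images --format '{{.Repository}}:{{.Tag}}')
-```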
- -* List all the images available in the non-isolated system via the docker `images` command - -``` -helk@ubuntu:~$ sudo docker images - -REPOSITORY TAG IMAGE ID CREATED SIZE -cyb3rward0g/helk-jupyter 0.1.1 efa46ecc8d32 2 days ago 2.18GB -confluentinc/cp-ksql-server 5.1.2 f57298019757 6 days ago 514MB -confluentinc/cp-ksql-cli 5.1.2 bd411ce0ba9f 6 days ago 510MB -docker.elastic.co/logstash/logstash 6.6.1 3e7fbb7964ee 11 days ago 786MB -docker.elastic.co/kibana/kibana 6.6.1 b94222148a00 11 days ago 710MB -docker.elastic.co/elasticsearch/elasticsearch 6.6.1 c6ffcb0ee97e 11 days ago 842MB -cyb3rward0g/helk-elastalert 0.2.1 569f588a22fc 3 weeks ago 758MB -cyb3rward0g/helk-kafka-broker 2.1.0 7b3e7f9ce732 2 months ago 388MB -cyb3rward0g/helk-zookeeper 2.1.0 abb732da3e50 2 months ago 388MB -cyb3rward0g/helk-spark-worker 2.4.0 b1545b0582db 2 months ago 579MB -cyb3rward0g/helk-spark-master 2.4.0 70fc61de3445 2 months ago 579MB -cyb3rward0g/helk-nginx 0.0.7 280d044b6719 6 months ago 329MB -``` - -* List all the containers running in the non-isolated system via the docker `ps` command - -``` -helk@ubuntu:~$ sudo docker ps - -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -de048c88dc7f confluentinc/cp-ksql-cli:5.1.2 "/bin/sh" 6 hours ago Up 6 hours helk-ksql-cli -69e06070c14c confluentinc/cp-ksql-server:5.1.2 "/etc/confluent/dock…" 6 hours ago Up 6 hours 0.0.0.0:8088->8088/tcp helk-ksql-server -d57967977c9c cyb3rward0g/helk-kafka-broker:2.1.0 "./kafka-entrypoint.…" 6 hours ago Up 6 hours 0.0.0.0:9092->9092/tcp helk-kafka-broker -4889e917d76d cyb3rward0g/helk-spark-worker:2.4.0 "./spark-worker-entr…" 6 hours ago Up 6 hours helk-spark-worker -c0a29d8b18a7 cyb3rward0g/helk-nginx:0.0.7 "/opt/helk/scripts/n…" 6 hours ago Up 6 hours 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp helk-nginx -6a887d693a31 cyb3rward0g/helk-elastalert:0.2.1 "./elastalert-entryp…" 6 hours ago Up 6 hours helk-elastalert -a32be7a399c7 cyb3rward0g/helk-zookeeper:2.1.0 "./zookeeper-entrypo…" 6 hours ago Up 6 hours 2181/tcp, 2888/tcp, 3888/tcp helk-zookeeper -c636a8a1e8f7 cyb3rward0g/helk-spark-master:2.4.0 "./spark-master-entr…" 6 hours ago Up 6 hours 7077/tcp, 0.0.0.0:8080->8080/tcp helk-spark-master -ef1b8d8015ab cyb3rward0g/helk-jupyter:0.1.1 "./jupyter-entrypoin…" 6 hours ago Up 6 hours 8000/tcp helk-jupyter -bafeeb1587cf docker.elastic.co/logstash/logstash:6.6.1 "/usr/share/logstash…" 6 hours ago Up 6 hours 0.0.0.0:5044->5044/tcp, 0.0.0.0:8531->8531/tcp, 9600/tcp helk-logstash -29b57e5c71e5 docker.elastic.co/kibana/kibana:6.6.1 "/usr/share/kibana/s…" 6 hours ago Up 6 hours 5601/tcp helk-kibana -48499aa83917 docker.elastic.co/elasticsearch/elasticsearch:6.6.1 "/usr/share/elastics…" 6 hours ago Up 6 hours 9200/tcp, 9300/tcp helk-elasticsearch -``` - -* Export images as `tar` files: - -``` -helk@ubuntu:~$ sudo docker save -o /home/helk/helk-ksql-cli.tar confluentinc/cp-ksql-cli:5.1.2 -helk@ubuntu:~$ sudo docker save -o /home/helk/helk-ksql-server.tar confluentinc/cp-ksql-server:5.1.2 -helk@ubuntu:~$ sudo docker save -o /home/helk/helk-kafka-broker.tar cyb3rward0g/helk-kafka-broker:2.1.0 -helk@ubuntu:~$ sudo docker save -o /home/helk/helk-spark-worker.tar cyb3rward0g/helk-spark-worker:2.4.0 -helk@ubuntu:~$ sudo docker save -o /home/helk/helk-nginx.tar cyb3rward0g/helk-nginx:0.0.7 -helk@ubuntu:~$ sudo docker save -o /home/helk/helk-elastalert.tar cyb3rward0g/helk-elastalert:0.2.1 -helk@ubuntu:~$ sudo docker save -o /home/helk/helk-zookeeper.tar cyb3rward0g/helk-zookeeper:2.1.0 -helk@ubuntu:~$ sudo docker save -o 
/home/helk/helk-spark-master.tar cyb3rward0g/helk-spark-master:2.4.0
-helk@ubuntu:~$ sudo docker save -o /home/helk/helk-logstash.tar docker.elastic.co/logstash/logstash:6.6.1
-helk@ubuntu:~$ sudo docker save -o /home/helk/helk-kibana.tar docker.elastic.co/kibana/kibana:6.6.1
-helk@ubuntu:~$ sudo docker save -o /home/helk/helk-elasticsearch.tar docker.elastic.co/elasticsearch/elasticsearch:6.6.1
-helk@ubuntu:~$ sudo docker save -o /home/helk/helk-jupyter.tar cyb3rward0g/helk-jupyter:0.1.1
-```
-* Check images:
-
-```
-helk@ubuntu:~$ ls -l
-
-total 10810584
-drwxrwxr-x 9 helk helk       4096 Feb 24 21:01 HELK
--rw------- 1 root root  778629632 Feb 25 03:07 helk-elastalert.tar
--rw------- 1 root root  854236160 Feb 25 03:12 helk-elasticsearch.tar
--rw------- 1 root root 2254629888 Feb 25 03:14 helk-jupyter.tar
--rw------- 1 root root  395871744 Feb 25 03:04 helk-kafka-broker.tar
--rw------- 1 root root  767277568 Feb 25 03:11 helk-kibana.tar
--rw------- 1 root root  521177600 Feb 25 03:00 helk-ksql-cli.tar
--rw------- 1 root root  525901824 Feb 25 03:02 helk-ksql-server.tar
--rw------- 1 root root  810578944 Feb 25 03:09 helk-logstash.tar
--rw------- 1 root root  335945728 Feb 25 03:06 helk-nginx.tar
--rw------- 1 root root  587616768 Feb 25 03:08 helk-spark-master.tar
--rw------- 1 root root  587616768 Feb 25 03:05 helk-spark-worker.tar
--rw------- 1 root root  395854848 Feb 25 03:08 helk-zookeeper.tar
-
-helk@ubuntu:~$
-```
\ No newline at end of file
diff --git a/wiki/Home.md b/wiki/Home.md
deleted file mode 100644
index 02c846e2..00000000
--- a/wiki/Home.md
+++ /dev/null
@@ -1,14 +0,0 @@
-
-# What is HELK?
-HELK is an ELK (Elasticsearch, Logstash & Kibana) stack with advanced hunting analytic capabilities provided by the implementation of Spark & Graphframes technologies. The Hunting ELK or simply the HELK is one of the first public builds that enables data science features to an ELK stack for free. In addition, it comes with a Jupyter Notebook integration for prototyping in Big Data/Machine learning use cases. This stack provides a full-text search engine mixed with great visualizations, graph relational queries and advanced analytics.
-
-# Why HELK?
-Nowadays, enabling the right event logging and centralizing the collection of different data sources is finally becoming a basic security standard. This allows organizations to not just increase the level of visibility from an endpoint and network perspective, but to adopt new concepts within their security teams such as threat hunting. Even though it might seem that collecting a lot of data is all a hunt team needs to be successful, there are several challenges that hunters face when using large, unstructured and sometimes incomplete data. One of these challenges is to make sense of the disparate data sources in an easy and consistent way when trying to effectively detect adversarial techniques.
-
-ELK stacks have already been adopted considerably by small and large organizations for data ingestion, storage and visualization. Therefore, using it as a main structure with Spark and GraphFrames on top of it allows hunt teams to effectively take their hunt skills and program to the next level. This approach is affordable, scalable, and can be used during research or any other engagement where blue and red teams meet.
-
-# When and where do I use HELK?
-HELK was built primarily for research, but due to its flexible design, it can be deployed in larger environments with the right configurations and scalable infrastructure. 
-HELK was built primarily for research, but due to its flexible design, it can be deployed in larger environments with the right configurations and scalable infrastructure. You can go from simply searching for a specific string to creating advanced graph queries and applying algorithms to the data stored in an Elasticsearch database. Therefore, there are a variety of use cases that can be prototyped with the HELK. The main implementation of this project is Threat Hunting (Active Defense).
-
-# How do I use HELK?
-If you have used an ELK stack before or followed any of the ["Chronicles of a Threat Hunter"](https://cyberwardog.blogspot.com/) series by [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g), you will find the HELK pretty easy to follow. The new data science features will be explained in more detail in the HOW TO section of this wiki. Also, stay tuned for future blog posts on how to use the new HELK capabilities. Follow [@THE_HELK](https://twitter.com/THE_HELK) & [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g) for any updates.
\ No newline at end of file
diff --git a/wiki/Installation.md b/wiki/Installation.md
deleted file mode 100644
index ad7c35ce..00000000
--- a/wiki/Installation.md
+++ /dev/null
@@ -1,272 +0,0 @@
-# Requirements (Please Read Carefully)
-* **OS Name:**
-  * Ubuntu 18.04 (preferred)
-  * HELK uses the official Docker Community Edition (CE) bash script (Edge Version) to install Docker for you. The Docker CE Edge script supports the following distros: **ubuntu**, **debian**, **raspbian**, **centos**, and **fedora**.
-  * You can see the specific distro versions supported in the script [here](https://get.docker.com/).
-  * If you have Docker & Docker-Compose already installed in your system, make sure you uninstall them to avoid old, incompatible versions. Let HELK install Docker for you via the official Docker CE Edge script.
-* **Processor/OS Architecture:**
-  * 64-bit, also known as x64, x86_64, AMD64 and Intel 64.
-  * FYI: older processors don't support the SSE instructions needed to start ML on Elasticsearch. Since version 6.1, Elastic has been compiling the ML programs on the assumption that SSE4.2 instructions are available (see: https://github.com/Cyb3rWard0g/HELK/issues/321 and https://discuss.elastic.co/t/failed-to-start-machine-learning-on-elasticsearch-7-0-0/178216/7)
-* **Network Connection:** NAT or Bridge
-* **RAM:** There are four options, and the following are minimum requirements (include more if you are able).
-  * **Option 1: 5GB** includes `KAFKA + KSQL + ELK + NGINX`
-  * **Option 2: 5GB** includes `KAFKA + KSQL + ELK + NGINX + ELASTALERT`
-  * **Option 3: 7GB** includes `KAFKA + KSQL + ELK + NGINX + SPARK + JUPYTER`
-  * **Option 4: 8GB** includes `KAFKA + KSQL + ELK + NGINX + SPARK + JUPYTER + ELASTALERT`
-* **Cores:** 4 (minimum)
-* **Disk:** 25GB for testing purposes and 100GB+ for production (minimum)
-* **Applications:**
-  * Docker: 18.06.1-ce+ & Docker-Compose (HELK INSTALLS THIS FOR YOU)
-  * [Winlogbeat](https://www.elastic.co/downloads/beats/winlogbeat) running on your endpoints or on a centralized WEF server (that your endpoints are forwarding to).
-  * You can install Winlogbeat by following one of [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g)'s posts [here](https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_87.html).
-  * The [Winlogbeat config](https://github.com/Cyb3rWard0g/HELK/blob/master/winlogbeat/winlogbeat.yml) recommended by the HELK uses the [Kafka output plugin](https://www.elastic.co/guide/en/beats/winlogbeat/current/kafka-output.html) and already points to the right ports with recommended options. You will just have to add your HELK's IP address, as in the sketch right after this list.
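-
-For reference, a minimal sketch of that output section once the address is filled in (the IP `192.168.64.138` is just a placeholder reused from the installation examples below; substitute your own HELK IP):
-
-```
-output.kafka:
-  # Placeholder address - replace with your HELK host's IP
-  hosts: ["192.168.64.138:9092"]
-  topic: "winlogbeat"
-  max_retries: 2
-  max_message_bytes: 1000000
-```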
-
-# HELK Download
-Run the following command to clone the HELK repo via git.
-```
-git clone https://github.com/Cyb3rWard0g/HELK.git
-```
-Change your current directory location to the new HELK directory, and run the **helk_install.sh** bash script as root.
-```
-cd HELK/docker
-sudo ./helk_install.sh
-```
-# HELK Install
-In order to make the installation of the HELK easy for everyone, the project comes with an install script named **helk_install.sh**. This script builds and runs everything you need for HELK automatically. During the installation process, the script will allow you to set up the following:
-* Set the HELK's build option. For this document, we are going to use option 2 (ELK + KSQL + Elastalert + Spark + Jupyter)
-* Set the Kibana user's password. The default user is **helk**
-* Set the HELK's IP. By default you can confirm that you want to use your HOST IP address for the HELK, unless you want to use a different one. Press \[Return\] or let the script continue on its own (30-second sleep).
-* Set the HELK's license subscription. By default the HELK has the **basic** subscription selected. You can set it to **trial** if you want. If you want to learn more about subscriptions, go [here](https://www.elastic.co/subscriptions)
-  * If the license is set to **trial**, HELK asks you to set the password for the **elastic** account.
-```
-helk@ubuntu:~$
-helk@ubuntu:~$ ls
-HELK
-helk@ubuntu:~$ cd HELK/docker/
-helk@ubuntu:~/HELK/docker$ sudo ./helk_install.sh
-
-**********************************************
-**          HELK - THE HUNTING ELK          **
-**                                          **
-** Author: Roberto Rodriguez (@Cyb3rWard0g) **
-** HELK build version: v0.1.7-alpha02262019 **
-** HELK ELK version: 6.6.1                  **
-** License: GPL-3.0                         **
-**********************************************
-
-[HELK-INSTALLATION-INFO] HELK being hosted on a Linux box
-[HELK-INSTALLATION-INFO] Available Memory: 12463 MBs
-[HELK-INSTALLATION-INFO] You're using ubuntu version xenial
-
-*****************************************************
-*      HELK - Docker Compose Build Choices          *
-*****************************************************
-
-1. KAFKA + KSQL + ELK + NGNIX + ELASTALERT
-2. KAFKA + KSQL + ELK + NGNIX + ELASTALERT + SPARK + JUPYTER
-
-Enter build choice [ 1 - 2]: 2
-[HELK-INSTALLATION-INFO] HELK build set to 2
-[HELK-INSTALLATION-INFO] Set HELK elastic subscription (basic or trial): basic
-[HELK-INSTALLATION-INFO] Set HELK IP. Default value is your current IP: 192.168.64.138
-[HELK-INSTALLATION-INFO] Set HELK Kibana UI Password: hunting
-[HELK-INSTALLATION-INFO] Verify HELK Kibana UI Password: hunting
-[HELK-INSTALLATION-INFO] Docker already installed
-[HELK-INSTALLATION-INFO] Making sure you assigned enough disk space to the current Docker base directory
-[HELK-INSTALLATION-INFO] Available Docker Disk: 67 GBs
-[HELK-INSTALLATION-INFO] Installing docker-compose..
-[HELK-INSTALLATION-INFO] Checking local vm.max_map_count variable and setting it to 4120294
-[HELK-INSTALLATION-INFO] Building & running HELK from helk-kibana-notebook-analysis-basic.yml file..
-[HELK-INSTALLATION-INFO] Waiting for some services to be up .....
-....
-......
-```
-# Monitor HELK Installation Logs (Always)
-Once the installation kicks in, it will start showing you pre-defined messages about the installation, but not all the details of what is actually happening in the background. It is designed that way to keep your main screen clean and let you know where it is in the installation process.
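-
-If the `Available Memory` or `Available Docker Disk` values reported above look off, you can confirm what the host actually has with standard tools before continuing; a quick check (assuming Docker's default data directory of `/var/lib/docker`):
-
-```
-helk@ubuntu:~$ free -m                  # available RAM in MBs
-helk@ubuntu:~$ df -h /var/lib/docker    # disk space backing Docker
-```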
-
-What I recommend doing every time is to open another shell and monitor the HELK installation logs with the **tail** command, pointing it to the **/var/log/helk-install.log** file that the **helk_install** script creates as soon as it runs. This log file is available on your local host even if you are deploying the HELK via Docker (I want to make sure it is clear that it is a local file).
-```
-helk@HELK:~$ tail -f /var/log/helk-install.log
-
-Creating network "docker_helk" with driver "bridge"
-Creating volume "docker_esdata" with local driver
-Pulling helk-elasticsearch (docker.elastic.co/elasticsearch/elasticsearch:6.6.1)...
-6.6.1: Pulling from elasticsearch/elasticsearch
-Pulling helk-kibana (docker.elastic.co/kibana/kibana:6.6.1)...
-6.6.1: Pulling from kibana/kibana
-Pulling helk-logstash (docker.elastic.co/logstash/logstash:6.6.1)...
-6.6.1: Pulling from logstash/logstash
-Pulling helk-jupyter (cyb3rward0g/helk-jupyter:0.1.2)...
-0.1.2: Pulling from cyb3rward0g/helk-jupyter
-Pulling helk-nginx (cyb3rward0g/helk-nginx:0.0.7)...
-0.0.7: Pulling from cyb3rward0g/helk-nginx
-Pulling helk-spark-master (cyb3rward0g/helk-spark-master:2.4.0-a)...
-2.4.0-a: Pulling from cyb3rward0g/helk-spark-master
-Pulling helk-spark-worker (cyb3rward0g/helk-spark-worker:2.4.0-a)...
-2.4.0-a: Pulling from cyb3rward0g/helk-spark-worker
-Pulling helk-zookeeper (cyb3rward0g/helk-zookeeper:2.1.0)...
-2.1.0: Pulling from cyb3rward0g/helk-zookeeper
-Pulling helk-kafka-broker (cyb3rward0g/helk-kafka-broker:2.1.0)...
-2.1.0: Pulling from cyb3rward0g/helk-kafka-broker
-Pulling helk-ksql-server (confluentinc/cp-ksql-server:5.1.2)...
-5.1.2: Pulling from confluentinc/cp-ksql-server
-Pulling helk-ksql-cli (confluentinc/cp-ksql-cli:5.1.2)...
-5.1.2: Pulling from confluentinc/cp-ksql-cli
-Pulling helk-elastalert (cyb3rward0g/helk-elastalert:0.2.1)...
-0.2.1: Pulling from cyb3rward0g/helk-elastalert
-Creating helk-elasticsearch ... done
-Creating helk-kibana ... done
-Creating helk-logstash ... done
-Creating helk-spark-master ... done
-Creating helk-elastalert ... done
-Creating helk-zookeeper ... done
-Creating helk-jupyter ... done
-Creating helk-spark-worker ... done
-Creating helk-kafka-broker ... done
-Creating helk-nginx ... done
-Creating helk-ksql-server ... done
-Creating helk-ksql-cli ...
done -``` -Once you see that the containers have been created you can check all the containers running by executing the following: - -``` -helk@HELK:~$ sudo docker ps - -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -968576241e9c confluentinc/cp-ksql-server:5.1.2 "/etc/confluent/dock…" 28 minutes ago Up 26 minutes 0.0.0.0:8088->8088/tcp helk-ksql-server -154593559d13 cyb3rward0g/helk-kafka-broker:2.1.0 "./kafka-entrypoint.…" 28 minutes ago Up 26 minutes 0.0.0.0:9092->9092/tcp helk-kafka-broker -d883541a64f1 cyb3rward0g/helk-nginx:0.0.7 "/opt/helk/scripts/n…" About an hour ago Up 26 minutes 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp helk-nginx -527ef236543a cyb3rward0g/helk-spark-worker:2.4.0-a "./spark-worker-entr…" About an hour ago Up 26 minutes helk-spark-worker -27cfaf7a8e84 cyb3rward0g/helk-jupyter:0.1.2 "./jupyter-entrypoin…" About an hour ago Up 26 minutes 8000/tcp, 8888/tcp helk-jupyter -75002248e916 cyb3rward0g/helk-zookeeper:2.1.0 "./zookeeper-entrypo…" About an hour ago Up 26 minutes 2181/tcp, 2888/tcp, 3888/tcp helk-zookeeper -ee0120167ffa cyb3rward0g/helk-elastalert:0.2.1 "./elastalert-entryp…" About an hour ago Up 26 minutes helk-elastalert -4dc2722cdd53 cyb3rward0g/helk-spark-master:2.4.0-a "./spark-master-entr…" About an hour ago Up 26 minutes 7077/tcp, 0.0.0.0:8080->8080/tcp helk-spark-master -9c1eb230b0ff docker.elastic.co/logstash/logstash:6.6.1 "/usr/share/logstash…" About an hour ago Up 26 minutes 0.0.0.0:5044->5044/tcp, 0.0.0.0:8531->8531/tcp, 9600/tcp helk-logstash -f018f16d9792 docker.elastic.co/kibana/kibana:6.6.1 "/usr/share/kibana/s…" About an hour ago Up 26 minutes 5601/tcp helk-kibana -6ec5779e9e01 docker.elastic.co/elasticsearch/elasticsearch:6.6.1 "/usr/share/elastics…" About an hour ago Up 26 minutes 9200/tcp, 9300/tcp helk-elasticsearch - -``` - -If you want to monitor the resources being utilized (Memory, CPU, etc), you can run the following: -``` -helk@HELK:~$ sudo docker stats --all - -CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS -ba46d256ee18 helk-ksql-cli 0.00% 0B / 0B 0.00% 0B / 0B 0B / 0B 0 -968576241e9c helk-ksql-server 1.43% 242MiB / 12.62GiB 1.87% 667kB / 584kB 96.1MB / 73.7kB 29 -154593559d13 helk-kafka-broker 2.83% 318.7MiB / 12.62GiB 2.47% 1.47MB / 1.6MB 50.7MB / 2.01MB 67 -d883541a64f1 helk-nginx 0.10% 3.223MiB / 12.62GiB 0.02% 14.7MB / 14.8MB 9.35MB / 12.3kB 5 -527ef236543a helk-spark-worker 0.43% 177.7MiB / 12.62GiB 1.38% 19.5kB / 147kB 37.1MB / 32.8kB 28 -27cfaf7a8e84 helk-jupyter 0.12% 45.42MiB / 12.62GiB 0.35% 1.64kB / 0B 66.3MB / 733kB 9 -75002248e916 helk-zookeeper 0.26% 62.6MiB / 12.62GiB 0.48% 150kB / 118kB 2.75MB / 172kB 23 -ee0120167ffa helk-elastalert 2.60% 40.97MiB / 12.62GiB 0.32% 12MB / 17.4MB 38.3MB / 8.19kB 1 -4dc2722cdd53 helk-spark-master 0.50% 187.2MiB / 12.62GiB 1.45% 148kB / 17.8kB 52.3MB / 32.8kB 28 -9c1eb230b0ff helk-logstash 15.96% 1.807GiB / 12.62GiB 14.32% 871kB / 110MB 165MB / 2.95MB 62 -f018f16d9792 helk-kibana 2.73% 179.1MiB / 12.62GiB 1.39% 3.71MB / 17.6MB 250MB / 4.1kB 13 -6ec5779e9e01 helk-elasticsearch 12.56% 2.46GiB / 12.62GiB 19.50% 130MB / 15.8MB 293MB / 226MB 61 -``` - -You should also monitor the logs of each container while they are being initialized: - -Just run the following: - -``` -helk@HELK:~$ sudo docker logs --follow helk-elasticsearch - -[HELK-ES-DOCKER-INSTALLATION-INFO] Setting ES_JAVA_OPTS to -Xms1200m -Xmx1200m -XX:-UseConcMarkSweepGC -XX:-UseCMSInitiatingOccupancyOnly -XX:+UseG1GC -[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elastic license to basic 
-[HELK-ES-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.. -OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release. -OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release. -[2019-03-16T17:13:58,710][INFO ][o.e.e.NodeEnvironment ] [helk-1] using [1] data paths, mounts [[/usr/share/elasticsearch/data (/dev/sda1)]], net usable_space [60.7gb], net total_space [72.7gb], types [ext4] -[2019-03-16T17:13:58,722][INFO ][o.e.e.NodeEnvironment ] [helk-1] heap size [1.1gb], compressed ordinary object pointers [true] -[2019-03-16T17:13:58,728][INFO ][o.e.n.Node ] [helk-1] node name [helk-1], node ID [En7HptZKTNmv4R6-Qb99UA] -[2019-03-16T17:13:58,729][INFO ][o.e.n.Node ] [helk-1] version[6.6.1], pid[12], build[default/tar/1fd8f69/2019-02-13T17:10:04.160291Z], OS[Linux/4.4.0-116-generic/amd64], JVM[Oracle Corporation/OpenJDK 64-Bit Server VM/11.0.1/11.0.1+13] -[2019-03-16T17:13:58,734][INFO ][o.e.n.Node ] [helk-1] JVM arguments [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Djava.io.tmpdir=/tmp/elasticsearch-7720073513605769733, -XX:+HeapDumpOnOutOfMemoryError, -XX:HeapDumpPath=data, -XX:ErrorFile=logs/hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m, -Djava.locale.providers=COMPAT, -XX:UseAVX=2, -Des.cgroups.hierarchy.override=/, -Xms1200m, -Xmx1200m, -XX:-UseConcMarkSweepGC, -XX:-UseCMSInitiatingOccupancyOnly, -XX:+UseG1GC, -Des.path.home=/usr/share/elasticsearch, -Des.path.conf=/usr/share/elasticsearch/config, -Des.distribution.flavor=default, -Des.distribution.type=tar] -[2019-03-16T17:14:03,510][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [aggs-matrix-stats] -[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [analysis-common] -[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [ingest-common] -[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [lang-expression] -[2019-03-16T17:14:03,517][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [lang-mustache] -[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [lang-painless] -[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [mapper-extras] -[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [parent-join] -[2019-03-16T17:14:03,518][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [percolator] -[2019-03-16T17:14:03,519][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [rank-eval] -[2019-03-16T17:14:03,519][INFO ][o.e.p.PluginsService ] [helk-1] loaded module [reindex] -.. -.... 
-```
-
-All you need to do now for the other ones is just replace helk-elasticsearch with the specific container's name:
-```
-sudo docker logs --follow <container-name>
-```
-
-Remember that you can also get a shell inside your docker containers by running the following:
-```
-sudo docker exec -ti helk-elasticsearch bash
-root@7a9d6443a4bf:/opt/helk/scripts#
-```
-
-# Final Details
-Once your HELK installation ends, you will be presented with the information that you will need to access the HELK and all its other components.
-
-You will get the following information:
-
-```
-***********************************************************************************
-** [HELK-INSTALLATION-INFO] HELK WAS INSTALLED SUCCESSFULLY                      **
-** [HELK-INSTALLATION-INFO] USE THE FOLLOWING SETTINGS TO INTERACT WITH THE HELK **
-***********************************************************************************
-
-HELK KIBANA URL: https://192.168.64.138
-HELK KIBANA USER: helk
-HELK KIBANA PASSWORD: hunting
-HELK SPARK MASTER UI: http://192.168.64.138:8080
-HELK JUPYTER SERVER URL: http://192.168.64.138/jupyter
-HELK JUPYTER CURRENT TOKEN: e8e83f5c9fe93882a970ce352d566adfb032b0975549449c
-HELK ZOOKEEPER: 192.168.64.138:2181
-HELK KSQL SERVER: 192.168.64.138:8088
-
-IT IS HUNTING SEASON!!!!!
-```
-| Type | Description |
-|--------|---------|
-| HELK KIBANA URL | URL to access the Kibana server. You will need to copy it and paste it in your browser to access Kibana. Make sure you use **https**, since Kibana is running behind NGINX via port 443 with a self-signed certificate |
-| HELK KIBANA USER & PASSWORD | Credentials used to access Kibana |
-| HELK SPARK MASTER UI | URL to access the Spark Master server (Spark Standalone). That server manages the Spark Workers used during execution of code by Jupyter Notebooks. Spark Master acts as a proxy to Spark Workers and running applications |
-| HELK JUPYTER SERVER URL | URL to access the Jupyter notebook server |
-| HELK JUPYTER CURRENT TOKEN | Jupyter token to log in with instead of providing a password |
-| ZOOKEEPER | URL for the Kafka cluster's Zookeeper |
-| KSQL SERVER | URL to access the KSQL server and send SQL queries to the data in the Kafka brokers |
-
-
-
-# Access HELK Web Interface
-Open your preferred browser, go to your HELK's IP address, and enter the HELK credentials **(helk:hunting)**. By default, you will be presented with Kibana's Home page. Once there, you can explore the different features that Kibana provides. I personally like to check the **Index Patterns** first and then **Discovery**.
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Home.png]]
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-IndexPatterns.png]]
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Discovery.png]]
-
-# Access Jupyter Notebook Interface
-HELK now comes with a Jupyter notebook server that spawns a Jupyter Lab extension.
-
-Use the HELK JUPYTER SERVER URL and you will get the following prompt:
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/JUPYTER-login.png]]
-
-You will then be sent to the Jupyter Lab menu:
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/JUPYTER-menu.png]]
-
-You can double-click on one of the notebooks and start playing with them:
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/JUPYTER-notebook.png]]
-
-
-I hope this document was helpful for deploying your own HELK.
Let us know if you have any questions, or if you think that this document can be improved. Feel free to create an **issue** for updates to this procedure. A more detailed **HOW-TO** will be developed soon to go into more detail on how to use all the HELK components.
-
-IT IS HUNTING SEASON!!
\ No newline at end of file
diff --git a/wiki/Kafka.md b/wiki/Kafka.md
deleted file mode 100644
index c53f4ae5..00000000
--- a/wiki/Kafka.md
+++ /dev/null
@@ -1,73 +0,0 @@
-# Design
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KAFKA-Design.png]]
-
-# Kafka Ecosystem
-## Producers
-Producers publish data to the topics of their choice. The producer is responsible for choosing which record to assign to which partition within the topic.
-
-HELK currently accepts data sent to a few topics, such as `winlogbeat` for Windows systems and `filebeat` for Linux or OSX systems. From a Windows perspective, it is common to have **Winlogbeat** (Log Shipper/Producer) installed on all the endpoints. However, it is recommended to use solutions such as [Windows Event Forwarding (WEF)](https://docs.microsoft.com/en-us/windows/security/threat-protection/use-windows-event-forwarding-to-assist-in-intrusion-detection) servers to collect and centralize your logs, and then have Winlogbeat or NXLog installed on them to ship the logs to your HELK Kafka broker.
-
-When using **Winlogbeat** you can use the following config:
-```
-winlogbeat.event_logs:
-  - name: Application
-    ignore_older: 30m
-  - name: Security
-    ignore_older: 30m
-  - name: System
-    ignore_older: 30m
-  - name: Microsoft-windows-sysmon/operational
-    ignore_older: 30m
-  - name: Microsoft-windows-PowerShell/Operational
-    ignore_older: 30m
-    event_id: 4103, 4104
-  - name: Windows PowerShell
-    event_id: 400,600
-    ignore_older: 30m
-  - name: Microsoft-Windows-WMI-Activity/Operational
-    event_id: 5857,5858,5859,5860,5861
-
-output.kafka:
-  hosts: ["<HELK-IP>:9092"]
-  topic: "winlogbeat"
-  max_retries: 2
-  max_message_bytes: 1000000
-```
-You can check the how-to section of this wiki to learn how to verify that your Winlogbeat log shipper is sending data to a Kafka broker.
-
-## Kafka Broker
-HELK uses a Kafka cluster composed of one broker (not really a cluster, but it is a good start for hosting it in a lab environment). If you add more brokers to the cluster, each broker would have its own ID number and topic log partitions. Connecting to one broker bootstraps a client to the entire Kafka cluster.
-
-The HELK broker has its own `server.properties` file. You can find it [here](https://github.com/Cyb3rWard0g/HELK/blob/master/docker/helk-kafka-broker/server.properties). Some of the basic settings that you need to understand are the following:
-
-| Name | Description | Type | Value |
-|--------|---------|-------|-------|
-| broker.id | The broker id for this server. If unset, a unique broker id will be generated. To avoid conflicts between zookeeper generated broker id's and user configured broker id's, generated broker ids start from reserved.broker.max.id + 1. | int | 1 |
-| listeners | Listener List - Comma-separated list of URIs we will listen on and the listener names. Specify hostname as 0.0.0.0 to bind to all interfaces. For the docker deployment, it is set to the kafka broker container name and used to communicate with other containers inside of the docker environment ONLY | string | PLAINTEXT://helk-kafka-broker:9092 |
-| advertised.listeners | Listeners to publish to ZooKeeper for clients to use, if different than the `listeners` config property. In IaaS environments, this may need to be different from the interface to which the broker binds. For the docker deployment, this is the IP address of the machine hosting your docker containers. This will be the IP address that your producers can talk to from outside of the docker environment. When the broker starts, the current value is updated automatically from the environment variable ADVERTISED_LISTENER | string | PLAINTEXT://HELKIP:9092 |
-| log.dirs | The directories in which the log data is kept. If not set, the value in log.dir is used | string | /tmp/kafka-logs |
-| auto.create.topics.enable | Enable auto creation of topic on the server. This is disabled in HELK to avoid any producers creating new topics | boolean | false |
-| log.retention.hours | The minimum age of a log file to be eligible for deletion due to age | int | 4 |
-
-## Zookeeper
-Kafka needs ZooKeeper to work efficiently in the cluster. Kafka uses Zookeeper to do leadership election of Kafka Broker and Topic Partition pairs. Kafka uses Zookeeper to manage service discovery for the Kafka Brokers that form the cluster. Zookeeper sends changes of the topology to Kafka, so each node in the cluster knows when a new broker joined, a broker died, a topic was removed or a topic was added, etc. Zookeeper provides an in-sync view of the Kafka cluster configuration.
-
-## HELK Kafka Topics
-The following topics are created automatically:
-
-| topic | Description |
-|--------|---------|
-| winlogbeat | Main topic that stores raw event log data sent from endpoints with winlogbeat installed. |
-| SYSMON_JOIN | Topic that stores Windows Sysmon events that have been enriched by KSQL commands to join **ProcessCreate** (event 1) and **NetworkConnect** (event 3) by their `ProcessGUID` values. |
-| winsysmon | Topic used for Logstash to send transformed/parsed Windows Sysmon event data back. |
-| winsecurity | Topic used for Logstash to send transformed/parsed Windows Security event data back. |
-| filebeat | Topic that stores OSQuery data |
-
-# How-To
-* [Check Kafka topic ingestion](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Kafka-topic-ingestion)
-* [Check Winlogbeat shipping](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Winlogbeat-shipping)
-* [Update Kafka Broker IP](https://github.com/Cyb3rWard0g/HELK/wiki/Update-Kafka-Broker-IP)
-
-# References
-* [Kafka Producer API](http://kafka.apache.org/documentation.html#producerapi)
-* [Kafka Architecture](http://cloudurable.com/blog/kafka-architecture/index.html)
\ No newline at end of file
diff --git a/wiki/Kibana.md b/wiki/Kibana.md
deleted file mode 100644
index 8262f85a..00000000
--- a/wiki/Kibana.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# Design
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Design.png]]
-
-# Visualize your logs
-## Discover
-Make sure you have logs being sent to your HELK first (at least Windows Security and Sysmon events). Then, go to `https://<HELK-IP>` in your preferred browser. If you don't have logs being sent to your HELK pipe (Kafka), or they are just starting to get processed by Kafka and Logstash, you might get the message **"No matching indices found: No indices match pattern "logs-endpoint-winevent-sysmon-*""**
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-NoIndicesPattern.png]]
-
-That is normal at the beginning. Refresh your screen a couple of times in order to start visualizing your logs.
-
-Currently, the HELK automatically creates 7 index patterns for you and sets **logs-endpoint-winevent-sysmon-*** as your default one:
-* "logs-*"
-* "logs-endpoint-winevent-sysmon-*"
-* "logs-endpoint-winevent-security-*"
-* "logs-endpoint-winevent-application-*"
-* "logs-endpoint-winevent-system-*"
-* "logs-endpoint-winevent-powershell-*"
-* "logs-endpoint-winevent-wmiactivity-*"
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-Discovery.png]]
-
-# Dashboards
-Currently, the HELK comes with 3 dashboards:
-
-## Global_Dashboard
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-GlobalDashboard.png]]
-
-## Network_Dashboard
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-NetworkDashboard.png]]
-
-## Sysmon_Dashboard
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/KIBANA-SysmonDashboard.png]]
-
-# Monitoring Views (X-Pack Basic Free License)
-
-## Kibana Initial Overview
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Kibana-Overview.png]]
-
-## Elasticsearch Overview
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Elasticsearch-Overview.png]]
-
-## Logstash Overview
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Logstash-Overview.png]]
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/MONITORING-Logstash-Nodes-Overview.png]]
-
-
diff --git a/wiki/Load-Local-Docker-Images.md b/wiki/Load-Local-Docker-Images.md
deleted file mode 100644
index fdf6e948..00000000
--- a/wiki/Load-Local-Docker-Images.md
+++ /dev/null
@@ -1,79 +0,0 @@
-If you followed [this document](https://github.com/Cyb3rWard0g/HELK/wiki/Export-Docker-Images-locally) to export your docker images locally, you should be ready to load them into an isolated system that cannot access the Docker Hub registry.
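-
-One optional step before moving the files: capture checksums on the source system so you can verify the copies after the transfer. A minimal sketch (adjust the paths if, as below, the images land in /tmp/ on the isolated side):
-
-```
-NON-ISOLATED@helk:~$ sha256sum /home/helk/*.tar > helk-images.sha256
-ISOLATED@helk:~$ sha256sum -c helk-images.sha256
-```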
-
-Copy the images to the isolated (10.0.10.102) system:
-
-```
-NON-ISOLATED@helk:~$ for f in /home/helk/*.tar; do scp $f helk@10.0.10.102:/tmp/; done
-
-helk-spark-worker.tar 100% 560MB 24.4MB/s 00:23
-helk-ksql-server.tar 100% 502MB 29.5MB/s 00:17
-helk-logstash.tar 100% 773MB 28.6MB/s 00:27
-helk-ksql-cli.tar 100% 497MB 21.6MB/s 00:23
-helk-elasticsearch.tar 100% 815MB 29.1MB/s 00:28
-```
-
-Check that the images exist on the isolated system:
-
-```
-ISOLATED@helk:~$ ls /tmp/
-helk-elastalert.tar helk-jupyter.tar helk-kibana.tar helk-ksql-server.tar helk-nginx.tar helk-spark-worker.tar helk-elasticsearch.tar helk-kafka-broker.tar helk-ksql-cli.tar helk-logstash.tar helk-spark-master.tar helk-zookeeper.tar
-```
-Load the images with the `docker load` command:
-
-```
-ISOLATED@helk:~$ for i in /tmp/*.tar; do sudo docker load --input $i; done
-
-f49017d4d5ce: Loading layer [==================================================>] 85.96MB/85.96MB
-8f2b771487e9: Loading layer [==================================================>] 15.87kB/15.87kB
-ccd4d61916aa: Loading layer [==================================================>] 10.24kB/10.24kB
-c01d74f99de4: Loading layer [==================================================>] 5.632kB/5.632kB
-268a067217b5: Loading layer [==================================================>] 3.072kB/3.072kB
-831fff32e4f2: Loading layer [==================================================>] 65.02kB/65.02kB
-c89f4fbc01f8: Loading layer [==================================================>] 103.4MB/103.4MB
-adfd094c5517: Loading layer [==================================================>] 3.245MB/3.245MB
-c73538215c3e: Loading layer [==================================================>] 567.6MB/567.6MB
-080f01d1ecbc: Loading layer [==================================================>] 13.31kB/13.31kB
-60bbd38a907e: Loading layer [==================================================>] 3.584kB/3.584kB
-9affd17eb100: Loading layer [==================================================>] 5.632kB/5.632kB
-0561c04cbf7e: Loading layer [==================================================>] 7.168kB/7.168kB
-ba0201512417: Loading layer [==================================================>] 18.29MB/18.29MB
-Loaded image: cyb3rward0g/helk-elastalert:0.2.1
-071d8bd76517: Loading layer [==================================================>] 210.2MB/210.2MB
-a175339dcf83: Loading layer [==================================================>] 310.5MB/310.5MB
-9a70a6f483f7: Loading layer [==================================================>] 95.68MB/95.68MB
-f4db77828c81: Loading layer [==================================================>] 311.3kB/311.3kB
-be48c67e9d13: Loading layer [==================================================>] 237.5MB/237.5MB
-432cb712190e: Loading layer [==================================================>] 7.68kB/7.68kB
-a512981fd597: Loading layer [==================================================>] 9.728kB/9.728kB
-Loaded image: docker.elastic.co/elasticsearch/elasticsearch:6.6.1
-49778752e7ec: Loading layer [==================================================>] 394.9MB/394.9MB
-5f3913b1d541: Loading layer [==================================================>] 1.667GB/1.667GB
-77fa3a9c5ff6: Loading layer [==================================================>] 7.168kB/7.168kB
-cbc15b984e03: Loading layer [==================================================>] 10.24kB/10.24kB
-38c44d7a52f6: Loading layer [==================================================>] 5.12kB/5.12kB
-0ec2dbbfd6c7: Loading layer [==================================================>] 3.584kB/3.584kB
-Loaded image: cyb3rward0g/helk-jupyter:0.1.1
-4e31d8c1cf96: Loading layer [==================================================>] 203.1MB/203.1MB
-efb23c49455d: Loading layer [==================================================>] 11.26kB/11.26kB
-```
-
-Check that the images are loaded via the `docker images` command:
-
-```
-ISOLATED@helk:~$ sudo docker images
-
-REPOSITORY                                      TAG      IMAGE ID       CREATED        SIZE
-cyb3rward0g/helk-jupyter                        0.1.1    efa46ecc8d32   2 days ago     2.18GB
-confluentinc/cp-ksql-server                     5.1.2    f57298019757   6 days ago     514MB
-confluentinc/cp-ksql-cli                        5.1.2    bd411ce0ba9f   6 days ago     510MB
-docker.elastic.co/logstash/logstash             6.6.1    3e7fbb7964ee   11 days ago    786MB
-docker.elastic.co/kibana/kibana                 6.6.1    b94222148a00   11 days ago    710MB
-docker.elastic.co/elasticsearch/elasticsearch   6.6.1    c6ffcb0ee97e   11 days ago    842MB
-cyb3rward0g/helk-elastalert                     0.2.1    569f588a22fc   3 weeks ago    758MB
-cyb3rward0g/helk-kafka-broker                   2.1.0    7b3e7f9ce732   2 months ago   388MB
-cyb3rward0g/helk-zookeeper                      2.1.0    abb732da3e50   2 months ago   388MB
-cyb3rward0g/helk-spark-worker                   2.4.0    b1545b0582db   2 months ago   579MB
-cyb3rward0g/helk-spark-master                   2.4.0    70fc61de3445   2 months ago   579MB
-cyb3rward0g/helk-nginx                          0.0.7    280d044b6719   6 months ago   329MB
-
-helk@helk:~$
-```
\ No newline at end of file
diff --git a/wiki/Logstash.md b/wiki/Logstash.md
deleted file mode 100644
index 7e9f6b5c..00000000
--- a/wiki/Logstash.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Design
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/LOGSTASH-Design.png]]
\ No newline at end of file
diff --git a/wiki/Spark.md b/wiki/Spark.md
deleted file mode 100644
index 9ca5c2ec..00000000
--- a/wiki/Spark.md
+++ /dev/null
@@ -1,107 +0,0 @@
-# Design
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Design.png]]
-
-# Spark Cluster Standalone Mode
-Spark's standalone cluster manager is a lightweight platform built specifically for Apache Spark workloads. Using it, you can run multiple Spark Applications on the same cluster. It also provides simple interfaces for doing so but can scale to large Spark workloads. The main disadvantage of the standalone mode is that it's more limited than the other cluster managers - in particular, your cluster can only run Spark.
-
-Chambers, Bill; Zaharia, Matei. Spark: The Definitive Guide: Big Data Processing Made Simple (Kindle Locations 9911-9914). O'Reilly Media. Kindle Edition.
-
-* **Spark Cluster Master:** (often written standalone Master) is the cluster manager for a Spark Standalone cluster
-* **Spark Cluster Worker:** (aka standalone slave) is a logical node in a Spark Standalone cluster
-
-[Source](https://jaceklaskowski.gitbooks.io/mastering-apache-spark/content/spark-standalone.html)
-
-## Spark Pyspark UI
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Pyspark-UI.png]]
-
-## Spark Cluster Master UI
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Cluster-Manager.png]]
-
-## Spark Cluster Worker UI
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-Cluster-Worker.png]]
-
-# Jupyter Integration
-"The Jupyter Notebook is an open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text. Uses include: data cleaning and transformation, numerical simulation, statistical modeling, data visualization, machine learning, and much more." [Jupyter Reference](http://jupyter.org/)
-
-HELK integrates the Jupyter Notebook project with Spark via the **PYSPARK_DRIVER_PYTHON**. Basically, when the HELK runs **/bin/pyspark**, a Jupyter notebook is executed as PySpark's Python driver. The **PYSPARK_DRIVER_PYTHON_OPTS** value is the following:
-```
-"notebook --NotebookApp.open_browser=False --NotebookApp.ip='*' --NotebookApp.port=8880 --allow-root"
-```
-# Test Spark, GraphFrames & Jupyter Integration
-By default, the Jupyter server gets started automatically after installing the HELK.
-* Access the Jupyter Server:
-  * Go to your `<HELK-IP>:8880` in your preferred browser
-  * Enter the token provided after installing the HELK
-* Go to the training/jupyter_notebooks/getting_started/ folder
-* Open the Check_Spark_Graphframes_Integrations notebook
-  * Check the saved output (make sure that you have Sysmon & Windows Security event logs being sent to your HELK; otherwise you will get errors in your Jupyter Notebook when trying to replicate the basic commands)
-  * Clear the output from the notebook and run everything again
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/HELK_checking_integrations.png]]
-
-# Apache Arrow Integration (Convert to Pandas - Optimization)
-Apache Arrow is an in-memory columnar data format that is used in Spark to efficiently transfer data between JVM and Python processes. This is currently most beneficial to Python users that work with Pandas/NumPy data. [Apache Spark](https://spark.apache.org/docs/latest/sql-programming-guide.html#pyspark-usage-guide-for-pandas-with-apache-arrow)
-
-[[https://github.com/Cyb3rWard0g/HELK/raw/master/resources/images/SPARK-ApacheArrow.png]]
-
-Example from [Apache Arrow](https://arrow.apache.org/blog/2017/07/26/spark-arrow/)
-
-# Spark Packages
-## elasticsearch-hadoop-6.2.4
-"Elasticsearch for Apache Hadoop is an open-source, stand-alone, self-contained, small library that allows Hadoop jobs (whether using Map/Reduce or libraries built upon it such as Hive, Pig or Cascading or new upcoming libraries like Apache Spark) to interact with Elasticsearch. One can think of it as a connector that allows data to flow bi-directionally so that applications can leverage transparently the Elasticsearch engine capabilities to significantly enrich their capabilities and increase the performance.
-Elasticsearch-hadoop provides native integration between Elasticsearch and Apache Spark, in the form of an RDD (Resilient Distributed Dataset) (or Pair RDD to be precise) that can read data from Elasticsearch. The RDD is offered in two flavors: one for Scala (which returns the data as Tuple2 with Scala collections) and one for Java (which returns the data as Tuple2 containing java.util collections). Just like other libraries, elasticsearch-hadoop needs to be available in Spark's classpath. As Spark has multiple deployment modes, this can translate to the target classpath, whether it is on only one node (as is the case with the local mode - which will be used through-out the documentation) or per-node depending on the desired infrastructure." [Elastic](https://www.elastic.co/guide/en/elasticsearch/hadoop/current/spark.html)
-
-## graphframes:graphframes:0.5.0-spark2.1-s_2.11
-"This is a prototype package for DataFrame-based graphs in Spark. Users can write highly expressive queries by leveraging the DataFrame API, combined with a new API for motif finding. The user also benefits from DataFrame performance optimizations within the Spark SQL engine."
[SparkPackages](https://spark-packages.org/package/graphframes/graphframes)
-"It aims to provide both the functionality of GraphX and extended functionality taking advantage of Spark DataFrames. This extended functionality includes motif finding, DataFrame-based serialization, and highly expressive graph queries." [GraphFrames](https://graphframes.github.io/)
-
-## org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.0
-"Structured Streaming integration for Kafka 0.10 to poll data from Kafka" [Structured Streaming Kafka](https://spark.apache.org/docs/latest/structured-streaming-kafka-integration.html)
-
-## databricks:spark-sklearn:0.2.3
-"This package contains some tools to integrate the Spark computing framework with the popular scikit-learn machine library. Among other tools: 1) train and evaluate multiple scikit-learn models in parallel. It is a distributed analog to the multicore implementation included by default in scikit-learn. 2) convert Spark's Dataframes seamlessly into numpy ndarrays or sparse matrices. 3) (experimental) distribute Scipy's sparse matrices as a dataset of sparse vectors." [SparkPackages](https://spark-packages.org/package/databricks/spark-sklearn)
-
-# Other Python Packages
-
-## Pandas
-"Pandas is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language." [Pandas Pydata](https://pandas.pydata.org/pandas-docs/stable/overview.html)
-
-## SciPy
-"It is a Python-based ecosystem of open-source software for mathematics, science, and engineering." [SciPy Org.](https://www.scipy.org/)
-
-## Scikit-learn
-"Simple and efficient tools for data mining and data analysis. Built on NumPy, SciPy, and matplotlib." [Scikit-Learn Org.](http://scikit-learn.org/stable/index.html)
-
-## NLTK
-"NLTK is a leading platform for building Python programs to work with human language data. It provides easy-to-use interfaces to over 50 corpora and lexical resources such as WordNet, along with a suite of text processing libraries for classification, tokenization, stemming, tagging, parsing, and semantic reasoning, wrappers for industrial-strength NLP libraries, and an active discussion forum." [NLTK Org.](http://www.nltk.org/)
-
-## Matplotlib
-"Matplotlib is a Python 2D plotting library which produces publication quality figures in a variety of hardcopy formats and interactive environments across platforms. Matplotlib can be used in Python scripts, the Python and IPython shell, the jupyter notebook, web application servers, and four graphical user interface toolkits." [Matplotlib](https://matplotlib.org/index.html)
-
-## Seaborn
-"Seaborn is a Python visualization library based on matplotlib. It provides a high-level interface for drawing attractive statistical graphics." [Seaborn Pydata](https://seaborn.pydata.org/index.html)
-
-## Datasketch
-"Datasketch gives you probabilistic data structures that can process and search very large amount of data super fast, with little loss of accuracy." [Datasketch Github](https://github.com/ekzhu/datasketch)
-
-## Keras
-"Keras is a high-level neural networks API, written in Python and capable of running on top of TensorFlow, CNTK, or Theano. It was developed with a focus on enabling fast experimentation. Being able to go from idea to result with the least possible delay is key to doing good research." [Keras](https://keras.io/)
-
-## Pyflux
-"PyFlux is an open source time series library for Python. The library has a good array of modern time series models, as well as a flexible array of inference options (frequentist and Bayesian) that can be applied to these models. By combining breadth of models with breadth of inference, PyFlux allows for a probabilistic approach to time series modelling." [Pyflux Github](https://github.com/RJT1990/pyflux)
-
-## Imbalanced-learn
-"imbalanced-learn is a python package offering a number of re-sampling techniques commonly used in datasets showing strong between-class imbalance. It is compatible with scikit-learn and is part of scikit-learn-contrib projects." [Imbalanced Learn](https://github.com/scikit-learn-contrib/imbalanced-learn)
-
-## Lime
-"This project is about explaining what machine learning classifiers (or models) are doing. Lime is able to explain any black box classifier, with two or more classes. All we require is that the classifier implements a function that takes in raw text or a numpy array and outputs a probability for each class. Support for scikit-learn classifiers is built-in." [Lime](https://github.com/marcotcr/lime)
-
-## Pyarrow
-Apache Arrow is a cross-language development platform for in-memory data. It specifies a standardized language-independent columnar memory format for flat and hierarchical data, organized for efficient analytic operations on modern hardware. It also provides computational libraries and zero-copy streaming messaging and interprocess communication. [Apache Arrow](https://arrow.apache.org/docs/python/)
-
-## NetworkX
-NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. [NetworkX](https://networkx.github.io/)
-
-## Nxviz
-nxviz is a graph visualization package for NetworkX. With nxviz, you can create beautiful graph visualizations with a declarative API. [Nxviz](https://github.com/ericmjl/nxviz)
\ No newline at end of file
diff --git a/wiki/Update-Kafka-Broker-IP.md b/wiki/Update-Kafka-Broker-IP.md
deleted file mode 100644
index 68cf7489..00000000
--- a/wiki/Update-Kafka-Broker-IP.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# Docker Deployment
-For the docker deployment, you will have to update the environment variable `ADVERTISED_LISTENER` first. You can do this on the system hosting the entire HELK, or on the Kafka broker's host if you distributed your docker containers across other systems.
-
-```
-export ADVERTISED_LISTENER=10.0.10.104
-```
-
-Then, you can simply run `docker-compose` the same way it was used to build the HELK. This will re-create the system with the new value assigned to the environment variable `ADVERTISED_LISTENER`.
-```
-sudo -E docker-compose -f helk-kibana-notebook-analysis-basic.yml up -d
-```
-
-If you just restart your containers, it will not update the environment variable in the Kafka broker; you have to re-create the container. Not re-creating the broker would still show you messages like the ones below:
-
-```
-[2019-01-25 05:35:21,026] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:24,194] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:27,362] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available.
(org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:30,530] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:33,698] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:36,866] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:40,034] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:43,238] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:46,306] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:49,382] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:52,450] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:55,522] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:35:58,594] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:36:01,714] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:36:04,770] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:36:08,450] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:36:11,650] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. 
(org.apache.kafka.clients.NetworkClient) -``` \ No newline at end of file diff --git a/wiki/_Sidebar.md b/wiki/_Sidebar.md deleted file mode 100644 index 3e2676fb..00000000 --- a/wiki/_Sidebar.md +++ /dev/null @@ -1,22 +0,0 @@ -## [Introduction](https://github.com/Cyb3rWard0g/HELK/wiki) - -## [Architecture](https://github.com/Cyb3rWard0g/HELK/wiki/Architecture-Overview) - * [Kafka](https://github.com/Cyb3rWard0g/HELK/wiki/Kafka) - * [Logstash](https://github.com/Cyb3rWard0g/HELK/wiki/Logstash) - * [Elasticsearch](https://github.com/Cyb3rWard0g/HELK/wiki/Elasticsearch) - * [Kibana](https://github.com/Cyb3rWard0g/HELK/wiki/Kibana) - * [Spark](https://github.com/Cyb3rWard0g/HELK/wiki/Spark) - -## [Installation](https://github.com/Cyb3rWard0g/HELK/wiki/Installation) - -## How-To -### Kafka -* [Check Kafka topic ingestion](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Kafka-topic-ingestion) -* [Check Winlogbeat shipping](https://github.com/Cyb3rWard0g/HELK/wiki/Check-Winlogbeat-shipping) -* [Update Kafka Broker IP](https://github.com/Cyb3rWard0g/HELK/wiki/Update-Kafka-Broker-IP) -* [Deploy KSQL CLI Locally](https://github.com/Cyb3rWard0g/HELK/wiki/Deploy-KSQL-CLI-Locally) -* [Create Logstash Plugins Offline Package](https://github.com/Cyb3rWard0g/HELK/wiki/Create-Plugins-Offline-Package) - -### Docker -* [Export Docker Images Locally](https://github.com/Cyb3rWard0g/HELK/wiki/Export-Docker-Images-locally) -* [Load Local Docker Images](https://github.com/Cyb3rWard0g/HELK/wiki/Load-Local-Docker-Images) \ No newline at end of file From c82c11e645ac8710b372cedd94a0784c9b4595d3 Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Thu, 3 Oct 2019 08:23:09 +0100 Subject: [PATCH 16/19] Added ELASTIC_PASSWORD var to Curator --- docker/helk-kibana-analysis-alert-trial.yml | 1 + docker/helk-kibana-analysis-trial.yml | 1 + docker/helk-kibana-notebook-analysis-alert-trial.yml | 1 + docker/helk-kibana-notebook-analysis-trial.yml | 1 + 4 files changed, 4 insertions(+) diff --git a/docker/helk-kibana-analysis-alert-trial.yml b/docker/helk-kibana-analysis-alert-trial.yml index 2e66244b..b0461aa5 100644 --- a/docker/helk-kibana-analysis-alert-trial.yml +++ b/docker/helk-kibana-analysis-alert-trial.yml @@ -164,6 +164,7 @@ services: environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 + ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} networks: helk: diff --git a/docker/helk-kibana-analysis-trial.yml b/docker/helk-kibana-analysis-trial.yml index 5364106e..8655e29a 100644 --- a/docker/helk-kibana-analysis-trial.yml +++ b/docker/helk-kibana-analysis-trial.yml @@ -152,6 +152,7 @@ services: environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 + ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} networks: helk: diff --git a/docker/helk-kibana-notebook-analysis-alert-trial.yml b/docker/helk-kibana-notebook-analysis-alert-trial.yml index b4882881..5c4dd2a8 100644 --- a/docker/helk-kibana-notebook-analysis-alert-trial.yml +++ b/docker/helk-kibana-notebook-analysis-alert-trial.yml @@ -202,6 +202,7 @@ services: environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 + ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} networks: helk: diff --git a/docker/helk-kibana-notebook-analysis-trial.yml b/docker/helk-kibana-notebook-analysis-trial.yml index 68514486..fe875aaf 100644 --- a/docker/helk-kibana-notebook-analysis-trial.yml +++ b/docker/helk-kibana-notebook-analysis-trial.yml @@ -192,6 +192,7 @@ services: environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 + ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} networks: helk: From 2eea87fbfdf0f919e61bf4f74c3c972b27b00a88 Mon 
Sep 17 00:00:00 2001 From: AlfieJ04 Date: Fri, 4 Oct 2019 16:51:47 +0100 Subject: [PATCH 17/19] Updated Curator container version --- docker/helk-kibana-analysis-alert-basic.yml | 2 +- docker/helk-kibana-analysis-alert-trial.yml | 2 +- docker/helk-kibana-analysis-basic.yml | 2 +- docker/helk-kibana-analysis-trial.yml | 2 +- docker/helk-kibana-notebook-analysis-alert-basic.yml | 2 +- docker/helk-kibana-notebook-analysis-alert-trial.yml | 2 +- docker/helk-kibana-notebook-analysis-basic.yml | 2 +- docker/helk-kibana-notebook-analysis-trial.yml | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docker/helk-kibana-analysis-alert-basic.yml b/docker/helk-kibana-analysis-alert-basic.yml index b4f3efda..53cbdcf2 100644 --- a/docker/helk-kibana-analysis-alert-basic.yml +++ b/docker/helk-kibana-analysis-alert-basic.yml @@ -154,7 +154,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: diff --git a/docker/helk-kibana-analysis-alert-trial.yml b/docker/helk-kibana-analysis-alert-trial.yml index b0461aa5..9ccf224f 100644 --- a/docker/helk-kibana-analysis-alert-trial.yml +++ b/docker/helk-kibana-analysis-alert-trial.yml @@ -156,7 +156,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: diff --git a/docker/helk-kibana-analysis-basic.yml b/docker/helk-kibana-analysis-basic.yml index 92a16a98..0d65d41a 100644 --- a/docker/helk-kibana-analysis-basic.yml +++ b/docker/helk-kibana-analysis-basic.yml @@ -143,7 +143,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: diff --git a/docker/helk-kibana-analysis-trial.yml b/docker/helk-kibana-analysis-trial.yml index 8655e29a..e2ada643 100644 --- a/docker/helk-kibana-analysis-trial.yml +++ b/docker/helk-kibana-analysis-trial.yml @@ -144,7 +144,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: diff --git a/docker/helk-kibana-notebook-analysis-alert-basic.yml b/docker/helk-kibana-notebook-analysis-alert-basic.yml index c359c9f0..d8f5ba0d 100644 --- a/docker/helk-kibana-notebook-analysis-alert-basic.yml +++ b/docker/helk-kibana-notebook-analysis-alert-basic.yml @@ -193,7 +193,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: diff --git a/docker/helk-kibana-notebook-analysis-alert-trial.yml b/docker/helk-kibana-notebook-analysis-alert-trial.yml index 5c4dd2a8..0c82d936 100644 --- a/docker/helk-kibana-notebook-analysis-alert-trial.yml +++ b/docker/helk-kibana-notebook-analysis-alert-trial.yml @@ -194,7 +194,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: diff --git a/docker/helk-kibana-notebook-analysis-basic.yml b/docker/helk-kibana-notebook-analysis-basic.yml index 627ffc6b..42529bae 100644 --- a/docker/helk-kibana-notebook-analysis-basic.yml +++ b/docker/helk-kibana-notebook-analysis-basic.yml @@ -182,7 +182,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: 
alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: diff --git a/docker/helk-kibana-notebook-analysis-trial.yml b/docker/helk-kibana-notebook-analysis-trial.yml index fe875aaf..0a92c942 100644 --- a/docker/helk-kibana-notebook-analysis-trial.yml +++ b/docker/helk-kibana-notebook-analysis-trial.yml @@ -184,7 +184,7 @@ services: networks: helk: helk-curator: - image: alfiej04/helk-curator:0.0.2 + image: alfiej04/helk-curator:0.0.3 container_name: helk-curator restart: always depends_on: From 2ab14b961b5d54c8f495a384feb04375c5811da2 Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Sat, 5 Oct 2019 09:41:20 +0100 Subject: [PATCH 18/19] Amended helk-curator-cron --- docker/helk-curator/helk-curator-cron | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/helk-curator/helk-curator-cron b/docker/helk-curator/helk-curator-cron index a33a003f..864b56f3 100644 --- a/docker/helk-curator/helk-curator-cron +++ b/docker/helk-curator/helk-curator-cron @@ -1,2 +1,2 @@ -0 0 * * * curator --config /usr/share/curator/curator.yml /usr/share/curator/actions.yaml >> /var/log/helk-curator-cron.log 2>&1 +0 0 * * * /usr/share/curator --config /usr/share/curator/curator.yml /usr/share/curator/actions.yaml >> /var/log/helk-curator-cron.log 2>&1 # An empty line is required at the end of this file for a valid cron file. From 6d7e954a0e5355c29b65a7a4ab0686486d86151d Mon Sep 17 00:00:00 2001 From: AlfieJ04 Date: Sat, 5 Oct 2019 09:56:27 +0100 Subject: [PATCH 19/19] Amended helk-curator-cron --- docker/helk-curator/helk-curator-cron | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/helk-curator/helk-curator-cron b/docker/helk-curator/helk-curator-cron index 864b56f3..5460a282 100644 --- a/docker/helk-curator/helk-curator-cron +++ b/docker/helk-curator/helk-curator-cron @@ -1,2 +1,2 @@ -0 0 * * * /usr/share/curator --config /usr/share/curator/curator.yml /usr/share/curator/actions.yaml >> /var/log/helk-curator-cron.log 2>&1 +0 0 * * * /usr/local/bin/curator --config /usr/share/curator/curator.yml /usr/share/curator/actions.yaml >> /var/log/helk-curator-cron.log 2>&1 # An empty line is required at the end of this file for a valid cron file.
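
As a sanity check on the final path above, one way to confirm where the curator entrypoint actually lives is to ask a running container directly (a sketch; it assumes the helk-curator container is up and that `command -v` is available in its shell):

```
sudo docker exec helk-curator sh -c 'command -v curator'
```

If that prints `/usr/local/bin/curator`, the cron line in the last patch points at the right binary.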