diff --git a/.dockerignore b/.dockerignore
index 0c5848b74..dfcb9cbf2 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -14,6 +14,7 @@
**/.envrc
**/.direnv
**/.opensearch*.curlrc
+.configured
.trigger_workflow_build
.tmp
docker-compose*yml
diff --git a/.github/workflows/api-build-and-push-ghcr.yml b/.github/workflows/api-build-and-push-ghcr.yml
index e786a1a81..20c86a2dd 100644
--- a/.github/workflows/api-build-and-push-ghcr.yml
+++ b/.github/workflows/api-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'api/**'
- 'Dockerfiles/api.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/api.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/api:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/api:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/arkime-build-and-push-ghcr.yml b/.github/workflows/arkime-build-and-push-ghcr.yml
index 1c73661f4..120e014b3 100644
--- a/.github/workflows/arkime-build-and-push-ghcr.yml
+++ b/.github/workflows/arkime-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'arkime/**'
- 'Dockerfiles/arkime.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -61,4 +64,4 @@ jobs:
build-args: |
MAXMIND_GEOIP_DB_LICENSE_KEY=${{ secrets.MAXMIND_GEOIP_DB_LICENSE_KEY }}
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/arkime:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/arkime:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/dashboards-build-and-push-ghcr.yml b/.github/workflows/dashboards-build-and-push-ghcr.yml
index dacd0e6e2..442da4eef 100644
--- a/.github/workflows/dashboards-build-and-push-ghcr.yml
+++ b/.github/workflows/dashboards-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'dashboards/**'
- 'Dockerfiles/dashboards.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/dashboards.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/dashboards:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/dashboards:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/dashboards-helper-build-and-push-ghcr.yml b/.github/workflows/dashboards-helper-build-and-push-ghcr.yml
index 96c3a1912..aeaf743fb 100644
--- a/.github/workflows/dashboards-helper-build-and-push-ghcr.yml
+++ b/.github/workflows/dashboards-helper-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'dashboards/**'
- 'Dockerfiles/dashboards-helper.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/dashboards-helper.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/dashboards-helper:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/dashboards-helper:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/file-monitor-build-and-push-ghcr.yml b/.github/workflows/file-monitor-build-and-push-ghcr.yml
index ae6b8b932..37af5c097 100644
--- a/.github/workflows/file-monitor-build-and-push-ghcr.yml
+++ b/.github/workflows/file-monitor-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'file-monitor/**'
- 'Dockerfiles/file-monitor.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/file-monitor.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/file-monitor:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/file-monitor:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/file-upload-build-and-push-ghcr.yml b/.github/workflows/file-upload-build-and-push-ghcr.yml
index 86d2a128b..b457b86ee 100644
--- a/.github/workflows/file-upload-build-and-push-ghcr.yml
+++ b/.github/workflows/file-upload-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'file-upload/**'
- 'Dockerfiles/file-upload.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/file-upload.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/file-upload:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/file-upload:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/filebeat-build-and-push-ghcr.yml b/.github/workflows/filebeat-build-and-push-ghcr.yml
index 4784f7727..bb744d177 100644
--- a/.github/workflows/filebeat-build-and-push-ghcr.yml
+++ b/.github/workflows/filebeat-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'filebeat/**'
- 'Dockerfiles/filebeat.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/filebeat.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/filebeat-oss:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/filebeat-oss:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/freq-build-and-push-ghcr.yml b/.github/workflows/freq-build-and-push-ghcr.yml
index f498b34a0..12281f7a9 100644
--- a/.github/workflows/freq-build-and-push-ghcr.yml
+++ b/.github/workflows/freq-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'freq-server/**'
- 'Dockerfiles/freq.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/freq.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/freq:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/freq:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/htadmin-build-and-push-ghcr.yml b/.github/workflows/htadmin-build-and-push-ghcr.yml
index 812740c8f..3af590976 100644
--- a/.github/workflows/htadmin-build-and-push-ghcr.yml
+++ b/.github/workflows/htadmin-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'htadmin/**'
- 'Dockerfiles/htadmin.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/htadmin.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/htadmin:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/htadmin:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/logstash-build-and-push-ghcr.yml b/.github/workflows/logstash-build-and-push-ghcr.yml
index fe6a6c4f9..a8d69eb27 100644
--- a/.github/workflows/logstash-build-and-push-ghcr.yml
+++ b/.github/workflows/logstash-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'logstash/**'
- 'Dockerfiles/logstash.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/logstash.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/logstash-oss:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/logstash-oss:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/malcolm-iso-build-docker-wrap-push-ghcr.yml b/.github/workflows/malcolm-iso-build-docker-wrap-push-ghcr.yml
index 599ea01f8..c95a3ffd3 100644
--- a/.github/workflows/malcolm-iso-build-docker-wrap-push-ghcr.yml
+++ b/.github/workflows/malcolm-iso-build-docker-wrap-push-ghcr.yml
@@ -5,9 +5,11 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'malcolm-iso/**'
- 'shared/bin/*'
+ - '!shared/bin/configure-capture.py'
- '.trigger_iso_workflow_build'
- '.github/workflows/malcolm-iso-build-docker-wrap-push-ghcr.yml'
workflow_dispatch:
@@ -94,7 +96,7 @@ jobs:
run: |
IMAGES=( $(grep image: docker-compose.yml | awk '{print $2}' | sort -u) )
for IMAGE in "${IMAGES[@]}"; do
- REPO_IMAGE="$(echo "$IMAGE" | sed "s@^\(malcolmnetsec\)@ghcr.io/${{ github.repository_owner }}/\1@" | sed "s/:.*/:${{ steps.extract_branch.outputs.branch }}/")"
+ REPO_IMAGE="$(echo "$IMAGE" | sed "s@^.*\(malcolm\)@ghcr.io/${{ github.repository_owner }}/\1@" | sed "s/:.*/:${{ steps.extract_branch.outputs.branch }}/")"
docker pull "$REPO_IMAGE" && \
docker tag "$REPO_IMAGE" "$IMAGE" && \
docker rmi "$REPO_IMAGE"
@@ -124,4 +126,4 @@ jobs:
with:
context: ./malcolm-iso
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/malcolm:${{ steps.extract_branch.outputs.branch }}
\ No newline at end of file
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/malcolm:${{ steps.extract_branch.outputs.branch }}
\ No newline at end of file
diff --git a/.github/workflows/name-map-ui-build-and-push-ghcr.yml b/.github/workflows/name-map-ui-build-and-push-ghcr.yml
index f433f7f35..0d9982356 100644
--- a/.github/workflows/name-map-ui-build-and-push-ghcr.yml
+++ b/.github/workflows/name-map-ui-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'name-map-ui/**'
- 'Dockerfiles/name-map-ui.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/name-map-ui.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/name-map-ui:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/name-map-ui:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/netbox-build-and-push-ghcr.yml b/.github/workflows/netbox-build-and-push-ghcr.yml
index a7fee896c..cbb2fa980 100644
--- a/.github/workflows/netbox-build-and-push-ghcr.yml
+++ b/.github/workflows/netbox-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'netbox/**'
- 'Dockerfiles/netbox.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/netbox.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/netbox:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/netbox:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/nginx-build-and-push-ghcr.yml b/.github/workflows/nginx-build-and-push-ghcr.yml
index 7301baccd..77e379f70 100644
--- a/.github/workflows/nginx-build-and-push-ghcr.yml
+++ b/.github/workflows/nginx-build-and-push-ghcr.yml
@@ -5,15 +5,19 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'nginx/**'
- 'Dockerfiles/nginx.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
- '_config.yml'
- '_includes/**'
- '_layouts/**'
- 'docs/**'
+ - '!docs/download.md'
- 'Gemfile'
- 'README.md'
workflow_dispatch:
@@ -73,4 +77,4 @@ jobs:
VCS_REVISION=${{ steps.extract_commit_sha.outputs.sha }}
GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/nginx-proxy:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/nginx-proxy:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/opensearch-build-and-push-ghcr.yml b/.github/workflows/opensearch-build-and-push-ghcr.yml
index b9de9d74f..177c8d1e1 100644
--- a/.github/workflows/opensearch-build-and-push-ghcr.yml
+++ b/.github/workflows/opensearch-build-and-push-ghcr.yml
@@ -5,9 +5,12 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'Dockerfiles/opensearch.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -58,4 +61,4 @@ jobs:
context: .
file: ./Dockerfiles/opensearch.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/opensearch:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/opensearch:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/pcap-capture-build-and-push-ghcr.yml b/.github/workflows/pcap-capture-build-and-push-ghcr.yml
index 07bd3944b..2d4dc0c6e 100644
--- a/.github/workflows/pcap-capture-build-and-push-ghcr.yml
+++ b/.github/workflows/pcap-capture-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'pcap-capture/**'
- 'Dockerfiles/pcap-capture.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/pcap-capture.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/pcap-capture:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/pcap-capture:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/pcap-monitor-build-and-push-ghcr.yml b/.github/workflows/pcap-monitor-build-and-push-ghcr.yml
index f4c52be24..14d37084e 100644
--- a/.github/workflows/pcap-monitor-build-and-push-ghcr.yml
+++ b/.github/workflows/pcap-monitor-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'pcap-monitor/**'
- 'Dockerfiles/pcap-monitor.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/pcap-monitor.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/pcap-monitor:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/pcap-monitor:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/postgresql-build-and-push-ghcr.yml b/.github/workflows/postgresql-build-and-push-ghcr.yml
index 3f78d13b5..6326f9fbc 100644
--- a/.github/workflows/postgresql-build-and-push-ghcr.yml
+++ b/.github/workflows/postgresql-build-and-push-ghcr.yml
@@ -5,9 +5,12 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'Dockerfiles/postgresql.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -58,4 +61,4 @@ jobs:
context: .
file: ./Dockerfiles/postgresql.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/postgresql:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/postgresql:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/redis-build-and-push-ghcr.yml b/.github/workflows/redis-build-and-push-ghcr.yml
index af0b312cf..29796936d 100644
--- a/.github/workflows/redis-build-and-push-ghcr.yml
+++ b/.github/workflows/redis-build-and-push-ghcr.yml
@@ -5,9 +5,12 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'Dockerfiles/redis.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -58,4 +61,4 @@ jobs:
context: .
file: ./Dockerfiles/redis.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/redis:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/redis:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/sensor-iso-build-docker-wrap-push-ghcr.yml b/.github/workflows/sensor-iso-build-docker-wrap-push-ghcr.yml
index 78cbe5b2c..fd9499911 100644
--- a/.github/workflows/sensor-iso-build-docker-wrap-push-ghcr.yml
+++ b/.github/workflows/sensor-iso-build-docker-wrap-push-ghcr.yml
@@ -5,6 +5,7 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'sensor-iso/**'
- 'shared/bin/*'
@@ -116,4 +117,4 @@ jobs:
with:
context: ./sensor-iso
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/hedgehog:${{ steps.extract_branch.outputs.branch }}
\ No newline at end of file
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/hedgehog:${{ steps.extract_branch.outputs.branch }}
\ No newline at end of file
diff --git a/.github/workflows/suricata-build-and-push-ghcr.yml b/.github/workflows/suricata-build-and-push-ghcr.yml
index 9fb1f70af..8086860d8 100644
--- a/.github/workflows/suricata-build-and-push-ghcr.yml
+++ b/.github/workflows/suricata-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'suricata/**'
- 'Dockerfiles/suricata.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/suricata.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/suricata:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/suricata:${{ steps.extract_branch.outputs.branch }}
diff --git a/.github/workflows/zeek-build-and-push-ghcr.yml b/.github/workflows/zeek-build-and-push-ghcr.yml
index 4447f7171..8dbebdb59 100644
--- a/.github/workflows/zeek-build-and-push-ghcr.yml
+++ b/.github/workflows/zeek-build-and-push-ghcr.yml
@@ -5,10 +5,13 @@ on:
branches:
- main
- development
+ - kubernetes
paths:
- 'zeek/**'
- 'Dockerfiles/zeek.Dockerfile'
- 'shared/bin/*'
+ - '!shared/bin/configure-interfaces.py'
+ - '!shared/bin/configure-capture.py'
- '.trigger_workflow_build'
workflow_dispatch:
repository_dispatch:
@@ -59,4 +62,4 @@ jobs:
context: .
file: ./Dockerfiles/zeek.Dockerfile
push: true
- tags: ghcr.io/${{ github.repository_owner }}/malcolmnetsec/zeek:${{ steps.extract_branch.outputs.branch }}
+ tags: ghcr.io/${{ github.repository_owner }}/malcolm/zeek:${{ steps.extract_branch.outputs.branch }}
diff --git a/.gitignore b/.gitignore
index 330a7790d..4ce70a661 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,6 +17,10 @@
/htadmin/config.ini
/htadmin/metadata
+# runtime
+.tmp
+.configured
+
# development
.envrc
.direnv
@@ -40,4 +44,3 @@ __pycache__/
__pypackages__/
*.py[cod]
*$py.class
-.tmp
diff --git a/.trigger_workflow_build b/.trigger_workflow_build
index 4eb949f9a..c69b966fd 100644
--- a/.trigger_workflow_build
+++ b/.trigger_workflow_build
@@ -1,2 +1,2 @@
# this file exists solely for the purpose of being updated and seen by github to trigger a commit build action
-1
+2
diff --git a/Dockerfiles/api.Dockerfile b/Dockerfiles/api.Dockerfile
index 57bdc5615..50bbbc3e3 100644
--- a/Dockerfiles/api.Dockerfile
+++ b/Dockerfiles/api.Dockerfile
@@ -27,7 +27,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/api'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/api'
LABEL org.opencontainers.image.description='Malcolm container providing a REST API for some information about network traffic'
ARG DEFAULT_UID=1000
@@ -76,7 +76,7 @@ COPY shared/bin/opensearch_status.sh "${APP_HOME}"/
ADD shared/bin/docker-uid-gid-setup.sh /usr/local/bin/
RUN apt-get -q update \
&& apt-get -y -q --no-install-recommends upgrade \
- && apt-get -y -q --no-install-recommends install curl netcat tini \
+ && apt-get -y -q --no-install-recommends install curl netcat rsync tini \
&& python3 -m pip install --upgrade pip \
&& python3 -m pip install --no-cache /wheels/* \
&& chmod 755 /usr/local/bin/docker-uid-gid-setup.sh \
diff --git a/Dockerfiles/arkime.Dockerfile b/Dockerfiles/arkime.Dockerfile
index efa3875c5..3e539c025 100644
--- a/Dockerfiles/arkime.Dockerfile
+++ b/Dockerfiles/arkime.Dockerfile
@@ -45,6 +45,7 @@ RUN apt-get -q update && \
python3-pip \
python3-setuptools \
python3-wheel \
+ rsync \
sudo \
swig \
wget \
@@ -79,7 +80,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/arkime'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/arkime'
LABEL org.opencontainers.image.description='Malcolm container providing Arkime'
ARG DEFAULT_UID=1000
diff --git a/Dockerfiles/dashboards-helper.Dockerfile b/Dockerfiles/dashboards-helper.Dockerfile
index 514120bda..6974ea512 100644
--- a/Dockerfiles/dashboards-helper.Dockerfile
+++ b/Dockerfiles/dashboards-helper.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/dashboards-helper'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/dashboards-helper'
LABEL org.opencontainers.image.description='Malcolm container providing OpenSearch Dashboards support functions'
ARG DEFAULT_UID=1000
@@ -71,7 +71,7 @@ ADD scripts/malcolm_common.py /data/
RUN apk update --no-cache && \
apk upgrade --no-cache && \
- apk --no-cache add bash python3 py3-pip curl openssl procps psmisc npm shadow jq tini && \
+ apk --no-cache add bash python3 py3-pip curl openssl procps psmisc npm rsync shadow jq tini && \
npm install -g http-server && \
pip3 install supervisor humanfriendly requests && \
curl -fsSLO "$SUPERCRONIC_URL" && \
diff --git a/Dockerfiles/dashboards.Dockerfile b/Dockerfiles/dashboards.Dockerfile
index 842d448d9..3d01a60b2 100644
--- a/Dockerfiles/dashboards.Dockerfile
+++ b/Dockerfiles/dashboards.Dockerfile
@@ -76,7 +76,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/master/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/opensearch-dashboards'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/opensearch-dashboards'
LABEL org.opencontainers.image.description='Malcolm container providing OpenSearch Dashboards'
ARG DEFAULT_UID=1000
@@ -116,7 +116,7 @@ COPY --from=build /usr/share/opensearch-dashboards/plugins/sankey_vis/build/kbnS
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/bin/tini
RUN yum upgrade -y && \
- yum install -y curl psmisc util-linux openssl python3 zip unzip && \
+ yum install -y curl psmisc util-linux openssl rsync python3 zip unzip && \
usermod -a -G tty ${PUSER} && \
# Malcolm manages authentication and encryption via NGINX reverse proxy
/usr/share/opensearch-dashboards/bin/opensearch-dashboards-plugin remove securityDashboards --allow-root && \
diff --git a/Dockerfiles/file-monitor.Dockerfile b/Dockerfiles/file-monitor.Dockerfile
index 47d18ee59..8b59a3a32 100644
--- a/Dockerfiles/file-monitor.Dockerfile
+++ b/Dockerfiles/file-monitor.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/file-monitor'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/file-monitor'
LABEL org.opencontainers.image.description='Malcolm container for scanning files extracted by Zeek'
ARG DEFAULT_UID=1000
@@ -75,7 +75,7 @@ ENV EXTRACTED_FILE_ENABLE_CAPA $EXTRACTED_FILE_ENABLE_CAPA
ENV EXTRACTED_FILE_CAPA_VERBOSE $EXTRACTED_FILE_CAPA_VERBOSE
ENV SRC_BASE_DIR "/usr/local/src"
ENV CLAMAV_RULES_DIR "/var/lib/clamav"
-ENV YARA_VERSION "4.2.3"
+ENV YARA_VERSION "4.3.0"
ENV YARA_URL "https://github.com/VirusTotal/yara/archive/v${YARA_VERSION}.tar.gz"
ENV YARA_RULES_SRC_DIR "/yara-rules-src"
ENV YARA_RULES_DIR "/yara-rules"
@@ -132,7 +132,8 @@ RUN sed -i "s/bullseye main/bullseye main contrib non-free/g" /etc/apt/sources.l
python3-pip \
python3-pyinotify \
python3-requests \
- python3-zmq && \
+ python3-zmq \
+ rsync && \
pip3 install clamd supervisor yara-python python-magic psutil pycryptodome && \
curl -fsSLO "$SUPERCRONIC_URL" && \
echo "${SUPERCRONIC_SHA1SUM} ${SUPERCRONIC}" | sha1sum -c - && \
diff --git a/Dockerfiles/file-upload.Dockerfile b/Dockerfiles/file-upload.Dockerfile
index 15c7b2cb2..c88a46ae3 100644
--- a/Dockerfiles/file-upload.Dockerfile
+++ b/Dockerfiles/file-upload.Dockerfile
@@ -32,7 +32,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/file-upload'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/file-upload'
LABEL org.opencontainers.image.description='Malcolm container providing an interface for uploading PCAP files and Zeek logs for processing'
ARG DEFAULT_UID=33
@@ -67,6 +67,7 @@ RUN apt-get -q update && \
php$PHP_VERSION-fpm \
php$PHP_VERSION-apcu \
nginx-light \
+ rsync \
tini && \
apt-get clean -y -q && \
rm -rf /var/lib/apt/lists/*
diff --git a/Dockerfiles/filebeat.Dockerfile b/Dockerfiles/filebeat.Dockerfile
index 552aa8a53..207bf8cd1 100644
--- a/Dockerfiles/filebeat.Dockerfile
+++ b/Dockerfiles/filebeat.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/filebeat-oss'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/filebeat-oss'
LABEL org.opencontainers.image.description='Malcolm container providing Filebeat (the Apache-licensed variant)'
ARG DEFAULT_UID=1000
@@ -86,6 +86,7 @@ RUN apt-get -q update && \
psmisc \
python3-pip \
python3-setuptools \
+ rsync \
tar \
unar \
unzip \
diff --git a/Dockerfiles/freq.Dockerfile b/Dockerfiles/freq.Dockerfile
index 94b8f5432..97ab68f74 100644
--- a/Dockerfiles/freq.Dockerfile
+++ b/Dockerfiles/freq.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/freq'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/freq'
LABEL org.opencontainers.image.description='Malcolm container providing an interface to Mark Baggett''s freq_server.py'
ARG DEFAULT_UID=1000
@@ -38,6 +38,7 @@ RUN apt-get -q update && \
python3 \
python3-dev \
python3-pip \
+ rsync \
tini && \
pip3 install supervisor six && \
cd /opt && \
diff --git a/Dockerfiles/htadmin.Dockerfile b/Dockerfiles/htadmin.Dockerfile
index 16cd4fab1..4da110ff6 100644
--- a/Dockerfiles/htadmin.Dockerfile
+++ b/Dockerfiles/htadmin.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/htadmin'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/htadmin'
LABEL org.opencontainers.image.description='Malcolm container providing htadmin for managing login accounts in an htpasswd file'
ARG DEFAULT_UID=33
@@ -51,6 +51,7 @@ RUN apt-get -q update && \
php$PHP_VERSION-fpm \
php$PHP_VERSION-gd \
procps \
+ rsync \
supervisor \
tini && \
( yes '' | pecl channel-update pecl.php.net ) && \
diff --git a/Dockerfiles/logstash.Dockerfile b/Dockerfiles/logstash.Dockerfile
index 7ced0f3ea..116530852 100644
--- a/Dockerfiles/logstash.Dockerfile
+++ b/Dockerfiles/logstash.Dockerfile
@@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/logstash-oss'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/logstash-oss'
LABEL org.opencontainers.image.description='Malcolm container providing Logstash (the Apache-licensed variant)'
ARG DEFAULT_UID=1000
@@ -55,6 +55,7 @@ RUN set -x && \
python3-setuptools \
python3-pip \
python3-requests \
+ rsync \
tini && \
chmod +x /usr/bin/tini && \
pip3 install ipaddress supervisor manuf pyyaml && \
diff --git a/Dockerfiles/name-map-ui.Dockerfile b/Dockerfiles/name-map-ui.Dockerfile
index c1b799013..0925de6dc 100644
--- a/Dockerfiles/name-map-ui.Dockerfile
+++ b/Dockerfiles/name-map-ui.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/name-map-ui'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/name-map-ui'
LABEL org.opencontainers.image.description='Malcolm container providing a user interface for mapping names to network hosts and subnets'
ARG DEFAULT_UID=1000
@@ -28,7 +28,7 @@ RUN apk update --no-cache && \
apk upgrade --no-cache && \
apk --no-cache add bash php81 php81-fpm php81-mysqli php81-json php81-openssl php81-curl php81-fileinfo \
php81-zlib php81-xml php81-phar php81-intl php81-dom php81-xmlreader php81-ctype php81-session \
- php81-mbstring php81-gd nginx supervisor curl inotify-tools file psmisc shadow openssl tini
+ php81-mbstring php81-gd nginx supervisor curl inotify-tools file psmisc rsync shadow openssl tini
COPY name-map-ui/config/nginx.conf /etc/nginx/nginx.conf
COPY name-map-ui/config/fpm-pool.conf /etc/php81/php-fpm.d/www.conf
diff --git a/Dockerfiles/netbox.Dockerfile b/Dockerfiles/netbox.Dockerfile
index 9cdc51318..06068dd01 100644
--- a/Dockerfiles/netbox.Dockerfile
+++ b/Dockerfiles/netbox.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/netbox'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/netbox'
LABEL org.opencontainers.image.description='Malcolm container providing the NetBox asset management system'
ENV DEBIAN_FRONTEND noninteractive
@@ -45,6 +45,7 @@ RUN apt-get -q update && \
jq \
procps \
psmisc \
+ rsync \
supervisor \
tini && \
/opt/netbox/venv/bin/python -m pip install psycopg2 pynetbox python-slugify randomcolor && \
@@ -68,7 +69,7 @@ RUN apt-get -q update && \
curl -sSL "$NETBOX_DEVICETYPE_LIBRARY_URL" | tar xzvf - -C ./"$(basename "${NETBOX_DEVICETYPE_LIBRARY_PATH}")" --strip-components 1 && \
mkdir -p /opt/netbox/netbox/$BASE_PATH && \
mv /opt/netbox/netbox/static /opt/netbox/netbox/$BASE_PATH/static && \
- jq '. += { "settings": { "http": { "discard_unsafe_fields": false } } }' /etc/unit/nginx-unit.json | jq 'del(.listeners."[::]:8080")' | jq ".routes[0].match.uri = \"/${BASE_PATH}/static/*\"" > /etc/unit/nginx-unit-new.json && \
+ jq '. += { "settings": { "http": { "discard_unsafe_fields": false } } }' /etc/unit/nginx-unit.json | jq 'del(.listeners."[::]:8080")' | jq 'del(.listeners."[::]:8081")' | jq ".routes.main[0].match.uri = \"/${BASE_PATH}/static/*\"" > /etc/unit/nginx-unit-new.json && \
mv /etc/unit/nginx-unit-new.json /etc/unit/nginx-unit.json && \
chmod 644 /etc/unit/nginx-unit.json && \
tr -cd '\11\12\15\40-\176' < /opt/netbox/netbox/netbox/configuration.py > /opt/netbox/netbox/netbox/configuration_ascii.py && \
diff --git a/Dockerfiles/nginx.Dockerfile b/Dockerfiles/nginx.Dockerfile
index 80ba31a19..521ca70e2 100644
--- a/Dockerfiles/nginx.Dockerfile
+++ b/Dockerfiles/nginx.Dockerfile
@@ -41,7 +41,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/nginx-proxy'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/nginx-proxy'
LABEL org.opencontainers.image.description='Malcolm container providing an NGINX reverse proxy for the other services'
ARG DEFAULT_UID=101
@@ -143,7 +143,7 @@ RUN set -x ; \
" ; \
apk update --no-cache; \
apk upgrade --no-cache; \
- apk add --no-cache curl shadow libressl; \
+ apk add --no-cache curl rsync shadow libressl; \
addgroup -g ${DEFAULT_GID} -S ${PGROUP} ; \
adduser -S -D -H -u ${DEFAULT_UID} -h /var/cache/nginx -s /sbin/nologin -G ${PGROUP} -g ${PUSER} ${PUSER} ; \
addgroup ${PUSER} shadow ; \
diff --git a/Dockerfiles/opensearch.Dockerfile b/Dockerfiles/opensearch.Dockerfile
index f6d168fd1..1de50debf 100644
--- a/Dockerfiles/opensearch.Dockerfile
+++ b/Dockerfiles/opensearch.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/opensearch'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/opensearch'
LABEL org.opencontainers.image.description='Malcolm container providing OpenSearch'
ARG DEFAULT_UID=1000
@@ -41,7 +41,7 @@ ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/
# Remove the opensearch-security plugin - Malcolm manages authentication and encryption via NGINX reverse proxy
# Remove the performance-analyzer plugin - Reduce resources in docker image
-RUN yum install -y openssl util-linux procps && \
+RUN yum install -y openssl util-linux procps rsync && \
yum upgrade -y && \
/usr/share/opensearch/bin/opensearch-plugin remove opensearch-security --purge && \
/usr/share/opensearch/bin/opensearch-plugin remove opensearch-performance-analyzer --purge && \
diff --git a/Dockerfiles/pcap-capture.Dockerfile b/Dockerfiles/pcap-capture.Dockerfile
index 8941c676d..b7de4d7c0 100644
--- a/Dockerfiles/pcap-capture.Dockerfile
+++ b/Dockerfiles/pcap-capture.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/pcap-capture'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/pcap-capture'
LABEL org.opencontainers.image.description='Malcolm container providing network traffic capture capabilities via netsniff-ng and tcpdump'
ARG DEFAULT_UID=1000
@@ -68,6 +68,7 @@ RUN apt-get -q update && \
openssl \
procps \
psmisc \
+ rsync \
supervisor \
tcpdump \
tini && \
diff --git a/Dockerfiles/pcap-monitor.Dockerfile b/Dockerfiles/pcap-monitor.Dockerfile
index 263016c0c..5c8a5636f 100644
--- a/Dockerfiles/pcap-monitor.Dockerfile
+++ b/Dockerfiles/pcap-monitor.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/pcap-monitor'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/pcap-monitor'
LABEL org.opencontainers.image.description='Malcolm container watching for captured or uploaded artifacts to be processed'
ARG DEFAULT_UID=1000
@@ -54,6 +54,7 @@ RUN apt-get -q update && \
python3-pip \
python3-setuptools \
python3-wheel \
+ rsync \
supervisor \
tini \
vim-tiny && \
diff --git a/Dockerfiles/postgresql.Dockerfile b/Dockerfiles/postgresql.Dockerfile
index b895d51ec..f7931ca5b 100644
--- a/Dockerfiles/postgresql.Dockerfile
+++ b/Dockerfiles/postgresql.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/postgresql'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/postgresql'
LABEL org.opencontainers.image.description='Malcolm container providing the PostgreSQL object-relational database'
ARG DEFAULT_UID=1000
@@ -27,7 +27,7 @@ COPY --from=pierrezemb/gostatic --chmod=755 /goStatic /usr/bin/goStatic
RUN apk update --no-cache && \
apk upgrade --no-cache && \
- apk add --no-cache bash procps psmisc shadow tini && \
+ apk add --no-cache bash procps psmisc rsync shadow tini && \
apk add --no-cache --virtual .build-deps rsync && \
rsync -a /usr/local/bin/ /usr/bin/ && \
rsync -a /usr/local/share/ /usr/share/ && \
diff --git a/Dockerfiles/redis.Dockerfile b/Dockerfiles/redis.Dockerfile
index 50758f82e..314453b3d 100644
--- a/Dockerfiles/redis.Dockerfile
+++ b/Dockerfiles/redis.Dockerfile
@@ -7,7 +7,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/redis'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/redis'
LABEL org.opencontainers.image.description='Malcolm container providing Redis, an in-memory data structure store'
ARG DEFAULT_UID=999
@@ -26,7 +26,7 @@ COPY --from=pierrezemb/gostatic --chmod=755 /goStatic /usr/bin/goStatic
RUN apk update --no-cache && \
apk upgrade --no-cache && \
- apk --no-cache add bash psmisc shadow tini && \
+ apk --no-cache add bash psmisc rsync shadow tini && \
addgroup ${PUSER} tty
WORKDIR /home/${PUSER}
diff --git a/Dockerfiles/suricata.Dockerfile b/Dockerfiles/suricata.Dockerfile
index 42dde7a4f..bfbea18bb 100644
--- a/Dockerfiles/suricata.Dockerfile
+++ b/Dockerfiles/suricata.Dockerfile
@@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/suricata'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/suricata'
LABEL org.opencontainers.image.description='Malcolm container providing Suricata'
ENV DEBIAN_FRONTEND noninteractive
@@ -92,6 +92,7 @@ RUN sed -i "s/bullseye main/bullseye main contrib non-free/g" /etc/apt/sources.l
psmisc \
python3-ruamel.yaml \
python3-zmq \
+ rsync \
supervisor \
vim-tiny \
tini \
diff --git a/Dockerfiles/zeek.Dockerfile b/Dockerfiles/zeek.Dockerfile
index e0c59c022..9ce2e4340 100644
--- a/Dockerfiles/zeek.Dockerfile
+++ b/Dockerfiles/zeek.Dockerfile
@@ -8,7 +8,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/zeek'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/zeek'
LABEL org.opencontainers.image.description='Malcolm container providing Zeek'
ENV DEBIAN_FRONTEND noninteractive
@@ -30,8 +30,8 @@ ENV PGROUP "zeeker"
ENV PUSER_PRIV_DROP false
# for download and install
-ARG ZEEK_LTS=true
-ARG ZEEK_VERSION=5.0.7-0
+ARG ZEEK_LTS=
+ARG ZEEK_VERSION=5.2.0-0
ENV ZEEK_LTS $ZEEK_LTS
ENV ZEEK_VERSION $ZEEK_VERSION
@@ -102,6 +102,7 @@ RUN export DEBARCH=$(dpkg --print-architecture) && \
python3-tz \
python3-wheel \
python3-zmq \
+ rsync \
supervisor \
swig \
tini \
@@ -163,8 +164,8 @@ ADD shared/bin/nic-capture-setup.sh /usr/local/bin/
# sanity checks to make sure the plugins installed and copied over correctly
# these ENVs should match the number of third party scripts/plugins installed by zeek_install_plugins.sh
-ENV ZEEK_THIRD_PARTY_PLUGINS_COUNT 22
-ENV ZEEK_THIRD_PARTY_PLUGINS_GREP "(Zeek::Spicy|ANALYZER_SPICY_DHCP|ANALYZER_SPICY_DNS|ANALYZER_SPICY_HTTP|ANALYZER_SPICY__OSPF|ANALYZER_SPICY_OPENVPN_UDP\b|ANALYZER_SPICY_IPSEC_UDP\b|ANALYZER_SPICY_TFTP|ANALYZER_SPICY_WIREGUARD|ANALYZER_SPICY_LDAP_TCP|ANALYZER_SPICY_GENISYS_TCP|ANALYZER_S7COMM_TCP|Corelight::CommunityID|Corelight::PE_XOR|ICSNPP::BACnet|ICSNPP::BSAP|ICSNPP::ENIP|ICSNPP::ETHERCAT|ICSNPP::OPCUA_Binary|Salesforce::GQUIC|Zeek::PROFINET|Zeek::TDS)"
+ENV ZEEK_THIRD_PARTY_PLUGINS_COUNT 23
+ENV ZEEK_THIRD_PARTY_PLUGINS_GREP "(Zeek::Spicy|ANALYZER_SPICY_DHCP|ANALYZER_SPICY_DNS|ANALYZER_SPICY_HTTP|ANALYZER_SPICY_OSPF|ANALYZER_SPICY_OPENVPN_UDP\b|ANALYZER_SPICY_IPSEC_UDP\b|ANALYZER_SPICY_TFTP|ANALYZER_SPICY_WIREGUARD|ANALYZER_SPICY_LDAP_TCP|ANALYZER_SPICY_SYNCHROPHASOR_TCP|ANALYZER_SPICY_GENISYS_TCP|ANALYZER_S7COMM_TCP|Corelight::CommunityID|Corelight::PE_XOR|ICSNPP::BACnet|ICSNPP::BSAP|ICSNPP::ENIP|ICSNPP::ETHERCAT|ICSNPP::OPCUA_Binary|Salesforce::GQUIC|Zeek::PROFINET|Zeek::TDS)"
ENV ZEEK_THIRD_PARTY_SCRIPTS_COUNT 25
ENV ZEEK_THIRD_PARTY_SCRIPTS_GREP "(bro-is-darknet/main|bro-simple-scan/scan|bzar/main|callstranger-detector/callstranger|cve-2020-0601/cve-2020-0601|cve-2020-13777/cve-2020-13777|CVE-2020-16898/CVE-2020-16898|CVE-2021-38647/omigod|CVE-2021-31166/detect|CVE-2021-41773/CVE_2021_41773|CVE-2021-42292/main|cve-2021-44228/CVE_2021_44228|cve-2022-22954/main|cve-2022-26809/main|CVE-2022-3602/__load__|hassh/hassh|http-more-files-names/main|ja3/ja3|pingback/detect|ripple20/ripple20|SIGRed/CVE-2020-1350|zeek-EternalSafety/main|zeek-httpattacks/main|zeek-sniffpass/__load__|zerologon/main)\.(zeek|bro)"
diff --git a/dashboards/anomaly_detectors/malcolm_init_dummy.json b/dashboards/anomaly_detectors/malcolm_init_dummy.json
index 18f9bead5..5ee2a4095 100644
--- a/dashboards/anomaly_detectors/malcolm_init_dummy.json
+++ b/dashboards/anomaly_detectors/malcolm_init_dummy.json
@@ -1,7 +1,7 @@
{
"name": "malcolm_init_dummy",
"description": "A dummy detector to force opensearch anomaly detection index creation",
- "time_field": "timestamp",
+ "time_field": "firstPacket",
"indices": [
"arkime_sessions3-*"
],
diff --git a/dashboards/templates/malcolm_beats_template.json b/dashboards/templates/malcolm_beats_template.json
index 4effb4f03..fdbad390e 100644
--- a/dashboards/templates/malcolm_beats_template.json
+++ b/dashboards/templates/malcolm_beats_template.json
@@ -26,7 +26,8 @@
"template" :{
"settings" : {
"index" : {
- "mapping.total_fields.limit" : "5000"
+ "mapping.total_fields.limit" : "5000",
+ "mapping.nested_fields.limit" : "250"
}
},
"mappings": {
diff --git a/dashboards/templates/malcolm_template.json b/dashboards/templates/malcolm_template.json
index 04d6fb0f7..36b8e85ea 100644
--- a/dashboards/templates/malcolm_template.json
+++ b/dashboards/templates/malcolm_template.json
@@ -30,7 +30,8 @@
"template" :{
"settings" : {
"index" : {
- "mapping.total_fields.limit" : "5000"
+ "mapping.total_fields.limit" : "5000",
+ "mapping.nested_fields.limit" : "250"
}
},
"mappings": {
diff --git a/docker-compose-standalone.yml b/docker-compose-standalone.yml
index ccdc723e3..2e11c66c4 100644
--- a/docker-compose-standalone.yml
+++ b/docker-compose-standalone.yml
@@ -359,7 +359,7 @@ x-pcap-capture-variables: &pcap-capture-variables
services:
opensearch:
- image: malcolmnetsec/opensearch:23.03.0
+ image: ghcr.io/idaholab/malcolm/opensearch:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -367,9 +367,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
+ <<: [*opensearch-variables, *ssl-variables, *process-variables]
logger.level : 'WARN'
bootstrap.memory_lock : 'true'
MAX_LOCKED_MEMORY : 'unlimited'
@@ -400,7 +398,7 @@ services:
retries: 3
start_period: 180s
dashboards-helper:
- image: malcolmnetsec/dashboards-helper:23.03.0
+ image: ghcr.io/idaholab/malcolm/dashboards-helper:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -408,10 +406,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *dashboards-helper-variables
+ <<: [*dashboards-helper-variables, *opensearch-variables, *ssl-variables, *process-variables]
DASHBOARDS_URL : 'http://dashboards:5601/dashboards'
VIRTUAL_HOST : 'dashboards-helper.malcolm.local'
ARKIME_INDEX_PATTERN : 'arkime_sessions3-*'
@@ -431,7 +426,7 @@ services:
retries: 3
start_period: 30s
dashboards:
- image: malcolmnetsec/dashboards:23.03.0
+ image: ghcr.io/idaholab/malcolm/dashboards:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -439,9 +434,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
+ <<: [*opensearch-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'dashboards.malcolm.local'
depends_on:
- opensearch
@@ -456,7 +449,7 @@ services:
retries: 3
start_period: 210s
logstash:
- image: malcolmnetsec/logstash-oss:23.03.0
+ image: ghcr.io/idaholab/malcolm/logstash-oss:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -471,13 +464,7 @@ services:
- IPC_LOCK
env_file: ./netbox/env/netbox.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *netbox-variables
- << : *logstash-variables
- << : *common-beats-variables
- << : *common-lookup-variables
+ <<: [*common-lookup-variables, *common-beats-variables, *logstash-variables, *netbox-variables, *opensearch-variables, *ssl-variables, *process-variables]
LS_JAVA_OPTS : '-server -Xms2g -Xmx2g -Xss1536k -XX:-HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/./urandom -Dlog4j.formatMsgNoLookups=true'
depends_on:
- opensearch
@@ -499,7 +486,7 @@ services:
retries: 3
start_period: 600s
filebeat:
- image: malcolmnetsec/filebeat-oss:23.03.0
+ image: ghcr.io/idaholab/malcolm/filebeat-oss:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -507,13 +494,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *nginx-variables
- << : *opensearch-variables
- << : *filebeat-variables
- << : *common-upload-variables
- << : *common-beats-variables
+ <<: [*common-beats-variables, *common-upload-variables, *filebeat-variables, *opensearch-variables, *nginx-variables, *ssl-variables, *process-variables]
FILEBEAT_ZEEK_LOG_PATH : '/zeek/current'
FILEBEAT_ZEEK_LOG_LIVE_PATH : '/zeek/live'
FILEBEAT_SURICATA_LOG_PATH : '/suricata'
@@ -538,7 +519,7 @@ services:
retries: 3
start_period: 60s
arkime:
- image: malcolmnetsec/arkime:23.03.0
+ image: ghcr.io/idaholab/malcolm/arkime:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -548,11 +529,7 @@ services:
env_file:
- ./auth.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *common-upload-variables
- << : *arkime-variables
+ <<: [*arkime-variables, *common-upload-variables, *opensearch-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'arkime.malcolm.local'
OPENSEARCH_MAX_SHARDS_PER_NODE : 2500
VIEWER : 'on'
@@ -576,7 +553,7 @@ services:
retries: 3
start_period: 210s
zeek:
- image: malcolmnetsec/zeek:23.03.0
+ image: ghcr.io/idaholab/malcolm/zeek:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -593,11 +570,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *zeek-variables
- << : *zeek-offline-variables
+ <<: [*zeek-offline-variables, *zeek-variables, *common-upload-variables, *ssl-variables, *process-variables]
ZEEK_PCAP_PROCESSOR : 'true'
ZEEK_CRON : 'false'
depends_on:
@@ -615,7 +588,7 @@ services:
retries: 3
start_period: 60s
zeek-live:
- image: malcolmnetsec/zeek:23.03.0
+ image: ghcr.io/idaholab/malcolm/zeek:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -630,12 +603,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *zeek-variables
- << : *zeek-live-variables
- << : *pcap-capture-variables
+ <<: [*pcap-capture-variables, *zeek-live-variables, *zeek-variables, *common-upload-variables, *ssl-variables, *process-variables]
ZEEK_PCAP_PROCESSOR : 'false'
ZEEK_CRON : 'true'
ZEEK_LOG_PATH : '/zeek/live'
@@ -647,7 +615,7 @@ services:
- ./zeek-logs/extract_files:/zeek/extract_files
- ./zeek/intel:/opt/zeek/share/zeek/site/intel
suricata:
- image: malcolmnetsec/suricata:23.03.0
+ image: ghcr.io/idaholab/malcolm/suricata:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -664,11 +632,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *suricata-variables
- << : *suricata-offline-variables
+ <<: [*suricata-offline-variables, *suricata-variables, *common-upload-variables, *ssl-variables, *process-variables]
SURICATA_PCAP_PROCESSOR : 'true'
depends_on:
- logstash
@@ -684,7 +648,7 @@ services:
retries: 3
start_period: 120s
suricata-live:
- image: malcolmnetsec/suricata:23.03.0
+ image: ghcr.io/idaholab/malcolm/suricata:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -699,19 +663,14 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *suricata-variables
- << : *suricata-live-variables
- << : *pcap-capture-variables
+ <<: [*pcap-capture-variables, *suricata-live-variables, *suricata-variables, *common-upload-variables, *ssl-variables, *process-variables]
SURICATA_PCAP_PROCESSOR : 'false'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
- ./suricata-logs:/var/log/suricata
- ./suricata/rules:/opt/suricata/rules:ro
file-monitor:
- image: malcolmnetsec/file-monitor:23.03.0
+ image: ghcr.io/idaholab/malcolm/file-monitor:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -719,9 +678,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *zeek-variables
+ <<: [*zeek-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'file-monitor.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -735,7 +692,7 @@ services:
retries: 3
start_period: 60s
pcap-capture:
- image: malcolmnetsec/pcap-capture:23.03.0
+ image: ghcr.io/idaholab/malcolm/pcap-capture:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -750,14 +707,12 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *pcap-capture-variables
+ <<: [*pcap-capture-variables, *ssl-variables, *process-variables]
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
- ./pcap/upload:/pcap
pcap-monitor:
- image: malcolmnetsec/pcap-monitor:23.03.0
+ image: ghcr.io/idaholab/malcolm/pcap-monitor:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -765,10 +720,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *common-upload-variables
+ <<: [*common-upload-variables, *opensearch-variables, *ssl-variables, *process-variables]
depends_on:
- opensearch
volumes:
@@ -783,7 +735,7 @@ services:
retries: 3
start_period: 90s
upload:
- image: malcolmnetsec/file-upload:23.03.0
+ image: ghcr.io/idaholab/malcolm/file-upload:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -793,8 +745,7 @@ services:
env_file:
- ./auth.env
environment:
- << : *process-variables
- << : *ssl-variables
+ <<: [*ssl-variables, *process-variables]
SITE_NAME : 'Capture File and Log Archive Upload'
VIRTUAL_HOST : 'upload.malcolm.local'
depends_on:
@@ -811,7 +762,7 @@ services:
retries: 3
start_period: 60s
htadmin:
- image: malcolmnetsec/htadmin:23.03.0
+ image: ghcr.io/idaholab/malcolm/htadmin:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -819,9 +770,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *auth-variables
+ <<: [*auth-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'htadmin.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -835,7 +784,7 @@ services:
retries: 3
start_period: 60s
freq:
- image: malcolmnetsec/freq:23.03.0
+ image: ghcr.io/idaholab/malcolm/freq:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -843,9 +792,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-lookup-variables
+ <<: [*common-lookup-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'freq.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -856,7 +803,7 @@ services:
retries: 3
start_period: 60s
name-map-ui:
- image: malcolmnetsec/name-map-ui:23.03.0
+ image: ghcr.io/idaholab/malcolm/name-map-ui:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -864,8 +811,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
+ <<: [*ssl-variables, *process-variables]
VIRTUAL_HOST : 'name-map-ui.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -877,7 +823,7 @@ services:
retries: 3
start_period: 60s
netbox:
- image: malcolmnetsec/netbox:23.03.0
+ image: ghcr.io/idaholab/malcolm/netbox:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -886,9 +832,7 @@ services:
- default
env_file: ./netbox/env/netbox.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox.malcolm.local'
depends_on:
- netbox-postgres
@@ -908,7 +852,7 @@ services:
retries: 3
start_period: 120s
netbox-postgres:
- image: malcolmnetsec/postgresql:23.03.0
+ image: ghcr.io/idaholab/malcolm/postgresql:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -917,9 +861,7 @@ services:
- default
env_file: ./netbox/env/postgres.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox-postgres.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -931,7 +873,7 @@ services:
retries: 3
start_period: 45s
netbox-redis:
- image: malcolmnetsec/redis:23.03.0
+ image: ghcr.io/idaholab/malcolm/redis:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -940,9 +882,7 @@ services:
- default
env_file: ./netbox/env/redis.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox-redis.malcolm.local'
command:
- sh
@@ -958,7 +898,7 @@ services:
retries: 3
start_period: 45s
netbox-redis-cache:
- image: malcolmnetsec/redis:23.03.0
+ image: ghcr.io/idaholab/malcolm/redis:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -967,9 +907,7 @@ services:
- default
env_file: ./netbox/env/redis-cache.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox-redis-cache.malcolm.local'
command:
- sh
@@ -984,7 +922,7 @@ services:
retries: 3
start_period: 45s
api:
- image: malcolmnetsec/api:23.03.0
+ image: ghcr.io/idaholab/malcolm/api:23.04.0
command: gunicorn --bind 0:5000 manage:app
restart: "no"
stdin_open: false
@@ -993,9 +931,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
+ <<: [*opensearch-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'api.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -1007,7 +943,7 @@ services:
retries: 3
start_period: 60s
nginx-proxy:
- image: malcolmnetsec/nginx-proxy:23.03.0
+ image: ghcr.io/idaholab/malcolm/nginx-proxy:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -1015,10 +951,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *auth-variables
- << : *nginx-variables
+ <<: [*nginx-variables, *auth-variables, *ssl-variables, *process-variables]
depends_on:
- api
- arkime
diff --git a/docker-compose.yml b/docker-compose.yml
index 25cb415e6..600d418e8 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -362,7 +362,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/opensearch.Dockerfile
- image: malcolmnetsec/opensearch:23.03.0
+ image: ghcr.io/idaholab/malcolm/opensearch:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -370,9 +370,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
+ <<: [*opensearch-variables, *ssl-variables, *process-variables]
logger.level : 'INFO'
bootstrap.memory_lock : 'true'
MAX_LOCKED_MEMORY : 'unlimited'
@@ -406,7 +404,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/dashboards-helper.Dockerfile
- image: malcolmnetsec/dashboards-helper:23.03.0
+ image: ghcr.io/idaholab/malcolm/dashboards-helper:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -414,10 +412,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *dashboards-helper-variables
+ <<: [*dashboards-helper-variables, *opensearch-variables, *ssl-variables, *process-variables]
DASHBOARDS_URL : 'http://dashboards:5601/dashboards'
VIRTUAL_HOST : 'dashboards-helper.malcolm.local'
ARKIME_INDEX_PATTERN : 'arkime_sessions3-*'
@@ -440,7 +435,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/dashboards.Dockerfile
- image: malcolmnetsec/dashboards:23.03.0
+ image: ghcr.io/idaholab/malcolm/dashboards:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -448,9 +443,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
+ <<: [*opensearch-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'dashboards.malcolm.local'
depends_on:
- opensearch
@@ -468,7 +461,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/logstash.Dockerfile
- image: malcolmnetsec/logstash-oss:23.03.0
+ image: ghcr.io/idaholab/malcolm/logstash-oss:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -483,13 +476,7 @@ services:
- IPC_LOCK
env_file: ./netbox/env/netbox.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *netbox-variables
- << : *logstash-variables
- << : *common-beats-variables
- << : *common-lookup-variables
+ <<: [*common-lookup-variables, *common-beats-variables, *logstash-variables, *netbox-variables, *opensearch-variables, *ssl-variables, *process-variables]
LS_JAVA_OPTS : '-server -Xms2g -Xmx2g -Xss1536k -XX:-HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/./urandom -Dlog4j.formatMsgNoLookups=true'
depends_on:
- opensearch
@@ -518,7 +505,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/filebeat.Dockerfile
- image: malcolmnetsec/filebeat-oss:23.03.0
+ image: ghcr.io/idaholab/malcolm/filebeat-oss:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -526,13 +513,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *nginx-variables
- << : *opensearch-variables
- << : *filebeat-variables
- << : *common-upload-variables
- << : *common-beats-variables
+ <<: [*common-beats-variables, *common-upload-variables, *filebeat-variables, *opensearch-variables, *nginx-variables, *ssl-variables, *process-variables]
FILEBEAT_ZEEK_LOG_PATH : '/zeek/current'
FILEBEAT_ZEEK_LOG_LIVE_PATH : '/zeek/live'
FILEBEAT_SURICATA_LOG_PATH : '/suricata'
@@ -560,7 +541,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/arkime.Dockerfile
- image: malcolmnetsec/arkime:23.03.0
+ image: ghcr.io/idaholab/malcolm/arkime:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -570,11 +551,7 @@ services:
env_file:
- ./auth.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *common-upload-variables
- << : *arkime-variables
+ <<: [*arkime-variables, *common-upload-variables, *opensearch-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'arkime.malcolm.local'
OPENSEARCH_MAX_SHARDS_PER_NODE : 2500
VIEWER : 'on'
@@ -604,7 +581,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/zeek.Dockerfile
- image: malcolmnetsec/zeek:23.03.0
+ image: ghcr.io/idaholab/malcolm/zeek:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -621,11 +598,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *zeek-variables
- << : *zeek-offline-variables
+ <<: [*zeek-offline-variables, *zeek-variables, *common-upload-variables, *ssl-variables, *process-variables]
ZEEK_PCAP_PROCESSOR : 'true'
ZEEK_CRON : 'false'
depends_on:
@@ -647,7 +620,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/zeek.Dockerfile
- image: malcolmnetsec/zeek:23.03.0
+ image: ghcr.io/idaholab/malcolm/zeek:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -662,12 +635,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *zeek-variables
- << : *zeek-live-variables
- << : *pcap-capture-variables
+ <<: [*pcap-capture-variables, *zeek-live-variables, *zeek-variables, *common-upload-variables, *ssl-variables, *process-variables]
ZEEK_PCAP_PROCESSOR : 'false'
ZEEK_CRON : 'true'
ZEEK_LOG_PATH : '/zeek/live'
@@ -683,7 +651,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/suricata.Dockerfile
- image: malcolmnetsec/suricata:23.03.0
+ image: ghcr.io/idaholab/malcolm/suricata:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -700,11 +668,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *suricata-variables
- << : *suricata-offline-variables
+ <<: [*suricata-offline-variables, *suricata-variables, *common-upload-variables, *ssl-variables, *process-variables]
SURICATA_PCAP_PROCESSOR : 'true'
depends_on:
- logstash
@@ -723,7 +687,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/suricata.Dockerfile
- image: malcolmnetsec/suricata:23.03.0
+ image: ghcr.io/idaholab/malcolm/suricata:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -738,12 +702,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-upload-variables
- << : *suricata-variables
- << : *suricata-live-variables
- << : *pcap-capture-variables
+ <<: [*pcap-capture-variables, *suricata-live-variables, *suricata-variables, *common-upload-variables, *ssl-variables, *process-variables]
SURICATA_PCAP_PROCESSOR : 'false'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -753,7 +712,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/file-monitor.Dockerfile
- image: malcolmnetsec/file-monitor:23.03.0
+ image: ghcr.io/idaholab/malcolm/file-monitor:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -761,9 +720,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *zeek-variables
+ <<: [*zeek-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'file-monitor.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -780,7 +737,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/pcap-capture.Dockerfile
- image: malcolmnetsec/pcap-capture:23.03.0
+ image: ghcr.io/idaholab/malcolm/pcap-capture:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -795,9 +752,7 @@ services:
- NET_RAW
- SYS_ADMIN
environment:
- << : *process-variables
- << : *ssl-variables
- << : *pcap-capture-variables
+ <<: [*pcap-capture-variables, *ssl-variables, *process-variables]
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
- ./pcap/upload:/pcap
@@ -805,7 +760,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/pcap-monitor.Dockerfile
- image: malcolmnetsec/pcap-monitor:23.03.0
+ image: ghcr.io/idaholab/malcolm/pcap-monitor:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -813,10 +768,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
- << : *common-upload-variables
+ <<: [*common-upload-variables, *opensearch-variables, *ssl-variables, *process-variables]
depends_on:
- opensearch
volumes:
@@ -834,7 +786,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/file-upload.Dockerfile
- image: malcolmnetsec/file-upload:23.03.0
+ image: ghcr.io/idaholab/malcolm/file-upload:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -844,8 +796,7 @@ services:
env_file:
- ./auth.env
environment:
- << : *process-variables
- << : *ssl-variables
+ <<: [*ssl-variables, *process-variables]
SITE_NAME : 'Capture File and Log Archive Upload'
VIRTUAL_HOST : 'upload.malcolm.local'
depends_on:
@@ -862,7 +813,7 @@ services:
retries: 3
start_period: 60s
htadmin:
- image: malcolmnetsec/htadmin:23.03.0
+ image: ghcr.io/idaholab/malcolm/htadmin:23.04.0
build:
context: .
dockerfile: Dockerfiles/htadmin.Dockerfile
@@ -873,9 +824,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *auth-variables
+ <<: [*auth-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'htadmin.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -889,7 +838,7 @@ services:
retries: 3
start_period: 60s
freq:
- image: malcolmnetsec/freq:23.03.0
+ image: ghcr.io/idaholab/malcolm/freq:23.04.0
build:
context: .
dockerfile: Dockerfiles/freq.Dockerfile
@@ -900,9 +849,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *common-lookup-variables
+ <<: [*common-lookup-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'freq.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -913,7 +860,7 @@ services:
retries: 3
start_period: 60s
name-map-ui:
- image: malcolmnetsec/name-map-ui:23.03.0
+ image: ghcr.io/idaholab/malcolm/name-map-ui:23.04.0
build:
context: .
dockerfile: Dockerfiles/name-map-ui.Dockerfile
@@ -924,8 +871,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
+ <<: [*ssl-variables, *process-variables]
VIRTUAL_HOST : 'name-map-ui.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -937,7 +883,7 @@ services:
retries: 3
start_period: 60s
netbox:
- image: malcolmnetsec/netbox:23.03.0
+ image: ghcr.io/idaholab/malcolm/netbox:23.04.0
build:
context: .
dockerfile: Dockerfiles/netbox.Dockerfile
@@ -949,9 +895,7 @@ services:
- default
env_file: ./netbox/env/netbox.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox.malcolm.local'
depends_on:
- netbox-postgres
@@ -972,7 +916,7 @@ services:
retries: 3
start_period: 120s
netbox-postgres:
- image: malcolmnetsec/postgresql:23.03.0
+ image: ghcr.io/idaholab/malcolm/postgresql:23.04.0
build:
context: .
dockerfile: Dockerfiles/postgresql.Dockerfile
@@ -984,9 +928,7 @@ services:
- default
env_file: ./netbox/env/postgres.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox-postgres.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -998,7 +940,7 @@ services:
retries: 3
start_period: 45s
netbox-redis:
- image: malcolmnetsec/redis:23.03.0
+ image: ghcr.io/idaholab/malcolm/redis:23.04.0
build:
context: .
dockerfile: Dockerfiles/redis.Dockerfile
@@ -1010,9 +952,7 @@ services:
- default
env_file: ./netbox/env/redis.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox-redis.malcolm.local'
command:
- sh
@@ -1028,7 +968,7 @@ services:
retries: 3
start_period: 45s
netbox-redis-cache:
- image: malcolmnetsec/redis:23.03.0
+ image: ghcr.io/idaholab/malcolm/redis:23.04.0
build:
context: .
dockerfile: Dockerfiles/redis.Dockerfile
@@ -1040,9 +980,7 @@ services:
- default
env_file: ./netbox/env/redis-cache.env
environment:
- << : *process-variables
- << : *ssl-variables
- << : *netbox-variables
+ <<: [*netbox-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'netbox-redis-cache.malcolm.local'
command:
- sh
@@ -1057,7 +995,7 @@ services:
retries: 3
start_period: 45s
api:
- image: malcolmnetsec/api:23.03.0
+ image: ghcr.io/idaholab/malcolm/api:23.04.0
build:
context: .
dockerfile: Dockerfiles/api.Dockerfile
@@ -1069,9 +1007,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *opensearch-variables
+ <<: [*opensearch-variables, *ssl-variables, *process-variables]
VIRTUAL_HOST : 'api.malcolm.local'
volumes:
- ./nginx/ca-trust:/var/local/ca-trust:ro
@@ -1086,7 +1022,7 @@ services:
build:
context: .
dockerfile: Dockerfiles/nginx.Dockerfile
- image: malcolmnetsec/nginx-proxy:23.03.0
+ image: ghcr.io/idaholab/malcolm/nginx-proxy:23.04.0
restart: "no"
stdin_open: false
tty: true
@@ -1094,10 +1030,7 @@ services:
networks:
- default
environment:
- << : *process-variables
- << : *ssl-variables
- << : *auth-variables
- << : *nginx-variables
+ <<: [*nginx-variables, *auth-variables, *ssl-variables, *process-variables]
depends_on:
- api
- arkime
diff --git a/docs/alerting.md b/docs/alerting.md
index 4aa5f83ab..283974315 100644
--- a/docs/alerting.md
+++ b/docs/alerting.md
@@ -20,9 +20,7 @@ Store administrator username/password for local Malcolm access? (Y/n): n
(Re)generate self-signed certificates for a remote log forwarder (Y/n): n
-Store username/password for primary remote OpenSearch instance? (y/N): n
-
-Store username/password for secondary remote OpenSearch instance? (y/N): n
+Will Malcolm be using an existing remote primary or secondary OpenSearch instance? (y/N): n
Store username/password for email alert sender account? (y/N): y
diff --git a/docs/arkime.md b/docs/arkime.md
index 755798a76..a95464617 100644
--- a/docs/arkime.md
+++ b/docs/arkime.md
@@ -94,7 +94,7 @@ Arkime's **SPI** (**S**ession **P**rofile **I**nformation) **View** provides a q
Click the the plus **➕** icon to the right of a category to expand it. The values for specific fields are displayed by clicking the field description in the field list underneath the category name. The list of field names can be filtered by typing part of the field name in the *Search for fields to display in this category* text input. The **Load All** and **Unload All** buttons can be used to toggle display of all of the fields belonging to that category. Once displayed, a field's name or one of its values may be clicked to provide further actions for filtering or displaying that field or its values. Of particular interest may be the **Open [fieldname] SPI Graph** option when clicking on a field's name. This will open a new tab with the SPI Graph ([see below](#ArkimeSPIGraph)) populated with the field's top values.
-Note that because the SPIView page can potentially run many queries, SPIView limits the search domain to seven days (in other words, seven indices, as each index represents one day's worth of data). When using SPIView, you will have best results if you limit your search time frame to less than or equal to seven days. This limit can be adjusted by editing the `spiDataMaxIndices` setting in [config.ini]({{ site.github.repository_url }}/blob/{{ site.github.build_revision }}/arkime/etc/config.ini) and rebuilding the `malcolmnetsec/arkime` docker container.
+Note that because the SPIView page can potentially run many queries, SPIView limits the search domain to seven days (in other words, seven indices, as each index represents one day's worth of data). When using SPIView, you will have best results if you limit your search time frame to less than or equal to seven days. This limit can be adjusted by editing the `spiDataMaxIndices` setting in [config.ini]({{ site.github.repository_url }}/blob/{{ site.github.build_revision }}/arkime/etc/config.ini) and rebuilding the `ghcr.io/idaholab/malcolm/arkime` docker container.
See also Arkime's usage documentation for more information on [SPIView](https://localhost/help#spiview).
diff --git a/docs/contributing-local-modifications.md b/docs/contributing-local-modifications.md
index cd1444b08..3a26098ab 100644
--- a/docs/contributing-local-modifications.md
+++ b/docs/contributing-local-modifications.md
@@ -117,6 +117,6 @@ See the documentation on [Docker bind mount](https://docs.docker.com/storage/bin
Another method for modifying your local copies of Malcolm's services' containers is to [build your own](development.md#Build) containers with the modifications baked-in.
-For example, say you wanted to create a Malcolm container which includes a new dashboard for OpenSearch Dashboards and a new enrichment filter `.conf` file for Logstash. After placing these files under `./dashboards/dashboards` and `./logstash/pipelines/enrichment`, respectively, in your Malcolm working copy, run `./build.sh dashboards-helper logstash` to build just those containers. After the build completes, you can run `docker images` and see you have fresh images for `malcolmnetsec/dashboards-helper` and `malcolmnetsec/logstash-oss`. You may need to review the contents of the [Dockerfiles]({{ site.github.repository_url }}/blob/{{ site.github.build_revision }}/Dockerfiles) to determine the correct service and filesystem location within that service's Docker image depending on what you're trying to accomplish.
+For example, say you wanted to create a Malcolm container which includes a new dashboard for OpenSearch Dashboards and a new enrichment filter `.conf` file for Logstash. After placing these files under `./dashboards/dashboards` and `./logstash/pipelines/enrichment`, respectively, in your Malcolm working copy, run `./build.sh dashboards-helper logstash` to build just those containers. After the build completes, you can run `docker images` and see you have fresh images for `ghcr.io/idaholab/malcolm/dashboards-helper` and `ghcr.io/idaholab/malcolm/logstash-oss`. You may need to review the contents of the [Dockerfiles]({{ site.github.repository_url }}/blob/{{ site.github.build_revision }}/Dockerfiles) to determine the correct service and filesystem location within that service's Docker image depending on what you're trying to accomplish.
-Alternately, if you have forked Malcolm on GitHub, [workflow files]({{ site.github.repository_url }}/tree/{{ site.github.build_revision }}/.github/workflows/) are provided which contain instructions for GitHub to build the docker images and [sensor](live-analysis.md#Hedgehog) and [Malcolm](malcolm-iso.md#ISO) installer ISOs. The resulting images are named according to the pattern `ghcr.io/owner/malcolmnetsec/image:branch` (e.g., if you've forked Malcolm with the github user `romeogdetlevjr`, the `arkime` container built for the `main` would be named `ghcr.io/romeogdetlevjr/malcolmnetsec/arkime:main`). To run your local instance of Malcolm using these images instead of the official ones, you'll need to edit your `docker-compose.yml` file(s) and replace the `image:` tags according to this new pattern, or use the bash helper script `./shared/bin/github_image_helper.sh` to pull and re-tag the images.
\ No newline at end of file
+Alternately, if you have forked Malcolm on GitHub, [workflow files]({{ site.github.repository_url }}/tree/{{ site.github.build_revision }}/.github/workflows/) are provided which contain instructions for GitHub to build the docker images and [sensor](live-analysis.md#Hedgehog) and [Malcolm](malcolm-iso.md#ISO) installer ISOs. The resulting images are named according to the pattern `ghcr.io/owner/malcolm/image:branch` (e.g., if you've forked Malcolm with the github user `romeogdetlevjr`, the `arkime` container built for the `main` would be named `ghcr.io/romeogdetlevjr/malcolm/arkime:main`). To run your local instance of Malcolm using these images instead of the official ones, you'll need to edit your `docker-compose.yml` file(s) and replace the `image:` tags according to this new pattern, or use the bash helper script `./shared/bin/github_image_helper.sh` to pull and re-tag the images.
\ No newline at end of file
diff --git a/docs/development.md b/docs/development.md
index 10c1d2515..6633038af 100644
--- a/docs/development.md
+++ b/docs/development.md
@@ -54,28 +54,28 @@ $ ./scripts/build.sh
Then, go take a walk or something since it will be a while. When you're done, you can run `docker images` and see you have fresh images for:
-* `malcolmnetsec/api` (based on `python:3-slim`)
-* `malcolmnetsec/arkime` (based on `debian:11-slim`)
-* `malcolmnetsec/dashboards-helper` (based on `alpine:3.17`)
-* `malcolmnetsec/dashboards` (based on `opensearchproject/opensearch-dashboards`)
-* `malcolmnetsec/file-monitor` (based on `debian:11-slim`)
-* `malcolmnetsec/file-upload` (based on `debian:11-slim`)
-* `malcolmnetsec/filebeat-oss` (based on `docker.elastic.co/beats/filebeat-oss`)
-* `malcolmnetsec/freq` (based on `debian:11-slim`)
-* `malcolmnetsec/htadmin` (based on `debian:11-slim`)
-* `malcolmnetsec/logstash-oss` (based on `opensearchproject/logstash-oss-with-opensearch-output-plugin`)
-* `malcolmnetsec/name-map-ui` (based on `alpine:3.17`)
-* `malcolmnetsec/netbox` (based on `netboxcommunity/netbox:latest`)
-* `malcolmnetsec/nginx-proxy` (based on `alpine:3.17`)
-* `malcolmnetsec/opensearch` (based on `opensearchproject/opensearch`)
-* `malcolmnetsec/pcap-capture` (based on `debian:11-slim`)
-* `malcolmnetsec/pcap-monitor` (based on `debian:11-slim`)
-* `malcolmnetsec/postgresql` (based on `postgres:14-alpine`)
-* `malcolmnetsec/redis` (based on `redis:7-alpine`)
-* `malcolmnetsec/suricata` (based on `debian:11-slim`)
-* `malcolmnetsec/zeek` (based on `debian:11-slim`)
-
-Alternately, if you have forked Malcolm on GitHub, [workflow files]({{ site.github.repository_url }}/tree/{{ site.github.build_revision }}/.github/workflows/) are provided which contain instructions for GitHub to build the docker images and [sensor](live-analysis.md#Hedgehog) and [Malcolm](malcolm-iso.md#ISO) installer ISOs. The resulting images are named according to the pattern `ghcr.io/owner/malcolmnetsec/image:branch` (e.g., if you've forked Malcolm with the github user `romeogdetlevjr`, the `arkime` container built for the `main` would be named `ghcr.io/romeogdetlevjr/malcolmnetsec/arkime:main`). To run your local instance of Malcolm using these images instead of the official ones, you'll need to edit your `docker-compose.yml` file(s) and replace the `image:` tags according to this new pattern, or use the bash helper script `./shared/bin/github_image_helper.sh` to pull and re-tag the images.
+* `ghcr.io/idaholab/malcolm/api` (based on `python:3-slim`)
+* `ghcr.io/idaholab/malcolm/arkime` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/dashboards-helper` (based on `alpine:3.17`)
+* `ghcr.io/idaholab/malcolm/dashboards` (based on `opensearchproject/opensearch-dashboards`)
+* `ghcr.io/idaholab/malcolm/file-monitor` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/file-upload` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/filebeat-oss` (based on `docker.elastic.co/beats/filebeat-oss`)
+* `ghcr.io/idaholab/malcolm/freq` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/htadmin` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/logstash-oss` (based on `opensearchproject/logstash-oss-with-opensearch-output-plugin`)
+* `ghcr.io/idaholab/malcolm/name-map-ui` (based on `alpine:3.17`)
+* `ghcr.io/idaholab/malcolm/netbox` (based on `netboxcommunity/netbox:latest`)
+* `ghcr.io/idaholab/malcolm/nginx-proxy` (based on `alpine:3.17`)
+* `ghcr.io/idaholab/malcolm/opensearch` (based on `opensearchproject/opensearch`)
+* `ghcr.io/idaholab/malcolm/pcap-capture` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/pcap-monitor` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/postgresql` (based on `postgres:14-alpine`)
+* `ghcr.io/idaholab/malcolm/redis` (based on `redis:7-alpine`)
+* `ghcr.io/idaholab/malcolm/suricata` (based on `debian:11-slim`)
+* `ghcr.io/idaholab/malcolm/zeek` (based on `debian:11-slim`)
+
+Alternately, if you have forked Malcolm on GitHub, [workflow files]({{ site.github.repository_url }}/tree/{{ site.github.build_revision }}/.github/workflows/) are provided which contain instructions for GitHub to build the docker images and [sensor](live-analysis.md#Hedgehog) and [Malcolm](malcolm-iso.md#ISO) installer ISOs. The resulting images are named according to the pattern `ghcr.io/owner/malcolm/image:branch` (e.g., if you've forked Malcolm with the github user `romeogdetlevjr`, the `arkime` container built for the `main` would be named `ghcr.io/romeogdetlevjr/malcolm/arkime:main`). To run your local instance of Malcolm using these images instead of the official ones, you'll need to edit your `docker-compose.yml` file(s) and replace the `image:` tags according to this new pattern, or use the bash helper script `./shared/bin/github_image_helper.sh` to pull and re-tag the images.
# Pre-Packaged installation files
@@ -93,13 +93,13 @@ Administrator username: analyst
analyst password:
analyst password (again):
+Additional local accounts can be created at https://localhost:488/ when Malcolm is running
+
(Re)generate self-signed certificates for HTTPS access (Y/n): y
(Re)generate self-signed certificates for a remote log forwarder (Y/n): y
-Store username/password for primary remote OpenSearch instance? (y/N): n
-
-Store username/password for secondary remote OpenSearch instance? (y/N): n
+Will Malcolm be using an existing remote primary or secondary OpenSearch instance? (y/N): n
Store username/password for email alert sender account? (y/N): n
diff --git a/docs/download.md b/docs/download.md
index f0ae7097e..228d137d8 100644
--- a/docs/download.md
+++ b/docs/download.md
@@ -4,7 +4,7 @@
### Docker images
-Malcolm operates as a cluster of Docker containers, isolated sandboxes which each serve a dedicated function of the system. Its Docker images can be pulled from [Docker Hub](https://hub.docker.com/u/malcolmnetsec) or built from source by following the instructions in the [Quick Start](quickstart.md#QuickStart) section of the documentation.
+Malcolm operates as a cluster of Docker containers, isolated sandboxes which each serve a dedicated function of the system. Its Docker images can be pulled from [GitHub](https://github.com/orgs/idaholab/packages?repo_name=Malcolm) or built from source by following the instructions in the [Quick Start](quickstart.md#QuickStart) section of the documentation.
### Installer ISO
@@ -16,7 +16,7 @@ While official downloads of the Malcolm installer ISO are not provided, an **uno
| ISO | SHA256 |
|---|---|
-| [malcolm-23.03.0.iso](/iso/malcolm-23.03.0.iso) (5.3GiB) | [`9459fb0ce61fba8c7a9a9457b24d42182519dbb62247111471e38f8c190113eb`](/iso/malcolm-23.03.0.iso.sha256.txt) |
+| [malcolm-23.04.0.iso](/iso/malcolm-23.04.0.iso) (5.2GiB) | [`6f8292a3c0c0c43b3ea7919b0b5ad1caa1140796da315a779522cb998dea8d13`](/iso/malcolm-23.04.0.iso.sha256.txt) |
## Hedgehog Linux
@@ -26,7 +26,7 @@ While official downloads of the Malcolm installer ISO are not provided, an **uno
| ISO | SHA256 |
|---|---|
-| [hedgehog-23.03.0.iso](/iso/hedgehog-23.03.0.iso) (2.3GiB) | [`3cdba91e417f6ada83130aabc3be38dd0a8b12b6bda227859a546ace198680bc`](/iso/hedgehog-23.03.0.iso.sha256.txt) |
+| [hedgehog-23.04.0.iso](/iso/hedgehog-23.04.0.iso) (2.3GiB) | [`b0ef7afbd1fb8157b55115ca2a7ab118206b9498ab5a11c916f315c26775b0df`](/iso/hedgehog-23.04.0.iso.sha256.txt) |
## Warning
diff --git a/docs/hedgehog-iso-build.md b/docs/hedgehog-iso-build.md
index c37ed0b23..884995b0c 100644
--- a/docs/hedgehog-iso-build.md
+++ b/docs/hedgehog-iso-build.md
@@ -29,7 +29,7 @@ Building the ISO may take 90 minutes or more depending on your system. As the bu
```
…
-Finished, created "/sensor-build/hedgehog-23.03.0.iso"
+Finished, created "/sensor-build/hedgehog-23.04.0.iso"
…
```
diff --git a/docs/hedgehog.md b/docs/hedgehog.md
index e0493a3e9..8464427f6 100644
--- a/docs/hedgehog.md
+++ b/docs/hedgehog.md
@@ -2,7 +2,7 @@
**Network Traffic Capture Appliance**
-![](./images/hedgehog/logo/hedgehog-color-w-text.png)
+![Hedgehog Linux](./images/hedgehog/logo/hedgehog-color-w-text.png)
Hedgehog Linux is a Debian-based operating system built to
diff --git a/docs/host-config-windows.md b/docs/host-config-windows.md
index a12716b33..c024bf79d 100644
--- a/docs/host-config-windows.md
+++ b/docs/host-config-windows.md
@@ -6,7 +6,7 @@ Installing and configuring [Docker to run under Windows](https://docs.docker.com
1. Be running Windows 10, version 1903 or higher
1. Prepare your system and [install WSL](https://docs.microsoft.com/en-us/windows/wsl/install) and a Linux distribution by running `wsl --install -d Debian` in PowerShell as Administrator (these instructions are tested with Debian, but may work with other distributions)
-1. Install Docker Desktop for Windows either by downloading the installer from the [official Docker site](https://hub.docker.com/editions/community/docker-ce-desktop-windows) or installing it through [chocolatey](https://chocolatey.org/packages/docker-desktop).
+1. Install Docker Desktop for Windows either by downloading the installer from the [official Docker site](https://docs.docker.com/desktop/install/windows-install/) or installing it through [chocolatey](https://chocolatey.org/packages/docker-desktop).
1. Follow the [Docker Desktop WSL 2 backend](https://docs.docker.com/desktop/windows/wsl/) instructions to finish configuration and review best practices
1. Reboot
1. Open the WSL distribution's terminal and run run `docker info` to make sure Docker is running
diff --git a/docs/images/hedgehog/images/hedgehog-color-w-text.png b/docs/images/hedgehog/images/hedgehog-color-w-text.png
deleted file mode 120000
index e03fab34e..000000000
--- a/docs/images/hedgehog/images/hedgehog-color-w-text.png
+++ /dev/null
@@ -1 +0,0 @@
-../logo/hedgehog-color-w-text.png
\ No newline at end of file
diff --git a/docs/malcolm-iso.md b/docs/malcolm-iso.md
index edeb32810..4c013dbbb 100644
--- a/docs/malcolm-iso.md
+++ b/docs/malcolm-iso.md
@@ -41,11 +41,11 @@ Building the ISO may take 30 minutes or more depending on your system. As the bu
```
…
-Finished, created "/malcolm-build/malcolm-iso/malcolm-23.03.0.iso"
+Finished, created "/malcolm-build/malcolm-iso/malcolm-23.04.0.iso"
…
```
-By default, Malcolm's Docker images are not packaged with the installer ISO, assuming instead that you will pull the [latest images](https://hub.docker.com/u/malcolmnetsec) with a `docker-compose pull` command as described in the [Quick start](quickstart.md#QuickStart) section. If you wish to build an ISO with the latest Malcolm images included, follow the directions to create [pre-packaged installation files](development.md#Packager), which include a tarball with a name like `malcolm_YYYYMMDD_HHNNSS_xxxxxxx_images.tar.gz`. Then, pass that images tarball to the ISO build script with a `-d`, like this:
+By default, Malcolm's Docker images are not packaged with the installer ISO, assuming instead that you will pull the [latest images](https://github.com/orgs/idaholab/packages?repo_name=Malcolm) with a `docker-compose pull` command as described in the [Quick start](quickstart.md#QuickStart) section. If you wish to build an ISO with the latest Malcolm images included, follow the directions to create [pre-packaged installation files](development.md#Packager), which include a tarball with a name like `malcolm_YYYYMMDD_HHNNSS_xxxxxxx_images.tar.gz`. Then, pass that images tarball to the ISO build script with a `-d`, like this:
```
$ ./malcolm-iso/build_via_vagrant.sh -f -d malcolm_YYYYMMDD_HHNNSS_xxxxxxx_images.tar.gz
diff --git a/docs/malcolm-upgrade.md b/docs/malcolm-upgrade.md
index 112f3fefc..9e81b79b2 100644
--- a/docs/malcolm-upgrade.md
+++ b/docs/malcolm-upgrade.md
@@ -48,7 +48,7 @@ If you installed Malcolm from [pre-packaged installation files]({{ site.github.r
5. re-run `./scripts/install.py --configure` as described in [System configuration and tuning](malcolm-config.md#ConfigAndTuning)
6. using a file comparison tool (e.g., `diff`, `meld`, `Beyond Compare`, etc.), compare `docker-compose.yml` and the `docker-compare.yml` file you backed up in step 3, and manually migrate over any customizations you wish to preserve from that file (e.g., `PCAP_FILTER`, `MAXMIND_GEOIP_DB_LICENSE_KEY`, `MANAGE_PCAP_FILES`; [anything else](malcolm-config.md#DockerComposeYml) you may have edited by hand in `docker-compose.yml` that's not prompted for in `install.py --configure`)
7. pull the new docker images (this will take a while)
- * `docker-compose pull` to pull them from Docker Hub or `docker-compose load -i malcolm_YYYYMMDD_HHNNSS_xxxxxxx_images.tar.gz` if you have an offline tarball of the Malcolm docker images
+ * `docker-compose pull` to pull them from [GitHub](https://github.com/orgs/idaholab/packages?repo_name=Malcolm) or `docker-compose load -i malcolm_YYYYMMDD_HHNNSS_xxxxxxx_images.tar.gz` if you have an offline tarball of the Malcolm docker images
8. start Malcolm
* `./scripts/start`
9. you may be prompted to [configure authentication](authsetup.md#AuthSetup) if there are new authentication-related files that need to be generated
diff --git a/docs/opensearch-instances.md b/docs/opensearch-instances.md
index c77ab38c3..57427db2a 100644
--- a/docs/opensearch-instances.md
+++ b/docs/opensearch-instances.md
@@ -55,8 +55,12 @@ OpenSearch username: servicedb
servicedb password:
servicedb password (again):
+Additional local accounts can be created at https://localhost:488/ when Malcolm is running
+
Require SSL certificate validation for OpenSearch communication? (Y/n): n
+Will Malcolm be using an existing remote primary or secondary OpenSearch instance? (y/N): y
+
Store username/password for secondary remote OpenSearch instance? (y/N): y
OpenSearch username: remotedb
diff --git a/docs/protocols.md b/docs/protocols.md
index 9f32a4ff5..e40e11084 100644
--- a/docs/protocols.md
+++ b/docs/protocols.md
@@ -45,6 +45,7 @@ Malcolm uses [Zeek](https://docs.zeek.org/en/stable/script-reference/proto-analy
|Simple Network Management Protocol (SNMP)|[🔗](https://en.wikipedia.org/wiki/Simple_Network_Management_Protocol)|[🔗](https://tools.ietf.org/html/rfc2578)|[✓](https://github.com/arkime/arkime/blob/master/capture/parsers/smtp.c)|[✓](https://docs.zeek.org/en/stable/scripts/base/protocols/snmp/main.zeek.html#type-SNMP::Info)|
|SOCKS|[🔗](https://en.wikipedia.org/wiki/SOCKS)|[🔗](https://tools.ietf.org/html/rfc1928)|[✓](https://github.com/arkime/arkime/blob/master/capture/parsers/socks.c)|[✓](https://docs.zeek.org/en/stable/scripts/base/protocols/socks/main.zeek.html#type-SOCKS::Info)|
|STUN (Session Traversal Utilities for NAT)|[🔗](https://en.wikipedia.org/wiki/STUN)|[🔗](https://datatracker.ietf.org/doc/html/rfc3489)|[✓](https://github.com/arkime/arkime/blob/main/capture/parsers/misc.c#L147)|[✓](https://github.com/corelight/zeek-spicy-stun)|
+|Synchrophasor|[🔗](https://wiki.wireshark.org/IEEE-C37.118.md)[🔗](https://en.wikipedia.org/wiki/C37.118)|[🔗](https://standards.ieee.org/ieee/C37.118.2/4921/)||[✓](https://github.com/cisagov/icsnpp-synchrophasor)|
|Syslog|[🔗](https://en.wikipedia.org/wiki/Syslog)|[🔗](https://tools.ietf.org/html/rfc5424)|[✓](https://github.com/arkime/arkime/blob/master/capture/parsers/tls.c)|[✓](https://docs.zeek.org/en/stable/scripts/base/protocols/syslog/main.zeek.html#type-Syslog::Info)|
|Tabular Data Stream (TDS)|[🔗](https://en.wikipedia.org/wiki/Tabular_Data_Stream)|[🔗](https://www.freetds.org/tds.html) [🔗](https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-tds/b46a581a-39de-4745-b076-ec4dbb7d13ec)|[✓](https://github.com/arkime/arkime/blob/master/capture/parsers/tds.c)|[✓](https://github.com/amzn/zeek-plugin-tds/blob/master/scripts/main.zeek)|
|Telnet / remote shell (rsh) / remote login (rlogin)|[🔗](https://en.wikipedia.org/wiki/Telnet)[🔗](https://en.wikipedia.org/wiki/Berkeley_r-commands)|[🔗](https://tools.ietf.org/html/rfc854)[🔗](https://tools.ietf.org/html/rfc1282)|[✓](https://github.com/arkime/arkime/blob/master/capture/parsers/misc.c#L336)|[✓](https://docs.zeek.org/en/current/scripts/base/bif/plugins/Zeek_Login.events.bif.zeek.html)[❋]({{ site.github.repository_url }}/blob/{{ site.github.build_revision }}/zeek/config/login.zeek)|
diff --git a/docs/quickstart.md b/docs/quickstart.md
index 01dbc0a35..55416e359 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -24,7 +24,7 @@ You must run [`auth_setup`](authsetup.md#AuthSetup) prior to pulling Malcolm's D
### Pull Malcolm's Docker images
-Malcolm's Docker images are periodically built and hosted on [Docker Hub](https://hub.docker.com/u/malcolmnetsec). If you already have [Docker](https://www.docker.com/) and [Docker Compose](https://docs.docker.com/compose/), these prebuilt images can be pulled by navigating into the Malcolm directory (containing the `docker-compose.yml` file) and running `docker-compose pull` like this:
+Malcolm's Docker images are periodically built and hosted on [GitHub](https://github.com/orgs/idaholab/packages?repo_name=Malcolm). If you already have [Docker](https://www.docker.com/) and [Docker Compose](https://docs.docker.com/compose/), these prebuilt images can be pulled by navigating into the Malcolm directory (containing the `docker-compose.yml` file) and running `docker-compose pull` like this:
```
$ docker-compose pull
Pulling api ... done
@@ -53,26 +53,26 @@ You can then observe that the images have been retrieved by running `docker imag
```
$ docker images
REPOSITORY TAG IMAGE ID CREATED SIZE
-malcolmnetsec/api 23.03.0 xxxxxxxxxxxx 3 days ago 158MB
-malcolmnetsec/arkime 23.03.0 xxxxxxxxxxxx 3 days ago 816MB
-malcolmnetsec/dashboards 23.03.0 xxxxxxxxxxxx 3 days ago 1.02GB
-malcolmnetsec/dashboards-helper 23.03.0 xxxxxxxxxxxx 3 days ago 184MB
-malcolmnetsec/file-monitor 23.03.0 xxxxxxxxxxxx 3 days ago 588MB
-malcolmnetsec/file-upload 23.03.0 xxxxxxxxxxxx 3 days ago 259MB
-malcolmnetsec/filebeat-oss 23.03.0 xxxxxxxxxxxx 3 days ago 624MB
-malcolmnetsec/freq 23.03.0 xxxxxxxxxxxx 3 days ago 132MB
-malcolmnetsec/htadmin 23.03.0 xxxxxxxxxxxx 3 days ago 242MB
-malcolmnetsec/logstash-oss 23.03.0 xxxxxxxxxxxx 3 days ago 1.35GB
-malcolmnetsec/name-map-ui 23.03.0 xxxxxxxxxxxx 3 days ago 143MB
-malcolmnetsec/netbox 23.03.0 xxxxxxxxxxxx 3 days ago 1.01GB
-malcolmnetsec/nginx-proxy 23.03.0 xxxxxxxxxxxx 3 days ago 121MB
-malcolmnetsec/opensearch 23.03.0 xxxxxxxxxxxx 3 days ago 1.17GB
-malcolmnetsec/pcap-capture 23.03.0 xxxxxxxxxxxx 3 days ago 121MB
-malcolmnetsec/pcap-monitor 23.03.0 xxxxxxxxxxxx 3 days ago 213MB
-malcolmnetsec/postgresql 23.03.0 xxxxxxxxxxxx 3 days ago 268MB
-malcolmnetsec/redis 23.03.0 xxxxxxxxxxxx 3 days ago 34.2MB
-malcolmnetsec/suricata 23.03.0 xxxxxxxxxxxx 3 days ago 278MB
-malcolmnetsec/zeek 23.03.0 xxxxxxxxxxxx 3 days ago 1GB
+ghcr.io/idaholab/malcolm/api 23.04.0 xxxxxxxxxxxx 3 days ago 158MB
+ghcr.io/idaholab/malcolm/arkime 23.04.0 xxxxxxxxxxxx 3 days ago 816MB
+ghcr.io/idaholab/malcolm/dashboards 23.04.0 xxxxxxxxxxxx 3 days ago 1.02GB
+ghcr.io/idaholab/malcolm/dashboards-helper 23.04.0 xxxxxxxxxxxx 3 days ago 184MB
+ghcr.io/idaholab/malcolm/file-monitor 23.04.0 xxxxxxxxxxxx 3 days ago 588MB
+ghcr.io/idaholab/malcolm/file-upload 23.04.0 xxxxxxxxxxxx 3 days ago 259MB
+ghcr.io/idaholab/malcolm/filebeat-oss 23.04.0 xxxxxxxxxxxx 3 days ago 624MB
+ghcr.io/idaholab/malcolm/freq 23.04.0 xxxxxxxxxxxx 3 days ago 132MB
+ghcr.io/idaholab/malcolm/htadmin 23.04.0 xxxxxxxxxxxx 3 days ago 242MB
+ghcr.io/idaholab/malcolm/logstash-oss 23.04.0 xxxxxxxxxxxx 3 days ago 1.35GB
+ghcr.io/idaholab/malcolm/name-map-ui 23.04.0 xxxxxxxxxxxx 3 days ago 143MB
+ghcr.io/idaholab/malcolm/netbox 23.04.0 xxxxxxxxxxxx 3 days ago 1.01GB
+ghcr.io/idaholab/malcolm/nginx-proxy 23.04.0 xxxxxxxxxxxx 3 days ago 121MB
+ghcr.io/idaholab/malcolm/opensearch 23.04.0 xxxxxxxxxxxx 3 days ago 1.17GB
+ghcr.io/idaholab/malcolm/pcap-capture 23.04.0 xxxxxxxxxxxx 3 days ago 121MB
+ghcr.io/idaholab/malcolm/pcap-monitor 23.04.0 xxxxxxxxxxxx 3 days ago 213MB
+ghcr.io/idaholab/malcolm/postgresql 23.04.0 xxxxxxxxxxxx 3 days ago 268MB
+ghcr.io/idaholab/malcolm/redis 23.04.0 xxxxxxxxxxxx 3 days ago 34.2MB
+ghcr.io/idaholab/malcolm/suricata 23.04.0 xxxxxxxxxxxx 3 days ago 278MB
+ghcr.io/idaholab/malcolm/zeek 23.04.0 xxxxxxxxxxxx 3 days ago 1GB
```
### Import from pre-packaged tarballs
diff --git a/docs/ubuntu-install-example.md b/docs/ubuntu-install-example.md
index 61785dcde..051aacf61 100644
--- a/docs/ubuntu-install-example.md
+++ b/docs/ubuntu-install-example.md
@@ -198,6 +198,8 @@ Should Malcolm analyze live network traffic with Suricata? (y/N): y
Should Malcolm analyze live network traffic with Zeek? (y/N): y
+Should Malcolm use "best guess" to identify potential OT/ICS traffic with Zeek? (y/N): n
+
Specify capture interface(s) (comma-separated): eth0
Capture filter (tcpdump-like filter expression; leave blank to capture all traffic) (): not port 5044 and not port 8005 and not port 9200
@@ -222,20 +224,20 @@ Administrator username: analyst
analyst password:
analyst password (again):
+Additional local accounts can be created at https://localhost:488/ when Malcolm is running
+
(Re)generate self-signed certificates for HTTPS access (Y/n): y
(Re)generate self-signed certificates for a remote log forwarder (Y/n): y
-Store username/password for primary remote OpenSearch instance? (y/N): n
-
-Store username/password for secondary remote OpenSearch instance? (y/N): n
+Will Malcolm be using an existing remote primary or secondary OpenSearch instance? (y/N): n
Store username/password for email alert sender account? (y/N): n
(Re)generate internal passwords for NetBox (Y/n): y
```
-For now, rather than [build Malcolm from scratch](development.md#Build), we'll pull images from [Docker Hub](https://hub.docker.com/u/malcolmnetsec):
+For now, rather than [build Malcolm from scratch](development.md#Build), we'll pull images from [GitHub](https://github.com/orgs/idaholab/packages?repo_name=Malcolm):
```
user@host:~/Malcolm$ docker-compose pull
Pulling api ... done
@@ -261,26 +263,26 @@ Pulling zeek ... done
user@host:~/Malcolm$ docker images
REPOSITORY TAG IMAGE ID CREATED SIZE
-malcolmnetsec/api 23.03.0 xxxxxxxxxxxx 3 days ago 158MB
-malcolmnetsec/arkime 23.03.0 xxxxxxxxxxxx 3 days ago 816MB
-malcolmnetsec/dashboards 23.03.0 xxxxxxxxxxxx 3 days ago 1.02GB
-malcolmnetsec/dashboards-helper 23.03.0 xxxxxxxxxxxx 3 days ago 184MB
-malcolmnetsec/file-monitor 23.03.0 xxxxxxxxxxxx 3 days ago 588MB
-malcolmnetsec/file-upload 23.03.0 xxxxxxxxxxxx 3 days ago 259MB
-malcolmnetsec/filebeat-oss 23.03.0 xxxxxxxxxxxx 3 days ago 624MB
-malcolmnetsec/freq 23.03.0 xxxxxxxxxxxx 3 days ago 132MB
-malcolmnetsec/htadmin 23.03.0 xxxxxxxxxxxx 3 days ago 242MB
-malcolmnetsec/logstash-oss 23.03.0 xxxxxxxxxxxx 3 days ago 1.35GB
-malcolmnetsec/name-map-ui 23.03.0 xxxxxxxxxxxx 3 days ago 143MB
-malcolmnetsec/netbox 23.03.0 xxxxxxxxxxxx 3 days ago 1.01GB
-malcolmnetsec/nginx-proxy 23.03.0 xxxxxxxxxxxx 3 days ago 121MB
-malcolmnetsec/opensearch 23.03.0 xxxxxxxxxxxx 3 days ago 1.17GB
-malcolmnetsec/pcap-capture 23.03.0 xxxxxxxxxxxx 3 days ago 121MB
-malcolmnetsec/pcap-monitor 23.03.0 xxxxxxxxxxxx 3 days ago 213MB
-malcolmnetsec/postgresql 23.03.0 xxxxxxxxxxxx 3 days ago 268MB
-malcolmnetsec/redis 23.03.0 xxxxxxxxxxxx 3 days ago 34.2MB
-malcolmnetsec/suricata 23.03.0 xxxxxxxxxxxx 3 days ago 278MB
-malcolmnetsec/zeek 23.03.0 xxxxxxxxxxxx 3 days ago 1GB
+ghcr.io/idaholab/malcolm/api 23.04.0 xxxxxxxxxxxx 3 days ago 158MB
+ghcr.io/idaholab/malcolm/arkime 23.04.0 xxxxxxxxxxxx 3 days ago 816MB
+ghcr.io/idaholab/malcolm/dashboards 23.04.0 xxxxxxxxxxxx 3 days ago 1.02GB
+ghcr.io/idaholab/malcolm/dashboards-helper 23.04.0 xxxxxxxxxxxx 3 days ago 184MB
+ghcr.io/idaholab/malcolm/file-monitor 23.04.0 xxxxxxxxxxxx 3 days ago 588MB
+ghcr.io/idaholab/malcolm/file-upload 23.04.0 xxxxxxxxxxxx 3 days ago 259MB
+ghcr.io/idaholab/malcolm/filebeat-oss 23.04.0 xxxxxxxxxxxx 3 days ago 624MB
+ghcr.io/idaholab/malcolm/freq 23.04.0 xxxxxxxxxxxx 3 days ago 132MB
+ghcr.io/idaholab/malcolm/htadmin 23.04.0 xxxxxxxxxxxx 3 days ago 242MB
+ghcr.io/idaholab/malcolm/logstash-oss 23.04.0 xxxxxxxxxxxx 3 days ago 1.35GB
+ghcr.io/idaholab/malcolm/name-map-ui 23.04.0 xxxxxxxxxxxx 3 days ago 143MB
+ghcr.io/idaholab/malcolm/netbox 23.04.0 xxxxxxxxxxxx 3 days ago 1.01GB
+ghcr.io/idaholab/malcolm/nginx-proxy 23.04.0 xxxxxxxxxxxx 3 days ago 121MB
+ghcr.io/idaholab/malcolm/opensearch 23.04.0 xxxxxxxxxxxx 3 days ago 1.17GB
+ghcr.io/idaholab/malcolm/pcap-capture 23.04.0 xxxxxxxxxxxx 3 days ago 121MB
+ghcr.io/idaholab/malcolm/pcap-monitor 23.04.0 xxxxxxxxxxxx 3 days ago 213MB
+ghcr.io/idaholab/malcolm/postgresql 23.04.0 xxxxxxxxxxxx 3 days ago 268MB
+ghcr.io/idaholab/malcolm/redis 23.04.0 xxxxxxxxxxxx 3 days ago 34.2MB
+ghcr.io/idaholab/malcolm/suricata 23.04.0 xxxxxxxxxxxx 3 days ago 278MB
+ghcr.io/idaholab/malcolm/zeek 23.04.0 xxxxxxxxxxxx 3 days ago 1GB
```
Finally, we can start Malcolm. When Malcolm starts it will stream informational and debug messages to the console. If you wish, you can safely close the console or use `Ctrl+C` to stop these messages; Malcolm will continue running in the background.
diff --git a/docs/zeek-intel.md b/docs/zeek-intel.md
index 9d82d08e5..34290449a 100644
--- a/docs/zeek-intel.md
+++ b/docs/zeek-intel.md
@@ -6,7 +6,7 @@
To quote Zeek's [Intelligence Framework](https://docs.zeek.org/en/master/frameworks/intel.html) documentation, "The goals of Zeek’s Intelligence Framework are to consume intelligence data, make it available for matching, and provide infrastructure to improve performance and memory utilization. Data in the Intelligence Framework is an atomic piece of intelligence such as an IP address or an e-mail address. This atomic data will be packed with metadata such as a freeform source field, a freeform descriptive field, and a URL which might lead to more information about the specific item." Zeek [intelligence](https://docs.zeek.org/en/master/scripts/base/frameworks/intel/main.zeek.html) [indicator types](https://docs.zeek.org/en/master/scripts/base/frameworks/intel/main.zeek.html#type-Intel::Type) include IP addresses, URLs, file names, hashes, email addresses, and more.
-Malcolm doesn't come bundled with intelligence files from any particular feed, but they can be easily included into your local instance. On [startup]({{ site.github.repository_url }}/blob/{{ site.github.build_revision }}/shared/bin/zeek_intel_setup.sh), Malcolm's `malcolmnetsec/zeek` docker container enumerates the subdirectories under `./zeek/intel` (which is [bind mounted](https://docs.docker.com/storage/bind-mounts/) into the container's runtime) and configures Zeek so that those intelligence files will be automatically included in its local policy. Subdirectories under `./zeek/intel` which contain their own `__load__.zeek` file will be `@load`-ed as-is, while subdirectories containing "loose" intelligence files will be [loaded](https://docs.zeek.org/en/master/frameworks/intel.html#loading-intelligence) automatically with a `redef Intel::read_files` directive.
+Malcolm doesn't come bundled with intelligence files from any particular feed, but they can be easily included into your local instance. On [startup]({{ site.github.repository_url }}/blob/{{ site.github.build_revision }}/shared/bin/zeek_intel_setup.sh), Malcolm's `ghcr.io/idaholab/malcolm/zeek` docker container enumerates the subdirectories under `./zeek/intel` (which is [bind mounted](https://docs.docker.com/storage/bind-mounts/) into the container's runtime) and configures Zeek so that those intelligence files will be automatically included in its local policy. Subdirectories under `./zeek/intel` which contain their own `__load__.zeek` file will be `@load`-ed as-is, while subdirectories containing "loose" intelligence files will be [loaded](https://docs.zeek.org/en/master/frameworks/intel.html#loading-intelligence) automatically with a `redef Intel::read_files` directive.
Note that Malcolm does not manage updates for these intelligence files. You should use the update mechanism suggested by your feeds' maintainers to keep them up to date, or use a [TAXII](#ZeekIntelSTIX) or [MISP](#ZeekIntelMISP) feed as described below.
diff --git a/filebeat/scripts/zeek-log-fields.json b/filebeat/scripts/zeek-log-fields.json
index cf6614202..290ba4bde 100644
--- a/filebeat/scripts/zeek-log-fields.json
+++ b/filebeat/scripts/zeek-log-fields.json
@@ -76,7 +76,38 @@
"sha256",
"extracted",
"extracted_cutoff",
- "extracted_size"
+ "extracted_size",
+ "ftime"
+ ],
+ [
+ "ts",
+ "fuid",
+ "uid",
+ "id.orig_h",
+ "id.orig_p",
+ "id.resp_h",
+ "id.resp_p",
+ "source",
+ "depth",
+ "analyzers",
+ "mime_type",
+ "filename",
+ "duration",
+ "local_orig",
+ "is_orig",
+ "seen_bytes",
+ "total_bytes",
+ "missing_bytes",
+ "overflow_bytes",
+ "timedout",
+ "parent_fuid",
+ "md5",
+ "sha1",
+ "sha256",
+ "extracted",
+ "extracted_cutoff",
+ "extracted_size",
+ "ftime"
]
],
"http": [
diff --git a/logstash/pipelines/zeek/10_zeek_prep.conf b/logstash/pipelines/zeek/10_zeek_prep.conf
index 15d1d17fa..48c5a2d1e 100644
--- a/logstash/pipelines/zeek/10_zeek_prep.conf
+++ b/logstash/pipelines/zeek/10_zeek_prep.conf
@@ -24,7 +24,8 @@ filter {
}
# report types we're going to ignore
- if (([log_source] == "bsap_ip_unknown") or
+ if (([log_source] == "analyzer") or
+ ([log_source] == "bsap_ip_unknown") or
([log_source] == "bsap_serial_unknown") or
([log_source] == "ecat_arp_info") or
([log_source] == "reporter") or
diff --git a/logstash/pipelines/zeek/11_zeek_parse.conf b/logstash/pipelines/zeek/11_zeek_parse.conf
index 9314d04b9..2c6f1203b 100644
--- a/logstash/pipelines/zeek/11_zeek_parse.conf
+++ b/logstash/pipelines/zeek/11_zeek_parse.conf
@@ -1205,34 +1205,42 @@ filter {
# files.log
# https://docs.zeek.org/en/stable/scripts/base/frameworks/files/main.zeek.html#type-Files::Info
- # TODO: spicy-zip adds ftime (https://github.com/zeek/spicy-zip/blob/main/analyzer/main.zeek) which screws my bitmaps up :/
-
if ([@metadata][zeek_fields_bitmap] and [@metadata][zeek_fields_bitmap_version]) {
- # bitmap files.log field configuration version 0
+ # bitmap files.log field configuration version 1
#
- # all fields (with extracted file size info) : 0x01FFFFFF / 33554431
- # all fields (without extracted file size info) : 0x007FFFFF / 8388607
+ # all fields (v5.1+, with extracted file size info and spicy-zip's ftime) : 0x0FFFFFFF / 268435455
- if ([@metadata][zeek_fields_bitmap_version] == 0) {
+ # bitmap files.log field configuration version 0
+ # all fields (< v5.1, with extracted file size info and spicy-zip's ftime) : 0x03FFFFFF / 67108863
- if ([@metadata][zeek_fields_bitmap] == 33554431) {
+ if ([@metadata][zeek_fields_bitmap_version] == 1) {
+
+ if ([@metadata][zeek_fields_bitmap] == 268435455) {
dissect {
- id => "dissect_zeek_files_with_all_fields"
+ id => "dissect_zeek_v51_files_with_all_fields"
# zeek's default delimiter is a literal tab, MAKE SURE YOUR EDITOR DOESN'T SCREW IT UP
mapping => {
- "[message]" => "%{[zeek_cols][ts]} %{[zeek_cols][fuid]} %{[zeek_cols][tx_hosts]} %{[zeek_cols][rx_hosts]} %{[zeek_cols][conn_uids]} %{[zeek_cols][source]} %{[zeek_cols][depth]} %{[zeek_cols][analyzers]} %{[zeek_cols][mime_type]} %{[zeek_cols][filename]} %{[zeek_cols][duration]} %{[zeek_cols][local_orig]} %{[zeek_cols][is_orig]} %{[zeek_cols][seen_bytes]} %{[zeek_cols][total_bytes]} %{[zeek_cols][missing_bytes]} %{[zeek_cols][overflow_bytes]} %{[zeek_cols][timedout]} %{[zeek_cols][parent_fuid]} %{[zeek_cols][md5]} %{[zeek_cols][sha1]} %{[zeek_cols][sha256]} %{[zeek_cols][extracted]} %{[zeek_cols][extracted_cutoff]} %{[zeek_cols][extracted_size]}"
+ "[message]" => "%{[zeek_cols][ts]} %{[zeek_cols][fuid]} %{[zeek_cols][uid]} %{[zeek_cols][id.orig_h]} %{[zeek_cols][id.orig_p]} %{[zeek_cols][id.resp_h]} %{[zeek_cols][id.resp_p]} %{[zeek_cols][source]} %{[zeek_cols][depth]} %{[zeek_cols][analyzers]} %{[zeek_cols][mime_type]} %{[zeek_cols][filename]} %{[zeek_cols][duration]} %{[zeek_cols][local_orig]} %{[zeek_cols][is_orig]} %{[zeek_cols][seen_bytes]} %{[zeek_cols][total_bytes]} %{[zeek_cols][missing_bytes]} %{[zeek_cols][overflow_bytes]} %{[zeek_cols][timedout]} %{[zeek_cols][parent_fuid]} %{[zeek_cols][md5]} %{[zeek_cols][sha1]} %{[zeek_cols][sha256]} %{[zeek_cols][extracted]} %{[zeek_cols][extracted_cutoff]} %{[zeek_cols][extracted_size]} %{[zeek_cols][ftime]}"
}
}
- } else if ([@metadata][zeek_fields_bitmap] == 8388607) {
+ } else {
+ # who knows? the files.log preprocessed bitmap is not one we're expecting, we've got to guess and cannot use dissect
+ mutate { id => "mutate_add_tag_dissect_failure_unknown_files_v51_bitmap"
+ add_tag => [ "_dissectfailure" ] }
+ }
+
+ } else if ([@metadata][zeek_fields_bitmap_version] == 0) {
+
+ if ([@metadata][zeek_fields_bitmap] == 67108863) {
dissect {
- id => "dissect_zeek_files_with_all_fields_minus_extract_size"
+ id => "dissect_zeek_files_with_all_fields"
# zeek's default delimiter is a literal tab, MAKE SURE YOUR EDITOR DOESN'T SCREW IT UP
mapping => {
- "[message]" => "%{[zeek_cols][ts]} %{[zeek_cols][fuid]} %{[zeek_cols][tx_hosts]} %{[zeek_cols][rx_hosts]} %{[zeek_cols][conn_uids]} %{[zeek_cols][source]} %{[zeek_cols][depth]} %{[zeek_cols][analyzers]} %{[zeek_cols][mime_type]} %{[zeek_cols][filename]} %{[zeek_cols][duration]} %{[zeek_cols][local_orig]} %{[zeek_cols][is_orig]} %{[zeek_cols][seen_bytes]} %{[zeek_cols][total_bytes]} %{[zeek_cols][missing_bytes]} %{[zeek_cols][overflow_bytes]} %{[zeek_cols][timedout]} %{[zeek_cols][parent_fuid]} %{[zeek_cols][md5]} %{[zeek_cols][sha1]} %{[zeek_cols][sha256]} %{[zeek_cols][extracted]}"
+ "[message]" => "%{[zeek_cols][ts]} %{[zeek_cols][fuid]} %{[zeek_cols][tx_hosts]} %{[zeek_cols][rx_hosts]} %{[zeek_cols][conn_uids]} %{[zeek_cols][source]} %{[zeek_cols][depth]} %{[zeek_cols][analyzers]} %{[zeek_cols][mime_type]} %{[zeek_cols][filename]} %{[zeek_cols][duration]} %{[zeek_cols][local_orig]} %{[zeek_cols][is_orig]} %{[zeek_cols][seen_bytes]} %{[zeek_cols][total_bytes]} %{[zeek_cols][missing_bytes]} %{[zeek_cols][overflow_bytes]} %{[zeek_cols][timedout]} %{[zeek_cols][parent_fuid]} %{[zeek_cols][md5]} %{[zeek_cols][sha1]} %{[zeek_cols][sha256]} %{[zeek_cols][extracted]} %{[zeek_cols][extracted_cutoff]} %{[zeek_cols][extracted_size]} %{[zeek_cols][ftime]}"
}
}
@@ -1262,7 +1270,7 @@ filter {
}
ruby {
id => "ruby_zip_zeek_files"
- init => "$zeek_files_field_names = [ 'ts', 'fuid', 'tx_hosts', 'rx_hosts', 'conn_uids', 'source', 'depth', 'analyzers', 'mime_type', 'filename', 'duration', 'local_orig', 'is_orig', 'seen_bytes', 'total_bytes', 'missing_bytes', 'overflow_bytes', 'timedout', 'parent_fuid', 'md5', 'sha1', 'sha256', 'extracted', 'extracted_cutoff', 'extracted_size', 'ftime' ]"
+ init => "$zeek_files_field_names = [ 'ts', 'fuid', 'uid', 'orig_h', 'orig_p', 'resp_h', 'resp_p', 'source', 'depth', 'analyzers', 'mime_type', 'filename', 'duration', 'local_orig', 'is_orig', 'seen_bytes', 'total_bytes', 'missing_bytes', 'overflow_bytes', 'timedout', 'parent_fuid', 'md5', 'sha1', 'sha256', 'extracted', 'extracted_cutoff', 'extracted_size', 'ftime' ]"
code => "event.set('[zeek_cols]', $zeek_files_field_names.zip(event.get('[message]')).to_h)"
}
}
@@ -1286,7 +1294,7 @@ filter {
add_field => { "[rootId]" => "%{[zeek_cols][conn_uids][0]}"
"[zeek_cols][uid]" => "%{[zeek_cols][conn_uids][0]}" }
}
- } else {
+ } else if (![zeek_cols][uid]) {
mutate {
id => "mutate_add_fields_zeek_files_fuid_to_uid"
add_field => { "[zeek_cols][uid]" => "%{[zeek_cols][fuid]}" }
diff --git a/malcolm-iso/Dockerfile b/malcolm-iso/Dockerfile
index 7bf0119db..e6bde3829 100644
--- a/malcolm-iso/Dockerfile
+++ b/malcolm-iso/Dockerfile
@@ -8,7 +8,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/malcolm'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/malcolm'
LABEL org.opencontainers.image.description='Malcolm network traffic analysis tool suite ISO in qemu'
ARG QEMU_CPU=4
diff --git a/malcolm-iso/config/hooks/normal/0991-security-performance.hook.chroot b/malcolm-iso/config/hooks/normal/0991-security-performance.hook.chroot
index d6cdecbe5..1ad819668 100755
--- a/malcolm-iso/config/hooks/normal/0991-security-performance.hook.chroot
+++ b/malcolm-iso/config/hooks/normal/0991-security-performance.hook.chroot
@@ -25,6 +25,7 @@ UFW_ALLOW_RULES=(
5045/tcp
5601/tcp
8022/tcp
+ 9009:9013/tcp
9200/tcp
)
for i in ${UFW_ALLOW_RULES[@]}; do
diff --git a/malcolm-iso/config/includes.chroot/etc/skel/.config/autostart/malcolm-first-run-configure.desktop b/malcolm-iso/config/includes.chroot/etc/skel/.config/autostart/malcolm-first-run-configure.desktop
new file mode 100644
index 000000000..eec434ee2
--- /dev/null
+++ b/malcolm-iso/config/includes.chroot/etc/skel/.config/autostart/malcolm-first-run-configure.desktop
@@ -0,0 +1,7 @@
+[Desktop Entry]
+Encoding=UTF-8
+Name=malcolm-first-run-configure
+Comment=First-time Malcolm configuration
+Exec=/usr/local/bin/malcolm-first-run-configure.sh
+Terminal=false
+Type=Application
diff --git a/malcolm-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml b/malcolm-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml
index d70f54885..5175d0bbe 100644
--- a/malcolm-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml
+++ b/malcolm-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml
@@ -15,6 +15,7 @@
+
diff --git a/scripts/control.py b/scripts/control.py
index 596ce0349..84e0c83ff 100755
--- a/scripts/control.py
+++ b/scripts/control.py
@@ -162,7 +162,7 @@ def keystore_op(service, dropPriv=False, *keystore_args, **run_process_kwargs):
serviceImage = None
composeFileLines = list()
with open(args.composeFile, 'r') as f:
- composeFileLines = [x for x in f.readlines() if f'image: malcolmnetsec/{service}' in x]
+ composeFileLines = [x for x in f.readlines() if f'image: ghcr.io/idaholab/malcolm/{service}' in x]
if (len(composeFileLines) > 0) and (len(composeFileLines[0]) > 0):
imageLineValues = composeFileLines[0].split()
if len(imageLineValues) > 1:
@@ -747,6 +747,11 @@ def start():
global dockerBin
global dockerComposeBin
+ # touch the htadmin metadata file and .opensearch.*.curlrc files
+ open(os.path.join(MalcolmPath, os.path.join('htadmin', 'metadata')), 'a').close()
+ open(os.path.join(MalcolmPath, '.opensearch.primary.curlrc'), 'a').close()
+ open(os.path.join(MalcolmPath, '.opensearch.secondary.curlrc'), 'a').close()
+
# make sure the auth files exist. if we are in an interactive shell and we're
# missing any of the auth files, prompt to create them now
if sys.__stdin__.isatty() and (not MalcolmAuthFilesExist()):
@@ -758,11 +763,6 @@ def start():
'Malcolm administrator account authentication files are missing, please run ./scripts/auth_setup to generate them'
)
- # touch the htadmin metadata file and .opensearch.*.curlrc files
- open(os.path.join(MalcolmPath, os.path.join('htadmin', 'metadata')), 'a').close()
- open(os.path.join(MalcolmPath, '.opensearch.primary.curlrc'), 'a').close()
- open(os.path.join(MalcolmPath, '.opensearch.secondary.curlrc'), 'a').close()
-
# if the OpenSearch keystore doesn't exist exist, create empty ones
if not os.path.isfile(os.path.join(MalcolmPath, os.path.join('opensearch', 'opensearch.keystore'))):
keystore_op('opensearch', True, 'create')
@@ -864,535 +864,646 @@ def authSetup(wipe=False):
global dockerComposeBin
global opensslBin
- if YesOrNo('Store administrator username/password for local Malcolm access?', default=True):
- # prompt username and password
- usernamePrevious = None
- password = None
- passwordConfirm = None
- passwordEncrypted = ''
-
- while True:
- username = AskForString("Administrator username")
- if len(username) > 0:
- break
-
- while True:
- password = AskForPassword(f"{username} password: ")
- passwordConfirm = AskForPassword(f"{username} password (again): ")
- if password == passwordConfirm:
- break
- eprint("Passwords do not match")
-
- # get previous admin username to remove from htpasswd file if it's changed
- authEnvFile = os.path.join(MalcolmPath, 'auth.env')
- if os.path.isfile(authEnvFile):
- prevAuthInfo = defaultdict(str)
- with open(authEnvFile, 'r') as f:
- for line in f:
- try:
- k, v = line.rstrip().split("=")
- prevAuthInfo[k] = v.strip('"')
- except:
- pass
- if len(prevAuthInfo['MALCOLM_USERNAME']) > 0:
- usernamePrevious = prevAuthInfo['MALCOLM_USERNAME']
-
- # get openssl hash of password
- err, out = run_process([opensslBin, 'passwd', '-1', '-stdin'], stdin=password, stderr=False, debug=args.debug)
- if (err == 0) and (len(out) > 0) and (len(out[0]) > 0):
- passwordEncrypted = out[0]
- else:
- raise Exception('Unable to generate password hash with openssl')
-
- # write auth.env (used by htadmin and file-upload containers)
- with open(authEnvFile, 'w') as f:
- f.write(
- "# Malcolm Administrator username and encrypted password for nginx reverse proxy (and upload server's SFTP access)\n"
- )
- f.write(f'MALCOLM_USERNAME={username}\n')
- f.write(f'MALCOLM_PASSWORD={b64encode(passwordEncrypted.encode()).decode("ascii")}\n')
- os.chmod(authEnvFile, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
-
- # create or update the htpasswd file
- htpasswdFile = os.path.join(MalcolmPath, os.path.join('nginx', 'htpasswd'))
- htpasswdCmd = ['htpasswd', '-i', '-B', htpasswdFile, username]
- if not os.path.isfile(htpasswdFile):
- htpasswdCmd.insert(1, '-c')
- err, out = run_process(htpasswdCmd, stdin=password, stderr=True, debug=args.debug)
- if err != 0:
- raise Exception(f'Unable to generate htpasswd file: {out}')
-
- # if the admininstrator username has changed, remove the previous administrator username from htpasswd
- if (usernamePrevious is not None) and (usernamePrevious != username):
- htpasswdLines = list()
- with open(htpasswdFile, 'r') as f:
- htpasswdLines = f.readlines()
- with open(htpasswdFile, 'w') as f:
- for line in htpasswdLines:
- if not line.startswith(f"{usernamePrevious}:"):
- f.write(line)
-
- # configure default LDAP stuff (they'll have to edit it by hand later)
- ldapConfFile = os.path.join(MalcolmPath, os.path.join('nginx', 'nginx_ldap.conf'))
- if not os.path.isfile(ldapConfFile):
- ldapDefaults = defaultdict(str)
- if os.path.isfile(os.path.join(MalcolmPath, '.ldap_config_defaults')):
- ldapDefaults = defaultdict(str)
- with open(os.path.join(MalcolmPath, '.ldap_config_defaults'), 'r') as f:
- for line in f:
- try:
- k, v = line.rstrip().split("=")
- ldapDefaults[k] = v.strip('"').strip("'")
- except:
- pass
- ldapProto = ldapDefaults.get("LDAP_PROTO", "ldap://")
- ldapHost = ldapDefaults.get("LDAP_HOST", "ds.example.com")
- ldapPort = ldapDefaults.get("LDAP_PORT", "3268")
- ldapType = ldapDefaults.get("LDAP_SERVER_TYPE", "winldap")
- if ldapType == "openldap":
- ldapUri = 'DC=example,DC=com?uid?sub?(objectClass=posixAccount)'
- ldapGroupAttr = "memberUid"
- ldapGroupAttrIsDN = "off"
- else:
- ldapUri = 'DC=example,DC=com?sAMAccountName?sub?(objectClass=person)'
- ldapGroupAttr = "member"
- ldapGroupAttrIsDN = "on"
- with open(ldapConfFile, 'w') as f:
- f.write('# This is a sample configuration for the ldap_server section of nginx.conf.\n')
- f.write('# Yours will vary depending on how your Active Directory/LDAP server is configured.\n')
- f.write('# See https://github.com/kvspb/nginx-auth-ldap#available-config-parameters for options.\n\n')
- f.write('ldap_server ad_server {\n')
- f.write(f' url "{ldapProto}{ldapHost}:{ldapPort}/{ldapUri}";\n\n')
- f.write(' binddn "bind_dn";\n')
- f.write(' binddn_passwd "bind_dn_password";\n\n')
- f.write(f' group_attribute {ldapGroupAttr};\n')
- f.write(f' group_attribute_is_dn {ldapGroupAttrIsDN};\n')
- f.write(' require group "CN=malcolm,OU=groups,DC=example,DC=com";\n')
- f.write(' require valid_user;\n')
- f.write(' satisfy all;\n')
- f.write('}\n\n')
- f.write('auth_ldap_cache_enabled on;\n')
- f.write('auth_ldap_cache_expiration_time 10000;\n')
- f.write('auth_ldap_cache_size 1000;\n')
- os.chmod(ldapConfFile, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
-
- # populate htadmin config file
- with open(os.path.join(MalcolmPath, os.path.join('htadmin', 'config.ini')), 'w') as f:
- f.write('; HTAdmin config file.\n\n')
- f.write('[application]\n')
- f.write('; Change this to customize your title:\n')
- f.write('app_title = Malcolm User Management\n\n')
- f.write('; htpasswd file\n')
- f.write('secure_path = ./config/htpasswd\n')
- f.write('; metadata file\n')
- f.write('metadata_path = ./config/metadata\n\n')
- f.write('; administrator user/password (htpasswd -b -c -B ...)\n')
- f.write(f'admin_user = {username}\n\n')
- f.write('; username field quality checks\n')
- f.write(';\n')
- f.write('min_username_len = 4\n')
- f.write('max_username_len = 12\n\n')
- f.write('; Password field quality checks\n')
- f.write(';\n')
- f.write('min_password_len = 6\n')
- f.write('max_password_len = 20\n\n')
-
- # touch the metadata file
- open(os.path.join(MalcolmPath, os.path.join('htadmin', 'metadata')), 'a').close()
-
- # generate HTTPS self-signed certificates
- if YesOrNo('(Re)generate self-signed certificates for HTTPS access', default=True):
- with pushd(os.path.join(MalcolmPath, os.path.join('nginx', 'certs'))):
- # remove previous files
- for oldfile in glob.glob("*.pem"):
- os.remove(oldfile)
-
- # generate dhparam -------------------------------
- err, out = run_process(
- [opensslBin, 'dhparam', '-out', 'dhparam.pem', '2048'], stderr=True, debug=args.debug
- )
- if err != 0:
- raise Exception(f'Unable to generate dhparam.pem file: {out}')
-
- # generate key/cert -------------------------------
- err, out = run_process(
- [
- opensslBin,
- 'req',
- '-subj',
- '/CN=localhost',
- '-x509',
- '-newkey',
- 'rsa:4096',
- '-nodes',
- '-keyout',
- 'key.pem',
- '-out',
- 'cert.pem',
- '-days',
- '3650',
- ],
- stderr=True,
- debug=args.debug,
- )
- if err != 0:
- raise Exception(f'Unable to generate key.pem/cert.pem file(s): {out}')
-
- # generate beats/logstash self-signed certificates
+ # for beats/logstash self-signed certificates
logstashPath = os.path.join(MalcolmPath, os.path.join('logstash', 'certs'))
filebeatPath = os.path.join(MalcolmPath, os.path.join('filebeat', 'certs'))
- if YesOrNo('(Re)generate self-signed certificates for a remote log forwarder', default=True):
- with pushd(logstashPath):
- # make clean to clean previous files
- for pat in ['*.srl', '*.csr', '*.key', '*.crt', '*.pem']:
- for oldfile in glob.glob(pat):
- os.remove(oldfile)
-
- # -----------------------------------------------
- # generate new ca/server/client certificates/keys
- # ca -------------------------------
- err, out = run_process([opensslBin, 'genrsa', '-out', 'ca.key', '2048'], stderr=True, debug=args.debug)
- if err != 0:
- raise Exception(f'Unable to generate ca.key: {out}')
- err, out = run_process(
- [
- opensslBin,
- 'req',
- '-x509',
- '-new',
- '-nodes',
- '-key',
- 'ca.key',
- '-sha256',
- '-days',
- '9999',
- '-subj',
- '/C=US/ST=ID/O=sensor/OU=ca',
- '-out',
- 'ca.crt',
- ],
- stderr=True,
- debug=args.debug,
+ txRxScript = None
+ if (pyPlatform != PLATFORM_WINDOWS) and Which("croc"):
+ txRxScript = 'tx-rx-secure.sh' if Which('tx-rx-secure.sh') else None
+ if not txRxScript:
+ txRxScript = os.path.join(
+ MalcolmPath, os.path.join('shared', os.path.join('bin', os.path.join('tx-rx-secure.sh')))
)
- if err != 0:
- raise Exception(f'Unable to generate ca.crt: {out}')
-
- # server -------------------------------
- err, out = run_process([opensslBin, 'genrsa', '-out', 'server.key', '2048'], stderr=True, debug=args.debug)
- if err != 0:
- raise Exception(f'Unable to generate server.key: {out}')
-
- err, out = run_process(
- [
- opensslBin,
- 'req',
- '-sha512',
- '-new',
- '-key',
- 'server.key',
- '-out',
- 'server.csr',
- '-config',
- 'server.conf',
- ],
- stderr=True,
- debug=args.debug,
- )
- if err != 0:
- raise Exception(f'Unable to generate server.csr: {out}')
-
- err, out = run_process(
- [
- opensslBin,
- 'x509',
- '-days',
- '3650',
- '-req',
- '-sha512',
- '-in',
- 'server.csr',
- '-CAcreateserial',
- '-CA',
- 'ca.crt',
- '-CAkey',
- 'ca.key',
- '-out',
- 'server.crt',
- '-extensions',
- 'v3_req',
- '-extfile',
- 'server.conf',
- ],
- stderr=True,
- debug=args.debug,
- )
- if err != 0:
- raise Exception(f'Unable to generate server.crt: {out}')
-
- shutil.move("server.key", "server.key.pem")
- err, out = run_process(
- [opensslBin, 'pkcs8', '-in', 'server.key.pem', '-topk8', '-nocrypt', '-out', 'server.key'],
- stderr=True,
- debug=args.debug,
- )
- if err != 0:
- raise Exception(f'Unable to generate server.key: {out}')
-
- # client -------------------------------
- err, out = run_process([opensslBin, 'genrsa', '-out', 'client.key', '2048'], stderr=True, debug=args.debug)
- if err != 0:
- raise Exception(f'Unable to generate client.key: {out}')
+ txRxScript = txRxScript if (txRxScript and os.path.isfile(txRxScript)) else '/usr/local/bin/tx-rx-secure.sh'
+ txRxScript = txRxScript if (txRxScript and os.path.isfile(txRxScript)) else '/usr/bin/tx-rx-secure.sh'
+ txRxScript = txRxScript if (txRxScript and os.path.isfile(txRxScript)) else None
+
+ # don't make them go through everything every time, give them a choice instead
+ authModeChoices = (
+ (
+ 'all',
+ "Configure all authentication-related settings",
+ True,
+ True,
+ ),
+ (
+ 'admin',
+ "Store administrator username/password for local Malcolm access",
+ False,
+ True,
+ ),
+ (
+ 'webcerts',
+ "(Re)generate self-signed certificates for HTTPS access",
+ False,
+ True,
+ ),
+ (
+ 'fwcerts',
+ "(Re)generate self-signed certificates for a remote log forwarder",
+ False,
+ True,
+ ),
+ (
+ 'remoteos',
+ "Configure remote primary or secondary OpenSearch instance",
+ False,
+ False,
+ ),
+ (
+ 'email',
+ "Store username/password for email alert sender account",
+ False,
+ False,
+ ),
+ (
+ 'netbox',
+ "(Re)generate internal passwords for NetBox",
+ False,
+ not os.path.isfile(os.path.join(MalcolmPath, os.path.join('netbox', os.path.join('env', 'netbox.env')))),
+ ),
+ (
+ 'txfwcerts',
+ "Transfer self-signed client certificates to a remote log forwarder",
+ False,
+ False,
+ ),
+ )[: 8 if txRxScript else -1]
- err, out = run_process(
- [
- opensslBin,
- 'req',
- '-sha512',
- '-new',
- '-key',
- 'client.key',
- '-out',
- 'client.csr',
- '-config',
- 'client.conf',
- ],
- stderr=True,
- debug=args.debug,
- )
- if err != 0:
- raise Exception(f'Unable to generate client.csr: {out}')
+ authMode = ChooseOne(
+ 'Configure Authentication',
+ choices=[x[:-1] for x in authModeChoices],
+ )
- err, out = run_process(
- [
- opensslBin,
- 'x509',
- '-days',
- '3650',
- '-req',
- '-sha512',
- '-in',
- 'client.csr',
- '-CAcreateserial',
- '-CA',
- 'ca.crt',
- '-CAkey',
- 'ca.key',
- '-out',
- 'client.crt',
- '-extensions',
- 'v3_req',
- '-extensions',
- 'usr_cert',
- '-extfile',
- 'client.conf',
- ],
- stderr=True,
- debug=args.debug,
- )
- if err != 0:
- raise Exception(f'Unable to generate client.crt: {out}')
- # -----------------------------------------------
+ for authItem in authModeChoices[1:]:
+ if ((authMode == 'all') and YesOrNo(f'{authItem[1]}?', default=authItem[3])) or (
+ (authMode != 'all') and (authMode == authItem[0])
+ ):
+ if authItem[0] == 'admin':
+ # prompt username and password
+ usernamePrevious = None
+ password = None
+ passwordConfirm = None
+ passwordEncrypted = ''
+
+ while True:
+ username = AskForString("Administrator username")
+ if len(username) > 0:
+ break
+
+ while True:
+ password = AskForPassword(f"{username} password: ")
+ passwordConfirm = AskForPassword(f"{username} password (again): ")
+ if password == passwordConfirm:
+ break
+ eprint("Passwords do not match")
+
+ # get previous admin username to remove from htpasswd file if it's changed
+ authEnvFile = os.path.join(MalcolmPath, 'auth.env')
+ if os.path.isfile(authEnvFile):
+ prevAuthInfo = defaultdict(str)
+ with open(authEnvFile, 'r') as f:
+ for line in f:
+ try:
+ k, v = line.rstrip().split("=")
+ prevAuthInfo[k] = v.strip('"')
+ except:
+ pass
+ if len(prevAuthInfo['MALCOLM_USERNAME']) > 0:
+ usernamePrevious = prevAuthInfo['MALCOLM_USERNAME']
- # mkdir filebeat/certs if it doesn't exist
- try:
- os.makedirs(filebeatPath)
- except OSError as exc:
- if (exc.errno == errno.EEXIST) and os.path.isdir(filebeatPath):
- pass
+ # get openssl hash of password
+ err, out = run_process(
+ [opensslBin, 'passwd', '-1', '-stdin'], stdin=password, stderr=False, debug=args.debug
+ )
+ if (err == 0) and (len(out) > 0) and (len(out[0]) > 0):
+ passwordEncrypted = out[0]
else:
- raise
-
- # remove previous files in filebeat/certs
- for oldfile in glob.glob(os.path.join(filebeatPath, "*")):
- os.remove(oldfile)
+ raise Exception('Unable to generate password hash with openssl')
+
+ # write auth.env (used by htadmin and file-upload containers)
+ with open(authEnvFile, 'w') as f:
+ f.write(
+ "# Malcolm Administrator username and encrypted password for nginx reverse proxy (and upload server's SFTP access)\n"
+ )
+ f.write(f'MALCOLM_USERNAME={username}\n')
+ f.write(f'MALCOLM_PASSWORD={b64encode(passwordEncrypted.encode()).decode("ascii")}\n')
+ os.chmod(authEnvFile, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
+
+ # create or update the htpasswd file
+ htpasswdFile = os.path.join(MalcolmPath, os.path.join('nginx', 'htpasswd'))
+ htpasswdCmd = ['htpasswd', '-i', '-B', htpasswdFile, username]
+ if not os.path.isfile(htpasswdFile):
+ htpasswdCmd.insert(1, '-c')
+ err, out = run_process(htpasswdCmd, stdin=password, stderr=True, debug=args.debug)
+ if err != 0:
+ raise Exception(f'Unable to generate htpasswd file: {out}')
+
+ # if the administrator username has changed, remove the previous administrator username from htpasswd
+ if (usernamePrevious is not None) and (usernamePrevious != username):
+ htpasswdLines = list()
+ with open(htpasswdFile, 'r') as f:
+ htpasswdLines = f.readlines()
+ with open(htpasswdFile, 'w') as f:
+ for line in htpasswdLines:
+ if not line.startswith(f"{usernamePrevious}:"):
+ f.write(line)
+
+ # configure default LDAP stuff (they'll have to edit it by hand later)
+ ldapConfFile = os.path.join(MalcolmPath, os.path.join('nginx', 'nginx_ldap.conf'))
+ if not os.path.isfile(ldapConfFile):
+ ldapDefaults = defaultdict(str)
+ if os.path.isfile(os.path.join(MalcolmPath, '.ldap_config_defaults')):
+ ldapDefaults = defaultdict(str)
+ with open(os.path.join(MalcolmPath, '.ldap_config_defaults'), 'r') as f:
+ for line in f:
+ try:
+ k, v = line.rstrip().split("=")
+ ldapDefaults[k] = v.strip('"').strip("'")
+ except:
+ pass
+ ldapProto = ldapDefaults.get("LDAP_PROTO", "ldap://")
+ ldapHost = ldapDefaults.get("LDAP_HOST", "ds.example.com")
+ ldapPort = ldapDefaults.get("LDAP_PORT", "3268")
+ ldapType = ldapDefaults.get("LDAP_SERVER_TYPE", "winldap")
+ if ldapType == "openldap":
+ ldapUri = 'DC=example,DC=com?uid?sub?(objectClass=posixAccount)'
+ ldapGroupAttr = "memberUid"
+ ldapGroupAttrIsDN = "off"
+ else:
+ ldapUri = 'DC=example,DC=com?sAMAccountName?sub?(objectClass=person)'
+ ldapGroupAttr = "member"
+ ldapGroupAttrIsDN = "on"
+ with open(ldapConfFile, 'w') as f:
+ f.write('# This is a sample configuration for the ldap_server section of nginx.conf.\n')
+ f.write('# Yours will vary depending on how your Active Directory/LDAP server is configured.\n')
+ f.write(
+ '# See https://github.com/kvspb/nginx-auth-ldap#available-config-parameters for options.\n\n'
+ )
+ f.write('ldap_server ad_server {\n')
+ f.write(f' url "{ldapProto}{ldapHost}:{ldapPort}/{ldapUri}";\n\n')
+ f.write(' binddn "bind_dn";\n')
+ f.write(' binddn_passwd "bind_dn_password";\n\n')
+ f.write(f' group_attribute {ldapGroupAttr};\n')
+ f.write(f' group_attribute_is_dn {ldapGroupAttrIsDN};\n')
+ f.write(' require group "CN=malcolm,OU=groups,DC=example,DC=com";\n')
+ f.write(' require valid_user;\n')
+ f.write(' satisfy all;\n')
+ f.write('}\n\n')
+ f.write('auth_ldap_cache_enabled on;\n')
+ f.write('auth_ldap_cache_expiration_time 10000;\n')
+ f.write('auth_ldap_cache_size 1000;\n')
+ os.chmod(ldapConfFile, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
+
+ # populate htadmin config file
+ with open(os.path.join(MalcolmPath, os.path.join('htadmin', 'config.ini')), 'w') as f:
+ f.write('; HTAdmin config file.\n\n')
+ f.write('[application]\n')
+ f.write('; Change this to customize your title:\n')
+ f.write('app_title = Malcolm User Management\n\n')
+ f.write('; htpasswd file\n')
+ f.write('secure_path = ./config/htpasswd\n')
+ f.write('; metadata file\n')
+ f.write('metadata_path = ./config/metadata\n\n')
+ f.write('; administrator user/password (htpasswd -b -c -B ...)\n')
+ f.write(f'admin_user = {username}\n\n')
+ f.write('; username field quality checks\n')
+ f.write(';\n')
+ f.write('min_username_len = 4\n')
+ f.write('max_username_len = 12\n\n')
+ f.write('; Password field quality checks\n')
+ f.write(';\n')
+ f.write('min_password_len = 6\n')
+ f.write('max_password_len = 20\n\n')
+
+ # touch the metadata file
+ open(os.path.join(MalcolmPath, os.path.join('htadmin', 'metadata')), 'a').close()
+
+ DisplayMessage(
+ f'Additional local accounts can be created at https://localhost:488/ when Malcolm is running',
+ )
- # copy the ca so logstasn and filebeat both have it
- shutil.copy2(os.path.join(logstashPath, "ca.crt"), filebeatPath)
+ # generate HTTPS self-signed certificates
+ elif authItem[0] == 'webcerts':
+ with pushd(os.path.join(MalcolmPath, os.path.join('nginx', 'certs'))):
+ # remove previous files
+ for oldfile in glob.glob("*.pem"):
+ os.remove(oldfile)
+
+ # generate dhparam -------------------------------
+ err, out = run_process(
+ [opensslBin, 'dhparam', '-out', 'dhparam.pem', '2048'], stderr=True, debug=args.debug
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate dhparam.pem file: {out}')
+
+ # generate key/cert -------------------------------
+ err, out = run_process(
+ [
+ opensslBin,
+ 'req',
+ '-subj',
+ '/CN=localhost',
+ '-x509',
+ '-newkey',
+ 'rsa:4096',
+ '-nodes',
+ '-keyout',
+ 'key.pem',
+ '-out',
+ 'cert.pem',
+ '-days',
+ '3650',
+ ],
+ stderr=True,
+ debug=args.debug,
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate key.pem/cert.pem file(s): {out}')
+
+ elif authItem[0] == 'fwcerts':
+ with pushd(logstashPath):
+ # make clean to clean previous files
+ for pat in ['*.srl', '*.csr', '*.key', '*.crt', '*.pem']:
+ for oldfile in glob.glob(pat):
+ os.remove(oldfile)
+
+ # -----------------------------------------------
+ # generate new ca/server/client certificates/keys
+ # ca -------------------------------
+ err, out = run_process(
+ [opensslBin, 'genrsa', '-out', 'ca.key', '2048'], stderr=True, debug=args.debug
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate ca.key: {out}')
+
+ err, out = run_process(
+ [
+ opensslBin,
+ 'req',
+ '-x509',
+ '-new',
+ '-nodes',
+ '-key',
+ 'ca.key',
+ '-sha256',
+ '-days',
+ '9999',
+ '-subj',
+ '/C=US/ST=ID/O=sensor/OU=ca',
+ '-out',
+ 'ca.crt',
+ ],
+ stderr=True,
+ debug=args.debug,
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate ca.crt: {out}')
+
+ # server -------------------------------
+ err, out = run_process(
+ [opensslBin, 'genrsa', '-out', 'server.key', '2048'], stderr=True, debug=args.debug
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate server.key: {out}')
+
+ err, out = run_process(
+ [
+ opensslBin,
+ 'req',
+ '-sha512',
+ '-new',
+ '-key',
+ 'server.key',
+ '-out',
+ 'server.csr',
+ '-config',
+ 'server.conf',
+ ],
+ stderr=True,
+ debug=args.debug,
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate server.csr: {out}')
+
+ err, out = run_process(
+ [
+ opensslBin,
+ 'x509',
+ '-days',
+ '3650',
+ '-req',
+ '-sha512',
+ '-in',
+ 'server.csr',
+ '-CAcreateserial',
+ '-CA',
+ 'ca.crt',
+ '-CAkey',
+ 'ca.key',
+ '-out',
+ 'server.crt',
+ '-extensions',
+ 'v3_req',
+ '-extfile',
+ 'server.conf',
+ ],
+ stderr=True,
+ debug=args.debug,
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate server.crt: {out}')
+
+ shutil.move("server.key", "server.key.pem")
+ err, out = run_process(
+ [opensslBin, 'pkcs8', '-in', 'server.key.pem', '-topk8', '-nocrypt', '-out', 'server.key'],
+ stderr=True,
+ debug=args.debug,
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate server.key: {out}')
+
+ # client -------------------------------
+ err, out = run_process(
+ [opensslBin, 'genrsa', '-out', 'client.key', '2048'], stderr=True, debug=args.debug
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate client.key: {out}')
+
+ err, out = run_process(
+ [
+ opensslBin,
+ 'req',
+ '-sha512',
+ '-new',
+ '-key',
+ 'client.key',
+ '-out',
+ 'client.csr',
+ '-config',
+ 'client.conf',
+ ],
+ stderr=True,
+ debug=args.debug,
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate client.csr: {out}')
+
+ err, out = run_process(
+ [
+ opensslBin,
+ 'x509',
+ '-days',
+ '3650',
+ '-req',
+ '-sha512',
+ '-in',
+ 'client.csr',
+ '-CAcreateserial',
+ '-CA',
+ 'ca.crt',
+ '-CAkey',
+ 'ca.key',
+ '-out',
+ 'client.crt',
+ '-extensions',
+ 'v3_req',
+ '-extensions',
+ 'usr_cert',
+ '-extfile',
+ 'client.conf',
+ ],
+ stderr=True,
+ debug=args.debug,
+ )
+ if err != 0:
+ raise Exception(f'Unable to generate client.crt: {out}')
+ # -----------------------------------------------
+
+ # mkdir filebeat/certs if it doesn't exist
+ try:
+ os.makedirs(filebeatPath)
+ except OSError as exc:
+ if (exc.errno == errno.EEXIST) and os.path.isdir(filebeatPath):
+ pass
+ else:
+ raise
- # move the client certs for filebeat
- for f in ['client.key', 'client.crt']:
- shutil.move(os.path.join(logstashPath, f), filebeatPath)
+ # remove previous files in filebeat/certs
+ for oldfile in glob.glob(os.path.join(filebeatPath, "*")):
+ os.remove(oldfile)
+
+ # copy the ca so logstash and filebeat both have it
+ shutil.copy2(os.path.join(logstashPath, "ca.crt"), filebeatPath)
+
+ # move the client certs for filebeat
+ for f in ['client.key', 'client.crt']:
+ shutil.move(os.path.join(logstashPath, f), filebeatPath)
+
+ # remove leftovers
+ for pat in ['*.srl', '*.csr', '*.pem']:
+ for oldfile in glob.glob(pat):
+ os.remove(oldfile)
+
+ # create and populate connection parameters file for remote OpenSearch instance(s)
+ elif authItem[0] == 'remoteos':
+ for instance in ['primary', 'secondary']:
+ openSearchCredFileName = os.path.join(MalcolmPath, f'.opensearch.{instance}.curlrc')
+ if YesOrNo(
+ f'Store username/password for {instance} remote OpenSearch instance?',
+ default=False,
+ ):
+ prevCurlContents = ParseCurlFile(openSearchCredFileName)
- # remove leftovers
- for pat in ['*.srl', '*.csr', '*.pem']:
- for oldfile in glob.glob(pat):
- os.remove(oldfile)
+ # prompt host, username and password
+ esUsername = None
+ esPassword = None
+ esPasswordConfirm = None
- # create and populate connection parameters file for remote OpenSearch instance(s)
- for instance in ['primary', 'secondary']:
- openSearchCredFileName = os.path.join(MalcolmPath, f'.opensearch.{instance}.curlrc')
- if YesOrNo(
- f'Store username/password for {instance} remote OpenSearch instance?',
- default=False,
- ):
- prevCurlContents = ParseCurlFile(openSearchCredFileName)
+ while True:
+ esUsername = AskForString(
+ "OpenSearch username",
+ default=prevCurlContents['user'],
+ )
+ if (len(esUsername) > 0) and (':' not in esUsername):
+ break
+ eprint("Username is blank (or contains a colon, which is not allowed)")
+
+ while True:
+ esPassword = AskForPassword(f"{esUsername} password: ")
+ if (
+ (len(esPassword) == 0)
+ and (prevCurlContents['password'] is not None)
+ and YesOrNo(f'Use previously entered password for "{esUsername}"?', default=True)
+ ):
+ esPassword = prevCurlContents['password']
+ esPasswordConfirm = esPassword
+ else:
+ esPasswordConfirm = AskForPassword(f"{esUsername} password (again): ")
+ if (esPassword == esPasswordConfirm) and (len(esPassword) > 0):
+ break
+ eprint("Passwords do not match")
+
+ esSslVerify = YesOrNo(
+ f'Require SSL certificate validation for OpenSearch communication?',
+ default=(not (('k' in prevCurlContents) or ('insecure' in prevCurlContents))),
+ )
- # prompt host, username and password
- esUsername = None
- esPassword = None
- esPasswordConfirm = None
+ with open(openSearchCredFileName, 'w') as f:
+ f.write(f'user: "{EscapeForCurl(esUsername)}:{EscapeForCurl(esPassword)}"\n')
+ if not esSslVerify:
+ f.write('insecure\n')
- while True:
- esUsername = AskForString(
- "OpenSearch username",
- default=prevCurlContents['user'],
- )
- if (len(esUsername) > 0) and (':' not in esUsername):
- break
- eprint("Username is blank (or contains a colon, which is not allowed)")
-
- while True:
- esPassword = AskForPassword(f"{esUsername} password: ")
- if (
- (len(esPassword) == 0)
- and (prevCurlContents['password'] is not None)
- and YesOrNo(f'Use previously entered password for "{esUsername}"?', default=True)
- ):
- esPassword = prevCurlContents['password']
- esPasswordConfirm = esPassword
+ else:
+ try:
+ os.remove(openSearchCredFileName)
+ except:
+ pass
+ open(openSearchCredFileName, 'a').close()
+ os.chmod(openSearchCredFileName, stat.S_IRUSR | stat.S_IWUSR)
+
+ # OpenSearch authenticate sender account credentials
+ # https://opensearch.org/docs/latest/monitoring-plugins/alerting/monitors/#authenticate-sender-account
+ elif authItem[0] == 'email':
+ # prompt username and password
+ emailPassword = None
+ emailPasswordConfirm = None
+ emailSender = AskForString("OpenSearch alerting email sender name")
+ while True:
+ emailUsername = AskForString("Email account username")
+ if len(emailUsername) > 0:
+ break
+
+ while True:
+ emailPassword = AskForPassword(f"{emailUsername} password: ")
+ emailPasswordConfirm = AskForPassword(f"{emailUsername} password (again): ")
+ if emailPassword == emailPasswordConfirm:
+ break
+ eprint("Passwords do not match")
+
+ # create OpenSearch keystore file, don't complain if it already exists, and set the keystore items
+ usernameKey = f'plugins.alerting.destination.email.{emailSender}.username'
+ passwordKey = f'plugins.alerting.destination.email.{emailSender}.password'
+
+ keystore_op('opensearch', True, 'create', stdin='N')
+ keystore_op('opensearch', True, 'remove', usernameKey)
+ keystore_op('opensearch', True, 'add', usernameKey, '--stdin', stdin=emailUsername)
+ keystore_op('opensearch', True, 'remove', passwordKey)
+ keystore_op('opensearch', True, 'add', passwordKey, '--stdin', stdin=emailPassword)
+ success, results = keystore_op('opensearch', True, 'list')
+ results = [
+ x
+ for x in results
+ if x and (not x.upper().startswith('WARNING')) and (not x.upper().startswith('KEYSTORE'))
+ ]
+ if success and (usernameKey in results) and (passwordKey in results):
+ eprint(f"Email alert sender account variables stored: {', '.join(results)}")
else:
- esPasswordConfirm = AskForPassword(f"{esUsername} password (again): ")
- if (esPassword == esPasswordConfirm) and (len(esPassword) > 0):
- break
- eprint("Passwords do not match")
-
- esSslVerify = YesOrNo(
- f'Require SSL certificate validation for OpenSearch communication?',
- default=(not (('k' in prevCurlContents) or ('insecure' in prevCurlContents))),
- )
-
- with open(openSearchCredFileName, 'w') as f:
- f.write(f'user: "{EscapeForCurl(esUsername)}:{EscapeForCurl(esPassword)}"\n')
- if not esSslVerify:
- f.write('insecure\n')
-
- else:
- try:
- os.remove(openSearchCredFileName)
- except:
- pass
- open(openSearchCredFileName, 'a').close()
- os.chmod(openSearchCredFileName, stat.S_IRUSR | stat.S_IWUSR)
-
- # OpenSearch authenticate sender account credentials
- # https://opensearch.org/docs/latest/monitoring-plugins/alerting/monitors/#authenticate-sender-account
- if YesOrNo(
- 'Store username/password for email alert sender account? (see https://opensearch.org/docs/latest/monitoring-plugins/alerting/monitors/#authenticate-sender-account)',
- default=False,
- ):
- # prompt username and password
- emailPassword = None
- emailPasswordConfirm = None
- emailSender = AskForString("OpenSearch alerting email sender name")
- while True:
- emailUsername = AskForString("Email account username")
- if len(emailUsername) > 0:
- break
-
- while True:
- emailPassword = AskForPassword(f"{emailUsername} password: ")
- emailPasswordConfirm = AskForPassword(f"{emailUsername} password (again): ")
- if emailPassword == emailPasswordConfirm:
- break
- eprint("Passwords do not match")
-
- # create OpenSearch keystore file, don't complain if it already exists, and set the keystore items
- usernameKey = f'plugins.alerting.destination.email.{emailSender}.username'
- passwordKey = f'plugins.alerting.destination.email.{emailSender}.password'
-
- keystore_op('opensearch', True, 'create', stdin='N')
- keystore_op('opensearch', True, 'remove', usernameKey)
- keystore_op('opensearch', True, 'add', usernameKey, '--stdin', stdin=emailUsername)
- keystore_op('opensearch', True, 'remove', passwordKey)
- keystore_op('opensearch', True, 'add', passwordKey, '--stdin', stdin=emailPassword)
- success, results = keystore_op('opensearch', True, 'list')
- results = [
- x for x in results if x and (not x.upper().startswith('WARNING')) and (not x.upper().startswith('KEYSTORE'))
- ]
- if success and (usernameKey in results) and (passwordKey in results):
- eprint(f"Email alert sender account variables stored: {', '.join(results)}")
- else:
- eprint("Failed to store email alert sender account variables:\n")
- eprint("\n".join(results))
+ eprint("Failed to store email alert sender account variables:\n")
+ eprint("\n".join(results))
+
+ elif authItem[0] == 'netbox':
+ with pushd(os.path.join(MalcolmPath, os.path.join('netbox', 'env'))):
+ netboxPwAlphabet = string.ascii_letters + string.digits + '_'
+ netboxKeyAlphabet = string.ascii_letters + string.digits + '%@<=>?~^_-'
+ netboxPostGresPassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
+ netboxRedisPassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
+ netboxRedisCachePassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
+ netboxSuPassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
+ netboxSuToken = ''.join(secrets.choice(netboxPwAlphabet) for i in range(40))
+ netboxSecretKey = ''.join(secrets.choice(netboxKeyAlphabet) for i in range(50))
+
+ with open('postgres.env', 'w') as f:
+ f.write('POSTGRES_DB=netbox\n')
+ f.write(f'POSTGRES_PASSWORD={netboxPostGresPassword}\n')
+ f.write('POSTGRES_USER=netbox\n')
+ os.chmod('postgres.env', stat.S_IRUSR | stat.S_IWUSR)
+
+ with open('redis-cache.env', 'w') as f:
+ f.write(f'REDIS_PASSWORD={netboxRedisCachePassword}\n')
+ os.chmod('redis-cache.env', stat.S_IRUSR | stat.S_IWUSR)
+
+ with open('redis.env', 'w') as f:
+ f.write(f'REDIS_PASSWORD={netboxRedisPassword}\n')
+ os.chmod('redis.env', stat.S_IRUSR | stat.S_IWUSR)
+
+ if (not os.path.isfile('netbox.env')) and (os.path.isfile('netbox.env.example')):
+ shutil.copy2('netbox.env.example', 'netbox.env')
+
+ with fileinput.FileInput('netbox.env', inplace=True, backup=None) as envFile:
+ for line in envFile:
+ line = line.rstrip("\n")
+
+ if line.startswith('DB_PASSWORD'):
+ line = re.sub(
+ r'(DB_PASSWORD\s*=\s*)(\S+)',
+ fr"\g<1>{netboxPostGresPassword}",
+ line,
+ )
+ elif line.startswith('REDIS_CACHE_PASSWORD'):
+ line = re.sub(
+ r'(REDIS_CACHE_PASSWORD\s*=\s*)(\S+)',
+ fr"\g<1>{netboxRedisCachePassword}",
+ line,
+ )
+ elif line.startswith('REDIS_PASSWORD'):
+ line = re.sub(
+ r'(REDIS_PASSWORD\s*=\s*)(\S+)',
+ fr"\g<1>{netboxRedisPassword}",
+ line,
+ )
+ elif line.startswith('SECRET_KEY'):
+ line = re.sub(
+ r'(SECRET_KEY\s*=\s*)(\S+)',
+ fr"\g<1>{netboxSecretKey}",
+ line,
+ )
+ elif line.startswith('SUPERUSER_PASSWORD'):
+ line = re.sub(
+ r'(SUPERUSER_PASSWORD\s*=\s*)(\S+)',
+ fr"\g<1>{netboxSuPassword}",
+ line,
+ )
+ elif line.startswith('SUPERUSER_API_TOKEN'):
+ line = re.sub(
+ r'(SUPERUSER_API_TOKEN\s*=\s*)(\S+)',
+ fr"\g<1>{netboxSuToken}",
+ line,
+ )
- if YesOrNo(
- '(Re)generate internal passwords for NetBox',
- default=not os.path.isfile(
- os.path.join(MalcolmPath, os.path.join('netbox', os.path.join('env', 'netbox.env')))
- ),
- ):
- with pushd(os.path.join(MalcolmPath, os.path.join('netbox', 'env'))):
- netboxPwAlphabet = string.ascii_letters + string.digits + '_'
- netboxKeyAlphabet = string.ascii_letters + string.digits + '%@<=>?~^_-'
- netboxPostGresPassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
- netboxRedisPassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
- netboxRedisCachePassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
- netboxSuPassword = ''.join(secrets.choice(netboxPwAlphabet) for i in range(24))
- netboxSuToken = ''.join(secrets.choice(netboxPwAlphabet) for i in range(40))
- netboxSecretKey = ''.join(secrets.choice(netboxKeyAlphabet) for i in range(50))
-
- with open('postgres.env', 'w') as f:
- f.write('POSTGRES_DB=netbox\n')
- f.write(f'POSTGRES_PASSWORD={netboxPostGresPassword}\n')
- f.write('POSTGRES_USER=netbox\n')
- os.chmod('postgres.env', stat.S_IRUSR | stat.S_IWUSR)
-
- with open('redis-cache.env', 'w') as f:
- f.write(f'REDIS_PASSWORD={netboxRedisCachePassword}\n')
- os.chmod('redis-cache.env', stat.S_IRUSR | stat.S_IWUSR)
-
- with open('redis.env', 'w') as f:
- f.write(f'REDIS_PASSWORD={netboxRedisPassword}\n')
- os.chmod('redis.env', stat.S_IRUSR | stat.S_IWUSR)
-
- if (not os.path.isfile('netbox.env')) and (os.path.isfile('netbox.env.example')):
- shutil.copy2('netbox.env.example', 'netbox.env')
-
- with fileinput.FileInput('netbox.env', inplace=True, backup=None) as envFile:
- for line in envFile:
- line = line.rstrip("\n")
-
- if line.startswith('DB_PASSWORD'):
- line = re.sub(
- r'(DB_PASSWORD\s*=\s*)(\S+)',
- fr"\g<1>{netboxPostGresPassword}",
- line,
- )
- elif line.startswith('REDIS_CACHE_PASSWORD'):
- line = re.sub(
- r'(REDIS_CACHE_PASSWORD\s*=\s*)(\S+)',
- fr"\g<1>{netboxRedisCachePassword}",
- line,
- )
- elif line.startswith('REDIS_PASSWORD'):
- line = re.sub(
- r'(REDIS_PASSWORD\s*=\s*)(\S+)',
- fr"\g<1>{netboxRedisPassword}",
- line,
- )
- elif line.startswith('SECRET_KEY'):
- line = re.sub(
- r'(SECRET_KEY\s*=\s*)(\S+)',
- fr"\g<1>{netboxSecretKey}",
- line,
- )
- elif line.startswith('SUPERUSER_PASSWORD'):
- line = re.sub(
- r'(SUPERUSER_PASSWORD\s*=\s*)(\S+)',
- fr"\g<1>{netboxSuPassword}",
- line,
- )
- elif line.startswith('SUPERUSER_API_TOKEN'):
- line = re.sub(
- r'(SUPERUSER_API_TOKEN\s*=\s*)(\S+)',
- fr"\g<1>{netboxSuToken}",
- line,
- )
+ print(line)
- print(line)
+ os.chmod('netbox.env', stat.S_IRUSR | stat.S_IWUSR)
- os.chmod('netbox.env', stat.S_IRUSR | stat.S_IWUSR)
+ elif authItem[0] == 'txfwcerts':
+ DisplayMessage(
+ f'Run configure-capture on the remote log forwarder, select "Configure Forwarding," then "Receive client SSL files..."',
+ )
+ with pushd(filebeatPath):
+ with Popen(
+ [txRxScript, '-t', "ca.crt", "client.crt", "client.key"],
+ stdout=PIPE,
+ stderr=STDOUT,
+ bufsize=0 if MainDialog else -1,
+ ) as p:
+ if MainDialog:
+ DisplayProgramBox(
+ fileDescriptor=p.stdout.fileno(),
+ text='ssl-client-transmit',
+ clearScreen=True,
+ )
+ else:
+ while True:
+ output = p.stdout.readline()
+ if (len(output) == 0) and (p.poll() is not None):
+ break
+ if output:
+ print(output.decode('utf-8').rstrip())
+ else:
+ time.sleep(0.5)
+
+ p.poll()
###################################################################################################
diff --git a/scripts/demo/amazon_linux_2_malcolm_demo_setup.sh b/scripts/demo/amazon_linux_2_malcolm_demo_setup.sh
index 5928858ea..e7172aa59 100755
--- a/scripts/demo/amazon_linux_2_malcolm_demo_setup.sh
+++ b/scripts/demo/amazon_linux_2_malcolm_demo_setup.sh
@@ -541,7 +541,7 @@ function InstallMalcolm {
CONFIRMATION=$(_GetConfirmation "Clone and setup Malcolm [Y/n]?" Y)
if [[ $CONFIRMATION =~ ^[Yy] ]]; then
- if _GitClone https://github.com/cisagov/Malcolm "$MALCOLM_PATH"; then
+ if _GitClone https://github.com/idaholab/Malcolm "$MALCOLM_PATH"; then
pushd "$MALCOLM_PATH" >/dev/null 2>&1
python3 ./scripts/install.py -c -d
CONFIG_PAIRS=(
diff --git a/scripts/github_image_helper.sh b/scripts/github_image_helper.sh
index dd4aa2ec4..514abfc73 100755
--- a/scripts/github_image_helper.sh
+++ b/scripts/github_image_helper.sh
@@ -49,7 +49,7 @@ function _gitreponame() {
# get the current git working copy's Malcolm version (grepped from docker-compose.yml, e.g., 5.0.3)
function _malcolmversion() {
- grep -P "^\s+image:\s*malcolm" "$(_gittoplevel)"/docker-compose.yml | awk '{print $2}' | cut -d':' -f2 | uniq -c | sort -nr | awk '{print $2}' | head -n 1
+ grep -P "^\s+image:.*/malcolm" "$(_gittoplevel)"/docker-compose.yml | awk '{print $2}' | cut -d':' -f2 | uniq -c | sort -nr | awk '{print $2}' | head -n 1
}
################################################################################
@@ -63,8 +63,8 @@ function _cleanup {
}
################################################################################
-# pull ghcr.io/$OWNER/$IMG:$BRANCH for each image in docker-compose.yml and re-tag as $IMG:$VERSION
-# e.g., pull ghcr.io/johndoe/malcolmnetsec/arkime:main and tag as malcolmnetsec/arkime:5.0.3
+# pull ghcr.io/$OWNER/$IMG:$BRANCH for each image in docker-compose.yml and re-tag as ghcr.io/idaholab/$IMG:$VERSION
+# e.g., pull ghcr.io/johndoe/malcolm/arkime:main and tag as ghcr.io/idaholab/malcolm/arkime:5.0.3
function _PullAndTagGithubWorkflowBuild() {
BRANCH="$(_gitbranch)"
VERSION="$(_malcolmversion)"
@@ -72,7 +72,7 @@ function _PullAndTagGithubWorkflowBuild() {
IMAGE=$1
docker pull ghcr.io/"$OWNER"/"$IMAGE":"$BRANCH" && \
- docker tag ghcr.io/"$OWNER"/"$IMAGE":"$BRANCH" "$IMAGE":"$VERSION"
+ docker tag ghcr.io/"$OWNER"/"$IMAGE":"$BRANCH" ghcr.io/idaholab/"$IMAGE":"$VERSION"
}
function PullAndTagGithubWorkflowImages() {
@@ -80,7 +80,7 @@ function PullAndTagGithubWorkflowImages() {
VERSION="$(_malcolmversion)"
OWNER="$(_gitowner)"
echo "Pulling images from ghcr.io/$OWNER ($BRANCH) and tagging as $VERSION ..."
- for IMG in $(grep image: "$(_gittoplevel)"/docker-compose.yml | _cols 2 | cut -d: -f1 | sort -u); do
+ for IMG in $(grep image: "$(_gittoplevel)"/docker-compose.yml | _cols 2 | cut -d: -f1 | sort -u | sed "s/.*\/\(malcolm\)/\1/"); do
_PullAndTagGithubWorkflowBuild "$IMG"
done
echo "done"
@@ -91,7 +91,7 @@ function PullAndTagGithubWorkflowISOImages() {
VERSION="$(_malcolmversion)"
OWNER="$(_gitowner)"
echo "Pulling ISO wrapper images from ghcr.io/$OWNER ($BRANCH) and tagging as $VERSION ..."
- for IMG in malcolmnetsec/{malcolm,hedgehog}; do
+ for IMG in malcolm/{malcolm,hedgehog}; do
_PullAndTagGithubWorkflowBuild "$IMG"
done
echo "done"
@@ -99,7 +99,7 @@ function PullAndTagGithubWorkflowISOImages() {
################################################################################
# extract the ISO wrapped in the ghcr.io docker image to the current directory
-# e.g., extract live.iso from ghcr.io/johndoe/malcolmnetsec/hedgehog:development
+# e.g., extract live.iso from ghcr.io/johndoe/malcolm/hedgehog:development
# and save locally as hedgehog-5.0.3.iso
function _ExtractISOFromGithubWorkflowBuild() {
BRANCH="$(_gitbranch)"
@@ -111,7 +111,7 @@ function _ExtractISOFromGithubWorkflowBuild() {
ISO_NAME="${3:-"$TOOL-$VERSION"}"
docker run --rm -d --name "$TOOL"-iso-srv -p 127.0.0.1:8000:8000/tcp -e QEMU_START=false -e NOVNC_START=false \
- ghcr.io/"$OWNER"/malcolmnetsec/"$TOOL":"$BRANCH" && \
+ ghcr.io/"$OWNER"/malcolm/"$TOOL":"$BRANCH" && \
sleep 10 && \
curl -sSL -o "$DEST_DIR"/"$ISO_NAME".iso http://localhost:8000/live.iso && \
curl -sSL -o "$DEST_DIR"/"$ISO_NAME"-build.log http://localhost:8000/"$TOOL"-"$VERSION"-build.log
diff --git a/scripts/install.py b/scripts/install.py
index 3d2c878f9..b43fad343 100755
--- a/scripts/install.py
+++ b/scripts/install.py
@@ -47,6 +47,7 @@
requests_imported = None
yaml_imported = None
+
###################################################################################################
# get interactive user response to Y/N question
def InstallerYesOrNo(
@@ -100,7 +101,6 @@ def InstallerChooseOne(
defaultBehavior=UserInputDefaultsBehavior.DefaultsPrompt | UserInputDefaultsBehavior.DefaultsAccept,
uiMode=UserInterfaceMode.InteractionInput | UserInterfaceMode.InteractionDialog,
):
-
global args
defBehavior = defaultBehavior
if args.acceptDefaultsNonInteractive and not forceInteraction:
@@ -123,7 +123,6 @@ def InstallerChooseMultiple(
defaultBehavior=UserInputDefaultsBehavior.DefaultsPrompt | UserInputDefaultsBehavior.DefaultsAccept,
uiMode=UserInterfaceMode.InteractionInput | UserInterfaceMode.InteractionDialog,
):
-
global args
defBehavior = defaultBehavior
if args.acceptDefaultsNonInteractive and not forceInteraction:
@@ -157,13 +156,12 @@ def InstallerDisplayMessage(
)
-def TrueOrFalseQuote(expression):
- return "'{}'".format('true' if expression else 'false')
+def TrueOrFalseQuote(expression, falseIsBlank=False):
+ return "'{}'".format('true' if expression else '' if falseIsBlank else 'false')
###################################################################################################
class Installer(object):
-
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def __init__(self, debug=False, configOnly=False):
self.debug = debug
@@ -191,7 +189,6 @@ def __del__(self):
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def run_process(self, command, stdout=True, stderr=True, stdin=None, privileged=False, retry=0, retrySleepSec=5):
-
# if privileged, put the sudo command at the beginning of the command
if privileged and (len(self.sudoCmd) > 0):
command = self.sudoCmd + command
@@ -270,7 +267,6 @@ def install_malcolm_files(self, malcolm_install_file):
f'Extract Malcolm runtime files from {malcolm_install_file}', default=True, forceInteraction=True
)
):
-
# determine and create destination path for installation
while True:
defaultPath = os.path.join(origPath, 'malcolm')
@@ -814,11 +810,22 @@ def tweak_malcolm_runtime(
'Should Malcolm enrich network traffic using NetBox?',
default=netboxEnabled,
)
+ netboxSiteName = (
+ InstallerAskForString(
+ 'Specify default NetBox site name',
+ default='',
+ )
+ if netboxEnabled
+ else ''
+ )
+ if len(netboxSiteName) == 0:
+ netboxSiteName = 'Malcolm'
# input packet capture parameters
pcapNetSniff = False
pcapTcpDump = False
liveZeek = False
+ zeekICSBestGuess = False
liveSuricata = False
pcapIface = 'lo'
tweakIface = False
@@ -838,6 +845,10 @@ def tweak_malcolm_runtime(
liveSuricata = InstallerYesOrNo('Should Malcolm analyze live network traffic with Suricata?', default=False)
liveZeek = InstallerYesOrNo('Should Malcolm analyze live network traffic with Zeek?', default=False)
+ zeekICSBestGuess = (autoZeek or liveZeek) and InstallerYesOrNo(
+ 'Should Malcolm use "best guess" to identify potential OT/ICS traffic with Zeek?', default=False
+ )
+
if pcapNetSniff or pcapTcpDump or liveZeek or liveSuricata:
pcapIface = ''
while len(pcapIface) <= 0:
@@ -931,6 +942,14 @@ def tweak_malcolm_runtime(
# zeek file extraction mode
line = re.sub(r'(ZEEK_EXTRACTOR_MODE\s*:\s*)(\S+)', fr"\g<1>'{fileCarveMode}'", line)
+ elif 'ZEEK_DISABLE_BEST_GUESS_ICS' in line:
+ # disable/enable ICS best guess
+ line = re.sub(
+ r'(ZEEK_DISABLE_BEST_GUESS_ICS\s*:\s*)(\S+)',
+ fr"\g<1>{TrueOrFalseQuote(not zeekICSBestGuess, falseIsBlank=True)}",
+ line,
+ )
+
elif 'EXTRACTED_FILE_PRESERVATION' in line:
# zeek file preservation mode
line = re.sub(
@@ -1114,6 +1133,14 @@ def tweak_malcolm_runtime(
line,
)
+ elif 'NETBOX_DEFAULT_SITE' in line:
+ # NetBox default site name
+ line = re.sub(
+ r'(NETBOX_DEFAULT_SITE\s*:\s*)(\S+)',
+ fr"\g<1>'{netboxSiteName}'",
+ line,
+ )
+
elif 'pipeline.workers' in line:
# logstash pipeline workers
line = re.sub(r'(pipeline\.workers\s*:\s*)(\S+)', fr"\g<1>{lsWorkers}", line)
@@ -1557,6 +1584,11 @@ def tweak_malcolm_runtime(
# restore ownership
os.chown(composeFile, origUid, origGuid)
+ try:
+ Touch(MalcolmCfgRunOnceFile)
+ except Exception as e:
+ pass
+
# if the Malcolm dir is owned by root, see if they want to reassign ownership to a non-root user
if (
((self.platform == PLATFORM_LINUX) or (self.platform == PLATFORM_MAC))
@@ -1584,7 +1616,6 @@ def tweak_malcolm_runtime(
###################################################################################################
class LinuxInstaller(Installer):
-
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def __init__(self, debug=False, configOnly=False):
super().__init__(debug, configOnly)
@@ -1737,9 +1768,7 @@ def install_docker(self):
result = True
elif InstallerYesOrNo('"docker info" failed, attempt to install Docker?', default=True):
-
if InstallerYesOrNo('Attempt to install Docker using official repositories?', default=True):
-
# install required packages for repo-based install
if self.distro == PLATFORM_LINUX_UBUNTU:
requiredRepoPackages = [
@@ -1771,7 +1800,6 @@ def install_docker(self):
# install docker via repo if possible
dockerPackages = []
if ((self.distro == PLATFORM_LINUX_UBUNTU) or (self.distro == PLATFORM_LINUX_DEBIAN)) and self.codename:
-
# for debian/ubuntu, add docker GPG key and check its fingerprint
if self.debug:
eprint("Requesting docker GPG key for package signing")
@@ -1817,7 +1845,6 @@ def install_docker(self):
dockerPackages.extend(['docker-ce', 'docker-ce-cli', 'docker-compose-plugin', 'containerd.io'])
elif self.distro == PLATFORM_LINUX_FEDORA:
-
# add docker fedora repository
if self.debug:
eprint("Adding docker repository")
@@ -1944,7 +1971,6 @@ def install_docker_compose(self):
if (err != 0) and InstallerYesOrNo(
'"docker-compose version" failed, attempt to install docker-compose?', default=True
):
-
if InstallerYesOrNo('Install docker-compose directly from docker github?', default=True):
# download docker-compose from github and put it in /usr/local/bin
@@ -2010,7 +2036,6 @@ def install_docker_compose(self):
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def tweak_system_files(self):
-
# make some system configuration changes with permission
ConfigLines = namedtuple("ConfigLines", ["distros", "filename", "prefix", "description", "lines"], rename=False)
@@ -2123,14 +2148,12 @@ def tweak_system_files(self):
]
for config in configLinesToAdd:
-
if ((len(config.distros) == 0) or (self.codename in config.distros)) and (
os.path.isfile(config.filename)
or InstallerYesOrNo(
f'\n{config.description}\n{config.filename} does not exist, create it?', default=True
)
):
-
confFileLines = (
[line.rstrip('\n') for line in open(config.filename)] if os.path.isfile(config.filename) else []
)
@@ -2146,7 +2169,6 @@ def tweak_system_files(self):
)
)
):
-
echoNewLineJoin = '\\n'
err, out = self.run_process(
[
@@ -2160,7 +2182,6 @@ def tweak_system_files(self):
###################################################################################################
class MacInstaller(Installer):
-
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def __init__(self, debug=False, configOnly=False):
super().__init__(debug, configOnly)
@@ -2254,7 +2275,6 @@ def install_docker(self):
result = True
elif InstallerYesOrNo('"docker info" failed, attempt to install Docker?', default=True):
-
if self.useBrew:
# install docker via brew cask (requires user interaction)
dockerPackages = [MAC_BREW_DOCKER_PACKAGE, "docker-compose"]
@@ -2305,7 +2325,6 @@ def install_docker(self):
and os.path.isfile(settingsFile)
and InstallerYesOrNo(f'Configure Docker resource usage in {settingsFile}?', default=True)
):
-
# adjust CPU and RAM based on system resources
if self.totalCores >= 16:
newCpus = 12
diff --git a/scripts/malcolm_common.py b/scripts/malcolm_common.py
index fe5e990e0..ad956ee39 100644
--- a/scripts/malcolm_common.py
+++ b/scripts/malcolm_common.py
@@ -43,6 +43,7 @@
ScriptPath = os.path.dirname(os.path.realpath(__file__))
MalcolmPath = os.path.abspath(os.path.join(ScriptPath, os.pardir))
MalcolmTmpPath = os.path.join(MalcolmPath, '.tmp')
+MalcolmCfgRunOnceFile = os.path.join(MalcolmPath, '.configured')
###################################################################################################
PLATFORM_WINDOWS = "Windows"
@@ -278,7 +279,6 @@ def YesOrNo(
uiMode=UserInterfaceMode.InteractionDialog | UserInterfaceMode.InteractionInput,
clearScreen=False,
):
-
if (default is not None) and (
(defaultBehavior & UserInputDefaultsBehavior.DefaultsAccept)
and (defaultBehavior & UserInputDefaultsBehavior.DefaultsNonInteractive)
@@ -296,7 +296,6 @@ def YesOrNo(
reply = 'n' if (reply == Dialog.OK) else 'y'
elif uiMode & UserInterfaceMode.InteractionInput:
-
if (default is not None) and defaultBehavior & UserInputDefaultsBehavior.DefaultsPrompt:
if str2bool(default):
questionStr = f"\n{question} (Y/n): "
@@ -346,7 +345,6 @@ def AskForString(
uiMode=UserInterfaceMode.InteractionDialog | UserInterfaceMode.InteractionInput,
clearScreen=False,
):
-
if (default is not None) and (
(defaultBehavior & UserInputDefaultsBehavior.DefaultsAccept)
and (defaultBehavior & UserInputDefaultsBehavior.DefaultsNonInteractive)
@@ -390,7 +388,6 @@ def AskForPassword(
uiMode=UserInterfaceMode.InteractionDialog | UserInterfaceMode.InteractionInput,
clearScreen=False,
):
-
if (uiMode & UserInterfaceMode.InteractionDialog) and (MainDialog is not None):
code, reply = MainDialog.passwordbox(prompt, insecure=True)
if (code == Dialog.CANCEL) or (code == Dialog.ESC):
@@ -421,7 +418,6 @@ def ChooseOne(
uiMode=UserInterfaceMode.InteractionDialog | UserInterfaceMode.InteractionInput,
clearScreen=False,
):
-
validChoices = [x for x in choices if len(x) == 3 and isinstance(x[0], str) and isinstance(x[2], bool)]
defaulted = next(iter([x for x in validChoices if x[2] is True]), None)
@@ -483,7 +479,6 @@ def ChooseMultiple(
uiMode=UserInterfaceMode.InteractionDialog | UserInterfaceMode.InteractionInput,
clearScreen=False,
):
-
validChoices = [x for x in choices if len(x) == 3 and isinstance(x[0], str) and isinstance(x[2], bool)]
defaulted = [x[0] for x in validChoices if x[2] is True]
@@ -576,6 +571,37 @@ def DisplayMessage(
return reply
+###################################################################################################
+# display streaming content via Dialog.programbox
+def DisplayProgramBox(
+ filePath=None,
+ fileFlags=0,
+ fileDescriptor=None,
+ text=None,
+ clearScreen=False,
+):
+ reply = False
+
+ if MainDialog is not None:
+ code = MainDialog.programbox(
+ file_path=filePath,
+ file_flags=fileFlags,
+ fd=fileDescriptor,
+ text=text,
+ width=78,
+ height=20,
+ )
+ if (code == Dialog.CANCEL) or (code == Dialog.ESC):
+ raise RuntimeError("Operation cancelled")
+ else:
+ reply = True
+
+ if clearScreen is True:
+ ClearScreen()
+
+ return reply
+
+
###################################################################################################
# convenient boolean argument parsing
def str2bool(v):
@@ -653,7 +679,6 @@ def DeepGet(d, keys, default=None):
###################################################################################################
# run command with arguments and return its exit code, stdout, and stderr
def check_output_input(*popenargs, **kwargs):
-
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden')
@@ -687,7 +712,6 @@ def check_output_input(*popenargs, **kwargs):
def run_process(
command, stdout=True, stderr=True, stdin=None, retry=0, retrySleepSec=5, cwd=None, env=None, debug=False
):
-
retcode = -1
output = []
@@ -728,7 +752,6 @@ def DoDynamicImport(importName, pipPkgName, interactive=False, debug=False):
# see if we've already imported it
if not DynImports[importName]:
-
# if not, attempt the import
try:
tmpImport = importlib.import_module(importName)
@@ -846,3 +869,10 @@ def RemoveEmptyFolders(path, removeRoot=True):
os.rmdir(path)
except Exception:
pass
+
+
+###################################################################################################
+# open a file and close it, updating its access time
+def Touch(filename):
+ open(filename, 'a').close()
+ os.utime(filename, None)
diff --git a/scripts/third-party-logs/fluent-bit-setup.ps1 b/scripts/third-party-logs/fluent-bit-setup.ps1
index 0b8bee2dd..b9cf32ee2 100644
--- a/scripts/third-party-logs/fluent-bit-setup.ps1
+++ b/scripts/third-party-logs/fluent-bit-setup.ps1
@@ -9,7 +9,7 @@
###############################################################################
$fluent_bit_version = '2.0'
-$fluent_bit_full_version = '2.0.9'
+$fluent_bit_full_version = '2.0.10'
###############################################################################
# select an item from a menu provided in an array
diff --git a/sensor-iso/Dockerfile b/sensor-iso/Dockerfile
index 5c564c2d2..c96bc5753 100644
--- a/sensor-iso/Dockerfile
+++ b/sensor-iso/Dockerfile
@@ -8,7 +8,7 @@ LABEL org.opencontainers.image.url='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.documentation='https://github.com/cisagov/Malcolm/blob/main/sensor-iso/README.md'
LABEL org.opencontainers.image.source='https://github.com/cisagov/Malcolm'
LABEL org.opencontainers.image.vendor='Cybersecurity and Infrastructure Security Agency'
-LABEL org.opencontainers.image.title='malcolmnetsec/hedgehog'
+LABEL org.opencontainers.image.title='ghcr.io/cisagov/malcolm/hedgehog'
LABEL org.opencontainers.image.description='Hedgehog Linux network sensor live image in qemu'
ARG QEMU_CPU=4
diff --git a/sensor-iso/config/hooks/normal/0910-sensor-build.hook.chroot b/sensor-iso/config/hooks/normal/0910-sensor-build.hook.chroot
index af459e612..2c2e182d4 100755
--- a/sensor-iso/config/hooks/normal/0910-sensor-build.hook.chroot
+++ b/sensor-iso/config/hooks/normal/0910-sensor-build.hook.chroot
@@ -13,8 +13,8 @@ GITHUB_API_CURL_ARGS+=( -H )
GITHUB_API_CURL_ARGS+=( "Accept: application/vnd.github.v3+json" )
[[ -n "$GITHUB_TOKEN" ]] && GITHUB_API_CURL_ARGS+=( -H ) && GITHUB_API_CURL_ARGS+=( "Authorization: token $GITHUB_TOKEN" )
-ZEEK_VER=5.0.7-0
-ZEEK_LTS=true
+ZEEK_VER=5.2.0-0
+ZEEK_LTS=
ZEEK_DIR="/opt/zeek"
export PATH="${ZEEK_DIR}"/bin:$PATH
diff --git a/sensor-iso/config/hooks/normal/0991-security-performance.hook.chroot b/sensor-iso/config/hooks/normal/0991-security-performance.hook.chroot
index e8b6bcbd0..e3a7712f3 100755
--- a/sensor-iso/config/hooks/normal/0991-security-performance.hook.chroot
+++ b/sensor-iso/config/hooks/normal/0991-security-performance.hook.chroot
@@ -10,6 +10,7 @@ sed -i "s/LOGLEVEL=.*/LOGLEVEL=off/" /etc/ufw/ufw.conf
UFW_ALLOW_RULES=(
ntp
ssh
+ 9009:9013/tcp
)
for i in ${UFW_ALLOW_RULES[@]}; do
ufw allow "$i"
diff --git a/sensor-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml b/sensor-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml
index d70f54885..5175d0bbe 100644
--- a/sensor-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml
+++ b/sensor-iso/config/includes.chroot/etc/skel/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-session.xml
@@ -15,6 +15,7 @@
+
diff --git a/shared/bin/agg-init.sh b/shared/bin/agg-init.sh
index e0f6a7608..e3127b3c9 100755
--- a/shared/bin/agg-init.sh
+++ b/shared/bin/agg-init.sh
@@ -24,7 +24,7 @@ if [[ -r "$SCRIPT_PATH"/common-init.sh ]]; then
if [[ -r "$MAIN_USER_HOME"/Malcolm/scripts/install.py ]]; then
/usr/bin/env python3 "$MAIN_USER_HOME"/Malcolm/scripts/install.py --configure --defaults --restart-malcolm
fi
- rm -f "$MAIN_USER_HOME"/Malcolm/firstrun
+ rm -f "$MAIN_USER_HOME"/Malcolm/firstrun "$MAIN_USER_HOME"/Malcolm/.configured
fi
# make sure read permission is set correctly for the nginx worker processes
diff --git a/shared/bin/configure-capture.py b/shared/bin/configure-capture.py
index 2e9476fe1..6e3ad7b51 100755
--- a/shared/bin/configure-capture.py
+++ b/shared/bin/configure-capture.py
@@ -13,8 +13,10 @@
import fileinput
from collections import defaultdict
from dialog import Dialog
+
from zeek_carve_utils import *
from sensorcommon import *
+from subprocess import PIPE, STDOUT, Popen, CalledProcessError
class Constants:
@@ -56,6 +58,7 @@ class Constants:
FILEBEAT = 'filebeat'
MISCBEAT = 'miscbeat'
ARKIMECAP = 'arkime-capture'
+ TX_RX_SECURE = 'ssl-client-receive'
BEAT_DIR = {
FILEBEAT: f'/opt/sensor/sensor_ctl/{FILEBEAT}',
@@ -121,6 +124,7 @@ class Constants:
MSG_CONFIG_ARKIME_COMPRESSION_LEVEL = 'Specify Arkime PCAP {} compression level'
MSG_CONFIG_FILEBEAT = (f'{FILEBEAT}', f'Configure Zeek log forwarding via {FILEBEAT}')
MSG_CONFIG_MISCBEAT = (f'{MISCBEAT}', f"Configure miscellaneous sensor metrics forwarding via {FILEBEAT}")
+ MSG_CONFIG_TXRX = (f'{TX_RX_SECURE}', f'Receive client SSL files for {FILEBEAT} from Malcolm')
MSG_OVERWRITE_CONFIG = '{} is already configured, overwrite current settings?'
MSG_IDENTIFY_NICS = 'Do you need help identifying network interfaces?'
MSG_BACKGROUND_TITLE = 'Sensor Configuration'
@@ -170,6 +174,7 @@ class Constants:
d = Dialog(dialog='dialog', autowidgetsize=True)
d.set_background_title(Constants.MSG_BACKGROUND_TITLE)
+
###################################################################################################
def mime_to_extension_mappings(mapfile):
# get all mime-to-extension mappings from our mapping zeek file into a dictionary
@@ -201,7 +206,6 @@ def input_opensearch_connection_info(
default_username=None,
default_password=None,
):
-
return_dict = defaultdict(str)
# OpenSearch configuration
@@ -413,7 +417,6 @@ def main():
no_label="Cancel",
)
if code == Dialog.OK:
-
# modify specified values in-place in SENSOR_CAPTURE_CONFIG file
autostart_re = re.compile(r"(\bAUTOSTART_\w+)\s*=\s*.+?$")
with fileinput.FileInput(Constants.SENSOR_CAPTURE_CONFIG, inplace=True, backup='.bak') as file:
@@ -602,7 +605,6 @@ def main():
if zeek_carve_mode.startswith(Constants.ZEEK_FILE_CARVING_CUSTOM) or zeek_carve_mode.startswith(
Constants.ZEEK_FILE_CARVING_MAPPED_MINUS_TEXT
):
-
# get all known mime-to-extension mappings into a dictionary
all_mime_maps = mime_to_extension_mappings(Constants.ZEEK_FILE_CARVING_DEFAULTS)
@@ -660,7 +662,6 @@ def main():
# what to do with carved files
if zeek_carve_mode != Constants.ZEEK_FILE_CARVING_NONE:
-
# select engines for file scanning
scanner_choices = []
for k, v in sorted(capture_config_dict.items()):
@@ -735,7 +736,6 @@ def main():
no_label="Cancel",
)
if code == Dialog.OK:
-
# modify specified values in-place in SENSOR_CAPTURE_CONFIG file
with fileinput.FileInput(Constants.SENSOR_CAPTURE_CONFIG, inplace=True, backup='.bak') as file:
for line in file:
@@ -794,13 +794,22 @@ def main():
elif mode == Constants.MSG_CONFIG_MODE_FORWARD:
##### sensor forwarding (beats) configuration #########################################################################
+ # only display MSG_CONFIG_TXRX if we have appropriate executable and script
+ txRxScript = '/opt/sensor/sensor_ctl/tx-rx-secure.sh'
+ txRxScript = (
+ txRxScript if (txRxScript and os.path.isfile(txRxScript)) else '/usr/local/bin/tx-rx-secure.sh'
+ )
+ txRxScript = txRxScript if (txRxScript and os.path.isfile(txRxScript)) else '/usr/bin/tx-rx-secure.sh'
+ txRxScript = txRxScript if (txRxScript and os.path.isfile(txRxScript)) else None
+
code, fwd_mode = d.menu(
Constants.MSG_CONFIG_MODE,
choices=[
Constants.MSG_CONFIG_ARKIME,
Constants.MSG_CONFIG_FILEBEAT,
Constants.MSG_CONFIG_MISCBEAT,
- ],
+ Constants.MSG_CONFIG_TXRX,
+ ][: 4 if txRxScript else -1],
)
if code != Dialog.OK:
raise CancelledError
@@ -1183,6 +1192,62 @@ def main():
# keystore list failed
raise Exception(Constants.MSG_ERROR_KEYSTORE.format(fwd_mode, "\n".join(add_results)))
+ elif (fwd_mode == Constants.TX_RX_SECURE) and txRxScript:
+ # use tx-rx-secure.sh (via croc) to get certs from Malcolm
+ code = d.msgbox(text='Run auth_setup on Malcolm "Transfer self-signed client certificates..."')
+
+ tx_ip = None
+ rx_token = None
+
+ while True:
+ code, values = d.form(
+ Constants.MSG_CONFIG_TXRX[1],
+ [
+ ('Malcolm Server IP', 1, 1, "", 1, 25, 40, 255),
+ ('Single-use Code Phrase', 2, 1, "", 2, 25, 40, 255),
+ ],
+ )
+ values = [x.strip() for x in values]
+
+ if (code == Dialog.CANCEL) or (code == Dialog.ESC):
+ raise CancelledError
+
+ elif (len(values[0]) >= 3) and (len(values[1]) >= 16):
+ tx_ip = values[0]
+ rx_token = values[1]
+ break
+
+ for oldFile in ('ca.crt', 'client.crt', 'client.key'):
+ try:
+ os.unlink(os.path.join(Constants.BEAT_LS_CERT_DIR_DEFAULT, oldFile))
+ except Exception:
+ pass
+
+ with Popen(
+ [
+ txRxScript,
+ '-s',
+ tx_ip,
+ '-r',
+ rx_token,
+ '-o',
+ Constants.BEAT_LS_CERT_DIR_DEFAULT,
+ ],
+ stdout=PIPE,
+ stderr=STDOUT,
+ bufsize=0,
+ ) as p:
+ d.programbox(
+ fd=p.stdout.fileno(),
+ text=os.path.basename(txRxScript),
+ width=78,
+ height=20,
+ )
+ if (code == Dialog.CANCEL) or (code == Dialog.ESC):
+ raise RuntimeError("Operation cancelled")
+
+ p.poll()
+
else:
# we're here without a valid forwarding type selection?!?
raise Exception(Constants.MSG_MESSAGE_ERROR.format(Constants.MSG_INVALID_FORWARDING_TYPE))
diff --git a/shared/bin/docker-uid-gid-setup.sh b/shared/bin/docker-uid-gid-setup.sh
index d111ebfd0..be3cfe384 100755
--- a/shared/bin/docker-uid-gid-setup.sh
+++ b/shared/bin/docker-uid-gid-setup.sh
@@ -10,19 +10,65 @@ unset ENTRYPOINT_ARGS
usermod --non-unique --uid ${PUID:-${DEFAULT_UID}} ${PUSER}
groupmod --non-unique --gid ${PGID:-${DEFAULT_GID}} ${PGROUP}
+# Any directory named with the value of CONFIG_MAP_DIR will have its contents rsync'ed into
+# the parent directory as the container starts up. This is mostly for convenience for
+# Kubernetes configmap objects, which, because the directory into which they are
+# copied is made read-only, doesn't play nicely if you're using it for configuration
+# files which exist in a directory which may need to do read-write operations on other files.
+# This works for nested subdirectories, but don't nest CONFIG_MAP_DIR directories
+# inside of other CONFIG_MAP_DIR directories.
+#
+# TODO: else with cpio, tar, cp?
+
+if [[ -n ${CONFIG_MAP_DIR} ]] && command -v rsync >/dev/null 2>&1; then
+ find / -type d -name "${CONFIG_MAP_DIR}" -print -o -path /sys -prune -o -path /proc -prune 2>/dev/null | \
+ awk '{print gsub("/","/"), $0}' | sort -n | cut -d' ' -f2- | \
+ while read CMDIR; do
+
+ rsync --recursive --mkpath --copy-links \
+ "--usermap=*:${PUID:-${DEFAULT_UID}}" \
+ "--groupmap=*:${PGID:-${DEFAULT_GID}}" \
+ --exclude='..*' --exclude="${CONFIG_MAP_DIR}"/ --exclude=.dockerignore --exclude=.gitignore \
+ "${CMDIR}"/ "${CMDIR}"/../
+
+ # TODO - regarding ownership and permissions:
+ #
+ # I *think* what we want to do here is change the ownership of
+ # these configmap-copied files to be owned by the user specified by PUID
+ # (falling back to DEFAULT_UID) and PGID (falling back to DEFAULT_GID).
+ # The other option would be to preserve the ownership of the source
+ # fine with --owner --group, but I don't think that's what we want, as
+ # if we were doing this with a docker bind mount they'd likely have the
+ # permissions of the original user on the host, anyway, which is
+ # supposed to match up to PUID/PGID.
+ #
+ # For permissions, rsync says that "existing files retain their existing permissions"
+ # and "new files get their normal permission bits set to the source file's
+ # permissions masked with the receiving directory's default permissions"
+ # (either via umask or ACL) which I think is what we want. The other alternative
+ # would be to do something like --chmod=D2755,F644
+
+ done # loop over found CONFIG_MAP_DIR directories
+ CONFIG_MAP_FIND_PRUNE_ARGS=(-o -name "${CONFIG_MAP_DIR}" -prune)
+
+else
+ CONFIG_MAP_FIND_PRUNE_ARGS=()
+fi # check for CONFIG_MAP_DIR and rsync
+
# change user/group ownership of any files/directories belonging to the original IDs
+set +e
if [[ -n ${PUID} ]] && [[ "${PUID}" != "${DEFAULT_UID}" ]]; then
- find / -path /sys -prune -o -path /proc -prune -o -user ${DEFAULT_UID} -exec chown -f ${PUID} "{}" \; || true
+ find / -path /sys -prune -o -path /proc -prune -o -user ${DEFAULT_UID} -exec chown -f ${PUID} "{}" \; 2>/dev/null
fi
if [[ -n ${PGID} ]] && [[ "${PGID}" != "${DEFAULT_GID}" ]]; then
- find / -path /sys -prune -o -path /proc -prune -o -group ${DEFAULT_GID} -exec chown -f :${PGID} "{}" \; || true
+ find / -path /sys -prune -o -path /proc -prune -o -group ${DEFAULT_GID} -exec chown -f :${PGID} "{}" \; 2>/dev/null
fi
# if there are semicolon-separated PUSER_CHOWN entries explicitly specified, chown them too
if [[ -n ${PUSER_CHOWN} ]]; then
IFS=';' read -ra ENTITIES <<< "${PUSER_CHOWN}"
for ENTITY in "${ENTITIES[@]}"; do
- chown -R ${PUSER}:${PGROUP} "${ENTITY}" || true
+ chown -R ${PUSER}:${PGROUP} "${ENTITY}" 2>/dev/null
done
fi
@@ -32,7 +78,7 @@ if [[ -n ${PUSER_CA_TRUST} ]] && command -v openssl >/dev/null 2>&1; then
if [[ -d "${PUSER_CA_TRUST}" ]]; then
while read -r -d ''; do
CA_FILES+=("$REPLY")
- done < <(find "${PUSER_CA_TRUST}" -type f -size +31c -print0 2>/dev/null)
+ done < <(find "${PUSER_CA_TRUST}" -type f -size +31c -print0 "${CONFIG_MAP_FIND_PRUNE_ARGS[@]}" 2>/dev/null)
elif [[ -f "${PUSER_CA_TRUST}" ]]; then
CA_FILES+=("${PUSER_CA_TRUST}")
fi
@@ -71,15 +117,16 @@ if [[ -n ${PUSER_CA_TRUST} ]] && command -v openssl >/dev/null 2>&1; then
CONCAT_FILE=/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem
fi
fi
- [[ -n "$DEST_FILE" ]] && ( cp "$CA_FILE" "$DEST_FILE" && chmod 644 "$DEST_FILE" ) || true
+ [[ -n "$DEST_FILE" ]] && ( cp "$CA_FILE" "$DEST_FILE" && chmod 644 "$DEST_FILE" )
[[ -n "$CONCAT_FILE" ]] && \
( echo "" >> "$CONCAT_FILE" && \
echo "# $CA_NAME_ORIG" >> "$CONCAT_FILE" \
- && cat "$CA_FILE" >> "$CONCAT_FILE" ) || true
+ && cat "$CA_FILE" >> "$CONCAT_FILE" )
done
- command -v update-ca-certificates >/dev/null 2>&1 && update-ca-certificates >/dev/null 2>&1 || true
- command -v update-ca-trust >/dev/null 2>&1 && update-ca-trust extract >/dev/null 2>&1 || true
+ command -v update-ca-certificates >/dev/null 2>&1 && update-ca-certificates >/dev/null 2>&1
+ command -v update-ca-trust >/dev/null 2>&1 && update-ca-trust extract >/dev/null 2>&1
fi
+set -e
# determine if we are now dropping privileges to exec ENTRYPOINT_CMD
if [[ "$PUSER_PRIV_DROP" == "true" ]]; then
diff --git a/shared/bin/malcolm-first-run-configure.sh b/shared/bin/malcolm-first-run-configure.sh
new file mode 100755
index 000000000..1f82c876e
--- /dev/null
+++ b/shared/bin/malcolm-first-run-configure.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Copyright (c) 2023 Battelle Energy Alliance, LLC. All rights reserved.
+
+grep -q boot=live /proc/cmdline && exit 0
+
+if [[ -d "$HOME"/Malcolm ]] && [[ ! -f "$HOME"/Malcolm/.configured ]] ; then
+ tilix \
+ --title="Malcolm Configuration" \
+ --maximize \
+ -e /bin/bash -l -c "/usr/bin/python3 ~/Malcolm/scripts/install.py --configure"
+fi
diff --git a/shared/bin/tx-rx-secure.sh b/shared/bin/tx-rx-secure.sh
new file mode 100755
index 000000000..eef32adf2
--- /dev/null
+++ b/shared/bin/tx-rx-secure.sh
@@ -0,0 +1,174 @@
+#!/usr/bin/env bash
+
+# use croc (https://github.com/schollz/croc) to securely send and receive file(s)
+# via the use of single-use code phrase using a local-only relay by default
+
+###############################################################################
+# script options
+set -o pipefail
+shopt -s nocasematch
+ENCODING="utf-8"
+
+###############################################################################
+# script variables
+MODE=
+SERVER=
+PORTS="9009,9010,9011,9012,9013"
+TOKEN=
+LOCAL_RELAY=yes
+RELAY_PID=
+RELAY_OUTPUT_REDIR=/dev/null
+OUTPUT_ARGS=()
+VERBOSE_FLAG=
+DEBUG_FLAG=
+
+# don't let the environment variables croc normally uses get in our way
+unset CROC_RELAY
+unset CROC_RELAY6
+unset CROC_PASS
+unset SOCKS5_PROXY
+
+###############################################################################
+# show script usage
+function help() {
+ echo -e "$(basename $0)\n"
+ echo -e "-v enable bash verbosity"
+ echo -e "-d enable croc debugging"
+ echo -e "-g \"global\" (don't use only local connections)"
+ echo -e "-p value relay port(s) (default: \"${PORTS}\")"
+
+ echo -e "\ntransmit mode:"
+ echo -e "-t enable transmit mode"
+ echo -e "-s value bind local IP"
+ echo -e " file(s) to transfer"
+
+ echo -e "\nreceive mode:"
+ echo -e "-s value local server IP or hostname"
+ echo -e "-r value one-time receive token"
+ echo -e "-o value output folder for file(s) received (default: .)"
+ exit 1
+}
+
+###############################################################################
+# parse command-line parameters
+while getopts 'vdgo:r:s:tp:' OPTION; do
+ case "$OPTION" in
+
+ v)
+ VERBOSE_FLAG="-v"
+ RELAY_OUTPUT_REDIR=/dev/stderr
+ set -x
+ ;;
+
+ d)
+ DEBUG_FLAG="--debug"
+ RELAY_OUTPUT_REDIR=/dev/stderr
+ ;;
+
+ g)
+ LOCAL_RELAY=
+ ;;
+
+ o)
+ OUTPUT_ARGS=(--out "$OPTARG")
+ ;;
+
+ r)
+ MODE=rx
+ TOKEN="$OPTARG"
+ ;;
+
+ s)
+ SERVER="$OPTARG"
+ ;;
+
+ t)
+ MODE=tx
+ ;;
+
+ p)
+ PORTS="$OPTARG"
+ ;;
+
+ ?)
+ help >&2
+ exit 1;
+ ;;
+
+ esac
+done
+shift "$(($OPTIND -1))"
+
+# without croc what are we even doing here
+if ! command -v croc >/dev/null 2>&1; then
+ echo "$(basename $0) requires croc (https://github.com/schollz/croc)" >&2
+ exit 1
+fi
+
+###############################################################################
+# cleanup: kill croc relay process on exit, if we started one
+function cleanup {
+ if [[ -n "${RELAY_PID}" ]] && kill -s 0 "${RELAY_PID}" >/dev/null 2>&1; then
+ [[ -n "${VERBOSE_FLAG}" ]] && echo "killing relay process ${RELAY_PID}"
+ kill "${RELAY_PID}" >/dev/null 2>&1
+ sleep 1
+ if kill -s 0 "${RELAY_PID}" >/dev/null 2>&1; then
+ sleep 5
+ [[ -n "${VERBOSE_FLAG}" ]] && echo "killing (-9) relay process ${RELAY_PID}"
+ kill -s KILL "${RELAY_PID}" >/dev/null 2>&1
+ fi
+ fi
+}
+
+###############################################################################
+
+trap "cleanup" EXIT
+
+RELAY_ARGS=()
+CURVE_ARGS=(--curve siec)
+HASH_ARGS=(--hash xxhash)
+FIRST_PORT="$(echo "${PORTS}" | cut -d, -f1)"
+
+if [[ -n "${PORTS}" ]] && [[ "${MODE}" == "tx" ]] && (( $# > 0 )); then
+ # we have ports defined, have requested transmit mode, and have been given file(s) to transmit
+
+ if [[ -n "${LOCAL_RELAY}" ]]; then
+ # we're using "local-only" mode, which means we need to be the relay ourselves
+ if [[ -z "${SERVER}" ]]; then
+ SERVER="0.0.0.0"
+ command -v ip >/dev/null 2>&1 && SEND_RELAY_IP=$(ip route get 255.255.255.255 2>/dev/null | grep -Po '(?<=src )(\d{1,3}\.){3}\d{1,3}')
+ else
+ SEND_RELAY_IP="${SERVER}"
+ fi
+ croc ${DEBUG_FLAG} "${CURVE_ARGS[@]}" relay --host "${SERVER}" --ports "${PORTS}" >${RELAY_OUTPUT_REDIR} &
+ RELAY_PID=$!
+ sleep 5
+ if [[ -n "${RELAY_PID}" ]] && kill -s 0 "${RELAY_PID}" >/dev/null 2>&1; then
+ [[ -n "${VERBOSE_FLAG}" ]] && echo "relay running at ${RELAY_PID}"
+ else
+ echo "Failed to start relay process" >&2
+ exit 1;
+ fi
+
+ elif [[ -n "${SERVER}" ]]; then
+ # we're not using local-only mode, they *can* but don't have to define a relay
+ SEND_RELAY_IP="${SERVER}"
+ fi
+
+ [[ -n "${SEND_RELAY_IP}" ]] && RELAY_ARGS=(--relay "${SEND_RELAY_IP}:${FIRST_PORT}")
+
+ # run croc
+ croc --yes --ignore-stdin --overwrite ${DEBUG_FLAG} "${CURVE_ARGS[@]}" "${RELAY_ARGS[@]}" send "${HASH_ARGS[@]}" "$@"
+
+elif [[ -n "${PORTS}" ]] && ( [[ "${MODE}" == "rx" ]] && [[ -n "${TOKEN}" ]] && ( [[ -z "${LOCAL_RELAY}" ]] || [[ -n "${SERVER}" ]] ) ); then
+ # we have ports defined, have requested receive mode, have been given a token, and either have a relay IP or are not using a local relay
+
+ [[ -n "${SERVER}" ]] && RELAY_ARGS=(--relay "${SERVER}:${FIRST_PORT}")
+
+ # run croc
+ croc --yes --ignore-stdin --overwrite ${DEBUG_FLAG} "${CURVE_ARGS[@]}" "${RELAY_ARGS[@]}" "${OUTPUT_ARGS[@]}" "${TOKEN}"
+
+else
+ help >&2
+ exit 1;
+fi
\ No newline at end of file
diff --git a/shared/bin/zeek_carve_utils.py b/shared/bin/zeek_carve_utils.py
index 16460e141..28e8b3af0 100644
--- a/shared/bin/zeek_carve_utils.py
+++ b/shared/bin/zeek_carve_utils.py
@@ -320,7 +320,6 @@ def touch(filename):
###################################################################################################
# run command with arguments and return its exit code, stdout, and stderr
def check_output_input(*popenargs, **kwargs):
-
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden')
@@ -352,7 +351,6 @@ def check_output_input(*popenargs, **kwargs):
###################################################################################################
# run command with arguments and return its exit code and output
def run_process(command, stdout=True, stderr=True, stdin=None, cwd=None, env=None, debug=False):
-
retcode = -1
output = []
@@ -403,7 +401,6 @@ def value(self):
###################################################################################################
class CarvedFileSubscriberThreaded:
-
# ---------------------------------------------------------------------------------
# constructor
def __init__(
@@ -436,7 +433,6 @@ def __init__(
# ---------------------------------------------------------------------------------
def Pull(self, scanWorkerId=0):
-
fileinfo = defaultdict(str)
with self.lock:
@@ -494,7 +490,6 @@ def format(cls, fileName, response):
###################################################################################################
# class for searching for a hash with a VirusTotal public API, handling rate limiting
class VirusTotalSearch(FileScanProvider):
-
# ---------------------------------------------------------------------------------
# constructor
def __init__(self, apiKey, reqLimit=None, reqLimitSec=None):
@@ -532,7 +527,6 @@ def submit(self, fileName=None, fileSize=None, fileType=None, block=False, timeo
# while limit only repeats if block=True
while (not allowed) and (response is None):
-
with self.lock:
# first make sure we haven't exceeded rate limits
nowTime = int(time.time())
@@ -645,7 +639,6 @@ def format(fileName, response):
###################################################################################################
# class for scanning a file with ClamAV
class ClamAVScan(FileScanProvider):
-
# ---------------------------------------------------------------------------------
# constructor
def __init__(self, debug=False, verboseDebug=False, socketFileName=None, reqLimit=None):
@@ -678,7 +671,6 @@ def submit(self, fileName=None, fileSize=None, fileType=None, block=False, timeo
# while limit only repeats if block=True
while (not allowed) and (not clamavResult.finished):
-
if not connected:
if self.verboseDebug:
eprint(f"{get_ident()}: ClamAV attempting connection")
@@ -781,7 +773,6 @@ def format(fileName, response):
###################################################################################################
# class for scanning a file with Yara
class YaraScan(FileScanProvider):
-
# ---------------------------------------------------------------------------------
# constructor
def __init__(self, debug=False, verboseDebug=False, rulesDirs=[], reqLimit=None):
@@ -833,7 +824,6 @@ def submit(self, fileName=None, fileSize=None, fileType=None, block=False, timeo
# while limit only repeats if block=True
while (not allowed) and (not yaraResult.finished):
-
# first make sure we haven't exceeded rate limits
if self.scanningFilesCount.increment() <= self.reqLimit:
# we've got fewer than the allowed requests open, so we're good to go!
@@ -916,7 +906,6 @@ def format(fileName, response):
###################################################################################################
# class for scanning a file with Capa
class CapaScan(FileScanProvider):
-
# ---------------------------------------------------------------------------------
# constructor
def __init__(self, debug=False, verboseDebug=False, rulesDir=None, verboseHits=False, reqLimit=None):
@@ -951,7 +940,6 @@ def submit(self, fileName=None, fileSize=None, fileType=None, block=False, timeo
# while limit only repeats if block=True
while (not allowed) and (not capaResult.finished):
-
# first make sure we haven't exceeded rate limits
if self.scanningFilesCount.increment() <= self.reqLimit:
# we've got fewer than the allowed requests open, so we're good to go!
diff --git a/shared/bin/zeek_install_plugins.sh b/shared/bin/zeek_install_plugins.sh
index f3840098c..d87fde9d5 100755
--- a/shared/bin/zeek_install_plugins.sh
+++ b/shared/bin/zeek_install_plugins.sh
@@ -73,8 +73,6 @@ ZKG_GITHUB_URLS=(
"https://github.com/0xl3x1/zeek-EternalSafety"
"https://github.com/0xxon/cve-2020-0601"
"https://github.com/0xxon/cve-2020-13777"
- "https://github.com/amzn/zeek-plugin-profinet|master"
- "https://github.com/amzn/zeek-plugin-tds|master"
"https://github.com/cisagov/icsnpp-bacnet"
"https://github.com/cisagov/icsnpp-bsap"
"https://github.com/cisagov/icsnpp-dnp3"
@@ -84,6 +82,7 @@ ZKG_GITHUB_URLS=(
"https://github.com/cisagov/icsnpp-modbus"
"https://github.com/cisagov/icsnpp-opcua-binary"
"https://github.com/cisagov/icsnpp-s7comm"
+ "https://github.com/cisagov/icsnpp-synchrophasor"
"https://github.com/corelight/callstranger-detector"
"https://github.com/corelight/CVE-2020-16898"
"https://github.com/corelight/CVE-2021-31166"
@@ -108,12 +107,14 @@ ZKG_GITHUB_URLS=(
"https://github.com/corelight/zerologon"
"https://github.com/cybera/zeek-sniffpass"
"https://github.com/mmguero-dev/bzar"
+ "https://github.com/mmguero-dev/ja3"
+ "https://github.com/mmguero-dev/zeek-plugin-profinet|master"
+ "https://github.com/mmguero-dev/zeek-plugin-tds|master"
"https://github.com/ncsa/bro-is-darknet"
"https://github.com/ncsa/bro-simple-scan"
"https://github.com/precurse/zeek-httpattacks"
"https://github.com/salesforce/GQUIC_Protocol_Analyzer"
"https://github.com/salesforce/hassh"
- "https://github.com/salesforce/ja3"
"https://github.com/zeek/spicy-dhcp"
"https://github.com/zeek/spicy-dns"
"https://github.com/zeek/spicy-http"
@@ -121,7 +122,6 @@ ZKG_GITHUB_URLS=(
"https://github.com/zeek/spicy-pe"
"https://github.com/zeek/spicy-tftp"
"https://github.com/zeek/spicy-zip"
- "https://github.com/zeek/zeek-af_packet-plugin|master"
)
for i in ${ZKG_GITHUB_URLS[@]}; do
SRC_DIR="$(clone_github_repo "$i")"
diff --git a/shared/bin/zeek_intel_from_threat_feed.py b/shared/bin/zeek_intel_from_threat_feed.py
index 7870b61b5..e02707a7a 100755
--- a/shared/bin/zeek_intel_from_threat_feed.py
+++ b/shared/bin/zeek_intel_from_threat_feed.py
@@ -19,6 +19,7 @@
script_name = os.path.basename(__file__)
script_path = os.path.dirname(os.path.realpath(__file__))
+
###################################################################################################
# main
def main():