idaholab#598, added malcolm /ready API
mmguero committed Oct 21, 2024
1 parent c91ab89 commit ffde15c
Showing 8 changed files with 239 additions and 7 deletions.
188 changes: 187 additions & 1 deletion api/project/__init__.py
@@ -169,6 +169,15 @@
missing_field_map['ip'] = '0.0.0.0'
missing_field_map['long'] = 0

logstash_default_pipelines = [
    "malcolm-beats",
    "malcolm-enrichment",
    "malcolm-input",
    "malcolm-output",
    "malcolm-suricata",
    "malcolm-zeek",
]

urllib3.disable_warnings()
warnings.filterwarnings(
    "ignore",
@@ -181,9 +190,29 @@

debugApi = app.config["MALCOLM_API_DEBUG"] == "true"

opensearchUrl = app.config["OPENSEARCH_URL"]
arkimeHost = app.config["ARKIME_HOST"]
arkimePort = app.config["ARKIME_PORT"]
arkimeStatusUrl = f'https://{arkimeHost}:{arkimePort}/_ns_/nstest.html'
dashboardsUrl = app.config["DASHBOARDS_URL"]
dashboardsHelperHost = app.config["DASHBOARDS_HELPER_HOST"]
dashboardsMapsPort = app.config["DASHBOARDS_MAPS_PORT"]
databaseMode = malcolm_utils.DatabaseModeStrToEnum(app.config["OPENSEARCH_PRIMARY"])
filebeatHost = app.config["FILEBEAT_HOST"]
filebeatTcpJsonPort = app.config["FILEBEAT_TCP_JSON_PORT"]
freqUrl = app.config["FREQ_URL"]
logstashApiPort = app.config["LOGSTASH_API_PORT"]
logstashHost = app.config["LOGSTASH_HOST"]
logstashLJPort = app.config["LOGSTASH_LJ_PORT"]
logstashMapsPort = app.config["LOGSTASH_LJ_PORT"]
logstashUrl = f'http://{logstashHost}:{logstashApiPort}'
netboxUrl = app.config["NETBOX_URL"]
opensearchUrl = app.config["OPENSEARCH_URL"]
pcapMonitorHost = app.config["PCAP_MONITOR_HOST"]
pcapTopicPort = app.config["PCAP_TOPIC_PORT"]
zeekExtractedFileLoggerHost = app.config["ZEEK_EXTRACTED_FILE_LOGGER_HOST"]
zeekExtractedFileLoggerTopicPort = app.config["ZEEK_EXTRACTED_FILE_LOGGER_TOPIC_PORT"]
zeekExtractedFileMonitorHost = app.config["ZEEK_EXTRACTED_FILE_MONITOR_HOST"]
zeekExtractedFileTopicPort = app.config["ZEEK_EXTRACTED_FILE_TOPIC_PORT"]

opensearchLocal = (databaseMode == malcolm_utils.DatabaseMode.OpenSearchLocal) or (
    opensearchUrl == 'http://opensearch:9200'
@@ -884,6 +913,163 @@ def version():
)


@app.route(
    f"{('/' + app.config['MALCOLM_API_PREFIX']) if app.config['MALCOLM_API_PREFIX'] else ''}/ready", methods=['GET']
)
def ready():
    """Return ready status (true or false) for various Malcolm components
    Parameters
    ----------
    Returns
    -------
    arkime
        true or false, the ready status of Arkime
    dashboards
        true or false, the ready status of Dashboards (or Kibana)
    dashboards_maps
        true or false, the ready status of the dashboards-helper offline map server
    filebeat_tcp
        true or false, the ready status of Filebeat's JSON-over-TCP listener
    freq
        true or false, the ready status of freq
    logstash_lumberjack
        true or false, the ready status of Logstash's lumberjack protocol listener
    logstash_pipelines
        true or false, the ready status of Logstash's default pipelines
    netbox
        true or false, the ready status of NetBox
    opensearch
        true or false, the ready status of OpenSearch (or Elasticsearch)
    pcap_monitor
        true or false, the ready status of the PCAP monitoring process
    zeek_extracted_file_logger
        true or false, the ready status of the Zeek extracted file results logging process
    zeek_extracted_file_monitor
        true or false, the ready status of the Zeek extracted file monitoring process
    """
    global databaseClient

    try:
        arkimeResponse = requests.get(
            arkimeStatusUrl,
            verify=False,
        )
        arkimeResponse.raise_for_status()
        arkimeStatus = True
    except Exception as e:
        arkimeStatus = False
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting Arkime status")

    try:
        dashboardsStatus = requests.get(
            f'{dashboardsUrl}/api/status',
            auth=opensearchReqHttpAuth,
            verify=opensearchSslVerify,
        ).json()
    except Exception as e:
        dashboardsStatus = {}
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting Dashboards status")

    try:
        dashboardsMapsStatus = malcolm_utils.check_socket(dashboardsHelperHost, dashboardsMapsPort)
    except Exception as e:
        dashboardsMapsStatus = False
        if debugApi:
print(f"{type(e).__name__}: {str(e)} getting Logstash offline map server")

    try:
        filebeatTcpJsonStatus = malcolm_utils.check_socket(filebeatHost, filebeatTcpJsonPort)
    except Exception as e:
        filebeatTcpJsonStatus = False
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting filebeat TCP JSON listener status")

    try:
        freqResponse = requests.get(freqUrl)
        freqResponse.raise_for_status()
        freqStatus = True
    except Exception as e:
        freqStatus = False
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting freq status")

    try:
        logstashStats = requests.get(f'{logstashUrl}/_node').json()
    except Exception as e:
        logstashStats = {}
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting Logstash node status")

    try:
        logstashLJStatus = malcolm_utils.check_socket(logstashHost, logstashLJPort)
    except Exception as e:
        logstashLJStatus = False
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting Logstash lumberjack listener status")

    try:
        netboxStatus = requests.get(f'{netboxUrl}/api/status').json()
    except Exception as e:
        netboxStatus = {}
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting NetBox status")

    try:
        openSearchHealth = dict(databaseClient.cluster.health())
    except Exception as e:
        openSearchHealth = {}
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting OpenSearch health")

    try:
        pcapMonitorStatus = malcolm_utils.check_socket(pcapMonitorHost, pcapTopicPort)
    except Exception as e:
        pcapMonitorStatus = False
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting PCAP monitor topic status")

    try:
        zeekExtractedFileMonitorStatus = malcolm_utils.check_socket(
            zeekExtractedFileMonitorHost, zeekExtractedFileTopicPort
        )
    except Exception as e:
        zeekExtractedFileMonitorStatus = False
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting Zeek extracted file monitor topic status")

    try:
        zeekExtractedFileLoggerStatus = malcolm_utils.check_socket(
            zeekExtractedFileLoggerHost, zeekExtractedFileLoggerTopicPort
        )
    except Exception as e:
        zeekExtractedFileLoggerStatus = False
        if debugApi:
            print(f"{type(e).__name__}: {str(e)} getting Zeek extracted file logger topic status")

    return jsonify(
        arkime=arkimeStatus,
        dashboards=(malcolm_utils.deep_get(dashboardsStatus, ["status", "overall", "state"]) == "green"),
        dashboards_maps=dashboardsMapsStatus,
        filebeat_tcp=filebeatTcpJsonStatus,
        freq=freqStatus,
        logstash_lumberjack=logstashLJStatus,
        logstash_pipelines=(malcolm_utils.deep_get(logstashStats, ["status"]) == "green")
        and all(
            pipeline in malcolm_utils.deep_get(logstashStats, ["pipelines"], {})
            for pipeline in logstash_default_pipelines
        ),
        netbox=(malcolm_utils.deep_get(netboxStatus, ["rq-workers-running"], 0) > 0),
        opensearch=(malcolm_utils.deep_get(openSearchHealth, ["status"], 'red') != "red"),
        pcap_monitor=pcapMonitorStatus,
        zeek_extracted_file_logger=zeekExtractedFileLoggerStatus,
        zeek_extracted_file_monitor=zeekExtractedFileMonitorStatus,
    )
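
The per-component values in the `jsonify` call above are pulled out of the various status responses with `malcolm_utils.deep_get`, whose implementation is not part of this diff. A minimal sketch of a helper with the same calling convention (dictionary, list of keys, optional default), shown only to illustrate how those expressions read:

```python
# Illustrative sketch only; not the malcolm_utils.deep_get shipped with Malcolm.
from functools import reduce


def deep_get(d, keys, default=None):
    """Walk nested dicts by the given key path, returning default if any level is missing."""
    return reduce(
        lambda value, key: value.get(key, default) if isinstance(value, dict) else default,
        keys,
        d,
    )


# e.g., deep_get({"status": {"overall": {"state": "green"}}}, ["status", "overall", "state"]) == "green"
```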


@app.route(
f"{('/' + app.config['MALCOLM_API_PREFIX']) if app.config['MALCOLM_API_PREFIX'] else ''}/ping", methods=['GET']
)
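
Several of the readiness checks in `ready()` go through `malcolm_utils.check_socket(host, port)`, which is also not shown in this commit. A hedged sketch of a TCP reachability probe along those lines:

```python
# Hypothetical sketch of a TCP reachability probe like the check_socket calls above;
# the real malcolm_utils implementation is not included in this commit.
import socket


def check_socket(host, port, timeout=5):
    """Return True if a TCP connection to host:port can be established within timeout seconds."""
    try:
        with socket.create_connection((host, int(port)), timeout=timeout):
            return True
    except OSError:
        return False
```
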
27 changes: 22 additions & 5 deletions api/project/config.py
@@ -13,18 +13,35 @@ class Config(object):
    ARKIME_NETWORK_INDEX_PATTERN = f"{os.getenv('ARKIME_NETWORK_INDEX_PATTERN', 'arkime_sessions3-*')}"
    ARKIME_NETWORK_INDEX_TIME_FIELD = f"{os.getenv('ARKIME_NETWORK_INDEX_TIME_FIELD', 'firstPacket')}"

    DOCTYPE_DEFAULT = f"{os.getenv('DOCTYPE_DEFAULT', 'network')}"
    ARKIME_HOST = f"{os.getenv('ARKIME_HOST', 'arkime')}"
    ARKIME_PORT = int(f"{os.getenv('ARKIME_PORT', '8005')}")
    BUILD_DATE = f"{os.getenv('BUILD_DATE', 'unknown')}"
    DASHBOARDS_URL = f"{os.getenv('DASHBOARDS_URL', 'http://dashboards:5601/dashboards')}"
    MALCOLM_API_PREFIX = f"{os.getenv('MALCOLM_API_PREFIX', 'mapi')}"
    DASHBOARDS_HELPER_HOST = f"{os.getenv('DASHBOARDS_HELPER_HOST', 'dashboards-helper')}"
    DASHBOARDS_MAPS_PORT = int(f"{os.getenv('DASHBOARDS_MAPS_PORT', '28991')}")
    DOCTYPE_DEFAULT = f"{os.getenv('DOCTYPE_DEFAULT', 'network')}"
    FILEBEAT_HOST = f"{os.getenv('FILEBEAT_HOST', 'filebeat')}"
    FILEBEAT_TCP_JSON_PORT = int(f"{os.getenv('FILEBEAT_TCP_JSON_PORT', '5045')}")
    FREQ_URL = f"{os.getenv('FREQ_URL', 'http://freq:10004')}"
    LOGSTASH_API_PORT = int(f"{os.getenv('LOGSTASH_API_PORT', '9600')}")
    LOGSTASH_HOST = f"{os.getenv('LOGSTASH_HOST', 'logstash')}"
    LOGSTASH_LJ_PORT = int(f"{os.getenv('LOGSTASH_LJ_PORT', '5044')}")
    MALCOLM_API_DEBUG = f"{os.getenv('MALCOLM_API_DEBUG', 'false')}"
    MALCOLM_API_PREFIX = f"{os.getenv('MALCOLM_API_PREFIX', 'mapi')}"
    MALCOLM_TEMPLATE = f"{os.getenv('MALCOLM_TEMPLATE', 'malcolm_template')}"
    MALCOLM_VERSION = f"{os.getenv('MALCOLM_VERSION', 'unknown')}"
    OPENSEARCH_URL = f"{os.getenv('OPENSEARCH_URL', 'http://opensearch:9200')}"
    OPENSEARCH_PRIMARY = f"{os.getenv('OPENSEARCH_PRIMARY', 'opensearch-local')}"
    OPENSEARCH_SSL_CERTIFICATE_VERIFICATION = f"{os.getenv('OPENSEARCH_SSL_CERTIFICATE_VERIFICATION', 'false')}"
    NETBOX_URL = f"{os.getenv('NETBOX_URL', 'http://netbox:8080/netbox')}"
    OPENSEARCH_CREDS_CONFIG_FILE = (
        f"{os.getenv('OPENSEARCH_CREDS_CONFIG_FILE', '/var/local/curlrc/.opensearch.primary.curlrc')}"
    )
    OPENSEARCH_PRIMARY = f"{os.getenv('OPENSEARCH_PRIMARY', 'opensearch-local')}"
    OPENSEARCH_SSL_CERTIFICATE_VERIFICATION = f"{os.getenv('OPENSEARCH_SSL_CERTIFICATE_VERIFICATION', 'false')}"
    OPENSEARCH_URL = f"{os.getenv('OPENSEARCH_URL', 'http://opensearch:9200')}"
    PCAP_MONITOR_HOST = f"{os.getenv('PCAP_MONITOR_HOST', 'pcap-monitor')}"
    PCAP_TOPIC_PORT = int(f"{os.getenv('PCAP_TOPIC_PORT', '30441')}")
    RESULT_SET_LIMIT = int(f"{os.getenv('RESULT_SET_LIMIT', '500')}")
    VCS_REVISION = f"{os.getenv('VCS_REVISION', 'unknown')}"
    ZEEK_EXTRACTED_FILE_LOGGER_HOST = f"{os.getenv('ZEEK_EXTRACTED_FILE_LOGGER_HOST', 'file-monitor')}"
    ZEEK_EXTRACTED_FILE_LOGGER_TOPIC_PORT = int(f"{os.getenv('ZEEK_EXTRACTED_FILE_LOGGER_TOPIC_PORT', '5988')}")
    ZEEK_EXTRACTED_FILE_MONITOR_HOST = f"{os.getenv('ZEEK_EXTRACTED_FILE_MONITOR_HOST', 'file-monitor')}"
    ZEEK_EXTRACTED_FILE_TOPIC_PORT = int(f"{os.getenv('ZEEK_EXTRACTED_FILE_TOPIC_PORT', '5987')}")
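
Each `Config` setting is read from an environment variable (with a default) when the module is imported, and the API code reads the values back through `app.config[...]` as seen in `api/project/__init__.py`. A minimal sketch of that flow, assuming the class above is importable as `project.config.Config` (the Flask wiring itself is not part of this diff):

```python
# Minimal sketch, assuming the Config class above is importable as project.config.Config;
# the actual Flask app factory wiring is not shown in this commit.
import os

os.environ["LOGSTASH_API_PORT"] = "9601"  # example override; must be set before Config is imported

from flask import Flask

from project.config import Config

app = Flask(__name__)
app.config.from_object(Config)

print(app.config["LOGSTASH_HOST"], app.config["LOGSTASH_API_PORT"])  # -> logstash 9601
```
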
1 change: 1 addition & 0 deletions docker-compose-dev.yml
@@ -1318,6 +1318,7 @@ services:
    env_file:
      - ./config/process.env
      - ./config/ssl.env
      - ./config/upload-common.env
      - ./config/dashboards.env
      - ./config/opensearch.env
    environment:
1 change: 1 addition & 0 deletions docker-compose.yml
@@ -1180,6 +1180,7 @@ services:
    env_file:
      - ./config/process.env
      - ./config/ssl.env
      - ./config/upload-common.env
      - ./config/dashboards.env
      - ./config/opensearch.env
    environment:
24 changes: 24 additions & 0 deletions docs/api-ready.md
@@ -0,0 +1,24 @@
# Malcolm Services Readiness Status

`GET` - /mapi/ready

Returns `true` or `false` indicating the readiness status of various Malcolm services.

**Example output:**

```json
{
"arkime": true,
"dashboards": true,
"dashboards_maps": true,
"filebeat_tcp": false,
"freq": true,
"logstash_lumberjack": true,
"logstash_pipelines": true,
"netbox": true,
"opensearch": true,
"pcap_monitor": true,
"zeek_extracted_file_logger": true,
"zeek_extracted_file_monitor": true
}
```
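
Since each value in the response is a simple boolean, the endpoint lends itself to being polled from a script while Malcolm starts up. An illustrative example (not part of this commit; base URL, TLS verification, and authentication will vary by deployment):

```python
# Illustrative only: poll /mapi/ready until every component reports true.
import time

import requests

MALCOLM_URL = "https://localhost"  # assumed base URL; adjust for your deployment

while True:
    try:
        status = requests.get(f"{MALCOLM_URL}/mapi/ready", verify=False).json()
    except requests.RequestException:
        status = {}
    pending = [name for name, ok in sorted(status.items()) if not ok]
    if status and not pending:
        print("all Malcolm components are ready")
        break
    print(f"still waiting on: {', '.join(pending) or 'the Malcolm API itself'}")
    time.sleep(10)
```
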
1 change: 1 addition & 0 deletions docs/api.md
@@ -6,6 +6,7 @@
* [Fields](api-fields.md)
* [Indices](api-indices.md)
* [Ping](api-ping.md)
* [Ready](api-ready.md)
* [Version](api-version.md)
* [Examples](api-examples.md)

2 changes: 2 additions & 0 deletions kubernetes/08-api.yml
@@ -53,6 +53,8 @@ spec:
    name: process-env
- configMapRef:
    name: ssl-env
- configMapRef:
    name: upload-common-env
- configMapRef:
    name: dashboards-env
- configMapRef:
2 changes: 1 addition & 1 deletion logstash/scripts/logstash-start.sh
@@ -10,7 +10,7 @@ HOST_PIPELINES_DIR="/usr/share/logstash/malcolm-pipelines.available"
# runtime pipelines parent directory
export PIPELINES_DIR="/usr/share/logstash/malcolm-pipelines"

# runtime pipeliens configuration file
# runtime pipelines configuration file
export PIPELINES_CFG="/usr/share/logstash/config/pipelines.yml"

# for each pipeline in /usr/share/logstash/malcolm-pipelines, append the contents of this file to the dynamically-generated