Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

MINIFICPP-1691: PutSplunkHTTP and QuerySplunkIndexingStatus #1219

Closed
wants to merge 17 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ jobs:
run: |
PATH %PATH%;C:\Program Files (x86)\Windows Kits\10\bin\10.0.19041.0\x64
PATH %PATH%;C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\MSBuild\Current\Bin\Roslyn
win_build_vs.bat ..\b /64 /CI /S /A /PDH /K /L /R /Z /N /RO
win_build_vs.bat ..\b /64 /CI /S /A /PDH /SPLUNK /K /L /R /Z /N /RO
shell: cmd
- name: test
run: cd ..\b && ctest --timeout 300 --parallel 8 -C Release --output-on-failure
Expand Down Expand Up @@ -201,7 +201,7 @@ jobs:
if [ -d ~/.ccache ]; then mv ~/.ccache .; fi
mkdir build
cd build
cmake -DUSE_SHARED_LIBS= -DSTRICT_GSL_CHECKS=AUDIT -DENABLE_JNI=OFF -DDISABLE_JEMALLOC=ON -DENABLE_AWS=ON -DENABLE_LIBRDKAFKA=ON -DENABLE_MQTT=ON -DENABLE_AZURE=ON -DENABLE_SQL=ON -DENABLE_OPC=ON -DDOCKER_BUILD_ONLY=ON -DDOCKER_CCACHE_DUMP_LOCATION=$HOME/.ccache ..
cmake -DUSE_SHARED_LIBS= -DSTRICT_GSL_CHECKS=AUDIT -DENABLE_JNI=OFF -DDISABLE_JEMALLOC=ON -DENABLE_AWS=ON -DENABLE_LIBRDKAFKA=ON -DENABLE_MQTT=ON -DENABLE_AZURE=ON -DENABLE_SQL=ON -DENABLE_SPLUNK=ON -DENABLE_OPC=ON -DDOCKER_BUILD_ONLY=ON -DDOCKER_CCACHE_DUMP_LOCATION=$HOME/.ccache ..
make docker
- id: install_deps
run: |
Expand Down
6 changes: 6 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,7 @@ option(ENABLE_SFTP "Enables SFTP support." OFF)
option(ENABLE_OPENWSMAN "Enables the Openwsman extensions." OFF)
option(ENABLE_AZURE "Enables Azure support." OFF)
option(ENABLE_ENCRYPT_CONFIG "Enables build of encrypt-config binary." ON)
# Matches the phrasing of the sibling options above ("Enables ... support.")
option(ENABLE_SPLUNK "Enables Splunk support." OFF)
option(DOCKER_BUILD_ONLY "Disables all targets except docker build scripts. Ideal for systems without an up-to-date compiler." OFF)

## Keep all option definitions above this line
Expand Down Expand Up @@ -598,6 +599,11 @@ if (ENABLE_SYSTEMD)
createExtension(SYSTEMD-EXTENSIONS "SYSTEMD EXTENSIONS" "Enabled log collection from journald" "extensions/systemd" "extensions/systemd/tests")
endif()

## Add the splunk extension
if (ENABLE_ALL OR ENABLE_SPLUNK)
createExtension(SPLUNK-EXTENSIONS "SPLUNK EXTENSIONS" "This enables Splunk support" "extensions/splunk" "extensions/splunk/tests")
endif()

## NOW WE CAN ADD LIBRARIES AND EXTENSIONS TO MAIN
add_subdirectory(main)

Expand Down
105 changes: 105 additions & 0 deletions PROCESSORS.md

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ Through JNI extensions you can run NiFi processors using NARs. The JNI extension
| Sensors | GetEnvironmentalSensors<br/>GetMovementSensors | -DENABLE_SENSORS=ON |
| SFTP | [FetchSFTP](PROCESSORS.md#fetchsftp)<br/>[ListSFTP](PROCESSORS.md#listsftp)<br/>[PutSFTP](PROCESSORS.md#putsftp) | -DENABLE_SFTP=ON |
| SQL | [ExecuteSQL](PROCESSORS.md#executesql)<br/>[PutSQL](PROCESSORS.md#putsql)<br/>[QueryDatabaseTable](PROCESSORS.md#querydatabasetable)<br/> | -DENABLE_SQL=ON |
| Splunk | [PutSplunkHTTP](PROCESSORS.md#putsplunkhttp)<br/>[QuerySplunkIndexingStatus](PROCESSORS.md#querysplunkindexingstatus)| -DENABLE_SPLUNK=ON |
| Systemd | [ConsumeJournald](PROCESSORS.md#consumejournald) | -DENABLE_SYSTEMD=ON |
| Tensorflow | TFApplyGraph<br/>TFConvertImageToTensor<br/>TFExtractTopLabels<br/> | -DENABLE_TENSORFLOW=ON |
| USB Camera | [GetUSBCamera](PROCESSORS.md#getusbcamera) | -DENABLE_USB_CAMERA=ON |
Expand Down
2 changes: 2 additions & 0 deletions bootstrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -332,6 +332,8 @@ add_disabled_option SYSTEMD_ENABLED ${TRUE} "ENABLE_SYSTEMD"
add_disabled_option NANOFI_ENABLED ${FALSE} "ENABLE_NANOFI"
set_dependency PYTHON_ENABLED NANOFI_ENABLED

add_disabled_option SPLUNK_ENABLED ${FALSE} "ENABLE_SPLUNK"

USE_SHARED_LIBS=${TRUE}
ASAN_ENABLED=${FALSE}
FAIL_ON_WARNINGS=${FALSE}
Expand Down
6 changes: 4 additions & 2 deletions bstrp_functions.sh
Original file line number Diff line number Diff line change
Expand Up @@ -391,6 +391,7 @@ show_supported_features() {
echo "X. Azure Support ...............$(print_feature_status AZURE_ENABLED)"
echo "Y. Systemd Support .............$(print_feature_status SYSTEMD_ENABLED)"
echo "Z. NanoFi Support ..............$(print_feature_status NANOFI_ENABLED)"
echo "AA. Splunk Support .............$(print_feature_status SPLUNK_ENABLED)"
echo "****************************************"
echo " Build Options."
echo "****************************************"
Expand All @@ -413,7 +414,7 @@ show_supported_features() {

read_feature_options(){
local choice
echo -n "Enter choice [ A - Z or 1-7 ] "
echo -n "Enter choice [ A - Z or AA or 1-7] "
read -r choice
choice=$(echo "${choice}" | tr '[:upper:]' '[:lower:]')
case $choice in
Expand Down Expand Up @@ -445,6 +446,7 @@ read_feature_options(){
x) ToggleFeature AZURE_ENABLED ;;
y) ToggleFeature SYSTEMD_ENABLED ;;
z) ToggleFeature NANOFI_ENABLED ;;
aa) ToggleFeature SPLUNK_ENABLED ;;
adamdebreceni marked this conversation as resolved.
Show resolved Hide resolved
1) ToggleFeature TESTS_ENABLED ;;
2) EnableAllFeatures ;;
3) ToggleFeature JNI_ENABLED;;
Expand All @@ -463,7 +465,7 @@ read_feature_options(){
fi
;;
q) exit 0;;
*) echo -e "${RED}Please enter an option A-Z or 1-7...${NO_COLOR}" && sleep 2
*) echo -e "${RED}Please enter an option A-Z or AA or 1-7...${NO_COLOR}" && sleep 2
esac
}

1 change: 1 addition & 0 deletions cmake/DockerConfig.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ add_custom_target(
-c ENABLE_AZURE=${ENABLE_AZURE}
-c ENABLE_ENCRYPT_CONFIG=${ENABLE_ENCRYPT_CONFIG}
-c ENABLE_NANOFI=${ENABLE_NANOFI}
-c ENABLE_SPLUNK=${ENABLE_SPLUNK}
-c DISABLE_CURL=${DISABLE_CURL}
-c DISABLE_JEMALLOC=${DISABLE_JEMALLOC}
-c DISABLE_CIVET=${DISABLE_CIVET}
Expand Down
3 changes: 2 additions & 1 deletion docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ ARG ENABLE_OPENWSMAN=OFF
ARG ENABLE_AZURE=OFF
ARG ENABLE_ENCRYPT_CONFIG=ON
ARG ENABLE_NANOFI=OFF
ARG ENABLE_SPLUNK=OFF
ARG DISABLE_CURL=OFF
ARG DISABLE_JEMALLOC=ON
ARG DISABLE_CIVET=OFF
Expand Down Expand Up @@ -114,7 +115,7 @@ RUN cmake -DSTATIC_BUILD= -DSKIP_TESTS=true -DENABLE_ALL="${ENABLE_ALL}" -DENABL
-DENABLE_LIBRDKAFKA="${ENABLE_LIBRDKAFKA}" -DENABLE_SENSORS="${ENABLE_SENSORS}" -DENABLE_USB_CAMERA="${ENABLE_USB_CAMERA}" \
-DENABLE_TENSORFLOW="${ENABLE_TENSORFLOW}" -DENABLE_AWS="${ENABLE_AWS}" -DENABLE_BUSTACHE="${ENABLE_BUSTACHE}" -DENABLE_SFTP="${ENABLE_SFTP}" \
-DENABLE_OPENWSMAN="${ENABLE_OPENWSMAN}" -DENABLE_AZURE="${ENABLE_AZURE}" -DENABLE_NANOFI=${ENABLE_NANOFI} -DENABLE_SYSTEMD=OFF \
szaszm marked this conversation as resolved.
Show resolved Hide resolved
-DDISABLE_CURL="${DISABLE_CURL}" -DDISABLE_JEMALLOC="${DISABLE_JEMALLOC}" -DDISABLE_CIVET="${DISABLE_CIVET}" \
-DDISABLE_CURL="${DISABLE_CURL}" -DDISABLE_JEMALLOC="${DISABLE_JEMALLOC}" -DDISABLE_CIVET="${DISABLE_CIVET}" -DENABLE_SPLUNK=${ENABLE_SPLUNK} \
-DDISABLE_EXPRESSION_LANGUAGE="${DISABLE_EXPRESSION_LANGUAGE}" -DDISABLE_ROCKSDB="${DISABLE_ROCKSDB}" \
-DDISABLE_LIBARCHIVE="${DISABLE_LIBARCHIVE}" -DDISABLE_LZMA="${DISABLE_LZMA}" -DDISABLE_BZIP2="${DISABLE_BZIP2}" \
-DENABLE_SCRIPTING="${ENABLE_SCRIPTING}" -DDISABLE_PYTHON_SCRIPTING="${DISABLE_PYTHON_SCRIPTING}" -DDISABLE_CONTROLLER="${DISABLE_CONTROLLER}" \
Expand Down
1 change: 1 addition & 0 deletions docker/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@ confluent-kafka==1.7.0
PyYAML==5.4.1
m2crypto==0.37.1
watchdog==2.1.2
pyopenssl==21.0.0
12 changes: 12 additions & 0 deletions docker/test/integration/MiNiFi_integration_test_driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,12 @@ def start_kafka_broker(self):
self.cluster.deploy('kafka-broker')
assert self.wait_for_container_startup_to_finish('kafka-broker')

def start_splunk(self):
    # Bring up the Splunk container, wait until its startup completes, then
    # enable indexer acknowledgements on the HEC token used by the tests.
    container_name = 'splunk'
    self.cluster.acquire_container(container_name, 'splunk')
    self.cluster.deploy(container_name)
    assert self.wait_for_container_startup_to_finish(container_name)
    assert self.cluster.enable_splunk_hec_indexer(container_name, 'splunk_hec_token')

def start(self):
logging.info("MiNiFi_integration_test start")
self.cluster.deploy_flow()
Expand Down Expand Up @@ -191,6 +197,12 @@ def check_azure_storage_server_data(self, azure_container_name, object_data):
def wait_for_kafka_consumer_to_be_registered(self, kafka_container_name):
assert self.cluster.wait_for_kafka_consumer_to_be_registered(kafka_container_name)

def check_splunk_event(self, splunk_container_name, query):
    # Delegate to the cluster; fail the test if the event was not indexed.
    event_found = self.cluster.check_splunk_event(splunk_container_name, query)
    assert event_found

def check_splunk_event_with_attributes(self, splunk_container_name, query, attributes):
    # Delegate to the cluster; fail the test if no event matches the
    # expected attributes (host/source/sourcetype/index).
    matched = self.cluster.check_splunk_event_with_attributes(splunk_container_name, query, attributes)
    assert matched

def check_minifi_log_contents(self, line, timeout_seconds=60):
self.check_container_log_contents("minifi-cpp", line, timeout_seconds)

Expand Down
50 changes: 50 additions & 0 deletions docker/test/integration/features/splunk.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
Feature: Sending data to Splunk HEC using PutSplunkHTTP

Background:
Given the content of "/tmp/output" is monitored

Scenario: A MiNiFi instance transfers data to a Splunk HEC
lordgamez marked this conversation as resolved.
Show resolved Hide resolved
Given a Splunk HEC is set up and running
And a GetFile processor with the "Input Directory" property set to "/tmp/input"
And a file with the content "foobar" is present in "/tmp/input"
And a PutSplunkHTTP processor set up to communicate with the Splunk HEC instance
And a QuerySplunkIndexingStatus processor set up to communicate with the Splunk HEC instance
And the "Splunk Request Channel" properties of the PutSplunkHTTP and QuerySplunkIndexingStatus processors are set to the same random guid
And the "Source" property of the PutSplunkHTTP processor is set to "my-source"
And the "Source Type" property of the PutSplunkHTTP processor is set to "my-source-type"
And the "Host" property of the PutSplunkHTTP processor is set to "my-host"
And a PutFile processor with the "Directory" property set to "/tmp/output"
And the "success" relationship of the GetFile processor is connected to the PutSplunkHTTP
And the "success" relationship of the PutSplunkHTTP processor is connected to the QuerySplunkIndexingStatus
And the "undetermined" relationship of the QuerySplunkIndexingStatus processor is connected to the QuerySplunkIndexingStatus
And the "acknowledged" relationship of the QuerySplunkIndexingStatus processor is connected to the PutFile
And the "Hostname" property of the PutSplunkHTTP processor is set to "http://splunk"
And the "Hostname" property of the QuerySplunkIndexingStatus processor is set to "http://splunk"

When both instances start up
Then a flowfile with the content "foobar" is placed in the monitored directory in less than 20 seconds
And an event is registered in Splunk HEC with the content "foobar" with "my-source" set as source and "my-source-type" set as sourcetype and "my-host" set as host


Scenario: A MiNiFi instance transfers data to a Splunk HEC with SSL enabled
Given a Splunk HEC is set up and running
And a GetFile processor with the "Input Directory" property set to "/tmp/input"
And a file with the content "foobar" is present in "/tmp/input"
And a PutSplunkHTTP processor set up to communicate with the Splunk HEC instance
And a QuerySplunkIndexingStatus processor set up to communicate with the Splunk HEC instance
And the "Splunk Request Channel" properties of the PutSplunkHTTP and QuerySplunkIndexingStatus processors are set to the same random guid
And the "Source" property of the PutSplunkHTTP processor is set to "my-source"
And the "Source Type" property of the PutSplunkHTTP processor is set to "my-source-type"
And the "Host" property of the PutSplunkHTTP processor is set to "my-host"
And a PutFile processor with the "Directory" property set to "/tmp/output"
And the "success" relationship of the GetFile processor is connected to the PutSplunkHTTP
And the "success" relationship of the PutSplunkHTTP processor is connected to the QuerySplunkIndexingStatus
And the "undetermined" relationship of the QuerySplunkIndexingStatus processor is connected to the QuerySplunkIndexingStatus
And the "acknowledged" relationship of the QuerySplunkIndexingStatus processor is connected to the PutFile
And SSL is enabled for the Splunk HEC and the SSL context service is set up for PutSplunkHTTP and QuerySplunkIndexingStatus
And the "Hostname" property of the PutSplunkHTTP processor is set to "https://splunk"
And the "Hostname" property of the QuerySplunkIndexingStatus processor is set to "https://splunk"

When both instances start up
Then a flowfile with the content "foobar" is placed in the monitored directory in less than 20 seconds
And an event is registered in Splunk HEC with the content "foobar" with "my-source" set as source and "my-source-type" set as sourcetype and "my-host" set as host
71 changes: 71 additions & 0 deletions docker/test/integration/minifi/core/DockerTestCluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
import time
import os
import re
import tarfile
import io
import tempfile

from .SingleNodeDockerCluster import SingleNodeDockerCluster
from .utils import retry_check
Expand Down Expand Up @@ -135,6 +138,63 @@ def is_s3_bucket_empty(self, container_name):
ls_result = output.decode(self.get_stdout_encoding())
return code == 0 and not ls_result

@retry_check()
def check_splunk_event(self, container_name, query):
    # Run a Splunk search inside the container and report whether the query
    # string appears in the search output. Wrapped in @retry_check because
    # indexing is asynchronous and results may lag the ingest.
    search_command = ["sudo", "/opt/splunk/bin/splunk", "search", query,
                      "-auth", "admin:splunkadmin"]
    (code, output) = self.client.containers.get(container_name).exec_run(search_command)
    return code == 0 and query in output.decode("utf-8")

@retry_check()
def check_splunk_event_with_attributes(self, container_name, query, attributes):
    """Search Splunk inside the container and return True if any result row
    matches every attribute given in `attributes`.

    Only the keys "host", "source", "sourcetype" and "index" are compared;
    other keys in `attributes` are ignored (same as the original per-key
    checks). Wrapped in @retry_check because indexing is asynchronous.
    """
    (code, output) = self.client.containers.get(container_name).exec_run(
        ["sudo", "/opt/splunk/bin/splunk", "search", query,
         "-output", "json", "-auth", "admin:splunkadmin"])
    if code != 0:
        return False
    # The CLI emits one JSON document per line; lines without a "result"
    # key are status noise and are skipped.
    # NOTE(review): relies on a module-level `import json` that is outside
    # this diff hunk — confirm it exists in the file header.
    for result_line in output.decode("utf-8").splitlines():
        result_line_json = json.loads(result_line)
        if "result" not in result_line_json:
            continue
        result = result_line_json["result"]
        # De-duplicated form of the original four copy-pasted comparisons.
        if all(result[key] == attributes[key]
               for key in ("host", "source", "sourcetype", "index")
               if key in attributes):
            return True
    return False

def enable_splunk_hec_indexer(self, container_name, hec_name):
    # Turn on indexer acknowledgements ("-use-ack 1") for the named HEC
    # token and make sure the token is enabled; True on CLI success.
    splunk_cli = [
        "sudo", "/opt/splunk/bin/splunk", "http-event-collector",
        "update", hec_name,
        "-uri", "https://localhost:8089",
        "-use-ack", "1",
        "-disabled", "0",
        "-auth", "admin:splunkadmin",
    ]
    (code, output) = self.client.containers.get(container_name).exec_run(splunk_cli)
    return code == 0

def enable_splunk_hec_ssl(self, container_name, splunk_cert_pem, splunk_key_pem, root_ca_cert_pem):
    # Splunk's server certificate file is the cert, its private key and the
    # CA chain concatenated into a single PEM, which is what we upload here.
    server_cert_bundle = splunk_cert_pem.decode() + splunk_key_pem.decode() + root_ca_cert_pem.decode()
    assert self.write_content_to_container(server_cert_bundle, dst=container_name + ':/opt/splunk/etc/auth/splunk_cert.pem')
    assert self.write_content_to_container(root_ca_cert_pem.decode(), dst=container_name + ':/opt/splunk/etc/auth/root_ca.pem')
    # NOTE(review): no HEC token name is passed to "update", so this appears
    # to change the global HEC SSL settings rather than a single token —
    # confirm that is intended.
    splunk_cli = [
        "sudo", "/opt/splunk/bin/splunk", "http-event-collector",
        "update",
        "-uri", "https://localhost:8089",
        "-enable-ssl", "1",
        "-server-cert", "/opt/splunk/etc/auth/splunk_cert.pem",
        "-ca-cert-file", "/opt/splunk/etc/auth/root_ca.pem",
        "-require-client-cert", "1",
        "-auth", "admin:splunkadmin",
    ]
    (code, output) = self.client.containers.get(container_name).exec_run(splunk_cli)
    return code == 0

def query_postgres_server(self, postgresql_container_name, query, number_of_rows):
(code, output) = self.client.containers.get(postgresql_container_name).exec_run(["psql", "-U", "postgres", "-c", query])
output = output.decode(self.get_stdout_encoding())
Expand All @@ -153,3 +213,14 @@ def segfault_happened(self):

def wait_for_kafka_consumer_to_be_registered(self, kafka_container_name):
return self.wait_for_app_logs(kafka_container_name, "Assignment received from leader for group docker_test_group", 60)

def write_content_to_container(self, content, dst):
    """Copy `content` (a str) into a running container.

    `dst` has the form "container_name:/absolute/path/to/file". The content
    is packed into a single-file tar archive and uploaded with Docker's
    put_archive; its return value (truthy on success) is passed through.
    """
    container_name, dst_path = dst.split(':')
    container = self.client.containers.get(container_name)
    # Encode once and size the tar header from the BYTE count. The original
    # used len(content) (character count) while writing UTF-8 bytes, which
    # truncates/corrupts the archive for any non-ASCII content.
    payload = content.encode('utf-8')
    with tempfile.TemporaryDirectory() as td:
        tar_path = os.path.join(td, 'content.tar')
        with tarfile.open(tar_path, mode='w') as tar:
            info = tarfile.TarInfo(name=os.path.basename(dst_path))
            info.size = len(payload)
            tar.addfile(info, io.BytesIO(payload))
        with open(tar_path, 'rb') as data:
            return container.put_archive(os.path.dirname(dst_path), data.read())
5 changes: 5 additions & 0 deletions docker/test/integration/minifi/core/ImageStore.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@ def get_image(self, container_engine):
image = self.__build_kafka_broker_image()
elif container_engine == "mqtt-broker":
image = self.__build_mqtt_broker_image()
elif container_engine == "splunk":
image = self.__build_splunk_image()
else:
raise Exception("There is no associated image for " + container_engine)

Expand Down Expand Up @@ -132,6 +134,9 @@ def __build_mqtt_broker_image(self):

return self.__build_image(dockerfile)

def __build_splunk_image(self):
    # Build the Splunk HEC test image from the checked-in resource directory.
    dockerfile_dir = self.test_dir + "/resources/splunk-hec"
    return self.__build_image_by_path(dockerfile_dir, 'minifi-splunk')

def __build_image(self, dockerfile, context_files=[]):
conf_dockerfile_buffer = BytesIO()
docker_context_buffer = BytesIO()
Expand Down
Loading