From 12dd633d62b14f570eec03fd3457254b18b49827 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 2 Oct 2023 15:41:29 -0700 Subject: [PATCH 01/39] add mapping for indices storing threat intel feed data Signed-off-by: Surya Sashank Nistala --- .../mappings/threat_intel_feed_mapping.json | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 src/main/resources/mappings/threat_intel_feed_mapping.json diff --git a/src/main/resources/mappings/threat_intel_feed_mapping.json b/src/main/resources/mappings/threat_intel_feed_mapping.json new file mode 100644 index 000000000..e083a5e84 --- /dev/null +++ b/src/main/resources/mappings/threat_intel_feed_mapping.json @@ -0,0 +1,26 @@ +{ + "dynamic": "strict", + "_meta" : { + "schema_version": 1 + }, + "properties": { + "schema_version": { + "type": "integer" + }, + "ioc_type": { + "type": "keyword" + }, + "ioc_value": { + "type": "keyword" + }, + "feed_id": { + "type": "keyword" + }, + "index": { + "type": "keyword" + }, + "timestamp": { + "type": "long" + } + } +} From a0fd6bd21c7d92aea41e1e885c2f4a897495ee14 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 2 Oct 2023 15:49:18 -0700 Subject: [PATCH 02/39] fix feed indices mapping Signed-off-by: Surya Sashank Nistala --- src/main/resources/mappings/threat_intel_feed_mapping.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/main/resources/mappings/threat_intel_feed_mapping.json b/src/main/resources/mappings/threat_intel_feed_mapping.json index e083a5e84..9a20a76ac 100644 --- a/src/main/resources/mappings/threat_intel_feed_mapping.json +++ b/src/main/resources/mappings/threat_intel_feed_mapping.json @@ -16,9 +16,6 @@ "feed_id": { "type": "keyword" }, - "index": { - "type": "keyword" - }, "timestamp": { "type": "long" } From d7ee5657aed83d5dac6eaef67c2c4fc8273c4e08 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 2 Oct 2023 18:30:32 -0700 Subject: [PATCH 03/39] add threat intel feed data dao Signed-off-by: 
Surya Sashank Nistala --- .../SecurityAnalyticsPlugin.java | 4 +- .../model/ThreatIntelFeedData.java | 159 ++++++++++++++++++ .../mappings/threat_intel_feed_mapping.json | 6 +- .../securityanalytics/TestHelpers.java | 16 ++ .../model/XContentTests.java | 10 ++ 5 files changed, 193 insertions(+), 2 deletions(-) create mode 100644 src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 2c60321df..725593ad9 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -60,6 +60,7 @@ import org.opensearch.securityanalytics.mapper.IndexTemplateManager; import org.opensearch.securityanalytics.mapper.MapperService; import org.opensearch.securityanalytics.model.CustomLogType; +import org.opensearch.securityanalytics.model.ThreatIntelFeedData; import org.opensearch.securityanalytics.resthandler.*; import org.opensearch.securityanalytics.transport.*; import org.opensearch.securityanalytics.model.Rule; @@ -193,7 +194,8 @@ public List getNamedXContent() { Detector.XCONTENT_REGISTRY, DetectorInput.XCONTENT_REGISTRY, Rule.XCONTENT_REGISTRY, - CustomLogType.XCONTENT_REGISTRY + CustomLogType.XCONTENT_REGISTRY, + ThreatIntelFeedData.XCONTENT_REGISTRY ); } diff --git a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java new file mode 100644 index 000000000..1870f383a --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java @@ -0,0 +1,159 @@ +package org.opensearch.securityanalytics.model; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.core.ParseField; +import 
org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParserUtils; + +import java.io.IOException; +import java.time.Instant; +import java.util.Locale; +import java.util.Objects; + +/** + * Model for threat intel feed data stored in system index. + */ +public class ThreatIntelFeedData implements Writeable, ToXContentObject { + private static final Logger log = LogManager.getLogger(ThreatIntelFeedData.class); + private static final String FEED_TYPE = "feed"; + private static final String TYPE_FIELD = "type"; + private static final String IOC_TYPE_FIELD = "ioc_type"; + private static final String IOC_VALUE_FIELD = "ioc_value"; + private static final String FEED_ID_FIELD = "feed_id"; + private static final String TIMESTAMP_FIELD = "timestamp"; + + public static final NamedXContentRegistry.Entry XCONTENT_REGISTRY = new NamedXContentRegistry.Entry( + ThreatIntelFeedData.class, + new ParseField(FEED_TYPE), + xcp -> parse(xcp, null, null) + ); + + private final String iocType; + private final String iocValue; + private final String feedId; + private final Instant timestamp; + private final String type; + + public ThreatIntelFeedData(String iocType, String iocValue, String feedId, Instant timestamp) { + this.type = FEED_TYPE; + + this.iocType = iocType; + this.iocValue = iocValue; + this.feedId = feedId; + this.timestamp = timestamp; + } + + public static ThreatIntelFeedData parse(XContentParser xcp, String id, Long version) throws IOException { + String iocType = null; + String iocValue = null; + String feedId = null; + Instant timestamp = null; + + 
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case IOC_TYPE_FIELD: + iocType = xcp.text(); + break; + case IOC_VALUE_FIELD: + iocValue = xcp.text(); + break; + case FEED_ID_FIELD: + feedId = xcp.text(); + break; + case TIMESTAMP_FIELD: + if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + timestamp = null; + } else if (xcp.currentToken().isValue()) { + timestamp = Instant.ofEpochMilli(xcp.longValue()); + } else { + XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.getTokenLocation()); + timestamp = null; + } + break; + default: + xcp.skipChildren(); + } + } + return new ThreatIntelFeedData(iocType, iocValue, feedId, timestamp); + } + + public String getIocType() { + return iocType; + } + + public String getIocValue() { + return iocValue; + } + + public String getFeedId() { + return feedId; + } + + public Instant getTimestamp() { + return timestamp; + } + + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(iocType); + out.writeString(iocValue); + out.writeString(feedId); + out.writeInstant(timestamp); + } + + public ThreatIntelFeedData(StreamInput sin) throws IOException { + this( + sin.readString(), + sin.readString(), + sin.readString(), + sin.readInstant() + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return createXContentBuilder(builder, params); + } + + private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + if (params.paramAsBoolean("with_type", false)) { + builder.startObject(type); + } + builder.field(TYPE_FIELD, type); + builder + .field(IOC_TYPE_FIELD, iocType) + .field(IOC_VALUE_FIELD, iocValue) + .field(FEED_ID_FIELD, feedId) + 
.timeField(TIMESTAMP_FIELD, String.format(Locale.getDefault(), "%s_in_millis", TIMESTAMP_FIELD), timestamp.toEpochMilli()); + + return builder.endObject(); + } + + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ThreatIntelFeedData tif = (ThreatIntelFeedData) o; + return Objects.equals(iocType, tif.iocType) && Objects.equals(iocValue, tif.iocValue) && Objects.equals(feedId, tif.feedId); + } + + @Override + public int hashCode() { + return Objects.hash(); + } +} diff --git a/src/main/resources/mappings/threat_intel_feed_mapping.json b/src/main/resources/mappings/threat_intel_feed_mapping.json index 9a20a76ac..2e775cf8e 100644 --- a/src/main/resources/mappings/threat_intel_feed_mapping.json +++ b/src/main/resources/mappings/threat_intel_feed_mapping.json @@ -17,7 +17,11 @@ "type": "keyword" }, "timestamp": { - "type": "long" + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "type": { + "type": "keyword" } } } diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index dde7efbb5..98ef6f21f 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -28,6 +28,7 @@ import org.opensearch.securityanalytics.model.DetectorInput; import org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.model.ThreatIntelFeedData; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.rest.OpenSearchRestTestCase; @@ -168,6 +169,15 @@ public static CustomLogType randomCustomLogType(String name, String description, return new CustomLogType(null, null, name, description, category, source, null); } + public static ThreatIntelFeedData randomThreatIntelFeedData() { + return new 
ThreatIntelFeedData( + "IP_ADDRESS", + "123.442.111.112", + OpenSearchRestTestCase.randomAlphaOfLength(10), + Instant.now() + ); + } + public static Detector randomDetectorWithNoUser() { String name = OpenSearchRestTestCase.randomAlphaOfLength(10); String detectorType = randomDetectorType(); @@ -429,6 +439,12 @@ public static String toJsonStringWithUser(Detector detector) throws IOException return BytesReference.bytes(builder).utf8ToString(); } + public static String toJsonString(ThreatIntelFeedData threatIntelFeedData) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder = threatIntelFeedData.toXContent(builder, ToXContent.EMPTY_PARAMS); + return BytesReference.bytes(builder).utf8ToString(); + } + public static User randomUser() { return new User( OpenSearchRestTestCase.randomAlphaOfLength(10), diff --git a/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java b/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java index f2ec8c5cc..89f447440 100644 --- a/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java +++ b/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java @@ -17,8 +17,10 @@ import static org.opensearch.securityanalytics.TestHelpers.parser; import static org.opensearch.securityanalytics.TestHelpers.randomDetector; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithNoUser; +import static org.opensearch.securityanalytics.TestHelpers.randomThreatIntelFeedData; import static org.opensearch.securityanalytics.TestHelpers.randomUser; import static org.opensearch.securityanalytics.TestHelpers.randomUserEmpty; +import static org.opensearch.securityanalytics.TestHelpers.toJsonString; import static org.opensearch.securityanalytics.TestHelpers.toJsonStringWithUser; public class XContentTests extends OpenSearchTestCase { @@ -193,4 +195,12 @@ public void testDetectorParsingWithNoUser() throws IOException { Detector parsedDetector = 
Detector.parse(parser(detectorString), null, null); Assert.assertEquals("Round tripping Detector doesn't work", detector, parsedDetector); } + + public void testThreatIntelFeedParsing() throws IOException { + ThreatIntelFeedData tifd = randomThreatIntelFeedData(); + + String tifdString = toJsonString(tifd); + ThreatIntelFeedData parsedTifd = ThreatIntelFeedData.parse(parser(tifdString), null, null); + Assert.assertEquals("Round tripping Threat intel feed data model doesn't work", tifd, parsedTifd); + } } \ No newline at end of file From 57c0faa3481fb83ed445f79af95fde0833f82c68 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 2 Oct 2023 23:43:25 -0700 Subject: [PATCH 04/39] add threatIntelEnabled field in detector. Signed-off-by: Surya Sashank Nistala --- .../action/GetDetectorResponse.java | 1 + .../action/IndexDetectorResponse.java | 1 + .../securityanalytics/model/Detector.java | 27 ++++++++++++++----- src/main/resources/mappings/detectors.json | 3 +++ .../securityanalytics/TestHelpers.java | 5 ++-- .../action/IndexDetectorResponseTests.java | 4 ++- .../alerts/AlertingServiceTests.java | 6 +++-- .../findings/FindingServiceTests.java | 6 +++-- 8 files changed, 39 insertions(+), 14 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java index 3e4fc68d1..0d700b88c 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java @@ -68,6 +68,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws .field(Detector.INPUTS_FIELD, detector.getInputs()) .field(Detector.LAST_UPDATE_TIME_FIELD, detector.getLastUpdateTime()) .field(Detector.ENABLED_TIME_FIELD, detector.getEnabledTime()) + .field(Detector.THREAT_INTEL_ENABLED_FIELD, detector.getThreatIntelEnabled()) .endObject(); return 
builder.endObject(); } diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java b/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java index 6a7c268c1..67fe36f0b 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java @@ -64,6 +64,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws .field(Detector.TRIGGERS_FIELD, detector.getTriggers()) .field(Detector.LAST_UPDATE_TIME_FIELD, detector.getLastUpdateTime()) .field(Detector.ENABLED_TIME_FIELD, detector.getEnabledTime()) + .field(Detector.THREAT_INTEL_ENABLED_FIELD, detector.getThreatIntelEnabled()) .endObject(); return builder.endObject(); } diff --git a/src/main/java/org/opensearch/securityanalytics/model/Detector.java b/src/main/java/org/opensearch/securityanalytics/model/Detector.java index ff832d1e7..65e4d18be 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/Detector.java +++ b/src/main/java/org/opensearch/securityanalytics/model/Detector.java @@ -25,14 +25,11 @@ import java.io.IOException; import java.time.Instant; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Objects; -import java.util.stream.Collectors; - public class Detector implements Writeable, ToXContentObject { private static final Logger log = LogManager.getLogger(Detector.class); @@ -51,6 +48,7 @@ public class Detector implements Writeable, ToXContentObject { public static final String TRIGGERS_FIELD = "triggers"; public static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; public static final String ENABLED_TIME_FIELD = "enabled_time"; + public static final String THREAT_INTEL_ENABLED_FIELD = "threat_intel_enabled"; public static final String ALERTING_MONITOR_ID = "monitor_id"; public static final String 
ALERTING_WORKFLOW_ID = "workflow_ids"; @@ -118,11 +116,14 @@ public class Detector implements Writeable, ToXContentObject { private final String type; + private final Boolean threatIntelEnabled; + public Detector(String id, Long version, String name, Boolean enabled, Schedule schedule, Instant lastUpdateTime, Instant enabledTime, String logType, User user, List inputs, List triggers, List monitorIds, String ruleIndex, String alertsIndex, String alertsHistoryIndex, String alertsHistoryIndexPattern, - String findingsIndex, String findingsIndexPattern, Map rulePerMonitor, List workflowIds) { + String findingsIndex, String findingsIndexPattern, Map rulePerMonitor, + List workflowIds, Boolean threatIntelEnabled) { this.type = DETECTOR_TYPE; this.id = id != null ? id : NO_ID; @@ -145,6 +146,7 @@ public Detector(String id, Long version, String name, Boolean enabled, Schedule this.ruleIdMonitorIdMap = rulePerMonitor; this.logType = logType; this.workflowIds = workflowIds != null ? workflowIds : null; + this.threatIntelEnabled = threatIntelEnabled != null && threatIntelEnabled; if (enabled) { Objects.requireNonNull(enabledTime); @@ -172,7 +174,8 @@ public Detector(StreamInput sin) throws IOException { sin.readString(), sin.readString(), sin.readMap(StreamInput::readString, StreamInput::readString), - sin.readStringList() + sin.readStringList(), + sin.readOptionalBoolean() ); } @@ -211,6 +214,7 @@ public void writeTo(StreamOutput out) throws IOException { if (workflowIds != null) { out.writeStringCollection(workflowIds); } + out.writeOptionalBoolean(threatIntelEnabled); } public XContentBuilder toXContentWithUser(XContentBuilder builder, Params params) throws IOException { @@ -239,6 +243,7 @@ private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXConten } } + builder.field(THREAT_INTEL_ENABLED_FIELD, threatIntelEnabled); builder.field(ENABLED_FIELD, enabled); if (enabledTime == null) { @@ -280,7 +285,6 @@ private XContentBuilder 
createXContentBuilder(XContentBuilder builder, ToXConten builder.field(FINDINGS_INDEX, findingsIndex); builder.field(FINDINGS_INDEX_PATTERN, findingsIndexPattern); - if (params.paramAsBoolean("with_type", false)) { builder.endObject(); } @@ -327,6 +331,7 @@ public static Detector parse(XContentParser xcp, String id, Long version) throws String alertsHistoryIndexPattern = null; String findingsIndex = null; String findingsIndexPattern = null; + Boolean enableThreatIntel = false; XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { @@ -350,6 +355,9 @@ public static Detector parse(XContentParser xcp, String id, Long version) throws case ENABLED_FIELD: enabled = xcp.booleanValue(); break; + case THREAT_INTEL_ENABLED_FIELD: + enableThreatIntel = xcp.booleanValue(); + break; case SCHEDULE_FIELD: schedule = Schedule.parse(xcp); break; @@ -459,7 +467,8 @@ public static Detector parse(XContentParser xcp, String id, Long version) throws findingsIndex, findingsIndexPattern, rulePerMonitor, - workflowIds + workflowIds, + enableThreatIntel ); } @@ -612,6 +621,10 @@ public boolean isWorkflowSupported() { return workflowIds != null && !workflowIds.isEmpty(); } + public Boolean getThreatIntelEnabled() { + return threatIntelEnabled; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/src/main/resources/mappings/detectors.json b/src/main/resources/mappings/detectors.json index e1e160d5f..c4a42d53a 100644 --- a/src/main/resources/mappings/detectors.json +++ b/src/main/resources/mappings/detectors.json @@ -62,6 +62,9 @@ "enabled": { "type": "boolean" }, + "threat_intel_enabled": { + "type": "boolean" + }, "enabled_time": { "type": "date", "format": "strict_date_time||epoch_millis" diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index 
98ef6f21f..0679de1c7 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -150,7 +150,7 @@ public static Detector randomDetector(String name, DetectorTrigger trigger = new DetectorTrigger(null, "windows-trigger", "1", List.of(randomDetectorType()), List.of("QuarksPwDump Clearing Access History"), List.of("high"), List.of("T0008"), List.of()); triggers.add(trigger); } - return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, user, inputs, triggers, Collections.singletonList(""), "", "", "", "", "", "", Collections.emptyMap(), Collections.emptyList()); + return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, user, inputs, triggers, Collections.singletonList(""), "", "", "", "", "", "", Collections.emptyMap(), Collections.emptyList(), false); } public static CustomLogType randomCustomLogType(String name, String description, String category, String source) { @@ -207,7 +207,8 @@ public static Detector randomDetectorWithNoUser() { "", "", Collections.emptyMap(), - Collections.emptyList() + Collections.emptyList(), + false ); } diff --git a/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java b/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java index db366056b..ca98a1144 100644 --- a/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java +++ b/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java @@ -50,7 +50,8 @@ public void testIndexDetectorPostResponse() throws IOException { null, DetectorMonitorConfig.getFindingsIndex("others_application"), Collections.emptyMap(), - Collections.emptyList() + Collections.emptyList(), + false ); IndexDetectorResponse response = new IndexDetectorResponse("1234", 1L, RestStatus.OK, detector); Assert.assertNotNull(response); @@ 
-69,5 +70,6 @@ public void testIndexDetectorPostResponse() throws IOException { Assert.assertTrue(newResponse.getDetector().getMonitorIds().contains("1")); Assert.assertTrue(newResponse.getDetector().getMonitorIds().contains("2")); Assert.assertTrue(newResponse.getDetector().getMonitorIds().contains("3")); + Assert.assertFalse(newResponse.getDetector().getThreatIntelEnabled()); } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java b/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java index 78dacd6e1..d250d2eef 100644 --- a/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java @@ -65,7 +65,8 @@ public void testGetAlerts_success() { null, DetectorMonitorConfig.getFindingsIndex("others_application"), Collections.emptyMap(), - Collections.emptyList() + Collections.emptyList(), + false ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); @@ -242,7 +243,8 @@ public void testGetFindings_getFindingsByMonitorIdFailures() { null, DetectorMonitorConfig.getFindingsIndex("others_application"), Collections.emptyMap(), - Collections.emptyList() + Collections.emptyList(), + false ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); diff --git a/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java b/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java index 5c28ba65b..9e7a4d061 100644 --- a/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java @@ -65,7 +65,8 @@ public void testGetFindings_success() { null, DetectorMonitorConfig.getFindingsIndex("others_application"), 
Collections.emptyMap(), - Collections.emptyList() + Collections.emptyList(), + false ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); @@ -186,7 +187,8 @@ public void testGetFindings_getFindingsByMonitorIdFailure() { null, DetectorMonitorConfig.getFindingsIndex("others_application"), Collections.emptyMap(), - Collections.emptyList() + Collections.emptyList(), + false ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); From f0a8bedff7ed71c5fa3e3d02424867969b823102 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 3 Oct 2023 13:36:09 -0700 Subject: [PATCH 05/39] add threat intel feed service and searching feeds Signed-off-by: Surya Sashank Nistala --- .../ThreatIntelFeedDataService.java | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java new file mode 100644 index 000000000..60c4d7c66 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -0,0 +1,68 @@ +package org.opensearch.securityanalytics.threatIntel; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import 
org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.findings.FindingsService; +import org.opensearch.securityanalytics.model.ThreatIntelFeedData; +import org.opensearch.securityanalytics.util.IndexUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Service to handle CRUD operations on Threat Intel Feed Data + */ +public class ThreatIntelFeedDataService { + private static final Logger log = LogManager.getLogger(FindingsService.class); + + public void getThreatIntelFeedData(ClusterState state, Client client, IndexNameExpressionResolver indexNameExpressionResolver, + String feedName, String iocType, + ActionListener> listener, NamedXContentRegistry xContentRegistry) { + String indexPattern = String.format(".opendsearch-sap-threatintel-%s*", feedName); + String tifdIndex = IndexUtils.getNewIndexByCreationDate(state, indexNameExpressionResolver, indexPattern); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.query(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("ioc_type", iocType))); + SearchRequest searchRequest = new SearchRequest(tifdIndex); + searchRequest.source().size(9999); //TODO: convert to scroll + searchRequest.source(sourceBuilder); + client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(getTifdList(r, xContentRegistry)), e -> { + log.error(String.format( + "Failed to fetch threat intel feed data %s from system index %s", feedName, tifdIndex), e); + listener.onFailure(e); + })); + } + + private List getTifdList(SearchResponse searchResponse, NamedXContentRegistry xContentRegistry) { + List list = new ArrayList<>(); + if (searchResponse.getHits().getHits().length != 0) { + 
Arrays.stream(searchResponse.getHits().getHits()).forEach(hit -> { + try { + XContentParser xcp = XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() + ); + list.add(ThreatIntelFeedData.parse(xcp, hit.getId(), hit.getVersion())); + } catch (Exception e) { + log.error(() -> + new ParameterizedMessage("Failed to parse Threat intel feed data doc from hit {}", hit), e); + } + + }); + } + return list; + } +} From 45949266db6f55481971373babef195bd6a1571b Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 3 Oct 2023 14:55:11 -0700 Subject: [PATCH 06/39] ti feed data to doc level query convertor logic added Signed-off-by: Surya Sashank Nistala --- .../DetectorThreatIntelService.java | 39 +++++++++++++++++++ .../ThreatIntelFeedDataService.java | 4 +- .../TransportIndexDetectorAction.java | 3 ++ 3 files changed, 44 insertions(+), 2 deletions(-) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java new file mode 100644 index 000000000..604d4e983 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -0,0 +1,39 @@ +package org.opensearch.securityanalytics.threatIntel; + +import org.opensearch.commons.alerting.model.DocLevelQuery; +import org.opensearch.securityanalytics.model.ThreatIntelFeedData; + +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + + +public class DetectorThreatIntelService { + + /** Convert the feed data IOCs into query string query format to create doc level queries. 
*/ + public static DocLevelQuery createDocLevelQueryFromThreatIntelList( + List tifdList, String docLevelQueryId + ) { + Set iocs = tifdList.stream().map(ThreatIntelFeedData::getIocValue).collect(Collectors.toSet()); + String query = buildQueryStringQueryWithIocList(iocs); + return new DocLevelQuery( + docLevelQueryId,tifdList.get(0).getFeedId(), query, + Collections.singletonList("threat_intel") + ); + } + + private static String buildQueryStringQueryWithIocList(Set iocs) { + StringBuilder sb = new StringBuilder(); + + for(String ioc : iocs) { + if(sb.length() != 0) { + sb.append(" "); + } + sb.append("("); + sb.append(ioc); + sb.append(")"); + } + return sb.toString(); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 60c4d7c66..9c12fdef7 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -29,7 +29,7 @@ public class ThreatIntelFeedDataService { private static final Logger log = LogManager.getLogger(FindingsService.class); - public void getThreatIntelFeedData(ClusterState state, Client client, IndexNameExpressionResolver indexNameExpressionResolver, + public static void getThreatIntelFeedData(ClusterState state, Client client, IndexNameExpressionResolver indexNameExpressionResolver, String feedName, String iocType, ActionListener> listener, NamedXContentRegistry xContentRegistry) { String indexPattern = String.format(".opendsearch-sap-threatintel-%s*", feedName); @@ -46,7 +46,7 @@ public void getThreatIntelFeedData(ClusterState state, Client client, IndexNameE })); } - private List getTifdList(SearchResponse searchResponse, NamedXContentRegistry xContentRegistry) { + private static List getTifdList(SearchResponse searchResponse, NamedXContentRegistry 
xContentRegistry) { List list = new ArrayList<>(); if (searchResponse.getHits().getHits().length != 0) { Arrays.stream(searchResponse.getHits().getHits()).forEach(hit -> { diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index ae2afc1f3..d5863caf4 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ -648,6 +648,9 @@ private IndexMonitorRequest createDocLevelMonitorRequest(List DocLevelQuery docLevelQuery = new DocLevelQuery(id, name, Collections.emptyList(), actualQuery, tags); docLevelQueries.add(docLevelQuery); } + if(detector.getThreatIntelEnabled()) { + DetectorThreatIntelService + } DocLevelMonitorInput docLevelMonitorInput = new DocLevelMonitorInput(detector.getName(), detector.getInputs().get(0).getIndices(), docLevelQueries); docLevelMonitorInputs.add(docLevelMonitorInput); From f7ff940722fddf9fe7fa4132eeda397dbf36d832 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 3 Oct 2023 17:45:35 -0700 Subject: [PATCH 07/39] plug threat intel feed into detector creation Signed-off-by: Surya Sashank Nistala --- .../SecurityAnalyticsPlugin.java | 12 +++--- .../DetectorThreatIntelService.java | 26 +++++++++++- .../ThreatIntelFeedDataService.java | 42 ++++++++++++++----- .../TransportIndexDetectorAction.java | 16 +++++-- .../securityanalytics/TestHelpers.java | 4 +- 5 files changed, 77 insertions(+), 23 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 725593ad9..ccf2f44ab 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ 
b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -12,12 +12,9 @@ import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.cluster.routing.Preference; import org.opensearch.core.action.ActionListener; import org.opensearch.action.ActionRequest; import org.opensearch.core.action.ActionResponse; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.node.DiscoveryNode; @@ -38,7 +35,6 @@ import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.mapper.Mapper; -import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.ActionPlugin; import org.opensearch.plugins.ClusterPlugin; import org.opensearch.plugins.EnginePlugin; @@ -49,7 +45,6 @@ import org.opensearch.rest.RestController; import org.opensearch.rest.RestHandler; import org.opensearch.script.ScriptService; -import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.securityanalytics.action.*; import org.opensearch.securityanalytics.correlation.index.codec.CorrelationCodecService; import org.opensearch.securityanalytics.correlation.index.mapper.CorrelationVectorFieldMapper; @@ -62,6 +57,8 @@ import org.opensearch.securityanalytics.model.CustomLogType; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; import org.opensearch.securityanalytics.resthandler.*; +import org.opensearch.securityanalytics.threatIntel.DetectorThreatIntelService; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; import org.opensearch.securityanalytics.transport.*; import org.opensearch.securityanalytics.model.Rule; import org.opensearch.securityanalytics.model.Detector; @@ -129,6 +126,7 
@@ public Collection createComponents(Client client, NamedWriteableRegistry namedWriteableRegistry, IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier) { + builtinLogTypeLoader = new BuiltinLogTypeLoader(); logTypeService = new LogTypeService(client, clusterService, xContentRegistry, builtinLogTypeLoader); detectorIndices = new DetectorIndices(client.admin(), clusterService, threadPool); @@ -139,11 +137,13 @@ public Collection createComponents(Client client, mapperService = new MapperService(client, clusterService, indexNameExpressionResolver, indexTemplateManager, logTypeService); ruleIndices = new RuleIndices(logTypeService, client, clusterService, threadPool); correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); + ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService.state(), client, indexNameExpressionResolver, xContentRegistry); + DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); this.client = client; return List.of( detectorIndices, correlationIndices, correlationRuleIndices, ruleTopicIndices, customLogTypeIndices, ruleIndices, - mapperService, indexTemplateManager, builtinLogTypeLoader + mapperService, indexTemplateManager, builtinLogTypeLoader, threatIntelFeedDataService, detectorThreatIntelService ); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index 604d4e983..0e940988e 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -1,7 +1,10 @@ package org.opensearch.securityanalytics.threatIntel; import org.opensearch.commons.alerting.model.DocLevelQuery; +import 
org.opensearch.core.rest.RestStatus; +import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import java.util.Collections; import java.util.List; @@ -11,8 +14,14 @@ public class DetectorThreatIntelService { + private final ThreatIntelFeedDataService threatIntelFeedDataService; + + public DetectorThreatIntelService(ThreatIntelFeedDataService threatIntelFeedDataService) { + this.threatIntelFeedDataService = threatIntelFeedDataService; + } + /** Convert the feed data IOCs into query string query format to create doc level queries. */ - public static DocLevelQuery createDocLevelQueryFromThreatIntelList( + public DocLevelQuery createDocLevelQueryFromThreatIntelList( List tifdList, String docLevelQueryId ) { Set iocs = tifdList.stream().map(ThreatIntelFeedData::getIocValue).collect(Collectors.toSet()); @@ -23,7 +32,7 @@ public static DocLevelQuery createDocLevelQueryFromThreatIntelList( ); } - private static String buildQueryStringQueryWithIocList(Set iocs) { + private String buildQueryStringQueryWithIocList(Set iocs) { StringBuilder sb = new StringBuilder(); for(String ioc : iocs) { @@ -36,4 +45,17 @@ private static String buildQueryStringQueryWithIocList(Set iocs) { } return sb.toString(); } + + public DocLevelQuery createDocLevelQueryFromThreatIntel(Detector detector) { + // for testing validation only. 
+ if(detector.getThreatIntelEnabled() ==false) { + throw new SecurityAnalyticsException( + "trying to create threat intel feed queries when flag to use threat intel is disabled.", + RestStatus.FORBIDDEN, new IllegalArgumentException()); + + } + // TODO: plugin logic to run job for populating threat intel feed data + /*threatIntelFeedDataService.getThreatIntelFeedData("ip_address", );*/ + return null; + } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 9c12fdef7..91d156003 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -28,25 +28,45 @@ */ public class ThreatIntelFeedDataService { private static final Logger log = LogManager.getLogger(FindingsService.class); + private final ClusterState state; + private final Client client; + private final IndexNameExpressionResolver indexNameExpressionResolver; - public static void getThreatIntelFeedData(ClusterState state, Client client, IndexNameExpressionResolver indexNameExpressionResolver, - String feedName, String iocType, - ActionListener> listener, NamedXContentRegistry xContentRegistry) { - String indexPattern = String.format(".opendsearch-sap-threatintel-%s*", feedName); - String tifdIndex = IndexUtils.getNewIndexByCreationDate(state, indexNameExpressionResolver, indexPattern); + public ThreatIntelFeedDataService( + ClusterState state, + Client client, + IndexNameExpressionResolver indexNameExpressionResolver, + NamedXContentRegistry xContentRegistry) { + this.state = state; + this.client = client; + this.indexNameExpressionResolver = indexNameExpressionResolver; + this.xContentRegistry = xContentRegistry; + } + + private final NamedXContentRegistry xContentRegistry; + + public void getThreatIntelFeedData( + String 
iocType, + ActionListener> listener + ) { + String tifdIndex = IndexUtils.getNewIndexByCreationDate( + this.state, + this.indexNameExpressionResolver, + ".opendsearch-sap-threatintel*" + ); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); sourceBuilder.query(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("ioc_type", iocType))); SearchRequest searchRequest = new SearchRequest(tifdIndex); searchRequest.source().size(9999); //TODO: convert to scroll searchRequest.source(sourceBuilder); - client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(getTifdList(r, xContentRegistry)), e -> { + client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(getTifdList(r)), e -> { log.error(String.format( - "Failed to fetch threat intel feed data %s from system index %s", feedName, tifdIndex), e); + "Failed to fetch threat intel feed data from system index %s", tifdIndex), e); listener.onFailure(e); })); } - private static List getTifdList(SearchResponse searchResponse, NamedXContentRegistry xContentRegistry) { + private List getTifdList(SearchResponse searchResponse) { List list = new ArrayList<>(); if (searchResponse.getHits().getHits().length != 0) { Arrays.stream(searchResponse.getHits().getHits()).forEach(hit -> { @@ -57,8 +77,10 @@ private static List getTifdList(SearchResponse searchRespon ); list.add(ThreatIntelFeedData.parse(xcp, hit.getId(), hit.getVersion())); } catch (Exception e) { - log.error(() -> - new ParameterizedMessage("Failed to parse Threat intel feed data doc from hit {}", hit), e); + log.error(() -> new ParameterizedMessage( + "Failed to parse Threat intel feed data doc from hit {}", hit), + e + ); } }); diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index d5863caf4..81c548114 100644 --- 
a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ -96,6 +96,7 @@ import org.opensearch.securityanalytics.rules.backend.QueryBackend; import org.opensearch.securityanalytics.rules.exceptions.SigmaError; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.threatIntel.DetectorThreatIntelService; import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.securityanalytics.util.DetectorUtils; import org.opensearch.securityanalytics.util.IndexUtils; @@ -155,6 +156,7 @@ public class TransportIndexDetectorAction extends HandledTransportAction DocLevelQuery docLevelQuery = new DocLevelQuery(id, name, Collections.emptyList(), actualQuery, tags); docLevelQueries.add(docLevelQuery); } - if(detector.getThreatIntelEnabled()) { - DetectorThreatIntelService + try { + if (detector.getThreatIntelEnabled()) { + DocLevelQuery docLevelQueryFromThreatIntel = detectorThreatIntelService.createDocLevelQueryFromThreatIntel(detector); + docLevelQueries.add(docLevelQueryFromThreatIntel); + } + } catch (Exception e) { + // not failing detector creation if any fatal exception occurs during doc level query creation from threat intel feed data + log.error("Failed to convert threat intel feed to. 
Proceeding with detector creation", e); } DocLevelMonitorInput docLevelMonitorInput = new DocLevelMonitorInput(detector.getName(), detector.getInputs().get(0).getIndices(), docLevelQueries); docLevelMonitorInputs.add(docLevelMonitorInput); diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index 0679de1c7..44f5d39ae 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -172,8 +172,8 @@ public static CustomLogType randomCustomLogType(String name, String description, public static ThreatIntelFeedData randomThreatIntelFeedData() { return new ThreatIntelFeedData( "IP_ADDRESS", - "123.442.111.112", - OpenSearchRestTestCase.randomAlphaOfLength(10), + ip, + "alientVault", Instant.now() ); } From c19377384d4265a45ed613b652675ea5a363a20c Mon Sep 17 00:00:00 2001 From: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Date: Wed, 4 Oct 2023 19:03:06 -0700 Subject: [PATCH 08/39] Preliminary framework for jobscheduler and datasource (#626) Signed-off-by: Joanne Wang --- build.gradle | 2 + .../SecurityAnalyticsPlugin.java | 2 +- .../monitors/opensearch_security.policy | 3 + .../ThreatIntelFeedDataService.java | 248 +++++- .../threatIntel/common/Constants.java | 9 + .../action/DeleteDatasourceAction.java | 27 + .../action/DeleteDatasourceRequest.java | 62 ++ .../DeleteDatasourceTransportAction.java | 152 ++++ .../action/GetDatasourceAction.java | 26 + .../action/GetDatasourceRequest.java | 66 ++ .../action/GetDatasourceResponse.java | 81 ++ .../action/GetDatasourceTransportAction.java | 79 ++ .../action/PutDatasourceAction.java | 27 + .../action/PutDatasourceRequest.java | 267 ++++++ .../action/PutDatasourceTransportAction.java | 182 ++++ .../action/RestDeleteDatasourceHandler.java | 48 + .../action/RestGetDatasourceHandler.java | 44 + .../action/RestPutDatasourceHandler.java | 71 ++ 
.../action/RestUpdateDatasourceHandler.java | 50 ++ .../action/UpdateDatasourceAction.java | 27 + .../action/UpdateDatasourceRequest.java | 190 ++++ .../UpdateDatasourceTransportAction.java | 179 ++++ .../common/DatasourceManifest.java | 168 ++++ .../threatintel/common/DatasourceState.java | 37 + .../common/ParameterValidator.java | 58 ++ .../common/StashedThreadContext.java | 42 + .../common/ThreatIntelExecutor.java | 45 + .../common/ThreatIntelLockService.java | 167 ++++ .../common/ThreatIntelSettings.java | 103 +++ .../threatintel/dao/DatasourceDao.java | 380 ++++++++ .../threatintel/jobscheduler/Datasource.java | 819 ++++++++++++++++++ .../jobscheduler/DatasourceExtension.java | 47 + .../jobscheduler/DatasourceRunner.java | 159 ++++ .../jobscheduler/DatasourceTask.java | 21 + .../jobscheduler/DatasourceUpdateService.java | 296 +++++++ ...rch.jobscheduler.spi.JobSchedulerExtension | 1 + .../securityanalytics/TestHelpers.java | 2 +- .../findings/FindingServiceTests.java | 6 + 38 files changed, 4187 insertions(+), 6 deletions(-) create mode 100644 src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceRequest.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceTransportAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceRequest.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceResponse.java create mode 100644 
src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceTransportAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceRequest.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceTransportAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestDeleteDatasourceHandler.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestGetDatasourceHandler.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/StashedThreadContext.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelExecutor.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelLockService.java create mode 100644 
src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/dao/DatasourceDao.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/Datasource.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceExtension.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceTask.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java create mode 100644 src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension diff --git a/build.gradle b/build.gradle index 2e16c6b70..2a958f0b6 100644 --- a/build.gradle +++ b/build.gradle @@ -158,6 +158,8 @@ dependencies { api "org.opensearch:common-utils:${common_utils_version}@jar" api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" + compileOnly "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" + implementation "org.apache.commons:commons-csv:1.10.0" // Needed for integ tests zipArchive group: 'org.opensearch.plugin', name:'alerting', version: "${opensearch_build}" diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index ccf2f44ab..33808b445 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -137,7 +137,7 @@ public Collection createComponents(Client client, mapperService = new MapperService(client, clusterService, indexNameExpressionResolver, 
indexTemplateManager, logTypeService); ruleIndices = new RuleIndices(logTypeService, client, clusterService, threadPool); correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); - ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService.state(), client, indexNameExpressionResolver, xContentRegistry); + ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService.state(), clusterService, client, indexNameExpressionResolver, xContentRegistry); DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); this.client = client; diff --git a/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy b/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy new file mode 100644 index 000000000..c5af78398 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy @@ -0,0 +1,3 @@ +grant { + permission java.lang.management.ManagementPermission "reputation.alienvault.com:443" "connect,resolve"; +}; \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 91d156003..351572470 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -1,39 +1,106 @@ package org.opensearch.securityanalytics.threatIntel; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import 
org.opensearch.OpenSearchException; +import org.opensearch.SpecialPermission; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.index.IndexRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; +import org.opensearch.client.Requests; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.action.ActionListener; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.securityanalytics.findings.FindingsService; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; +import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; +import org.opensearch.securityanalytics.threatIntel.common.StashedThreadContext; +import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelSettings; +import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; 
import org.opensearch.securityanalytics.util.IndexUtils; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.securityanalytics.threatIntel.common.Constants; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; +import java.nio.charset.StandardCharsets; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.*; +import java.util.stream.Collectors; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; /** * Service to handle CRUD operations on Threat Intel Feed Data */ public class ThreatIntelFeedDataService { private static final Logger log = LogManager.getLogger(FindingsService.class); + private static final String SCHEMA_VERSION = "schema_version"; + private static final String IOC_TYPE = "ioc_type"; + private static final String IOC_VALUE = "ioc_value"; + private static final String FEED_ID = "feed_id"; + private static final String TIMESTAMP = "timestamp"; + private static final String TYPE = "type"; + private static final String DATA_FIELD_NAME = "_data"; + private final ClusterState state; private final Client client; private final IndexNameExpressionResolver indexNameExpressionResolver; + private static final Map INDEX_SETTING_TO_CREATE = Map.of( + "index.number_of_shards", + 1, + "index.number_of_replicas", + 0, + "index.refresh_interval", + -1, + "index.hidden", + true + ); + private static final Map INDEX_SETTING_TO_FREEZE = Map.of( + "index.auto_expand_replicas", + "0-all", + "index.blocks.write", + true + ); + private final ClusterService clusterService; + private final ClusterSettings clusterSettings; + public 
ThreatIntelFeedDataService( ClusterState state, + ClusterService clusterService, Client client, IndexNameExpressionResolver indexNameExpressionResolver, NamedXContentRegistry xContentRegistry) { @@ -41,6 +108,8 @@ public ThreatIntelFeedDataService( this.client = client; this.indexNameExpressionResolver = indexNameExpressionResolver; this.xContentRegistry = xContentRegistry; + this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); } private final NamedXContentRegistry xContentRegistry; @@ -52,7 +121,7 @@ public void getThreatIntelFeedData( String tifdIndex = IndexUtils.getNewIndexByCreationDate( this.state, this.indexNameExpressionResolver, - ".opendsearch-sap-threatintel*" + ".opensearch-sap-threatintel*" //name? ); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); sourceBuilder.query(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("ioc_type", iocType))); @@ -87,4 +156,175 @@ private List getTifdList(SearchResponse searchResponse) { } return list; } + + /** + * Create an index for a threat intel feed + * + * Index setting start with single shard, zero replica, no refresh interval, and hidden. + * Once the threat intel feed is indexed, do refresh and force merge. + * Then, change the index setting to expand replica to all nodes, and read only allow delete. 
+ * + * @param indexName index name + */ + public void createIndexIfNotExists(final String indexName) { + if (clusterService.state().metadata().hasIndex(indexName) == true) { + return; + } + final CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName).settings(INDEX_SETTING_TO_CREATE) + .mapping(getIndexMapping()); + StashedThreadContext.run( + client, + () -> client.admin().indices().create(createIndexRequest).actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) + ); + } + + private void freezeIndex(final String indexName) { + TimeValue timeout = clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT); + StashedThreadContext.run(client, () -> { + client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); + client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); + client.admin() + .indices() + .prepareUpdateSettings(indexName) + .setSettings(INDEX_SETTING_TO_FREEZE) + .execute() + .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)); + }); + } + + private String getIndexMapping() { + try { + try (InputStream is = DatasourceDao.class.getResourceAsStream("/mappings/threat_intel_feed_mapping.json")) { // TODO: check Datasource dao and this mapping + try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + return reader.lines().map(String::trim).collect(Collectors.joining()); + } + } + } catch (IOException e) { + log.error("Runtime exception when getting the threat intel index mapping", e); + throw new SecurityAnalyticsException("Runtime exception when getting the threat intel index mapping", RestStatus.INTERNAL_SERVER_ERROR, e); + } + } + + /** + * Create CSVParser of a threat intel feed + * + * @param manifest Datasource manifest + * @return CSVParser for threat intel feed + */ + @SuppressForbidden(reason = "Need to connect to http endpoint to read threat intel feed database file") + 
public CSVParser getDatabaseReader(final DatasourceManifest manifest) { + SpecialPermission.check(); + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + URL url = new URL(manifest.getUrl()); + return internalGetDatabaseReader(manifest, url.openConnection()); + } catch (IOException e) { + log.error("Exception: failed to read threat intel feed data from {}",manifest.getUrl(), e); + throw new OpenSearchException("failed to read threat intel feed data from {}", manifest.getUrl(), e); + } + }); + } + + @SuppressForbidden(reason = "Need to connect to http endpoint to read threat intel feed database file") // TODO: update this function because no zip file... + protected CSVParser internalGetDatabaseReader(final DatasourceManifest manifest, final URLConnection connection) throws IOException { + connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + ZipInputStream zipIn = new ZipInputStream(connection.getInputStream()); + ZipEntry zipEntry = zipIn.getNextEntry(); + while (zipEntry != null) { + if (zipEntry.getName().equalsIgnoreCase(manifest.getDbName()) == false) { + zipEntry = zipIn.getNextEntry(); + continue; + } + return new CSVParser(new BufferedReader(new InputStreamReader(zipIn)), CSVFormat.RFC4180); + } + throw new IllegalArgumentException( + String.format(Locale.ROOT, "database file [%s] does not exist in the zip file [%s]", manifest.getDbName(), manifest.getUrl()) + ); + } + + /** + * Puts threat intel feed from CSVRecord iterator into a given index in bulk + * + * @param indexName Index name to puts the TIF data + * @param fields Field name matching with data in CSVRecord in order + * @param iterator TIF data to insert + * @param renewLock Runnable to renew lock + */ + public void saveThreatIntelFeedData( + final String indexName, + final String[] fields, + final Iterator iterator, + final Runnable renewLock +// final ThreatIntelFeedData threatIntelFeedData + ) throws IOException { + if (indexName == null || 
fields == null || iterator == null || renewLock == null){ + throw new IllegalArgumentException("Fields cannot be null"); + } + + TimeValue timeout = clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT); + Integer batchSize = clusterSettings.get(ThreatIntelSettings.BATCH_SIZE); + final BulkRequest bulkRequest = new BulkRequest(); + Queue requests = new LinkedList<>(); + for (int i = 0; i < batchSize; i++) { + requests.add(Requests.indexRequest(indexName)); + } + while (iterator.hasNext()) { + CSVRecord record = iterator.next(); +// XContentBuilder tifData = threatIntelFeedData.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); + IndexRequest indexRequest = (IndexRequest) requests.poll(); +// indexRequest.source(tifData); + indexRequest.id(record.get(0)); + bulkRequest.add(indexRequest); + if (iterator.hasNext() == false || bulkRequest.requests().size() == batchSize) { + BulkResponse response = StashedThreadContext.run(client, () -> client.bulk(bulkRequest).actionGet(timeout)); + if (response.hasFailures()) { + throw new OpenSearchException( + "error occurred while ingesting threat intel feed data in {} with an error {}", + indexName, + response.buildFailureMessage() + ); + } + requests.addAll(bulkRequest.requests()); + bulkRequest.requests().clear(); + } + renewLock.run(); + } + freezeIndex(indexName); + } + + public void deleteThreatIntelDataIndex(final String index) { + deleteThreatIntelDataIndex(Arrays.asList(index)); + } + + public void deleteThreatIntelDataIndex(final List indices) { + if (indices == null || indices.isEmpty()) { + return; + } + + Optional invalidIndex = indices.stream() + .filter(index -> index.startsWith(THREAT_INTEL_DATA_INDEX_NAME_PREFIX) == false) + .findAny(); + if (invalidIndex.isPresent()) { + throw new OpenSearchException( + "the index[{}] is not threat intel data index which should start with {}", + invalidIndex.get(), + THREAT_INTEL_DATA_INDEX_NAME_PREFIX + ); + } + + AcknowledgedResponse response = 
StashedThreadContext.run( + client, + () -> client.admin() + .indices() + .prepareDelete(indices.toArray(new String[0])) + .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) + .execute() + .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) + ); + + if (response.isAcknowledged() == false) { + throw new OpenSearchException("failed to delete data[{}] in datasource", String.join(",", indices)); + } + } + } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java new file mode 100644 index 000000000..af31e7897 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java @@ -0,0 +1,9 @@ +package org.opensearch.securityanalytics.threatIntel.common; + +import org.opensearch.Version; + +import java.util.Locale; +public class Constants { + public static final String USER_AGENT_KEY = "User-Agent"; + public static final String USER_AGENT_VALUE = String.format(Locale.ROOT, "OpenSearch/%s vanilla", Version.CURRENT.toString()); +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceAction.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceAction.java new file mode 100644 index 000000000..35effc4b7 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceAction.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * Threat intel datasource delete action + */ +public class DeleteDatasourceAction extends ActionType { + /** + * Delete datasource action instance + */ + public static final DeleteDatasourceAction INSTANCE = new 
DeleteDatasourceAction(); + /** + * Delete datasource action name + */ + public static final String NAME = "cluster:admin/security_analytics/datasource/delete"; + + private DeleteDatasourceAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceRequest.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceRequest.java new file mode 100644 index 000000000..654b93985 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceRequest.java @@ -0,0 +1,62 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; + +import java.io.IOException; + +/** + * Threat intel datasource delete request + */ + +public class DeleteDatasourceRequest extends ActionRequest { + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + /** + * @param name the datasource name + * @return the datasource name + */ + private String name; + + /** + * Constructor + * + * @param in the stream input + * @throws IOException IOException + */ + public DeleteDatasourceRequest(final StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + } + + public DeleteDatasourceRequest(final String name) { + this.name = name; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = null; + if (VALIDATOR.validateDatasourceName(name).isEmpty() == false) { + errors = new ActionRequestValidationException(); + errors.addValidationError("no 
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.OpenSearchStatusException;
import org.opensearch.ResourceNotFoundException;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.HandledTransportAction;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.action.ActionListener;
import org.opensearch.core.rest.RestStatus;

import org.opensearch.ingest.IngestService;
import org.opensearch.securityanalytics.model.DetectorTrigger;
import org.opensearch.securityanalytics.threatIntel.common.DatasourceState;
import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService;
import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao;
import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService;
import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource;
import org.opensearch.tasks.Task;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;

import java.io.IOException;

/**
 * Transport action to delete a threat intel datasource.
 *
 * <p>Flow: acquire a job-scheduler lock for the datasource name, run the
 * actual deletion on the generic thread pool, delete the backing threat intel
 * indices first and then the datasource job document, and finally release the
 * lock. A datasource is deleted regardless of its state as long as the lock
 * can be acquired.
 */
public class DeleteDatasourceTransportAction extends HandledTransportAction {
    // NOTE(review): logger is keyed to DetectorTrigger.class, so log lines from this
    // action are attributed to the wrong class — presumably a copy/paste; consider
    // DeleteDatasourceTransportAction.class.
    private static final Logger log = LogManager.getLogger(DetectorTrigger.class);

    // How long (seconds) the job-scheduler lock is held while the delete runs.
    private static final long LOCK_DURATION_IN_SECONDS = 300l;
    private final ThreatIntelLockService lockService;
    private final IngestService ingestService;
    private final DatasourceDao datasourceDao;
    private final ThreatIntelFeedDataService threatIntelFeedDataService;
    private final ThreadPool threadPool;

    /**
     * Constructor
     * @param transportService the transport service
     * @param actionFilters the action filters
     * @param lockService the lock service
     * @param ingestService the ingest service
     * @param datasourceDao the datasource facade
     * @param threatIntelFeedDataService service owning the threat intel data indices
     * @param threadPool the thread pool (generic executor is used for the delete)
     */
    @Inject
    public DeleteDatasourceTransportAction(
        final TransportService transportService,
        final ActionFilters actionFilters,
        final ThreatIntelLockService lockService,
        final IngestService ingestService,
        final DatasourceDao datasourceDao,
        final ThreatIntelFeedDataService threatIntelFeedDataService,
        final ThreadPool threadPool
    ) {
        super(DeleteDatasourceAction.NAME, transportService, actionFilters, DeleteDatasourceRequest::new);
        this.lockService = lockService;
        this.ingestService = ingestService;
        this.datasourceDao = datasourceDao;
        this.threatIntelFeedDataService = threatIntelFeedDataService;
        this.threadPool = threadPool;
    }

    /**
     * We delete datasource regardless of its state as long as we can acquire a lock
     *
     * @param task the task
     * @param request the request
     * @param listener the listener; receives an acknowledged response on success
     */
    @Override
    protected void doExecute(final Task task, final DeleteDatasourceRequest request, final ActionListener listener) {
        lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> {
            // A null lock means someone else holds it; reject rather than queue.
            if (lock == null) {
                listener.onFailure(
                    new OpenSearchStatusException("Another processor is holding a lock on the resource. Try again later", RestStatus.BAD_REQUEST)
                );
                // NOTE(review): the message has no "{}" placeholder, so the RestStatus
                // argument is silently dropped by the parameterized logger — likely
                // meant log.error("... {}", RestStatus.BAD_REQUEST).
                log.error("Another processor is holding lock, BAD_REQUEST exception", RestStatus.BAD_REQUEST);

                return;
            }
            try {
                // TODO: makes every sub-methods as async call to avoid using a thread in generic pool
                threadPool.generic().submit(() -> {
                    try {
                        deleteDatasource(request.getName());
                        lockService.releaseLock(lock);
                        listener.onResponse(new AcknowledgedResponse(true));
                    } catch (Exception e) {
                        // Release before failing so the lock is never leaked.
                        lockService.releaseLock(lock);
                        listener.onFailure(e);
                        log.error("delete data source failed",e);
                    }
                });
            } catch (Exception e) {
                // submit() itself failed (e.g. rejected execution); release and fail.
                lockService.releaseLock(lock);
                listener.onFailure(e);
                log.error("Internal server error", e);
            }
        }, exception -> { listener.onFailure(exception); }));
    }

    /**
     * Deletes the backing threat intel indices, then the datasource job document.
     *
     * @param datasourceName name of the datasource to delete
     * @throws IOException on dao failure
     * @throws ResourceNotFoundException if no datasource with that name exists
     */
    protected void deleteDatasource(final String datasourceName) throws IOException {
        Datasource datasource = datasourceDao.getDatasource(datasourceName);
        if (datasource == null) {
            throw new ResourceNotFoundException("no such datasource exist");
        }
        DatasourceState previousState = datasource.getState();

        try {
            threatIntelFeedDataService.deleteThreatIntelDataIndex(datasource.getIndices());
        } catch (Exception e) {
            // Restore the pre-delete state if it was changed before the failure.
            // NOTE(review): with setDatasourceStateAsDeleting commented out, nothing in
            // this method mutates the state, so this branch appears unreachable today —
            // presumably kept for when the DELETING transition is re-enabled; confirm.
            if (previousState.equals(datasource.getState()) == false) {
                datasource.setState(previousState);
                datasourceDao.updateDatasource(datasource);
            }
            throw e;
        }
        datasourceDao.deleteDatasource(datasource);
    }

    // A commented-out setDatasourceStateAsDeleting(...) guard existed here in the
    // original patch: it would refuse deletion while processors reference the
    // datasource and flip state to DELETING first. Left out pending a decision.
}
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import org.opensearch.action.ActionType;

/**
 * Threat intel datasource get action.
 *
 * <p>ActionType singleton binding the transport action name
 * {@code cluster:admin/security_analytics/datasource/get} to its
 * {@link GetDatasourceResponse} reader.
 */
public class GetDatasourceAction extends ActionType {
    /**
     * Get datasource action instance (singleton; constructor is private).
     */
    public static final GetDatasourceAction INSTANCE = new GetDatasourceAction();
    /**
     * Get datasource action name
     */
    public static final String NAME = "cluster:admin/security_analytics/datasource/get";

    private GetDatasourceAction() {
        super(NAME, GetDatasourceResponse::new);
    }
}
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import org.opensearch.action.ActionRequest;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;

import java.io.IOException;

/**
 * Threat intel datasource get request.
 *
 * <p>An empty name list, or the single element {@code "_all"}, is interpreted
 * by the transport action as "return every registered datasource".
 */
public class GetDatasourceRequest extends ActionRequest {
    /** Datasource names to fetch; must not be null. */
    private String[] names;

    /**
     * Creates a get request for the given datasource names.
     *
     * @param names list of datasource names
     */
    public GetDatasourceRequest(final String[] names) {
        this.names = names;
    }

    /**
     * Deserialization constructor.
     *
     * @param in the stream input
     * @throws IOException on stream failure
     */
    public GetDatasourceRequest(final StreamInput in) throws IOException {
        super(in);
        this.names = in.readStringArray();
    }

    @Override
    public ActionRequestValidationException validate() {
        // Guard clause: any non-null array (including empty) is valid.
        if (names != null) {
            return null;
        }
        final ActionRequestValidationException errors = new ActionRequestValidationException();
        errors.addValidationError("names should not be null");
        return errors;
    }

    @Override
    public void writeTo(final StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeStringArray(names);
    }

    public String[] getNames() {
        return this.names;
    }
}
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import org.opensearch.core.ParseField;
import org.opensearch.core.action.ActionResponse;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource;

import java.io.IOException;
import java.time.Instant;
import java.util.List;

/**
 * Response for the threat intel datasource get action.
 *
 * <p>Wraps the list of matching {@link Datasource} documents and renders them
 * as a {@code datasources} array with per-datasource status fields.
 */
public class GetDatasourceResponse extends ActionResponse implements ToXContentObject {
    private static final ParseField FIELD_NAME_DATASOURCES = new ParseField("datasources");
    private static final ParseField FIELD_NAME_NAME = new ParseField("name");
    private static final ParseField FIELD_NAME_STATE = new ParseField("state");
    private static final ParseField FIELD_NAME_ENDPOINT = new ParseField("endpoint");
    private static final ParseField FIELD_NAME_UPDATE_INTERVAL = new ParseField("update_interval_in_days");
    private static final ParseField FIELD_NAME_NEXT_UPDATE_AT = new ParseField("next_update_at_in_epoch_millis");
    private static final ParseField FIELD_NAME_NEXT_UPDATE_AT_READABLE = new ParseField("next_update_at");
    private static final ParseField FIELD_NAME_DATABASE = new ParseField("database");
    private static final ParseField FIELD_NAME_UPDATE_STATS = new ParseField("update_stats");
    // Datasources carried by this response; rendered in toXContent.
    private List datasources;

    /**
     * Default constructor
     *
     * @param datasources List of datasources
     */
    public GetDatasourceResponse(final List datasources) {
        this.datasources = datasources;
    }

    /**
     * Constructor with StreamInput
     *
     * @param in the stream input
     */
    public GetDatasourceResponse(final StreamInput in) throws IOException {
        datasources = in.readList(Datasource::new);
    }

    @Override
    public void writeTo(final StreamOutput out) throws IOException {
        out.writeList(datasources);
    }

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        builder.startArray(FIELD_NAME_DATASOURCES.getPreferredName());
        for (Datasource datasource : datasources) {
            builder.startObject();
            builder.field(FIELD_NAME_NAME.getPreferredName(), datasource.getName());
            builder.field(FIELD_NAME_STATE.getPreferredName(), datasource.getState());
            builder.field(FIELD_NAME_ENDPOINT.getPreferredName(), datasource.getEndpoint());
            // TODO / NOTE(review): this serializes the whole schedule object under
            // "update_interval_in_days" — presumably it should emit the interval in
            // days instead; confirm against the REST API contract.
            builder.field(FIELD_NAME_UPDATE_INTERVAL.getPreferredName(), datasource.getSchedule()); //TODO
            // Emits both the epoch-millis field and the human-readable variant.
            builder.timeField(
                FIELD_NAME_NEXT_UPDATE_AT.getPreferredName(),
                FIELD_NAME_NEXT_UPDATE_AT_READABLE.getPreferredName(),
                datasource.getSchedule().getNextExecutionTime(Instant.now()).toEpochMilli()
            );
            builder.field(FIELD_NAME_DATABASE.getPreferredName(), datasource.getDatabase());
            builder.field(FIELD_NAME_UPDATE_STATS.getPreferredName(), datasource.getUpdateStats());
            builder.endObject();
        }
        builder.endArray();
        builder.endObject();
        return builder;
    }
}
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import org.opensearch.OpenSearchException;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.HandledTransportAction;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.action.ActionListener;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao;
import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource;
import org.opensearch.tasks.Task;
import org.opensearch.transport.TransportService;

import java.util.Collections;
import java.util.List;

/**
 * Transport action to get threat intel datasources.
 *
 * <p>An empty name list, or the single name {@code "_all"}, returns every
 * registered datasource. A missing datasource index yields an empty response
 * rather than an error.
 */
public class GetDatasourceTransportAction extends HandledTransportAction {
    private final DatasourceDao datasourceDao;

    /**
     * Default constructor
     * @param transportService the transport service
     * @param actionFilters the action filters
     * @param datasourceDao the datasource facade
     */
    @Inject
    public GetDatasourceTransportAction(
        final TransportService transportService,
        final ActionFilters actionFilters,
        final DatasourceDao datasourceDao
    ) {
        super(GetDatasourceAction.NAME, transportService, actionFilters, GetDatasourceRequest::new);
        this.datasourceDao = datasourceDao;
    }

    @Override
    protected void doExecute(final Task task, final GetDatasourceRequest request, final ActionListener listener) {
        if (shouldGetAllDatasource(request)) {
            // We don't expect too many data sources, so fetching all of them
            // without pagination is acceptable.
            datasourceDao.getAllDatasources(newActionListener(listener));
            return;
        }
        datasourceDao.getDatasources(request.getNames(), newActionListener(listener));
    }

    /** Returns true when the request means "fetch every datasource". */
    private boolean shouldGetAllDatasource(final GetDatasourceRequest request) {
        final String[] names = request.getNames();
        if (names == null) {
            throw new OpenSearchException("names in a request should not be null");
        }
        switch (names.length) {
            case 0:
                return true;
            case 1:
                return "_all".equals(names[0]);
            default:
                return false;
        }
    }

    /**
     * Adapts a dao listener over {@code List<Datasource>} into the response
     * listener, mapping a missing index to an empty result set.
     */
    protected ActionListener<List<Datasource>> newActionListener(final ActionListener<GetDatasourceResponse> listener) {
        return ActionListener.wrap(
            datasources -> listener.onResponse(new GetDatasourceResponse(datasources)),
            e -> {
                if (e instanceof IndexNotFoundException) {
                    // No datasource index yet means there are simply no datasources.
                    listener.onResponse(new GetDatasourceResponse(Collections.emptyList()));
                } else {
                    listener.onFailure(e);
                }
            }
        );
    }
}
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.List;
import java.util.Locale;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.securityanalytics.model.DetectorTrigger;
import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest;
import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator;

/**
 * Threat intel datasource creation request.
 *
 * <p>Parsed from the PUT datasource REST body via {@link #PARSER}; the
 * datasource {@code name} comes from the URL path, the remaining fields from
 * the body. {@link #validate()} checks the name, the endpoint URL (including
 * fetching and validating its manifest) and the update interval.
 */
public class PutDatasourceRequest extends ActionRequest {
    private static final Logger log = LogManager.getLogger(DetectorTrigger.class);

    public static final ParseField FEED_FORMAT_FIELD = new ParseField("feed_format");
    public static final ParseField ENDPOINT_FIELD = new ParseField("endpoint");
    public static final ParseField FEED_NAME_FIELD = new ParseField("feed_name");
    public static final ParseField DESCRIPTION_FIELD = new ParseField("description");
    public static final ParseField ORGANIZATION_FIELD = new ParseField("organization");
    public static final ParseField CONTAINED_IOCS_FIELD = new ParseField("contained_iocs_field");
    public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days");
    private static final ParameterValidator VALIDATOR = new ParameterValidator();

    /** Datasource name (taken from the REST path, not the body). */
    private String name;

    /** Format of the feed, e.g. csv. */
    private String feedFormat;

    /** URL to a manifest file describing the feed. */
    private String endpoint;

    /** Human-readable feed name. */
    private String feedName;

    /** Free-form description of the feed. */
    private String description;

    /** Organization publishing the feed. */
    private String organization;

    /** IOC types contained in the feed (e.g. ip, domain). */
    private List<String> contained_iocs_field;

    /** Interval between feed refreshes. */
    private TimeValue updateInterval;

    /**
     * Parser for the REST request body.
     */
    public static final ObjectParser<PutDatasourceRequest, Void> PARSER;
    static {
        PARSER = new ObjectParser<>("put_datasource");
        PARSER.declareString(PutDatasourceRequest::setFeedFormat, FEED_FORMAT_FIELD);
        PARSER.declareString(PutDatasourceRequest::setThisEndpoint, ENDPOINT_FIELD);
        PARSER.declareString(PutDatasourceRequest::setFeedName, FEED_NAME_FIELD);
        PARSER.declareString(PutDatasourceRequest::setDescription, DESCRIPTION_FIELD);
        PARSER.declareString(PutDatasourceRequest::setOrganization, ORGANIZATION_FIELD);
        // FIX: this declaration was commented out (with broken `val[]` lambda syntax),
        // so contained_iocs_field stayed null after parsing and writeTo() threw an NPE
        // on node-to-node serialization.
        PARSER.declareStringArray(PutDatasourceRequest::setContained_iocs_field, CONTAINED_IOCS_FIELD);
        PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD);
    }

    /**
     * Default constructor
     * @param name name of a datasource
     */
    public PutDatasourceRequest(final String name) {
        this.name = name;
    }

    /**
     * Constructor with stream input
     * @param in the stream input
     * @throws IOException IOException
     */
    public PutDatasourceRequest(final StreamInput in) throws IOException {
        super(in);
        this.name = in.readString();
        this.feedFormat = in.readString();
        this.endpoint = in.readString();
        this.feedName = in.readString();
        this.description = in.readString();
        this.organization = in.readString();
        this.contained_iocs_field = in.readStringList();
        this.updateInterval = in.readTimeValue();
    }

    @Override
    public void writeTo(final StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeString(name);
        out.writeString(feedFormat);
        out.writeString(endpoint);
        out.writeString(feedName);
        out.writeString(description);
        out.writeString(organization);
        out.writeStringCollection(contained_iocs_field);
        out.writeTimeValue(updateInterval);
    }

    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException errors = new ActionRequestValidationException();
        List<String> errorMsgs = VALIDATOR.validateDatasourceName(name);
        if (errorMsgs.isEmpty() == false) {
            errorMsgs.stream().forEach(msg -> errors.addValidationError(msg));
        }
        validateEndpoint(errors);
        validateUpdateInterval(errors);
        return errors.validationErrors().isEmpty() ? null : errors;
    }

    /**
     * Conduct following validation on endpoint
     * 1. endpoint format complies with RFC-2396
     * 2. validate manifest file from the endpoint
     *
     * @param errors the errors to add error messages
     */
    private void validateEndpoint(final ActionRequestValidationException errors) {
        try {
            URL url = new URL(endpoint);
            url.toURI(); // Validate URL complies with RFC-2396
            validateManifestFile(url, errors);
        } catch (MalformedURLException | URISyntaxException e) {
            // A null/absent endpoint also lands here via MalformedURLException.
            log.info("Invalid URL[{}] is provided", endpoint, e);
            errors.addValidationError("Invalid URL format is provided");
        }
    }

    /**
     * Conduct following validation on url
     * 1. can read manifest file from the endpoint
     * 2. the url in the manifest file complies with RFC-2396
     *
     * <p>NOTE: this performs network I/O (downloads the manifest) during request
     * validation; the endpoint must be reachable from the node handling the request.
     *
     * @param url the url to validate
     * @param errors the errors to add error messages
     */
    private void validateManifestFile(final URL url, final ActionRequestValidationException errors) {
        DatasourceManifest manifest;
        try {
            manifest = DatasourceManifest.Builder.build(url);
        } catch (Exception e) {
            log.info("Error occurred while reading a file from {}", url, e);
            errors.addValidationError(String.format(Locale.ROOT, "Error occurred while reading a file from %s: %s", url, e.getMessage()));
            return;
        }

        try {
            new URL(manifest.getUrl()).toURI(); // Validate URL complies with RFC-2396
        } catch (MalformedURLException | URISyntaxException e) {
            log.info("Invalid URL[{}] is provided for url field in the manifest file", manifest.getUrl(), e);
            errors.addValidationError("Invalid URL format is provided for url field in the manifest file");
        }
    }

    /**
     * Validate updateInterval is set and equal to or larger than 1 day.
     *
     * @param errors the errors to add error messages
     */
    private void validateUpdateInterval(final ActionRequestValidationException errors) {
        // FIX: a request body without update_interval_in_days previously caused an
        // NPE here (HTTP 500); report it as a validation error instead.
        if (updateInterval == null) {
            errors.addValidationError("update_interval_in_days is required");
            return;
        }
        if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) {
            errors.addValidationError("Update interval should be equal to or larger than 1 day");
        }
    }

    public void setFeedFormat(String feedFormat) {
        this.feedFormat = feedFormat;
    }

    public void setThisEndpoint(String endpoint) {
        this.endpoint = endpoint;
    }

    public void setFeedName(String feedName) {
        this.feedName = feedName;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public void setOrganization(String organization) {
        this.organization = organization;
    }

    public void setContained_iocs_field(List<String> contained_iocs_field) {
        this.contained_iocs_field = contained_iocs_field;
    }

    public List<String> getContained_iocs_field() {
        return contained_iocs_field;
    }

    public String getFeedFormat() {
        return feedFormat;
    }

    public String getFeedName() {
        return feedName;
    }

    @Override
    public String getDescription() {
        return description;
    }

    public String getOrganization() {
        return organization;
    }

    public String getName() {
        return name;
    }

    public String getEndpoint() {
        return this.endpoint;
    }

    public void setEndpoint(String newEndpoint) {
        this.endpoint = newEndpoint;
    }

    public TimeValue getUpdateInterval() {
        return this.updateInterval;
    }

    public void setUpdateInterval(TimeValue timeValue) {
        this.updateInterval = timeValue;
    }
}
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import static org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService.LOCK_DURATION_IN_SECONDS;

import java.time.Instant;
import java.util.ConcurrentModificationException;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.ResourceAlreadyExistsException;
import org.opensearch.action.StepListener;
import org.opensearch.action.index.IndexResponse;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.HandledTransportAction;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.action.ActionListener;

import org.opensearch.core.rest.RestStatus;
import org.opensearch.index.engine.VersionConflictEngineException;
import org.opensearch.jobscheduler.spi.LockModel;
import org.opensearch.securityanalytics.model.DetectorTrigger;
import org.opensearch.securityanalytics.threatIntel.common.DatasourceState;
import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService;
import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao;
import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource;
import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceUpdateService;
import org.opensearch.tasks.Task;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;

/**
 * Transport action to create a threat intel datasource.
 *
 * <p>Flow: acquire a job-scheduler lock for the datasource name, ensure the
 * datasource index exists, persist the datasource job document, then kick off
 * the first feed download on the generic thread pool while periodically
 * renewing the lock. The response is acknowledged as soon as the job document
 * is indexed; the initial feed download continues in the background.
 */
public class PutDatasourceTransportAction extends HandledTransportAction {
    // NOTE(review): logger is keyed to DetectorTrigger.class (copy/paste); log lines
    // from this action are attributed to the wrong class.
    private static final Logger log = LogManager.getLogger(DetectorTrigger.class);

    private final ThreadPool threadPool;
    private final DatasourceDao datasourceDao;
    private final DatasourceUpdateService datasourceUpdateService;
    private final ThreatIntelLockService lockService;

    /**
     * Default constructor
     * @param transportService the transport service
     * @param actionFilters the action filters
     * @param threadPool the thread pool
     * @param datasourceDao the datasource facade
     * @param datasourceUpdateService the datasource update service
     * @param lockService the lock service
     */
    @Inject
    public PutDatasourceTransportAction(
        final TransportService transportService,
        final ActionFilters actionFilters,
        final ThreadPool threadPool,
        final DatasourceDao datasourceDao,
        final DatasourceUpdateService datasourceUpdateService,
        final ThreatIntelLockService lockService
    ) {
        super(PutDatasourceAction.NAME, transportService, actionFilters, PutDatasourceRequest::new);
        this.threadPool = threadPool;
        this.datasourceDao = datasourceDao;
        this.datasourceUpdateService = datasourceUpdateService;
        this.lockService = lockService;
    }

    @Override
    protected void doExecute(final Task task, final PutDatasourceRequest request, final ActionListener listener) {
        lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> {
            // A null lock means another node/processor is working on this name.
            if (lock == null) {
                listener.onFailure(
                    new ConcurrentModificationException("another processor is holding a lock on the resource. Try again later")
                );
                // NOTE(review): message has no "{}" placeholder, so the RestStatus
                // argument is dropped by the parameterized logger; message text is
                // also garbled ("is a lock").
                log.error("another processor is a lock, BAD_REQUEST error", RestStatus.BAD_REQUEST);
                return;
            }
            try {
                internalDoExecute(request, lock, listener);
            } catch (Exception e) {
                lockService.releaseLock(lock);
                listener.onFailure(e);
                log.error("listener failed when executing", e);
            }
        }, exception -> {
            listener.onFailure(exception);
            log.error("execution failed", exception);
        }));
    }

    /**
     * This method takes lock as a parameter and is responsible for releasing lock
     * unless exception is thrown
     */
    protected void internalDoExecute(
        final PutDatasourceRequest request,
        final LockModel lock,
        final ActionListener listener
    ) {
        StepListener createIndexStep = new StepListener<>();
        datasourceDao.createIndexIfNotExists(createIndexStep);
        createIndexStep.whenComplete(v -> {
            Datasource datasource = Datasource.Builder.build(request);
            // On success the index-response listener owns the lock from here on.
            datasourceDao.putDatasource(datasource, getIndexResponseListener(datasource, lock, listener));
        }, exception -> {
            lockService.releaseLock(lock);
            log.error("failed to release lock", exception);
            listener.onFailure(exception);
        });
    }

    /**
     * This method takes lock as a parameter and is responsible for releasing lock
     * unless exception is thrown
     */
    protected ActionListener getIndexResponseListener(
        final Datasource datasource,
        final LockModel lock,
        final ActionListener listener
    ) {
        return new ActionListener<>() {
            @Override
            public void onResponse(final IndexResponse indexResponse) {
                // This is user initiated request. Therefore, we want to handle the first datasource update task in a generic thread
                // pool.
                threadPool.generic().submit(() -> {
                    // The lock may be renewed while the (long) first download runs;
                    // the AtomicReference lets the renewal swap in the fresh lock so
                    // we release whichever lock is current at the end.
                    AtomicReference lockReference = new AtomicReference<>(lock);
                    try {
                        createDatasource(datasource, lockService.getRenewLockRunnable(lockReference));
                    } finally {
                        lockService.releaseLock(lockReference.get());
                    }
                });
                // Fire-and-forget: the request is acknowledged once the job document is
                // indexed, before the initial feed download completes.
                listener.onResponse(new AcknowledgedResponse(true));
            }

            @Override
            public void onFailure(final Exception e) {
                lockService.releaseLock(lock);
                if (e instanceof VersionConflictEngineException) {
                    // Duplicate datasource id maps to a clearer "already exists" error.
                    log.error("datasource already exists");
                    listener.onFailure(new ResourceAlreadyExistsException("datasource [{}] already exists", datasource.getName()));
                } else {
                    log.error("Internal server error");
                    listener.onFailure(e);
                }
            }
        };
    }

    /**
     * Runs the initial feed download for a freshly-created datasource; on any
     * failure marks the datasource CREATE_FAILED instead of propagating.
     */
    protected void createDatasource(final Datasource datasource, final Runnable renewLock) {
        // Only a datasource still in CREATING state may run its first update.
        if (DatasourceState.CREATING.equals(datasource.getState()) == false) {
            log.error("Invalid datasource state. Expecting {} but received {}", DatasourceState.CREATING, datasource.getState());
            markDatasourceAsCreateFailed(datasource);
            return;
        }

        try {
            datasourceUpdateService.updateOrCreateThreatIntelFeedData(datasource, renewLock);
        } catch (Exception e) {
            log.error("Failed to create datasource for {}", datasource.getName(), e);
            markDatasourceAsCreateFailed(datasource);
        }
    }

    /** Best-effort transition to CREATE_FAILED; a failed update is only logged. */
    private void markDatasourceAsCreateFailed(final Datasource datasource) {
        datasource.getUpdateStats().setLastFailedAt(Instant.now());
        datasource.setState(DatasourceState.CREATE_FAILED);
        try {
            datasourceDao.updateDatasource(datasource);
        } catch (Exception e) {
            log.error("Failed to mark datasource state as CREATE_FAILED for {}", datasource.getName(), e);
        }
    }
}
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.securityanalytics.threatIntel.action;

import org.opensearch.client.node.NodeClient;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;

import java.io.IOException;
import java.util.List;
import java.util.Locale;

import static org.opensearch.rest.RestRequest.Method.DELETE;

/**
 * REST handler for deleting a threat intel datasource.
 *
 * <p>Handles {@code DELETE /_plugins/_security_analytics/threatintel/datasource/{name}}
 * by dispatching a {@link DeleteDatasourceRequest} to {@link DeleteDatasourceAction}.
 */
public class RestDeleteDatasourceHandler extends BaseRestHandler {
    private static final String ACTION_NAME = "threatintel_datasource_delete";
    private static final String PARAMS_NAME = "name";

    @Override
    public String getName() {
        return ACTION_NAME;
    }

    @Override
    public List routes() {
        String path = String.join("/", "/_plugins/_security_analytics", String.format(Locale.ROOT, "threatintel/datasource/{%s}", PARAMS_NAME));
        return List.of(new Route(DELETE, path));
    }

    @Override
    protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
        // The datasource name comes from the {name} path parameter.
        final DeleteDatasourceRequest deleteRequest = new DeleteDatasourceRequest(request.param(PARAMS_NAME));
        return channel -> client.executeLocally(
            DeleteDatasourceAction.INSTANCE,
            deleteRequest,
            new RestToXContentListener<>(channel)
        );
    }
}
final GetDatasourceRequest getDatasourceRequest = new GetDatasourceRequest(names); + + return channel -> client.executeLocally(GetDatasourceAction.INSTANCE, getDatasourceRequest, new RestToXContentListener<>(channel)); + } + + @Override + public List routes() { + return List.of( + new Route(GET, String.join("/", "/_plugins/_security_analytics", "threatintel/datasource")), + new Route(GET, String.join("/", "/_plugins/_security_analytics", "threatintel/datasource/{name}")) + ); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java new file mode 100644 index 000000000..5c9ecd7b4 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java @@ -0,0 +1,71 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; +import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelSettings; + +import java.io.IOException; +import java.util.List; + +import static org.opensearch.rest.RestRequest.Method.PUT; + +/** + * Rest handler for threat intel datasource creation + * + * This handler handles a request of + * PUT /_plugins/security_analytics/threatintel/datasource/{id} + * { + * "endpoint": {endpoint}, + * "update_interval_in_days": 3 + * } + * + * When request is received, it will create a datasource by downloading threat intel feed from the endpoint. 
+ * After the creation of datasource is completed, it will schedule the next update task after update_interval_in_days. + * + */ +public class RestPutDatasourceHandler extends BaseRestHandler { + private static final String ACTION_NAME = "threatintel_datasource_put"; + private final ClusterSettings clusterSettings; + + public RestPutDatasourceHandler(final ClusterSettings clusterSettings) { + this.clusterSettings = clusterSettings; + } + + @Override + public String getName() { + return ACTION_NAME; + } + + @Override + protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final PutDatasourceRequest putDatasourceRequest = new PutDatasourceRequest(request.param("name")); + if (request.hasContentOrSourceParam()) { + try (XContentParser parser = request.contentOrSourceParamParser()) { + PutDatasourceRequest.PARSER.parse(parser, putDatasourceRequest, null); + } + } + if (putDatasourceRequest.getEndpoint() == null) { + putDatasourceRequest.setEndpoint(clusterSettings.get(ThreatIntelSettings.DATASOURCE_ENDPOINT)); + } + if (putDatasourceRequest.getUpdateInterval() == null) { + putDatasourceRequest.setUpdateInterval(TimeValue.timeValueDays(clusterSettings.get(ThreatIntelSettings.DATASOURCE_UPDATE_INTERVAL))); + } + return channel -> client.executeLocally(PutDatasourceAction.INSTANCE, putDatasourceRequest, new RestToXContentListener<>(channel)); + } + + @Override + public List routes() { + String path = String.join("/", "/_plugins/_security_analytics", "threatintel/datasource/{name}"); + return List.of(new Route(PUT, path)); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java new file mode 100644 index 000000000..3f755670f --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java @@ 
-0,0 +1,50 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.opensearch.rest.RestRequest.Method.PUT; + +/** + * Rest handler for threat intel datasource update request + */ +public class RestUpdateDatasourceHandler extends BaseRestHandler { + private static final String ACTION_NAME = "threatintel_datasource_update"; + + @Override + public String getName() { + return ACTION_NAME; + } + + @Override + protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final UpdateDatasourceRequest updateDatasourceRequest = new UpdateDatasourceRequest(request.param("name")); + if (request.hasContentOrSourceParam()) { + try (XContentParser parser = request.contentOrSourceParamParser()) { + UpdateDatasourceRequest.PARSER.parse(parser, updateDatasourceRequest, null); + } + } + return channel -> client.executeLocally( + UpdateDatasourceAction.INSTANCE, + updateDatasourceRequest, + new RestToXContentListener<>(channel) + ); + } + + @Override + public List routes() { + String path = String.join("/", "/_plugins/_security_analytics", "threatintel/datasource/{name}/_settings"); + return List.of(new Route(PUT, path)); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceAction.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceAction.java new file mode 100644 index 000000000..ddf2d42e6 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceAction.java @@ -0,0 +1,27 @@ +/* + * 
Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * threat intel datasource update action + */ +public class UpdateDatasourceAction extends ActionType { + /** + * Update datasource action instance + */ + public static final UpdateDatasourceAction INSTANCE = new UpdateDatasourceAction(); + /** + * Update datasource action name + */ + public static final String NAME = "cluster:admin/security_analytics/datasource/update"; + + private UpdateDatasourceAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java new file mode 100644 index 000000000..7d70f45aa --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java @@ -0,0 +1,190 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; +import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; + +import java.io.IOException; +import 
java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Locale; + +/** + * threat intel datasource update request + */ +public class UpdateDatasourceRequest extends ActionRequest { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + public static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); + public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); + private static final int MAX_DATASOURCE_NAME_BYTES = 255; + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + + /** + * @param name the datasource name + * @return the datasource name + */ + private String name; + + /** + * @param endpoint url to a manifest file for a datasource + * @return url to a manifest file for a datasource + */ + private String endpoint; + + /** + * @param updateInterval update interval of a datasource + * @return update interval of a datasource + */ + private TimeValue updateInterval; + + /** + * Parser of a datasource + */ + public static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>("update_datasource"); + PARSER.declareString((request, val) -> request.setEndpoint(val), ENDPOINT_FIELD); + PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); + } + + public String getName() { + return name; + } + public String getEndpoint() { + return endpoint; + } + private void setEndpoint(String endpoint) { + this.endpoint = endpoint; + } + + public TimeValue getUpdateInterval() { + return updateInterval; + } + + private void setUpdateInterval(TimeValue updateInterval){ + this.updateInterval = updateInterval; + } + + /** + * Constructor + * @param name name of a datasource + */ + public UpdateDatasourceRequest(final String name) { + this.name = name; + } + + /** + * Constructor + * @param in the stream input + * @throws IOException 
IOException + */ + public UpdateDatasourceRequest(final StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + this.endpoint = in.readOptionalString(); + this.updateInterval = in.readOptionalTimeValue(); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + out.writeOptionalString(endpoint); + out.writeOptionalTimeValue(updateInterval); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = new ActionRequestValidationException(); + if (VALIDATOR.validateDatasourceName(name).isEmpty() == false) { + errors.addValidationError("no such datasource exist"); + } + if (endpoint == null && updateInterval == null) { + errors.addValidationError("no values to update"); + } + + validateEndpoint(errors); + validateUpdateInterval(errors); + + return errors.validationErrors().isEmpty() ? null : errors; + } + + /** + * Conduct following validation on endpoint + * 1. endpoint format complies with RFC-2396 + * 2. validate manifest file from the endpoint + * + * @param errors the errors to add error messages + */ + private void validateEndpoint(final ActionRequestValidationException errors) { + if (endpoint == null) { + return; + } + + try { + URL url = new URL(endpoint); + url.toURI(); // Validate URL complies with RFC-2396 + validateManifestFile(url, errors); + } catch (MalformedURLException | URISyntaxException e) { + log.info("Invalid URL[{}] is provided", endpoint, e); + errors.addValidationError("Invalid URL format is provided"); + } + } + + /** + * Conduct following validation on url + * 1. can read manifest file from the endpoint + * 2. 
the url in the manifest file complies with RFC-2396 + * + * @param url the url to validate + * @param errors the errors to add error messages + */ + private void validateManifestFile(final URL url, final ActionRequestValidationException errors) { + DatasourceManifest manifest; + try { + manifest = DatasourceManifest.Builder.build(url); + } catch (Exception e) { + log.info("Error occurred while reading a file from {}", url, e); + errors.addValidationError(String.format(Locale.ROOT, "Error occurred while reading a file from %s: %s", url, e.getMessage())); + return; + } + + try { + new URL(manifest.getUrl()).toURI(); // Validate URL complies with RFC-2396 + } catch (MalformedURLException | URISyntaxException e) { + log.info("Invalid URL[{}] is provided for url field in the manifest file", manifest.getUrl(), e); + errors.addValidationError("Invalid URL format is provided for url field in the manifest file"); + } + } + + /** + * Validate updateInterval is equal or larger than 1 + * + * @param errors the errors to add error messages + */ + private void validateUpdateInterval(final ActionRequestValidationException errors) { + if (updateInterval == null) { + return; + } + + if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) { + errors.addValidationError("Update interval should be equal to or larger than 1 day"); + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java new file mode 100644 index 000000000..11d99e41c --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java @@ -0,0 +1,179 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.OpenSearchStatusException; +import 
org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; +import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; +import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceTask; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceUpdateService; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.Locale; + +/** + * Transport action to update datasource + */ +public class UpdateDatasourceTransportAction extends HandledTransportAction { + private static final long LOCK_DURATION_IN_SECONDS = 300l; + private final ThreatIntelLockService lockService; + private final DatasourceDao datasourceDao; + private final DatasourceUpdateService datasourceUpdateService; + private final ThreadPool threadPool; + + /** + * Constructor + * + * @param transportService the transport service + * @param actionFilters the action filters + * @param lockService the lock service + * @param datasourceDao the datasource facade + * @param datasourceUpdateService the datasource update service + */ + @Inject + public UpdateDatasourceTransportAction( + final TransportService transportService, + final ActionFilters 
actionFilters, + final ThreatIntelLockService lockService, + final DatasourceDao datasourceDao, + final DatasourceUpdateService datasourceUpdateService, + final ThreadPool threadPool + ) { + super(UpdateDatasourceAction.NAME, transportService, actionFilters, UpdateDatasourceRequest::new); + this.lockService = lockService; + this.datasourceUpdateService = datasourceUpdateService; + this.datasourceDao = datasourceDao; + this.threadPool = threadPool; + } + + /** + * Get a lock and update datasource + * + * @param task the task + * @param request the request + * @param listener the listener + */ + @Override + protected void doExecute(final Task task, final UpdateDatasourceRequest request, final ActionListener listener) { + lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { + if (lock == null) { + listener.onFailure( + new OpenSearchStatusException("Another processor is holding a lock on the resource. Try again later", RestStatus.BAD_REQUEST) + ); + return; + } + try { + // TODO: makes every sub-methods as async call to avoid using a thread in generic pool + threadPool.generic().submit(() -> { + try { + Datasource datasource = datasourceDao.getDatasource(request.getName()); + if (datasource == null) { + throw new ResourceNotFoundException("no such datasource exist"); + } + if (DatasourceState.AVAILABLE.equals(datasource.getState()) == false) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "data source is not in an [%s] state", DatasourceState.AVAILABLE) + ); + } + validate(request, datasource); + updateIfChanged(request, datasource); + lockService.releaseLock(lock); + listener.onResponse(new AcknowledgedResponse(true)); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }, exception -> listener.onFailure(exception))); + } + + private void updateIfChanged(final 
UpdateDatasourceRequest request, final Datasource datasource) { + boolean isChanged = false; + if (isEndpointChanged(request, datasource)) { + datasource.setEndpoint(request.getEndpoint()); + isChanged = true; + } + if (isUpdateIntervalChanged(request)) { + datasource.setSchedule(new IntervalSchedule(Instant.now(), (int) request.getUpdateInterval().getDays(), ChronoUnit.DAYS)); + datasource.setTask(DatasourceTask.ALL); + isChanged = true; + } + + if (isChanged) { + datasourceDao.updateDatasource(datasource); + } + } + + /** + * Additional validation based on an existing datasource + * + * Basic validation is done in UpdateDatasourceRequest#validate + * In this method we do additional validation based on an existing datasource + * + * 1. Check the compatibility of new fields and old fields + * 2. Check the updateInterval is less than validForInDays in datasource + * + * This method throws exception if one of validation fails. + * + * @param request the update request + * @param datasource the existing datasource + * @throws IOException the exception + */ + private void validate(final UpdateDatasourceRequest request, final Datasource datasource) throws IOException { + validateFieldsCompatibility(request, datasource); + } + + private void validateFieldsCompatibility(final UpdateDatasourceRequest request, final Datasource datasource) throws IOException { + if (isEndpointChanged(request, datasource) == false) { + return; + } + + List fields = datasourceUpdateService.getHeaderFields(request.getEndpoint()); + if (datasource.isCompatible(fields) == false) { +// throw new IncompatibleDatasourceException( +// "new fields [{}] does not contain all old fields [{}]", +// fields.toString(), +// datasource.getDatabase().getFields().toString() +// ); + throw new OpenSearchStatusException("new fields does not contain all old fields", RestStatus.BAD_REQUEST); + } + } + + private boolean isEndpointChanged(final UpdateDatasourceRequest request, final Datasource datasource) { + return 
request.getEndpoint() != null && request.getEndpoint().equals(datasource.getEndpoint()) == false; + } + + /** + * Update interval is changed as long as user provide one because + * start time will get updated even if the update interval is same as current one. + * + * @param request the update datasource request + * @return true if update interval is changed, and false otherwise + */ + private boolean isUpdateIntervalChanged(final UpdateDatasourceRequest request) { + return request.getUpdateInterval() != null; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java new file mode 100644 index 000000000..1417c8a36 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java @@ -0,0 +1,168 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.threatIntel.common; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; +import java.nio.CharBuffer; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Locale; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.SpecialPermission; +import org.opensearch.Version; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.ParseField; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import 
org.opensearch.securityanalytics.util.SecurityAnalyticsException; + +/** + * Threat intel datasource manifest file object + * + * Manifest file is stored in an external endpoint. OpenSearch read the file and store values it in this object. + */ +public class DatasourceManifest { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + private static final ParseField URL_FIELD = new ParseField("url"); //url for csv threat intel feed + private static final ParseField DB_NAME_FIELD = new ParseField("db_name"); // name of the db (csv file for now) + private static final ParseField SHA256_HASH_FIELD = new ParseField("sha256_hash"); //not using for now + private static final ParseField ORGANIZATION_FIELD = new ParseField("organization"); //not using for now + private static final ParseField DESCRIPTION_FIELD = new ParseField("description"); //not using for now + private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at_in_epoch_milli"); //not using for now + + /** + * @param url URL of a ZIP file containing a database + * @return URL of a ZIP file containing a database + */ + private String url; + + /** + * @param dbName A database file name inside the ZIP file + * @return A database file name inside the ZIP file + */ + private String dbName; + /** + * @param sha256Hash SHA256 hash value of a database file + * @return SHA256 hash value of a database file + */ + private String sha256Hash; + + /** + * @param organization A database organization name + * @return A database organization name + */ + private String organization; + /** + * @param description A description of the database + * @return A description of a database + */ + private String description; + /** + * @param updatedAt A date when the database was updated + * @return A date when the database was updated + */ + private Long updatedAt; + + public String getUrl() { + return this.url; + } + public String getDbName() { + return dbName; + } + + public String 
getOrganization() { + return organization; + } + + public String getSha256Hash() { + return sha256Hash; + } + + public String getDescription() { + return description; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public DatasourceManifest(final String url, final String dbName) { + this.url = url; + this.dbName = dbName; + } + + /** + * Datasource manifest parser + */ + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_manifest", + true, + args -> { + String url = (String) args[0]; + String dbName = (String) args[1]; + return new DatasourceManifest(url, dbName); + } + ); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), URL_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DB_NAME_FIELD); + } + + /** + * Datasource manifest builder + */ + public static class Builder { + private static final int MANIFEST_FILE_MAX_BYTES = 1024 * 8; + + /** + * Build DatasourceManifest from a given url + * + * @param url url to downloads a manifest file + * @return DatasourceManifest representing the manifest file + */ + @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") // change permissions + public static DatasourceManifest build(final URL url) { + SpecialPermission.check(); + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + URLConnection connection = url.openConnection(); + return internalBuild(connection); + } catch (IOException e) { + log.error("Runtime exception connecting to the manifest file", e); + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + }); + } + + @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") + protected static DatasourceManifest internalBuild(final URLConnection connection) throws IOException { + connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + 
InputStreamReader inputStreamReader = new InputStreamReader(connection.getInputStream()); + try (BufferedReader reader = new BufferedReader(inputStreamReader)) { + CharBuffer charBuffer = CharBuffer.allocate(MANIFEST_FILE_MAX_BYTES); + reader.read(charBuffer); + charBuffer.flip(); + XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + charBuffer.toString() + ); + return PARSER.parse(parser, null); + } + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java new file mode 100644 index 000000000..a516b1d34 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java @@ -0,0 +1,37 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +/** + * Threat intel datasource state + * + * When data source is created, it starts with CREATING state. Once the first threat intel feed is generated, the state changes to AVAILABLE. + * Only when the first threat intel feed generation failed, the state changes to CREATE_FAILED. + * Subsequent threat intel feed failure won't change data source state from AVAILABLE to CREATE_FAILED. + * When delete request is received, the data source state changes to DELETING. 
+ * + * State changed from left to right for the entire lifecycle of a datasource + * (CREATING) to (CREATE_FAILED or AVAILABLE) to (DELETING) + * + */ +public enum DatasourceState { + /** + * Data source is being created + */ + CREATING, + /** + * Data source is ready to be used + */ + AVAILABLE, + /** + * Data source creation failed + */ + CREATE_FAILED, + /** + * Data source is being deleted + */ + DELETING +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java new file mode 100644 index 000000000..13276975c --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.core.common.Strings; + +/** + * Parameter validator for TIF APIs + */ +public class ParameterValidator { + private static final int MAX_DATASOURCE_NAME_BYTES = 127; + + /** + * Validate datasource name and return list of error messages + * + * @param datasourceName datasource name + * @return Error messages. Empty list if there is no violation. 
+ */ + public List validateDatasourceName(final String datasourceName) { + List errorMsgs = new ArrayList<>(); + if (StringUtils.isBlank(datasourceName)) { + errorMsgs.add("datasource name must not be empty"); + return errorMsgs; + } + + if (!Strings.validFileName(datasourceName)) { + errorMsgs.add( + String.format(Locale.ROOT, "datasource name must not contain the following characters %s", Strings.INVALID_FILENAME_CHARS) + ); + } + if (datasourceName.contains("#")) { + errorMsgs.add("datasource name must not contain '#'"); + } + if (datasourceName.contains(":")) { + errorMsgs.add("datasource name must not contain ':'"); + } + if (datasourceName.charAt(0) == '_' || datasourceName.charAt(0) == '-' || datasourceName.charAt(0) == '+') { + errorMsgs.add("datasource name must not start with '_', '-', or '+'"); + } + int byteCount = datasourceName.getBytes(StandardCharsets.UTF_8).length; + if (byteCount > MAX_DATASOURCE_NAME_BYTES) { + errorMsgs.add(String.format(Locale.ROOT, "datasource name is too long, (%d > %d)", byteCount, MAX_DATASOURCE_NAME_BYTES)); + } + if (datasourceName.equals(".") || datasourceName.equals("..")) { + errorMsgs.add("datasource name must not be '.' 
or '..'"); + } + return errorMsgs; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/StashedThreadContext.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/StashedThreadContext.java new file mode 100644 index 000000000..32f4e6d40 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/StashedThreadContext.java @@ -0,0 +1,42 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +import java.util.function.Supplier; + +import org.opensearch.client.Client; +import org.opensearch.common.util.concurrent.ThreadContext; + +/** + * Helper class to run code with stashed thread context + * + * Code need to be run with stashed thread context if it interacts with system index + * when security plugin is enabled. + */ +public class StashedThreadContext { + /** + * Set the thread context to default, this is needed to allow actions on model system index + * when security plugin is enabled + * @param function runnable that needs to be executed after thread context has been stashed, accepts and returns nothing + */ + public static void run(final Client client, final Runnable function) { + try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + function.run(); + } + } + + /** + * Set the thread context to default, this is needed to allow actions on model system index + * when security plugin is enabled + * @param function supplier function that needs to be executed after thread context has been stashed, return object + */ + public static T run(final Client client, final Supplier function) { + try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + return function.get(); + } + } +} + diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelExecutor.java 
b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelExecutor.java new file mode 100644 index 000000000..b3817786c --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelExecutor.java @@ -0,0 +1,45 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +import java.util.concurrent.ExecutorService; + +import org.opensearch.common.settings.Settings; +import org.opensearch.threadpool.ExecutorBuilder; +import org.opensearch.threadpool.FixedExecutorBuilder; +import org.opensearch.threadpool.ThreadPool; + +/** + * Provide a list of static methods related with executors for threat intel + */ +public class ThreatIntelExecutor { + private static final String THREAD_POOL_NAME = "plugin_sap_datasource_update"; + private final ThreadPool threadPool; + + public ThreatIntelExecutor(final ThreadPool threadPool) { + this.threadPool = threadPool; + } + + /** + * We use fixed thread count of 1 for updating datasource as updating datasource is running background + * once a day at most and no need to expedite the task. 
+ * + * @param settings the settings + * @return the executor builder + */ + public static ExecutorBuilder executorBuilder(final Settings settings) { + return new FixedExecutorBuilder(settings, THREAD_POOL_NAME, 1, 1000, THREAD_POOL_NAME, false); + } + + /** + * Return an executor service for datasource update task + * + * @return the executor service + */ + public ExecutorService forDatasourceUpdate() { + return threadPool.executor(THREAD_POOL_NAME); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelLockService.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelLockService.java new file mode 100644 index 000000000..8847d681e --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelLockService.java @@ -0,0 +1,167 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceExtension.JOB_INDEX_NAME; + +import java.time.Instant; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.OpenSearchException; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.core.action.ActionListener; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.utils.LockService; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +/** + * A wrapper of job scheduler's lock service for datasource + */ +public class ThreatIntelLockService { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + public static final long 
LOCK_DURATION_IN_SECONDS = 300l; + public static final long RENEW_AFTER_IN_SECONDS = 120l; + + private final ClusterService clusterService; + private final LockService lockService; + + + /** + * Constructor + * + * @param clusterService the cluster service + * @param client the client + */ + public ThreatIntelLockService(final ClusterService clusterService, final Client client) { + this.clusterService = clusterService; + this.lockService = new LockService(client, clusterService); + } + + /** + * Wrapper method of LockService#acquireLockWithId + * + * Datasource uses its name as doc id in job scheduler. Therefore, we can use datasource name to acquire + * a lock on a datasource. + * + * @param datasourceName datasourceName to acquire lock on + * @param lockDurationSeconds the lock duration in seconds + * @param listener the listener + */ + public void acquireLock(final String datasourceName, final Long lockDurationSeconds, final ActionListener listener) { + lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, datasourceName, listener); + } + + /** + * Synchronous method of #acquireLock + * + * @param datasourceName datasourceName to acquire lock on + * @param lockDurationSeconds the lock duration in seconds + * @return lock model + */ + public Optional acquireLock(final String datasourceName, final Long lockDurationSeconds) { + AtomicReference lockReference = new AtomicReference(); + CountDownLatch countDownLatch = new CountDownLatch(1); + lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, datasourceName, new ActionListener<>() { + @Override + public void onResponse(final LockModel lockModel) { + lockReference.set(lockModel); + countDownLatch.countDown(); + } + + @Override + public void onFailure(final Exception e) { + lockReference.set(null); + countDownLatch.countDown(); + log.error("aquiring lock failed", e); + } + }); + + try { + 
countDownLatch.await(clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT).getSeconds(), TimeUnit.SECONDS); + return Optional.ofNullable(lockReference.get()); + } catch (InterruptedException e) { + log.error("Waiting for the count down latch failed", e); + return Optional.empty(); + } + } + + /** + * Wrapper method of LockService#release + * + * @param lockModel the lock model + */ + public void releaseLock(final LockModel lockModel) { + lockService.release( + lockModel, + ActionListener.wrap(released -> {}, exception -> log.error("Failed to release the lock", exception)) + ); + } + + /** + * Synchronous method of LockService#renewLock + * + * @param lockModel lock to renew + * @return renewed lock if renew succeed and null otherwise + */ + public LockModel renewLock(final LockModel lockModel) { + AtomicReference lockReference = new AtomicReference(); + CountDownLatch countDownLatch = new CountDownLatch(1); + lockService.renewLock(lockModel, new ActionListener<>() { + @Override + public void onResponse(final LockModel lockModel) { + lockReference.set(lockModel); + countDownLatch.countDown(); + } + + @Override + public void onFailure(final Exception e) { + log.error("failed to renew lock", e); + lockReference.set(null); + countDownLatch.countDown(); + } + }); + + try { + countDownLatch.await(clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT).getSeconds(), TimeUnit.SECONDS); + return lockReference.get(); + } catch (InterruptedException e) { + log.error("Interrupted exception", e); + return null; + } + } + + /** + * Return a runnable which can renew the given lock model + * + * The runnable renews the lock and store the renewed lock in the AtomicReference. + * It only renews the lock when it passed {@code RENEW_AFTER_IN_SECONDS} since + * the last time the lock was renewed to avoid resource abuse. 
+ * + * @param lockModel lock model to renew + * @return runnable which can renew the given lock for every call + */ + public Runnable getRenewLockRunnable(final AtomicReference lockModel) { + return () -> { + LockModel preLock = lockModel.get(); + if (Instant.now().isBefore(preLock.getLockTime().plusSeconds(RENEW_AFTER_IN_SECONDS))) { + return; + } + lockModel.set(renewLock(lockModel.get())); + if (lockModel.get() == null) { + log.error("Exception: failed to renew a lock"); + new OpenSearchException("failed to renew a lock [{}]", preLock); + } + }; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java new file mode 100644 index 000000000..1d649e0b6 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java @@ -0,0 +1,103 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +/** + * Settings for threat intel datasource operations + */ +public class ThreatIntelSettings { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + + /** + * Default endpoint to be used in threat intel feed datasource creation API + */ + public static final Setting DATASOURCE_ENDPOINT = Setting.simpleString( + "plugins.security_analytics.threatintel.datasource.endpoint", + "https://geoip.maps.opensearch.org/v1/geolite2-city/manifest.json", //TODO fix this endpoint + new 
DatasourceEndpointValidator(), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Default update interval to be used in threat intel datasource creation API + */ + public static final Setting DATASOURCE_UPDATE_INTERVAL = Setting.longSetting( + "plugins.security_analytics.threatintel.datasource.update_interval_in_days", + 3l, + 1l, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Bulk size for indexing threat intel feed data + */ + public static final Setting BATCH_SIZE = Setting.intSetting( + "plugins.security_analytics.threatintel.datasource.batch_size", + 10000, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Timeout value for threat intel processor + */ + public static final Setting THREAT_INTEL_TIMEOUT = Setting.timeSetting( + "plugins.security_analytics.threat_intel_timeout", + TimeValue.timeValueSeconds(30), + TimeValue.timeValueSeconds(1), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Max size for threat intel feed cache + */ + public static final Setting CACHE_SIZE = Setting.longSetting( + "plugins.security_analytics.threatintel.processor.cache_size", + 1000, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Return all settings of threat intel feature + * @return a list of all settings for threat intel feature + */ + public static final List> settings() { + return List.of(DATASOURCE_ENDPOINT, DATASOURCE_UPDATE_INTERVAL, BATCH_SIZE, THREAT_INTEL_TIMEOUT); + } + + /** + * Visible for testing + */ + protected static class DatasourceEndpointValidator implements Setting.Validator { + @Override + public void validate(final String value) { + try { + new URL(value).toURI(); + } catch (MalformedURLException | URISyntaxException e) { + log.error("Invalid URL format is provided", e); + throw new IllegalArgumentException("Invalid URL format is provided"); + } + } + } +} diff --git 
a/src/main/java/org/opensearch/securityanalytics/threatintel/dao/DatasourceDao.java b/src/main/java/org/opensearch/securityanalytics/threatintel/dao/DatasourceDao.java new file mode 100644 index 000000000..9d6a15241 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/dao/DatasourceDao.java @@ -0,0 +1,380 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.dao; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchException; +import org.opensearch.ResourceAlreadyExistsException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.StepListener; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.delete.DeleteResponse; +import org.opensearch.action.get.GetRequest; +import org.opensearch.action.get.GetResponse; +import org.opensearch.action.get.MultiGetItemResponse; +import org.opensearch.action.get.MultiGetResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.routing.Preference; +import org.opensearch.cluster.service.ClusterService; +import 
org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelSettings; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceExtension; +import org.opensearch.securityanalytics.threatIntel.common.StashedThreadContext; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; + +/** + * Data access object for datasource + */ +public class DatasourceDao { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + private static final Integer MAX_SIZE = 1000; + private final Client client; + private final ClusterService clusterService; + private final ClusterSettings clusterSettings; + + public DatasourceDao(final Client client, final ClusterService clusterService) { + this.client = client; + this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); + } + + /** + * Create datasource index + * + * @param stepListener setup listener + */ + public void createIndexIfNotExists(final StepListener stepListener) { + if (clusterService.state().metadata().hasIndex(DatasourceExtension.JOB_INDEX_NAME) == true) { + stepListener.onResponse(null); + return; + 
} + final CreateIndexRequest createIndexRequest = new CreateIndexRequest(DatasourceExtension.JOB_INDEX_NAME).mapping(getIndexMapping()) + .settings(DatasourceExtension.INDEX_SETTING); + StashedThreadContext.run(client, () -> client.admin().indices().create(createIndexRequest, new ActionListener<>() { + @Override + public void onResponse(final CreateIndexResponse createIndexResponse) { + stepListener.onResponse(null); + } + + @Override + public void onFailure(final Exception e) { + if (e instanceof ResourceAlreadyExistsException) { + log.info("index[{}] already exist", DatasourceExtension.JOB_INDEX_NAME); + stepListener.onResponse(null); + return; + } + stepListener.onFailure(e); + } + })); + } + + private String getIndexMapping() { + try { + try (InputStream is = DatasourceDao.class.getResourceAsStream("/mappings/threatintel_datasource.json")) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + return reader.lines().map(String::trim).collect(Collectors.joining()); + } + } + } catch (IOException e) { + log.error("Runtime exception", e); + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + } + + /** + * Update datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param datasource the datasource + * @return index response + */ + public IndexResponse updateDatasource(final Datasource datasource) { + datasource.setLastUpdateTime(Instant.now()); + return StashedThreadContext.run(client, () -> { + try { + return client.prepareIndex(DatasourceExtension.JOB_INDEX_NAME) + .setId(datasource.getName()) + .setOpType(DocWriteRequest.OpType.INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .setSource(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .execute() + .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)); + } catch (IOException e) { + throw new 
SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + }); + } + + /** + * Update datasources in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param datasources the datasources + * @param listener action listener + */ + public void updateDatasource(final List datasources, final ActionListener listener) { + BulkRequest bulkRequest = new BulkRequest(); + datasources.stream().map(datasource -> { + datasource.setLastUpdateTime(Instant.now()); + return datasource; + }).map(this::toIndexRequest).forEach(indexRequest -> bulkRequest.add(indexRequest)); + StashedThreadContext.run(client, () -> client.bulk(bulkRequest, listener)); + } + + private IndexRequest toIndexRequest(Datasource datasource) { + try { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(DatasourceExtension.JOB_INDEX_NAME); + indexRequest.id(datasource.getName()); + indexRequest.opType(DocWriteRequest.OpType.INDEX); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + indexRequest.source(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); + return indexRequest; + } catch (IOException e) { + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + } + + /** + * Put datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * + * @param datasource the datasource + * @param listener the listener + */ + public void putDatasource(final Datasource datasource, final ActionListener listener) { + datasource.setLastUpdateTime(Instant.now()); + StashedThreadContext.run(client, () -> { + try { + client.prepareIndex(DatasourceExtension.JOB_INDEX_NAME) + .setId(datasource.getName()) + .setOpType(DocWriteRequest.OpType.CREATE) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .setSource(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .execute(listener); + } catch (IOException e) { + throw new 
SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + }); + } + + /** + * Delete datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * + * @param datasource the datasource + * + */ + public void deleteDatasource(final Datasource datasource) { + DeleteResponse response = client.prepareDelete() + .setIndex(DatasourceExtension.JOB_INDEX_NAME) + .setId(datasource.getName()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute() + .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)); + + if (response.status().equals(RestStatus.OK)) { + log.info("deleted datasource[{}] successfully", datasource.getName()); + } else if (response.status().equals(RestStatus.NOT_FOUND)) { + throw new ResourceNotFoundException("datasource[{}] does not exist", datasource.getName()); + } else { + throw new OpenSearchException("failed to delete datasource[{}] with status[{}]", datasource.getName(), response.status()); + } + } + + /** + * Get datasource from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param name the name of a datasource + * @return datasource + * @throws IOException exception + */ + public Datasource getDatasource(final String name) throws IOException { + GetRequest request = new GetRequest(DatasourceExtension.JOB_INDEX_NAME, name); + GetResponse response; + try { + response = StashedThreadContext.run(client, () -> client.get(request).actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT))); + if (response.isExists() == false) { + log.error("Datasource[{}] does not exist in an index[{}]", name, DatasourceExtension.JOB_INDEX_NAME); + return null; + } + } catch (IndexNotFoundException e) { + log.error("Index[{}] is not found", DatasourceExtension.JOB_INDEX_NAME); + return null; + } + + XContentParser parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getSourceAsBytesRef() + ); + return 
Datasource.PARSER.parse(parser, null); + } + + /** + * Get datasource from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param name the name of a datasource + * @param actionListener the action listener + */ + public void getDatasource(final String name, final ActionListener actionListener) { + GetRequest request = new GetRequest(DatasourceExtension.JOB_INDEX_NAME, name); + StashedThreadContext.run(client, () -> client.get(request, new ActionListener<>() { + @Override + public void onResponse(final GetResponse response) { + if (response.isExists() == false) { + actionListener.onResponse(null); + return; + } + + try { + XContentParser parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getSourceAsBytesRef() + ); + actionListener.onResponse(Datasource.PARSER.parse(parser, null)); + } catch (IOException e) { + actionListener.onFailure(e); + } + } + + @Override + public void onFailure(final Exception e) { + actionListener.onFailure(e); + } + })); + } + + /** + * Get datasources from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param names the array of datasource names + * @param actionListener the action listener + */ + public void getDatasources(final String[] names, final ActionListener> actionListener) { + StashedThreadContext.run( + client, + () -> client.prepareMultiGet() + .add(DatasourceExtension.JOB_INDEX_NAME, names) + .execute(createGetDataSourceQueryActionLister(MultiGetResponse.class, actionListener)) + ); + } + + /** + * Get all datasources up to {@code MAX_SIZE} from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param actionListener the action listener + */ + public void getAllDatasources(final ActionListener> actionListener) { + StashedThreadContext.run( + client, + () -> client.prepareSearch(DatasourceExtension.JOB_INDEX_NAME) + .setQuery(QueryBuilders.matchAllQuery()) + .setPreference(Preference.PRIMARY.type()) + .setSize(MAX_SIZE) + 
.execute(createGetDataSourceQueryActionLister(SearchResponse.class, actionListener)) + ); + } + + /** + * Get all datasources up to {@code MAX_SIZE} from an index {@code DatasourceExtension.JOB_INDEX_NAME} + */ + public List getAllDatasources() { + SearchResponse response = StashedThreadContext.run( + client, + () -> client.prepareSearch(DatasourceExtension.JOB_INDEX_NAME) + .setQuery(QueryBuilders.matchAllQuery()) + .setPreference(Preference.PRIMARY.type()) + .setSize(MAX_SIZE) + .execute() + .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) + ); + + List bytesReferences = toBytesReferences(response); + return bytesReferences.stream().map(bytesRef -> toDatasource(bytesRef)).collect(Collectors.toList()); + } + + private ActionListener createGetDataSourceQueryActionLister( + final Class response, + final ActionListener> actionListener + ) { + return new ActionListener() { + @Override + public void onResponse(final T response) { + try { + List bytesReferences = toBytesReferences(response); + List datasources = bytesReferences.stream() + .map(bytesRef -> toDatasource(bytesRef)) + .collect(Collectors.toList()); + actionListener.onResponse(datasources); + } catch (Exception e) { + actionListener.onFailure(e); + } + } + + @Override + public void onFailure(final Exception e) { + actionListener.onFailure(e); + } + }; + } + + private List toBytesReferences(final Object response) { + if (response instanceof SearchResponse) { + SearchResponse searchResponse = (SearchResponse) response; + return Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getSourceRef).collect(Collectors.toList()); + } else if (response instanceof MultiGetResponse) { + MultiGetResponse multiGetResponse = (MultiGetResponse) response; + return Arrays.stream(multiGetResponse.getResponses()) + .map(MultiGetItemResponse::getResponse) + .filter(Objects::nonNull) + .filter(GetResponse::isExists) + .map(GetResponse::getSourceAsBytesRef) + .collect(Collectors.toList()); + } 
else { + throw new OpenSearchException("No supported instance type[{}] is provided", response.getClass()); + } + } + + private Datasource toDatasource(final BytesReference bytesReference) { + try { + XContentParser parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + bytesReference + ); + return Datasource.PARSER.parse(parser, null); + } catch (IOException e) { + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/Datasource.java b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/Datasource.java new file mode 100644 index 000000000..00ff1d419 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/Datasource.java @@ -0,0 +1,819 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.jobscheduler.spi.schedule.Schedule; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.*; + +import static org.opensearch.common.time.DateUtils.toInstant; + +import org.opensearch.securityanalytics.threatIntel.action.PutDatasourceRequest; +import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; +import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; +import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; + +public class Datasource implements Writeable, ScheduledJobParameter { + /** + * Prefix of indices having threatIntel data + */ + public static final String THREAT_INTEL_DATA_INDEX_NAME_PREFIX = "opensearch-sap-threatintel"; + + /** + * Default fields for job scheduling + */ + private static final ParseField NAME_FIELD = new ParseField("name"); + private static final ParseField ENABLED_FIELD = new ParseField("update_enabled"); + private static final ParseField LAST_UPDATE_TIME_FIELD = new ParseField("last_update_time"); + private static final ParseField LAST_UPDATE_TIME_FIELD_READABLE = new ParseField("last_update_time_field"); + public static final ParseField SCHEDULE_FIELD = new ParseField("schedule"); + private static final ParseField ENABLED_TIME_FIELD = new 
ParseField("enabled_time"); + private static final ParseField ENABLED_TIME_FIELD_READABLE = new ParseField("enabled_time_field"); + + // need? + private static final ParseField TASK_FIELD = new ParseField("task"); + public static final String LOCK_DURATION_SECONDS = "lock_duration_seconds"; + + /** + * Additional fields for datasource + */ + private static final ParseField FEED_NAME = new ParseField("feed_name"); + private static final ParseField FEED_FORMAT = new ParseField("feed_format"); + private static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); + private static final ParseField DESCRIPTION = new ParseField("description"); + private static final ParseField ORGANIZATION = new ParseField("organization"); + private static final ParseField CONTAINED_IOCS_FIELD = new ParseField("contained_iocs_field"); + private static final ParseField STATE_FIELD = new ParseField("state"); + private static final ParseField CURRENT_INDEX_FIELD = new ParseField("current_index"); + private static final ParseField INDICES_FIELD = new ParseField("indices"); + private static final ParseField DATABASE_FIELD = new ParseField("database"); + private static final ParseField UPDATE_STATS_FIELD = new ParseField("update_stats"); + + + /** + * Default variables for job scheduling + */ + + /** + * @param name name of a datasource + * @return name of a datasource + */ + private String name; + + /** + * @param lastUpdateTime Last update time of a datasource + * @return Last update time of a datasource + */ + private Instant lastUpdateTime; + /** + * @param enabledTime Last time when a scheduling is enabled for a threat intel feed data update + * @return Last time when a scheduling is enabled for the job scheduler + */ + private Instant enabledTime; + /** + * @param isEnabled Indicate if threat intel feed data update is scheduled or not + * @return Indicate if scheduling is enabled or not + */ + private boolean isEnabled; + /** + * @param schedule Schedule that system uses + * 
@return Schedule that system uses + */ + private IntervalSchedule schedule; + + /** + * @param task Task that {@link DatasourceRunner} will execute + * @return Task that {@link DatasourceRunner} will execute + */ + private DatasourceTask task; + + + /** + * Additional variables for datasource + */ + + /** + * @param feedFormat format of the feed (ip, dns...) + * @return the type of feed ingested + */ + private String feedFormat; + + /** + * @param endpoint URL of a manifest file + * @return URL of a manifest file + */ + private String endpoint; + + /** + * @param feedName name of the threat intel feed + * @return name of the threat intel feed + */ + private String feedName; + + /** + * @param description description of the threat intel feed + * @return description of the threat intel feed + */ + private String description; + + /** + * @param organization organization of the threat intel feed + * @return organization of the threat intel feed + */ + private String organization; + + /** + * @param contained_iocs_field list of iocs contained in a given feed + * @return list of iocs contained in a given feed + */ + private List contained_iocs_field; + + /** + * @param state State of a datasource + * @return State of a datasource + */ + private DatasourceState state; + + /** + * @param currentIndex the current index name having threat intel feed data + * @return the current index name having threat intel feed data + */ + private String currentIndex; + /** + * @param indices A list of indices having threat intel feed data including currentIndex + * @return A list of indices having threat intel feed data including currentIndex + */ + private List indices; + /** + * @param database threat intel feed database information + * @return threat intel feed database information + */ + private Database database; + /** + * @param updateStats threat intel feed database update statistics + * @return threat intel feed database update statistics + */ + private UpdateStats updateStats; + 
+ public DatasourceTask getTask() { + return task; + } + + public void setEndpoint(String endpoint) { + this.endpoint = endpoint; + } + + public void setLastUpdateTime(Instant lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public void setOrganization(String organization) { + this.organization = organization; + } + + public void setCurrentIndex(String currentIndex) { + this.currentIndex = currentIndex; + } + + public void setTask(DatasourceTask task) { + this.task = task; + } + + + /** + * Datasource parser + */ + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_metadata", + true, + args -> { + String name = (String) args[0]; + Instant lastUpdateTime = Instant.ofEpochMilli((long) args[1]); + Instant enabledTime = args[2] == null ? null : Instant.ofEpochMilli((long) args[2]); + boolean isEnabled = (boolean) args[3]; + IntervalSchedule schedule = (IntervalSchedule) args[4]; + DatasourceTask task = DatasourceTask.valueOf((String) args[6]); + String feedFormat = (String) args[7]; + String endpoint = (String) args[8]; + String feedName = (String) args[9]; + String description = (String) args[10]; + String organization = (String) args[11]; + List contained_iocs_field = (List) args[12]; + DatasourceState state = DatasourceState.valueOf((String) args[13]); + String currentIndex = (String) args[14]; + List indices = (List) args[15]; + Database database = (Database) args[16]; + UpdateStats updateStats = (UpdateStats) args[17]; + Datasource parameter = new Datasource( + name, + lastUpdateTime, + enabledTime, + isEnabled, + schedule, + task, + feedFormat, + endpoint, + feedName, + description, + organization, + contained_iocs_field, + state, + currentIndex, + indices, + database, + updateStats + ); + return parameter; + } + ); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_UPDATE_TIME_FIELD); + 
PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ENABLED_TIME_FIELD); + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduleParser.parse(p), SCHEDULE_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), TASK_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_FORMAT); + PARSER.declareString(ConstructingObjectParser.constructorArg(), ENDPOINT_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_NAME); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); + PARSER.declareString(ConstructingObjectParser.constructorArg(), ORGANIZATION); + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), CONTAINED_IOCS_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), STATE_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), CURRENT_INDEX_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), Database.PARSER, DATABASE_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), UpdateStats.PARSER, UPDATE_STATS_FIELD); + } + + public Datasource() { + this(null, null, null, null, null, null, null, null); + } + + public Datasource(final String name, final Instant lastUpdateTime, final Instant enabledTime, final Boolean isEnabled, + final IntervalSchedule schedule, DatasourceTask task, final String feedFormat, final String endpoint, + final String feedName, final String description, final String organization, final List contained_iocs_field, + final DatasourceState state, final String currentIndex, final List indices, final Database database, final UpdateStats updateStats) { + this.name = name; + this.lastUpdateTime = lastUpdateTime; + this.enabledTime = enabledTime; + 
this.isEnabled = isEnabled; + this.schedule = schedule; + this.task = task; + this.feedFormat = feedFormat; + this.endpoint = endpoint; + this.feedName = feedName; + this.description = description; + this.organization = organization; + this.contained_iocs_field = contained_iocs_field; + this.state = state; + this.currentIndex = currentIndex; + this.indices = indices; + this.database = database; + this.updateStats = updateStats; + } + + public Datasource(final String name, final IntervalSchedule schedule, final String feedFormat, final String endpoint, final String feedName, final String description, final String organization, final List contained_iocs_field ) { + this( + name, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + false, + schedule, + DatasourceTask.ALL, + feedFormat, + endpoint, + feedName, + description, + organization, + contained_iocs_field, + DatasourceState.CREATING, + null, + new ArrayList<>(), + new Database(), + new UpdateStats() + ); + } + + public Datasource(final StreamInput in) throws IOException { + name = in.readString(); + lastUpdateTime = toInstant(in.readVLong()); + enabledTime = toInstant(in.readOptionalVLong()); + isEnabled = in.readBoolean(); + schedule = new IntervalSchedule(in); + task = DatasourceTask.valueOf(in.readString()); + feedFormat = in.readString(); + endpoint = in.readString(); + feedName = in.readString(); + description = in.readString(); + organization = in.readString(); + contained_iocs_field = in.readStringList(); + state = DatasourceState.valueOf(in.readString()); + currentIndex = in.readOptionalString(); + indices = in.readStringList(); + database = new Database(in); + updateStats = new UpdateStats(in); + } + + public void writeTo(final StreamOutput out) throws IOException { + out.writeString(name); + out.writeVLong(lastUpdateTime.toEpochMilli()); + out.writeOptionalVLong(enabledTime == null ? 
null : enabledTime.toEpochMilli()); + out.writeBoolean(isEnabled); + schedule.writeTo(out); + out.writeString(task.name()); + out.writeString(feedFormat); + out.writeString(endpoint); + out.writeString(feedName); + out.writeString(description); + out.writeString(organization); + out.writeStringCollection(contained_iocs_field); + out.writeString(state.name()); + out.writeOptionalString(currentIndex); + out.writeStringCollection(indices); + database.writeTo(out); + updateStats.writeTo(out); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(NAME_FIELD.getPreferredName(), name); + builder.timeField( + LAST_UPDATE_TIME_FIELD.getPreferredName(), + LAST_UPDATE_TIME_FIELD_READABLE.getPreferredName(), + lastUpdateTime.toEpochMilli() + ); + if (enabledTime != null) { + builder.timeField( + ENABLED_TIME_FIELD.getPreferredName(), + ENABLED_TIME_FIELD_READABLE.getPreferredName(), + enabledTime.toEpochMilli() + ); + } + builder.field(ENABLED_FIELD.getPreferredName(), isEnabled); + builder.field(SCHEDULE_FIELD.getPreferredName(), schedule); + builder.field(TASK_FIELD.getPreferredName(), task.name()); + builder.field(FEED_FORMAT.getPreferredName(), feedFormat); + builder.field(ENDPOINT_FIELD.getPreferredName(), endpoint); + builder.field(FEED_NAME.getPreferredName(), feedName); + builder.field(DESCRIPTION.getPreferredName(), description); + builder.field(ORGANIZATION.getPreferredName(), organization); + builder.field(CONTAINED_IOCS_FIELD.getPreferredName(), contained_iocs_field); + builder.field(STATE_FIELD.getPreferredName(), state.name()); + if (currentIndex != null) { + builder.field(CURRENT_INDEX_FIELD.getPreferredName(), currentIndex); + } + builder.field(INDICES_FIELD.getPreferredName(), indices); + builder.field(DATABASE_FIELD.getPreferredName(), database); + builder.field(UPDATE_STATS_FIELD.getPreferredName(), updateStats); + builder.endObject(); + return 
builder; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public Instant getLastUpdateTime() { + return this.lastUpdateTime; + } + + @Override + public Instant getEnabledTime() { + return this.enabledTime; + } + + @Override + public IntervalSchedule getSchedule() { + return this.schedule; + } + + @Override + public boolean isEnabled() { + return this.isEnabled; + } + + @Override + public Long getLockDurationSeconds() { + return ThreatIntelLockService.LOCK_DURATION_IN_SECONDS; + } + + /** + * Enable auto update of threat intel feed data + */ + public void enable() { + if (isEnabled == true) { + return; + } + enabledTime = Instant.now().truncatedTo(ChronoUnit.MILLIS); + isEnabled = true; + } + + /** + * Disable auto update of threat intel feed data + */ + public void disable() { + enabledTime = null; + isEnabled = false; + } + + /** + * Current index name of a datasource + * + * @return Current index name of a datasource + */ + public String currentIndexName() { + return currentIndex; + } + + public void setSchedule(IntervalSchedule schedule) { + this.schedule = schedule; + } + + /** + * Reset database so that it can be updated in next run regardless there is new update or not + */ + public void resetDatabase() { + database.setUpdatedAt(null); + database.setSha256Hash(null); + } + + /** + * Index name for a datasource with given suffix + * + * @param suffix the suffix of a index name + * @return index name for a datasource with given suffix + */ + public String newIndexName(final String suffix) { + return String.format(Locale.ROOT, "%s.%s.%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, name, suffix); + } + + /** + * Set database attributes with given input + * + * @param datasourceManifest the datasource manifest + * @param fields the fields + */ + public void setDatabase(final DatasourceManifest datasourceManifest, final List fields) { + this.database.setProvider(datasourceManifest.getOrganization()); + 
this.database.setSha256Hash(datasourceManifest.getSha256Hash()); + this.database.setUpdatedAt(Instant.ofEpochMilli(datasourceManifest.getUpdatedAt())); + this.database.setFields(fields); + } + + /** + * Checks if the database fields are compatible with the given set of fields. + * + * If database fields are null, it is compatible with any input fields + * as it hasn't been generated before. + * + * @param fields The set of input fields to check for compatibility. + * @return true if the database fields are compatible with the given input fields, false otherwise. + */ + public boolean isCompatible(final List fields) { + if (database.fields == null) { + return true; + } + + if (fields.size() < database.fields.size()) { + return false; + } + + Set fieldsSet = new HashSet<>(fields); + for (String field : database.fields) { + if (fieldsSet.contains(field) == false) { + return false; + } + } + return true; + } + + public DatasourceState getState() { + return state; + } + + public List getIndices() { + return indices; + } + + public void setState(DatasourceState previousState) { + this.state = previousState; + } + + public String getEndpoint() { + return this.endpoint; + } + + public Database getDatabase() { + return this.database; + } + + public UpdateStats getUpdateStats() { + return this.updateStats; + } + + /** + * Database of a datasource + */ + public static class Database implements Writeable, ToXContent { + private static final ParseField PROVIDER_FIELD = new ParseField("provider"); + private static final ParseField SHA256_HASH_FIELD = new ParseField("sha256_hash"); + private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at_in_epoch_millis"); + private static final ParseField UPDATED_AT_FIELD_READABLE = new ParseField("updated_at"); + private static final ParseField FIELDS_FIELD = new ParseField("fields"); + + /** + * @param provider A database provider name + * @return A database provider name + */ + private String provider; + /** + * @param 
sha256Hash SHA256 hash value of a database file + * @return SHA256 hash value of a database file + */ + private String sha256Hash; + + /** + * @param updatedAt A date when the database was updated + * @return A date when the database was updated + */ + private Instant updatedAt; + + /** + * @param fields A list of available fields in the database + * @return A list of available fields in the database + */ + private List fields; + + public Database(String provider, String sha256Hash, Instant updatedAt, List fields) { + this.provider = provider; + this.sha256Hash = sha256Hash; + this.updatedAt = updatedAt; + this.fields = fields; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public void setSha256Hash(String sha256Hash) { + this.sha256Hash = sha256Hash; + } + + public void setUpdatedAt(Instant updatedAt) { + this.updatedAt = updatedAt; + } + + public void setFields(List fields) { + this.fields = fields; + } + + public Instant getUpdatedAt() { + return updatedAt; + } + + public String getSha256Hash() { + return sha256Hash; + } + + public List getFields() { + return fields; + } + + public String getProvider() { + return provider; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_metadata_database", + true, + args -> { + String provider = (String) args[0]; + String sha256Hash = (String) args[1]; + Instant updatedAt = args[2] == null ? 
null : Instant.ofEpochMilli((Long) args[2]); + List fields = (List) args[3]; + return new Database(provider, sha256Hash, updatedAt, fields); + } + ); + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PROVIDER_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), SHA256_HASH_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), UPDATED_AT_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FIELDS_FIELD); + } + + public Database(final StreamInput in) throws IOException { + provider = in.readOptionalString(); + sha256Hash = in.readOptionalString(); + updatedAt = toInstant(in.readOptionalVLong()); + fields = in.readOptionalStringList(); + } + + private Database(){} + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeOptionalString(provider); + out.writeOptionalString(sha256Hash); + out.writeOptionalVLong(updatedAt == null ? null : updatedAt.toEpochMilli()); + out.writeOptionalStringCollection(fields); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + if (provider != null) { + builder.field(PROVIDER_FIELD.getPreferredName(), provider); + } + if (sha256Hash != null) { + builder.field(SHA256_HASH_FIELD.getPreferredName(), sha256Hash); + } + if (updatedAt != null) { + builder.timeField( + UPDATED_AT_FIELD.getPreferredName(), + UPDATED_AT_FIELD_READABLE.getPreferredName(), + updatedAt.toEpochMilli() + ); + } + if (fields != null) { + builder.startArray(FIELDS_FIELD.getPreferredName()); + for (String field : fields) { + builder.value(field); + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + } + + /** + * Update stats of a datasource + */ + public static class UpdateStats implements Writeable, ToXContent { + private static final ParseField LAST_SUCCEEDED_AT_FIELD = new 
ParseField("last_succeeded_at_in_epoch_millis"); + private static final ParseField LAST_SUCCEEDED_AT_FIELD_READABLE = new ParseField("last_succeeded_at"); + private static final ParseField LAST_PROCESSING_TIME_IN_MILLIS_FIELD = new ParseField("last_processing_time_in_millis"); + private static final ParseField LAST_FAILED_AT_FIELD = new ParseField("last_failed_at_in_epoch_millis"); + private static final ParseField LAST_FAILED_AT_FIELD_READABLE = new ParseField("last_failed_at"); + private static final ParseField LAST_SKIPPED_AT = new ParseField("last_skipped_at_in_epoch_millis"); + private static final ParseField LAST_SKIPPED_AT_READABLE = new ParseField("last_skipped_at"); + + /** + * @param lastSucceededAt The last time when threat intel feed data update was succeeded + * @return The last time when threat intel feed data update was succeeded + */ + private Instant lastSucceededAt; + /** + * @param lastProcessingTimeInMillis The last processing time when threat intel feed data update was succeeded + * @return The last processing time when threat intel feed data update was succeeded + */ + private Long lastProcessingTimeInMillis; + /** + * @param lastFailedAt The last time when threat intel feed data update was failed + * @return The last time when threat intel feed data update was failed + */ + private Instant lastFailedAt; + + /** + * @param lastSkippedAt The last time when threat intel feed data update was skipped as there was no new update from an endpoint + * @return The last time when threat intel feed data update was skipped as there was no new update from an endpoint + */ + private Instant lastSkippedAt; + + private UpdateStats(){} + + public void setLastSkippedAt(Instant lastSkippedAt) { + this.lastSkippedAt = lastSkippedAt; + } + + public void setLastSucceededAt(Instant lastSucceededAt) { + this.lastSucceededAt = lastSucceededAt; + } + + public void setLastProcessingTimeInMillis(Long lastProcessingTimeInMillis) { + this.lastProcessingTimeInMillis = 
lastProcessingTimeInMillis; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_metadata_update_stats", + true, + args -> { + Instant lastSucceededAt = args[0] == null ? null : Instant.ofEpochMilli((long) args[0]); + Long lastProcessingTimeInMillis = (Long) args[1]; + Instant lastFailedAt = args[2] == null ? null : Instant.ofEpochMilli((long) args[2]); + Instant lastSkippedAt = args[3] == null ? null : Instant.ofEpochMilli((long) args[3]); + return new UpdateStats(lastSucceededAt, lastProcessingTimeInMillis, lastFailedAt, lastSkippedAt); + } + ); + + static { + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_SUCCEEDED_AT_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_PROCESSING_TIME_IN_MILLIS_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FAILED_AT_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_SKIPPED_AT); + } + + public UpdateStats(final StreamInput in) throws IOException { + lastSucceededAt = toInstant(in.readOptionalVLong()); + lastProcessingTimeInMillis = in.readOptionalVLong(); + lastFailedAt = toInstant(in.readOptionalVLong()); + lastSkippedAt = toInstant(in.readOptionalVLong()); + } + + public UpdateStats(Instant lastSucceededAt, Long lastProcessingTimeInMillis, Instant lastFailedAt, Instant lastSkippedAt) { + this.lastSucceededAt = lastSucceededAt; + this.lastProcessingTimeInMillis = lastProcessingTimeInMillis; + this.lastFailedAt = lastFailedAt; + this.lastSkippedAt = lastSkippedAt; + } + + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeOptionalVLong(lastSucceededAt == null ? null : lastSucceededAt.toEpochMilli()); + out.writeOptionalVLong(lastProcessingTimeInMillis); + out.writeOptionalVLong(lastFailedAt == null ? null : lastFailedAt.toEpochMilli()); + out.writeOptionalVLong(lastSkippedAt == null ? 
null : lastSkippedAt.toEpochMilli()); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + if (lastSucceededAt != null) { + builder.timeField( + LAST_SUCCEEDED_AT_FIELD.getPreferredName(), + LAST_SUCCEEDED_AT_FIELD_READABLE.getPreferredName(), + lastSucceededAt.toEpochMilli() + ); + } + if (lastProcessingTimeInMillis != null) { + builder.field(LAST_PROCESSING_TIME_IN_MILLIS_FIELD.getPreferredName(), lastProcessingTimeInMillis); + } + if (lastFailedAt != null) { + builder.timeField( + LAST_FAILED_AT_FIELD.getPreferredName(), + LAST_FAILED_AT_FIELD_READABLE.getPreferredName(), + lastFailedAt.toEpochMilli() + ); + } + if (lastSkippedAt != null) { + builder.timeField( + LAST_SKIPPED_AT.getPreferredName(), + LAST_SKIPPED_AT_READABLE.getPreferredName(), + lastSkippedAt.toEpochMilli() + ); + } + builder.endObject(); + return builder; + } + + public void setLastFailedAt(Instant now) { + this.lastFailedAt = now; + } + } + + + /** + * Builder class for Datasource + */ + public static class Builder { + public static Datasource build(final PutDatasourceRequest request) { + String id = request.getName(); + IntervalSchedule schedule = new IntervalSchedule( + Instant.now().truncatedTo(ChronoUnit.MILLIS), + (int) request.getUpdateInterval().days(), + ChronoUnit.DAYS + ); + String feedFormat = request.getFeedFormat(); + String endpoint = request.getEndpoint(); + String feedName = request.getFeedName(); + String description = request.getDescription(); + String organization = request.getOrganization(); + List contained_iocs_field = request.getContained_iocs_field(); + return new Datasource(id, schedule, feedFormat, endpoint, feedName, description, organization, contained_iocs_field); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceExtension.java 
b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceExtension.java new file mode 100644 index 000000000..4d32973e6 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceExtension.java @@ -0,0 +1,47 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.opensearch.jobscheduler.spi.JobSchedulerExtension; +import org.opensearch.jobscheduler.spi.ScheduledJobParser; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; + +import java.util.Map; + +public class DatasourceExtension implements JobSchedulerExtension { + /** + * Job index name for a datasource + */ + public static final String JOB_INDEX_NAME = ".scheduler-security_analytics-threatintel-datasource"; //rename this... + + /** + * Job index setting + * + * We want it to be single shard so that job can be run only in a single node by job scheduler. + * We want it to expand to all replicas so that querying to this index can be done locally to reduce latency. 
+ */ + public static final Map INDEX_SETTING = Map.of("index.number_of_shards", 1, "index.number_of_replicas", "0-all", "index.hidden", true); + + @Override + public String getJobType() { + return "scheduler_security_analytics_threatintel_datasource"; + } + + @Override + public String getJobIndex() { + return JOB_INDEX_NAME; + } + + @Override + public ScheduledJobRunner getJobRunner() { + return DatasourceRunner.getJobRunnerInstance(); + } + + @Override + public ScheduledJobParser getJobParser() { + return (parser, id, jobDocVersion) -> Datasource.PARSER.parse(parser, null); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java new file mode 100644 index 000000000..8de306d33 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java @@ -0,0 +1,159 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.jobscheduler.spi.JobExecutionContext; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +import java.io.IOException; +import java.time.temporal.ChronoUnit; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; +import java.time.Instant; + +import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; +import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelExecutor; +import 
org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; +import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; +/** + * Datasource update task + * + * This is a background task which is responsible for updating threat intel feed data + */ +public class DatasourceRunner implements ScheduledJobRunner { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + private static DatasourceRunner INSTANCE; + + public static DatasourceRunner getJobRunnerInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (DatasourceRunner.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new DatasourceRunner(); + return INSTANCE; + } + } + + private ClusterService clusterService; + + // threat intel specific variables + private DatasourceUpdateService datasourceUpdateService; + private DatasourceDao datasourceDao; + private ThreatIntelExecutor threatIntelExecutor; + private ThreatIntelLockService lockService; + private boolean initialized; + + private DatasourceRunner() { + // Singleton class, use getJobRunner method instead of constructor + } + + public void initialize( + final ClusterService clusterService, + final DatasourceUpdateService datasourceUpdateService, + final DatasourceDao datasourceDao, + final ThreatIntelExecutor threatIntelExecutor, + final ThreatIntelLockService threatIntelLockService + ) { + this.clusterService = clusterService; + this.datasourceUpdateService = datasourceUpdateService; + this.datasourceDao = datasourceDao; + this.threatIntelExecutor = threatIntelExecutor; + this.lockService = threatIntelLockService; + this.initialized = true; + } + + @Override + public void runJob(final ScheduledJobParameter jobParameter, final JobExecutionContext context) { + if (initialized == false) { + throw new AssertionError("this instance is not initialized"); + } + + log.info("Update job started for a datasource[{}]", jobParameter.getName()); + if (jobParameter instanceof 
Datasource == false) { + log.error("Illegal state exception: job parameter is not instance of Datasource"); + throw new IllegalStateException( + "job parameter is not instance of Datasource, type: " + jobParameter.getClass().getCanonicalName() + ); + } + threatIntelExecutor.forDatasourceUpdate().submit(updateDatasourceRunner(jobParameter)); + } + + /** + * Update threat intel feed data + * + * Lock is used so that only one of nodes run this task. + * + * @param jobParameter job parameter + */ + protected Runnable updateDatasourceRunner(final ScheduledJobParameter jobParameter) { + return () -> { + Optional lockModel = lockService.acquireLock( + jobParameter.getName(), + ThreatIntelLockService.LOCK_DURATION_IN_SECONDS + ); + if (lockModel.isEmpty()) { + log.error("Failed to update. Another processor is holding a lock for datasource[{}]", jobParameter.getName()); + return; + } + + LockModel lock = lockModel.get(); + try { + updateDatasource(jobParameter, lockService.getRenewLockRunnable(new AtomicReference<>(lock))); + } catch (Exception e) { + log.error("Failed to update datasource[{}]", jobParameter.getName(), e); + } finally { + lockService.releaseLock(lock); + } + }; + } + + protected void updateDatasource(final ScheduledJobParameter jobParameter, final Runnable renewLock) throws IOException { + Datasource datasource = datasourceDao.getDatasource(jobParameter.getName()); + /** + * If delete request comes while update task is waiting on a queue for other update tasks to complete, + * because update task for this datasource didn't acquire a lock yet, delete request is processed. + * When it is this datasource's turn to run, it will find that the datasource is deleted already. + * Therefore, we stop the update process when data source does not exist. 
+ */ + if (datasource == null) { + log.info("Datasource[{}] does not exist", jobParameter.getName()); + return; + } + + if (DatasourceState.AVAILABLE.equals(datasource.getState()) == false) { + log.error("Invalid datasource state. Expecting {} but received {}", DatasourceState.AVAILABLE, datasource.getState()); + datasource.disable(); + datasource.getUpdateStats().setLastFailedAt(Instant.now()); + datasourceDao.updateDatasource(datasource); + return; + } + try { + datasourceUpdateService.deleteUnusedIndices(datasource); + if (DatasourceTask.DELETE_UNUSED_INDICES.equals(datasource.getTask()) == false) { + datasourceUpdateService.updateOrCreateThreatIntelFeedData(datasource, renewLock); + } + datasourceUpdateService.deleteUnusedIndices(datasource); + } catch (Exception e) { + log.error("Failed to update datasource for {}", datasource.getName(), e); + datasource.getUpdateStats().setLastFailedAt(Instant.now()); + datasourceDao.updateDatasource(datasource); + } finally { //post processing + datasourceUpdateService.updateDatasource(datasource, datasource.getSchedule(), DatasourceTask.ALL); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceTask.java b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceTask.java new file mode 100644 index 000000000..b0e9ac184 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceTask.java @@ -0,0 +1,21 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +/** + * Task that {@link DatasourceRunner} will run + */ +public enum DatasourceTask { + /** + * Do everything + */ + ALL, + + /** + * Only delete unused indices + */ + DELETE_UNUSED_INDICES +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java 
b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java new file mode 100644 index 000000000..5a24c5a84 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java @@ -0,0 +1,296 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; +import java.net.URL; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.opensearch.OpenSearchException; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; + +import org.opensearch.core.rest.RestStatus; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; +import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; +import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; + +public class DatasourceUpdateService { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + private static final int SLEEP_TIME_IN_MILLIS = 5000; // 5 seconds + private static final int MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS = 10 * 60 * 60 * 1000; // 10 hours + private final ClusterService clusterService; + private final ClusterSettings clusterSettings; + private 
final DatasourceDao datasourceDao; + private final ThreatIntelFeedDataService threatIntelFeedDataService; + + public DatasourceUpdateService( + final ClusterService clusterService, + final DatasourceDao datasourceDao, + final ThreatIntelFeedDataService threatIntelFeedDataService + ) { + this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); + this.datasourceDao = datasourceDao; + this.threatIntelFeedDataService = threatIntelFeedDataService; + } + + /** + * Update threat intel feed data + * + * The first column is ip range field regardless its header name. + * Therefore, we don't store the first column's header name. + * + * @param datasource the datasource + * @param renewLock runnable to renew lock + * + * @throws IOException + */ + public void updateOrCreateThreatIntelFeedData(final Datasource datasource, final Runnable renewLock) throws IOException { + URL url = new URL(datasource.getEndpoint()); + DatasourceManifest manifest = DatasourceManifest.Builder.build(url); + + if (shouldUpdate(datasource, manifest) == false) { + log.info("Skipping threat intel feed database update. 
Update is not required for {}", datasource.getName()); + datasource.getUpdateStats().setLastSkippedAt(Instant.now()); + datasourceDao.updateDatasource(datasource); + return; + } + + Instant startTime = Instant.now(); + String indexName = setupIndex(datasource); + String[] header; + List fieldsToStore; + try (CSVParser reader = threatIntelFeedDataService.getDatabaseReader(manifest)) { + CSVRecord headerLine = reader.iterator().next(); + header = validateHeader(headerLine).values(); + fieldsToStore = Arrays.asList(header).subList(1, header.length); + if (datasource.isCompatible(fieldsToStore) == false) { + log.error("Exception: new fields does not contain all old fields"); + throw new OpenSearchException( + "new fields [{}] does not contain all old fields [{}]", + fieldsToStore.toString(), + datasource.getDatabase().getFields().toString() + ); + } + threatIntelFeedDataService.saveThreatIntelFeedData(indexName, header, reader.iterator(), renewLock); + } + + waitUntilAllShardsStarted(indexName, MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS); + Instant endTime = Instant.now(); + updateDatasourceAsSucceeded(indexName, datasource, manifest, fieldsToStore, startTime, endTime); // then I update the datasource + } + + + /** + * We wait until all shards are ready to serve search requests before updating datasource metadata to + * point to a new index so that there won't be latency degradation during threat intel feed data update + * + * @param indexName the indexName + */ + protected void waitUntilAllShardsStarted(final String indexName, final int timeout) { + Instant start = Instant.now(); + try { + while (Instant.now().toEpochMilli() - start.toEpochMilli() < timeout) { + if (clusterService.state().routingTable().allShards(indexName).stream().allMatch(shard -> shard.started())) { + return; + } + Thread.sleep(SLEEP_TIME_IN_MILLIS); + } + throw new OpenSearchException( + "index[{}] replication did not complete after {} millis", + 
MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS + ); + } catch (InterruptedException e) { + log.error("runtime exception", e); + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + } + + /** + * Return header fields of threat intel feed data with given url of a manifest file + * + * The first column is ip range field regardless its header name. + * Therefore, we don't store the first column's header name. + * + * @param manifestUrl the url of a manifest file + * @return header fields of threat intel feed + */ + public List getHeaderFields(String manifestUrl) throws IOException { + URL url = new URL(manifestUrl); + DatasourceManifest manifest = DatasourceManifest.Builder.build(url); + + try (CSVParser reader = threatIntelFeedDataService.getDatabaseReader(manifest)) { + String[] fields = reader.iterator().next().values(); + return Arrays.asList(fields).subList(1, fields.length); + } + } + + /** + * Delete all indices except the one which are being used + * + * @param datasource + */ + public void deleteUnusedIndices(final Datasource datasource) { + try { + List indicesToDelete = datasource.getIndices() + .stream() + .filter(index -> index.equals(datasource.currentIndexName()) == false) + .collect(Collectors.toList()); + + List deletedIndices = deleteIndices(indicesToDelete); + + if (deletedIndices.isEmpty() == false) { + datasource.getIndices().removeAll(deletedIndices); + datasourceDao.updateDatasource(datasource); + } + } catch (Exception e) { + log.error("Failed to delete old indices for {}", datasource.getName(), e); + } + } + + /** + * Update datasource with given systemSchedule and task + * + * @param datasource datasource to update + * @param systemSchedule new system schedule value + * @param task new task value + */ + public void updateDatasource(final Datasource datasource, final IntervalSchedule systemSchedule, final DatasourceTask task) { + boolean updated = false; + if 
(datasource.getSchedule().equals(systemSchedule) == false) { + datasource.setSchedule(systemSchedule); + updated = true; + } + + if (datasource.getTask().equals(task) == false) { + datasource.setTask(task); + updated = true; + } + + if (updated) { + datasourceDao.updateDatasource(datasource); + } + } + + private List deleteIndices(final List indicesToDelete) { + List deletedIndices = new ArrayList<>(indicesToDelete.size()); + for (String index : indicesToDelete) { + if (clusterService.state().metadata().hasIndex(index) == false) { + deletedIndices.add(index); + continue; + } + + try { + threatIntelFeedDataService.deleteThreatIntelDataIndex(index); + deletedIndices.add(index); + } catch (Exception e) { + log.error("Failed to delete an index [{}]", index, e); + } + } + return deletedIndices; + } + + /** + * Validate header + * + * 1. header should not be null + * 2. the number of values in header should be more than one + * + * @param header the header + * @return CSVRecord the input header + */ + private CSVRecord validateHeader(CSVRecord header) { + if (header == null) { + throw new OpenSearchException("threat intel feed database is empty"); + } + if (header.values().length < 2) { + throw new OpenSearchException("threat intel feed database should have at least two fields"); + } + return header; + } + + /*** + * Update datasource as succeeded + * + * @param manifest the manifest + * @param datasource the datasource + */ + private void updateDatasourceAsSucceeded( + final String newIndexName, + final Datasource datasource, + final DatasourceManifest manifest, + final List fields, + final Instant startTime, + final Instant endTime + ) { + datasource.setCurrentIndex(newIndexName); + datasource.setDatabase(manifest, fields); + datasource.getUpdateStats().setLastSucceededAt(endTime); + datasource.getUpdateStats().setLastProcessingTimeInMillis(endTime.toEpochMilli() - startTime.toEpochMilli()); + datasource.enable(); + datasource.setState(DatasourceState.AVAILABLE); + 
datasourceDao.updateDatasource(datasource); + log.info( + "threat intel feed database creation succeeded for {} and took {} seconds", + datasource.getName(), + Duration.between(startTime, endTime) + ); + } + + /*** + * Setup index to add a new threat intel feed data + * + * @param datasource the datasource + * @return new index name + */ + private String setupIndex(final Datasource datasource) { + String indexName = datasource.newIndexName(UUID.randomUUID().toString()); + datasource.getIndices().add(indexName); + datasourceDao.updateDatasource(datasource); + threatIntelFeedDataService.createIndexIfNotExists(indexName); + return indexName; + } + + /** + * Determine if update is needed or not + * + * Update is needed when all following conditions are met + * 1. updatedAt value in datasource is equal or before updateAt value in manifest + * 2. SHA256 hash value in datasource is different with SHA256 hash value in manifest + * + * @param datasource + * @param manifest + * @return + */ + private boolean shouldUpdate(final Datasource datasource, final DatasourceManifest manifest) { + if (datasource.getDatabase().getUpdatedAt() != null + && datasource.getDatabase().getUpdatedAt().toEpochMilli() > manifest.getUpdatedAt()) { + return false; + } + +// if (manifest.getSha256Hash().equals(datasource.getDatabase().getSha256Hash())) { +// return false; +// } + return true; + } +} diff --git a/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension b/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension new file mode 100644 index 000000000..0ffeb24aa --- /dev/null +++ b/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension @@ -0,0 +1 @@ +org.opensearch.securityanalytics.SecurityAnalyticsPlugin \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index 
44f5d39ae..a3e73e96f 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -172,7 +172,7 @@ public static CustomLogType randomCustomLogType(String name, String description, public static ThreatIntelFeedData randomThreatIntelFeedData() { return new ThreatIntelFeedData( "IP_ADDRESS", - ip, + "ip", "alientVault", Instant.now() ); diff --git a/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java b/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java index 9e7a4d061..6551f579c 100644 --- a/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java @@ -5,6 +5,12 @@ package org.opensearch.securityanalytics.findings; +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; import java.time.Instant; import java.time.ZoneId; import java.util.ArrayDeque; From 2e52a02e64cb8d9ddf54caede4a79cb1eb2a4758 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Sat, 7 Oct 2023 13:38:24 -0700 Subject: [PATCH 09/39] create doc level query from threat intel feed data index docs" Signed-off-by: Surya Sashank Nistala --- build.gradle | 2 +- .../SecurityAnalyticsPlugin.java | 2 +- .../model/ThreatIntelFeedData.java | 3 +- .../DetectorThreatIntelService.java | 74 +++++++++++++---- .../ThreatIntelFeedDataService.java | 17 ++-- .../TransportIndexDetectorAction.java | 20 ++++- .../SecurityAnalyticsRestTestCase.java | 6 ++ .../securityanalytics/TestHelpers.java | 34 ++++---- .../resthandler/DetectorMonitorRestApiIT.java | 82 +++++++++++++++++++ 9 files changed, 192 insertions(+), 48 deletions(-) diff --git a/build.gradle b/build.gradle index 2a958f0b6..49180e6ab 100644 --- a/build.gradle +++ 
b/build.gradle @@ -155,7 +155,7 @@ dependencies { implementation group: 'org.apache.commons', name: 'commons-lang3', version: "${versions.commonslang}" implementation "org.antlr:antlr4-runtime:4.10.1" implementation "com.cronutils:cron-utils:9.1.6" - api "org.opensearch:common-utils:${common_utils_version}@jar" + api files("/Users/snistala/Documents/opensearch/common-utils/build/libs/common-utils-3.0.0.0-SNAPSHOT.jar") api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" compileOnly "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 33808b445..3e3d6ee07 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -137,7 +137,7 @@ public Collection createComponents(Client client, mapperService = new MapperService(client, clusterService, indexNameExpressionResolver, indexTemplateManager, logTypeService); ruleIndices = new RuleIndices(logTypeService, client, clusterService, threadPool); correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); - ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService.state(), clusterService, client, indexNameExpressionResolver, xContentRegistry); + ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService, client, indexNameExpressionResolver, xContentRegistry); DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); this.client = client; diff --git a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java 
b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java index 1870f383a..d79907fcb 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java +++ b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java @@ -56,7 +56,7 @@ public static ThreatIntelFeedData parse(XContentParser xcp, String id, Long vers String iocValue = null; String feedId = null; Instant timestamp = null; - + xcp.nextToken(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { String fieldName = xcp.currentName(); @@ -126,6 +126,7 @@ public ThreatIntelFeedData(StreamInput sin) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return createXContentBuilder(builder, params); + } private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXContent.Params params) throws IOException { diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index 0e940988e..ae0acc6c3 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -1,7 +1,9 @@ package org.opensearch.securityanalytics.threatIntel; import org.opensearch.commons.alerting.model.DocLevelQuery; +import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; @@ -9,6 +11,9 @@ import java.util.Collections; import 
java.util.List; import java.util.Set; +import java.util.UUID; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -20,42 +25,75 @@ public DetectorThreatIntelService(ThreatIntelFeedDataService threatIntelFeedData this.threatIntelFeedDataService = threatIntelFeedDataService; } - /** Convert the feed data IOCs into query string query format to create doc level queries. */ + /** + * Convert the feed data IOCs into query string query format to create doc level queries. + */ public DocLevelQuery createDocLevelQueryFromThreatIntelList( List tifdList, String docLevelQueryId - ) { + ) { Set iocs = tifdList.stream().map(ThreatIntelFeedData::getIocValue).collect(Collectors.toSet()); String query = buildQueryStringQueryWithIocList(iocs); return new DocLevelQuery( - docLevelQueryId,tifdList.get(0).getFeedId(), query, + docLevelQueryId, tifdList.get(0).getFeedId(), + Collections.singletonList("*"), + query, Collections.singletonList("threat_intel") ); } private String buildQueryStringQueryWithIocList(Set iocs) { StringBuilder sb = new StringBuilder(); - - for(String ioc : iocs) { - if(sb.length() != 0) { - sb.append(" "); + sb.append("("); + for (String ioc : iocs) { + if (sb.length() > 2) { + sb.append(" OR "); } - sb.append("("); sb.append(ioc); - sb.append(")"); + } + sb.append(")"); return sb.toString(); } - public DocLevelQuery createDocLevelQueryFromThreatIntel(Detector detector) { - // for testing validation only. 
- if(detector.getThreatIntelEnabled() ==false) { - throw new SecurityAnalyticsException( - "trying to create threat intel feed queries when flag to use threat intel is disabled.", - RestStatus.FORBIDDEN, new IllegalArgumentException()); + public void createDocLevelQueryFromThreatIntel(Detector detector, ActionListener listener) { + try { + if (detector.getThreatIntelEnabled() == false) { + listener.onResponse(null); + return; + + } + CountDownLatch latch = new CountDownLatch(1); + // TODO: plugin logic to run job for populating threat intel feed data + //TODO populateFeedData() + threatIntelFeedDataService.getThreatIntelFeedData(new ActionListener<>() { + @Override + public void onResponse(List threatIntelFeedData) { + if (threatIntelFeedData.isEmpty()) { + listener.onResponse(null); + } else { + listener.onResponse(createDocLevelQueryFromThreatIntelList( + threatIntelFeedData, + detector.getName() + "_threat_intel" + UUID.randomUUID() + )); + } + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + latch.countDown(); + } + }); + latch.await(30, TimeUnit.SECONDS); + } catch (InterruptedException e) { + listener.onFailure(e); } - // TODO: plugin logic to run job for populating threat intel feed data - /*threatIntelFeedDataService.getThreatIntelFeedData("ip_address", );*/ - return null; + + } + + public void updateDetectorsWithLatestThreatIntelRules() { + } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 351572470..1a7001725 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -75,7 +75,6 @@ public class ThreatIntelFeedDataService { private static final String TYPE = "type"; private static final String 
DATA_FIELD_NAME = "_data"; - private final ClusterState state; private final Client client; private final IndexNameExpressionResolver indexNameExpressionResolver; @@ -96,35 +95,29 @@ public class ThreatIntelFeedDataService { true ); private final ClusterService clusterService; - private final ClusterSettings clusterSettings; public ThreatIntelFeedDataService( - ClusterState state, ClusterService clusterService, Client client, IndexNameExpressionResolver indexNameExpressionResolver, NamedXContentRegistry xContentRegistry) { - this.state = state; this.client = client; this.indexNameExpressionResolver = indexNameExpressionResolver; this.xContentRegistry = xContentRegistry; this.clusterService = clusterService; - this.clusterSettings = clusterService.getClusterSettings(); } private final NamedXContentRegistry xContentRegistry; public void getThreatIntelFeedData( - String iocType, ActionListener> listener ) { String tifdIndex = IndexUtils.getNewIndexByCreationDate( - this.state, + this.clusterService.state(), this.indexNameExpressionResolver, ".opensearch-sap-threatintel*" //name? 
); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); - sourceBuilder.query(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("ioc_type", iocType))); SearchRequest searchRequest = new SearchRequest(tifdIndex); searchRequest.source().size(9999); //TODO: convert to scroll searchRequest.source(sourceBuilder); @@ -174,12 +167,13 @@ public void createIndexIfNotExists(final String indexName) { .mapping(getIndexMapping()); StashedThreadContext.run( client, - () -> client.admin().indices().create(createIndexRequest).actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) + () -> client.admin().indices().create(createIndexRequest).actionGet(this.clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) ); } private void freezeIndex(final String indexName) { - TimeValue timeout = clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT); + ClusterSettings clusterSettings = this.clusterService.getClusterSettings(); + TimeValue timeout = this.clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT); StashedThreadContext.run(client, () -> { client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); @@ -260,7 +254,7 @@ public void saveThreatIntelFeedData( if (indexName == null || fields == null || iterator == null || renewLock == null){ throw new IllegalArgumentException("Fields cannot be null"); } - + ClusterSettings clusterSettings = this.clusterService.getClusterSettings(); TimeValue timeout = clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT); Integer batchSize = clusterSettings.get(ThreatIntelSettings.BATCH_SIZE); final BulkRequest bulkRequest = new BulkRequest(); @@ -297,6 +291,7 @@ public void deleteThreatIntelDataIndex(final String index) { } public void deleteThreatIntelDataIndex(final List indices) { + ClusterSettings clusterSettings = 
this.clusterService.getClusterSettings(); if (indices == null || indices.isEmpty()) { return; } diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index 81c548114..ea226369d 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ -116,6 +116,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; @@ -654,8 +655,23 @@ private IndexMonitorRequest createDocLevelMonitorRequest(List } try { if (detector.getThreatIntelEnabled()) { - DocLevelQuery docLevelQueryFromThreatIntel = detectorThreatIntelService.createDocLevelQueryFromThreatIntel(detector); - docLevelQueries.add(docLevelQueryFromThreatIntel); + CountDownLatch countDownLatch = new CountDownLatch(1); + detectorThreatIntelService.createDocLevelQueryFromThreatIntel(detector, new ActionListener<>() { + @Override + public void onResponse(DocLevelQuery dlq) { + if (dlq != null) + docLevelQueries.add(dlq); + countDownLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + // not failing detector creation if any fatal exception occurs during doc level query creation from threat intel feed data + log.error("Failed to convert threat intel feed to. 
Proceeding with detector creation", e); + countDownLatch.countDown(); + } + }); + countDownLatch.await(); } } catch (Exception e) { // not failing detector creation if any fatal exception occurs during doc level query creation from threat intel feed data diff --git a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java index 2178f06d6..1d8e1e858 100644 --- a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java +++ b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java @@ -64,6 +64,7 @@ import org.opensearch.securityanalytics.model.CustomLogType; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.Rule; +import org.opensearch.securityanalytics.model.ThreatIntelFeedData; import org.opensearch.test.rest.OpenSearchRestTestCase; @@ -682,6 +683,11 @@ protected String toJsonString(CorrelationRule rule) throws IOException { return IndexUtilsKt.string(shuffleXContent(rule.toXContent(builder, ToXContent.EMPTY_PARAMS))); } + protected String toJsonString(ThreatIntelFeedData tifd) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + return IndexUtilsKt.string(shuffleXContent(tifd.toXContent(builder, ToXContent.EMPTY_PARAMS))); + } + private String alertingScheduledJobMappings() { return " \"_meta\" : {\n" + " \"schema_version\": 5\n" + diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index a3e73e96f..abc9caad8 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -54,53 +54,57 @@ static class AccessRoles { public static Detector randomDetector(List rules) { DetectorInput input = new DetectorInput("windows detector for security analytics", 
List.of("windows"), Collections.emptyList(), rules.stream().map(DetectorRule::new).collect(Collectors.toList())); - return randomDetector(null, null, null, List.of(input), List.of(), null, null, null, null); + return randomDetector(null, null, null, List.of(input), List.of(), null, null, null, null, false); } public static Detector randomDetector(List rules, String detectorType) { DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), Collections.emptyList(), rules.stream().map(DetectorRule::new).collect(Collectors.toList())); - return randomDetector(null, detectorType, null, List.of(input), List.of(), null, null, null, null); + return randomDetector(null, detectorType, null, List.of(input), List.of(), null, null, null, null, false); } public static Detector randomDetectorWithInputs(List inputs) { - return randomDetector(null, null, null, inputs, List.of(), null, null, null, null); + return randomDetector(null, null, null, inputs, List.of(), null, null, null, null, false); + } + + public static Detector randomDetectorWithInputsAndThreatIntel(List inputs, Boolean threatIntel) { + return randomDetector(null, null, null, inputs, List.of(), null, null, null, null, threatIntel); } public static Detector randomDetectorWithInputsAndTriggers(List inputs, List triggers) { - return randomDetector(null, null, null, inputs, triggers, null, null, null, null); + return randomDetector(null, null, null, inputs, triggers, null, null, null, null, false); } public static Detector randomDetectorWithInputs(List inputs, String detectorType) { - return randomDetector(null, detectorType, null, inputs, List.of(), null, null, null, null); + return randomDetector(null, detectorType, null, inputs, List.of(), null, null, null, null, false); } public static Detector randomDetectorWithTriggers(List triggers) { - return randomDetector(null, null, null, List.of(), triggers, null, null, null, null); + return randomDetector(null, null, null, List.of(), 
triggers, null, null, null, null, false); } public static Detector randomDetectorWithTriggers(List rules, List triggers) { DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), Collections.emptyList(), rules.stream().map(DetectorRule::new).collect(Collectors.toList())); - return randomDetector(null, null, null, List.of(input), triggers, null, null, null, null); + return randomDetector(null, null, null, List.of(input), triggers, null, null, null, null, false); } public static Detector randomDetectorWithTriggers(List rules, List triggers, List inputIndices) { DetectorInput input = new DetectorInput("windows detector for security analytics", inputIndices, Collections.emptyList(), rules.stream().map(DetectorRule::new).collect(Collectors.toList())); - return randomDetector(null, null, null, List.of(input), triggers, null, null, null, null); + return randomDetector(null, null, null, List.of(input), triggers, null, true, null, null, false); } public static Detector randomDetectorWithTriggersAndScheduleAndEnabled(List rules, List triggers, Schedule schedule, boolean enabled) { DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), Collections.emptyList(), rules.stream().map(DetectorRule::new).collect(Collectors.toList())); - return randomDetector(null, null, null, List.of(input), triggers, schedule, enabled, null, null); + return randomDetector(null, null, null, List.of(input), triggers, schedule, enabled, null, null, false); } public static Detector randomDetectorWithTriggers(List rules, List triggers, String detectorType, DetectorInput input) { - return randomDetector(null, detectorType, null, List.of(input), triggers, null, null, null, null); + return randomDetector(null, detectorType, null, List.of(input), triggers, null, null, null, null, false); } public static Detector randomDetectorWithInputsAndTriggersAndType(List inputs, List triggers, String detectorType) { - return 
randomDetector(null, detectorType, null, inputs, triggers, null, null, null, null); + return randomDetector(null, detectorType, null, inputs, triggers, null, null, null, null, false); } public static Detector randomDetector(String name, @@ -111,7 +115,8 @@ public static Detector randomDetector(String name, Schedule schedule, Boolean enabled, Instant enabledTime, - Instant lastUpdateTime) { + Instant lastUpdateTime, + Boolean threatIntel) { if (name == null) { name = OpenSearchRestTestCase.randomAlphaOfLength(10); } @@ -150,7 +155,7 @@ public static Detector randomDetector(String name, DetectorTrigger trigger = new DetectorTrigger(null, "windows-trigger", "1", List.of(randomDetectorType()), List.of("QuarksPwDump Clearing Access History"), List.of("high"), List.of("T0008"), List.of()); triggers.add(trigger); } - return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, user, inputs, triggers, Collections.singletonList(""), "", "", "", "", "", "", Collections.emptyMap(), Collections.emptyList(), false); + return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, user, inputs, triggers, Collections.singletonList(""), "", "", "", "", "", "", Collections.emptyMap(), Collections.emptyList(), threatIntel); } public static CustomLogType randomCustomLogType(String name, String description, String category, String source) { @@ -1528,7 +1533,8 @@ public static NamedXContentRegistry xContentRegistry() { return new NamedXContentRegistry( List.of( Detector.XCONTENT_REGISTRY, - DetectorInput.XCONTENT_REGISTRY + DetectorInput.XCONTENT_REGISTRY, + ThreatIntelFeedData.XCONTENT_REGISTRY ) ); } diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 68d3636ae..6e2519442 100644 --- 
a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -4,7 +4,9 @@ */ package org.opensearch.securityanalytics.resthandler; +import org.apache.hc.core5.http.ContentType; import org.apache.hc.core5.http.HttpStatus; +import org.apache.hc.core5.http.io.entity.StringEntity; import org.junit.Assert; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Request; @@ -20,8 +22,11 @@ import org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.model.Rule; +import org.opensearch.securityanalytics.model.ThreatIntelFeedData; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -36,6 +41,7 @@ import static org.opensearch.securityanalytics.TestHelpers.randomDetector; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntel; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDoc; import static org.opensearch.securityanalytics.TestHelpers.randomIndex; @@ -1048,7 +1054,83 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule verifyWorkflow(detectorMap, monitorIds, 2); } + public void testCreateDetector_threatIntelEnabled() throws IOException { + String tifdString1 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"abc\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; + String tifdString2 = "{ 
\"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"xyz\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; + String feedIndex = ".opendsearch-sap-threatintel"; + indexDoc(feedIndex, "1", tifdString1); + indexDoc(feedIndex, "2", tifdString2); + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + assertEquals(1, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + 
+ " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + + indexDoc(index, "1", randomDoc(5, 3, "abc")); + indexDoc(index, "2", randomDoc(5, 3, "xyz")); + indexDoc(index, "3", randomDoc(5, 3, "klm")); + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + assertNotNull(executeResponse); + } public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesInTrigger() throws IOException { updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); From 1830ec9f7e7550ec36f58ee2fffa33647a215af5 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 9 Oct 2023 02:54:49 -0700 Subject: [PATCH 10/39] handle threat intel enabled check during detector updation Signed-off-by: Surya Sashank Nistala --- .../transport/TransportIndexDetectorAction.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index ea226369d..4805179df 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ 
-255,7 +255,7 @@ private void createMonitorFromQueries(List> rulesById, Detect List monitorRequests = new ArrayList<>(); - if (!docLevelRules.isEmpty()) { + if (!docLevelRules.isEmpty() || detector.getThreatIntelEnabled()) { monitorRequests.add(createDocLevelMonitorRequest(docLevelRules, detector, refreshPolicy, Monitor.NO_ID, Method.POST)); } @@ -474,7 +474,7 @@ public void onFailure(Exception e) { Collectors.toList()); // Process doc level monitors - if (!docLevelRules.isEmpty()) { + if (!docLevelRules.isEmpty() || detector.getThreatIntelEnabled()) { if (detector.getDocLevelMonitorId() == null) { monitorsToBeAdded.add(createDocLevelMonitorRequest(docLevelRules, detector, refreshPolicy, Monitor.NO_ID, Method.POST)); } else { From dababa8391c6a7ff7163db320940138b035c8fdd Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 9 Oct 2023 19:14:53 -0700 Subject: [PATCH 11/39] add tests for testing threat intel feed integration with detectors Signed-off-by: Surya Sashank Nistala --- .../securityanalytics/model/Detector.java | 8 +- .../resthandler/DetectorMonitorRestApiIT.java | 155 +++++++++++++++++- 2 files changed, 158 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/model/Detector.java b/src/main/java/org/opensearch/securityanalytics/model/Detector.java index 65e4d18be..4ffca565d 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/Detector.java +++ b/src/main/java/org/opensearch/securityanalytics/model/Detector.java @@ -80,6 +80,8 @@ public class Detector implements Writeable, ToXContentObject { private String name; + private Boolean threatIntelEnabled; + private Boolean enabled; private Schedule schedule; @@ -116,8 +118,6 @@ public class Detector implements Writeable, ToXContentObject { private final String type; - private final Boolean threatIntelEnabled; - public Detector(String id, Long version, String name, Boolean enabled, Schedule schedule, Instant lastUpdateTime, Instant enabledTime, String 
logType, User user, List inputs, List triggers, List monitorIds, @@ -609,6 +609,10 @@ public void setWorkflowIds(List workflowIds) { this.workflowIds = workflowIds; } + public void setThreatIntelEnabled(boolean threatIntelEnabled) { + this.threatIntelEnabled = threatIntelEnabled; + } + public List getWorkflowIds() { return workflowIds; } diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 6e2519442..67f2b083a 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -34,6 +34,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -1054,10 +1055,10 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule verifyWorkflow(detectorMap, monitorIds, 2); } - public void testCreateDetector_threatIntelEnabled() throws IOException { + public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws IOException { String tifdString1 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"abc\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; String tifdString2 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"xyz\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; - String feedIndex = ".opendsearch-sap-threatintel"; + String feedIndex = ".opensearch-sap-threatintel"; indexDoc(feedIndex, "1", tifdString1); indexDoc(feedIndex, "2", tifdString2); updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); @@ -1095,6 +1096,121 @@ public void testCreateDetector_threatIntelEnabled() throws IOException { "}"; SearchResponse response = 
executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + assertEquals(2, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + + indexDoc(index, "1", randomDoc(5, 3, "abc")); + indexDoc(index, "2", randomDoc(5, 3, "xyz")); + indexDoc(index, "3", randomDoc(5, 3, "klm")); + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(2, noOfSigmaRuleMatches); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(),2); 
+ + //update threat intel + String tifdString3 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"klm\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; + + indexDoc(feedIndex, "3", tifdString3); + + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + detectorId = updateResponseBody.get("_id").toString(); + + indexDoc(index, "4", randomDoc(5, 3, "klm")); + + executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(2, noOfSigmaRuleMatches); + threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); + docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(),1); + } + + + + public void testCreateDetectorthreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { + String tifdString1 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"abc\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; + String tifdString2 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"xyz\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; + String feedIndex = ".opensearch-sap-threatintel"; + indexDoc(feedIndex, "1", tifdString1); + indexDoc(feedIndex, "2", tifdString2); + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), 
windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), false); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + assertEquals(1, response.getHits().getTotalHits().value); assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -1129,7 +1245,40 @@ public void testCreateDetector_threatIntelEnabled() throws IOException { String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); - assertNotNull(executeResponse); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) 
monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + + + //update threat intel + String tifdString3 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"klm\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; + + indexDoc(feedIndex, "3", tifdString3); + detector.setThreatIntelEnabled(true); + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + detectorId = updateResponseBody.get("_id").toString(); + + indexDoc(index, "4", randomDoc(5, 3, "klm")); + + executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(2, noOfSigmaRuleMatches); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(),1); } public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesInTrigger() throws IOException { From 5c733ed23ef4da206f46ffc99f13291694d3ea61 Mon Sep 17 00:00:00 2001 From: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Date: Tue, 10 Oct 2023 16:00:06 -0700 Subject: [PATCH 12/39] Threat intel feeds job runner and unit tests (#654) * fix doc level query constructor (#651) Signed-off-by: Surya Sashank Nistala * add mapping 
for indices storing threat intel feed data * fix feed indices mapping * add threat intel feed data dao Signed-off-by: Surya Sashank Nistala * add threatIntelEnabled field in detector. Signed-off-by: Surya Sashank Nistala * add threat intel feed service and searching feeds Signed-off-by: Surya Sashank Nistala * ti feed data to doc level query convertor logic added * plug threat intel feed into detector creation Signed-off-by: Surya Sashank Nistala * Preliminary framework for jobscheduler and datasource (#626) Signed-off-by: Joanne Wang * with listener and processor Signed-off-by: Joanne Wang * removed actions Signed-off-by: Joanne Wang * clean up Signed-off-by: Joanne Wang * added parser Signed-off-by: Joanne Wang * add unit tests Signed-off-by: Joanne Wang * refactored class names Signed-off-by: Joanne Wang * before moving db Signed-off-by: Joanne Wang * after moving db Signed-off-by: Joanne Wang * added actions to plugin and removed user schedule Signed-off-by: Joanne Wang * unit tests Signed-off-by: Joanne Wang * fix build error Signed-off-by: Joanne Wang * changed transport naming Signed-off-by: Joanne Wang --------- Signed-off-by: Surya Sashank Nistala Signed-off-by: Joanne Wang Co-authored-by: Surya Sashank Nistala --- build.gradle | 2 +- .../SecurityAnalyticsPlugin.java | 66 ++- .../SampleExtensionPlugin.java | 161 ++++++ .../SampleExtensionRestHandler.java | 138 +++++ .../sampleextension/SampleJobParameter.java | 153 ++++++ .../sampleextension/SampleJobRunner.java | 149 ++++++ .../settings/SecurityAnalyticsSettings.java | 49 +- .../ThreatIntelFeedDataService.java | 141 ++--- .../threatIntel/ThreatIntelFeedParser.java | 65 +++ .../action/DeleteTIFJobAction.java} | 14 +- .../action/DeleteTIFJobRequest.java} | 16 +- .../threatIntel/action/GetTIFJobAction.java | 26 + .../action/GetTIFJobRequest.java} | 18 +- .../action/GetTIFJobResponse.java} | 40 +- .../action/PutTIFJobAction.java} | 14 +- .../threatIntel/action/PutTIFJobRequest.java | 107 ++++ 
.../action/TransportDeleteTIFJobAction.java} | 83 ++- .../action/TransportGetTIFJobAction.java | 78 +++ .../action/TransportPutTIFJobAction.java} | 95 ++-- .../action/TransportUpdateTIFJobAction.java | 133 +++++ .../action/UpdateTIFJobAction.java} | 14 +- .../action/UpdateTIFJobRequest.java | 123 +++++ .../threatIntel/common/FeedMetadata.java | 287 ++++++++++ .../common/TIFExecutor.java} | 12 +- .../threatIntel/common/TIFJobState.java | 37 ++ .../common/TIFLockService.java} | 29 +- .../threatIntel/common/TIFMetadata.java | 309 +++++++++++ .../jobscheduler/TIFJobExtension.java} | 15 +- .../jobscheduler/TIFJobParameter.java} | 494 ++++-------------- .../jobscheduler/TIFJobParameterService.java} | 201 ++++--- .../jobscheduler/TIFJobRunner.java | 167 ++++++ .../jobscheduler/TIFJobTask.java} | 4 +- .../jobscheduler/TIFJobUpdateService.java | 287 ++++++++++ .../action/GetDatasourceAction.java | 26 - .../action/GetDatasourceTransportAction.java | 79 --- .../action/PutDatasourceRequest.java | 267 ---------- .../action/RestDeleteDatasourceHandler.java | 48 -- .../action/RestGetDatasourceHandler.java | 44 -- .../action/RestPutDatasourceHandler.java | 71 --- .../action/RestUpdateDatasourceHandler.java | 50 -- .../action/UpdateDatasourceRequest.java | 190 ------- .../UpdateDatasourceTransportAction.java | 179 ------- .../common/DatasourceManifest.java | 168 ------ .../threatintel/common/DatasourceState.java | 37 -- .../common/ParameterValidator.java | 2 +- .../common/ThreatIntelSettings.java | 103 ---- .../jobscheduler/DatasourceRunner.java | 159 ------ .../jobscheduler/DatasourceUpdateService.java | 296 ----------- .../mappings/threat_intel_job_mapping.json | 118 +++++ .../resources/threatIntelFeedInfo/feodo.yml | 6 + .../threatIntel/ThreatIntelTestCase.java | 287 ++++++++++ .../threatIntel/ThreatIntelTestHelper.java | 120 +++++ .../threatIntel/common/TIFMetadataTests.java | 35 ++ .../common/ThreatIntelLockServiceTests.java | 117 +++++ 
.../jobscheduler/TIFJobExtensionTests.java | 56 ++ .../TIFJobParameterServiceTests.java | 385 ++++++++++++++ .../jobscheduler/TIFJobParameterTests.java | 90 ++++ .../jobscheduler/TIFJobRunnerTests.java | 177 +++++++ .../TIFJobUpdateServiceTests.java | 205 ++++++++ .../sample_invalid_less_than_two_fields.csv | 2 + .../resources/threatIntel/sample_valid.csv | 3 + 61 files changed, 4337 insertions(+), 2510 deletions(-) create mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java create mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java create mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java create mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/action/PutDatasourceAction.java => threatIntel/action/DeleteTIFJobAction.java} (55%) rename src/main/java/org/opensearch/securityanalytics/{threatintel/action/DeleteDatasourceRequest.java => threatIntel/action/DeleteTIFJobRequest.java} (73%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/action/GetDatasourceRequest.java => threatIntel/action/GetTIFJobRequest.java} (70%) rename src/main/java/org/opensearch/securityanalytics/{threatintel/action/GetDatasourceResponse.java => threatIntel/action/GetTIFJobResponse.java} (59%) rename src/main/java/org/opensearch/securityanalytics/{threatintel/action/DeleteDatasourceAction.java => threatIntel/action/PutTIFJobAction.java} (54%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java rename 
src/main/java/org/opensearch/securityanalytics/{threatintel/action/DeleteDatasourceTransportAction.java => threatIntel/action/TransportDeleteTIFJobAction.java} (53%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/action/PutDatasourceTransportAction.java => threatIntel/action/TransportPutTIFJobAction.java} (61%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/action/UpdateDatasourceAction.java => threatIntel/action/UpdateTIFJobAction.java} (54%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/common/ThreatIntelExecutor.java => threatIntel/common/TIFExecutor.java} (71%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFJobState.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/common/ThreatIntelLockService.java => threatIntel/common/TIFLockService.java} (83%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/jobscheduler/DatasourceExtension.java => threatIntel/jobscheduler/TIFJobExtension.java} (60%) rename src/main/java/org/opensearch/securityanalytics/{threatintel/jobscheduler/Datasource.java => threatIntel/jobscheduler/TIFJobParameter.java} (52%) rename src/main/java/org/opensearch/securityanalytics/{threatintel/dao/DatasourceDao.java => threatIntel/jobscheduler/TIFJobParameterService.java} (62%) create mode 100644 
src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java rename src/main/java/org/opensearch/securityanalytics/{threatintel/jobscheduler/DatasourceTask.java => threatIntel/jobscheduler/TIFJobTask.java} (78%) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceAction.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceTransportAction.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceRequest.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestDeleteDatasourceHandler.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestGetDatasourceHandler.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java create mode 100644 
src/main/resources/mappings/threat_intel_job_mapping.json create mode 100644 src/main/resources/threatIntelFeedInfo/feodo.yml create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java create mode 100644 src/test/resources/threatIntel/sample_invalid_less_than_two_fields.csv create mode 100644 src/test/resources/threatIntel/sample_valid.csv diff --git a/build.gradle b/build.gradle index 49180e6ab..2a958f0b6 100644 --- a/build.gradle +++ b/build.gradle @@ -155,7 +155,7 @@ dependencies { implementation group: 'org.apache.commons', name: 'commons-lang3', version: "${versions.commonslang}" implementation "org.antlr:antlr4-runtime:4.10.1" implementation "com.cronutils:cron-utils:9.1.6" - api files("/Users/snistala/Documents/opensearch/common-utils/build/libs/common-utils-3.0.0.0-SNAPSHOT.jar") + api "org.opensearch:common-utils:${common_utils_version}@jar" api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" compileOnly 
"org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 3e3d6ee07..e9b9382e8 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -4,11 +4,7 @@ */ package org.opensearch.securityanalytics; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; +import java.util.*; import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -35,12 +31,8 @@ import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.mapper.Mapper; -import org.opensearch.plugins.ActionPlugin; -import org.opensearch.plugins.ClusterPlugin; -import org.opensearch.plugins.EnginePlugin; -import org.opensearch.plugins.MapperPlugin; -import org.opensearch.plugins.Plugin; -import org.opensearch.plugins.SearchPlugin; +import org.opensearch.indices.SystemIndexDescriptor; +import org.opensearch.plugins.*; import org.opensearch.repositories.RepositoriesService; import org.opensearch.rest.RestController; import org.opensearch.rest.RestHandler; @@ -59,6 +51,12 @@ import org.opensearch.securityanalytics.resthandler.*; import org.opensearch.securityanalytics.threatIntel.DetectorThreatIntelService; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; +import org.opensearch.securityanalytics.threatIntel.action.*; +import org.opensearch.securityanalytics.threatIntel.common.TIFExecutor; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; +import 
org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobRunner; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; import org.opensearch.securityanalytics.transport.*; import org.opensearch.securityanalytics.model.Rule; import org.opensearch.securityanalytics.model.Detector; @@ -70,10 +68,13 @@ import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.securityanalytics.util.RuleIndices; import org.opensearch.securityanalytics.util.RuleTopicIndices; +import org.opensearch.threadpool.ExecutorBuilder; import org.opensearch.threadpool.ThreadPool; import org.opensearch.watcher.ResourceWatcherService; -public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin, ClusterPlugin { +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; + +public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin, ClusterPlugin, SystemIndexPlugin { private static final Logger log = LogManager.getLogger(SecurityAnalyticsPlugin.class); @@ -114,6 +115,18 @@ public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, Map private Client client; + @Override + public Collection getSystemIndexDescriptors(Settings settings){ + return List.of(new SystemIndexDescriptor(THREAT_INTEL_DATA_INDEX_NAME_PREFIX, "System index used for threat intel data")); + } + + @Override + public List> getExecutorBuilders(Settings settings) { + List> executorBuilders = new ArrayList<>(); + executorBuilders.add(TIFExecutor.executorBuilder(settings)); + return executorBuilders; + } + @Override public Collection createComponents(Client client, ClusterService clusterService, @@ -137,13 +150,21 @@ public Collection createComponents(Client client, mapperService = new MapperService(client, clusterService, indexNameExpressionResolver, indexTemplateManager, 
logTypeService); ruleIndices = new RuleIndices(logTypeService, client, clusterService, threadPool); correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); - ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService, client, indexNameExpressionResolver, xContentRegistry); + ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService.state(), clusterService, client, indexNameExpressionResolver, xContentRegistry); DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); + TIFJobParameterService tifJobParameterService = new TIFJobParameterService(client, clusterService); + TIFJobUpdateService tifJobUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService); + TIFExecutor threatIntelExecutor = new TIFExecutor(threadPool); + TIFLockService threatIntelLockService = new TIFLockService(clusterService, client); + this.client = client; + TIFJobRunner.getJobRunnerInstance().initialize(clusterService,tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService, threadPool); + return List.of( detectorIndices, correlationIndices, correlationRuleIndices, ruleTopicIndices, customLogTypeIndices, ruleIndices, - mapperService, indexTemplateManager, builtinLogTypeLoader, threatIntelFeedDataService, detectorThreatIntelService + mapperService, indexTemplateManager, builtinLogTypeLoader, threatIntelFeedDataService, detectorThreatIntelService, + tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService ); } @@ -245,7 +266,10 @@ public List> getSettings() { SecurityAnalyticsSettings.IS_CORRELATION_INDEX_SETTING, SecurityAnalyticsSettings.CORRELATION_TIME_WINDOW, SecurityAnalyticsSettings.DEFAULT_MAPPING_SCHEMA, - SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE + SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE, + 
SecurityAnalyticsSettings.TIFJOB_UPDATE_INTERVAL, + SecurityAnalyticsSettings.BATCH_SIZE, + SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT ); } @@ -276,8 +300,14 @@ public List> getSettings() { new ActionPlugin.ActionHandler<>(SearchCorrelationRuleAction.INSTANCE, TransportSearchCorrelationRuleAction.class), new ActionHandler<>(IndexCustomLogTypeAction.INSTANCE, TransportIndexCustomLogTypeAction.class), new ActionHandler<>(SearchCustomLogTypeAction.INSTANCE, TransportSearchCustomLogTypeAction.class), - new ActionHandler<>(DeleteCustomLogTypeAction.INSTANCE, TransportDeleteCustomLogTypeAction.class) - ); + new ActionHandler<>(DeleteCustomLogTypeAction.INSTANCE, TransportDeleteCustomLogTypeAction.class), + + new ActionHandler<>(PutTIFJobAction.INSTANCE, TransportPutTIFJobAction.class), + new ActionHandler<>(GetTIFJobAction.INSTANCE, TransportGetTIFJobAction.class), + new ActionHandler<>(UpdateTIFJobAction.INSTANCE, TransportUpdateTIFJobAction.class), + new ActionHandler<>(DeleteTIFJobAction.INSTANCE, TransportDeleteTIFJobAction.class) + + ); } @Override @@ -294,5 +324,5 @@ public void onFailure(Exception e) { log.warn("Failed to initialize LogType config index and builtin log types"); } }); - } + } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java new file mode 100644 index 000000000..653653deb --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java @@ -0,0 +1,161 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.securityanalytics.sampleextension; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.IndexScopedSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsFilter; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParserUtils; +import org.opensearch.env.Environment; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.jobscheduler.spi.JobSchedulerExtension; +import org.opensearch.jobscheduler.spi.ScheduledJobParser; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; +import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; +import org.opensearch.plugins.ActionPlugin; +import org.opensearch.plugins.Plugin; +import org.opensearch.repositories.RepositoriesService; +import org.opensearch.rest.RestController; +import org.opensearch.rest.RestHandler; +import org.opensearch.script.ScriptService; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.watcher.ResourceWatcherService; + +import java.io.IOException; +import java.time.Instant; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.function.Supplier; + +/** + * Sample JobScheduler extension plugin. + * + * It use ".scheduler_sample_extension" index to manage its scheduled jobs, and exposes a REST API + * endpoint using {@link SampleExtensionRestHandler}. 
+ * + */ +public class SampleExtensionPlugin extends Plugin implements ActionPlugin, JobSchedulerExtension { + private static final Logger log = LogManager.getLogger(SampleExtensionPlugin.class); + + static final String JOB_INDEX_NAME = ".scheduler_sample_extension"; + + @Override + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier repositoriesServiceSupplier + ) { + SampleJobRunner jobRunner = SampleJobRunner.getJobRunnerInstance(); + jobRunner.setClusterService(clusterService); + jobRunner.setThreadPool(threadPool); + jobRunner.setClient(client); + + return Collections.emptyList(); + } + + @Override + public String getJobType() { + return "scheduler_sample_extension"; + } + + @Override + public String getJobIndex() { + return JOB_INDEX_NAME; + } + + @Override + public ScheduledJobRunner getJobRunner() { + return SampleJobRunner.getJobRunnerInstance(); + } + + @Override + public ScheduledJobParser getJobParser() { + return (parser, id, jobDocVersion) -> { + SampleJobParameter jobParameter = new SampleJobParameter(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + + while (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { + String fieldName = parser.currentName(); + parser.nextToken(); + switch (fieldName) { + case SampleJobParameter.NAME_FIELD: + jobParameter.setJobName(parser.text()); + break; + case SampleJobParameter.ENABLED_FILED: + jobParameter.setEnabled(parser.booleanValue()); + break; + case SampleJobParameter.ENABLED_TIME_FILED: + jobParameter.setEnabledTime(parseInstantValue(parser)); + break; + case 
SampleJobParameter.LAST_UPDATE_TIME_FIELD: + jobParameter.setLastUpdateTime(parseInstantValue(parser)); + break; + case SampleJobParameter.SCHEDULE_FIELD: + jobParameter.setSchedule(ScheduleParser.parse(parser)); + break; + case SampleJobParameter.INDEX_NAME_FIELD: + jobParameter.setIndexToWatch(parser.text()); + break; + case SampleJobParameter.LOCK_DURATION_SECONDS: + jobParameter.setLockDurationSeconds(parser.longValue()); + break; + case SampleJobParameter.JITTER: + jobParameter.setJitter(parser.doubleValue()); + break; + default: + XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); + } + } + return jobParameter; + }; + } + + private Instant parseInstantValue(XContentParser parser) throws IOException { + if (XContentParser.Token.VALUE_NULL.equals(parser.currentToken())) { + return null; + } + if (parser.currentToken().isValue()) { + return Instant.ofEpochMilli(parser.longValue()); + } + XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); + return null; + } + + @Override + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + return Collections.singletonList(new SampleExtensionRestHandler()); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java new file mode 100644 index 000000000..b0ae1299f --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java @@ -0,0 +1,138 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the 
Apache-2.0 license or a + * compatible open source license. + */ +package org.opensearch.securityanalytics.sampleextension; + +import org.opensearch.action.delete.DeleteRequest; +import org.opensearch.action.delete.DeleteResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.BytesRestResponse; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestResponse; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +/** + * A sample rest handler that supports schedule and deschedule job operation + * + * Users need to provide "id", "index", "job_name", and "interval" parameter to schedule + * a job. e.g. 
+ * {@code + * POST /_plugins/scheduler_sample/watch?id=dashboards-job-id&job_name=watch dashboards index&index=.opensearch_dashboards_1&interval=1 + * } + * + * creates a job with id "dashboards-job-id" and job name "watch dashboards index", + * which logs ".opensearch_dashboards_1" index's shards info every 1 minute + * + * Users can remove that job by calling + * {@code DELETE /_plugins/scheduler_sample/watch?id=dashboards-job-id} + */ +public class SampleExtensionRestHandler extends BaseRestHandler { + public static final String WATCH_INDEX_URI = "/_plugins/scheduler_sample/watch"; + + @Override + public String getName() { + return "Sample JobScheduler extension handler"; + } + + @Override + public List routes() { + return Collections.unmodifiableList( + Arrays.asList(new Route(RestRequest.Method.POST, WATCH_INDEX_URI), new Route(RestRequest.Method.DELETE, WATCH_INDEX_URI)) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + if (request.method().equals(RestRequest.Method.POST)) { + // compose SampleJobParameter object from request + String id = request.param("id"); + String indexName = request.param("index"); + String jobName = request.param("job_name"); + String interval = request.param("interval"); + String lockDurationSecondsString = request.param("lock_duration_seconds"); + Long lockDurationSeconds = lockDurationSecondsString != null ? Long.parseLong(lockDurationSecondsString) : null; + String jitterString = request.param("jitter"); + Double jitter = jitterString != null ? 
Double.parseDouble(jitterString) : null; + + if (id == null || indexName == null) { + throw new IllegalArgumentException("Must specify id and index parameter"); + } + SampleJobParameter jobParameter = new SampleJobParameter( + id, + jobName, + indexName, + new IntervalSchedule(Instant.now(), Integer.parseInt(interval), ChronoUnit.MINUTES), + lockDurationSeconds, + jitter + ); + IndexRequest indexRequest = new IndexRequest().index(SampleExtensionPlugin.JOB_INDEX_NAME) + .id(id) + .source(jobParameter.toXContent(JsonXContent.contentBuilder(), null)) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + return restChannel -> { + // index the job parameter + client.index(indexRequest, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + try { + RestResponse restResponse = new BytesRestResponse( + RestStatus.OK, + indexResponse.toXContent(JsonXContent.contentBuilder(), null) + ); + restChannel.sendResponse(restResponse); + } catch (IOException e) { + restChannel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } + } + + @Override + public void onFailure(Exception e) { + restChannel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } + }); + }; + } else if (request.method().equals(RestRequest.Method.DELETE)) { + // delete job parameter doc from index + String id = request.param("id"); + DeleteRequest deleteRequest = new DeleteRequest().index(SampleExtensionPlugin.JOB_INDEX_NAME).id(id); + + return restChannel -> { + client.delete(deleteRequest, new ActionListener() { + @Override + public void onResponse(DeleteResponse deleteResponse) { + restChannel.sendResponse(new BytesRestResponse(RestStatus.OK, "Job deleted.")); + } + + @Override + public void onFailure(Exception e) { + restChannel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } + }); + }; + } else { + return restChannel -> { + 
restChannel.sendResponse(new BytesRestResponse(RestStatus.METHOD_NOT_ALLOWED, request.method() + " is not allowed.")); + }; + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java new file mode 100644 index 000000000..1353b47ab --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java @@ -0,0 +1,153 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ +package org.opensearch.securityanalytics.sampleextension; + +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.schedule.Schedule; + +import java.io.IOException; +import java.time.Instant; + +/** + * A sample job parameter. + *

+ * It adds an additional "indexToWatch" field to {@link ScheduledJobParameter}, which stores the index + * the job runner will watch. + */ +public class SampleJobParameter implements ScheduledJobParameter { + public static final String NAME_FIELD = "name"; + public static final String ENABLED_FILED = "enabled"; + public static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; + public static final String LAST_UPDATE_TIME_FIELD_READABLE = "last_update_time_field"; + public static final String SCHEDULE_FIELD = "schedule"; + public static final String ENABLED_TIME_FILED = "enabled_time"; + public static final String ENABLED_TIME_FILED_READABLE = "enabled_time_field"; + public static final String INDEX_NAME_FIELD = "index_name_to_watch"; + public static final String LOCK_DURATION_SECONDS = "lock_duration_seconds"; + public static final String JITTER = "jitter"; + + private String jobName; + private Instant lastUpdateTime; + private Instant enabledTime; + private boolean isEnabled; + private Schedule schedule; + private String indexToWatch; + private Long lockDurationSeconds; + private Double jitter; + + public SampleJobParameter() {} + + public SampleJobParameter(String id, String name, String indexToWatch, Schedule schedule, Long lockDurationSeconds, Double jitter) { + this.jobName = name; + this.indexToWatch = indexToWatch; + this.schedule = schedule; + + Instant now = Instant.now(); + this.isEnabled = true; + this.enabledTime = now; + this.lastUpdateTime = now; + this.lockDurationSeconds = lockDurationSeconds; + this.jitter = jitter; + } + + @Override + public String getName() { + return this.jobName; + } + + @Override + public Instant getLastUpdateTime() { + return this.lastUpdateTime; + } + + @Override + public Instant getEnabledTime() { + return this.enabledTime; + } + + @Override + public Schedule getSchedule() { + return this.schedule; + } + + @Override + public boolean isEnabled() { + return this.isEnabled; + } + + @Override + public Long 
getLockDurationSeconds() { + return this.lockDurationSeconds; + } + + @Override + public Double getJitter() { + return jitter; + } + + public String getIndexToWatch() { + return this.indexToWatch; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public void setLastUpdateTime(Instant lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public void setEnabledTime(Instant enabledTime) { + this.enabledTime = enabledTime; + } + + public void setEnabled(boolean enabled) { + isEnabled = enabled; + } + + public void setSchedule(Schedule schedule) { + this.schedule = schedule; + } + + public void setIndexToWatch(String indexToWatch) { + this.indexToWatch = indexToWatch; + } + + public void setLockDurationSeconds(Long lockDurationSeconds) { + this.lockDurationSeconds = lockDurationSeconds; + } + + public void setJitter(Double jitter) { + this.jitter = jitter; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NAME_FIELD, this.jobName) + .field(ENABLED_FILED, this.isEnabled) + .field(SCHEDULE_FIELD, this.schedule) + .field(INDEX_NAME_FIELD, this.indexToWatch); + if (this.enabledTime != null) { + builder.timeField(ENABLED_TIME_FILED, ENABLED_TIME_FILED_READABLE, this.enabledTime.toEpochMilli()); + } + if (this.lastUpdateTime != null) { + builder.timeField(LAST_UPDATE_TIME_FIELD, LAST_UPDATE_TIME_FIELD_READABLE, this.lastUpdateTime.toEpochMilli()); + } + if (this.lockDurationSeconds != null) { + builder.field(LOCK_DURATION_SECONDS, this.lockDurationSeconds); + } + if (this.jitter != null) { + builder.field(JITTER, this.jitter); + } + builder.endObject(); + return builder; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java new file mode 100644 index 000000000..0d62738f1 --- /dev/null +++ 
b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java @@ -0,0 +1,149 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ +package org.opensearch.securityanalytics.sampleextension; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.action.ActionListener; +import org.opensearch.jobscheduler.spi.JobExecutionContext; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; +import org.opensearch.jobscheduler.spi.utils.LockService; +import org.opensearch.plugins.Plugin; +import org.opensearch.threadpool.ThreadPool; + +import java.util.List; +import java.util.UUID; + +/** + * A sample job runner class. + * + * The job runner should be a singleton class if it uses OpenSearch client or other objects passed + * from OpenSearch. Because when registering the job runner to JobScheduler plugin, OpenSearch has + * not invoked plugins' createComponents() method. That is to say, the plugin is not completely initialized, + * and the OpenSearch {@link Client}, {@link ClusterService} and other objects + * are not available to plugin and this job runner. + * + * So we have to move this job runner initialization to {@link Plugin} createComponents() method, and use a + * singleton job runner to ensure we register a usable job runner instance to JobScheduler plugin. + * + * This sample job runner takes the "indexToWatch" from job parameter and logs that index's shards. 
+ */ +public class SampleJobRunner implements ScheduledJobRunner { + + private static final Logger log = LogManager.getLogger(ScheduledJobRunner.class); + + private static SampleJobRunner INSTANCE; + + public static SampleJobRunner getJobRunnerInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (SampleJobRunner.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new SampleJobRunner(); + return INSTANCE; + } + } + + private ClusterService clusterService; + private ThreadPool threadPool; + private Client client; + + private SampleJobRunner() { + // Singleton class, use getJobRunner method instead of constructor + } + + public void setClusterService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + public void setThreadPool(ThreadPool threadPool) { + this.threadPool = threadPool; + } + + public void setClient(Client client) { + this.client = client; + } + + @Override + public void runJob(ScheduledJobParameter jobParameter, JobExecutionContext context) { + if (!(jobParameter instanceof SampleJobParameter)) { + throw new IllegalStateException( + "Job parameter is not instance of SampleJobParameter, type: " + jobParameter.getClass().getCanonicalName() + ); + } + + if (this.clusterService == null) { + throw new IllegalStateException("ClusterService is not initialized."); + } + + if (this.threadPool == null) { + throw new IllegalStateException("ThreadPool is not initialized."); + } + + final LockService lockService = context.getLockService(); + + Runnable runnable = () -> { + if (jobParameter.getLockDurationSeconds() != null) { + lockService.acquireLock(jobParameter, context, ActionListener.wrap(lock -> { + if (lock == null) { + return; + } + + SampleJobParameter parameter = (SampleJobParameter) jobParameter; + StringBuilder msg = new StringBuilder(); + msg.append("Watching index ").append(parameter.getIndexToWatch()).append("\n"); + + List shardRoutingList = 
this.clusterService.state().routingTable().allShards(parameter.getIndexToWatch()); + for (ShardRouting shardRouting : shardRoutingList) { + msg.append(shardRouting.shardId().getId()) + .append("\t") + .append(shardRouting.currentNodeId()) + .append("\t") + .append(shardRouting.active() ? "active" : "inactive") + .append("\n"); + } + log.info(msg.toString()); + runTaskForIntegrationTests(parameter); + runTaskForLockIntegrationTests(parameter); + + lockService.release( + lock, + ActionListener.wrap(released -> { log.info("Released lock for job {}", jobParameter.getName()); }, exception -> { + throw new IllegalStateException("Failed to release lock."); + }) + ); + }, exception -> { throw new IllegalStateException("Failed to acquire lock."); })); + } + }; + + threadPool.generic().submit(runnable); + } + + private void runTaskForIntegrationTests(SampleJobParameter jobParameter) { + this.client.index( + new IndexRequest(jobParameter.getIndexToWatch()).id(UUID.randomUUID().toString()) + .source("{\"message\": \"message\"}", XContentType.JSON) + ); + } + + private void runTaskForLockIntegrationTests(SampleJobParameter jobParameter) throws InterruptedException { + if (jobParameter.getName().equals("sample-job-lock-test-it")) { + Thread.sleep(180000); + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java index 4085d7ae2..967bd3165 100644 --- a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java +++ b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java @@ -4,10 +4,14 @@ */ package org.opensearch.securityanalytics.settings; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.List; import java.util.concurrent.TimeUnit; import org.opensearch.common.settings.Setting; import 
org.opensearch.common.unit.TimeValue; -import org.opensearch.securityanalytics.model.FieldMappingDoc; +import org.opensearch.jobscheduler.repackage.com.cronutils.utils.VisibleForTesting; public class SecurityAnalyticsSettings { public static final String CORRELATION_INDEX = "index.correlation"; @@ -117,4 +121,47 @@ public class SecurityAnalyticsSettings { "ecs", Setting.Property.NodeScope, Setting.Property.Dynamic ); + + // threat intel settings + /** + * Default update interval to be used in threat intel tif job creation API + */ + public static final Setting TIFJOB_UPDATE_INTERVAL = Setting.longSetting( + "plugins.security_analytics.threatintel.tifjob.update_interval_in_days", + 1l, + 1l, //todo: change the min value + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Bulk size for indexing threat intel feed data + */ + public static final Setting BATCH_SIZE = Setting.intSetting( + "plugins.security_analytics.threatintel.tifjob.batch_size", + 10000, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Timeout value for threat intel processor + */ + public static final Setting THREAT_INTEL_TIMEOUT = Setting.timeSetting( + "plugins.security_analytics.threat_intel_timeout", + TimeValue.timeValueSeconds(30), + TimeValue.timeValueSeconds(1), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Return all settings of threat intel feature + * @return a list of all settings for threat intel feature + */ + public static final List> settings() { + return List.of(TIFJOB_UPDATE_INTERVAL, BATCH_SIZE, THREAT_INTEL_TIMEOUT); + } + } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 1a7001725..b01d602b3 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -1,13 +1,10 @@ package org.opensearch.securityanalytics.threatIntel; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVParser; import org.apache.commons.csv.CSVRecord; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchException; -import org.opensearch.SpecialPermission; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.bulk.BulkRequest; @@ -22,7 +19,6 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.SuppressForbidden; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; @@ -38,43 +34,31 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.securityanalytics.findings.FindingsService; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; import org.opensearch.securityanalytics.threatIntel.common.StashedThreadContext; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelSettings; -import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; import org.opensearch.securityanalytics.util.IndexUtils; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; -import 
org.opensearch.securityanalytics.threatIntel.common.Constants; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.net.URL; -import java.net.URLConnection; import java.nio.charset.StandardCharsets; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.time.Instant; import java.util.*; import java.util.stream.Collectors; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import static org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; /** * Service to handle CRUD operations on Threat Intel Feed Data */ public class ThreatIntelFeedDataService { private static final Logger log = LogManager.getLogger(FindingsService.class); - private static final String SCHEMA_VERSION = "schema_version"; - private static final String IOC_TYPE = "ioc_type"; - private static final String IOC_VALUE = "ioc_value"; - private static final String FEED_ID = "feed_id"; - private static final String TIMESTAMP = "timestamp"; - private static final String TYPE = "type"; - private static final String DATA_FIELD_NAME = "_data"; + private final ClusterState state; private final Client client; private final IndexNameExpressionResolver indexNameExpressionResolver; @@ -95,16 +79,20 @@ public class ThreatIntelFeedDataService { true ); private final ClusterService clusterService; + private final ClusterSettings clusterSettings; public ThreatIntelFeedDataService( + ClusterState state, ClusterService clusterService, Client client, IndexNameExpressionResolver indexNameExpressionResolver, NamedXContentRegistry xContentRegistry) { + this.state = state; this.client = client; this.indexNameExpressionResolver = indexNameExpressionResolver; this.xContentRegistry = xContentRegistry; 
this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); } private final NamedXContentRegistry xContentRegistry; @@ -150,6 +138,9 @@ private List getTifdList(SearchResponse searchResponse) { return list; } + + + /** * Create an index for a threat intel feed * @@ -167,28 +158,13 @@ public void createIndexIfNotExists(final String indexName) { .mapping(getIndexMapping()); StashedThreadContext.run( client, - () -> client.admin().indices().create(createIndexRequest).actionGet(this.clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) + () -> client.admin().indices().create(createIndexRequest).actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)) ); } - private void freezeIndex(final String indexName) { - ClusterSettings clusterSettings = this.clusterService.getClusterSettings(); - TimeValue timeout = this.clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT); - StashedThreadContext.run(client, () -> { - client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); - client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); - client.admin() - .indices() - .prepareUpdateSettings(indexName) - .setSettings(INDEX_SETTING_TO_FREEZE) - .execute() - .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)); - }); - } - private String getIndexMapping() { try { - try (InputStream is = DatasourceDao.class.getResourceAsStream("/mappings/threat_intel_feed_mapping.json")) { // TODO: check Datasource dao and this mapping + try (InputStream is = TIFJobParameterService.class.getResourceAsStream("/mappings/threat_intel_feed_mapping.json")) { try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { return reader.lines().map(String::trim).collect(Collectors.joining()); } @@ -199,74 +175,48 @@ private String getIndexMapping() { } } - /** - * 
Create CSVParser of a threat intel feed - * - * @param manifest Datasource manifest - * @return CSVParser for threat intel feed - */ - @SuppressForbidden(reason = "Need to connect to http endpoint to read threat intel feed database file") - public CSVParser getDatabaseReader(final DatasourceManifest manifest) { - SpecialPermission.check(); - return AccessController.doPrivileged((PrivilegedAction) () -> { - try { - URL url = new URL(manifest.getUrl()); - return internalGetDatabaseReader(manifest, url.openConnection()); - } catch (IOException e) { - log.error("Exception: failed to read threat intel feed data from {}",manifest.getUrl(), e); - throw new OpenSearchException("failed to read threat intel feed data from {}", manifest.getUrl(), e); - } - }); - } - - @SuppressForbidden(reason = "Need to connect to http endpoint to read threat intel feed database file") // TODO: update this function because no zip file... - protected CSVParser internalGetDatabaseReader(final DatasourceManifest manifest, final URLConnection connection) throws IOException { - connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); - ZipInputStream zipIn = new ZipInputStream(connection.getInputStream()); - ZipEntry zipEntry = zipIn.getNextEntry(); - while (zipEntry != null) { - if (zipEntry.getName().equalsIgnoreCase(manifest.getDbName()) == false) { - zipEntry = zipIn.getNextEntry(); - continue; - } - return new CSVParser(new BufferedReader(new InputStreamReader(zipIn)), CSVFormat.RFC4180); - } - throw new IllegalArgumentException( - String.format(Locale.ROOT, "database file [%s] does not exist in the zip file [%s]", manifest.getDbName(), manifest.getUrl()) - ); - } - /** * Puts threat intel feed from CSVRecord iterator into a given index in bulk * - * @param indexName Index name to puts the TIF data + * @param indexName Index name to save the threat intel feed * @param fields Field name matching with data in CSVRecord in order * @param iterator TIF data to insert * 
@param renewLock Runnable to renew lock */ - public void saveThreatIntelFeedData( + public void saveThreatIntelFeedDataCSV( final String indexName, final String[] fields, final Iterator iterator, - final Runnable renewLock -// final ThreatIntelFeedData threatIntelFeedData + final Runnable renewLock, + final TIFMetadata tifMetadata ) throws IOException { if (indexName == null || fields == null || iterator == null || renewLock == null){ - throw new IllegalArgumentException("Fields cannot be null"); + throw new IllegalArgumentException("Parameters cannot be null, failed to save threat intel feed data"); } - ClusterSettings clusterSettings = this.clusterService.getClusterSettings(); - TimeValue timeout = clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT); - Integer batchSize = clusterSettings.get(ThreatIntelSettings.BATCH_SIZE); + + TimeValue timeout = clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT); + Integer batchSize = clusterSettings.get(SecurityAnalyticsSettings.BATCH_SIZE); final BulkRequest bulkRequest = new BulkRequest(); Queue requests = new LinkedList<>(); for (int i = 0; i < batchSize; i++) { requests.add(Requests.indexRequest(indexName)); } + while (iterator.hasNext()) { CSVRecord record = iterator.next(); -// XContentBuilder tifData = threatIntelFeedData.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); + String iocType = tifMetadata.getFeedType(); + if (tifMetadata.getContainedIocs().get(0) == "ip") { //TODO: dynamically get the type + iocType = "ip"; + } + Integer colNum = Integer.parseInt(tifMetadata.getIocCol()); + String iocValue = record.values()[colNum]; + String feedId = tifMetadata.getFeedId(); + Instant timestamp = Instant.now(); + + ThreatIntelFeedData threatIntelFeedData = new ThreatIntelFeedData(iocType, iocValue, feedId, timestamp); + XContentBuilder tifData = threatIntelFeedData.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); IndexRequest indexRequest = (IndexRequest) 
requests.poll(); -// indexRequest.source(tifData); + indexRequest.source(tifData); indexRequest.id(record.get(0)); bulkRequest.add(indexRequest); if (iterator.hasNext() == false || bulkRequest.requests().size() == batchSize) { @@ -286,12 +236,25 @@ public void saveThreatIntelFeedData( freezeIndex(indexName); } + private void freezeIndex(final String indexName) { + TimeValue timeout = clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT); + StashedThreadContext.run(client, () -> { + client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); + client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); + client.admin() + .indices() + .prepareUpdateSettings(indexName) + .setSettings(INDEX_SETTING_TO_FREEZE) + .execute() + .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); + }); + } + public void deleteThreatIntelDataIndex(final String index) { deleteThreatIntelDataIndex(Arrays.asList(index)); } public void deleteThreatIntelDataIndex(final List indices) { - ClusterSettings clusterSettings = this.clusterService.getClusterSettings(); if (indices == null || indices.isEmpty()) { return; } @@ -314,11 +277,11 @@ public void deleteThreatIntelDataIndex(final List indices) { .prepareDelete(indices.toArray(new String[0])) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .execute() - .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) + .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)) ); if (response.isAcknowledged() == false) { - throw new OpenSearchException("failed to delete data[{}] in datasource", String.join(",", indices)); + throw new OpenSearchException("failed to delete data[{}]", String.join(",", indices)); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java new file mode 100644 index 000000000..ab4477a44 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java @@ -0,0 +1,65 @@ +package org.opensearch.securityanalytics.threatIntel; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchException; +import org.opensearch.SpecialPermission; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.common.Constants; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; + +import java.io.*; +import java.net.URL; +import java.net.URLConnection; +import java.security.AccessController; +import java.security.PrivilegedAction; + +//Parser helper class +public class ThreatIntelFeedParser { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + /** + * Create CSVParser of a threat intel feed + * + * @param tifMetadata Threat intel feed metadata + * @return parser for threat intel feed + */ + @SuppressForbidden(reason = "Need to connect to http endpoint to read threat intel feed database file") + public static CSVParser getThreatIntelFeedReaderCSV(final TIFMetadata tifMetadata) { + SpecialPermission.check(); + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + URL url = new URL(tifMetadata.getUrl()); + URLConnection connection = url.openConnection(); + connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + return new CSVParser(new BufferedReader(new InputStreamReader(connection.getInputStream())), CSVFormat.RFC4180); + } catch (IOException e) { + log.error("Exception: failed to read threat intel feed 
data from {}",tifMetadata.getUrl(), e); + throw new OpenSearchException("failed to read threat intel feed data from {}", tifMetadata.getUrl(), e); + } + }); + } + + /** + * Validate header + * + * 1. header should not be null + * 2. the number of values in header should be more than one + * + * @param header the header + * @return CSVRecord the input header + */ + public static CSVRecord validateHeader(CSVRecord header) { + if (header == null) { + throw new OpenSearchException("threat intel feed database is empty"); + } + if (header.values().length < 2) { + throw new OpenSearchException("threat intel feed database should have at least two fields"); + } + return header; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobAction.java similarity index 55% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceAction.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobAction.java index 6a6acb9ed..d0fd0bee4 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobAction.java @@ -9,19 +9,19 @@ import org.opensearch.action.support.master.AcknowledgedResponse; /** - * Threat intel datasource creation action + * Threat intel tif job delete action */ -public class PutDatasourceAction extends ActionType { +public class DeleteTIFJobAction extends ActionType { /** - * Put datasource action instance + * Delete tif job action instance */ - public static final PutDatasourceAction INSTANCE = new PutDatasourceAction(); + public static final DeleteTIFJobAction INSTANCE = new DeleteTIFJobAction(); /** - * Put datasource action name + * Delete tif job action name */ - public static final String NAME = 
"cluster:admin/security_analytics/datasource/put"; + public static final String NAME = "cluster:admin/security_analytics/tifjob/delete"; - private PutDatasourceAction() { + private DeleteTIFJobAction() { super(NAME, AcknowledgedResponse::new); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java similarity index 73% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceRequest.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java index 654b93985..54e41126f 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java @@ -14,14 +14,14 @@ import java.io.IOException; /** - * Threat intel datasource delete request + * Threat intel feed job delete request */ -public class DeleteDatasourceRequest extends ActionRequest { +public class DeleteTIFJobRequest extends ActionRequest { private static final ParameterValidator VALIDATOR = new ParameterValidator(); /** - * @param name the datasource name - * @return the datasource name + * @param name the TIF job name + * @return the TIF job name */ private String name; @@ -31,21 +31,21 @@ public class DeleteDatasourceRequest extends ActionRequest { * @param in the stream input * @throws IOException IOException */ - public DeleteDatasourceRequest(final StreamInput in) throws IOException { + public DeleteTIFJobRequest(final StreamInput in) throws IOException { super(in); this.name = in.readString(); } - public DeleteDatasourceRequest(final String name) { + public DeleteTIFJobRequest(final String name) { this.name = name; } @Override public ActionRequestValidationException validate() { ActionRequestValidationException errors = null; - if 
(VALIDATOR.validateDatasourceName(name).isEmpty() == false) { + if (VALIDATOR.validateTIFJobName(name).isEmpty() == false) { errors = new ActionRequestValidationException(); - errors.addValidationError("no such datasource exist"); + errors.addValidationError("no such job exist"); } return errors; } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java new file mode 100644 index 000000000..8f1034d94 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java @@ -0,0 +1,26 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.action.ActionType; + +/** + * Threat intel tif job get action + */ +public class GetTIFJobAction extends ActionType { + /** + * Get tif job action instance + */ + public static final GetTIFJobAction INSTANCE = new GetTIFJobAction(); + /** + * Get tif job action name + */ + public static final String NAME = "cluster:admin/security_analytics/tifjob/get"; + + private GetTIFJobAction() { + super(NAME, GetTIFJobResponse::new); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobRequest.java similarity index 70% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceRequest.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobRequest.java index 16f36b08e..c40e1f747 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobRequest.java @@ -13,24 +13,24 @@ import java.io.IOException; /** - * threat intel datasource get 
request + * threat intel tif job get request */ -public class GetDatasourceRequest extends ActionRequest { +public class GetTIFJobRequest extends ActionRequest { /** - * @param names the datasource names - * @return the datasource names + * @param names the tif job names + * @return the tif job names */ private String[] names; /** - * Constructs a new get datasource request with a list of datasources. + * Constructs a new get tif job request with a list of tif jobs. * - * If the list of datasources is empty or it contains a single element "_all", all registered datasources + * If the list of tif jobs is empty or it contains a single element "_all", all registered tif jobs * are returned. * - * @param names list of datasource names + * @param names list of tif job names */ - public GetDatasourceRequest(final String[] names) { + public GetTIFJobRequest(final String[] names) { this.names = names; } @@ -39,7 +39,7 @@ public GetDatasourceRequest(final String[] names) { * @param in the stream input * @throws IOException IOException */ - public GetDatasourceRequest(final StreamInput in) throws IOException { + public GetTIFJobRequest(final StreamInput in) throws IOException { super(in); this.names = in.readStringArray(); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceResponse.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobResponse.java similarity index 59% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceResponse.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobResponse.java index d404ad728..507f1f4ee 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobResponse.java @@ -11,34 +11,32 @@ import org.opensearch.core.common.io.stream.StreamOutput; import 
org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; import java.io.IOException; import java.time.Instant; import java.util.List; /** - * threat intel datasource get request + * threat intel tif job get request */ -public class GetDatasourceResponse extends ActionResponse implements ToXContentObject { - private static final ParseField FIELD_NAME_DATASOURCES = new ParseField("datasources"); +public class GetTIFJobResponse extends ActionResponse implements ToXContentObject { + private static final ParseField FIELD_NAME_TIFJOBS = new ParseField("tifjobs"); private static final ParseField FIELD_NAME_NAME = new ParseField("name"); private static final ParseField FIELD_NAME_STATE = new ParseField("state"); - private static final ParseField FIELD_NAME_ENDPOINT = new ParseField("endpoint"); private static final ParseField FIELD_NAME_UPDATE_INTERVAL = new ParseField("update_interval_in_days"); private static final ParseField FIELD_NAME_NEXT_UPDATE_AT = new ParseField("next_update_at_in_epoch_millis"); private static final ParseField FIELD_NAME_NEXT_UPDATE_AT_READABLE = new ParseField("next_update_at"); - private static final ParseField FIELD_NAME_DATABASE = new ParseField("database"); private static final ParseField FIELD_NAME_UPDATE_STATS = new ParseField("update_stats"); - private List datasources; + private List tifJobParameters; /** * Default constructor * - * @param datasources List of datasources + * @param tifJobParameters List of tifJobParameters */ - public GetDatasourceResponse(final List datasources) { - this.datasources = datasources; + public GetTIFJobResponse(final List tifJobParameters) { + this.tifJobParameters = tifJobParameters; } /** @@ -46,32 +44,30 @@ public GetDatasourceResponse(final List datasources) { * * @param in the stream input */ - public 
GetDatasourceResponse(final StreamInput in) throws IOException { - datasources = in.readList(Datasource::new); + public GetTIFJobResponse(final StreamInput in) throws IOException { + tifJobParameters = in.readList(TIFJobParameter::new); } @Override public void writeTo(final StreamOutput out) throws IOException { - out.writeList(datasources); + out.writeList(tifJobParameters); } @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); - builder.startArray(FIELD_NAME_DATASOURCES.getPreferredName()); - for (Datasource datasource : datasources) { + builder.startArray(FIELD_NAME_TIFJOBS.getPreferredName()); + for (TIFJobParameter tifJobParameter : tifJobParameters) { builder.startObject(); - builder.field(FIELD_NAME_NAME.getPreferredName(), datasource.getName()); - builder.field(FIELD_NAME_STATE.getPreferredName(), datasource.getState()); - builder.field(FIELD_NAME_ENDPOINT.getPreferredName(), datasource.getEndpoint()); - builder.field(FIELD_NAME_UPDATE_INTERVAL.getPreferredName(), datasource.getSchedule()); //TODO + builder.field(FIELD_NAME_NAME.getPreferredName(), tifJobParameter.getName()); + builder.field(FIELD_NAME_STATE.getPreferredName(), tifJobParameter.getState()); + builder.field(FIELD_NAME_UPDATE_INTERVAL.getPreferredName(), tifJobParameter.getSchedule()); //TODO builder.timeField( FIELD_NAME_NEXT_UPDATE_AT.getPreferredName(), FIELD_NAME_NEXT_UPDATE_AT_READABLE.getPreferredName(), - datasource.getSchedule().getNextExecutionTime(Instant.now()).toEpochMilli() + tifJobParameter.getSchedule().getNextExecutionTime(Instant.now()).toEpochMilli() ); - builder.field(FIELD_NAME_DATABASE.getPreferredName(), datasource.getDatabase()); - builder.field(FIELD_NAME_UPDATE_STATS.getPreferredName(), datasource.getUpdateStats()); + builder.field(FIELD_NAME_UPDATE_STATS.getPreferredName(), tifJobParameter.getUpdateStats()); builder.endObject(); } builder.endArray(); diff --git 
a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobAction.java similarity index 54% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceAction.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobAction.java index 35effc4b7..01863f862 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobAction.java @@ -9,19 +9,19 @@ import org.opensearch.action.support.master.AcknowledgedResponse; /** - * Threat intel datasource delete action + * Threat intel tif job creation action */ -public class DeleteDatasourceAction extends ActionType { +public class PutTIFJobAction extends ActionType { /** - * Delete datasource action instance + * Put tif job action instance */ - public static final DeleteDatasourceAction INSTANCE = new DeleteDatasourceAction(); + public static final PutTIFJobAction INSTANCE = new PutTIFJobAction(); /** - * Delete datasource action name + * Put tif job action name */ - public static final String NAME = "cluster:admin/security_analytics/datasource/delete"; + public static final String NAME = "cluster:admin/security_analytics/tifjob/put"; - private DeleteDatasourceAction() { + private PutTIFJobAction() { super(NAME, AcknowledgedResponse::new); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java new file mode 100644 index 000000000..1662979d2 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java @@ -0,0 +1,107 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.securityanalytics.threatIntel.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; + +import java.io.IOException; +import java.util.List; + +/** + * Threat intel tif job creation request + */ +public class PutTIFJobRequest extends ActionRequest { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + public static final ParseField NAME_FIELD = new ParseField("name_FIELD"); +// public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + + /** + * @param name the tif job name + * @return the tif job name + */ + private String name; + + /** + * @param updateInterval update interval of a tif job + * @return update interval of a tif job + */ + private TimeValue updateInterval; + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public TimeValue getUpdateInterval() { + return this.updateInterval; + } + + public void setUpdateInterval(TimeValue timeValue) { + this.updateInterval = timeValue; + } + + /** + * Parser of a tif job + */ + public static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>("put_tifjob"); + PARSER.declareString((request, val) -> request.setName(val), NAME_FIELD); +// PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), 
UPDATE_INTERVAL_IN_DAYS_FIELD); + } + + /** + * Default constructor + * @param name name of a tif job + */ + public PutTIFJobRequest(final String name) { + this.name = name; + } + + /** + * Constructor with stream input + * @param in the stream input + * @throws IOException IOException + */ + public PutTIFJobRequest(final StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + this.updateInterval = in.readTimeValue(); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + out.writeTimeValue(updateInterval); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = new ActionRequestValidationException(); + List errorMsgs = VALIDATOR.validateTIFJobName(name); + if (errorMsgs.isEmpty() == false) { + errorMsgs.stream().forEach(msg -> errors.addValidationError(msg)); + } + return errors.validationErrors().isEmpty() ? null : errors; + } + +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceTransportAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java similarity index 53% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceTransportAction.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java index 5ff65a945..638893f2e 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/DeleteDatasourceTransportAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java @@ -15,14 +15,13 @@ import org.opensearch.common.inject.Inject; import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; - import org.opensearch.ingest.IngestService; import org.opensearch.securityanalytics.model.DetectorTrigger; -import 
org.opensearch.securityanalytics.threatIntel.common.DatasourceState; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; -import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -30,17 +29,16 @@ import java.io.IOException; /** - * Transport action to delete datasource + * Transport action to delete tif job */ -public class DeleteDatasourceTransportAction extends HandledTransportAction { +public class TransportDeleteTIFJobAction extends HandledTransportAction { private static final Logger log = LogManager.getLogger(DetectorTrigger.class); private static final long LOCK_DURATION_IN_SECONDS = 300l; - private final ThreatIntelLockService lockService; + private final TIFLockService lockService; private final IngestService ingestService; - private final DatasourceDao datasourceDao; + private final TIFJobParameterService tifJobParameterService; private final ThreatIntelFeedDataService threatIntelFeedDataService; -// private final Ip2GeoProcessorDao ip2GeoProcessorDao; private final ThreadPool threadPool; /** @@ -49,37 +47,35 @@ public class DeleteDatasourceTransportAction extends HandledTransportAction listener) { + protected void doExecute(final Task task, final DeleteTIFJobRequest request, final ActionListener listener) { lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { if 
(lock == null) { listener.onFailure( @@ -93,13 +89,13 @@ protected void doExecute(final Task task, final DeleteDatasourceRequest request, // TODO: makes every sub-methods as async call to avoid using a thread in generic pool threadPool.generic().submit(() -> { try { - deleteDatasource(request.getName()); + deleteTIFJob(request.getName()); lockService.releaseLock(lock); listener.onResponse(new AcknowledgedResponse(true)); } catch (Exception e) { lockService.releaseLock(lock); listener.onFailure(e); - log.error("delete data source failed",e); + log.error("delete tif job failed",e); } }); } catch (Exception e) { @@ -110,43 +106,24 @@ protected void doExecute(final Task task, final DeleteDatasourceRequest request, }, exception -> { listener.onFailure(exception); })); } - protected void deleteDatasource(final String datasourceName) throws IOException { - Datasource datasource = datasourceDao.getDatasource(datasourceName); - if (datasource == null) { - throw new ResourceNotFoundException("no such datasource exist"); + protected void deleteTIFJob(final String tifJobName) throws IOException { + TIFJobParameter tifJobParameter = tifJobParameterService.getJobParameter(tifJobName); + if (tifJobParameter == null) { + throw new ResourceNotFoundException("no such tifJobParameter exist"); } - DatasourceState previousState = datasource.getState(); -// setDatasourceStateAsDeleting(datasource); + TIFJobState previousState = tifJobParameter.getState(); + tifJobParameter.setState(TIFJobState.DELETING); + tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); try { - threatIntelFeedDataService.deleteThreatIntelDataIndex(datasource.getIndices()); + threatIntelFeedDataService.deleteThreatIntelDataIndex(tifJobParameter.getIndices()); } catch (Exception e) { - if (previousState.equals(datasource.getState()) == false) { - datasource.setState(previousState); - datasourceDao.updateDatasource(datasource); + if (previousState.equals(tifJobParameter.getState()) == false) { + 
tifJobParameter.setState(previousState); + tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); } throw e; } - datasourceDao.deleteDatasource(datasource); + tifJobParameterService.deleteTIFJobParameter(tifJobParameter); } - -// private void setDatasourceStateAsDeleting(final Datasource datasource) { -// if (datasourceDao.getProcessors(datasource.getName()).isEmpty() == false) { -// throw new OpenSearchStatusException("datasource is being used by one of processors", RestStatus.BAD_REQUEST); -// } -// -// DatasourceState previousState = datasource.getState(); -// datasource.setState(DatasourceState.DELETING); -// datasourceDao.updateDatasource(datasource); -// -// // Check again as processor might just have been created. -// // If it fails to update the state back to the previous state, the new processor -// // will fail to convert an ip to a geo data. -// // In such case, user have to delete the processor and delete this datasource again. -// if (datasourceDao.getProcessors(datasource.getName()).isEmpty() == false) { -// datasource.setState(previousState); -// datasourceDao.updateDatasource(datasource); -// throw new OpenSearchStatusException("datasource is being used by one of processors", RestStatus.BAD_REQUEST); -// } -// } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java new file mode 100644 index 000000000..1f884eea1 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java @@ -0,0 +1,78 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.OpenSearchException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import 
org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.tasks.Task; +import org.opensearch.transport.TransportService; + +import java.util.Collections; +import java.util.List; + +/** + * Transport action to get tif job + */ +public class TransportGetTIFJobAction extends HandledTransportAction { + private final TIFJobParameterService tifJobParameterService; + + /** + * Default constructor + * @param transportService the transport service + * @param actionFilters the action filters + * @param tifJobParameterService the tif job parameter service facade + */ + @Inject + public TransportGetTIFJobAction( + final TransportService transportService, + final ActionFilters actionFilters, + final TIFJobParameterService tifJobParameterService + ) { + super(GetTIFJobAction.NAME, transportService, actionFilters, GetTIFJobRequest::new); + this.tifJobParameterService = tifJobParameterService; + } + + @Override + protected void doExecute(final Task task, final GetTIFJobRequest request, final ActionListener listener) { + if (shouldGetAllTIFJobs(request)) { + // We don't expect too many tif jobs. Therefore, querying all tif jobs without pagination should be fine. 
+ tifJobParameterService.getAllTIFJobParameters(newActionListener(listener)); + } else { + tifJobParameterService.getTIFJobParameters(request.getNames(), newActionListener(listener)); + } + } + + private boolean shouldGetAllTIFJobs(final GetTIFJobRequest request) { + if (request.getNames() == null) { + throw new OpenSearchException("names in a request should not be null"); + } + return request.getNames().length == 0 || (request.getNames().length == 1 && "_all".equals(request.getNames()[0])); + } + + protected ActionListener> newActionListener(final ActionListener listener) { + return new ActionListener<>() { + @Override + public void onResponse(final List tifJobParameters) { + listener.onResponse(new GetTIFJobResponse(tifJobParameters)); + } + + @Override + public void onFailure(final Exception e) { + if (e instanceof IndexNotFoundException) { + listener.onResponse(new GetTIFJobResponse(Collections.emptyList())); + return; + } + listener.onFailure(e); + } + }; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceTransportAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java similarity index 61% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceTransportAction.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java index f1f87c4c5..c32a64c1c 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceTransportAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java @@ -5,12 +5,6 @@ package org.opensearch.securityanalytics.threatIntel.action; -import static org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService.LOCK_DURATION_IN_SECONDS; - -import java.time.Instant; -import java.util.ConcurrentModificationException; -import 
java.util.concurrent.atomic.AtomicReference; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.ResourceAlreadyExistsException; @@ -21,58 +15,63 @@ import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.common.inject.Inject; import org.opensearch.core.action.ActionListener; - import org.opensearch.core.rest.RestStatus; import org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.jobscheduler.spi.LockModel; import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; -import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceUpdateService; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; +import java.time.Instant; +import java.util.ConcurrentModificationException; +import java.util.concurrent.atomic.AtomicReference; + +import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.LOCK_DURATION_IN_SECONDS; + /** - * Transport action to create datasource + * Transport action to create tif job */ -public class PutDatasourceTransportAction extends HandledTransportAction { +public class TransportPutTIFJobAction extends 
HandledTransportAction { private static final Logger log = LogManager.getLogger(DetectorTrigger.class); private final ThreadPool threadPool; - private final DatasourceDao datasourceDao; - private final DatasourceUpdateService datasourceUpdateService; - private final ThreatIntelLockService lockService; + private final TIFJobParameterService tifJobParameterService; + private final TIFJobUpdateService tifJobUpdateService; + private final TIFLockService lockService; /** * Default constructor * @param transportService the transport service * @param actionFilters the action filters * @param threadPool the thread pool - * @param datasourceDao the datasource facade - * @param datasourceUpdateService the datasource update service + * @param tifJobParameterService the tif job parameter service facade + * @param tifJobUpdateService the tif job update service * @param lockService the lock service */ @Inject - public PutDatasourceTransportAction( + public TransportPutTIFJobAction( final TransportService transportService, final ActionFilters actionFilters, final ThreadPool threadPool, - final DatasourceDao datasourceDao, - final DatasourceUpdateService datasourceUpdateService, - final ThreatIntelLockService lockService + final TIFJobParameterService tifJobParameterService, + final TIFJobUpdateService tifJobUpdateService, + final TIFLockService lockService ) { - super(PutDatasourceAction.NAME, transportService, actionFilters, PutDatasourceRequest::new); + super(PutTIFJobAction.NAME, transportService, actionFilters, PutTIFJobRequest::new); this.threadPool = threadPool; - this.datasourceDao = datasourceDao; - this.datasourceUpdateService = datasourceUpdateService; + this.tifJobParameterService = tifJobParameterService; + this.tifJobUpdateService = tifJobUpdateService; this.lockService = lockService; } @Override - protected void doExecute(final Task task, final PutDatasourceRequest request, final ActionListener listener) { + protected void doExecute(final Task task, final 
PutTIFJobRequest request, final ActionListener listener) { lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { if (lock == null) { listener.onFailure( @@ -99,15 +98,15 @@ protected void doExecute(final Task task, final PutDatasourceRequest request, fi * unless exception is thrown */ protected void internalDoExecute( - final PutDatasourceRequest request, + final PutTIFJobRequest request, final LockModel lock, final ActionListener listener ) { StepListener createIndexStep = new StepListener<>(); - datasourceDao.createIndexIfNotExists(createIndexStep); + tifJobParameterService.createIndexIfNotExists(createIndexStep); createIndexStep.whenComplete(v -> { - Datasource datasource = Datasource.Builder.build(request); - datasourceDao.putDatasource(datasource, getIndexResponseListener(datasource, lock, listener)); + TIFJobParameter tifJobParameter = TIFJobParameter.Builder.build(request); + tifJobParameterService.putTIFJobParameter(tifJobParameter, getIndexResponseListener(tifJobParameter, lock, listener)); }, exception -> { lockService.releaseLock(lock); log.error("failed to release lock", exception); @@ -120,19 +119,19 @@ protected void internalDoExecute( * unless exception is thrown */ protected ActionListener getIndexResponseListener( - final Datasource datasource, + final TIFJobParameter tifJobParameter, final LockModel lock, final ActionListener listener ) { return new ActionListener<>() { @Override public void onResponse(final IndexResponse indexResponse) { - // This is user initiated request. Therefore, we want to handle the first datasource update task in a generic thread + // This is user initiated request. Therefore, we want to handle the first tifJobParameter update task in a generic thread // pool. 
threadPool.generic().submit(() -> { AtomicReference lockReference = new AtomicReference<>(lock); try { - createDatasource(datasource, lockService.getRenewLockRunnable(lockReference)); + createTIFJob(tifJobParameter, lockService.getRenewLockRunnable(lockReference)); } finally { lockService.releaseLock(lockReference.get()); } @@ -144,8 +143,8 @@ public void onResponse(final IndexResponse indexResponse) { public void onFailure(final Exception e) { lockService.releaseLock(lock); if (e instanceof VersionConflictEngineException) { - log.error("datasource already exists"); - listener.onFailure(new ResourceAlreadyExistsException("datasource [{}] already exists", datasource.getName())); + log.error("tifJobParameter already exists"); + listener.onFailure(new ResourceAlreadyExistsException("tifJobParameter [{}] already exists", tifJobParameter.getName())); } else { log.error("Internal server error"); listener.onFailure(e); @@ -154,28 +153,28 @@ public void onFailure(final Exception e) { }; } - protected void createDatasource(final Datasource datasource, final Runnable renewLock) { - if (DatasourceState.CREATING.equals(datasource.getState()) == false) { - log.error("Invalid datasource state. Expecting {} but received {}", DatasourceState.CREATING, datasource.getState()); - markDatasourceAsCreateFailed(datasource); + protected void createTIFJob(final TIFJobParameter tifJobParameter, final Runnable renewLock) { + if (TIFJobState.CREATING.equals(tifJobParameter.getState()) == false) { + log.error("Invalid tifJobParameter state. 
Expecting {} but received {}", TIFJobState.CREATING, tifJobParameter.getState()); + markTIFJobAsCreateFailed(tifJobParameter); return; } try { - datasourceUpdateService.updateOrCreateThreatIntelFeedData(datasource, renewLock); + tifJobUpdateService.createThreatIntelFeedData(tifJobParameter, renewLock); } catch (Exception e) { - log.error("Failed to create datasource for {}", datasource.getName(), e); - markDatasourceAsCreateFailed(datasource); + log.error("Failed to create tifJobParameter for {}", tifJobParameter.getName(), e); + markTIFJobAsCreateFailed(tifJobParameter); } } - private void markDatasourceAsCreateFailed(final Datasource datasource) { - datasource.getUpdateStats().setLastFailedAt(Instant.now()); - datasource.setState(DatasourceState.CREATE_FAILED); + private void markTIFJobAsCreateFailed(final TIFJobParameter tifJobParameter) { + tifJobParameter.getUpdateStats().setLastFailedAt(Instant.now()); + tifJobParameter.setState(TIFJobState.CREATE_FAILED); try { - datasourceDao.updateDatasource(datasource); + tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); } catch (Exception e) { - log.error("Failed to mark datasource state as CREATE_FAILED for {}", datasource.getName(), e); + log.error("Failed to mark tifJobParameter state as CREATE_FAILED for {}", tifJobParameter.getName(), e); } } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java new file mode 100644 index 000000000..393bc02b9 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java @@ -0,0 +1,133 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.OpenSearchStatusException; +import org.opensearch.ResourceNotFoundException; +import 
org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobTask; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Locale; + +/** + * Transport action to update tif job + */ +public class TransportUpdateTIFJobAction extends HandledTransportAction { + private static final long LOCK_DURATION_IN_SECONDS = 300l; + private final TIFLockService lockService; + private final TIFJobParameterService tifJobParameterService; + private final TIFJobUpdateService tifJobUpdateService; + private final ThreadPool threadPool; + + /** + * Constructor + * + * @param transportService the transport service + * @param actionFilters the action filters + * @param lockService the lock service + * @param tifJobParameterService the tif job parameter facade + * @param tifJobUpdateService the tif job update service + */ + @Inject + public TransportUpdateTIFJobAction( + final TransportService transportService, + final ActionFilters actionFilters, + final TIFLockService lockService, + final TIFJobParameterService tifJobParameterService, + final 
TIFJobUpdateService tifJobUpdateService, + final ThreadPool threadPool + ) { + super(UpdateTIFJobAction.NAME, transportService, actionFilters, UpdateTIFJobRequest::new); + this.lockService = lockService; + this.tifJobUpdateService = tifJobUpdateService; + this.tifJobParameterService = tifJobParameterService; + this.threadPool = threadPool; + } + + /** + * Get a lock and update tif job + * + * @param task the task + * @param request the request + * @param listener the listener + */ + @Override + protected void doExecute(final Task task, final UpdateTIFJobRequest request, final ActionListener listener) { + lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { + if (lock == null) { + listener.onFailure( + new OpenSearchStatusException("Another processor is holding a lock on the resource. Try again later", RestStatus.BAD_REQUEST) + ); + return; + } + try { + // TODO: makes every sub-methods as async call to avoid using a thread in generic pool + threadPool.generic().submit(() -> { + try { + TIFJobParameter tifJobParameter = tifJobParameterService.getJobParameter(request.getName()); + if (tifJobParameter == null) { + throw new ResourceNotFoundException("no such tifJobParameter exist"); + } + if (TIFJobState.AVAILABLE.equals(tifJobParameter.getState()) == false) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "tif job is not in an [%s] state", TIFJobState.AVAILABLE) + ); + } + updateIfChanged(request, tifJobParameter); //TODO: just want to update? 
+ lockService.releaseLock(lock); + listener.onResponse(new AcknowledgedResponse(true)); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }, exception -> listener.onFailure(exception))); + } + + private void updateIfChanged(final UpdateTIFJobRequest request, final TIFJobParameter tifJobParameter) { + boolean isChanged = false; + if (isUpdateIntervalChanged(request)) { + tifJobParameter.setSchedule(new IntervalSchedule(Instant.now(), (int) request.getUpdateInterval().getDays(), ChronoUnit.DAYS)); + tifJobParameter.setTask(TIFJobTask.ALL); + isChanged = true; + } + + if (isChanged) { + tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); + } + } + + /** + * Update interval is changed as long as user provide one because + * start time will get updated even if the update interval is same as current one. + * + * @param request the update tif job request + * @return true if update interval is changed, and false otherwise + */ + private boolean isUpdateIntervalChanged(final UpdateTIFJobRequest request) { + return request.getUpdateInterval() != null; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobAction.java similarity index 54% rename from src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceAction.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobAction.java index ddf2d42e6..8b4c495f4 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobAction.java @@ -9,19 +9,19 @@ import org.opensearch.action.support.master.AcknowledgedResponse; /** - * threat intel datasource update 
action + * threat intel tif job update action */ -public class UpdateDatasourceAction extends ActionType { +public class UpdateTIFJobAction extends ActionType { /** - * Update datasource action instance + * Update tif job action instance */ - public static final UpdateDatasourceAction INSTANCE = new UpdateDatasourceAction(); + public static final UpdateTIFJobAction INSTANCE = new UpdateTIFJobAction(); /** - * Update datasource action name + * Update tif job action name */ - public static final String NAME = "cluster:admin/security_analytics/datasource/update"; + public static final String NAME = "cluster:admin/security_analytics/tifjob/update"; - private UpdateDatasourceAction() { + private UpdateTIFJobAction() { super(NAME, AcknowledgedResponse::new); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java new file mode 100644 index 000000000..205590319 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java @@ -0,0 +1,123 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; +import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; + +import java.io.IOException; +import 
java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Locale; + +/** + * threat intel tif job update request + */ +public class UpdateTIFJobRequest extends ActionRequest { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + + /** + * @param name the tif job name + * @return the tif job name + */ + private String name; + + /** + * @param updateInterval update interval of a tif job + * @return update interval of a tif job + */ + private TimeValue updateInterval; + + /** + * Parser of a tif job + */ + public static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>("update_tifjob"); + PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); + } + + public String getName() { + return name; + } + + public TimeValue getUpdateInterval() { + return updateInterval; + } + + private void setUpdateInterval(TimeValue updateInterval){ + this.updateInterval = updateInterval; + } + + /** + * Constructor + * @param name name of a tif job + */ + public UpdateTIFJobRequest(final String name) { + this.name = name; + } + + /** + * Constructor + * @param in the stream input + * @throws IOException IOException + */ + public UpdateTIFJobRequest(final StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + this.updateInterval = in.readOptionalTimeValue(); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + out.writeOptionalTimeValue(updateInterval); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = new ActionRequestValidationException(); + if 
(VALIDATOR.validateTIFJobName(name).isEmpty() == false) { + errors.addValidationError("no such tif job exist"); + } + if (updateInterval == null) { + errors.addValidationError("no values to update"); + } + + validateUpdateInterval(errors); + + return errors.validationErrors().isEmpty() ? null : errors; + } + + /** + * Validate updateInterval is equal or larger than 1 + * + * @param errors the errors to add error messages + */ + private void validateUpdateInterval(final ActionRequestValidationException errors) { + if (updateInterval == null) { + return; + } + + if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) { + errors.addValidationError("Update interval should be equal to or larger than 1 day"); + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java new file mode 100644 index 000000000..7d219a164 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java @@ -0,0 +1,287 @@ +package org.opensearch.securityanalytics.threatIntel.common; + +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +/** + * Database of a tif job + */ +public class FeedMetadata implements Writeable, ToXContent { //feedmetadata + private static final ParseField FEED_ID = new ParseField("feed_id"); + private static final ParseField FEED_NAME = new ParseField("feed_name"); + private static final ParseField FEED_FORMAT = new ParseField("feed_format"); + private static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); 
+ private static final ParseField DESCRIPTION = new ParseField("description"); + private static final ParseField ORGANIZATION = new ParseField("organization"); + private static final ParseField CONTAINED_IOCS_FIELD = new ParseField("contained_iocs_field"); + private static final ParseField IOC_COL = new ParseField("ioc_col"); + private static final ParseField FIELDS_FIELD = new ParseField("fields"); + + /** + * @param feedId id of the feed + * @return id of the feed + */ + private String feedId; + + /** + * @param feedFormat format of the feed (csv, json...) + * @return the type of feed ingested + */ + private String feedFormat; + + /** + * @param endpoint URL of a manifest file + * @return URL of a manifest file + */ + private String endpoint; + + /** + * @param feedName name of the threat intel feed + * @return name of the threat intel feed + */ + private String feedName; + + /** + * @param description description of the threat intel feed + * @return description of the threat intel feed + */ + private String description; + + /** + * @param organization organization of the threat intel feed + * @return organization of the threat intel feed + */ + private String organization; + + /** + * @param contained_iocs_field list of iocs contained in a given feed + * @return list of iocs contained in a given feed + */ + private List contained_iocs_field; + + /** + * @param ioc_col column of the contained ioc + * @return column of the contained ioc + */ + private String iocCol; + + /** + * @param fields A list of available fields in the database + * @return A list of available fields in the database + */ + private List fields; + + public FeedMetadata(String feedId, String feedName, String feedFormat, final String endpoint, final String description, + final String organization, final List contained_iocs_field, final String iocCol, final List fields) { + this.feedId = feedId; + this.feedName = feedName; + this.feedFormat = feedFormat; + this.endpoint = endpoint; + 
this.description = description; + this.organization = organization; + this.contained_iocs_field = contained_iocs_field; + this.iocCol = iocCol; + this.fields = fields; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "tif_metadata_database", + true, + args -> { + String feedId = (String) args[0]; + String feedName = (String) args[1]; + String feedFormat = (String) args[2]; + String endpoint = (String) args[3]; + String description = (String) args[4]; + String organization = (String) args[5]; + List contained_iocs_field = (List) args[6]; + String iocCol = (String) args[7]; + List fields = (List) args[8]; + return new FeedMetadata(feedFormat, endpoint, feedId, feedName, description, organization, contained_iocs_field, iocCol, fields); + } + ); + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEED_ID); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEED_NAME); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEED_FORMAT); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ENDPOINT_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DESCRIPTION); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ORGANIZATION); + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), CONTAINED_IOCS_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), IOC_COL); + PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FIELDS_FIELD); + } + + public FeedMetadata(final StreamInput in) throws IOException { + feedId = in.readString(); + feedName = in.readString(); + feedFormat = in.readString(); + endpoint = in.readString(); + description = in.readString(); + organization = in.readString(); + contained_iocs_field = in.readStringList(); + iocCol = in.readString(); + fields = in.readOptionalStringList(); + } + + private 
FeedMetadata(){} + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeString(feedId); + out.writeString(feedName); + out.writeString(feedFormat); + out.writeString(endpoint); + out.writeString(description); + out.writeString(organization); + out.writeStringCollection(contained_iocs_field); + out.writeString(iocCol); + out.writeOptionalStringCollection(fields); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(FEED_ID.getPreferredName(), feedId); + builder.field(FEED_NAME.getPreferredName(), feedName); + builder.field(FEED_FORMAT.getPreferredName(), feedFormat); + builder.field(ENDPOINT_FIELD.getPreferredName(), endpoint); + builder.field(DESCRIPTION.getPreferredName(), description); + builder.field(ORGANIZATION.getPreferredName(), organization); + builder.field(CONTAINED_IOCS_FIELD.getPreferredName(), contained_iocs_field); + builder.field(IOC_COL.getPreferredName(), iocCol); + +// if (provider != null) { +// builder.field(PROVIDER_FIELD.getPreferredName(), provider); +// } +// if (updatedAt != null) { +// builder.timeField( +// UPDATED_AT_FIELD.getPreferredName(), +// UPDATED_AT_FIELD_READABLE.getPreferredName(), +// updatedAt.toEpochMilli() +// ); +// } + if (fields != null) { + builder.startArray(FIELDS_FIELD.getPreferredName()); + for (String field : fields) { + builder.value(field); + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + + public String getFeedId() { + return feedId; + } + + public String getFeedFormat() { + return feedFormat; + } + + public String getFeedName() { + return feedName; + } + + public String getDescription() { + return description; + } + + public String getOrganization() { + return organization; + } + + public List getContained_iocs_field() { + return contained_iocs_field; + } + + public String getIocCol() { + return iocCol; + } + + public String 
getEndpoint() { + return this.endpoint; + } + + public List getFields() { + return fields; + } + public void setFeedId(String feedId) { + this.feedId = feedId; + } + + public void setFeedFormat(String feedFormat) { + this.feedFormat = feedFormat; + } + + public void setEndpoint(String endpoint) { + this.endpoint = endpoint; + } + + public void setFeedName(String feedName) { + this.feedName = feedName; + } + + public void setDescription(String description) { + this.description = description; + } + + public void setOrganization(String organization) { + this.organization = organization; + } + + public void setContained_iocs_field(List contained_iocs_field) { + this.contained_iocs_field = contained_iocs_field; + } + + public void setIocCol(String iocCol) { + this.iocCol = iocCol; + } + + public void setFields(List fields) { + this.fields = fields; + } + + /** + * Reset database so that it can be updated in next run regardless there is new update or not + */ + public void resetTIFMetadata() { + this.setFeedId(null); + this.setFeedName(null); + this.setFeedFormat(null); + this.setEndpoint(null); + this.setDescription(null); + this.setOrganization(null); + this.setContained_iocs_field(null); + this.setIocCol(null); + this.setFeedFormat(null); + } + + /** + * Set database attributes with given input + * + * @param tifMetadata the tif metadata + * @param fields the fields + */ + public void setTIFMetadata(final TIFMetadata tifMetadata, final List fields) { + this.feedId = tifMetadata.getFeedId(); + this.feedName = tifMetadata.getName(); + this.feedFormat = tifMetadata.getFeedType(); + this.endpoint = tifMetadata.getUrl(); + this.organization = tifMetadata.getOrganization(); + this.description = tifMetadata.getDescription(); + this.contained_iocs_field = tifMetadata.getContainedIocs(); + this.iocCol = tifMetadata.getIocCol(); + this.fields = fields; + } + +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelExecutor.java 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFExecutor.java similarity index 71% rename from src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelExecutor.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFExecutor.java index b3817786c..c2f861332 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelExecutor.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFExecutor.java @@ -15,16 +15,16 @@ /** * Provide a list of static methods related with executors for threat intel */ -public class ThreatIntelExecutor { - private static final String THREAD_POOL_NAME = "plugin_sap_datasource_update"; +public class TIFExecutor { + private static final String THREAD_POOL_NAME = "_plugin_sap_tifjob_update"; //TODO: name private final ThreadPool threadPool; - public ThreatIntelExecutor(final ThreadPool threadPool) { + public TIFExecutor(final ThreadPool threadPool) { this.threadPool = threadPool; } /** - * We use fixed thread count of 1 for updating datasource as updating datasource is running background + * We use fixed thread count of 1 for updating tif job as updating tif job is running background * once a day at most and no need to expedite the task. 
* * @param settings the settings @@ -35,11 +35,11 @@ public static ExecutorBuilder executorBuilder(final Settings settings) { } /** - * Return an executor service for datasource update task + * Return an executor service for tif job update task * * @return the executor service */ - public ExecutorService forDatasourceUpdate() { + public ExecutorService forJobSchedulerParameterUpdate() { return threadPool.executor(THREAD_POOL_NAME); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFJobState.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFJobState.java new file mode 100644 index 000000000..22ffee3e9 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFJobState.java @@ -0,0 +1,37 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +/** + * Threat intel tif job state + * + * When tif job is created, it starts with CREATING state. Once the first threat intel feed is generated, the state changes to AVAILABLE. + * Only when the first threat intel feed generation failed, the state changes to CREATE_FAILED. + * Subsequent threat intel feed failure won't change tif job state from AVAILABLE to CREATE_FAILED. + * When delete request is received, the tif job state changes to DELETING. 
+ * + * State changed from left to right for the entire lifecycle of a datasource + * (CREATING) to (CREATE_FAILED or AVAILABLE) to (DELETING) + * + */ +public enum TIFJobState { + /** + * tif job is being created + */ + CREATING, + /** + * tif job is ready to be used + */ + AVAILABLE, + /** + * tif job creation failed + */ + CREATE_FAILED, + /** + * tif job is being deleted + */ + DELETING +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelLockService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java similarity index 83% rename from src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelLockService.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java index 8847d681e..df1fd1b75 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelLockService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java @@ -5,7 +5,7 @@ package org.opensearch.securityanalytics.threatIntel.common; -import static org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceExtension.JOB_INDEX_NAME; +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; import java.time.Instant; import java.util.Optional; @@ -23,11 +23,12 @@ import org.opensearch.jobscheduler.spi.LockModel; import org.opensearch.jobscheduler.spi.utils.LockService; import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; /** - * A wrapper of job scheduler's lock service for datasource + * A wrapper of job scheduler's lock service */ -public class ThreatIntelLockService { +public class TIFLockService { private static final Logger log = LogManager.getLogger(DetectorTrigger.class); public static final long LOCK_DURATION_IN_SECONDS = 300l; @@ -43,7 +44,7 @@ public 
class ThreatIntelLockService { * @param clusterService the cluster service * @param client the client */ - public ThreatIntelLockService(final ClusterService clusterService, final Client client) { + public TIFLockService(final ClusterService clusterService, final Client client) { this.clusterService = clusterService; this.lockService = new LockService(client, clusterService); } @@ -51,28 +52,28 @@ public ThreatIntelLockService(final ClusterService clusterService, final Client /** * Wrapper method of LockService#acquireLockWithId * - * Datasource uses its name as doc id in job scheduler. Therefore, we can use datasource name to acquire - * a lock on a datasource. + * tif job uses its name as doc id in job scheduler. Therefore, we can use tif job name to acquire + * a lock on a tif job. * - * @param datasourceName datasourceName to acquire lock on + * @param tifJobName tifJobName to acquire lock on * @param lockDurationSeconds the lock duration in seconds * @param listener the listener */ - public void acquireLock(final String datasourceName, final Long lockDurationSeconds, final ActionListener listener) { - lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, datasourceName, listener); + public void acquireLock(final String tifJobName, final Long lockDurationSeconds, final ActionListener listener) { + lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, tifJobName, listener); } /** * Synchronous method of #acquireLock * - * @param datasourceName datasourceName to acquire lock on + * @param tifJobName tifJobName to acquire lock on * @param lockDurationSeconds the lock duration in seconds * @return lock model */ - public Optional acquireLock(final String datasourceName, final Long lockDurationSeconds) { + public Optional acquireLock(final String tifJobName, final Long lockDurationSeconds) { AtomicReference lockReference = new AtomicReference(); CountDownLatch countDownLatch = new CountDownLatch(1); - 
lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, datasourceName, new ActionListener<>() { + lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, tifJobName, new ActionListener<>() { @Override public void onResponse(final LockModel lockModel) { lockReference.set(lockModel); @@ -88,7 +89,7 @@ public void onFailure(final Exception e) { }); try { - countDownLatch.await(clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT).getSeconds(), TimeUnit.SECONDS); + countDownLatch.await(clusterService.getClusterSettings().get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT).getSeconds(), TimeUnit.SECONDS); return Optional.ofNullable(lockReference.get()); } catch (InterruptedException e) { log.error("Waiting for the count down latch failed", e); @@ -133,7 +134,7 @@ public void onFailure(final Exception e) { }); try { - countDownLatch.await(clusterService.getClusterSettings().get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT).getSeconds(), TimeUnit.SECONDS); + countDownLatch.await(clusterService.getClusterSettings().get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT).getSeconds(), TimeUnit.SECONDS); return lockReference.get(); } catch (InterruptedException e) { log.error("Interrupted exception", e); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java new file mode 100644 index 000000000..a594537be --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java @@ -0,0 +1,309 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.threatIntel.common; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; +import java.nio.CharBuffer; +import java.security.AccessController; +import 
java.security.PrivilegedAction; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.SpecialPermission; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.*; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; + +/** + * Threat intel tif job metadata object + * + * TIFMetadata is stored in an external endpoint. OpenSearch read the file and store values it in this object. + */ +public class TIFMetadata implements Writeable, ToXContent { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + private static final ParseField FEED_ID = new ParseField("id"); + private static final ParseField URL_FIELD = new ParseField("url"); + private static final ParseField NAME = new ParseField("name"); + private static final ParseField ORGANIZATION = new ParseField("organization"); + private static final ParseField DESCRIPTION = new ParseField("description"); + private static final ParseField FEED_TYPE = new ParseField("feed_type"); + private static final ParseField CONTAINED_IOCS = new ParseField("contained_iocs"); + private static final ParseField IOC_COL = new ParseField("ioc_col"); + + /** + * @param feedId ID of the threat intel feed data + * @return ID of the threat intel feed data + */ + private String feedId; + + /** + * @param url URL of the threat intel feed data + * @return URL of the threat intel feed data + */ + private String url; + + /** + * @param name Name of the threat intel feed + * @return Name of the threat intel feed + */ + 
private String name; + + /** + * @param organization A threat intel feed organization name + * @return A threat intel feed organization name + */ + private String organization; + + /** + * @param description A description of the database + * @return A description of a database + */ + private String description; + + /** + * @param feedType The type of the data feed (csv, json...) + * @return The type of the data feed (csv, json...) + */ + private String feedType; + + /** + * @param iocCol the column of the ioc data if feedType is csv + * @return the column of the ioc data if feedType is csv + */ + private String iocCol; + + /** + * @param containedIocs list of ioc types contained in feed + * @return list of ioc types contained in feed + */ + private List containedIocs; + + + public String getUrl() { + return url; + } + public String getName() { + return name; + } + public String getOrganization() { + return organization; + } + public String getDescription() { + return description; + } + public String getFeedId() { + return feedId; + } + public String getFeedType() { + return feedType; + } + public String getIocCol() { + return iocCol; + } + public List getContainedIocs() { + return containedIocs; + } + + public void setFeedId(String feedId) { + this.feedId = feedId; + } + + public void setUrl(String url) { + this.url = url; + } + + public void setName(String name) { + this.name = name; + } + + public void setOrganization(String organization) { + this.organization = organization; + } + + public void setFeedType(String feedType) { + this.feedType = feedType; + } + + public void setDescription(String description) { + this.description = description; + } + + public void setIocCol(String iocCol) { + this.iocCol = iocCol; + } + + public void setContainedIocs(List containedIocs) { + this.containedIocs = containedIocs; + } + + + public TIFMetadata(final String feedId, final String url, final String name, final String organization, final String description, + final String 
feedType, final List containedIocs, final String iocCol) { + this.feedId = feedId; + this.url = url; + this.name = name; + this.organization = organization; + this.description = description; + this.feedType = feedType; + this.containedIocs = containedIocs; + this.iocCol = iocCol; + } + + /** + * tif job metadata parser + */ + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "tif_metadata", + true, + args -> { + String feedId = (String) args[0]; + String url = (String) args[1]; + String name = (String) args[2]; + String organization = (String) args[3]; + String description = (String) args[4]; + String feedType = (String) args[5]; + List containedIocs = (List) args[6]; + String iocCol = (String) args[7]; + return new TIFMetadata(feedId, url, name, organization, description, feedType, containedIocs, iocCol); + } + ); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), URL_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); + PARSER.declareString(ConstructingObjectParser.constructorArg(), ORGANIZATION); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_TYPE); + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), CONTAINED_IOCS); + PARSER.declareString(ConstructingObjectParser.constructorArg(), IOC_COL); + } + + public TIFMetadata(final StreamInput in) throws IOException{ + feedId = in.readString(); + url = in.readString(); + name = in.readString(); + organization = in.readString(); + description = in.readString(); + feedType = in.readString(); + containedIocs = in.readStringList(); + iocCol = in.readString(); + } + public void writeTo(final StreamOutput out) throws IOException { + out.writeString(feedId); + out.writeString(url); + out.writeString(name); + out.writeString(organization); + 
out.writeString(description); + out.writeString(feedType); + out.writeStringCollection(containedIocs); + out.writeString(iocCol); + } + + private TIFMetadata(){} + + + /** + * Reset database so that it can be updated in next run regardless there is new update or not + */ + public void resetTIFMetadata() { + this.setFeedId(null); + this.setUrl(null); + this.setName(null); + this.setOrganization(null); + this.setDescription(null); + this.setFeedType(null); + this.setContainedIocs(null); + this.setIocCol(null); + } + + /** + * Set database attributes with given input + * + * @param tifMetadata the tif metadata + * @param fields the fields + */ + public void setTIFMetadata(final TIFMetadata tifMetadata, final List fields) { + this.feedId = tifMetadata.getFeedId(); + this.url = tifMetadata.getUrl(); + this.name = tifMetadata.getName(); + this.organization = tifMetadata.getOrganization(); + this.description = tifMetadata.getDescription(); + this.feedType = tifMetadata.getFeedType(); + this.containedIocs = tifMetadata.getContainedIocs(); + this.iocCol = tifMetadata.getIocCol(); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(FEED_ID.getPreferredName(), feedId); + builder.field(URL_FIELD.getPreferredName(), url); + builder.field(NAME.getPreferredName(), name); + builder.field(ORGANIZATION.getPreferredName(), organization); + builder.field(DESCRIPTION.getPreferredName(), description); + builder.field(FEED_TYPE.getPreferredName(), feedType); + builder.field(CONTAINED_IOCS.getPreferredName(), containedIocs); + builder.field(IOC_COL.getPreferredName(), iocCol); + builder.endObject(); + return builder; + } + + /** + * TIFMetadata builder + */ + public static class Builder { //TODO: builder? 
+ private static final int FILE_MAX_BYTES = 1024 * 8; + + /** + * Build TIFMetadata from a given url + * + * @param url url to downloads a manifest file + * @return TIFMetadata representing the manifest file + */ + @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") + public static TIFMetadata build(final URL url) { + SpecialPermission.check(); + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + URLConnection connection = url.openConnection(); + return internalBuild(connection); + } catch (IOException e) { + log.error("Runtime exception connecting to the manifest file", e); + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + }); + } + + @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") + protected static TIFMetadata internalBuild(final URLConnection connection) throws IOException { + connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + InputStreamReader inputStreamReader = new InputStreamReader(connection.getInputStream()); + try (BufferedReader reader = new BufferedReader(inputStreamReader)) { + CharBuffer charBuffer = CharBuffer.allocate(FILE_MAX_BYTES); + reader.read(charBuffer); + charBuffer.flip(); + XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + charBuffer.toString() + ); + return PARSER.parse(parser, null); + } + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceExtension.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtension.java similarity index 60% rename from src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceExtension.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtension.java index 4d32973e6..023323253 100644 
--- a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceExtension.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtension.java @@ -5,17 +5,16 @@ package org.opensearch.securityanalytics.threatIntel.jobscheduler; -import org.opensearch.jobscheduler.spi.JobSchedulerExtension; import org.opensearch.jobscheduler.spi.ScheduledJobParser; import org.opensearch.jobscheduler.spi.ScheduledJobRunner; import java.util.Map; -public class DatasourceExtension implements JobSchedulerExtension { +public class TIFJobExtension implements org.opensearch.jobscheduler.spi.JobSchedulerExtension { /** - * Job index name for a datasource + * Job index name for a TIF job */ - public static final String JOB_INDEX_NAME = ".scheduler-security_analytics-threatintel-datasource"; //rename this... + public static final String JOB_INDEX_NAME = ".scheduler-sap-threatintel-job"; /** * Job index setting @@ -23,11 +22,11 @@ public class DatasourceExtension implements JobSchedulerExtension { * We want it to be single shard so that job can be run only in a single node by job scheduler. * We want it to expand to all replicas so that querying to this index can be done locally to reduce latency. 
*/ - public static final Map INDEX_SETTING = Map.of("index.number_of_shards", 1, "index.number_of_replicas", "0-all", "index.hidden", true); + public static final Map INDEX_SETTING = Map.of("index.number_of_shards", 1, "index.auto_expand_replicas", "0-all", "index.hidden", true); @Override public String getJobType() { - return "scheduler_security_analytics_threatintel_datasource"; + return "scheduler_sap_threatintel_job"; } @Override @@ -37,11 +36,11 @@ public String getJobIndex() { @Override public ScheduledJobRunner getJobRunner() { - return DatasourceRunner.getJobRunnerInstance(); + return TIFJobRunner.getJobRunnerInstance(); } @Override public ScheduledJobParser getJobParser() { - return (parser, id, jobDocVersion) -> Datasource.PARSER.parse(parser, null); + return (parser, id, jobDocVersion) -> TIFJobParameter.PARSER.parse(parser, null); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/Datasource.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java similarity index 52% rename from src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/Datasource.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java index 00ff1d419..e347e0e60 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/Datasource.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java @@ -16,7 +16,6 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.jobscheduler.spi.schedule.Schedule; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; @@ -27,12 +26,11 @@ import static org.opensearch.common.time.DateUtils.toInstant; -import 
org.opensearch.securityanalytics.threatIntel.action.PutDatasourceRequest; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; +import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobRequest; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; -public class Datasource implements Writeable, ScheduledJobParameter { +public class TIFJobParameter implements Writeable, ScheduledJobParameter { /** * Prefix of indices having threatIntel data */ @@ -49,24 +47,14 @@ public class Datasource implements Writeable, ScheduledJobParameter { private static final ParseField ENABLED_TIME_FIELD = new ParseField("enabled_time"); private static final ParseField ENABLED_TIME_FIELD_READABLE = new ParseField("enabled_time_field"); - // need? 
- private static final ParseField TASK_FIELD = new ParseField("task"); - public static final String LOCK_DURATION_SECONDS = "lock_duration_seconds"; - /** - * Additional fields for datasource + * Additional fields for tif job */ - private static final ParseField FEED_NAME = new ParseField("feed_name"); - private static final ParseField FEED_FORMAT = new ParseField("feed_format"); - private static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); - private static final ParseField DESCRIPTION = new ParseField("description"); - private static final ParseField ORGANIZATION = new ParseField("organization"); - private static final ParseField CONTAINED_IOCS_FIELD = new ParseField("contained_iocs_field"); private static final ParseField STATE_FIELD = new ParseField("state"); private static final ParseField CURRENT_INDEX_FIELD = new ParseField("current_index"); private static final ParseField INDICES_FIELD = new ParseField("indices"); - private static final ParseField DATABASE_FIELD = new ParseField("database"); private static final ParseField UPDATE_STATS_FIELD = new ParseField("update_stats"); + private static final ParseField TASK_FIELD = new ParseField("task"); /** @@ -74,14 +62,14 @@ public class Datasource implements Writeable, ScheduledJobParameter { */ /** - * @param name name of a datasource - * @return name of a datasource + * @param name name of a tif job + * @return name of a tif job */ private String name; /** - * @param lastUpdateTime Last update time of a datasource - * @return Last update time of a datasource + * @param lastUpdateTime Last update time of a tif job + * @return Last update time of a tif job */ private Instant lastUpdateTime; /** @@ -100,110 +88,46 @@ public class Datasource implements Writeable, ScheduledJobParameter { */ private IntervalSchedule schedule; - /** - * @param task Task that {@link DatasourceRunner} will execute - * @return Task that {@link DatasourceRunner} will execute - */ - private DatasourceTask task; - - - /** - 
* Additional variables for datasource - */ - - /** - * @param feedFormat format of the feed (ip, dns...) - * @return the type of feed ingested - */ - private String feedFormat; - - /** - * @param endpoint URL of a manifest file - * @return URL of a manifest file - */ - private String endpoint; - - /** - * @param feedName name of the threat intel feed - * @return name of the threat intel feed - */ - private String feedName; - - /** - * @param description description of the threat intel feed - * @return description of the threat intel feed - */ - private String description; - - /** - * @param organization organization of the threat intel feed - * @return organization of the threat intel feed - */ - private String organization; /** - * @param contained_iocs_field list of iocs contained in a given feed - * @return list of iocs contained in a given feed + * Additional variables for tif job */ - private List contained_iocs_field; /** - * @param state State of a datasource - * @return State of a datasource + * @param state State of a tif job + * @return State of a tif job */ - private DatasourceState state; + private TIFJobState state; /** * @param currentIndex the current index name having threat intel feed data * @return the current index name having threat intel feed data */ private String currentIndex; + /** * @param indices A list of indices having threat intel feed data including currentIndex * @return A list of indices having threat intel feed data including currentIndex */ private List indices; - /** - * @param database threat intel feed database information - * @return threat intel feed database information - */ - private Database database; + /** * @param updateStats threat intel feed database update statistics * @return threat intel feed database update statistics */ private UpdateStats updateStats; - public DatasourceTask getTask() { - return task; - } - - public void setEndpoint(String endpoint) { - this.endpoint = endpoint; - } - - public void 
setLastUpdateTime(Instant lastUpdateTime) { - this.lastUpdateTime = lastUpdateTime; - } - - public void setOrganization(String organization) { - this.organization = organization; - } - - public void setCurrentIndex(String currentIndex) { - this.currentIndex = currentIndex; - } - - public void setTask(DatasourceTask task) { - this.task = task; - } - + /** + * @param task Task that {@link TIFJobRunner} will execute + * @return Task that {@link TIFJobRunner} will execute + */ + private TIFJobTask task; /** - * Datasource parser + * tif job parser */ - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datasource_metadata", + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "tifjob_metadata", true, args -> { String name = (String) args[0]; @@ -211,35 +135,21 @@ public void setTask(DatasourceTask task) { Instant enabledTime = args[2] == null ? null : Instant.ofEpochMilli((long) args[2]); boolean isEnabled = (boolean) args[3]; IntervalSchedule schedule = (IntervalSchedule) args[4]; - DatasourceTask task = DatasourceTask.valueOf((String) args[6]); - String feedFormat = (String) args[7]; - String endpoint = (String) args[8]; - String feedName = (String) args[9]; - String description = (String) args[10]; - String organization = (String) args[11]; - List contained_iocs_field = (List) args[12]; - DatasourceState state = DatasourceState.valueOf((String) args[13]); - String currentIndex = (String) args[14]; - List indices = (List) args[15]; - Database database = (Database) args[16]; - UpdateStats updateStats = (UpdateStats) args[17]; - Datasource parameter = new Datasource( + TIFJobTask task = TIFJobTask.valueOf((String) args[5]); + TIFJobState state = TIFJobState.valueOf((String) args[6]); + String currentIndex = (String) args[7]; + List indices = (List) args[8]; + UpdateStats updateStats = (UpdateStats) args[9]; + TIFJobParameter parameter = new TIFJobParameter( name, lastUpdateTime, enabledTime, 
isEnabled, schedule, task, - feedFormat, - endpoint, - feedName, - description, - organization, - contained_iocs_field, state, currentIndex, indices, - database, updateStats ); return parameter; @@ -252,85 +162,56 @@ public void setTask(DatasourceTask task) { PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduleParser.parse(p), SCHEDULE_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), TASK_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_FORMAT); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ENDPOINT_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ORGANIZATION); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), CONTAINED_IOCS_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), STATE_FIELD); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), CURRENT_INDEX_FIELD); PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Database.PARSER, DATABASE_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), UpdateStats.PARSER, UPDATE_STATS_FIELD); } - public Datasource() { - this(null, null, null, null, null, null, null, null); + public TIFJobParameter() { + this(null, null); } - public Datasource(final String name, final Instant lastUpdateTime, final Instant enabledTime, final Boolean isEnabled, - final IntervalSchedule schedule, DatasourceTask task, final String feedFormat, final String endpoint, - final String feedName, final String description, final String organization, final List contained_iocs_field, - final DatasourceState state, final 
String currentIndex, final List indices, final Database database, final UpdateStats updateStats) { + public TIFJobParameter(final String name, final Instant lastUpdateTime, final Instant enabledTime, final Boolean isEnabled, + final IntervalSchedule schedule, TIFJobTask task, final TIFJobState state, final String currentIndex, + final List indices, final UpdateStats updateStats) { this.name = name; this.lastUpdateTime = lastUpdateTime; this.enabledTime = enabledTime; this.isEnabled = isEnabled; this.schedule = schedule; this.task = task; - this.feedFormat = feedFormat; - this.endpoint = endpoint; - this.feedName = feedName; - this.description = description; - this.organization = organization; - this.contained_iocs_field = contained_iocs_field; this.state = state; this.currentIndex = currentIndex; this.indices = indices; - this.database = database; this.updateStats = updateStats; } - public Datasource(final String name, final IntervalSchedule schedule, final String feedFormat, final String endpoint, final String feedName, final String description, final String organization, final List contained_iocs_field ) { + public TIFJobParameter(final String name, final IntervalSchedule schedule) { this( name, Instant.now().truncatedTo(ChronoUnit.MILLIS), null, false, schedule, - DatasourceTask.ALL, - feedFormat, - endpoint, - feedName, - description, - organization, - contained_iocs_field, - DatasourceState.CREATING, + TIFJobTask.ALL, + TIFJobState.CREATING, null, new ArrayList<>(), - new Database(), new UpdateStats() ); } - public Datasource(final StreamInput in) throws IOException { + public TIFJobParameter(final StreamInput in) throws IOException { name = in.readString(); lastUpdateTime = toInstant(in.readVLong()); enabledTime = toInstant(in.readOptionalVLong()); isEnabled = in.readBoolean(); schedule = new IntervalSchedule(in); - task = DatasourceTask.valueOf(in.readString()); - feedFormat = in.readString(); - endpoint = in.readString(); - feedName = in.readString(); - 
description = in.readString(); - organization = in.readString(); - contained_iocs_field = in.readStringList(); - state = DatasourceState.valueOf(in.readString()); + task = TIFJobTask.valueOf(in.readString()); + state = TIFJobState.valueOf(in.readString()); currentIndex = in.readOptionalString(); indices = in.readStringList(); - database = new Database(in); updateStats = new UpdateStats(in); } @@ -341,16 +222,9 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeBoolean(isEnabled); schedule.writeTo(out); out.writeString(task.name()); - out.writeString(feedFormat); - out.writeString(endpoint); - out.writeString(feedName); - out.writeString(description); - out.writeString(organization); - out.writeStringCollection(contained_iocs_field); out.writeString(state.name()); out.writeOptionalString(currentIndex); out.writeStringCollection(indices); - database.writeTo(out); updateStats.writeTo(out); } @@ -373,51 +247,73 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.field(ENABLED_FIELD.getPreferredName(), isEnabled); builder.field(SCHEDULE_FIELD.getPreferredName(), schedule); builder.field(TASK_FIELD.getPreferredName(), task.name()); - builder.field(FEED_FORMAT.getPreferredName(), feedFormat); - builder.field(ENDPOINT_FIELD.getPreferredName(), endpoint); - builder.field(FEED_NAME.getPreferredName(), feedName); - builder.field(DESCRIPTION.getPreferredName(), description); - builder.field(ORGANIZATION.getPreferredName(), organization); - builder.field(CONTAINED_IOCS_FIELD.getPreferredName(), contained_iocs_field); builder.field(STATE_FIELD.getPreferredName(), state.name()); if (currentIndex != null) { builder.field(CURRENT_INDEX_FIELD.getPreferredName(), currentIndex); } builder.field(INDICES_FIELD.getPreferredName(), indices); - builder.field(DATABASE_FIELD.getPreferredName(), database); builder.field(UPDATE_STATS_FIELD.getPreferredName(), updateStats); builder.endObject(); return builder; } + // getters and 
setters + public void setName(String name) { + this.name = name; + } + public void setEnabledTime(Instant enabledTime) { + this.enabledTime = enabledTime; + } + + public void setEnabled(boolean enabled) { + isEnabled = enabled; + } + + public void setIndices(List indices) { + this.indices = indices; + } + @Override public String getName() { return this.name; } - @Override public Instant getLastUpdateTime() { return this.lastUpdateTime; } - @Override public Instant getEnabledTime() { return this.enabledTime; } - @Override public IntervalSchedule getSchedule() { return this.schedule; } - @Override public boolean isEnabled() { return this.isEnabled; } + public TIFJobTask getTask() { + return task; + } + public void setLastUpdateTime(Instant lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + public void setCurrentIndex(String currentIndex) { + this.currentIndex = currentIndex; + } + + public void setTask(TIFJobTask task) { + this.task = task; + } @Override public Long getLockDurationSeconds() { - return ThreatIntelLockService.LOCK_DURATION_IN_SECONDS; + return TIFLockService.LOCK_DURATION_IN_SECONDS; + } + + public String getCurrentIndex() { + return currentIndex; } /** @@ -440,9 +336,9 @@ public void disable() { } /** - * Current index name of a datasource + * Current index name of a tif job * - * @return Current index name of a datasource + * @return Current index name of a tif job */ public String currentIndexName() { return currentIndex; @@ -453,64 +349,16 @@ public void setSchedule(IntervalSchedule schedule) { } /** - * Reset database so that it can be updated in next run regardless there is new update or not - */ - public void resetDatabase() { - database.setUpdatedAt(null); - database.setSha256Hash(null); - } - - /** - * Index name for a datasource with given suffix + * Index name for a tif job with given suffix * * @param suffix the suffix of a index name - * @return index name for a datasource with given suffix + * @return index name for a tif job 
with given suffix */ public String newIndexName(final String suffix) { return String.format(Locale.ROOT, "%s.%s.%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, name, suffix); } - /** - * Set database attributes with given input - * - * @param datasourceManifest the datasource manifest - * @param fields the fields - */ - public void setDatabase(final DatasourceManifest datasourceManifest, final List fields) { - this.database.setProvider(datasourceManifest.getOrganization()); - this.database.setSha256Hash(datasourceManifest.getSha256Hash()); - this.database.setUpdatedAt(Instant.ofEpochMilli(datasourceManifest.getUpdatedAt())); - this.database.setFields(fields); - } - - /** - * Checks if the database fields are compatible with the given set of fields. - * - * If database fields are null, it is compatible with any input fields - * as it hasn't been generated before. - * - * @param fields The set of input fields to check for compatibility. - * @return true if the database fields are compatible with the given input fields, false otherwise. 
- */ - public boolean isCompatible(final List fields) { - if (database.fields == null) { - return true; - } - - if (fields.size() < database.fields.size()) { - return false; - } - - Set fieldsSet = new HashSet<>(fields); - for (String field : database.fields) { - if (fieldsSet.contains(field) == false) { - return false; - } - } - return true; - } - - public DatasourceState getState() { + public TIFJobState getState() { return state; } @@ -518,159 +366,17 @@ public List getIndices() { return indices; } - public void setState(DatasourceState previousState) { + public void setState(TIFJobState previousState) { this.state = previousState; } - public String getEndpoint() { - return this.endpoint; - } - - public Database getDatabase() { - return this.database; - } - public UpdateStats getUpdateStats() { return this.updateStats; } - /** - * Database of a datasource - */ - public static class Database implements Writeable, ToXContent { - private static final ParseField PROVIDER_FIELD = new ParseField("provider"); - private static final ParseField SHA256_HASH_FIELD = new ParseField("sha256_hash"); - private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at_in_epoch_millis"); - private static final ParseField UPDATED_AT_FIELD_READABLE = new ParseField("updated_at"); - private static final ParseField FIELDS_FIELD = new ParseField("fields"); - - /** - * @param provider A database provider name - * @return A database provider name - */ - private String provider; - /** - * @param sha256Hash SHA256 hash value of a database file - * @return SHA256 hash value of a database file - */ - private String sha256Hash; - - /** - * @param updatedAt A date when the database was updated - * @return A date when the database was updated - */ - private Instant updatedAt; - - /** - * @param fields A list of available fields in the database - * @return A list of available fields in the database - */ - private List fields; - - public Database(String provider, String sha256Hash, 
Instant updatedAt, List fields) { - this.provider = provider; - this.sha256Hash = sha256Hash; - this.updatedAt = updatedAt; - this.fields = fields; - } - - public void setProvider(String provider) { - this.provider = provider; - } - - public void setSha256Hash(String sha256Hash) { - this.sha256Hash = sha256Hash; - } - - public void setUpdatedAt(Instant updatedAt) { - this.updatedAt = updatedAt; - } - - public void setFields(List fields) { - this.fields = fields; - } - - public Instant getUpdatedAt() { - return updatedAt; - } - - public String getSha256Hash() { - return sha256Hash; - } - - public List getFields() { - return fields; - } - - public String getProvider() { - return provider; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datasource_metadata_database", - true, - args -> { - String provider = (String) args[0]; - String sha256Hash = (String) args[1]; - Instant updatedAt = args[2] == null ? null : Instant.ofEpochMilli((Long) args[2]); - List fields = (List) args[3]; - return new Database(provider, sha256Hash, updatedAt, fields); - } - ); - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PROVIDER_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), SHA256_HASH_FIELD); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), UPDATED_AT_FIELD); - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FIELDS_FIELD); - } - - public Database(final StreamInput in) throws IOException { - provider = in.readOptionalString(); - sha256Hash = in.readOptionalString(); - updatedAt = toInstant(in.readOptionalVLong()); - fields = in.readOptionalStringList(); - } - - private Database(){} - - @Override - public void writeTo(final StreamOutput out) throws IOException { - out.writeOptionalString(provider); - out.writeOptionalString(sha256Hash); - out.writeOptionalVLong(updatedAt == null ? 
null : updatedAt.toEpochMilli()); - out.writeOptionalStringCollection(fields); - } - - @Override - public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { - builder.startObject(); - if (provider != null) { - builder.field(PROVIDER_FIELD.getPreferredName(), provider); - } - if (sha256Hash != null) { - builder.field(SHA256_HASH_FIELD.getPreferredName(), sha256Hash); - } - if (updatedAt != null) { - builder.timeField( - UPDATED_AT_FIELD.getPreferredName(), - UPDATED_AT_FIELD_READABLE.getPreferredName(), - updatedAt.toEpochMilli() - ); - } - if (fields != null) { - builder.startArray(FIELDS_FIELD.getPreferredName()); - for (String field : fields) { - builder.value(field); - } - builder.endArray(); - } - builder.endObject(); - return builder; - } - } /** - * Update stats of a datasource + * Update stats of a tif job */ public static class UpdateStats implements Writeable, ToXContent { private static final ParseField LAST_SUCCEEDED_AT_FIELD = new ParseField("last_succeeded_at_in_epoch_millis"); @@ -681,6 +387,22 @@ public static class UpdateStats implements Writeable, ToXContent { private static final ParseField LAST_SKIPPED_AT = new ParseField("last_skipped_at_in_epoch_millis"); private static final ParseField LAST_SKIPPED_AT_READABLE = new ParseField("last_skipped_at"); + public Instant getLastSucceededAt() { + return lastSucceededAt; + } + + public Long getLastProcessingTimeInMillis() { + return lastProcessingTimeInMillis; + } + + public Instant getLastFailedAt() { + return lastFailedAt; + } + + public Instant getLastSkippedAt() { + return lastSkippedAt; + } + /** * @param lastSucceededAt The last time when threat intel feed data update was succeeded * @return The last time when threat intel feed data update was succeeded @@ -718,7 +440,7 @@ public void setLastProcessingTimeInMillis(Long lastProcessingTimeInMillis) { } private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - 
"datasource_metadata_update_stats", + "tifjob_metadata_update_stats", true, args -> { Instant lastSucceededAt = args[0] == null ? null : Instant.ofEpochMilli((long) args[0]); @@ -728,7 +450,6 @@ public void setLastProcessingTimeInMillis(Long lastProcessingTimeInMillis) { return new UpdateStats(lastSucceededAt, lastProcessingTimeInMillis, lastFailedAt, lastSkippedAt); } ); - static { PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_SUCCEEDED_AT_FIELD); PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_PROCESSING_TIME_IN_MILLIS_FIELD); @@ -750,7 +471,6 @@ public UpdateStats(Instant lastSucceededAt, Long lastProcessingTimeInMillis, Ins this.lastSkippedAt = lastSkippedAt; } - @Override public void writeTo(final StreamOutput out) throws IOException { out.writeOptionalVLong(lastSucceededAt == null ? null : lastSucceededAt.toEpochMilli()); @@ -795,25 +515,19 @@ public void setLastFailedAt(Instant now) { } } - /** - * Builder class for Datasource + * Builder class for tif job */ public static class Builder { - public static Datasource build(final PutDatasourceRequest request) { - String id = request.getName(); + public static TIFJobParameter build(final PutTIFJobRequest request) { + String name = request.getName(); IntervalSchedule schedule = new IntervalSchedule( Instant.now().truncatedTo(ChronoUnit.MILLIS), (int) request.getUpdateInterval().days(), ChronoUnit.DAYS ); - String feedFormat = request.getFeedFormat(); - String endpoint = request.getEndpoint(); - String feedName = request.getFeedName(); - String description = request.getDescription(); - String organization = request.getOrganization(); - List contained_iocs_field = request.getContained_iocs_field(); - return new Datasource(id, schedule, feedFormat, endpoint, feedName, description, organization, contained_iocs_field); + return new TIFJobParameter(name, schedule); + } } } \ No newline at end of file diff --git 
a/src/main/java/org/opensearch/securityanalytics/threatintel/dao/DatasourceDao.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java similarity index 62% rename from src/main/java/org/opensearch/securityanalytics/threatintel/dao/DatasourceDao.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java index 9d6a15241..cab8dcc0b 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/dao/DatasourceDao.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package org.opensearch.securityanalytics.threatIntel.dao; +package org.opensearch.securityanalytics.threatIntel.jobscheduler; import java.io.BufferedReader; import java.io.IOException; @@ -50,9 +50,7 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelSettings; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceExtension; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; import org.opensearch.securityanalytics.threatIntel.common.StashedThreadContext; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.QueryBuilders; @@ -60,9 +58,9 @@ import org.opensearch.securityanalytics.util.SecurityAnalyticsException; /** - * Data access object for datasource + * Data access object for tif job */ -public class DatasourceDao { +public class TIFJobParameterService { private static final Logger log = LogManager.getLogger(DetectorTrigger.class); private static final Integer MAX_SIZE = 1000; @@ -70,24 +68,24 @@ public class DatasourceDao { private final 
ClusterService clusterService; private final ClusterSettings clusterSettings; - public DatasourceDao(final Client client, final ClusterService clusterService) { + public TIFJobParameterService(final Client client, final ClusterService clusterService) { this.client = client; this.clusterService = clusterService; this.clusterSettings = clusterService.getClusterSettings(); } /** - * Create datasource index + * Create tif job index * * @param stepListener setup listener */ public void createIndexIfNotExists(final StepListener stepListener) { - if (clusterService.state().metadata().hasIndex(DatasourceExtension.JOB_INDEX_NAME) == true) { + if (clusterService.state().metadata().hasIndex(TIFJobExtension.JOB_INDEX_NAME) == true) { stepListener.onResponse(null); return; } - final CreateIndexRequest createIndexRequest = new CreateIndexRequest(DatasourceExtension.JOB_INDEX_NAME).mapping(getIndexMapping()) - .settings(DatasourceExtension.INDEX_SETTING); + final CreateIndexRequest createIndexRequest = new CreateIndexRequest(TIFJobExtension.JOB_INDEX_NAME).mapping(getIndexMapping()) + .settings(TIFJobExtension.INDEX_SETTING); StashedThreadContext.run(client, () -> client.admin().indices().create(createIndexRequest, new ActionListener<>() { @Override public void onResponse(final CreateIndexResponse createIndexResponse) { @@ -97,7 +95,7 @@ public void onResponse(final CreateIndexResponse createIndexResponse) { @Override public void onFailure(final Exception e) { if (e instanceof ResourceAlreadyExistsException) { - log.info("index[{}] already exist", DatasourceExtension.JOB_INDEX_NAME); + log.info("index[{}] already exist", TIFJobExtension.JOB_INDEX_NAME); stepListener.onResponse(null); return; } @@ -108,7 +106,7 @@ public void onFailure(final Exception e) { private String getIndexMapping() { try { - try (InputStream is = DatasourceDao.class.getResourceAsStream("/mappings/threatintel_datasource.json")) { + try (InputStream is = 
TIFJobParameterService.class.getResourceAsStream("/mappings/threat_intel_job_mapping.json")) { try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { return reader.lines().map(String::trim).collect(Collectors.joining()); } @@ -120,21 +118,21 @@ private String getIndexMapping() { } /** - * Update datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} - * @param datasource the datasource + * Update jobSchedulerParameter in an index {@code TIFJobExtension.JOB_INDEX_NAME} + * @param jobSchedulerParameter the jobSchedulerParameter * @return index response */ - public IndexResponse updateDatasource(final Datasource datasource) { - datasource.setLastUpdateTime(Instant.now()); + public IndexResponse updateJobSchedulerParameter(final TIFJobParameter jobSchedulerParameter) { + jobSchedulerParameter.setLastUpdateTime(Instant.now()); return StashedThreadContext.run(client, () -> { try { - return client.prepareIndex(DatasourceExtension.JOB_INDEX_NAME) - .setId(datasource.getName()) + return client.prepareIndex(TIFJobExtension.JOB_INDEX_NAME) + .setId(jobSchedulerParameter.getName()) .setOpType(DocWriteRequest.OpType.INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .setSource(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .setSource(jobSchedulerParameter.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) .execute() - .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)); + .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); } catch (IOException e) { throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO } @@ -142,27 +140,26 @@ public IndexResponse updateDatasource(final Datasource datasource) { } /** - * Update datasources in an index {@code DatasourceExtension.JOB_INDEX_NAME} - * @param datasources the datasources + * Update tif jobs in an index {@code 
TIFJobExtension.JOB_INDEX_NAME} + * @param tifJobParameters the tifJobParameters * @param listener action listener */ - public void updateDatasource(final List datasources, final ActionListener listener) { + public void updateJobSchedulerParameter(final List tifJobParameters, final ActionListener listener) { BulkRequest bulkRequest = new BulkRequest(); - datasources.stream().map(datasource -> { - datasource.setLastUpdateTime(Instant.now()); - return datasource; + tifJobParameters.stream().map(tifJobParameter -> { + tifJobParameter.setLastUpdateTime(Instant.now()); + return tifJobParameter; }).map(this::toIndexRequest).forEach(indexRequest -> bulkRequest.add(indexRequest)); StashedThreadContext.run(client, () -> client.bulk(bulkRequest, listener)); } - - private IndexRequest toIndexRequest(Datasource datasource) { + private IndexRequest toIndexRequest(TIFJobParameter tifJobParameter) { try { IndexRequest indexRequest = new IndexRequest(); - indexRequest.index(DatasourceExtension.JOB_INDEX_NAME); - indexRequest.id(datasource.getName()); + indexRequest.index(TIFJobExtension.JOB_INDEX_NAME); + indexRequest.id(tifJobParameter.getName()); indexRequest.opType(DocWriteRequest.OpType.INDEX); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - indexRequest.source(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); + indexRequest.source(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); return indexRequest; } catch (IOException e) { throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO @@ -170,20 +167,48 @@ private IndexRequest toIndexRequest(Datasource datasource) { } /** - * Put datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * Get tif job from an index {@code TIFJobExtension.JOB_INDEX_NAME} + * @param name the name of a tif job + * @return tif job + * @throws IOException exception + */ + public TIFJobParameter 
getJobParameter(final String name) throws IOException { + GetRequest request = new GetRequest(TIFJobExtension.JOB_INDEX_NAME, name); + GetResponse response; + try { + response = StashedThreadContext.run(client, () -> client.get(request).actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT))); + if (response.isExists() == false) { + log.error("TIF job[{}] does not exist in an index[{}]", name, TIFJobExtension.JOB_INDEX_NAME); + return null; + } + } catch (IndexNotFoundException e) { + log.error("Index[{}] is not found", TIFJobExtension.JOB_INDEX_NAME); + return null; + } + + XContentParser parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getSourceAsBytesRef() + ); + return TIFJobParameter.PARSER.parse(parser, null); + } + + /** + * Put tifJobParameter in an index {@code TIFJobExtension.JOB_INDEX_NAME} * - * @param datasource the datasource + * @param tifJobParameter the tifJobParameter * @param listener the listener */ - public void putDatasource(final Datasource datasource, final ActionListener listener) { - datasource.setLastUpdateTime(Instant.now()); + public void putTIFJobParameter(final TIFJobParameter tifJobParameter, final ActionListener listener) { + tifJobParameter.setLastUpdateTime(Instant.now()); StashedThreadContext.run(client, () -> { try { - client.prepareIndex(DatasourceExtension.JOB_INDEX_NAME) - .setId(datasource.getName()) + client.prepareIndex(TIFJobExtension.JOB_INDEX_NAME) + .setId(tifJobParameter.getName()) .setOpType(DocWriteRequest.OpType.CREATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .setSource(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .setSource(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) .execute(listener); } catch (IOException e) { throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO @@ -192,63 +217,35 
@@ public void putDatasource(final Datasource datasource, final ActionListener list } /** - * Delete datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * Delete tifJobParameter in an index {@code TIFJobExtension.JOB_INDEX_NAME} * - * @param datasource the datasource + * @param tifJobParameter the tifJobParameter * */ - public void deleteDatasource(final Datasource datasource) { + public void deleteTIFJobParameter(final TIFJobParameter tifJobParameter) { DeleteResponse response = client.prepareDelete() - .setIndex(DatasourceExtension.JOB_INDEX_NAME) - .setId(datasource.getName()) + .setIndex(TIFJobExtension.JOB_INDEX_NAME) + .setId(tifJobParameter.getName()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute() - .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)); + .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); if (response.status().equals(RestStatus.OK)) { - log.info("deleted datasource[{}] successfully", datasource.getName()); + log.info("deleted tifJobParameter[{}] successfully", tifJobParameter.getName()); } else if (response.status().equals(RestStatus.NOT_FOUND)) { - throw new ResourceNotFoundException("datasource[{}] does not exist", datasource.getName()); + throw new ResourceNotFoundException("tifJobParameter[{}] does not exist", tifJobParameter.getName()); } else { - throw new OpenSearchException("failed to delete datasource[{}] with status[{}]", datasource.getName(), response.status()); + throw new OpenSearchException("failed to delete tifJobParameter[{}] with status[{}]", tifJobParameter.getName(), response.status()); } } /** - * Get datasource from an index {@code DatasourceExtension.JOB_INDEX_NAME} - * @param name the name of a datasource - * @return datasource - * @throws IOException exception - */ - public Datasource getDatasource(final String name) throws IOException { - GetRequest request = new GetRequest(DatasourceExtension.JOB_INDEX_NAME, name); - GetResponse 
response; - try { - response = StashedThreadContext.run(client, () -> client.get(request).actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT))); - if (response.isExists() == false) { - log.error("Datasource[{}] does not exist in an index[{}]", name, DatasourceExtension.JOB_INDEX_NAME); - return null; - } - } catch (IndexNotFoundException e) { - log.error("Index[{}] is not found", DatasourceExtension.JOB_INDEX_NAME); - return null; - } - - XContentParser parser = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - response.getSourceAsBytesRef() - ); - return Datasource.PARSER.parse(parser, null); - } - - /** - * Get datasource from an index {@code DatasourceExtension.JOB_INDEX_NAME} - * @param name the name of a datasource + * Get tif job from an index {@code TIFJobExtension.JOB_INDEX_NAME} + * @param name the name of a tif job * @param actionListener the action listener */ - public void getDatasource(final String name, final ActionListener actionListener) { - GetRequest request = new GetRequest(DatasourceExtension.JOB_INDEX_NAME, name); + public void getJobParameter(final String name, final ActionListener actionListener) { + GetRequest request = new GetRequest(TIFJobExtension.JOB_INDEX_NAME, name); StashedThreadContext.run(client, () -> client.get(request, new ActionListener<>() { @Override public void onResponse(final GetResponse response) { @@ -263,7 +260,7 @@ public void onResponse(final GetResponse response) { LoggingDeprecationHandler.INSTANCE, response.getSourceAsBytesRef() ); - actionListener.onResponse(Datasource.PARSER.parse(parser, null)); + actionListener.onResponse(TIFJobParameter.PARSER.parse(parser, null)); } catch (IOException e) { actionListener.onFailure(e); } @@ -277,65 +274,65 @@ public void onFailure(final Exception e) { } /** - * Get datasources from an index {@code DatasourceExtension.JOB_INDEX_NAME} - * @param names the array of datasource names + * Get tif jobs from an index 
{@code TIFJobExtension.JOB_INDEX_NAME} + * @param names the array of tif job names * @param actionListener the action listener */ - public void getDatasources(final String[] names, final ActionListener> actionListener) { + public void getTIFJobParameters(final String[] names, final ActionListener> actionListener) { StashedThreadContext.run( client, () -> client.prepareMultiGet() - .add(DatasourceExtension.JOB_INDEX_NAME, names) - .execute(createGetDataSourceQueryActionLister(MultiGetResponse.class, actionListener)) + .add(TIFJobExtension.JOB_INDEX_NAME, names) + .execute(createGetTIFJobParameterQueryActionLister(MultiGetResponse.class, actionListener)) ); } /** - * Get all datasources up to {@code MAX_SIZE} from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * Get all tif jobs up to {@code MAX_SIZE} from an index {@code TIFJobExtension.JOB_INDEX_NAME} * @param actionListener the action listener */ - public void getAllDatasources(final ActionListener> actionListener) { + public void getAllTIFJobParameters(final ActionListener> actionListener) { StashedThreadContext.run( client, - () -> client.prepareSearch(DatasourceExtension.JOB_INDEX_NAME) + () -> client.prepareSearch(TIFJobExtension.JOB_INDEX_NAME) .setQuery(QueryBuilders.matchAllQuery()) .setPreference(Preference.PRIMARY.type()) .setSize(MAX_SIZE) - .execute(createGetDataSourceQueryActionLister(SearchResponse.class, actionListener)) + .execute(createGetTIFJobParameterQueryActionLister(SearchResponse.class, actionListener)) ); } /** - * Get all datasources up to {@code MAX_SIZE} from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * Get all tif jobs up to {@code MAX_SIZE} from an index {@code TIFJobExtension.JOB_INDEX_NAME} */ - public List getAllDatasources() { + public List getAllTIFJobParameters() { SearchResponse response = StashedThreadContext.run( client, - () -> client.prepareSearch(DatasourceExtension.JOB_INDEX_NAME) + () -> client.prepareSearch(TIFJobExtension.JOB_INDEX_NAME) 
.setQuery(QueryBuilders.matchAllQuery()) .setPreference(Preference.PRIMARY.type()) .setSize(MAX_SIZE) .execute() - .actionGet(clusterSettings.get(ThreatIntelSettings.THREAT_INTEL_TIMEOUT)) + .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)) ); List bytesReferences = toBytesReferences(response); - return bytesReferences.stream().map(bytesRef -> toDatasource(bytesRef)).collect(Collectors.toList()); + return bytesReferences.stream().map(bytesRef -> toTIFJobParameter(bytesRef)).collect(Collectors.toList()); } - private ActionListener createGetDataSourceQueryActionLister( + private ActionListener createGetTIFJobParameterQueryActionLister( final Class response, - final ActionListener> actionListener + final ActionListener> actionListener ) { return new ActionListener() { @Override public void onResponse(final T response) { try { List bytesReferences = toBytesReferences(response); - List datasources = bytesReferences.stream() - .map(bytesRef -> toDatasource(bytesRef)) + List tifJobParameters = bytesReferences.stream() + .map(bytesRef -> toTIFJobParameter(bytesRef)) .collect(Collectors.toList()); - actionListener.onResponse(datasources); + actionListener.onResponse(tifJobParameters); } catch (Exception e) { actionListener.onFailure(e); } @@ -365,14 +362,14 @@ private List toBytesReferences(final Object response) { } } - private Datasource toDatasource(final BytesReference bytesReference) { + private TIFJobParameter toTIFJobParameter(final BytesReference bytesReference) { try { XContentParser parser = XContentHelper.createParser( NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, bytesReference ); - return Datasource.PARSER.parse(parser, null); + return TIFJobParameter.PARSER.parse(parser, null); } catch (IOException e) { throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java new file mode 100644 index 000000000..dfe16f4c6 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java @@ -0,0 +1,167 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.jobscheduler.spi.JobExecutionContext; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +import java.io.IOException; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; +import java.time.Instant; + +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFExecutor; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.threadpool.ThreadPool; + +/** + * Job Parameter update task + * + * This is a background task which is responsible for updating threat intel feed data + */ +public class TIFJobRunner implements ScheduledJobRunner { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + private static TIFJobRunner INSTANCE; + + public static TIFJobRunner getJobRunnerInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (TIFJobRunner.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new TIFJobRunner(); + return INSTANCE; + } + } + + private ClusterService clusterService; + + // threat intel specific variables + private TIFJobUpdateService jobSchedulerUpdateService; + private 
TIFJobParameterService jobSchedulerParameterService; + private TIFExecutor threatIntelExecutor; + private TIFLockService lockService; + private boolean initialized; + private ThreadPool threadPool; + + public void setThreadPool(ThreadPool threadPool) { + this.threadPool = threadPool; + } + + private TIFJobRunner() { + // Singleton class, use getJobRunner method instead of constructor + } + + public void initialize( + final ClusterService clusterService, + final TIFJobUpdateService jobSchedulerUpdateService, + final TIFJobParameterService jobSchedulerParameterService, + final TIFExecutor threatIntelExecutor, + final TIFLockService threatIntelLockService, + final ThreadPool threadPool + ) { + this.clusterService = clusterService; + this.jobSchedulerUpdateService = jobSchedulerUpdateService; + this.jobSchedulerParameterService = jobSchedulerParameterService; + this.threatIntelExecutor = threatIntelExecutor; + this.lockService = threatIntelLockService; + this.threadPool = threadPool; + this.initialized = true; + } + + @Override + public void runJob(final ScheduledJobParameter jobParameter, final JobExecutionContext context) { + if (initialized == false) { + throw new AssertionError("This instance is not initialized"); + } + + log.info("Update job started for a job parameter[{}]", jobParameter.getName()); + if (jobParameter instanceof TIFJobParameter == false) { + log.error("Illegal state exception: job parameter is not instance of Job Scheduler Parameter"); + throw new IllegalStateException( + "job parameter is not instance of Job Scheduler Parameter, type: " + jobParameter.getClass().getCanonicalName() + ); + } + threadPool.generic().submit(updateJobRunner(jobParameter)); +// threatIntelExecutor.forJobSchedulerParameterUpdate().submit(updateJobRunner(jobParameter)); + } + + /** + * Update threat intel feed data + * + * Lock is used so that only one of nodes run this task. 
+ * + * @param jobParameter job parameter + */ + protected Runnable updateJobRunner(final ScheduledJobParameter jobParameter) { + return () -> { + Optional lockModel = lockService.acquireLock( + jobParameter.getName(), + TIFLockService.LOCK_DURATION_IN_SECONDS + ); + if (lockModel.isEmpty()) { + log.error("Failed to update. Another processor is holding a lock for job parameter[{}]", jobParameter.getName()); + return; + } + + LockModel lock = lockModel.get(); + try { + updateJobParameter(jobParameter, lockService.getRenewLockRunnable(new AtomicReference<>(lock))); + } catch (Exception e) { + log.error("Failed to update job parameter[{}]", jobParameter.getName(), e); + } finally { + lockService.releaseLock(lock); + } + }; + } + + protected void updateJobParameter(final ScheduledJobParameter jobParameter, final Runnable renewLock) throws IOException { + TIFJobParameter jobSchedulerParameter = jobSchedulerParameterService.getJobParameter(jobParameter.getName()); + /** + * If delete request comes while update task is waiting on a queue for other update tasks to complete, + * because update task for this jobSchedulerParameter didn't acquire a lock yet, delete request is processed. + * When it is this jobSchedulerParameter's turn to run, it will find that the jobSchedulerParameter is deleted already. + * Therefore, we stop the update process when data source does not exist. + */ + if (jobSchedulerParameter == null) { + log.info("Job parameter[{}] does not exist", jobParameter.getName()); + return; + } + + if (TIFJobState.AVAILABLE.equals(jobSchedulerParameter.getState()) == false) { + log.error("Invalid jobSchedulerParameter state. 
Expecting {} but received {}", TIFJobState.AVAILABLE, jobSchedulerParameter.getState()); + jobSchedulerParameter.disable(); + jobSchedulerParameter.getUpdateStats().setLastFailedAt(Instant.now()); + jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + return; + } + try { + jobSchedulerUpdateService.deleteAllTifdIndices(jobSchedulerParameter); + if (TIFJobTask.DELETE_UNUSED_INDICES.equals(jobSchedulerParameter.getTask()) == false) { + jobSchedulerUpdateService.createThreatIntelFeedData(jobSchedulerParameter, renewLock); + } +// jobSchedulerUpdateService.deleteUnusedIndices(jobSchedulerParameter); + } catch (Exception e) { + log.error("Failed to update jobSchedulerParameter for {}", jobSchedulerParameter.getName(), e); + jobSchedulerParameter.getUpdateStats().setLastFailedAt(Instant.now()); + jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + } finally { +// jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + jobSchedulerUpdateService.updateJobSchedulerParameter(jobSchedulerParameter, jobSchedulerParameter.getSchedule(), TIFJobTask.ALL); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceTask.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobTask.java similarity index 78% rename from src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceTask.java rename to src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobTask.java index b0e9ac184..1221a3540 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceTask.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobTask.java @@ -6,9 +6,9 @@ package org.opensearch.securityanalytics.threatIntel.jobscheduler; /** - * Task that {@link DatasourceRunner} will run + * Task that {@link TIFJobRunner} 
will run */ -public enum DatasourceTask { +public enum TIFJobTask { /** * Do everything */ diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java new file mode 100644 index 000000000..710d8015c --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java @@ -0,0 +1,287 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.opensearch.OpenSearchException; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; + +import org.opensearch.core.rest.RestStatus; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedParser; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; + +public class TIFJobUpdateService { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + private static final int SLEEP_TIME_IN_MILLIS = 5000; // 5 seconds + private static final int MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS = 10 * 
60 * 60 * 1000; // 10 hours + private final ClusterService clusterService; + private final ClusterSettings clusterSettings; + private final TIFJobParameterService jobSchedulerParameterService; + private final ThreatIntelFeedDataService threatIntelFeedDataService; + + public TIFJobUpdateService( + final ClusterService clusterService, + final TIFJobParameterService jobSchedulerParameterService, + final ThreatIntelFeedDataService threatIntelFeedDataService + ) { + this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); + this.jobSchedulerParameterService = jobSchedulerParameterService; + this.threatIntelFeedDataService = threatIntelFeedDataService; + } + + // functions used in job Runner + /** + * Delete all indices except the one which is being used + * + * @param jobSchedulerParameter + */ + public void deleteAllTifdIndices(final TIFJobParameter jobSchedulerParameter) { + try { + List indicesToDelete = jobSchedulerParameter.getIndices() + .stream() +// .filter(index -> index.equals(jobSchedulerParameter.currentIndexName()) == false) + .collect(Collectors.toList()); + + List deletedIndices = deleteIndices(indicesToDelete); + + if (deletedIndices.isEmpty() == false) { + jobSchedulerParameter.getIndices().removeAll(deletedIndices); + jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + } + } catch (Exception e) { + log.error("Failed to delete old indices for {}", jobSchedulerParameter.getName(), e); + } + } + + /** + * Update jobSchedulerParameter with given systemSchedule and task + * + * @param jobSchedulerParameter jobSchedulerParameter to update + * @param systemSchedule new system schedule value + * @param task new task value + */ + public void updateJobSchedulerParameter(final TIFJobParameter jobSchedulerParameter, final IntervalSchedule systemSchedule, final TIFJobTask task) { + boolean updated = false; + if (jobSchedulerParameter.getSchedule().equals(systemSchedule) == false) { //TODO: 
will always be true + jobSchedulerParameter.setSchedule(systemSchedule); + updated = true; + } + if (jobSchedulerParameter.getTask().equals(task) == false) { + jobSchedulerParameter.setTask(task); + updated = true; + } // this is called when task == DELETE + if (updated) { + jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + } + } + + private List deleteIndices(final List indicesToDelete) { + List deletedIndices = new ArrayList<>(indicesToDelete.size()); + for (String index : indicesToDelete) { + if (clusterService.state().metadata().hasIndex(index) == false) { + deletedIndices.add(index); + continue; + } + try { + threatIntelFeedDataService.deleteThreatIntelDataIndex(index); + deletedIndices.add(index); + } catch (Exception e) { + log.error("Failed to delete an index [{}]", index, e); + } + } + return deletedIndices; + } + + + /** + * Update threat intel feed data + * + * The first column is ip range field regardless of its header name. + * Therefore, we don't store the first column's header name. + * + * @param jobSchedulerParameter the jobSchedulerParameter + * @param renewLock runnable to renew lock + * + * @throws IOException + */ + public void createThreatIntelFeedData(final TIFJobParameter jobSchedulerParameter, final Runnable renewLock) throws IOException { + // parse YAML containing list of threat intel feeds + // for each feed (ex. 
Feodo) + // parse feed specific YAML containing TIFMetadata + + // for every threat intel feed + // create and store a new TIFMetadata object + + // use the TIFMetadata to switch case feed type + // parse through file and save threat intel feed data + + List containedIocs = new ArrayList<>(); + TIFMetadata tifMetadata = new TIFMetadata("feedid", "url", "name", "org", + "descr", "csv", containedIocs, "1"); // TODO: example tif metadata + + Instant startTime = Instant.now(); + String indexName = setupIndex(jobSchedulerParameter); + String[] header; + + Boolean succeeded; + + switch(tifMetadata.getFeedType()) { + case "csv": + try (CSVParser reader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)) { + // iterate until we find first line without '#' + CSVRecord findHeader = reader.iterator().next(); + while (findHeader.get(0).charAt(0) == '#' || findHeader.get(0).charAt(0) == ' ') { + findHeader = reader.iterator().next(); + } + CSVRecord headerLine = findHeader; + header = ThreatIntelFeedParser.validateHeader(headerLine).values(); + + threatIntelFeedDataService.saveThreatIntelFeedDataCSV(indexName, header, reader.iterator(), renewLock, tifMetadata); + } + default: + // if the feed type doesn't match any of the supported feed types, throw an exception + succeeded = false; + } + + if (!succeeded) { + log.error("Exception: failed to parse correct feed type"); + throw new OpenSearchException("Exception: failed to parse correct feed type"); + } + + // end the loop here + + waitUntilAllShardsStarted(indexName, MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS); + Instant endTime = Instant.now(); + updateJobSchedulerParameterAsSucceeded(indexName, jobSchedulerParameter, startTime, endTime); + } + + // helper functions + /*** + * Update jobSchedulerParameter as succeeded + * + * @param jobSchedulerParameter the jobSchedulerParameter + */ + private void updateJobSchedulerParameterAsSucceeded( + final String newIndexName, + final TIFJobParameter 
jobSchedulerParameter, + final Instant startTime, + final Instant endTime + ) { + jobSchedulerParameter.setCurrentIndex(newIndexName); // TODO: remove current index? + jobSchedulerParameter.getUpdateStats().setLastSucceededAt(endTime); + jobSchedulerParameter.getUpdateStats().setLastProcessingTimeInMillis(endTime.toEpochMilli() - startTime.toEpochMilli()); + jobSchedulerParameter.enable(); + jobSchedulerParameter.setState(TIFJobState.AVAILABLE); + jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + log.info( + "threat intel feed database creation succeeded for {} and took {} seconds", + jobSchedulerParameter.getName(), + Duration.between(startTime, endTime) + ); + } + + /*** + * Setup index to add a new threat intel feed data + * + * @param jobSchedulerParameter the jobSchedulerParameter + * @return new index name + */ + private String setupIndex(final TIFJobParameter jobSchedulerParameter) { + String indexName = jobSchedulerParameter.newIndexName(UUID.randomUUID().toString()); + jobSchedulerParameter.getIndices().add(indexName); + jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + threatIntelFeedDataService.createIndexIfNotExists(indexName); + return indexName; + } + + /** + * We wait until all shards are ready to serve search requests before updating job scheduler parameter to + * point to a new index so that there won't be latency degradation during threat intel feed data update + * + * @param indexName the indexName + */ + protected void waitUntilAllShardsStarted(final String indexName, final int timeout) { + Instant start = Instant.now(); + try { + while (Instant.now().toEpochMilli() - start.toEpochMilli() < timeout) { + if (clusterService.state().routingTable().allShards(indexName).stream().allMatch(shard -> shard.started())) { + return; + } + Thread.sleep(SLEEP_TIME_IN_MILLIS); + } + throw new OpenSearchException( + "index[{}] replication did not complete after {} millis", + 
MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS + ); + } catch (InterruptedException e) { + log.error("runtime exception", e); + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + } + } + + +// /** +// * Determine if update is needed or not +// * +// * Update is needed when all following conditions are met +// * 1. updatedAt value in jobSchedulerParameter is equal or before updateAt value in tifMetadata +// * 2. SHA256 hash value in jobSchedulerParameter is different with SHA256 hash value in tifMetadata +// * +// * @param jobSchedulerParameter +// * @param tifMetadata +// * @return +// */ +// private boolean shouldUpdate(final TIFJobParameter jobSchedulerParameter, final TIFMetadata tifMetadata) { +// if (jobSchedulerParameter.getDatabase().getUpdatedAt() != null +// && jobSchedulerParameter.getDatabase().getUpdatedAt().toEpochMilli() > tifMetadata.getUpdatedAt()) { +// return false; +// } +// +// if (tifMetadata.getSha256Hash().equals(jobSchedulerParameter.getDatabase().getSha256Hash())) { +// return false; +// } +// return true; +// } + +// /** +// * Return header fields of threat intel feed data with given url of a manifest file +// * +// * The first column is ip range field regardless its header name. +// * Therefore, we don't store the first column's header name. 
+// * +// * @param TIFMetadataUrl the url of a manifest file +// * @return header fields of threat intel feed +// */ +// public List getHeaderFields(String TIFMetadataUrl) throws IOException { +// URL url = new URL(TIFMetadataUrl); +// TIFMetadata tifMetadata = TIFMetadata.Builder.build(url); +// +// try (CSVParser reader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)) { +// String[] fields = reader.iterator().next().values(); +// return Arrays.asList(fields).subList(1, fields.length); +// } +// } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceAction.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceAction.java deleted file mode 100644 index 6befdde04..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceAction.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.action.ActionType; - -/** - * Threat intel datasource get action - */ -public class GetDatasourceAction extends ActionType { - /** - * Get datasource action instance - */ - public static final GetDatasourceAction INSTANCE = new GetDatasourceAction(); - /** - * Get datasource action name - */ - public static final String NAME = "cluster:admin/security_analytics/datasource/get"; - - private GetDatasourceAction() { - super(NAME, GetDatasourceResponse::new); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceTransportAction.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceTransportAction.java deleted file mode 100644 index cb1419517..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/GetDatasourceTransportAction.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * 
SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.OpenSearchException; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.common.inject.Inject; -import org.opensearch.core.action.ActionListener; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; -import org.opensearch.tasks.Task; -import org.opensearch.transport.TransportService; - -import java.util.Collections; -import java.util.List; - -/** - * Transport action to get datasource - */ -public class GetDatasourceTransportAction extends HandledTransportAction { - private final DatasourceDao datasourceDao; - - /** - * Default constructor - * @param transportService the transport service - * @param actionFilters the action filters - * @param datasourceDao the datasource facade - */ - @Inject - public GetDatasourceTransportAction( - final TransportService transportService, - final ActionFilters actionFilters, - final DatasourceDao datasourceDao - ) { - super(GetDatasourceAction.NAME, transportService, actionFilters, GetDatasourceRequest::new); - this.datasourceDao = datasourceDao; - } - - @Override - protected void doExecute(final Task task, final GetDatasourceRequest request, final ActionListener listener) { - if (shouldGetAllDatasource(request)) { - // We don't expect too many data sources. Therefore, querying all data sources without pagination should be fine. 
- datasourceDao.getAllDatasources(newActionListener(listener)); - } else { - datasourceDao.getDatasources(request.getNames(), newActionListener(listener)); - } - } - - private boolean shouldGetAllDatasource(final GetDatasourceRequest request) { - if (request.getNames() == null) { - throw new OpenSearchException("names in a request should not be null"); - } - - return request.getNames().length == 0 || (request.getNames().length == 1 && "_all".equals(request.getNames()[0])); - } - - protected ActionListener> newActionListener(final ActionListener listener) { - return new ActionListener<>() { - @Override - public void onResponse(final List datasources) { - listener.onResponse(new GetDatasourceResponse(datasources)); - } - - @Override - public void onFailure(final Exception e) { - if (e instanceof IndexNotFoundException) { - listener.onResponse(new GetDatasourceResponse(Collections.emptyList())); - return; - } - listener.onFailure(e); - } - }; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceRequest.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceRequest.java deleted file mode 100644 index dac67ed43..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/PutDatasourceRequest.java +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.List; -import java.util.Locale; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.core.ParseField; -import 
org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; -import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; - -/** - * Threat intel datasource creation request - */ -public class PutDatasourceRequest extends ActionRequest { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - public static final ParseField FEED_FORMAT_FIELD = new ParseField("feed_format"); - public static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); - public static final ParseField FEED_NAME_FIELD = new ParseField("feed_name"); - public static final ParseField DESCRIPTION_FIELD = new ParseField("description"); - public static final ParseField ORGANIZATION_FIELD = new ParseField("organization"); - public static final ParseField CONTAINED_IOCS_FIELD = new ParseField("contained_iocs_field"); - public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); - private static final ParameterValidator VALIDATOR = new ParameterValidator(); - - /** - * @param name the datasource name - * @return the datasource name - */ - private String name; - - private String feedFormat; - - /** - * @param endpoint url to a manifest file for a datasource - * @return url to a manifest file for a datasource - */ - private String endpoint; - - private String feedName; - - private String description; - - private String organization; - - private List contained_iocs_field; - - public void setFeedFormat(String feedFormat) { - this.feedFormat = feedFormat; - } - - public void setThisEndpoint(String endpoint) { - this.endpoint = endpoint; - } - - public void setFeedName(String feedName) { - this.feedName = feedName; - } - - public void setDescription(String description) 
{ - this.description = description; - } - - public void setOrganization(String organization) { - this.organization = organization; - } - - public void setContained_iocs_field(List contained_iocs_field) { - this.contained_iocs_field = contained_iocs_field; - } - - public List getContained_iocs_field() { - return contained_iocs_field; - } - - public String getFeedFormat() { - return feedFormat; - } - - public String getFeedName() { - return feedName; - } - - @Override - public String getDescription() { - return description; - } - - public String getOrganization() { - return organization; - } - /** - * @param updateInterval update interval of a datasource - * @return update interval of a datasource - */ - private TimeValue updateInterval; - - /** - * Parser of a datasource - */ - public static final ObjectParser PARSER; - static { - PARSER = new ObjectParser<>("put_datasource"); - PARSER.declareString((request, val) -> request.setFeedFormat(val), FEED_FORMAT_FIELD); - PARSER.declareString((request, val) -> request.setThisEndpoint(val), ENDPOINT_FIELD); - PARSER.declareString((request, val) -> request.setFeedName(val), FEED_NAME_FIELD); - PARSER.declareString((request, val) -> request.setDescription(val), DESCRIPTION_FIELD); - PARSER.declareString((request, val) -> request.setOrganization(val), ORGANIZATION_FIELD); -// PARSER.declareStringArray((request, val[]) -> request.setContained_iocs_field(val), CONTAINED_IOCS_FIELD); - PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); - } - - /** - * Default constructor - * @param name name of a datasource - */ - public PutDatasourceRequest(final String name) { - this.name = name; - } - - /** - * Constructor with stream input - * @param in the stream input - * @throws IOException IOException - */ - public PutDatasourceRequest(final StreamInput in) throws IOException { - super(in); - this.name = in.readString(); - this.feedFormat = in.readString(); - 
this.endpoint = in.readString(); - this.feedName = in.readString(); - this.description = in.readString(); - this.organization = in.readString(); - this.contained_iocs_field = in.readStringList(); - this.updateInterval = in.readTimeValue(); - } - - @Override - public void writeTo(final StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(name); - out.writeString(feedFormat); - out.writeString(endpoint); - out.writeString(feedName); - out.writeString(description); - out.writeString(organization); - out.writeStringCollection(contained_iocs_field); - out.writeTimeValue(updateInterval); - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException errors = new ActionRequestValidationException(); - List errorMsgs = VALIDATOR.validateDatasourceName(name); - if (errorMsgs.isEmpty() == false) { - errorMsgs.stream().forEach(msg -> errors.addValidationError(msg)); - } - validateEndpoint(errors); - validateUpdateInterval(errors); - return errors.validationErrors().isEmpty() ? null : errors; - } - - /** - * Conduct following validation on endpoint - * 1. endpoint format complies with RFC-2396 - * 2. validate manifest file from the endpoint - * - * @param errors the errors to add error messages - */ - private void validateEndpoint(final ActionRequestValidationException errors) { - try { - URL url = new URL(endpoint); - url.toURI(); // Validate URL complies with RFC-2396 - validateManifestFile(url, errors); - } catch (MalformedURLException | URISyntaxException e) { - log.info("Invalid URL[{}] is provided", endpoint, e); - errors.addValidationError("Invalid URL format is provided"); - } - } - - /** - * Conduct following validation on url - * 1. can read manifest file from the endpoint - * 2. the url in the manifest file complies with RFC-2396 - * 3. 
updateInterval is less than validForInDays value in the manifest file - * - * @param url the url to validate - * @param errors the errors to add error messages - */ - private void validateManifestFile(final URL url, final ActionRequestValidationException errors) { - DatasourceManifest manifest; - try { - manifest = DatasourceManifest.Builder.build(url); - } catch (Exception e) { - log.info("Error occurred while reading a file from {}", url, e); - errors.addValidationError(String.format(Locale.ROOT, "Error occurred while reading a file from %s: %s", url, e.getMessage())); - return; - } - - try { - new URL(manifest.getUrl()).toURI(); // Validate URL complies with RFC-2396 - } catch (MalformedURLException | URISyntaxException e) { - log.info("Invalid URL[{}] is provided for url field in the manifest file", manifest.getUrl(), e); - errors.addValidationError("Invalid URL format is provided for url field in the manifest file"); - return; - } - -// if (manifest.getValidForInDays() != null && updateInterval.days() >= manifest.getValidForInDays()) { -// errors.addValidationError( -// String.format( -// Locale.ROOT, -// "updateInterval %d should be smaller than %d", -// updateInterval.days(), -// manifest.getValidForInDays() -// ) -// ); -// } - } - - /** - * Validate updateInterval is equal or larger than 1 - * - * @param errors the errors to add error messages - */ - private void validateUpdateInterval(final ActionRequestValidationException errors) { - if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) { - errors.addValidationError("Update interval should be equal to or larger than 1 day"); - } - } - - public String getName() { - return name; - } - - public String getEndpoint() { - return this.endpoint; - } - - public void setEndpoint(String newEndpoint) { - this.endpoint = newEndpoint; - } - - public TimeValue getUpdateInterval() { - return this.updateInterval; - } - - public void setUpdateInterval(TimeValue timeValue) { - this.updateInterval = timeValue; - } 
-} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestDeleteDatasourceHandler.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestDeleteDatasourceHandler.java deleted file mode 100644 index 3da4c4abc..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestDeleteDatasourceHandler.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.RestToXContentListener; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; - -import static org.opensearch.rest.RestRequest.Method.DELETE; - -/** - * Rest handler for threat intel datasource delete request - */ -public class RestDeleteDatasourceHandler extends BaseRestHandler { - private static final String ACTION_NAME = "threatintel_datasource_delete"; - private static final String PARAMS_NAME = "name"; - - @Override - public String getName() { - return ACTION_NAME; - } - - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final String name = request.param(PARAMS_NAME); - final DeleteDatasourceRequest deleteDatasourceRequest = new DeleteDatasourceRequest(name); - - return channel -> client.executeLocally( - DeleteDatasourceAction.INSTANCE, - deleteDatasourceRequest, - new RestToXContentListener<>(channel) - ); - } - - @Override - public List routes() { - String path = String.join("/", "/_plugins/_security_analytics", String.format(Locale.ROOT, "threatintel/datasource/{%s}", PARAMS_NAME)); - return List.of(new Route(DELETE, path)); - } -} diff --git 
a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestGetDatasourceHandler.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestGetDatasourceHandler.java deleted file mode 100644 index ddbecdad5..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestGetDatasourceHandler.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.core.common.Strings; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.RestToXContentListener; - -import java.util.List; - -import static org.opensearch.rest.RestRequest.Method.GET; - -/** - * Rest handler for threat intel datasource get request - */ -public class RestGetDatasourceHandler extends BaseRestHandler { - private static final String ACTION_NAME = "threatintel_datasource_get"; - - @Override - public String getName() { - return ACTION_NAME; - } - - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { - final String[] names = request.paramAsStringArray("name", Strings.EMPTY_ARRAY); - final GetDatasourceRequest getDatasourceRequest = new GetDatasourceRequest(names); - - return channel -> client.executeLocally(GetDatasourceAction.INSTANCE, getDatasourceRequest, new RestToXContentListener<>(channel)); - } - - @Override - public List routes() { - return List.of( - new Route(GET, String.join("/", "/_plugins/_security_analytics", "threatintel/datasource")), - new Route(GET, String.join("/", "/_plugins/_security_analytics", "threatintel/datasource/{name}")) - ); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java 
b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java deleted file mode 100644 index 5c9ecd7b4..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestPutDatasourceHandler.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.common.settings.ClusterSettings; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.RestToXContentListener; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelSettings; - -import java.io.IOException; -import java.util.List; - -import static org.opensearch.rest.RestRequest.Method.PUT; - -/** - * Rest handler for threat intel datasource creation - * - * This handler handles a request of - * PUT /_plugins/security_analytics/threatintel/datasource/{id} - * { - * "endpoint": {endpoint}, - * "update_interval_in_days": 3 - * } - * - * When request is received, it will create a datasource by downloading threat intel feed from the endpoint. - * After the creation of datasource is completed, it will schedule the next update task after update_interval_in_days. 
- * - */ -public class RestPutDatasourceHandler extends BaseRestHandler { - private static final String ACTION_NAME = "threatintel_datasource_put"; - private final ClusterSettings clusterSettings; - - public RestPutDatasourceHandler(final ClusterSettings clusterSettings) { - this.clusterSettings = clusterSettings; - } - - @Override - public String getName() { - return ACTION_NAME; - } - - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final PutDatasourceRequest putDatasourceRequest = new PutDatasourceRequest(request.param("name")); - if (request.hasContentOrSourceParam()) { - try (XContentParser parser = request.contentOrSourceParamParser()) { - PutDatasourceRequest.PARSER.parse(parser, putDatasourceRequest, null); - } - } - if (putDatasourceRequest.getEndpoint() == null) { - putDatasourceRequest.setEndpoint(clusterSettings.get(ThreatIntelSettings.DATASOURCE_ENDPOINT)); - } - if (putDatasourceRequest.getUpdateInterval() == null) { - putDatasourceRequest.setUpdateInterval(TimeValue.timeValueDays(clusterSettings.get(ThreatIntelSettings.DATASOURCE_UPDATE_INTERVAL))); - } - return channel -> client.executeLocally(PutDatasourceAction.INSTANCE, putDatasourceRequest, new RestToXContentListener<>(channel)); - } - - @Override - public List routes() { - String path = String.join("/", "/_plugins/_security_analytics", "threatintel/datasource/{name}"); - return List.of(new Route(PUT, path)); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java deleted file mode 100644 index 3f755670f..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/RestUpdateDatasourceHandler.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package 
org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.RestToXContentListener; - -import java.io.IOException; -import java.util.List; - -import static org.opensearch.rest.RestRequest.Method.PUT; - -/** - * Rest handler for threat intel datasource update request - */ -public class RestUpdateDatasourceHandler extends BaseRestHandler { - private static final String ACTION_NAME = "threatintel_datasource_update"; - - @Override - public String getName() { - return ACTION_NAME; - } - - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final UpdateDatasourceRequest updateDatasourceRequest = new UpdateDatasourceRequest(request.param("name")); - if (request.hasContentOrSourceParam()) { - try (XContentParser parser = request.contentOrSourceParamParser()) { - UpdateDatasourceRequest.PARSER.parse(parser, updateDatasourceRequest, null); - } - } - return channel -> client.executeLocally( - UpdateDatasourceAction.INSTANCE, - updateDatasourceRequest, - new RestToXContentListener<>(channel) - ); - } - - @Override - public List routes() { - String path = String.join("/", "/_plugins/_security_analytics", "threatintel/datasource/{name}/_settings"); - return List.of(new Route(PUT, path)); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java deleted file mode 100644 index 7d70f45aa..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceRequest.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package 
org.opensearch.securityanalytics.threatIntel.action; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; -import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.Locale; - -/** - * threat intel datasource update request - */ -public class UpdateDatasourceRequest extends ActionRequest { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - public static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); - public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); - private static final int MAX_DATASOURCE_NAME_BYTES = 255; - private static final ParameterValidator VALIDATOR = new ParameterValidator(); - - /** - * @param name the datasource name - * @return the datasource name - */ - private String name; - - /** - * @param endpoint url to a manifest file for a datasource - * @return url to a manifest file for a datasource - */ - private String endpoint; - - /** - * @param updateInterval update interval of a datasource - * @return update interval of a datasource - */ - private TimeValue updateInterval; - - /** - * Parser of a datasource - */ - public static final ObjectParser PARSER; - static { - PARSER = new ObjectParser<>("update_datasource"); - PARSER.declareString((request, 
val) -> request.setEndpoint(val), ENDPOINT_FIELD); - PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); - } - - public String getName() { - return name; - } - public String getEndpoint() { - return endpoint; - } - private void setEndpoint(String endpoint) { - this.endpoint = endpoint; - } - - public TimeValue getUpdateInterval() { - return updateInterval; - } - - private void setUpdateInterval(TimeValue updateInterval){ - this.updateInterval = updateInterval; - } - - /** - * Constructor - * @param name name of a datasource - */ - public UpdateDatasourceRequest(final String name) { - this.name = name; - } - - /** - * Constructor - * @param in the stream input - * @throws IOException IOException - */ - public UpdateDatasourceRequest(final StreamInput in) throws IOException { - super(in); - this.name = in.readString(); - this.endpoint = in.readOptionalString(); - this.updateInterval = in.readOptionalTimeValue(); - } - - @Override - public void writeTo(final StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(name); - out.writeOptionalString(endpoint); - out.writeOptionalTimeValue(updateInterval); - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException errors = new ActionRequestValidationException(); - if (VALIDATOR.validateDatasourceName(name).isEmpty() == false) { - errors.addValidationError("no such datasource exist"); - } - if (endpoint == null && updateInterval == null) { - errors.addValidationError("no values to update"); - } - - validateEndpoint(errors); - validateUpdateInterval(errors); - - return errors.validationErrors().isEmpty() ? null : errors; - } - - /** - * Conduct following validation on endpoint - * 1. endpoint format complies with RFC-2396 - * 2. 
validate manifest file from the endpoint - * - * @param errors the errors to add error messages - */ - private void validateEndpoint(final ActionRequestValidationException errors) { - if (endpoint == null) { - return; - } - - try { - URL url = new URL(endpoint); - url.toURI(); // Validate URL complies with RFC-2396 - validateManifestFile(url, errors); - } catch (MalformedURLException | URISyntaxException e) { - log.info("Invalid URL[{}] is provided", endpoint, e); - errors.addValidationError("Invalid URL format is provided"); - } - } - - /** - * Conduct following validation on url - * 1. can read manifest file from the endpoint - * 2. the url in the manifest file complies with RFC-2396 - * - * @param url the url to validate - * @param errors the errors to add error messages - */ - private void validateManifestFile(final URL url, final ActionRequestValidationException errors) { - DatasourceManifest manifest; - try { - manifest = DatasourceManifest.Builder.build(url); - } catch (Exception e) { - log.info("Error occurred while reading a file from {}", url, e); - errors.addValidationError(String.format(Locale.ROOT, "Error occurred while reading a file from %s: %s", url, e.getMessage())); - return; - } - - try { - new URL(manifest.getUrl()).toURI(); // Validate URL complies with RFC-2396 - } catch (MalformedURLException | URISyntaxException e) { - log.info("Invalid URL[{}] is provided for url field in the manifest file", manifest.getUrl(), e); - errors.addValidationError("Invalid URL format is provided for url field in the manifest file"); - } - } - - /** - * Validate updateInterval is equal or larger than 1 - * - * @param errors the errors to add error messages - */ - private void validateUpdateInterval(final ActionRequestValidationException errors) { - if (updateInterval == null) { - return; - } - - if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) { - errors.addValidationError("Update interval should be equal to or larger than 1 day"); - } - } -} diff 
--git a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java b/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java deleted file mode 100644 index 11d99e41c..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/action/UpdateDatasourceTransportAction.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.OpenSearchStatusException; -import org.opensearch.ResourceNotFoundException; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.common.inject.Inject; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; -import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.Datasource; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceTask; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.DatasourceUpdateService; -import org.opensearch.tasks.Task; -import org.opensearch.threadpool.ThreadPool; -import org.opensearch.transport.TransportService; - -import java.io.IOException; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.List; -import java.util.Locale; - -/** - * Transport action to update datasource - */ -public class UpdateDatasourceTransportAction extends HandledTransportAction { - private static final long 
LOCK_DURATION_IN_SECONDS = 300l; - private final ThreatIntelLockService lockService; - private final DatasourceDao datasourceDao; - private final DatasourceUpdateService datasourceUpdateService; - private final ThreadPool threadPool; - - /** - * Constructor - * - * @param transportService the transport service - * @param actionFilters the action filters - * @param lockService the lock service - * @param datasourceDao the datasource facade - * @param datasourceUpdateService the datasource update service - */ - @Inject - public UpdateDatasourceTransportAction( - final TransportService transportService, - final ActionFilters actionFilters, - final ThreatIntelLockService lockService, - final DatasourceDao datasourceDao, - final DatasourceUpdateService datasourceUpdateService, - final ThreadPool threadPool - ) { - super(UpdateDatasourceAction.NAME, transportService, actionFilters, UpdateDatasourceRequest::new); - this.lockService = lockService; - this.datasourceUpdateService = datasourceUpdateService; - this.datasourceDao = datasourceDao; - this.threadPool = threadPool; - } - - /** - * Get a lock and update datasource - * - * @param task the task - * @param request the request - * @param listener the listener - */ - @Override - protected void doExecute(final Task task, final UpdateDatasourceRequest request, final ActionListener listener) { - lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { - if (lock == null) { - listener.onFailure( - new OpenSearchStatusException("Another processor is holding a lock on the resource. 
Try again later", RestStatus.BAD_REQUEST) - ); - return; - } - try { - // TODO: makes every sub-methods as async call to avoid using a thread in generic pool - threadPool.generic().submit(() -> { - try { - Datasource datasource = datasourceDao.getDatasource(request.getName()); - if (datasource == null) { - throw new ResourceNotFoundException("no such datasource exist"); - } - if (DatasourceState.AVAILABLE.equals(datasource.getState()) == false) { - throw new IllegalArgumentException( - String.format(Locale.ROOT, "data source is not in an [%s] state", DatasourceState.AVAILABLE) - ); - } - validate(request, datasource); - updateIfChanged(request, datasource); - lockService.releaseLock(lock); - listener.onResponse(new AcknowledgedResponse(true)); - } catch (Exception e) { - lockService.releaseLock(lock); - listener.onFailure(e); - } - }); - } catch (Exception e) { - lockService.releaseLock(lock); - listener.onFailure(e); - } - }, exception -> listener.onFailure(exception))); - } - - private void updateIfChanged(final UpdateDatasourceRequest request, final Datasource datasource) { - boolean isChanged = false; - if (isEndpointChanged(request, datasource)) { - datasource.setEndpoint(request.getEndpoint()); - isChanged = true; - } - if (isUpdateIntervalChanged(request)) { - datasource.setSchedule(new IntervalSchedule(Instant.now(), (int) request.getUpdateInterval().getDays(), ChronoUnit.DAYS)); - datasource.setTask(DatasourceTask.ALL); - isChanged = true; - } - - if (isChanged) { - datasourceDao.updateDatasource(datasource); - } - } - - /** - * Additional validation based on an existing datasource - * - * Basic validation is done in UpdateDatasourceRequest#validate - * In this method we do additional validation based on an existing datasource - * - * 1. Check the compatibility of new fields and old fields - * 2. Check the updateInterval is less than validForInDays in datasource - * - * This method throws exception if one of validation fails. 
- * - * @param request the update request - * @param datasource the existing datasource - * @throws IOException the exception - */ - private void validate(final UpdateDatasourceRequest request, final Datasource datasource) throws IOException { - validateFieldsCompatibility(request, datasource); - } - - private void validateFieldsCompatibility(final UpdateDatasourceRequest request, final Datasource datasource) throws IOException { - if (isEndpointChanged(request, datasource) == false) { - return; - } - - List fields = datasourceUpdateService.getHeaderFields(request.getEndpoint()); - if (datasource.isCompatible(fields) == false) { -// throw new IncompatibleDatasourceException( -// "new fields [{}] does not contain all old fields [{}]", -// fields.toString(), -// datasource.getDatabase().getFields().toString() -// ); - throw new OpenSearchStatusException("new fields does not contain all old fields", RestStatus.BAD_REQUEST); - } - } - - private boolean isEndpointChanged(final UpdateDatasourceRequest request, final Datasource datasource) { - return request.getEndpoint() != null && request.getEndpoint().equals(datasource.getEndpoint()) == false; - } - - /** - * Update interval is changed as long as user provide one because - * start time will get updated even if the update interval is same as current one. 
- * - * @param request the update datasource request - * @return true if update interval is changed, and false otherwise - */ - private boolean isUpdateIntervalChanged(final UpdateDatasourceRequest request) { - return request.getUpdateInterval() != null; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java deleted file mode 100644 index 1417c8a36..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceManifest.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ -package org.opensearch.securityanalytics.threatIntel.common; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.net.URL; -import java.net.URLConnection; -import java.nio.CharBuffer; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Locale; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.SpecialPermission; -import org.opensearch.Version; -import org.opensearch.common.SuppressForbidden; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.ParseField; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.ConstructingObjectParser; -import org.opensearch.core.xcontent.DeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.util.SecurityAnalyticsException; - -/** - * Threat intel datasource manifest file object - * - * Manifest file is stored in an external endpoint. OpenSearch read the file and store values it in this object. 
- */ -public class DatasourceManifest { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - private static final ParseField URL_FIELD = new ParseField("url"); //url for csv threat intel feed - private static final ParseField DB_NAME_FIELD = new ParseField("db_name"); // name of the db (csv file for now) - private static final ParseField SHA256_HASH_FIELD = new ParseField("sha256_hash"); //not using for now - private static final ParseField ORGANIZATION_FIELD = new ParseField("organization"); //not using for now - private static final ParseField DESCRIPTION_FIELD = new ParseField("description"); //not using for now - private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at_in_epoch_milli"); //not using for now - - /** - * @param url URL of a ZIP file containing a database - * @return URL of a ZIP file containing a database - */ - private String url; - - /** - * @param dbName A database file name inside the ZIP file - * @return A database file name inside the ZIP file - */ - private String dbName; - /** - * @param sha256Hash SHA256 hash value of a database file - * @return SHA256 hash value of a database file - */ - private String sha256Hash; - - /** - * @param organization A database organization name - * @return A database organization name - */ - private String organization; - /** - * @param description A description of the database - * @return A description of a database - */ - private String description; - /** - * @param updatedAt A date when the database was updated - * @return A date when the database was updated - */ - private Long updatedAt; - - public String getUrl() { - return this.url; - } - public String getDbName() { - return dbName; - } - - public String getOrganization() { - return organization; - } - - public String getSha256Hash() { - return sha256Hash; - } - - public String getDescription() { - return description; - } - - public Long getUpdatedAt() { - return updatedAt; - } - - public 
DatasourceManifest(final String url, final String dbName) { - this.url = url; - this.dbName = dbName; - } - - /** - * Datasource manifest parser - */ - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datasource_manifest", - true, - args -> { - String url = (String) args[0]; - String dbName = (String) args[1]; - return new DatasourceManifest(url, dbName); - } - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), URL_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DB_NAME_FIELD); - } - - /** - * Datasource manifest builder - */ - public static class Builder { - private static final int MANIFEST_FILE_MAX_BYTES = 1024 * 8; - - /** - * Build DatasourceManifest from a given url - * - * @param url url to downloads a manifest file - * @return DatasourceManifest representing the manifest file - */ - @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") // change permissions - public static DatasourceManifest build(final URL url) { - SpecialPermission.check(); - return AccessController.doPrivileged((PrivilegedAction) () -> { - try { - URLConnection connection = url.openConnection(); - return internalBuild(connection); - } catch (IOException e) { - log.error("Runtime exception connecting to the manifest file", e); - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO - } - }); - } - - @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") - protected static DatasourceManifest internalBuild(final URLConnection connection) throws IOException { - connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); - InputStreamReader inputStreamReader = new InputStreamReader(connection.getInputStream()); - try (BufferedReader reader = new BufferedReader(inputStreamReader)) { - CharBuffer charBuffer = CharBuffer.allocate(MANIFEST_FILE_MAX_BYTES); - 
reader.read(charBuffer); - charBuffer.flip(); - XContentParser parser = JsonXContent.jsonXContent.createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.IGNORE_DEPRECATIONS, - charBuffer.toString() - ); - return PARSER.parse(parser, null); - } - } - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java deleted file mode 100644 index a516b1d34..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/common/DatasourceState.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.common; - -/** - * Threat intel datasource state - * - * When data source is created, it starts with CREATING state. Once the first threat intel feed is generated, the state changes to AVAILABLE. - * Only when the first threat intel feed generation failed, the state changes to CREATE_FAILED. - * Subsequent threat intel feed failure won't change data source state from AVAILABLE to CREATE_FAILED. - * When delete request is received, the data source state changes to DELETING. 
- * - * State changed from left to right for the entire lifecycle of a datasource - * (CREATING) to (CREATE_FAILED or AVAILABLE) to (DELETING) - * - */ -public enum DatasourceState { - /** - * Data source is being created - */ - CREATING, - /** - * Data source is ready to be used - */ - AVAILABLE, - /** - * Data source creation failed - */ - CREATE_FAILED, - /** - * Data source is being deleted - */ - DELETING -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java index 13276975c..25e40837c 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java +++ b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java @@ -25,7 +25,7 @@ public class ParameterValidator { * @param datasourceName datasource name * @return Error messages. Empty list if there is no violation. */ - public List validateDatasourceName(final String datasourceName) { + public List validateTIFJobName(final String datasourceName) { List errorMsgs = new ArrayList<>(); if (StringUtils.isBlank(datasourceName)) { errorMsgs.add("datasource name must not be empty"); diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java deleted file mode 100644 index 1d649e0b6..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ThreatIntelSettings.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.common; - -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.List; - -import org.apache.logging.log4j.LogManager; -import 
org.apache.logging.log4j.Logger; -import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.securityanalytics.model.DetectorTrigger; - -/** - * Settings for threat intel datasource operations - */ -public class ThreatIntelSettings { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - - /** - * Default endpoint to be used in threat intel feed datasource creation API - */ - public static final Setting DATASOURCE_ENDPOINT = Setting.simpleString( - "plugins.security_analytics.threatintel.datasource.endpoint", - "https://geoip.maps.opensearch.org/v1/geolite2-city/manifest.json", //TODO fix this endpoint - new DatasourceEndpointValidator(), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - /** - * Default update interval to be used in threat intel datasource creation API - */ - public static final Setting DATASOURCE_UPDATE_INTERVAL = Setting.longSetting( - "plugins.security_analytics.threatintel.datasource.update_interval_in_days", - 3l, - 1l, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - /** - * Bulk size for indexing threat intel feed data - */ - public static final Setting BATCH_SIZE = Setting.intSetting( - "plugins.security_analytics.threatintel.datasource.batch_size", - 10000, - 1, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - /** - * Timeout value for threat intel processor - */ - public static final Setting THREAT_INTEL_TIMEOUT = Setting.timeSetting( - "plugins.security_analytics.threat_intel_timeout", - TimeValue.timeValueSeconds(30), - TimeValue.timeValueSeconds(1), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - /** - * Max size for threat intel feed cache - */ - public static final Setting CACHE_SIZE = Setting.longSetting( - "plugins.security_analytics.threatintel.processor.cache_size", - 1000, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - /** - * Return all settings of 
threat intel feature - * @return a list of all settings for threat intel feature - */ - public static final List> settings() { - return List.of(DATASOURCE_ENDPOINT, DATASOURCE_UPDATE_INTERVAL, BATCH_SIZE, THREAT_INTEL_TIMEOUT); - } - - /** - * Visible for testing - */ - protected static class DatasourceEndpointValidator implements Setting.Validator { - @Override - public void validate(final String value) { - try { - new URL(value).toURI(); - } catch (MalformedURLException | URISyntaxException e) { - log.error("Invalid URL format is provided", e); - throw new IllegalArgumentException("Invalid URL format is provided"); - } - } - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java deleted file mode 100644 index 8de306d33..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceRunner.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.jobscheduler.spi.JobExecutionContext; -import org.opensearch.jobscheduler.spi.LockModel; -import org.opensearch.jobscheduler.spi.ScheduledJobParameter; -import org.opensearch.jobscheduler.spi.ScheduledJobRunner; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.model.DetectorTrigger; - -import java.io.IOException; -import java.time.temporal.ChronoUnit; -import java.util.Optional; -import java.util.concurrent.atomic.AtomicReference; -import java.time.Instant; - -import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; -import 
org.opensearch.securityanalytics.threatIntel.common.ThreatIntelExecutor; -import org.opensearch.securityanalytics.threatIntel.common.ThreatIntelLockService; -import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; -/** - * Datasource update task - * - * This is a background task which is responsible for updating threat intel feed data - */ -public class DatasourceRunner implements ScheduledJobRunner { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - private static DatasourceRunner INSTANCE; - - public static DatasourceRunner getJobRunnerInstance() { - if (INSTANCE != null) { - return INSTANCE; - } - synchronized (DatasourceRunner.class) { - if (INSTANCE != null) { - return INSTANCE; - } - INSTANCE = new DatasourceRunner(); - return INSTANCE; - } - } - - private ClusterService clusterService; - - // threat intel specific variables - private DatasourceUpdateService datasourceUpdateService; - private DatasourceDao datasourceDao; - private ThreatIntelExecutor threatIntelExecutor; - private ThreatIntelLockService lockService; - private boolean initialized; - - private DatasourceRunner() { - // Singleton class, use getJobRunner method instead of constructor - } - - public void initialize( - final ClusterService clusterService, - final DatasourceUpdateService datasourceUpdateService, - final DatasourceDao datasourceDao, - final ThreatIntelExecutor threatIntelExecutor, - final ThreatIntelLockService threatIntelLockService - ) { - this.clusterService = clusterService; - this.datasourceUpdateService = datasourceUpdateService; - this.datasourceDao = datasourceDao; - this.threatIntelExecutor = threatIntelExecutor; - this.lockService = threatIntelLockService; - this.initialized = true; - } - - @Override - public void runJob(final ScheduledJobParameter jobParameter, final JobExecutionContext context) { - if (initialized == false) { - throw new AssertionError("this instance is not initialized"); - } - - log.info("Update job started 
for a datasource[{}]", jobParameter.getName()); - if (jobParameter instanceof Datasource == false) { - log.error("Illegal state exception: job parameter is not instance of Datasource"); - throw new IllegalStateException( - "job parameter is not instance of Datasource, type: " + jobParameter.getClass().getCanonicalName() - ); - } - threatIntelExecutor.forDatasourceUpdate().submit(updateDatasourceRunner(jobParameter)); - } - - /** - * Update threat intel feed data - * - * Lock is used so that only one of nodes run this task. - * - * @param jobParameter job parameter - */ - protected Runnable updateDatasourceRunner(final ScheduledJobParameter jobParameter) { - return () -> { - Optional lockModel = lockService.acquireLock( - jobParameter.getName(), - ThreatIntelLockService.LOCK_DURATION_IN_SECONDS - ); - if (lockModel.isEmpty()) { - log.error("Failed to update. Another processor is holding a lock for datasource[{}]", jobParameter.getName()); - return; - } - - LockModel lock = lockModel.get(); - try { - updateDatasource(jobParameter, lockService.getRenewLockRunnable(new AtomicReference<>(lock))); - } catch (Exception e) { - log.error("Failed to update datasource[{}]", jobParameter.getName(), e); - } finally { - lockService.releaseLock(lock); - } - }; - } - - protected void updateDatasource(final ScheduledJobParameter jobParameter, final Runnable renewLock) throws IOException { - Datasource datasource = datasourceDao.getDatasource(jobParameter.getName()); - /** - * If delete request comes while update task is waiting on a queue for other update tasks to complete, - * because update task for this datasource didn't acquire a lock yet, delete request is processed. - * When it is this datasource's turn to run, it will find that the datasource is deleted already. - * Therefore, we stop the update process when data source does not exist. 
- */ - if (datasource == null) { - log.info("Datasource[{}] does not exist", jobParameter.getName()); - return; - } - - if (DatasourceState.AVAILABLE.equals(datasource.getState()) == false) { - log.error("Invalid datasource state. Expecting {} but received {}", DatasourceState.AVAILABLE, datasource.getState()); - datasource.disable(); - datasource.getUpdateStats().setLastFailedAt(Instant.now()); - datasourceDao.updateDatasource(datasource); - return; - } - try { - datasourceUpdateService.deleteUnusedIndices(datasource); - if (DatasourceTask.DELETE_UNUSED_INDICES.equals(datasource.getTask()) == false) { - datasourceUpdateService.updateOrCreateThreatIntelFeedData(datasource, renewLock); - } - datasourceUpdateService.deleteUnusedIndices(datasource); - } catch (Exception e) { - log.error("Failed to update datasource for {}", datasource.getName(), e); - datasource.getUpdateStats().setLastFailedAt(Instant.now()); - datasourceDao.updateDatasource(datasource); - } finally { //post processing - datasourceUpdateService.updateDatasource(datasource, datasource.getSchedule(), DatasourceTask.ALL); - } - } - -} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java b/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java deleted file mode 100644 index 5a24c5a84..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/jobscheduler/DatasourceUpdateService.java +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import java.io.IOException; -import java.net.URL; -import java.time.Duration; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import 
java.util.UUID; -import java.util.stream.Collectors; - -import org.apache.commons.csv.CSVParser; -import org.apache.commons.csv.CSVRecord; -import org.opensearch.OpenSearchException; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.ClusterSettings; - -import org.opensearch.core.rest.RestStatus; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceManifest; -import org.opensearch.securityanalytics.threatIntel.dao.DatasourceDao; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; -import org.opensearch.securityanalytics.threatIntel.common.DatasourceState; -import org.opensearch.securityanalytics.util.SecurityAnalyticsException; - -public class DatasourceUpdateService { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - private static final int SLEEP_TIME_IN_MILLIS = 5000; // 5 seconds - private static final int MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS = 10 * 60 * 60 * 1000; // 10 hours - private final ClusterService clusterService; - private final ClusterSettings clusterSettings; - private final DatasourceDao datasourceDao; - private final ThreatIntelFeedDataService threatIntelFeedDataService; - - public DatasourceUpdateService( - final ClusterService clusterService, - final DatasourceDao datasourceDao, - final ThreatIntelFeedDataService threatIntelFeedDataService - ) { - this.clusterService = clusterService; - this.clusterSettings = clusterService.getClusterSettings(); - this.datasourceDao = datasourceDao; - this.threatIntelFeedDataService = threatIntelFeedDataService; - } - - /** - * Update threat intel feed data - * - * The first column is ip range field regardless its header name. - * Therefore, we don't store the first column's header name. 
- * - * @param datasource the datasource - * @param renewLock runnable to renew lock - * - * @throws IOException - */ - public void updateOrCreateThreatIntelFeedData(final Datasource datasource, final Runnable renewLock) throws IOException { - URL url = new URL(datasource.getEndpoint()); - DatasourceManifest manifest = DatasourceManifest.Builder.build(url); - - if (shouldUpdate(datasource, manifest) == false) { - log.info("Skipping threat intel feed database update. Update is not required for {}", datasource.getName()); - datasource.getUpdateStats().setLastSkippedAt(Instant.now()); - datasourceDao.updateDatasource(datasource); - return; - } - - Instant startTime = Instant.now(); - String indexName = setupIndex(datasource); - String[] header; - List fieldsToStore; - try (CSVParser reader = threatIntelFeedDataService.getDatabaseReader(manifest)) { - CSVRecord headerLine = reader.iterator().next(); - header = validateHeader(headerLine).values(); - fieldsToStore = Arrays.asList(header).subList(1, header.length); - if (datasource.isCompatible(fieldsToStore) == false) { - log.error("Exception: new fields does not contain all old fields"); - throw new OpenSearchException( - "new fields [{}] does not contain all old fields [{}]", - fieldsToStore.toString(), - datasource.getDatabase().getFields().toString() - ); - } - threatIntelFeedDataService.saveThreatIntelFeedData(indexName, header, reader.iterator(), renewLock); - } - - waitUntilAllShardsStarted(indexName, MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS); - Instant endTime = Instant.now(); - updateDatasourceAsSucceeded(indexName, datasource, manifest, fieldsToStore, startTime, endTime); // then I update the datasource - } - - - /** - * We wait until all shards are ready to serve search requests before updating datasource metadata to - * point to a new index so that there won't be latency degradation during threat intel feed data update - * - * @param indexName the indexName - */ - protected void 
waitUntilAllShardsStarted(final String indexName, final int timeout) { - Instant start = Instant.now(); - try { - while (Instant.now().toEpochMilli() - start.toEpochMilli() < timeout) { - if (clusterService.state().routingTable().allShards(indexName).stream().allMatch(shard -> shard.started())) { - return; - } - Thread.sleep(SLEEP_TIME_IN_MILLIS); - } - throw new OpenSearchException( - "index[{}] replication did not complete after {} millis", - MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS - ); - } catch (InterruptedException e) { - log.error("runtime exception", e); - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO - } - } - - /** - * Return header fields of threat intel feed data with given url of a manifest file - * - * The first column is ip range field regardless its header name. - * Therefore, we don't store the first column's header name. - * - * @param manifestUrl the url of a manifest file - * @return header fields of threat intel feed - */ - public List getHeaderFields(String manifestUrl) throws IOException { - URL url = new URL(manifestUrl); - DatasourceManifest manifest = DatasourceManifest.Builder.build(url); - - try (CSVParser reader = threatIntelFeedDataService.getDatabaseReader(manifest)) { - String[] fields = reader.iterator().next().values(); - return Arrays.asList(fields).subList(1, fields.length); - } - } - - /** - * Delete all indices except the one which are being used - * - * @param datasource - */ - public void deleteUnusedIndices(final Datasource datasource) { - try { - List indicesToDelete = datasource.getIndices() - .stream() - .filter(index -> index.equals(datasource.currentIndexName()) == false) - .collect(Collectors.toList()); - - List deletedIndices = deleteIndices(indicesToDelete); - - if (deletedIndices.isEmpty() == false) { - datasource.getIndices().removeAll(deletedIndices); - datasourceDao.updateDatasource(datasource); - } - } catch (Exception e) { - log.error("Failed 
to delete old indices for {}", datasource.getName(), e); - } - } - - /** - * Update datasource with given systemSchedule and task - * - * @param datasource datasource to update - * @param systemSchedule new system schedule value - * @param task new task value - */ - public void updateDatasource(final Datasource datasource, final IntervalSchedule systemSchedule, final DatasourceTask task) { - boolean updated = false; - if (datasource.getSchedule().equals(systemSchedule) == false) { - datasource.setSchedule(systemSchedule); - updated = true; - } - - if (datasource.getTask().equals(task) == false) { - datasource.setTask(task); - updated = true; - } - - if (updated) { - datasourceDao.updateDatasource(datasource); - } - } - - private List deleteIndices(final List indicesToDelete) { - List deletedIndices = new ArrayList<>(indicesToDelete.size()); - for (String index : indicesToDelete) { - if (clusterService.state().metadata().hasIndex(index) == false) { - deletedIndices.add(index); - continue; - } - - try { - threatIntelFeedDataService.deleteThreatIntelDataIndex(index); - deletedIndices.add(index); - } catch (Exception e) { - log.error("Failed to delete an index [{}]", index, e); - } - } - return deletedIndices; - } - - /** - * Validate header - * - * 1. header should not be null - * 2. 
the number of values in header should be more than one - * - * @param header the header - * @return CSVRecord the input header - */ - private CSVRecord validateHeader(CSVRecord header) { - if (header == null) { - throw new OpenSearchException("threat intel feed database is empty"); - } - if (header.values().length < 2) { - throw new OpenSearchException("threat intel feed database should have at least two fields"); - } - return header; - } - - /*** - * Update datasource as succeeded - * - * @param manifest the manifest - * @param datasource the datasource - */ - private void updateDatasourceAsSucceeded( - final String newIndexName, - final Datasource datasource, - final DatasourceManifest manifest, - final List fields, - final Instant startTime, - final Instant endTime - ) { - datasource.setCurrentIndex(newIndexName); - datasource.setDatabase(manifest, fields); - datasource.getUpdateStats().setLastSucceededAt(endTime); - datasource.getUpdateStats().setLastProcessingTimeInMillis(endTime.toEpochMilli() - startTime.toEpochMilli()); - datasource.enable(); - datasource.setState(DatasourceState.AVAILABLE); - datasourceDao.updateDatasource(datasource); - log.info( - "threat intel feed database creation succeeded for {} and took {} seconds", - datasource.getName(), - Duration.between(startTime, endTime) - ); - } - - /*** - * Setup index to add a new threat intel feed data - * - * @param datasource the datasource - * @return new index name - */ - private String setupIndex(final Datasource datasource) { - String indexName = datasource.newIndexName(UUID.randomUUID().toString()); - datasource.getIndices().add(indexName); - datasourceDao.updateDatasource(datasource); - threatIntelFeedDataService.createIndexIfNotExists(indexName); - return indexName; - } - - /** - * Determine if update is needed or not - * - * Update is needed when all following conditions are met - * 1. updatedAt value in datasource is equal or before updateAt value in manifest - * 2. 
SHA256 hash value in datasource is different with SHA256 hash value in manifest - * - * @param datasource - * @param manifest - * @return - */ - private boolean shouldUpdate(final Datasource datasource, final DatasourceManifest manifest) { - if (datasource.getDatabase().getUpdatedAt() != null - && datasource.getDatabase().getUpdatedAt().toEpochMilli() > manifest.getUpdatedAt()) { - return false; - } - -// if (manifest.getSha256Hash().equals(datasource.getDatabase().getSha256Hash())) { -// return false; -// } - return true; - } -} diff --git a/src/main/resources/mappings/threat_intel_job_mapping.json b/src/main/resources/mappings/threat_intel_job_mapping.json new file mode 100644 index 000000000..5e039928d --- /dev/null +++ b/src/main/resources/mappings/threat_intel_job_mapping.json @@ -0,0 +1,118 @@ +{ + "properties": { + "database": { + "properties": { + "feed_id": { + "type": "text" + }, + "feed_name": { + "type": "text" + }, + "feed_format": { + "type": "text" + }, + "endpoint": { + "type": "text" + }, + "description": { + "type": "text" + }, + "organization": { + "type": "text" + }, + "contained_iocs_field": { + "type": "text" + }, + "ioc_col": { + "type": "text" + }, + "fields": { + "type": "text" + } + } + }, + "enabled_time": { + "type": "long" + }, + "indices": { + "type": "text" + }, + "last_update_time": { + "type": "long" + }, + "name": { + "type": "text" + }, + "schedule": { + "properties": { + "interval": { + "properties": { + "period": { + "type": "long" + }, + "start_time": { + "type": "long" + }, + "unit": { + "type": "text" + } + } + } + } + }, + "state": { + "type": "text" + }, + "task": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "update_enabled": { + "type": "boolean" + }, + "update_stats": { + "properties": { + "last_failed_at_in_epoch_millis": { + "type": "long" + }, + "last_processing_time_in_millis": { + "type": "long" + }, + "last_skipped_at_in_epoch_millis": { + "type": "long" 
+ }, + "last_succeeded_at_in_epoch_millis": { + "type": "long" + } + } + }, + "user_schedule": { + "properties": { + "interval": { + "properties": { + "period": { + "type": "long" + }, + "start_time": { + "type": "long" + }, + "unit": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/src/main/resources/threatIntelFeedInfo/feodo.yml b/src/main/resources/threatIntelFeedInfo/feodo.yml new file mode 100644 index 000000000..4acbf40e4 --- /dev/null +++ b/src/main/resources/threatIntelFeedInfo/feodo.yml @@ -0,0 +1,6 @@ +url: "https://feodotracker.abuse.ch/downloads/ipblocklist_aggressive.csv" +name: "ipblocklist_aggressive.csv" +feedFormat: "csv" +org: "Feodo" +iocTypes: ["ip"] +description: "" \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java new file mode 100644 index 000000000..c637b448a --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java @@ -0,0 +1,287 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel; + +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Locale; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +import org.junit.After; +import org.junit.Before; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionType; +import 
org.opensearch.action.support.ActionFilters; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.routing.RoutingTable; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Randomness; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.ingest.IngestMetadata; +import org.opensearch.ingest.IngestService; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.jobscheduler.spi.utils.LockService; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFExecutor; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobTask; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskListener; +import org.opensearch.test.client.NoOpNodeClient; +import org.opensearch.test.rest.RestActionTestCase; +import org.opensearch.threadpool.ThreadPool; + +public abstract class ThreatIntelTestCase extends RestActionTestCase { + @Mock + protected ClusterService clusterService; + @Mock + protected TIFJobUpdateService tifJobUpdateService; + @Mock + protected TIFJobParameterService tifJobParameterService; + @Mock + protected TIFExecutor 
threatIntelExecutor; + @Mock + protected ThreatIntelFeedDataService threatIntelFeedDataService; + @Mock + protected ClusterState clusterState; + @Mock + protected Metadata metadata; + @Mock + protected IngestService ingestService; + @Mock + protected ActionFilters actionFilters; + @Mock + protected ThreadPool threadPool; + @Mock + protected TIFLockService threatIntelLockService; + @Mock + protected RoutingTable routingTable; + protected IngestMetadata ingestMetadata; + protected NoOpNodeClient client; + protected VerifyingClient verifyingClient; + protected LockService lockService; + protected ClusterSettings clusterSettings; + protected Settings settings; + private AutoCloseable openMocks; + + @Before + public void prepareThreatIntelTestCase() { + openMocks = MockitoAnnotations.openMocks(this); + settings = Settings.EMPTY; + client = new NoOpNodeClient(this.getTestName()); + verifyingClient = spy(new VerifyingClient(this.getTestName())); + clusterSettings = new ClusterSettings(settings, new HashSet<>(SecurityAnalyticsSettings.settings())); + lockService = new LockService(client, clusterService); + ingestMetadata = new IngestMetadata(Collections.emptyMap()); + when(metadata.custom(IngestMetadata.TYPE)).thenReturn(ingestMetadata); + when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.state()).thenReturn(clusterState); + when(clusterState.metadata()).thenReturn(metadata); + when(clusterState.getMetadata()).thenReturn(metadata); + when(clusterState.routingTable()).thenReturn(routingTable); + when(ingestService.getClusterService()).thenReturn(clusterService); + when(threadPool.generic()).thenReturn(OpenSearchExecutors.newDirectExecutorService()); + } + + @After + public void clean() throws Exception { + openMocks.close(); + client.close(); + verifyingClient.close(); + } + + protected TIFJobState randomStateExcept(TIFJobState state) { + assertNotNull(state); + 
return Arrays.stream(TIFJobState.values()) + .sequential() + .filter(s -> !s.equals(state)) + .collect(Collectors.toList()) + .get(Randomness.createSecure().nextInt(TIFJobState.values().length - 2)); + } + + protected TIFJobState randomState() { + return Arrays.stream(TIFJobState.values()) + .sequential() + .collect(Collectors.toList()) + .get(Randomness.createSecure().nextInt(TIFJobState.values().length - 1)); + } + + protected TIFJobTask randomTask() { + return Arrays.stream(TIFJobTask.values()) + .sequential() + .collect(Collectors.toList()) + .get(Randomness.createSecure().nextInt(TIFJobTask.values().length - 1)); + } + + protected String randomIpAddress() { + return String.format( + Locale.ROOT, + "%d.%d.%d.%d", + Randomness.get().nextInt(255), + Randomness.get().nextInt(255), + Randomness.get().nextInt(255), + Randomness.get().nextInt(255) + ); + } + + protected long randomPositiveLong() { + long value = Randomness.get().nextLong(); + return value < 0 ? -value : value; + } + + /** + * Update interval should be > 0 and < validForInDays. + * For an update test to work, there should be at least one eligible value other than current update interval. + * Therefore, the smallest value for validForInDays is 2. + * Update interval is random value from 1 to validForInDays - 2. + * The new update value will be validForInDays - 1. 
+ */ + protected TIFJobParameter randomTifJobParameter(final Instant updateStartTime) { + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + TIFJobParameter tifJobParameter = new TIFJobParameter(); + tifJobParameter.setName(ThreatIntelTestHelper.randomLowerCaseString()); + tifJobParameter.setSchedule( + new IntervalSchedule( + updateStartTime.truncatedTo(ChronoUnit.MILLIS), + 1, + ChronoUnit.DAYS + ) + ); + tifJobParameter.setTask(randomTask()); + tifJobParameter.setState(randomState()); + tifJobParameter.setCurrentIndex(tifJobParameter.newIndexName(UUID.randomUUID().toString())); + tifJobParameter.setIndices(Arrays.asList(ThreatIntelTestHelper.randomLowerCaseString(), ThreatIntelTestHelper.randomLowerCaseString())); + tifJobParameter.getUpdateStats().setLastSkippedAt(now); + tifJobParameter.getUpdateStats().setLastSucceededAt(now); + tifJobParameter.getUpdateStats().setLastFailedAt(now); + tifJobParameter.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); + tifJobParameter.setLastUpdateTime(now); + if (Randomness.get().nextInt() % 2 == 0) { + tifJobParameter.enable(); + } else { + tifJobParameter.disable(); + } + return tifJobParameter; + } + + protected TIFJobParameter randomTifJobParameter() { + return randomTifJobParameter(Instant.now()); + } + + protected LockModel randomLockModel() { + LockModel lockModel = new LockModel( + ThreatIntelTestHelper.randomLowerCaseString(), + ThreatIntelTestHelper.randomLowerCaseString(), + Instant.now(), + randomPositiveLong(), + false + ); + return lockModel; + } + + /** + * Temporary class of VerifyingClient until this PR(https://github.com/opensearch-project/OpenSearch/pull/7167) + * is merged in OpenSearch core + */ + public static class VerifyingClient extends NoOpNodeClient { + AtomicReference executeVerifier = new AtomicReference<>(); + AtomicReference executeLocallyVerifier = new AtomicReference<>(); + + public VerifyingClient(String testName) { + super(testName); + reset(); + } + + /** + * 
Clears any previously set verifier functions set by {@link #setExecuteVerifier(BiFunction)} and/or + * {@link #setExecuteLocallyVerifier(BiFunction)}. These functions are replaced with functions which will throw an + * {@link AssertionError} if called. + */ + public void reset() { + executeVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); + executeLocallyVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); + } + + /** + * Sets the function that will be called when {@link #doExecute(ActionType, ActionRequest, ActionListener)} is called. The given + * function should return either a subclass of {@link ActionResponse} or {@code null}. + * @param verifier A function which is called in place of {@link #doExecute(ActionType, ActionRequest, ActionListener)} + */ + public void setExecuteVerifier( + BiFunction, Request, Response> verifier + ) { + executeVerifier.set(verifier); + } + + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + try { + listener.onResponse((Response) executeVerifier.get().apply(action, request)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Sets the function that will be called when {@link #executeLocally(ActionType, ActionRequest, TaskListener)}is called. The given + * function should return either a subclass of {@link ActionResponse} or {@code null}. 
+ * @param verifier A function which is called in place of {@link #executeLocally(ActionType, ActionRequest, TaskListener)} + */ + public void setExecuteLocallyVerifier( + BiFunction, Request, Response> verifier + ) { + executeLocallyVerifier.set(verifier); + } + + @Override + public Task executeLocally( + ActionType action, + Request request, + ActionListener listener + ) { + listener.onResponse((Response) executeLocallyVerifier.get().apply(action, request)); + return null; + } + + @Override + public Task executeLocally( + ActionType action, + Request request, + TaskListener listener + ) { + listener.onResponse(null, (Response) executeLocallyVerifier.get().apply(action, request)); + return null; + } + + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java new file mode 100644 index 000000000..73522053f --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java @@ -0,0 +1,120 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.securityanalytics.threatIntel; + +import static org.apache.lucene.tests.util.LuceneTestCase.random; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.opensearch.test.OpenSearchTestCase.randomBoolean; +import static org.opensearch.test.OpenSearchTestCase.randomIntBetween; +import static org.opensearch.test.OpenSearchTestCase.randomNonNegativeLong; + +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.stream.IntStream; + + +import org.opensearch.OpenSearchException; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.bulk.BulkItemResponse; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.support.replication.ReplicationResponse; +import org.opensearch.common.Randomness; +import org.opensearch.common.UUIDs; +import org.opensearch.common.collect.Tuple; +import org.opensearch.core.index.shard.ShardId; + +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.RandomObjects; + +public class ThreatIntelTestHelper { + + public static final int MAX_SEQ_NO = 10000; + public static final int MAX_PRIMARY_TERM = 10000; + public static final int MAX_VERSION = 10000; + public static final int MAX_SHARD_ID = 100; + + public static final int RANDOM_STRING_MIN_LENGTH = 2; + public static final int RANDOM_STRING_MAX_LENGTH = 16; + + private static String randomString() { + return OpenSearchTestCase.randomAlphaOfLengthBetween(RANDOM_STRING_MIN_LENGTH, RANDOM_STRING_MAX_LENGTH); + } + + public static String randomLowerCaseString() { + return randomString().toLowerCase(Locale.ROOT); + } + + public static List randomLowerCaseStringList() { + List stringList = new ArrayList<>(); + stringList.add(randomLowerCaseString()); + return stringList; + } + + /** + * Returns random {@link IndexResponse} by generating inputs using random 
functions. + * It is not guaranteed to generate every possible values, and it is not required since + * it is used by the unit test and will not be validated by the cluster. + */ + private static IndexResponse randomIndexResponse() { + String index = randomLowerCaseString(); + String indexUUid = UUIDs.randomBase64UUID(); + int shardId = randomIntBetween(0, MAX_SHARD_ID); + String id = UUIDs.randomBase64UUID(); + long seqNo = randomIntBetween(0, MAX_SEQ_NO); + long primaryTerm = randomIntBetween(0, MAX_PRIMARY_TERM); + long version = randomIntBetween(0, MAX_VERSION); + boolean created = randomBoolean(); + boolean forcedRefresh = randomBoolean(); + Tuple shardInfo = RandomObjects.randomShardInfo(random()); + IndexResponse actual = new IndexResponse(new ShardId(index, indexUUid, shardId), id, seqNo, primaryTerm, version, created); + actual.setForcedRefresh(forcedRefresh); + actual.setShardInfo(shardInfo.v1()); + + return actual; + } + + // Generate Random Bulk Response with noOfSuccessItems as BulkItemResponse, and include BulkItemResponse.Failure with + // random error message, if hasFailures is true. 
+ public static BulkResponse generateRandomBulkResponse(int noOfSuccessItems, boolean hasFailures) { + long took = randomNonNegativeLong(); + long ingestTook = randomNonNegativeLong(); + if (noOfSuccessItems < 1) { + return new BulkResponse(null, took, ingestTook); + } + List items = new ArrayList<>(); + IntStream.range(0, noOfSuccessItems) + .forEach(shardId -> items.add(new BulkItemResponse(shardId, DocWriteRequest.OpType.CREATE, randomIndexResponse()))); + if (hasFailures) { + final BulkItemResponse.Failure failedToIndex = new BulkItemResponse.Failure( + randomLowerCaseString(), + randomLowerCaseString(), + new OpenSearchException(randomLowerCaseString()) + ); + items.add(new BulkItemResponse(randomIntBetween(0, MAX_SHARD_ID), DocWriteRequest.OpType.CREATE, failedToIndex)); + } + return new BulkResponse(items.toArray(BulkItemResponse[]::new), took, ingestTook); + } + + public static StringBuilder buildFieldNameValuePair(Object field, Object value) { + StringBuilder builder = new StringBuilder(); + builder.append("\"").append(field).append("\":"); + if (!(value instanceof String)) { + return builder.append(value); + } + return builder.append("\"").append(value).append("\""); + } + +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java new file mode 100644 index 000000000..fc229c2e8 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java @@ -0,0 +1,35 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.threatIntel.common; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.net.URLConnection; + +import 
org.opensearch.common.SuppressForbidden; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; + +@SuppressForbidden(reason = "unit test") +public class TIFMetadataTests extends SecurityAnalyticsRestTestCase { + + public void testInternalBuild_whenCalled_thenCorrectUserAgentValueIsSet() throws IOException { + URLConnection connection = mock(URLConnection.class); + File manifestFile = new File(this.getClass().getClassLoader().getResource("threatIntel/manifest.json").getFile()); + when(connection.getInputStream()).thenReturn(new FileInputStream(manifestFile)); + + // Run + TIFMetadata manifest = TIFMetadata.Builder.internalBuild(connection); + + // Verify + verify(connection).addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + assertEquals("https://test.com/db.zip", manifest.getUrl()); + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java new file mode 100644 index 000000000..d9390af7a --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java @@ -0,0 +1,117 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +import static org.mockito.Mockito.mock; +import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.LOCK_DURATION_IN_SECONDS; +import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.RENEW_AFTER_IN_SECONDS; + +import java.time.Instant; +import java.util.concurrent.atomic.AtomicReference; + +import org.junit.Before; +import org.opensearch.action.DocWriteResponse; +import org.opensearch.action.update.UpdateRequest; +import org.opensearch.action.update.UpdateResponse; +import org.opensearch.core.action.ActionListener; +import 
org.opensearch.core.index.shard.ShardId; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; + +public class ThreatIntelLockServiceTests extends ThreatIntelTestCase { + private TIFLockService threatIntelLockService; + private TIFLockService noOpsLockService; + + @Before + public void init() { + threatIntelLockService = new TIFLockService(clusterService, verifyingClient); + noOpsLockService = new TIFLockService(clusterService, client); + } + + public void testAcquireLock_whenValidInput_thenSucceed() { + // Cannot test because LockService is final class + // Simply calling method to increase coverage + noOpsLockService.acquireLock(ThreatIntelTestHelper.randomLowerCaseString(), randomPositiveLong(), mock(ActionListener.class)); + } + + public void testAcquireLock_whenCalled_thenNotBlocked() { + long expectedDurationInMillis = 1000; + Instant before = Instant.now(); + assertTrue(threatIntelLockService.acquireLock(null, null).isEmpty()); + Instant after = Instant.now(); + assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); + } + + public void testReleaseLock_whenValidInput_thenSucceed() { + // Cannot test because LockService is final class + // Simply calling method to increase coverage + noOpsLockService.releaseLock(null); + } + + public void testRenewLock_whenCalled_thenNotBlocked() { + long expectedDurationInMillis = 1000; + Instant before = Instant.now(); + assertNull(threatIntelLockService.renewLock(null)); + Instant after = Instant.now(); + assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); + } + + public void testGetRenewLockRunnable_whenLockIsFresh_thenDoNotRenew() { + LockModel lockModel = new LockModel( + ThreatIntelTestHelper.randomLowerCaseString(), + ThreatIntelTestHelper.randomLowerCaseString(), + Instant.now(), + LOCK_DURATION_IN_SECONDS, + 
false + ); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof UpdateRequest); + return new UpdateResponse( + mock(ShardId.class), + ThreatIntelTestHelper.randomLowerCaseString(), + randomPositiveLong(), + randomPositiveLong(), + randomPositiveLong(), + DocWriteResponse.Result.UPDATED + ); + }); + + AtomicReference reference = new AtomicReference<>(lockModel); + threatIntelLockService.getRenewLockRunnable(reference).run(); + assertEquals(lockModel, reference.get()); + } + + public void testGetRenewLockRunnable_whenLockIsStale_thenRenew() { + LockModel lockModel = new LockModel( + ThreatIntelTestHelper.randomLowerCaseString(), + ThreatIntelTestHelper.randomLowerCaseString(), + Instant.now().minusSeconds(RENEW_AFTER_IN_SECONDS), + LOCK_DURATION_IN_SECONDS, + false + ); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof UpdateRequest); + return new UpdateResponse( + mock(ShardId.class), + ThreatIntelTestHelper.randomLowerCaseString(), + randomPositiveLong(), + randomPositiveLong(), + randomPositiveLong(), + DocWriteResponse.Result.UPDATED + ); + }); + + AtomicReference reference = new AtomicReference<>(lockModel); + threatIntelLockService.getRenewLockRunnable(reference).run(); + assertNotEquals(lockModel, reference.get()); + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java new file mode 100644 index 000000000..ab8520286 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java @@ -0,0 +1,56 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import static 
org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.jobscheduler.spi.JobDocVersion; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; +public class TIFJobExtensionTests extends ThreatIntelTestCase { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + public void testBasic() { + TIFJobExtension extension = new TIFJobExtension(); + assertEquals("scheduler_sap_threatintel_job", extension.getJobType()); + assertEquals(JOB_INDEX_NAME, extension.getJobIndex()); + assertEquals(TIFJobRunner.getJobRunnerInstance(), extension.getJobRunner()); + } + + public void testParser() throws Exception { + TIFJobExtension extension = new TIFJobExtension(); + String id = ThreatIntelTestHelper.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); + + TIFJobParameter anotherTifJobParameter = (TIFJobParameter) extension.getJobParser() + .parse( + createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), + ThreatIntelTestHelper.randomLowerCaseString(), + new JobDocVersion(randomPositiveLong(), randomPositiveLong(), randomPositiveLong()) + ); + log.info("first"); + log.error(tifJobParameter); + log.error(tifJobParameter.getName()); + log.error(tifJobParameter.getCurrentIndex()); + log.info("second"); + log.error(anotherTifJobParameter); + log.error(anotherTifJobParameter.getName()); + 
log.error(anotherTifJobParameter.getCurrentIndex()); + + //same values but technically diff indices + + assertTrue(tifJobParameter.equals(anotherTifJobParameter)); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java new file mode 100644 index 000000000..148d16e93 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java @@ -0,0 +1,385 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.List; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.opensearch.ResourceAlreadyExistsException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.StepListener; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.delete.DeleteRequest; +import org.opensearch.action.delete.DeleteResponse; +import org.opensearch.action.get.GetRequest; +import org.opensearch.action.get.GetResponse; +import org.opensearch.action.get.MultiGetItemResponse; +import org.opensearch.action.get.MultiGetRequest; +import org.opensearch.action.get.MultiGetResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.search.SearchRequest; +import 
org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.cluster.routing.Preference; +import org.opensearch.common.Randomness; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; + +public class TIFJobParameterServiceTests extends ThreatIntelTestCase { + private TIFJobParameterService tifJobParameterService; + + @Before + public void init() { + tifJobParameterService = new TIFJobParameterService(verifyingClient, clusterService); + } + + public void testCreateIndexIfNotExists_whenIndexExist_thenCreateRequestIsNotCalled() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(true); + + // Verify + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException("Shouldn't get called"); }); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testCreateIndexIfNotExists_whenIndexExist_thenCreateRequestIsCalled() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + + // Verify + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof CreateIndexRequest); + CreateIndexRequest request = (CreateIndexRequest) actionRequest; + assertEquals(TIFJobExtension.JOB_INDEX_NAME, 
request.index()); + assertEquals("1", request.settings().get("index.number_of_shards")); + assertEquals("0-all", request.settings().get("index.auto_expand_replicas")); + assertEquals("true", request.settings().get("index.hidden")); + assertNotNull(request.mappings()); + return null; + }); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testCreateIndexIfNotExists_whenIndexCreatedAlready_thenExceptionIsIgnored() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + verifyingClient.setExecuteVerifier( + (actionResponse, actionRequest) -> { throw new ResourceAlreadyExistsException(TIFJobExtension.JOB_INDEX_NAME); } + ); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testCreateIndexIfNotExists_whenExceptionIsThrown_thenExceptionIsThrown() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException(); }); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + expectThrows(RuntimeException.class, () -> stepListener.result()); + } + + public void testUpdateTIFJobParameter_whenValidInput_thenSucceed() throws Exception { + String tifJobName = ThreatIntelTestHelper.randomLowerCaseString(); + TIFJobParameter tifJobParameter = new TIFJobParameter( + tifJobName, + new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS) + ); + Instant previousTime = Instant.now().minusMillis(1); + tifJobParameter.setLastUpdateTime(previousTime); + + 
verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof IndexRequest); + IndexRequest request = (IndexRequest) actionRequest; + assertEquals(tifJobParameter.getName(), request.id()); + assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); + return null; + }); + + tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); + assertTrue(previousTime.isBefore(tifJobParameter.getLastUpdateTime())); + } + + public void testPutTifJobParameter_whenValidInput_thenSucceed() { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + Instant previousTime = Instant.now().minusMillis(1); + tifJobParameter.setLastUpdateTime(previousTime); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof IndexRequest); + IndexRequest indexRequest = (IndexRequest) actionRequest; + assertEquals(TIFJobExtension.JOB_INDEX_NAME, indexRequest.index()); + assertEquals(tifJobParameter.getName(), indexRequest.id()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, indexRequest.getRefreshPolicy()); + assertEquals(DocWriteRequest.OpType.CREATE, indexRequest.opType()); + return null; + }); + + tifJobParameterService.putTIFJobParameter(tifJobParameter, mock(ActionListener.class)); + assertTrue(previousTime.isBefore(tifJobParameter.getLastUpdateTime())); + } + + public void testGetTifJobParameter_whenException_thenNull() throws Exception { + TIFJobParameter tifJobParameter = setupClientForGetRequest(true, new IndexNotFoundException(TIFJobExtension.JOB_INDEX_NAME)); + assertNull(tifJobParameterService.getJobParameter(tifJobParameter.getName())); + } + + public void testGetTifJobParameter_whenExist_thenReturnTifJobParameter() throws Exception { + TIFJobParameter tifJobParameter = setupClientForGetRequest(true, null); + 
assertEquals(tifJobParameter, tifJobParameterService.getJobParameter(tifJobParameter.getName())); + } + + public void testGetTifJobParameter_whenNotExist_thenNull() throws Exception { + TIFJobParameter tifJobParameter = setupClientForGetRequest(false, null); + assertNull(tifJobParameterService.getJobParameter(tifJobParameter.getName())); + } + + public void testGetTifJobParameter_whenExistWithListener_thenListenerIsCalledWithTifJobParameter() { + TIFJobParameter tifJobParameter = setupClientForGetRequest(true, null); + ActionListener listener = mock(ActionListener.class); + tifJobParameterService.getJobParameter(tifJobParameter.getName(), listener); + verify(listener).onResponse(eq(tifJobParameter)); + } + + public void testGetTifJobParameter_whenNotExistWithListener_thenListenerIsCalledWithNull() { + TIFJobParameter tifJobParameter = setupClientForGetRequest(false, null); + ActionListener listener = mock(ActionListener.class); + tifJobParameterService.getJobParameter(tifJobParameter.getName(), listener); + verify(listener).onResponse(null); + } + + private TIFJobParameter setupClientForGetRequest(final boolean isExist, final RuntimeException exception) { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof GetRequest); + GetRequest request = (GetRequest) actionRequest; + assertEquals(tifJobParameter.getName(), request.id()); + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + GetResponse response = getMockedGetResponse(isExist ? 
tifJobParameter : null); + if (exception != null) { + throw exception; + } + return response; + }); + return tifJobParameter; + } + + public void testDeleteTifJobParameter_whenValidInput_thenSucceed() { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof DeleteRequest); + DeleteRequest request = (DeleteRequest) actionRequest; + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(DocWriteRequest.OpType.DELETE, request.opType()); + assertEquals(tifJobParameter.getName(), request.id()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); + + DeleteResponse response = mock(DeleteResponse.class); + when(response.status()).thenReturn(RestStatus.OK); + return response; + }); + + // Run + tifJobParameterService.deleteTIFJobParameter(tifJobParameter); + } + + public void testDeleteTifJobParameter_whenIndexNotFound_thenThrowException() { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + DeleteResponse response = mock(DeleteResponse.class); + when(response.status()).thenReturn(RestStatus.NOT_FOUND); + return response; + }); + + // Run + expectThrows(ResourceNotFoundException.class, () -> tifJobParameterService.deleteTIFJobParameter(tifJobParameter)); + } + + public void testGetTifJobParameter_whenValidInput_thenSucceed() { + List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); + String[] names = tifJobParameters.stream().map(TIFJobParameter::getName).toArray(String[]::new); + ActionListener> listener = mock(ActionListener.class); + MultiGetItemResponse[] multiGetItemResponses = tifJobParameters.stream().map(tifJobParameter -> { + GetResponse getResponse = getMockedGetResponse(tifJobParameter); + MultiGetItemResponse multiGetItemResponse = 
mock(MultiGetItemResponse.class); + when(multiGetItemResponse.getResponse()).thenReturn(getResponse); + return multiGetItemResponse; + }).toArray(MultiGetItemResponse[]::new); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof MultiGetRequest); + MultiGetRequest request = (MultiGetRequest) actionRequest; + assertEquals(2, request.getItems().size()); + for (MultiGetRequest.Item item : request.getItems()) { + assertEquals(TIFJobExtension.JOB_INDEX_NAME, item.index()); + assertTrue(tifJobParameters.stream().filter(tifJobParameter -> tifJobParameter.getName().equals(item.id())).findAny().isPresent()); + } + + MultiGetResponse response = mock(MultiGetResponse.class); + when(response.getResponses()).thenReturn(multiGetItemResponses); + return response; + }); + + // Run + tifJobParameterService.getTIFJobParameters(names, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + verify(listener).onResponse(captor.capture()); + assertEquals(tifJobParameters, captor.getValue()); + + } + + public void testGetAllTifJobParameter_whenAsynchronous_thenSuccee() { + List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); + ActionListener> listener = mock(ActionListener.class); + SearchHits searchHits = getMockedSearchHits(tifJobParameters); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof SearchRequest); + SearchRequest request = (SearchRequest) actionRequest; + assertEquals(1, request.indices().length); + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.indices()[0]); + assertEquals(QueryBuilders.matchAllQuery(), request.source().query()); + assertEquals(1000, request.source().size()); + assertEquals(Preference.PRIMARY.type(), request.preference()); + + SearchResponse response = mock(SearchResponse.class); + when(response.getHits()).thenReturn(searchHits); + 
return response; + }); + + // Run + tifJobParameterService.getAllTIFJobParameters(listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + verify(listener).onResponse(captor.capture()); + assertEquals(tifJobParameters, captor.getValue()); + } + + public void testGetAllTifJobParameter_whenSynchronous_thenSucceed() { + List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); + SearchHits searchHits = getMockedSearchHits(tifJobParameters); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof SearchRequest); + SearchRequest request = (SearchRequest) actionRequest; + assertEquals(1, request.indices().length); + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.indices()[0]); + assertEquals(QueryBuilders.matchAllQuery(), request.source().query()); + assertEquals(1000, request.source().size()); + assertEquals(Preference.PRIMARY.type(), request.preference()); + + SearchResponse response = mock(SearchResponse.class); + when(response.getHits()).thenReturn(searchHits); + return response; + }); + + // Run + tifJobParameterService.getAllTIFJobParameters(); + + // Verify + assertEquals(tifJobParameters, tifJobParameterService.getAllTIFJobParameters()); + } + + public void testUpdateTifJobParameter_whenValidInput_thenUpdate() { + List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof BulkRequest); + BulkRequest bulkRequest = (BulkRequest) actionRequest; + assertEquals(2, bulkRequest.requests().size()); + for (int i = 0; i < bulkRequest.requests().size(); i++) { + IndexRequest request = (IndexRequest) bulkRequest.requests().get(i); + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(tifJobParameters.get(i).getName(), request.id()); + 
assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); + } + return null; + }); + + tifJobParameterService.updateJobSchedulerParameter(tifJobParameters, mock(ActionListener.class)); + } + + private SearchHits getMockedSearchHits(List tifJobParameters) { + SearchHit[] searchHitArray = tifJobParameters.stream().map(this::toBytesReference).map(this::toSearchHit).toArray(SearchHit[]::new); + + return new SearchHits(searchHitArray, new TotalHits(1l, TotalHits.Relation.EQUAL_TO), 1); + } + + private GetResponse getMockedGetResponse(TIFJobParameter tifJobParameter) { + GetResponse response = mock(GetResponse.class); + when(response.isExists()).thenReturn(tifJobParameter != null); + when(response.getSourceAsBytesRef()).thenReturn(toBytesReference(tifJobParameter)); + return response; + } + + private BytesReference toBytesReference(TIFJobParameter tifJobParameter) { + if (tifJobParameter == null) { + return null; + } + + try { + return BytesReference.bytes(tifJobParameter.toXContent(JsonXContent.contentBuilder(), null)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private SearchHit toSearchHit(BytesReference bytesReference) { + SearchHit searchHit = new SearchHit(Randomness.get().nextInt()); + searchHit.sourceRef(bytesReference); + return searchHit; + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java new file mode 100644 index 000000000..90a67f74b --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java @@ -0,0 +1,90 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; + +import 
java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; + +public class TIFJobParameterTests extends ThreatIntelTestCase { + private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + + public void testParser_whenAllValueIsFilled_thenSucceed() throws IOException { // TODO: same issue + String id = ThreatIntelTestHelper.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); + tifJobParameter.enable(); + tifJobParameter.setCurrentIndex(ThreatIntelTestHelper.randomLowerCaseString()); + tifJobParameter.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); + tifJobParameter.getUpdateStats().setLastSucceededAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + tifJobParameter.getUpdateStats().setLastSkippedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + tifJobParameter.getUpdateStats().setLastFailedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + + TIFJobParameter anotherTIFJobParameter = TIFJobParameter.PARSER.parse( + createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), + null + ); + + log.info("first"); + log.error(tifJobParameter); + log.error(tifJobParameter.getName()); + log.error(tifJobParameter.getCurrentIndex()); + log.info("second"); + log.error(anotherTIFJobParameter); + 
log.error(anotherTIFJobParameter.getName()); + log.error(anotherTIFJobParameter.getCurrentIndex()); + + assertTrue(tifJobParameter.equals(anotherTIFJobParameter)); + } + + public void testParser_whenNullForOptionalFields_thenSucceed() throws IOException { // TODO: same issue + String id = ThreatIntelTestHelper.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + TIFJobParameter datasource = new TIFJobParameter(id, schedule); + TIFJobParameter anotherDatasource = TIFJobParameter.PARSER.parse( + createParser(datasource.toXContent(XContentFactory.jsonBuilder(), null)), + null + ); + assertTrue(datasource.equals(anotherDatasource)); + } + + public void testCurrentIndexName_whenNotExpired_thenReturnName() { + String id = ThreatIntelTestHelper.randomLowerCaseString(); + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setName(id); + datasource.setCurrentIndex(datasource.newIndexName(ThreatIntelTestHelper.randomLowerCaseString())); + + assertNotNull(datasource.currentIndexName()); + } + + public void testNewIndexName_whenCalled_thenReturnedExpectedValue() { + String name = ThreatIntelTestHelper.randomLowerCaseString(); + String suffix = ThreatIntelTestHelper.randomLowerCaseString(); + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setName(name); + assertEquals(String.format(Locale.ROOT, "%s.%s.%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, name, suffix), datasource.newIndexName(suffix)); + } + + public void testLockDurationSeconds() { + TIFJobParameter datasource = new TIFJobParameter(); + assertNotNull(datasource.getLockDurationSeconds()); + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java new file mode 100644 index 000000000..e30f2ecfc --- /dev/null +++ 
b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java @@ -0,0 +1,177 @@ + +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.internal.verification.VerificationModeFactory.times; + +import java.io.IOException; +import java.time.Instant; +import java.util.Optional; + +import org.junit.Before; + +import org.opensearch.jobscheduler.spi.JobDocVersion; +import org.opensearch.jobscheduler.spi.JobExecutionContext; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; + +public class TIFJobRunnerTests extends ThreatIntelTestCase { + @Before + public void init() { + TIFJobRunner.getJobRunnerInstance() + .initialize(clusterService, tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService, threadPool); + } + + public void testGetJobRunnerInstance_whenCalledAgain_thenReturnSameInstance() { + assertTrue(TIFJobRunner.getJobRunnerInstance() == TIFJobRunner.getJobRunnerInstance()); + } + + public void testRunJob_whenInvalidClass_thenThrowException() { + JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); + String jobIndexName = ThreatIntelTestHelper.randomLowerCaseString(); + String jobId = 
ThreatIntelTestHelper.randomLowerCaseString(); + JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + + // Run + expectThrows(IllegalStateException.class, () -> TIFJobRunner.getJobRunnerInstance().runJob(jobParameter, jobExecutionContext)); + } + + public void testRunJob_whenValidInput_thenSucceed() throws IOException { + JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); + String jobIndexName = ThreatIntelTestHelper.randomLowerCaseString(); + String jobId = ThreatIntelTestHelper.randomLowerCaseString(); + JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); + TIFJobParameter tifJobParameter = randomTifJobParameter(); + + LockModel lockModel = randomLockModel(); + when(threatIntelLockService.acquireLock(tifJobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( + Optional.of(lockModel) + ); + + // Run + TIFJobRunner.getJobRunnerInstance().runJob(tifJobParameter, jobExecutionContext); + + // Verify + verify(threatIntelLockService).acquireLock(tifJobParameter.getName(), threatIntelLockService.LOCK_DURATION_IN_SECONDS); + verify(tifJobParameterService).getJobParameter(tifJobParameter.getName()); + verify(threatIntelLockService).releaseLock(lockModel); + } + + public void testUpdateDatasourceRunner_whenExceptionBeforeAcquiringLock_thenNoReleaseLock() { + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + when(jobParameter.getName()).thenReturn(ThreatIntelTestHelper.randomLowerCaseString()); + when(threatIntelLockService.acquireLock(jobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenThrow( + new RuntimeException() + ); + + // Run + expectThrows(Exception.class, () -> TIFJobRunner.getJobRunnerInstance().updateJobRunner(jobParameter).run()); + + 
// Verify + verify(threatIntelLockService, never()).releaseLock(any()); + } + + public void testUpdateDatasourceRunner_whenExceptionAfterAcquiringLock_thenReleaseLock() throws IOException { + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + when(jobParameter.getName()).thenReturn(ThreatIntelTestHelper.randomLowerCaseString()); + LockModel lockModel = randomLockModel(); + when(threatIntelLockService.acquireLock(jobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( + Optional.of(lockModel) + ); + when(tifJobParameterService.getJobParameter(jobParameter.getName())).thenThrow(new RuntimeException()); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobRunner(jobParameter).run(); + + // Verify + verify(threatIntelLockService).releaseLock(any()); + } + + public void testUpdateDatasource_whenDatasourceDoesNotExist_thenDoNothing() throws IOException { + TIFJobParameter datasource = new TIFJobParameter(); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, mock(Runnable.class)); + + // Verify + verify(tifJobUpdateService, never()).deleteAllTifdIndices(any()); + } + + public void testUpdateDatasource_whenInvalidState_thenUpdateLastFailedAt() throws IOException { + TIFJobParameter datasource = new TIFJobParameter(); + datasource.enable(); + datasource.getUpdateStats().setLastFailedAt(null); + datasource.setState(randomStateExcept(TIFJobState.AVAILABLE)); + when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, mock(Runnable.class)); + + // Verify + assertFalse(datasource.isEnabled()); + assertNotNull(datasource.getUpdateStats().getLastFailedAt()); + verify(tifJobParameterService).updateJobSchedulerParameter(datasource); + } + + public void testUpdateDatasource_whenValidInput_thenSucceed() throws IOException { + TIFJobParameter datasource = randomTifJobParameter(); + 
datasource.setState(TIFJobState.AVAILABLE); + when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); + Runnable renewLock = mock(Runnable.class); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, renewLock); + + // Verify + verify(tifJobUpdateService, times(1)).deleteAllTifdIndices(datasource); + verify(tifJobUpdateService).createThreatIntelFeedData(datasource, renewLock); + verify(tifJobUpdateService).updateJobSchedulerParameter(datasource, datasource.getSchedule(), TIFJobTask.ALL); + } + + public void testUpdateDatasource_whenDeleteTask_thenDeleteOnly() throws IOException { + TIFJobParameter datasource = randomTifJobParameter(); + datasource.setState(TIFJobState.AVAILABLE); + datasource.setTask(TIFJobTask.DELETE_UNUSED_INDICES); + when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); + Runnable renewLock = mock(Runnable.class); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, renewLock); + + // Verify + verify(tifJobUpdateService, times(1)).deleteAllTifdIndices(datasource); + verify(tifJobUpdateService, never()).createThreatIntelFeedData(datasource, renewLock); + verify(tifJobUpdateService).updateJobSchedulerParameter(datasource, datasource.getSchedule(), TIFJobTask.ALL); + } + + public void testUpdateDatasourceExceptionHandling() throws IOException { + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setName(ThreatIntelTestHelper.randomLowerCaseString()); + datasource.getUpdateStats().setLastFailedAt(null); + when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); + doThrow(new RuntimeException("test failure")).when(tifJobUpdateService).deleteAllTifdIndices(any()); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, mock(Runnable.class)); + + // Verify + assertNotNull(datasource.getUpdateStats().getLastFailedAt()); + 
verify(tifJobParameterService).updateJobSchedulerParameter(datasource); + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java new file mode 100644 index 000000000..06f635a34 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java @@ -0,0 +1,205 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isA; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.junit.Before; +import org.opensearch.OpenSearchException; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedParser; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; +import 
org.opensearch.securityanalytics.threatIntel.common.TIFJobState; + + +@SuppressForbidden(reason = "unit test") +public class TIFJobUpdateServiceTests extends ThreatIntelTestCase { + private TIFJobUpdateService datasourceUpdateService; + + @Before + public void init() { + datasourceUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService); + } + + public void testUpdateOrCreateThreatIntelFeedData_whenHashValueIsSame_thenSkipUpdate() throws IOException { + List containedIocs = new ArrayList<>(); + containedIocs.add("ip"); + TIFMetadata tifMetadata = new TIFMetadata("id", "url", "name", "org", "desc", "type", containedIocs, "0"); + + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setState(TIFJobState.AVAILABLE); + + // Run + datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class)); + + // Verify + assertNotNull(datasource.getUpdateStats().getLastSkippedAt()); + verify(tifJobParameterService).updateJobSchedulerParameter(datasource); + } + + public void testUpdateOrCreateThreatIntelFeedData_whenInvalidData_thenThrowException() throws IOException { + List containedIocs = new ArrayList<>(); + containedIocs.add("ip"); + TIFMetadata tifMetadata = new TIFMetadata("id", "url", "name", "org", "desc", "type", containedIocs, "0"); + + File sampleFile = new File( + this.getClass().getClassLoader().getResource("threatIntel/sample_invalid_less_than_two_fields.csv").getFile() + ); + when(ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setState(TIFJobState.AVAILABLE); + // Run + expectThrows(OpenSearchException.class, () -> datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class))); + } + + public void testUpdateOrCreateThreatIntelFeedData_whenIncompatibleFields_thenThrowException() throws 
IOException { + List containedIocs = new ArrayList<>(); + containedIocs.add("ip"); + TIFMetadata tifMetadata = new TIFMetadata("id", "https://feodotracker.abuse.ch/downloads/ipblocklist_aggressive.csv", "name", "org", "desc", "type", containedIocs, "0"); + + File sampleFile = new File(this.getClass().getClassLoader().getResource("threatIntel/sample_valid.csv").getFile()); + when(ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setState(TIFJobState.AVAILABLE); + + + // Run + expectThrows(OpenSearchException.class, () -> datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class))); + } + + public void testUpdateOrCreateThreatIntelFeedData_whenValidInput_thenSucceed() throws IOException { + List containedIocs = new ArrayList<>(); + containedIocs.add("ip"); + TIFMetadata tifMetadata = new TIFMetadata("id", "url", "name", "org", "desc", "type", containedIocs, "0"); + + File sampleFile = new File(this.getClass().getClassLoader().getResource("threatIntel/sample_valid.csv").getFile()); + when(ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + ShardRouting shardRouting = mock(ShardRouting.class); + when(shardRouting.started()).thenReturn(true); + when(routingTable.allShards(anyString())).thenReturn(Arrays.asList(shardRouting)); + + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setState(TIFJobState.AVAILABLE); + + datasource.getUpdateStats().setLastSucceededAt(null); + datasource.getUpdateStats().setLastProcessingTimeInMillis(null); + + // Run + datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class)); + + // Verify + + assertNotNull(datasource.getUpdateStats().getLastSucceededAt()); + 
assertNotNull(datasource.getUpdateStats().getLastProcessingTimeInMillis()); + verify(tifJobParameterService, times(2)).updateJobSchedulerParameter(datasource); + verify(threatIntelFeedDataService).saveThreatIntelFeedDataCSV(eq(datasource.currentIndexName()), isA(String[].class), any(Iterator.class), any(Runnable.class), tifMetadata); + } + + public void testWaitUntilAllShardsStarted_whenTimedOut_thenThrowException() { + String indexName = ThreatIntelTestHelper.randomLowerCaseString(); + ShardRouting shardRouting = mock(ShardRouting.class); + when(shardRouting.started()).thenReturn(false); + when(routingTable.allShards(indexName)).thenReturn(Arrays.asList(shardRouting)); + + // Run + Exception e = expectThrows(OpenSearchException.class, () -> datasourceUpdateService.waitUntilAllShardsStarted(indexName, 10)); + + // Verify + assertTrue(e.getMessage().contains("did not complete")); + } + + public void testWaitUntilAllShardsStarted_whenInterrupted_thenThrowException() { + String indexName = ThreatIntelTestHelper.randomLowerCaseString(); + ShardRouting shardRouting = mock(ShardRouting.class); + when(shardRouting.started()).thenReturn(false); + when(routingTable.allShards(indexName)).thenReturn(Arrays.asList(shardRouting)); + + // Run + Thread.currentThread().interrupt(); + Exception e = expectThrows(RuntimeException.class, () -> datasourceUpdateService.waitUntilAllShardsStarted(indexName, 10)); + + // Verify + assertEquals(InterruptedException.class, e.getCause().getClass()); + } + + public void testDeleteUnusedIndices_whenValidInput_thenSucceed() { + String datasourceName = ThreatIntelTestHelper.randomLowerCaseString(); + String indexPrefix = String.format(".threatintel-data.%s.", datasourceName); + Instant now = Instant.now(); + String currentIndex = indexPrefix + now.toEpochMilli(); + String oldIndex = indexPrefix + now.minusMillis(1).toEpochMilli(); + String lingeringIndex = indexPrefix + now.minusMillis(2).toEpochMilli(); + TIFJobParameter datasource = new 
TIFJobParameter(); + datasource.setName(datasourceName); + datasource.setCurrentIndex(currentIndex); + datasource.getIndices().add(currentIndex); + datasource.getIndices().add(oldIndex); + datasource.getIndices().add(lingeringIndex); + + when(metadata.hasIndex(currentIndex)).thenReturn(true); + when(metadata.hasIndex(oldIndex)).thenReturn(true); + when(metadata.hasIndex(lingeringIndex)).thenReturn(false); + + datasourceUpdateService.deleteAllTifdIndices(datasource); + + assertEquals(0, datasource.getIndices().size()); +// assertEquals(currentIndex, datasource.getIndices().get(0)); //TODO: check this + verify(tifJobParameterService).updateJobSchedulerParameter(datasource); + verify(threatIntelFeedDataService).deleteThreatIntelDataIndex(oldIndex); + } + + public void testUpdateDatasource_whenNoChange_thenNoUpdate() { + TIFJobParameter datasource = randomTifJobParameter(); + + // Run + datasourceUpdateService.updateJobSchedulerParameter(datasource, datasource.getSchedule(), datasource.getTask()); + + // Verify + verify(tifJobParameterService, never()).updateJobSchedulerParameter(any()); + } + + public void testUpdateDatasource_whenChange_thenUpdate() { + TIFJobParameter datasource = randomTifJobParameter(); + datasource.setTask(TIFJobTask.ALL); + + // Run + datasourceUpdateService.updateJobSchedulerParameter( + datasource, + new IntervalSchedule(Instant.now(), datasource.getSchedule().getInterval() + 1, ChronoUnit.DAYS), + datasource.getTask() + ); + datasourceUpdateService.updateJobSchedulerParameter(datasource, datasource.getSchedule(), TIFJobTask.DELETE_UNUSED_INDICES); + + // Verify + verify(tifJobParameterService, times(2)).updateJobSchedulerParameter(any()); + } +} diff --git a/src/test/resources/threatIntel/sample_invalid_less_than_two_fields.csv b/src/test/resources/threatIntel/sample_invalid_less_than_two_fields.csv new file mode 100644 index 000000000..08670061c --- /dev/null +++ b/src/test/resources/threatIntel/sample_invalid_less_than_two_fields.csv @@ 
-0,0 +1,2 @@ +network +1.0.0.0/24 \ No newline at end of file diff --git a/src/test/resources/threatIntel/sample_valid.csv b/src/test/resources/threatIntel/sample_valid.csv new file mode 100644 index 000000000..fad1eb6fd --- /dev/null +++ b/src/test/resources/threatIntel/sample_valid.csv @@ -0,0 +1,3 @@ +ip,region +1.0.0.0/24,Australia +10.0.0.0/24,USA \ No newline at end of file From 4512308306ef6799d52c886707dfc36a6fe3d119 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 10 Oct 2023 18:21:42 -0700 Subject: [PATCH 13/39] converge job scheduler code with threat intel feed integration in detectors Signed-off-by: Surya Sashank Nistala --- .../SecurityAnalyticsPlugin.java | 2 +- .../SampleExtensionPlugin.java | 161 ------ .../SampleExtensionRestHandler.java | 138 ------ .../sampleextension/SampleJobParameter.java | 153 ------ .../sampleextension/SampleJobRunner.java | 149 ------ .../ThreatIntelFeedDataService.java | 152 +++--- .../threatIntel/ThreatIntelFeedDataUtils.java | 42 ++ .../action/TransportPutTIFJobAction.java | 10 +- .../threatIntel/common/FeedMetadata.java | 287 ----------- .../threatIntel/common/TIFMetadata.java | 37 +- .../jobscheduler/TIFJobParameter.java | 14 +- .../jobscheduler/TIFJobParameterService.java | 4 +- .../jobscheduler/TIFJobRunner.java | 12 +- .../jobscheduler/TIFJobUpdateService.java | 164 +++--- src/main/resources/feed/config/feeds.yml | 3 + src/main/resources/feed/config/feeds/otx.yml | 12 + .../resthandler/DetectorMonitorRestApiIT.java | 467 +++++++++--------- .../threatIntel/ThreatIntelTestCase.java | 287 ----------- .../threatIntel/ThreatIntelTestHelper.java | 120 ----- .../threatIntel/common/TIFMetadataTests.java | 35 -- .../common/ThreatIntelLockServiceTests.java | 117 ----- .../jobscheduler/TIFJobExtensionTests.java | 56 --- .../TIFJobParameterServiceTests.java | 385 --------------- .../jobscheduler/TIFJobParameterTests.java | 90 ---- .../jobscheduler/TIFJobRunnerTests.java | 177 ------- 
.../TIFJobUpdateServiceTests.java | 205 -------- 26 files changed, 497 insertions(+), 2782 deletions(-) delete mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java create mode 100644 src/main/resources/feed/config/feeds.yml create mode 100644 src/main/resources/feed/config/feeds/otx.yml delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java 
b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index e9b9382e8..624df47cb 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -150,7 +150,7 @@ public Collection createComponents(Client client, mapperService = new MapperService(client, clusterService, indexNameExpressionResolver, indexTemplateManager, logTypeService); ruleIndices = new RuleIndices(logTypeService, client, clusterService, threadPool); correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); - ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService.state(), clusterService, client, indexNameExpressionResolver, xContentRegistry); + ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService, client, indexNameExpressionResolver, xContentRegistry); DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); TIFJobParameterService tifJobParameterService = new TIFJobParameterService(client, clusterService); TIFJobUpdateService tifJobUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService); diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java deleted file mode 100644 index 653653deb..000000000 --- a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionPlugin.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ -package org.opensearch.securityanalytics.sampleextension; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.client.Client; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.node.DiscoveryNodes; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.ClusterSettings; -import org.opensearch.common.settings.IndexScopedSettings; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.settings.SettingsFilter; -import org.opensearch.core.common.io.stream.NamedWriteableRegistry; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.core.xcontent.XContentParserUtils; -import org.opensearch.env.Environment; -import org.opensearch.env.NodeEnvironment; -import org.opensearch.jobscheduler.spi.JobSchedulerExtension; -import org.opensearch.jobscheduler.spi.ScheduledJobParser; -import org.opensearch.jobscheduler.spi.ScheduledJobRunner; -import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; -import org.opensearch.plugins.ActionPlugin; -import org.opensearch.plugins.Plugin; -import org.opensearch.repositories.RepositoriesService; -import org.opensearch.rest.RestController; -import org.opensearch.rest.RestHandler; -import org.opensearch.script.ScriptService; -import org.opensearch.threadpool.ThreadPool; -import org.opensearch.watcher.ResourceWatcherService; - -import java.io.IOException; -import java.time.Instant; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.function.Supplier; - -/** - * Sample JobScheduler extension plugin. - * - * It use ".scheduler_sample_extension" index to manage its scheduled jobs, and exposes a REST API - * endpoint using {@link SampleExtensionRestHandler}. 
- * - */ -public class SampleExtensionPlugin extends Plugin implements ActionPlugin, JobSchedulerExtension { - private static final Logger log = LogManager.getLogger(SampleExtensionPlugin.class); - - static final String JOB_INDEX_NAME = ".scheduler_sample_extension"; - - @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier - ) { - SampleJobRunner jobRunner = SampleJobRunner.getJobRunnerInstance(); - jobRunner.setClusterService(clusterService); - jobRunner.setThreadPool(threadPool); - jobRunner.setClient(client); - - return Collections.emptyList(); - } - - @Override - public String getJobType() { - return "scheduler_sample_extension"; - } - - @Override - public String getJobIndex() { - return JOB_INDEX_NAME; - } - - @Override - public ScheduledJobRunner getJobRunner() { - return SampleJobRunner.getJobRunnerInstance(); - } - - @Override - public ScheduledJobParser getJobParser() { - return (parser, id, jobDocVersion) -> { - SampleJobParameter jobParameter = new SampleJobParameter(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - while (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { - String fieldName = parser.currentName(); - parser.nextToken(); - switch (fieldName) { - case SampleJobParameter.NAME_FIELD: - jobParameter.setJobName(parser.text()); - break; - case SampleJobParameter.ENABLED_FILED: - jobParameter.setEnabled(parser.booleanValue()); - break; - case SampleJobParameter.ENABLED_TIME_FILED: - jobParameter.setEnabledTime(parseInstantValue(parser)); - break; - case 
SampleJobParameter.LAST_UPDATE_TIME_FIELD: - jobParameter.setLastUpdateTime(parseInstantValue(parser)); - break; - case SampleJobParameter.SCHEDULE_FIELD: - jobParameter.setSchedule(ScheduleParser.parse(parser)); - break; - case SampleJobParameter.INDEX_NAME_FIELD: - jobParameter.setIndexToWatch(parser.text()); - break; - case SampleJobParameter.LOCK_DURATION_SECONDS: - jobParameter.setLockDurationSeconds(parser.longValue()); - break; - case SampleJobParameter.JITTER: - jobParameter.setJitter(parser.doubleValue()); - break; - default: - XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); - } - } - return jobParameter; - }; - } - - private Instant parseInstantValue(XContentParser parser) throws IOException { - if (XContentParser.Token.VALUE_NULL.equals(parser.currentToken())) { - return null; - } - if (parser.currentToken().isValue()) { - return Instant.ofEpochMilli(parser.longValue()); - } - XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); - return null; - } - - @Override - public List getRestHandlers( - Settings settings, - RestController restController, - ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, - SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster - ) { - return Collections.singletonList(new SampleExtensionRestHandler()); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java deleted file mode 100644 index b0ae1299f..000000000 --- a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleExtensionRestHandler.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under 
the Apache-2.0 license or a - * compatible open source license. - */ -package org.opensearch.securityanalytics.sampleextension; - -import org.opensearch.action.delete.DeleteRequest; -import org.opensearch.action.delete.DeleteResponse; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.support.WriteRequest; -import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.BytesRestResponse; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestResponse; - -import java.io.IOException; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -/** - * A sample rest handler that supports schedule and deschedule job operation - * - * Users need to provide "id", "index", "job_name", and "interval" parameter to schedule - * a job. e.g. 
- * {@code - * POST /_plugins/scheduler_sample/watch?id=dashboards-job-id&job_name=watch dashboards index&index=.opensearch_dashboards_1&interval=1 - * } - * - * creates a job with id "dashboards-job-id" and job name "watch dashboards index", - * which logs ".opensearch_dashboards_1" index's shards info every 1 minute - * - * Users can remove that job by calling - * {@code DELETE /_plugins/scheduler_sample/watch?id=dashboards-job-id} - */ -public class SampleExtensionRestHandler extends BaseRestHandler { - public static final String WATCH_INDEX_URI = "/_plugins/scheduler_sample/watch"; - - @Override - public String getName() { - return "Sample JobScheduler extension handler"; - } - - @Override - public List routes() { - return Collections.unmodifiableList( - Arrays.asList(new Route(RestRequest.Method.POST, WATCH_INDEX_URI), new Route(RestRequest.Method.DELETE, WATCH_INDEX_URI)) - ); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - if (request.method().equals(RestRequest.Method.POST)) { - // compose SampleJobParameter object from request - String id = request.param("id"); - String indexName = request.param("index"); - String jobName = request.param("job_name"); - String interval = request.param("interval"); - String lockDurationSecondsString = request.param("lock_duration_seconds"); - Long lockDurationSeconds = lockDurationSecondsString != null ? Long.parseLong(lockDurationSecondsString) : null; - String jitterString = request.param("jitter"); - Double jitter = jitterString != null ? 
Double.parseDouble(jitterString) : null; - - if (id == null || indexName == null) { - throw new IllegalArgumentException("Must specify id and index parameter"); - } - SampleJobParameter jobParameter = new SampleJobParameter( - id, - jobName, - indexName, - new IntervalSchedule(Instant.now(), Integer.parseInt(interval), ChronoUnit.MINUTES), - lockDurationSeconds, - jitter - ); - IndexRequest indexRequest = new IndexRequest().index(SampleExtensionPlugin.JOB_INDEX_NAME) - .id(id) - .source(jobParameter.toXContent(JsonXContent.contentBuilder(), null)) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - - return restChannel -> { - // index the job parameter - client.index(indexRequest, new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - try { - RestResponse restResponse = new BytesRestResponse( - RestStatus.OK, - indexResponse.toXContent(JsonXContent.contentBuilder(), null) - ); - restChannel.sendResponse(restResponse); - } catch (IOException e) { - restChannel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } - } - - @Override - public void onFailure(Exception e) { - restChannel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } - }); - }; - } else if (request.method().equals(RestRequest.Method.DELETE)) { - // delete job parameter doc from index - String id = request.param("id"); - DeleteRequest deleteRequest = new DeleteRequest().index(SampleExtensionPlugin.JOB_INDEX_NAME).id(id); - - return restChannel -> { - client.delete(deleteRequest, new ActionListener() { - @Override - public void onResponse(DeleteResponse deleteResponse) { - restChannel.sendResponse(new BytesRestResponse(RestStatus.OK, "Job deleted.")); - } - - @Override - public void onFailure(Exception e) { - restChannel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } - }); - }; - } else { - return restChannel -> { - 
restChannel.sendResponse(new BytesRestResponse(RestStatus.METHOD_NOT_ALLOWED, request.method() + " is not allowed.")); - }; - } - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java deleted file mode 100644 index 1353b47ab..000000000 --- a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobParameter.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ -package org.opensearch.securityanalytics.sampleextension; - -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.jobscheduler.spi.ScheduledJobParameter; -import org.opensearch.jobscheduler.spi.schedule.Schedule; - -import java.io.IOException; -import java.time.Instant; - -/** - * A sample job parameter. - *

- * It adds an additional "indexToWatch" field to {@link ScheduledJobParameter}, which stores the index - * the job runner will watch. - */ -public class SampleJobParameter implements ScheduledJobParameter { - public static final String NAME_FIELD = "name"; - public static final String ENABLED_FILED = "enabled"; - public static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; - public static final String LAST_UPDATE_TIME_FIELD_READABLE = "last_update_time_field"; - public static final String SCHEDULE_FIELD = "schedule"; - public static final String ENABLED_TIME_FILED = "enabled_time"; - public static final String ENABLED_TIME_FILED_READABLE = "enabled_time_field"; - public static final String INDEX_NAME_FIELD = "index_name_to_watch"; - public static final String LOCK_DURATION_SECONDS = "lock_duration_seconds"; - public static final String JITTER = "jitter"; - - private String jobName; - private Instant lastUpdateTime; - private Instant enabledTime; - private boolean isEnabled; - private Schedule schedule; - private String indexToWatch; - private Long lockDurationSeconds; - private Double jitter; - - public SampleJobParameter() {} - - public SampleJobParameter(String id, String name, String indexToWatch, Schedule schedule, Long lockDurationSeconds, Double jitter) { - this.jobName = name; - this.indexToWatch = indexToWatch; - this.schedule = schedule; - - Instant now = Instant.now(); - this.isEnabled = true; - this.enabledTime = now; - this.lastUpdateTime = now; - this.lockDurationSeconds = lockDurationSeconds; - this.jitter = jitter; - } - - @Override - public String getName() { - return this.jobName; - } - - @Override - public Instant getLastUpdateTime() { - return this.lastUpdateTime; - } - - @Override - public Instant getEnabledTime() { - return this.enabledTime; - } - - @Override - public Schedule getSchedule() { - return this.schedule; - } - - @Override - public boolean isEnabled() { - return this.isEnabled; - } - - @Override - public Long 
getLockDurationSeconds() { - return this.lockDurationSeconds; - } - - @Override - public Double getJitter() { - return jitter; - } - - public String getIndexToWatch() { - return this.indexToWatch; - } - - public void setJobName(String jobName) { - this.jobName = jobName; - } - - public void setLastUpdateTime(Instant lastUpdateTime) { - this.lastUpdateTime = lastUpdateTime; - } - - public void setEnabledTime(Instant enabledTime) { - this.enabledTime = enabledTime; - } - - public void setEnabled(boolean enabled) { - isEnabled = enabled; - } - - public void setSchedule(Schedule schedule) { - this.schedule = schedule; - } - - public void setIndexToWatch(String indexToWatch) { - this.indexToWatch = indexToWatch; - } - - public void setLockDurationSeconds(Long lockDurationSeconds) { - this.lockDurationSeconds = lockDurationSeconds; - } - - public void setJitter(Double jitter) { - this.jitter = jitter; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NAME_FIELD, this.jobName) - .field(ENABLED_FILED, this.isEnabled) - .field(SCHEDULE_FIELD, this.schedule) - .field(INDEX_NAME_FIELD, this.indexToWatch); - if (this.enabledTime != null) { - builder.timeField(ENABLED_TIME_FILED, ENABLED_TIME_FILED_READABLE, this.enabledTime.toEpochMilli()); - } - if (this.lastUpdateTime != null) { - builder.timeField(LAST_UPDATE_TIME_FIELD, LAST_UPDATE_TIME_FIELD_READABLE, this.lastUpdateTime.toEpochMilli()); - } - if (this.lockDurationSeconds != null) { - builder.field(LOCK_DURATION_SECONDS, this.lockDurationSeconds); - } - if (this.jitter != null) { - builder.field(JITTER, this.jitter); - } - builder.endObject(); - return builder; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java b/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java deleted file mode 100644 index 0d62738f1..000000000 --- 
a/src/main/java/org/opensearch/securityanalytics/sampleextension/SampleJobRunner.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ -package org.opensearch.securityanalytics.sampleextension; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.client.Client; -import org.opensearch.cluster.routing.ShardRouting; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.core.action.ActionListener; -import org.opensearch.jobscheduler.spi.JobExecutionContext; -import org.opensearch.jobscheduler.spi.ScheduledJobParameter; -import org.opensearch.jobscheduler.spi.ScheduledJobRunner; -import org.opensearch.jobscheduler.spi.utils.LockService; -import org.opensearch.plugins.Plugin; -import org.opensearch.threadpool.ThreadPool; - -import java.util.List; -import java.util.UUID; - -/** - * A sample job runner class. - * - * The job runner should be a singleton class if it uses OpenSearch client or other objects passed - * from OpenSearch. Because when registering the job runner to JobScheduler plugin, OpenSearch has - * not invoke plugins' createComponents() method. That is saying the plugin is not completely initalized, - * and the OpenSearch {@link Client}, {@link ClusterService} and other objects - * are not available to plugin and this job runner. - * - * So we have to move this job runner intialization to {@link Plugin} createComponents() method, and using - * singleton job runner to ensure we register a usable job runner instance to JobScheduler plugin. - * - * This sample job runner takes the "indexToWatch" from job parameter and logs that index's shards. 
- */ -public class SampleJobRunner implements ScheduledJobRunner { - - private static final Logger log = LogManager.getLogger(ScheduledJobRunner.class); - - private static SampleJobRunner INSTANCE; - - public static SampleJobRunner getJobRunnerInstance() { - if (INSTANCE != null) { - return INSTANCE; - } - synchronized (SampleJobRunner.class) { - if (INSTANCE != null) { - return INSTANCE; - } - INSTANCE = new SampleJobRunner(); - return INSTANCE; - } - } - - private ClusterService clusterService; - private ThreadPool threadPool; - private Client client; - - private SampleJobRunner() { - // Singleton class, use getJobRunner method instead of constructor - } - - public void setClusterService(ClusterService clusterService) { - this.clusterService = clusterService; - } - - public void setThreadPool(ThreadPool threadPool) { - this.threadPool = threadPool; - } - - public void setClient(Client client) { - this.client = client; - } - - @Override - public void runJob(ScheduledJobParameter jobParameter, JobExecutionContext context) { - if (!(jobParameter instanceof SampleJobParameter)) { - throw new IllegalStateException( - "Job parameter is not instance of SampleJobParameter, type: " + jobParameter.getClass().getCanonicalName() - ); - } - - if (this.clusterService == null) { - throw new IllegalStateException("ClusterService is not initialized."); - } - - if (this.threadPool == null) { - throw new IllegalStateException("ThreadPool is not initialized."); - } - - final LockService lockService = context.getLockService(); - - Runnable runnable = () -> { - if (jobParameter.getLockDurationSeconds() != null) { - lockService.acquireLock(jobParameter, context, ActionListener.wrap(lock -> { - if (lock == null) { - return; - } - - SampleJobParameter parameter = (SampleJobParameter) jobParameter; - StringBuilder msg = new StringBuilder(); - msg.append("Watching index ").append(parameter.getIndexToWatch()).append("\n"); - - List shardRoutingList = 
this.clusterService.state().routingTable().allShards(parameter.getIndexToWatch()); - for (ShardRouting shardRouting : shardRoutingList) { - msg.append(shardRouting.shardId().getId()) - .append("\t") - .append(shardRouting.currentNodeId()) - .append("\t") - .append(shardRouting.active() ? "active" : "inactive") - .append("\n"); - } - log.info(msg.toString()); - runTaskForIntegrationTests(parameter); - runTaskForLockIntegrationTests(parameter); - - lockService.release( - lock, - ActionListener.wrap(released -> { log.info("Released lock for job {}", jobParameter.getName()); }, exception -> { - throw new IllegalStateException("Failed to release lock."); - }) - ); - }, exception -> { throw new IllegalStateException("Failed to acquire lock."); })); - } - }; - - threadPool.generic().submit(runnable); - } - - private void runTaskForIntegrationTests(SampleJobParameter jobParameter) { - this.client.index( - new IndexRequest(jobParameter.getIndexToWatch()).id(UUID.randomUUID().toString()) - .source("{\"message\": \"message\"}", XContentType.JSON) - ); - } - - private void runTaskForLockIntegrationTests(SampleJobParameter jobParameter) throws InterruptedException { - if (jobParameter.getName().equals("sample-job-lock-test-it")) { - Thread.sleep(180000); - } - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index b01d602b3..b7592a6a4 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -1,9 +1,9 @@ package org.opensearch.securityanalytics.threatIntel; import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import 
org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchException; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.admin.indices.create.CreateIndexRequest; @@ -11,29 +11,26 @@ import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.IndicesOptions; +import org.opensearch.action.support.WriteRequest; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; -import org.opensearch.client.Requests; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.securityanalytics.findings.FindingsService; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; +import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobAction; +import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobRequest; import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; import org.opensearch.securityanalytics.threatIntel.common.StashedThreadContext; import 
org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; @@ -48,6 +45,7 @@ import java.nio.charset.StandardCharsets; import java.time.Instant; import java.util.*; +import java.util.concurrent.CountDownLatch; import java.util.stream.Collectors; import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; @@ -56,9 +54,8 @@ * Service to handle CRUD operations on Threat Intel Feed Data */ public class ThreatIntelFeedDataService { - private static final Logger log = LogManager.getLogger(FindingsService.class); + private static final Logger log = LogManager.getLogger(ThreatIntelFeedDataService.class); - private final ClusterState state; private final Client client; private final IndexNameExpressionResolver indexNameExpressionResolver; @@ -82,12 +79,10 @@ public class ThreatIntelFeedDataService { private final ClusterSettings clusterSettings; public ThreatIntelFeedDataService( - ClusterState state, ClusterService clusterService, Client client, IndexNameExpressionResolver indexNameExpressionResolver, NamedXContentRegistry xContentRegistry) { - this.state = state; this.client = client; this.indexNameExpressionResolver = indexNameExpressionResolver; this.xContentRegistry = xContentRegistry; @@ -100,45 +95,42 @@ public ThreatIntelFeedDataService( public void getThreatIntelFeedData( ActionListener> listener ) { - String tifdIndex = IndexUtils.getNewIndexByCreationDate( - this.clusterService.state(), - this.indexNameExpressionResolver, - ".opensearch-sap-threatintel*" //name? 
- ); - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); - SearchRequest searchRequest = new SearchRequest(tifdIndex); - searchRequest.source().size(9999); //TODO: convert to scroll - searchRequest.source(sourceBuilder); - client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(getTifdList(r)), e -> { - log.error(String.format( - "Failed to fetch threat intel feed data from system index %s", tifdIndex), e); - listener.onFailure(e); - })); - } - - private List getTifdList(SearchResponse searchResponse) { - List list = new ArrayList<>(); - if (searchResponse.getHits().getHits().length != 0) { - Arrays.stream(searchResponse.getHits().getHits()).forEach(hit -> { - try { - XContentParser xcp = XContentType.JSON.xContent().createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() - ); - list.add(ThreatIntelFeedData.parse(xcp, hit.getId(), hit.getVersion())); - } catch (Exception e) { - log.error(() -> new ParameterizedMessage( - "Failed to parse Threat intel feed data doc from hit {}", hit), - e - ); - } + try { + //if index not exists + if(IndexUtils.getNewIndexByCreationDate( + this.clusterService.state(), + this.indexNameExpressionResolver, + ".opensearch-sap-threatintel*" //name? + ) == null) { + createThreatIntelFeedData(); + } + //if index exists + String tifdIndex = IndexUtils.getNewIndexByCreationDate( + this.clusterService.state(), + this.indexNameExpressionResolver, + ".opensearch-sap-threatintel*" //name? 
+ ); - }); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + SearchRequest searchRequest = new SearchRequest(tifdIndex); + searchRequest.source().size(9999); //TODO: convert to scroll + searchRequest.source(sourceBuilder); + client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(ThreatIntelFeedDataUtils.getTifdList(r, xContentRegistry)), e -> { + log.error(String.format( + "Failed to fetch threat intel feed data from system index %s", tifdIndex), e); + listener.onFailure(e); + })); + } catch (InterruptedException e) { + log.error("failed to get threat intel feed data", e); + listener.onFailure(e); } - return list; } - - + + private void createThreatIntelFeedData() throws InterruptedException { + CountDownLatch countDownLatch = new CountDownLatch(1); + client.execute(PutTIFJobAction.INSTANCE, new PutTIFJobRequest("feed_updater")).actionGet(); + countDownLatch.await(); + } /** @@ -183,59 +175,62 @@ private String getIndexMapping() { * @param iterator TIF data to insert * @param renewLock Runnable to renew lock */ - public void saveThreatIntelFeedDataCSV( + public void parseAndSaveThreatIntelFeedDataCSV( final String indexName, final String[] fields, final Iterator iterator, final Runnable renewLock, final TIFMetadata tifMetadata ) throws IOException { - if (indexName == null || fields == null || iterator == null || renewLock == null){ + if (indexName == null || fields == null || iterator == null || renewLock == null) { throw new IllegalArgumentException("Parameters cannot be null, failed to save threat intel feed data"); } TimeValue timeout = clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT); Integer batchSize = clusterSettings.get(SecurityAnalyticsSettings.BATCH_SIZE); final BulkRequest bulkRequest = new BulkRequest(); - Queue requests = new LinkedList<>(); - for (int i = 0; i < batchSize; i++) { - requests.add(Requests.indexRequest(indexName)); - } - + 
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + List tifdList = new ArrayList<>(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); - String iocType = tifMetadata.getFeedType(); - if (tifMetadata.getContainedIocs().get(0) == "ip") { //TODO: dynamically get the type - iocType = "ip"; - } - Integer colNum = Integer.parseInt(tifMetadata.getIocCol()); + String iocType = tifMetadata.getContainedIocs().get(0); //todo make generic in upcoming versions + Integer colNum = tifMetadata.getIocCol(); String iocValue = record.values()[colNum]; String feedId = tifMetadata.getFeedId(); Instant timestamp = Instant.now(); - ThreatIntelFeedData threatIntelFeedData = new ThreatIntelFeedData(iocType, iocValue, feedId, timestamp); - XContentBuilder tifData = threatIntelFeedData.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); - IndexRequest indexRequest = (IndexRequest) requests.poll(); + tifdList.add(threatIntelFeedData); + } + for (ThreatIntelFeedData tifd : tifdList) { + XContentBuilder tifData = tifd.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); + IndexRequest indexRequest = new IndexRequest(indexName); indexRequest.source(tifData); - indexRequest.id(record.get(0)); + indexRequest.opType(DocWriteRequest.OpType.INDEX); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); bulkRequest.add(indexRequest); - if (iterator.hasNext() == false || bulkRequest.requests().size() == batchSize) { - BulkResponse response = StashedThreadContext.run(client, () -> client.bulk(bulkRequest).actionGet(timeout)); - if (response.hasFailures()) { - throw new OpenSearchException( - "error occurred while ingesting threat intel feed data in {} with an error {}", - indexName, - response.buildFailureMessage() - ); - } - requests.addAll(bulkRequest.requests()); - bulkRequest.requests().clear(); + + if (bulkRequest.requests().size() == batchSize) { + saveTifds(bulkRequest, timeout); } - renewLock.run(); } + 
renewLock.run(); freezeIndex(indexName); } + public void saveTifds(BulkRequest bulkRequest, TimeValue timeout) { + + BulkResponse response = StashedThreadContext.run(client, () -> client.bulk(bulkRequest).actionGet(timeout)); + if (response.hasFailures()) { + throw new OpenSearchException( + "error occurred while ingesting threat intel feed data in {} with an error {}", + StringUtils.join(bulkRequest.getIndices()), + response.buildFailureMessage() + ); + } + bulkRequest.requests().clear(); + + } + private void freezeIndex(final String indexName) { TimeValue timeout = clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT); StashedThreadContext.run(client, () -> { @@ -284,5 +279,10 @@ public void deleteThreatIntelDataIndex(final List indices) { throw new OpenSearchException("failed to delete data[{}]", String.join(",", indices)); } } + public static class ThreatIntelFeedUpdateHandler implements Runnable { + + @Override + public void run() { -} + } + }} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java new file mode 100644 index 000000000..75a20f1a5 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java @@ -0,0 +1,42 @@ +package org.opensearch.securityanalytics.threatIntel; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.securityanalytics.model.ThreatIntelFeedData; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public 
class ThreatIntelFeedDataUtils { + + private static final Logger log = LogManager.getLogger(ThreatIntelFeedDataUtils.class); + + public static List getTifdList(SearchResponse searchResponse, NamedXContentRegistry xContentRegistry) { + List list = new ArrayList<>(); + if (searchResponse.getHits().getHits().length != 0) { + Arrays.stream(searchResponse.getHits().getHits()).forEach(hit -> { + try { + XContentParser xcp = XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() + ); + list.add(ThreatIntelFeedData.parse(xcp, hit.getId(), hit.getVersion())); + } catch (Exception e) { + log.error(() -> new ParameterizedMessage( + "Failed to parse Threat intel feed data doc from hit {}", hit), + e + ); + } + + }); + } + return list; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java index c32a64c1c..edd189ec9 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java @@ -103,10 +103,10 @@ protected void internalDoExecute( final ActionListener listener ) { StepListener createIndexStep = new StepListener<>(); - tifJobParameterService.createIndexIfNotExists(createIndexStep); + tifJobParameterService.createJobIndexIfNotExists(createIndexStep); createIndexStep.whenComplete(v -> { TIFJobParameter tifJobParameter = TIFJobParameter.Builder.build(request); - tifJobParameterService.putTIFJobParameter(tifJobParameter, getIndexResponseListener(tifJobParameter, lock, listener)); + tifJobParameterService.saveTIFJobParameter(tifJobParameter, postIndexingTifJobParameter(tifJobParameter, lock, listener)); }, exception -> { lockService.releaseLock(lock); log.error("failed to release lock", exception); @@ -118,7 
+118,7 @@ protected void internalDoExecute( * This method takes lock as a parameter and is responsible for releasing lock * unless exception is thrown */ - protected ActionListener getIndexResponseListener( + protected ActionListener postIndexingTifJobParameter( final TIFJobParameter tifJobParameter, final LockModel lock, final ActionListener listener @@ -131,7 +131,7 @@ public void onResponse(final IndexResponse indexResponse) { threadPool.generic().submit(() -> { AtomicReference lockReference = new AtomicReference<>(lock); try { - createTIFJob(tifJobParameter, lockService.getRenewLockRunnable(lockReference)); + createThreatIntelFeedData(tifJobParameter, lockService.getRenewLockRunnable(lockReference)); } finally { lockService.releaseLock(lockReference.get()); } @@ -153,7 +153,7 @@ public void onFailure(final Exception e) { }; } - protected void createTIFJob(final TIFJobParameter tifJobParameter, final Runnable renewLock) { + protected void createThreatIntelFeedData(final TIFJobParameter tifJobParameter, final Runnable renewLock) { if (TIFJobState.CREATING.equals(tifJobParameter.getState()) == false) { log.error("Invalid tifJobParameter state. 
Expecting {} but received {}", TIFJobState.CREATING, tifJobParameter.getState()); markTIFJobAsCreateFailed(tifJobParameter); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java deleted file mode 100644 index 7d219a164..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/FeedMetadata.java +++ /dev/null @@ -1,287 +0,0 @@ -package org.opensearch.securityanalytics.threatIntel.common; - -import org.opensearch.core.ParseField; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ConstructingObjectParser; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; - -/** - * Database of a tif job - */ -public class FeedMetadata implements Writeable, ToXContent { //feedmetadata - private static final ParseField FEED_ID = new ParseField("feed_id"); - private static final ParseField FEED_NAME = new ParseField("feed_name"); - private static final ParseField FEED_FORMAT = new ParseField("feed_format"); - private static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); - private static final ParseField DESCRIPTION = new ParseField("description"); - private static final ParseField ORGANIZATION = new ParseField("organization"); - private static final ParseField CONTAINED_IOCS_FIELD = new ParseField("contained_iocs_field"); - private static final ParseField IOC_COL = new ParseField("ioc_col"); - private static final ParseField FIELDS_FIELD = new ParseField("fields"); - - /** - * @param feedId id of the feed - * @return id of the feed - */ - private String feedId; - - /** - * @param feedFormat format of the feed (csv, json...) 
- * @return the type of feed ingested - */ - private String feedFormat; - - /** - * @param endpoint URL of a manifest file - * @return URL of a manifest file - */ - private String endpoint; - - /** - * @param feedName name of the threat intel feed - * @return name of the threat intel feed - */ - private String feedName; - - /** - * @param description description of the threat intel feed - * @return description of the threat intel feed - */ - private String description; - - /** - * @param organization organization of the threat intel feed - * @return organization of the threat intel feed - */ - private String organization; - - /** - * @param contained_iocs_field list of iocs contained in a given feed - * @return list of iocs contained in a given feed - */ - private List contained_iocs_field; - - /** - * @param ioc_col column of the contained ioc - * @return column of the contained ioc - */ - private String iocCol; - - /** - * @param fields A list of available fields in the database - * @return A list of available fields in the database - */ - private List fields; - - public FeedMetadata(String feedId, String feedName, String feedFormat, final String endpoint, final String description, - final String organization, final List contained_iocs_field, final String iocCol, final List fields) { - this.feedId = feedId; - this.feedName = feedName; - this.feedFormat = feedFormat; - this.endpoint = endpoint; - this.description = description; - this.organization = organization; - this.contained_iocs_field = contained_iocs_field; - this.iocCol = iocCol; - this.fields = fields; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "tif_metadata_database", - true, - args -> { - String feedId = (String) args[0]; - String feedName = (String) args[1]; - String feedFormat = (String) args[2]; - String endpoint = (String) args[3]; - String description = (String) args[4]; - String organization = (String) args[5]; - List contained_iocs_field = 
(List) args[6]; - String iocCol = (String) args[7]; - List fields = (List) args[8]; - return new FeedMetadata(feedFormat, endpoint, feedId, feedName, description, organization, contained_iocs_field, iocCol, fields); - } - ); - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEED_ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEED_NAME); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEED_FORMAT); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ENDPOINT_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DESCRIPTION); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ORGANIZATION); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), CONTAINED_IOCS_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), IOC_COL); - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FIELDS_FIELD); - } - - public FeedMetadata(final StreamInput in) throws IOException { - feedId = in.readString(); - feedName = in.readString(); - feedFormat = in.readString(); - endpoint = in.readString(); - description = in.readString(); - organization = in.readString(); - contained_iocs_field = in.readStringList(); - iocCol = in.readString(); - fields = in.readOptionalStringList(); - } - - private FeedMetadata(){} - - @Override - public void writeTo(final StreamOutput out) throws IOException { - out.writeString(feedId); - out.writeString(feedName); - out.writeString(feedFormat); - out.writeString(endpoint); - out.writeString(description); - out.writeString(organization); - out.writeStringCollection(contained_iocs_field); - out.writeString(iocCol); - out.writeOptionalStringCollection(fields); - } - - @Override - public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { - builder.startObject(); - 
builder.field(FEED_ID.getPreferredName(), feedId); - builder.field(FEED_NAME.getPreferredName(), feedName); - builder.field(FEED_FORMAT.getPreferredName(), feedFormat); - builder.field(ENDPOINT_FIELD.getPreferredName(), endpoint); - builder.field(DESCRIPTION.getPreferredName(), description); - builder.field(ORGANIZATION.getPreferredName(), organization); - builder.field(CONTAINED_IOCS_FIELD.getPreferredName(), contained_iocs_field); - builder.field(IOC_COL.getPreferredName(), iocCol); - -// if (provider != null) { -// builder.field(PROVIDER_FIELD.getPreferredName(), provider); -// } -// if (updatedAt != null) { -// builder.timeField( -// UPDATED_AT_FIELD.getPreferredName(), -// UPDATED_AT_FIELD_READABLE.getPreferredName(), -// updatedAt.toEpochMilli() -// ); -// } - if (fields != null) { - builder.startArray(FIELDS_FIELD.getPreferredName()); - for (String field : fields) { - builder.value(field); - } - builder.endArray(); - } - builder.endObject(); - return builder; - } - - public String getFeedId() { - return feedId; - } - - public String getFeedFormat() { - return feedFormat; - } - - public String getFeedName() { - return feedName; - } - - public String getDescription() { - return description; - } - - public String getOrganization() { - return organization; - } - - public List getContained_iocs_field() { - return contained_iocs_field; - } - - public String getIocCol() { - return iocCol; - } - - public String getEndpoint() { - return this.endpoint; - } - - public List getFields() { - return fields; - } - public void setFeedId(String feedId) { - this.feedId = feedId; - } - - public void setFeedFormat(String feedFormat) { - this.feedFormat = feedFormat; - } - - public void setEndpoint(String endpoint) { - this.endpoint = endpoint; - } - - public void setFeedName(String feedName) { - this.feedName = feedName; - } - - public void setDescription(String description) { - this.description = description; - } - - public void setOrganization(String organization) { - 
this.organization = organization; - } - - public void setContained_iocs_field(List contained_iocs_field) { - this.contained_iocs_field = contained_iocs_field; - } - - public void setIocCol(String iocCol) { - this.iocCol = iocCol; - } - - public void setFields(List fields) { - this.fields = fields; - } - - /** - * Reset database so that it can be updated in next run regardless there is new update or not - */ - public void resetTIFMetadata() { - this.setFeedId(null); - this.setFeedName(null); - this.setFeedFormat(null); - this.setEndpoint(null); - this.setDescription(null); - this.setOrganization(null); - this.setContained_iocs_field(null); - this.setIocCol(null); - this.setFeedFormat(null); - } - - /** - * Set database attributes with given input - * - * @param tifMetadata the tif metadata - * @param fields the fields - */ - public void setTIFMetadata(final TIFMetadata tifMetadata, final List fields) { - this.feedId = tifMetadata.getFeedId(); - this.feedName = tifMetadata.getName(); - this.feedFormat = tifMetadata.getFeedType(); - this.endpoint = tifMetadata.getUrl(); - this.organization = tifMetadata.getOrganization(); - this.description = tifMetadata.getDescription(); - this.contained_iocs_field = tifMetadata.getContainedIocs(); - this.iocCol = tifMetadata.getIocCol(); - this.fields = fields; - } - -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java index a594537be..8b94e5693 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java @@ -85,7 +85,7 @@ public class TIFMetadata implements Writeable, ToXContent { * @param iocCol the column of the ioc data if feedType is csv * @return the column of the ioc data if feedType is csv */ - private String iocCol; + private Integer iocCol; /** * @param containedIocs list 
of ioc types contained in feed @@ -93,7 +93,6 @@ public class TIFMetadata implements Writeable, ToXContent { */ private List containedIocs; - public String getUrl() { return url; } @@ -112,13 +111,25 @@ public String getFeedId() { public String getFeedType() { return feedType; } - public String getIocCol() { + public Integer getIocCol() { return iocCol; } public List getContainedIocs() { return containedIocs; } + public TIFMetadata(final String feedId, final String url, final String name, final String organization, final String description, + final String feedType, final List containedIocs, final Integer iocCol) { + this.feedId = feedId; + this.url = url; + this.name = name; + this.organization = organization; + this.description = description; + this.feedType = feedType; + this.containedIocs = containedIocs; + this.iocCol = iocCol; + } + public void setFeedId(String feedId) { this.feedId = feedId; } @@ -143,7 +154,7 @@ public void setDescription(String description) { this.description = description; } - public void setIocCol(String iocCol) { + public void setIocCol(Integer iocCol) { this.iocCol = iocCol; } @@ -152,18 +163,6 @@ public void setContainedIocs(List containedIocs) { } - public TIFMetadata(final String feedId, final String url, final String name, final String organization, final String description, - final String feedType, final List containedIocs, final String iocCol) { - this.feedId = feedId; - this.url = url; - this.name = name; - this.organization = organization; - this.description = description; - this.feedType = feedType; - this.containedIocs = containedIocs; - this.iocCol = iocCol; - } - /** * tif job metadata parser */ @@ -178,7 +177,7 @@ public TIFMetadata(final String feedId, final String url, final String name, fin String description = (String) args[4]; String feedType = (String) args[5]; List containedIocs = (List) args[6]; - String iocCol = (String) args[7]; + Integer iocCol = Integer.parseInt((String) args[7]); return new TIFMetadata(feedId, 
url, name, organization, description, feedType, containedIocs, iocCol); } ); @@ -201,7 +200,7 @@ public TIFMetadata(final StreamInput in) throws IOException{ description = in.readString(); feedType = in.readString(); containedIocs = in.readStringList(); - iocCol = in.readString(); + iocCol = in.readInt(); } public void writeTo(final StreamOutput out) throws IOException { out.writeString(feedId); @@ -211,7 +210,7 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeString(description); out.writeString(feedType); out.writeStringCollection(containedIocs); - out.writeString(iocCol); + out.writeInt(iocCol); } private TIFMetadata(){} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java index e347e0e60..456be4838 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java @@ -29,12 +29,13 @@ import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobRequest; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; public class TIFJobParameter implements Writeable, ScheduledJobParameter { /** * Prefix of indices having threatIntel data */ - public static final String THREAT_INTEL_DATA_INDEX_NAME_PREFIX = "opensearch-sap-threatintel"; + public static final String THREAT_INTEL_DATA_INDEX_NAME_PREFIX = ".opensearch-sap-threatintel"; /** * Default fields for job scheduling @@ -351,11 +352,16 @@ public void setSchedule(IntervalSchedule schedule) { /** * Index name for a tif job with given suffix * - * @param suffix the suffix of a index name * @return index name for a tif job with given suffix */ - 
public String newIndexName(final String suffix) { - return String.format(Locale.ROOT, "%s.%s.%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, name, suffix); + public String newIndexName(final TIFJobParameter jobSchedulerParameter, TIFMetadata tifMetadata) { + List indices = jobSchedulerParameter.indices; + Optional nameOptional = indices.stream().filter(name -> name.contains(tifMetadata.getFeedId())).findAny(); + String suffix = "-1"; + if (nameOptional.isPresent()) { + suffix = "-1".equals(nameOptional.get()) ? "-2" : suffix; + } + return String.format(Locale.ROOT, "%s-%s-%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, tifMetadata.getFeedId(), suffix); } public TIFJobState getState() { diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java index cab8dcc0b..9d8fc3a3d 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java @@ -79,7 +79,7 @@ public TIFJobParameterService(final Client client, final ClusterService clusterS * * @param stepListener setup listener */ - public void createIndexIfNotExists(final StepListener stepListener) { + public void createJobIndexIfNotExists(final StepListener stepListener) { if (clusterService.state().metadata().hasIndex(TIFJobExtension.JOB_INDEX_NAME) == true) { stepListener.onResponse(null); return; @@ -200,7 +200,7 @@ public TIFJobParameter getJobParameter(final String name) throws IOException { * @param tifJobParameter the tifJobParameter * @param listener the listener */ - public void putTIFJobParameter(final TIFJobParameter tifJobParameter, final ActionListener listener) { + public void saveTIFJobParameter(final TIFJobParameter tifJobParameter, final ActionListener listener) { tifJobParameter.setLastUpdateTime(Instant.now()); 
StashedThreadContext.run(client, () -> { try { diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java index dfe16f4c6..4407bd9fe 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java @@ -16,6 +16,8 @@ import org.opensearch.securityanalytics.model.DetectorTrigger; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; import java.time.Instant; @@ -149,17 +151,19 @@ protected void updateJobParameter(final ScheduledJobParameter jobParameter, fina return; } try { - jobSchedulerUpdateService.deleteAllTifdIndices(jobSchedulerParameter); if (TIFJobTask.DELETE_UNUSED_INDICES.equals(jobSchedulerParameter.getTask()) == false) { - jobSchedulerUpdateService.createThreatIntelFeedData(jobSchedulerParameter, renewLock); + Instant startTime = Instant.now(); + List oldIndices = new ArrayList<>(jobSchedulerParameter.getIndices()); + List newFeedIndices = jobSchedulerUpdateService.createThreatIntelFeedData(jobSchedulerParameter, renewLock); + Instant endTime = Instant.now(); + jobSchedulerUpdateService.deleteAllTifdIndices(oldIndices, newFeedIndices); + jobSchedulerUpdateService.updateJobSchedulerParameterAsSucceeded(newFeedIndices, jobSchedulerParameter, startTime, endTime); } -// jobSchedulerUpdateService.deleteUnusedIndices(jobSchedulerParameter); } catch (Exception e) { log.error("Failed to update jobSchedulerParameter for {}", jobSchedulerParameter.getName(), e); jobSchedulerParameter.getUpdateStats().setLastFailedAt(Instant.now()); jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); } finally { -// 
jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); jobSchedulerUpdateService.updateJobSchedulerParameter(jobSchedulerParameter, jobSchedulerParameter.getSchedule(), TIFJobTask.ALL); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java index 710d8015c..6da04087e 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java @@ -5,32 +5,30 @@ package org.opensearch.securityanalytics.threatIntel.jobscheduler; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import java.io.IOException; -import java.time.Duration; -import java.time.Instant; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import java.util.stream.Collectors; - import org.apache.commons.csv.CSVParser; import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchException; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; - import org.opensearch.core.rest.RestStatus; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedParser; -import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedParser; import 
org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import java.io.IOException; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; + public class TIFJobUpdateService { private static final Logger log = LogManager.getLogger(DetectorTrigger.class); @@ -53,26 +51,20 @@ public TIFJobUpdateService( } // functions used in job Runner + /** - * Delete all indices except the one which is being used - * - * @param jobSchedulerParameter + * Delete old feed indices except the one which is being used */ - public void deleteAllTifdIndices(final TIFJobParameter jobSchedulerParameter) { + public void deleteAllTifdIndices(List oldIndices, List newIndices) { try { - List indicesToDelete = jobSchedulerParameter.getIndices() - .stream() -// .filter(index -> index.equals(jobSchedulerParameter.currentIndexName()) == false) - .collect(Collectors.toList()); - - List deletedIndices = deleteIndices(indicesToDelete); - - if (deletedIndices.isEmpty() == false) { - jobSchedulerParameter.getIndices().removeAll(deletedIndices); - jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); + oldIndices.removeAll(newIndices); + if (false == oldIndices.isEmpty()) { + deleteIndices(oldIndices); } } catch (Exception e) { - log.error("Failed to delete old indices for {}", jobSchedulerParameter.getName(), e); + log.error( + () -> new ParameterizedMessage("Failed to delete old threat intel feed indices {}", StringUtils.join(oldIndices)), e + ); } } @@ -80,8 +72,8 @@ public void deleteAllTifdIndices(final TIFJobParameter jobSchedulerParameter) { * Update jobSchedulerParameter with given systemSchedule and task * * @param jobSchedulerParameter jobSchedulerParameter to update - * @param systemSchedule new system schedule value - * @param task new task value + * @param 
systemSchedule new system schedule value + * @param task new task value */ public void updateJobSchedulerParameter(final TIFJobParameter jobSchedulerParameter, final IntervalSchedule systemSchedule, final TIFJobTask task) { boolean updated = false; @@ -101,34 +93,34 @@ public void updateJobSchedulerParameter(final TIFJobParameter jobSchedulerParame private List deleteIndices(final List indicesToDelete) { List deletedIndices = new ArrayList<>(indicesToDelete.size()); for (String index : indicesToDelete) { - if (clusterService.state().metadata().hasIndex(index) == false) { + if (false == clusterService.state().metadata().hasIndex(index)) { deletedIndices.add(index); - continue; - } - try { - threatIntelFeedDataService.deleteThreatIntelDataIndex(index); - deletedIndices.add(index); - } catch (Exception e) { - log.error("Failed to delete an index [{}]", index, e); } } - return deletedIndices; + indicesToDelete.removeAll(deletedIndices); + try { + threatIntelFeedDataService.deleteThreatIntelDataIndex(indicesToDelete); + } catch (Exception e) { + log.error( + () -> new ParameterizedMessage("Failed to delete old threat intel feed index [{}]", indicesToDelete), e + ); + } + return indicesToDelete; } /** * Update threat intel feed data - * + *

* The first column is ip range field regardless its header name. * Therefore, we don't store the first column's header name. * * @param jobSchedulerParameter the jobSchedulerParameter - * @param renewLock runnable to renew lock - * + * @param renewLock runnable to renew lock * @throws IOException */ - public void createThreatIntelFeedData(final TIFJobParameter jobSchedulerParameter, final Runnable renewLock) throws IOException { - // parse YAML containing list of threat intel feeds + public List createThreatIntelFeedData(final TIFJobParameter jobSchedulerParameter, final Runnable renewLock) throws IOException { + // parse YAML containing list of threat intel feeds.yml // for each feed (ex. Feodo) // parse feed specific YAML containing TIFMetadata @@ -138,59 +130,66 @@ public void createThreatIntelFeedData(final TIFJobParameter jobSchedulerParamete // use the TIFMetadata to switch case feed type // parse through file and save threat intel feed data - List containedIocs = new ArrayList<>(); - TIFMetadata tifMetadata = new TIFMetadata("feedid", "url", "name", "org", - "descr", "csv", containedIocs, "1"); // TODO: example tif metdata + TIFMetadata tifMetadata = new TIFMetadata("alientvault_reputation_generic", + "https://reputation.alienvault.com/reputation.generic", + "Alienvault IP Reputation Feed", + "OTX", + "Alienvault IP Reputation Database", + "csv", + List.of("ip"), + 1); + List tifMetadataList = new ArrayList<>(); //todo populate from config instead of example + tifMetadataList.add(tifMetadata); Instant startTime = Instant.now(); - String indexName = setupIndex(jobSchedulerParameter); - String[] header; + List freshIndices = new ArrayList<>(); + for (TIFMetadata metadata : tifMetadataList) { + String indexName = setupIndex(jobSchedulerParameter, tifMetadata); + String[] header; - Boolean succeeded; + Boolean succeeded; - switch(tifMetadata.getFeedType()) { - case "csv": - try (CSVParser reader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)) { 
- // iterate until we find first line without '#' - CSVRecord findHeader = reader.iterator().next(); - while (findHeader.get(0).charAt(0) == '#' || findHeader.get(0).charAt(0) == ' ') { - findHeader = reader.iterator().next(); + switch (tifMetadata.getFeedType()) { + case "csv": + try (CSVParser reader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)) { + // iterate until we find first line without '#' + CSVRecord findHeader = reader.iterator().next(); + while (findHeader.get(0).charAt(0) == '#' || findHeader.get(0).charAt(0) == ' ') { + findHeader = reader.iterator().next(); + } + CSVRecord headerLine = findHeader; + header = ThreatIntelFeedParser.validateHeader(headerLine).values(); + threatIntelFeedDataService.parseAndSaveThreatIntelFeedDataCSV(indexName, header, reader.iterator(), renewLock, tifMetadata); } - CSVRecord headerLine = findHeader; - header = ThreatIntelFeedParser.validateHeader(headerLine).values(); - - threatIntelFeedDataService.saveThreatIntelFeedDataCSV(indexName, header, reader.iterator(), renewLock, tifMetadata); - } - default: - // if the feed type doesn't match any of the supporting feed types, throw an exception - succeeded = false; - } + default: + // if the feed type doesn't match any of the supporting feed types, throw an exception + succeeded = false; + } + waitUntilAllShardsStarted(indexName, MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS); - if (!succeeded) { - log.error("Exception: failed to parse correct feed type"); - throw new OpenSearchException("Exception: failed to parse correct feed type"); + if (!succeeded) { + log.error("Exception: failed to parse correct feed type"); + throw new OpenSearchException("Exception: failed to parse correct feed type"); + } + freshIndices.add(indexName); } - - // end the loop here - - waitUntilAllShardsStarted(indexName, MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS); - Instant endTime = Instant.now(); - updateJobSchedulerParameterAsSucceeded(indexName, 
jobSchedulerParameter, startTime, endTime); + return freshIndices; } // helper functions + /*** * Update jobSchedulerParameter as succeeded * * @param jobSchedulerParameter the jobSchedulerParameter */ - private void updateJobSchedulerParameterAsSucceeded( - final String newIndexName, + public void updateJobSchedulerParameterAsSucceeded( + List indices, final TIFJobParameter jobSchedulerParameter, final Instant startTime, final Instant endTime ) { - jobSchedulerParameter.setCurrentIndex(newIndexName); // TODO: remove current index? + jobSchedulerParameter.setIndices(indices); jobSchedulerParameter.getUpdateStats().setLastSucceededAt(endTime); jobSchedulerParameter.getUpdateStats().setLastProcessingTimeInMillis(endTime.toEpochMilli() - startTime.toEpochMilli()); jobSchedulerParameter.enable(); @@ -204,13 +203,14 @@ private void updateJobSchedulerParameterAsSucceeded( } /*** - * Setup index to add a new threat intel feed data + * Create index to add a new threat intel feed data * * @param jobSchedulerParameter the jobSchedulerParameter + * @param tifMetadata * @return new index name */ - private String setupIndex(final TIFJobParameter jobSchedulerParameter) { - String indexName = jobSchedulerParameter.newIndexName(UUID.randomUUID().toString()); + private String setupIndex(final TIFJobParameter jobSchedulerParameter, TIFMetadata tifMetadata) { + String indexName = jobSchedulerParameter.newIndexName(jobSchedulerParameter, tifMetadata); jobSchedulerParameter.getIndices().add(indexName); jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); threatIntelFeedDataService.createIndexIfNotExists(indexName); diff --git a/src/main/resources/feed/config/feeds.yml b/src/main/resources/feed/config/feeds.yml new file mode 100644 index 000000000..8f07a00f7 --- /dev/null +++ b/src/main/resources/feed/config/feeds.yml @@ -0,0 +1,3 @@ +feeds: + - otx + - feodo \ No newline at end of file diff --git a/src/main/resources/feed/config/feeds/otx.yml 
b/src/main/resources/feed/config/feeds/otx.yml new file mode 100644 index 000000000..50d19924a --- /dev/null +++ b/src/main/resources/feed/config/feeds/otx.yml @@ -0,0 +1,12 @@ +feedId: otx_alienvault +url: www.otx.comm; +name: OTX Alientvault reputation +organization: OTX +description: description +feedType: csv; +containedIocs: + - ip +iocCol: 1; # 0 indexed +indexName: otx + +# .opensearch-sap-threatintel-otx-00001 \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 67f2b083a..640a3d8eb 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -4,9 +4,7 @@ */ package org.opensearch.securityanalytics.resthandler; -import org.apache.hc.core5.http.ContentType; import org.apache.hc.core5.http.HttpStatus; -import org.apache.hc.core5.http.io.entity.StringEntity; import org.junit.Assert; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Request; @@ -22,11 +20,8 @@ import org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.model.Rule; -import org.opensearch.securityanalytics.model.ThreatIntelFeedData; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; import java.io.IOException; -import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -34,7 +29,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -49,6 +43,7 @@ import static org.opensearch.securityanalytics.TestHelpers.randomRule; import static 
org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; +import static org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataUtils.getTifdList; public class DetectorMonitorRestApiIT extends SecurityAnalyticsRestTestCase { /** @@ -56,6 +51,7 @@ public class DetectorMonitorRestApiIT extends SecurityAnalyticsRestTestCase { * 2. Creates two aggregation rules and assigns to a detector, while removing 5 prepackaged rules * 3. Verifies that two bucket level monitor exists * 4. Verifies the findings + * * @throws IOException */ public void testRemoveDocLevelRuleAddAggregationRules_verifyFindings_success() throws IOException { @@ -110,13 +106,13 @@ public void testRemoveDocLevelRuleAddAggregationRules_verifyFindings_success() t assertEquals(1, monitorIds.size()); String monitorId = monitorIds.get(0); - String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); // Create aggregation rules - String sumRuleId = createRule(randomAggregationRule( "sum", " > 2")); - String avgTermRuleId = createRule(randomAggregationRule( "avg", " > 1")); + String sumRuleId = createRule(randomAggregationRule("sum", " > 2")); + String avgTermRuleId = createRule(randomAggregationRule("avg", " > 1")); // Update detector and empty doc level rules so detector contains only one aggregation rule DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(sumRuleId), new DetectorRule(avgTermRuleId)), Collections.emptyList()); @@ -140,8 +136,8 @@ public void 
testRemoveDocLevelRuleAddAggregationRules_verifyFindings_success() t indexDoc(index, "2", randomDoc(3, 4, "Info")); // Execute two bucket level monitors - for(String id: monitorIds){ - monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + id))).get("monitor")).get("monitor_type"); + for (String id : monitorIds) { + monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + id))).get("monitor")).get("monitor_type"); Assert.assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitorType); executeAlertingMonitor(id, Collections.emptyMap()); } @@ -156,24 +152,24 @@ public void testRemoveDocLevelRuleAddAggregationRules_verifyFindings_success() t List aggRuleIds = List.of(sumRuleId, avgTermRuleId); - List> findings = (List)getFindingsBody.get("findings"); - for(Map finding : findings) { - Set aggRulesFinding = ((List>)finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( + List> findings = (List) getFindingsBody.get("findings"); + for (Map finding : findings) { + Set aggRulesFinding = ((List>) finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( Collectors.toSet()); // Bucket monitor finding will have one rule String aggRuleId = aggRulesFinding.iterator().next(); assertTrue(aggRulesFinding.contains(aggRuleId)); - List findingDocs = (List)finding.get("related_doc_ids"); + List findingDocs = (List) finding.get("related_doc_ids"); Assert.assertEquals(2, findingDocs.size()); assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); } - String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + String 
findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); } @@ -182,6 +178,7 @@ public void testRemoveDocLevelRuleAddAggregationRules_verifyFindings_success() t * 2. Creates 5 prepackaged doc level rules and one custom doc level rule and removes the aggregation rule * 3. Verifies that one doc level monitor exists * 4. Verifies the findings + * * @throws IOException */ public void testReplaceAggregationRuleWithDocRule_verifyFindings_success() throws IOException { @@ -201,7 +198,7 @@ public void testReplaceAggregationRuleWithDocRule_verifyFindings_success() throw assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); - String maxRuleId = createRule(randomAggregationRule( "max", " > 2")); + String maxRuleId = createRule(randomAggregationRule("max", " > 2")); List detectorRules = List.of(new DetectorRule(maxRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, Collections.emptyList()); @@ -235,7 +232,7 @@ public void testReplaceAggregationRuleWithDocRule_verifyFindings_success() throw Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); String monitorId = ((List) (detectorAsMap).get("monitor_id")).get(0); - String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitorType); @@ -262,7 +259,7 @@ public void testReplaceAggregationRuleWithDocRule_verifyFindings_success() throw assertEquals(1, monitorIds.size()); monitorId = monitorIds.get(0); - monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + 
monitorId))).get("monitor")).get("monitor_type"); + monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); @@ -299,15 +296,15 @@ public void testReplaceAggregationRuleWithDocRule_verifyFindings_success() throw Set docRuleIds = new HashSet<>(prepackagedRules); docRuleIds.add(randomDocRuleId); - List> findings = (List)getFindingsBody.get("findings"); + List> findings = (List) getFindingsBody.get("findings"); List foundDocIds = new ArrayList<>(); - for(Map finding : findings) { - Set aggRulesFinding = ((List>)finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( + for (Map finding : findings) { + Set aggRulesFinding = ((List>) finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( Collectors.toSet()); assertTrue(docRuleIds.containsAll(aggRulesFinding)); - List findingDocs = (List)finding.get("related_doc_ids"); + List findingDocs = (List) finding.get("related_doc_ids"); Assert.assertEquals(1, findingDocs.size()); foundDocIds.addAll(findingDocs); } @@ -372,7 +369,7 @@ public void testRemoveAllRulesAndUpdateDetector_success() throws IOException { assertEquals(1, monitorIds.size()); String monitorId = monitorIds.get(0); - String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); @@ -428,7 +425,7 @@ public void testAddNewAggregationRule_verifyFindings_success() throws IOExceptio Map responseBody = asMap(createResponse); String detectorId = responseBody.get("_id").toString(); - String request = "{\n" + + String 
request = "{\n" + " \"query\" : {\n" + " \"match\":{\n" + " \"_id\": \"" + detectorId + "\"\n" + @@ -438,13 +435,13 @@ public void testAddNewAggregationRule_verifyFindings_success() throws IOExceptio List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = detectorMap.get("inputs"); assertEquals(1, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); // Test adding the new max monitor and updating the existing sum monitor - String maxRuleId = createRule(randomAggregationRule("max", " > 3")); + String maxRuleId = createRule(randomAggregationRule("max", " > 3")); DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(maxRuleId), new DetectorRule(sumRuleId)), Collections.emptyList()); Detector updatedDetector = randomDetectorWithInputs(List.of(newInput)); @@ -454,7 +451,7 @@ public void testAddNewAggregationRule_verifyFindings_success() throws IOExceptio hits = executeSearch(Detector.DETECTORS_INDEX, request); hit = hits.get(0); - Map updatedDetectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map updatedDetectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); inputArr = updatedDetectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -466,8 +463,8 @@ public void testAddNewAggregationRule_verifyFindings_success() throws IOExceptio indexDoc(index, "1", randomDoc(2, 4, "Info")); indexDoc(index, "2", randomDoc(3, 4, "Info")); - for(String monitorId: monitorIds) { - Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + for (String monitorId : monitorIds) { + Map monitor = (Map) (entityAsMap(client().performRequest(new 
Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitor.get("monitor_type")); executeAlertingMonitor(monitorId, Collections.emptyMap()); } @@ -493,10 +490,10 @@ public void testAddNewAggregationRule_verifyFindings_success() throws IOExceptio assertEquals(2, findingDocs.size()); assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); - String findingDetectorId = ((Map)((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); + String findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); } @@ -540,7 +537,7 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio Map responseBody = asMap(createResponse); String detectorId = responseBody.get("_id").toString(); - String request = "{\n" + + String request = "{\n" + " \"query\" : {\n" + " \"match\":{\n" + " \"_id\": \"" + detectorId + "\"\n" + @@ -550,7 +547,7 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = detectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -565,7 +562,7 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio hits = executeSearch(Detector.DETECTORS_INDEX, request); hit = hits.get(0); - Map updatedDetectorMap = 
(HashMap)(hit.getSourceAsMap().get("detector")); + Map updatedDetectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); inputArr = updatedDetectorMap.get("inputs"); assertEquals(1, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -579,7 +576,7 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio assertEquals(1, monitorIds.size()); - Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorIds.get(0))))).get("monitor"); + Map monitor = (Map) (entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorIds.get(0))))).get("monitor"); assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitor.get("monitor_type")); @@ -608,10 +605,10 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio assertEquals(2, findingDocs.size()); assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); - String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + String findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); } @@ -621,6 +618,7 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio * 3. Verifies that number of rules is unchanged * 4. Verifies monitor types * 5. 
Verifies findings + * * @throws IOException */ public void testReplaceAggregationRule_verifyFindings_success() throws IOException { @@ -656,7 +654,7 @@ public void testReplaceAggregationRule_verifyFindings_success() throws IOExcepti Map responseBody = asMap(createResponse); String detectorId = responseBody.get("_id").toString(); - String request = "{\n" + + String request = "{\n" + " \"query\" : {\n" + " \"match\":{\n" + " \"_id\": \"" + detectorId + "\"\n" + @@ -666,7 +664,7 @@ public void testReplaceAggregationRule_verifyFindings_success() throws IOExcepti List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = detectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -682,7 +680,7 @@ public void testReplaceAggregationRule_verifyFindings_success() throws IOExcepti hits = executeSearch(Detector.DETECTORS_INDEX, request); hit = hits.get(0); - Map updatedDetectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map updatedDetectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); inputArr = updatedDetectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -695,8 +693,8 @@ public void testReplaceAggregationRule_verifyFindings_success() throws IOExcepti indexDoc(index, "2", randomDoc(3, 4, "Info")); indexDoc(index, "3", randomDoc(3, 4, "Test")); Map numberOfMonitorTypes = new HashMap<>(); - for(String monitorId: monitorIds) { - Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + for (String monitorId : monitorIds) { + Map monitor = (Map) (entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); 
numberOfMonitorTypes.merge(monitor.get("monitor_type"), 1, Integer::sum); executeAlertingMonitor(monitorId, Collections.emptyMap()); } @@ -712,27 +710,27 @@ public void testReplaceAggregationRule_verifyFindings_success() throws IOExcepti assertNotNull(getFindingsBody); assertEquals(5, getFindingsBody.get("total_findings")); - String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + String findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); List docLevelFinding = new ArrayList<>(); - List> findings = (List)getFindingsBody.get("findings"); + List> findings = (List) getFindingsBody.get("findings"); Set docLevelRules = new HashSet<>(prepackagedDocRules); - for(Map finding : findings) { - List> queries = (List>)finding.get("queries"); + for (Map finding : findings) { + List> queries = (List>) finding.get("queries"); Set findingRules = queries.stream().map(it -> it.get("id").toString()).collect(Collectors.toSet()); // In this test case all doc level rules are matching the finding rule ids - if(docLevelRules.containsAll(findingRules)) { - docLevelFinding.addAll((List)finding.get("related_doc_ids")); + if (docLevelRules.containsAll(findingRules)) { + docLevelFinding.addAll((List) finding.get("related_doc_ids")); } else { String aggRuleId = findingRules.iterator().next(); - List findingDocs = (List)finding.get("related_doc_ids"); + List findingDocs = (List) finding.get("related_doc_ids"); Assert.assertEquals(2, findingDocs.size()); assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); } @@ -770,7 +768,7 @@ public void testMinAggregationRule_findingSuccess() throws IOException 
{ Map responseBody = asMap(createResponse); String detectorId = responseBody.get("_id").toString(); - String request = "{\n" + + String request = "{\n" + " \"query\" : {\n" + " \"match\":{\n" + " \"_id\": \"" + detectorId + "\"\n" + @@ -780,7 +778,7 @@ public void testMinAggregationRule_findingSuccess() throws IOException { List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List monitorIds = ((List) (detectorMap).get("monitor_id")); @@ -791,8 +789,8 @@ public void testMinAggregationRule_findingSuccess() throws IOException { indexDoc(index, "8", randomDoc(1, 1, testOpCode)); Map numberOfMonitorTypes = new HashMap<>(); - for (String monitorId: monitorIds) { - Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + for (String monitorId : monitorIds) { + Map monitor = (Map) (entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); numberOfMonitorTypes.merge(monitor.get("monitor_type"), 1, Integer::sum); executeAlertingMonitor(monitorId, Collections.emptyMap()); } @@ -805,17 +803,17 @@ public void testMinAggregationRule_findingSuccess() throws IOException { assertNotNull(getFindingsBody); - List> findings = (List)getFindingsBody.get("findings"); + List> findings = (List) getFindingsBody.get("findings"); for (Map finding : findings) { - List findingDocs = (List)finding.get("related_doc_ids"); + List findingDocs = (List) finding.get("related_doc_ids"); Assert.assertEquals(1, findingDocs.size()); assertTrue(Arrays.asList("7").containsAll(findingDocs)); } - String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) 
getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + String findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); } @@ -850,10 +848,10 @@ public void testMultipleAggregationAndDocRules_findingSuccess() throws IOExcepti // 5 custom aggregation rules String sumRuleId = createRule(randomAggregationRule("sum", " > 1", infoOpCode)); - String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); - String minRuleId = createRule(randomAggregationRule("min", " > 3", testOpCode)); - String avgRuleId = createRule(randomAggregationRule("avg", " > 3", infoOpCode)); - String cntRuleId = createRule(randomAggregationRule("count", " > 3", "randomTestCode")); + String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); + String minRuleId = createRule(randomAggregationRule("min", " > 3", testOpCode)); + String avgRuleId = createRule(randomAggregationRule("avg", " > 3", infoOpCode)); + String cntRuleId = createRule(randomAggregationRule("count", " > 3", "randomTestCode")); List aggRuleIds = List.of(sumRuleId, maxRuleId); String randomDocRuleId = createRule(randomRule()); List prepackagedRules = getRandomPrePackagedRules(); @@ -868,7 +866,6 @@ public void testMultipleAggregationAndDocRules_findingSuccess() throws IOExcepti Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); - String request = "{\n" + " \"query\" : {\n" + " \"match_all\":{\n" + @@ -891,7 +888,7 @@ public void testMultipleAggregationAndDocRules_findingSuccess() throws IOExcepti "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map updatedDetectorMap = 
(HashMap)(hit.getSourceAsMap().get("detector")); + Map updatedDetectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = updatedDetectorMap.get("inputs"); assertEquals(6, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -911,8 +908,8 @@ public void testMultipleAggregationAndDocRules_findingSuccess() throws IOExcepti Map numberOfMonitorTypes = new HashMap<>(); - for (String monitorId: monitorIds) { - Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + for (String monitorId : monitorIds) { + Map monitor = (Map) (entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); numberOfMonitorTypes.merge(monitor.get("monitor_type"), 1, Integer::sum); Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); @@ -923,16 +920,15 @@ public void testMultipleAggregationAndDocRules_findingSuccess() throws IOExcepti // 5 prepackaged and 1 custom doc level rule assertEquals(6, noOfSigmaRuleMatches); } else if (MonitorType.BUCKET_LEVEL_MONITOR.getValue().equals(monitor.get("monitor_type"))) { - for(String ruleId: aggRuleIds) { - Object rule = (((Map)((Map)((List)((Map)executeResults.get("input_results")).get("results")).get(0)).get("aggregations")).get(ruleId)); - if(rule != null) { - if(ruleId == sumRuleId) { - assertRuleMonitorFinding(executeResults, ruleId,3, List.of("4")); + for (String ruleId : aggRuleIds) { + Object rule = (((Map) ((Map) ((List) ((Map) executeResults.get("input_results")).get("results")).get(0)).get("aggregations")).get(ruleId)); + if (rule != null) { + if (ruleId == sumRuleId) { + assertRuleMonitorFinding(executeResults, ruleId, 3, List.of("4")); } else if (ruleId == maxRuleId) { - assertRuleMonitorFinding(executeResults, ruleId,5, List.of("2", "3")); - } - else if (ruleId == minRuleId) { - assertRuleMonitorFinding(executeResults, 
ruleId,1, List.of("2")); + assertRuleMonitorFinding(executeResults, ruleId, 5, List.of("2", "3")); + } else if (ruleId == minRuleId) { + assertRuleMonitorFinding(executeResults, ruleId, 1, List.of("2")); } } } @@ -952,10 +948,10 @@ else if (ruleId == minRuleId) { // 8 findings from doc level rules, and 3 findings for aggregation (sum, max and min) assertEquals(11, getFindingsBody.get("total_findings")); - String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + String findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); List docLevelFinding = new ArrayList<>(); @@ -964,22 +960,22 @@ else if (ruleId == minRuleId) { Set docLevelRules = new HashSet<>(prepackagedRules); docLevelRules.add(randomDocRuleId); - for(Map finding : findings) { - List> queries = (List>)finding.get("queries"); + for (Map finding : findings) { + List> queries = (List>) finding.get("queries"); Set findingRuleIds = queries.stream().map(it -> it.get("id").toString()).collect(Collectors.toSet()); // Doc level finding matches all doc level rules (including the custom one) in this test case - if(docLevelRules.containsAll(findingRuleIds)) { - docLevelFinding.addAll((List)finding.get("related_doc_ids")); + if (docLevelRules.containsAll(findingRuleIds)) { + docLevelFinding.addAll((List) finding.get("related_doc_ids")); } else { // In the case of bucket level monitors, queries will always contain one value String aggRuleId = findingRuleIds.iterator().next(); - List findingDocs = (List)finding.get("related_doc_ids"); + List findingDocs = (List) finding.get("related_doc_ids"); - if(aggRuleId.equals(sumRuleId)) { + if 
(aggRuleId.equals(sumRuleId)) { assertTrue(List.of("1", "2", "3").containsAll(findingDocs)); - } else if(aggRuleId.equals(maxRuleId)) { + } else if (aggRuleId.equals(maxRuleId)) { assertTrue(List.of("4", "5", "6", "7").containsAll(findingDocs)); - } else if(aggRuleId.equals( minRuleId)) { + } else if (aggRuleId.equals(minRuleId)) { assertTrue(List.of("7").containsAll(findingDocs)); } } @@ -1008,7 +1004,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule String testOpCode = "Test"; - String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); + String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, @@ -1040,7 +1036,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = (List) detectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -1055,121 +1051,134 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule verifyWorkflow(detectorMap, monitorIds, 2); } - public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws IOException { - String tifdString1 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"abc\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; - String tifdString2 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"xyz\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; - String 
feedIndex = ".opensearch-sap-threatintel"; - indexDoc(feedIndex, "1", tifdString1); - indexDoc(feedIndex, "2", tifdString2); - updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); - String index = createTestIndex(randomIndex(), windowsIndexMapping()); - - // Execute CreateMappingsAction to add alias mapping for index - Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - createMappingRequest.setJsonEntity( - "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"" + randomDetectorType() + "\", " + - " \"partial\":true" + - "}" - ); - - Response createMappingResponse = client().performRequest(createMappingRequest); - - assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); - - String testOpCode = "Test"; - - String randomDocRuleId = createRule(randomRule()); - List detectorRules = List.of(new DetectorRule(randomDocRuleId)); - DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); - Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); - - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); +// public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws IOException { +// +// updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); +// String index = createTestIndex(randomIndex(), windowsIndexMapping()); +// +// // Execute CreateMappingsAction to add alias mapping for index +// Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); +// // both req params and req body are supported +// createMappingRequest.setJsonEntity( +// "{ \"index_name\":\"" + index + "\"," + +// " \"rule_topic\":\"" + randomDetectorType() + "\", " + +// " \"partial\":true" + 
+// "}" +// ); +// +// Response createMappingResponse = client().performRequest(createMappingRequest); +// +// assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); +// +// String testOpCode = "Test"; +// +// String randomDocRuleId = createRule(randomRule()); +// List detectorRules = List.of(new DetectorRule(randomDocRuleId)); +// DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, +// Collections.emptyList()); +// Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); +// +// Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); +// +// String request = "{\n" + +// " \"query\" : {\n" + +// " \"match_all\":{\n" + +// " }\n" + +// " }\n" + +// "}"; +// SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); +// +// assertEquals(2, response.getHits().getTotalHits().value); +// +// assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); +// Map responseBody = asMap(createResponse); +// +// String detectorId = responseBody.get("_id").toString(); +// request = "{\n" + +// " \"query\" : {\n" + +// " \"match\":{\n" + +// " \"_id\": \"" + detectorId + "\"\n" + +// " }\n" + +// " }\n" + +// "}"; +// List hits = executeSearch(Detector.DETECTORS_INDEX, request); +// SearchHit hit = hits.get(0); +// Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); +// List inputArr = (List) detectorMap.get("inputs"); +// +// +// List monitorIds = ((List) (detectorMap).get("monitor_id")); +// assertEquals(1, monitorIds.size()); +// +// assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); +// assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); +// +// // Verify workflow +// 
verifyWorkflow(detectorMap, monitorIds, 1); +// List iocs = getThreatIntelFeedIocs(3); +// for (String ioc : iocs) { +// indexDoc(index, "1", randomDoc(5, 3, "abc")); +// indexDoc(index, "2", randomDoc(5, 3, "xyz")); +// indexDoc(index, "3", randomDoc(5, 3, "klm")); +// } +// String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); +// +// Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); +// +// List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); +// assertEquals(1, monitorRunResults.size()); +// +// Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); +// int noOfSigmaRuleMatches = docLevelQueryResults.size(); +// assertEquals(2, noOfSigmaRuleMatches); +// String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); +// ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); +// assertEquals(docs.size(), 2); +// +// //update threat intel +// String tifdString3 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"klm\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; +// +// indexDoc(feedIndex, "3", tifdString3); +// +// Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); +// +// assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); +// +// Map updateResponseBody = asMap(updateResponse); +// detectorId = updateResponseBody.get("_id").toString(); +// +// indexDoc(index, "4", randomDoc(5, 3, "klm")); +// +// executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); +// +// monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); +// assertEquals(1, monitorRunResults.size()); +// +// 
docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); +// noOfSigmaRuleMatches = docLevelQueryResults.size(); +// assertEquals(2, noOfSigmaRuleMatches); +// threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); +// docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); +// assertEquals(docs.size(), 1); +// } + + private List getThreatIntelFeedIocs(int num) throws IOException { + String request = getMatchAllSearchRequestString(num); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); + } - String request = "{\n" + + private static String getMatchAllSearchRequestString(int num) { + return "{\n" + + "size : " + num + "," + " \"query\" : {\n" + " \"match_all\":{\n" + " }\n" + " }\n" + "}"; - SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); - - assertEquals(2, response.getHits().getTotalHits().value); - - assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); - Map responseBody = asMap(createResponse); - - String detectorId = responseBody.get("_id").toString(); - request = "{\n" + - " \"query\" : {\n" + - " \"match\":{\n" + - " \"_id\": \"" + detectorId + "\"\n" + - " }\n" + - " }\n" + - "}"; - List hits = executeSearch(Detector.DETECTORS_INDEX, request); - SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); - List inputArr = (List) detectorMap.get("inputs"); - - - List monitorIds = ((List) (detectorMap).get("monitor_id")); - assertEquals(1, monitorIds.size()); - - assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); - assertEquals("Number of workflows not correct", 1, 
((List) detectorMap.get("workflow_ids")).size()); - - // Verify workflow - verifyWorkflow(detectorMap, monitorIds, 1); - - indexDoc(index, "1", randomDoc(5, 3, "abc")); - indexDoc(index, "2", randomDoc(5, 3, "xyz")); - indexDoc(index, "3", randomDoc(5, 3, "klm")); - String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); - - Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); - - List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); - assertEquals(1, monitorRunResults.size()); - - Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); - int noOfSigmaRuleMatches = docLevelQueryResults.size(); - assertEquals(2, noOfSigmaRuleMatches); - String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); - ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); - assertEquals(docs.size(),2); - - //update threat intel - String tifdString3 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"klm\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; - - indexDoc(feedIndex, "3", tifdString3); - - Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); - - assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); - - Map updateResponseBody = asMap(updateResponse); - detectorId = updateResponseBody.get("_id").toString(); - - indexDoc(index, "4", randomDoc(5, 3, "klm")); - - executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); - - monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); - assertEquals(1, monitorRunResults.size()); - - docLevelQueryResults = ((List>) ((Map) 
monitorRunResults.get(0).get("input_results")).get("results")).get(0); - noOfSigmaRuleMatches = docLevelQueryResults.size(); - assertEquals(2, noOfSigmaRuleMatches); - threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); - docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); - assertEquals(docs.size(),1); } - public void testCreateDetectorthreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { String tifdString1 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"abc\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; String tifdString2 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"xyz\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; @@ -1226,7 +1235,7 @@ public void testCreateDetectorthreatIntelDisabled_updateDetectorWithThreatIntelE "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = (List) detectorMap.get("inputs"); @@ -1278,7 +1287,7 @@ public void testCreateDetectorthreatIntelDisabled_updateDetectorWithThreatIntelE assertEquals(2, noOfSigmaRuleMatches); String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); - assertEquals(docs.size(),1); + assertEquals(docs.size(), 1); } public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesInTrigger() throws IOException { @@ -1301,7 +1310,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesIn String testOpCode = "Test"; - String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); + String 
maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, @@ -1334,7 +1343,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesIn "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = (List) detectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -1400,7 +1409,7 @@ public void testUpdateDetector_disabledWorkflowUsage_verifyWorkflowNotCreated_su "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List monitorIds = ((List) (detectorMap).get("monitor_id")); assertEquals(1, monitorIds.size()); @@ -1415,7 +1424,7 @@ public void testUpdateDetector_disabledWorkflowUsage_verifyWorkflowNotCreated_su assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); hits = executeSearch(Detector.DETECTORS_INDEX, request); hit = hits.get(0); - detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); // Verify that the workflow for the given detector is not added assertTrue("Workflow created", ((List) detectorMap.get("workflow_ids")).size() == 0); @@ -1443,7 +1452,7 @@ public void testUpdateDetector_removeRule_verifyWorkflowUpdate_success() throws String testOpCode = "Test"; - String maxRuleId = createRule(randomAggregationRule("max", " > 3", 
testOpCode)); + String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); DetectorTrigger t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(randomDocRuleId, maxRuleId), List.of(), List.of(), List.of()); @@ -1477,7 +1486,7 @@ public void testUpdateDetector_removeRule_verifyWorkflowUpdate_success() throws "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = (List) detectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -1492,14 +1501,14 @@ public void testUpdateDetector_removeRule_verifyWorkflowUpdate_success() throws verifyWorkflow(detectorMap, monitorIds, 3); // Update detector - remove one agg rule; Verify workflow - DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), Arrays.asList(new DetectorRule(randomDocRuleId)) , getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), Arrays.asList(new DetectorRule(randomDocRuleId)), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); detector = randomDetectorWithInputs(List.of(newInput)); createResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); assertEquals("Update detector failed", RestStatus.OK, restStatus(createResponse)); hits = executeSearch(Detector.DETECTORS_INDEX, request); hit = hits.get(0); - detectorMap = 
(HashMap)(hit.getSourceAsMap().get("detector")); + detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); inputArr = (List) detectorMap.get("inputs"); assertEquals(1, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -1533,13 +1542,13 @@ public void testUpdateDetector_removeRule_verifyWorkflowUpdate_success() throws assertNotNull(getFindingsBody); assertEquals(1, getFindingsBody.get("total_findings")); - String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + String findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); - List> findings = (List)getFindingsBody.get("findings"); + List> findings = (List) getFindingsBody.get("findings"); assertEquals(1, findings.size()); List findingDocs = (List) findings.get(0).get("related_doc_ids"); @@ -1567,7 +1576,7 @@ public void testCreateDetector_workflowWithDuplicateMonitor_failure() throws IOE String testOpCode = "Test"; - String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); + String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); @@ -1601,7 +1610,7 @@ public void testCreateDetector_workflowWithDuplicateMonitor_failure() throws IOE "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = (List) 
detectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -1636,7 +1645,7 @@ public void testCreateDetector_verifyWorkflowExecutionBucketLevelDocLevelMonitor String testOpCode = "Test"; - String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); + String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); @@ -1671,7 +1680,7 @@ public void testCreateDetector_verifyWorkflowExecutionBucketLevelDocLevelMonitor "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = (List) detectorMap.get("inputs"); assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -1716,21 +1725,21 @@ public void testCreateDetector_verifyWorkflowExecutionBucketLevelDocLevelMonitor assertNotNull(getFindingsBody); assertEquals(6, getFindingsBody.get("total_findings")); - String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + String findingDetectorId = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); assertEquals(detectorId, findingDetectorId); - String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + String findingIndex = ((Map) ((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); assertEquals(index, findingIndex); List docLevelFinding = new ArrayList<>(); - List> findings = (List)getFindingsBody.get("findings"); + List> findings = (List) getFindingsBody.get("findings"); Set docLevelRules = new HashSet<>(List.of(randomDocRuleId)); - for(Map finding : findings) 
{ + for (Map finding : findings) { List> queries = (List>) finding.get("queries"); Set findingRules = queries.stream().map(it -> it.get("id").toString()).collect(Collectors.toSet()); // In this test case all doc level rules are matching the finding rule ids - if(docLevelRules.containsAll(findingRules)) { + if (docLevelRules.containsAll(findingRules)) { docLevelFinding.addAll((List) finding.get("related_doc_ids")); } else { List findingDocs = (List) finding.get("related_doc_ids"); @@ -1764,10 +1773,10 @@ public void testCreateDetector_verifyWorkflowExecutionMultipleBucketLevelDocLeve // 5 custom aggregation rules String sumRuleId = createRule(randomAggregationRule("sum", " > 1", infoOpCode)); - String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); - String minRuleId = createRule(randomAggregationRule("min", " > 3", testOpCode)); - String avgRuleId = createRule(randomAggregationRule("avg", " > 3", infoOpCode)); - String cntRuleId = createRule(randomAggregationRule("count", " > 3", "randomTestCode")); + String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); + String minRuleId = createRule(randomAggregationRule("min", " > 3", testOpCode)); + String avgRuleId = createRule(randomAggregationRule("avg", " > 3", infoOpCode)); + String cntRuleId = createRule(randomAggregationRule("count", " > 3", "randomTestCode")); String randomDocRuleId = createRule(randomRule()); List prepackagedRules = getRandomPrePackagedRules(); @@ -1806,7 +1815,7 @@ public void testCreateDetector_verifyWorkflowExecutionMultipleBucketLevelDocLeve "}"; List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); - Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); List inputArr = (List) detectorMap.get("inputs"); assertEquals(6, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); @@ -1851,19 +1860,19 @@ public 
void testCreateDetector_verifyWorkflowExecutionMultipleBucketLevelDocLeve for (Map runResult : monitorRunResults) { String monitorName = runResult.get("monitor_name").toString(); String monitorId = monitorNameToIdMap.get(monitorName); - if(monitorId.equals(docMonitorId)){ + if (monitorId.equals(docMonitorId)) { int noOfSigmaRuleMatches = ((List>) ((Map) runResult.get("input_results")).get("results")).get(0).size(); // 5 prepackaged and 1 custom doc level rule assertEquals(6, noOfSigmaRuleMatches); - } else if(monitorId.equals(chainedFindingsMonitorId)) { + } else if (monitorId.equals(chainedFindingsMonitorId)) { } else { Map trigger_results = (Map) runResult.get("trigger_results"); if (trigger_results.containsKey(maxRuleId)) { assertRuleMonitorFinding(runResult, maxRuleId, 5, List.of("2", "3")); - } else if( trigger_results.containsKey(sumRuleId)) { + } else if (trigger_results.containsKey(sumRuleId)) { assertRuleMonitorFinding(runResult, sumRuleId, 3, List.of("4")); - } else if( trigger_results.containsKey(minRuleId)) { + } else if (trigger_results.containsKey(minRuleId)) { assertRuleMonitorFinding(runResult, minRuleId, 5, List.of("2")); } } @@ -1881,11 +1890,11 @@ public void testCreateDetector_verifyWorkflowExecutionMultipleBucketLevelDocLeve private static void assertRuleMonitorFinding(Map executeResults, String ruleId, int expectedDocCount, List expectedTriggerResult) { - List> buckets = ((List>)(((Map)((Map)((Map)((List)((Map) executeResults.get("input_results")).get("results")).get(0)).get("aggregations")).get("result_agg")).get("buckets"))); - Integer docCount = buckets.stream().mapToInt(it -> (Integer)it.get("doc_count")).sum(); + List> buckets = ((List>) (((Map) ((Map) ((Map) ((List) ((Map) executeResults.get("input_results")).get("results")).get(0)).get("aggregations")).get("result_agg")).get("buckets"))); + Integer docCount = buckets.stream().mapToInt(it -> (Integer) it.get("doc_count")).sum(); assertEquals(expectedDocCount, docCount.intValue()); - List 
triggerResultBucketKeys = ((Map)((Map) ((Map)executeResults.get("trigger_results")).get(ruleId)).get("agg_result_buckets")).keySet().stream().collect(Collectors.toList()); + List triggerResultBucketKeys = ((Map) ((Map) ((Map) executeResults.get("trigger_results")).get(ruleId)).get("agg_result_buckets")).keySet().stream().collect(Collectors.toList()); Assert.assertEquals(expectedTriggerResult, triggerResultBucketKeys); } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java deleted file mode 100644 index c637b448a..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel; - -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; - -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Locale; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.stream.Collectors; - -import org.junit.After; -import org.junit.Before; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionType; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.metadata.Metadata; -import org.opensearch.cluster.routing.RoutingTable; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Randomness; -import org.opensearch.common.settings.ClusterSettings; -import org.opensearch.common.settings.Settings; -import 
org.opensearch.common.util.concurrent.OpenSearchExecutors; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.action.ActionResponse; -import org.opensearch.ingest.IngestMetadata; -import org.opensearch.ingest.IngestService; -import org.opensearch.jobscheduler.spi.LockModel; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.jobscheduler.spi.utils.LockService; -import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; -import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; -import org.opensearch.securityanalytics.threatIntel.common.TIFExecutor; -import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobTask; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; -import org.opensearch.tasks.Task; -import org.opensearch.tasks.TaskListener; -import org.opensearch.test.client.NoOpNodeClient; -import org.opensearch.test.rest.RestActionTestCase; -import org.opensearch.threadpool.ThreadPool; - -public abstract class ThreatIntelTestCase extends RestActionTestCase { - @Mock - protected ClusterService clusterService; - @Mock - protected TIFJobUpdateService tifJobUpdateService; - @Mock - protected TIFJobParameterService tifJobParameterService; - @Mock - protected TIFExecutor threatIntelExecutor; - @Mock - protected ThreatIntelFeedDataService threatIntelFeedDataService; - @Mock - protected ClusterState clusterState; - @Mock - protected Metadata metadata; - @Mock - protected IngestService ingestService; - @Mock - protected ActionFilters actionFilters; - @Mock - protected ThreadPool threadPool; - @Mock - protected TIFLockService threatIntelLockService; - @Mock - protected RoutingTable 
routingTable; - protected IngestMetadata ingestMetadata; - protected NoOpNodeClient client; - protected VerifyingClient verifyingClient; - protected LockService lockService; - protected ClusterSettings clusterSettings; - protected Settings settings; - private AutoCloseable openMocks; - - @Before - public void prepareThreatIntelTestCase() { - openMocks = MockitoAnnotations.openMocks(this); - settings = Settings.EMPTY; - client = new NoOpNodeClient(this.getTestName()); - verifyingClient = spy(new VerifyingClient(this.getTestName())); - clusterSettings = new ClusterSettings(settings, new HashSet<>(SecurityAnalyticsSettings.settings())); - lockService = new LockService(client, clusterService); - ingestMetadata = new IngestMetadata(Collections.emptyMap()); - when(metadata.custom(IngestMetadata.TYPE)).thenReturn(ingestMetadata); - when(clusterService.getSettings()).thenReturn(Settings.EMPTY); - when(clusterService.getClusterSettings()).thenReturn(clusterSettings); - when(clusterService.state()).thenReturn(clusterState); - when(clusterState.metadata()).thenReturn(metadata); - when(clusterState.getMetadata()).thenReturn(metadata); - when(clusterState.routingTable()).thenReturn(routingTable); - when(ingestService.getClusterService()).thenReturn(clusterService); - when(threadPool.generic()).thenReturn(OpenSearchExecutors.newDirectExecutorService()); - } - - @After - public void clean() throws Exception { - openMocks.close(); - client.close(); - verifyingClient.close(); - } - - protected TIFJobState randomStateExcept(TIFJobState state) { - assertNotNull(state); - return Arrays.stream(TIFJobState.values()) - .sequential() - .filter(s -> !s.equals(state)) - .collect(Collectors.toList()) - .get(Randomness.createSecure().nextInt(TIFJobState.values().length - 2)); - } - - protected TIFJobState randomState() { - return Arrays.stream(TIFJobState.values()) - .sequential() - .collect(Collectors.toList()) - .get(Randomness.createSecure().nextInt(TIFJobState.values().length - 1)); - } - 
- protected TIFJobTask randomTask() { - return Arrays.stream(TIFJobTask.values()) - .sequential() - .collect(Collectors.toList()) - .get(Randomness.createSecure().nextInt(TIFJobTask.values().length - 1)); - } - - protected String randomIpAddress() { - return String.format( - Locale.ROOT, - "%d.%d.%d.%d", - Randomness.get().nextInt(255), - Randomness.get().nextInt(255), - Randomness.get().nextInt(255), - Randomness.get().nextInt(255) - ); - } - - protected long randomPositiveLong() { - long value = Randomness.get().nextLong(); - return value < 0 ? -value : value; - } - - /** - * Update interval should be > 0 and < validForInDays. - * For an update test to work, there should be at least one eligible value other than current update interval. - * Therefore, the smallest value for validForInDays is 2. - * Update interval is random value from 1 to validForInDays - 2. - * The new update value will be validForInDays - 1. - */ - protected TIFJobParameter randomTifJobParameter(final Instant updateStartTime) { - Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); - TIFJobParameter tifJobParameter = new TIFJobParameter(); - tifJobParameter.setName(ThreatIntelTestHelper.randomLowerCaseString()); - tifJobParameter.setSchedule( - new IntervalSchedule( - updateStartTime.truncatedTo(ChronoUnit.MILLIS), - 1, - ChronoUnit.DAYS - ) - ); - tifJobParameter.setTask(randomTask()); - tifJobParameter.setState(randomState()); - tifJobParameter.setCurrentIndex(tifJobParameter.newIndexName(UUID.randomUUID().toString())); - tifJobParameter.setIndices(Arrays.asList(ThreatIntelTestHelper.randomLowerCaseString(), ThreatIntelTestHelper.randomLowerCaseString())); - tifJobParameter.getUpdateStats().setLastSkippedAt(now); - tifJobParameter.getUpdateStats().setLastSucceededAt(now); - tifJobParameter.getUpdateStats().setLastFailedAt(now); - tifJobParameter.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); - tifJobParameter.setLastUpdateTime(now); - if 
(Randomness.get().nextInt() % 2 == 0) { - tifJobParameter.enable(); - } else { - tifJobParameter.disable(); - } - return tifJobParameter; - } - - protected TIFJobParameter randomTifJobParameter() { - return randomTifJobParameter(Instant.now()); - } - - protected LockModel randomLockModel() { - LockModel lockModel = new LockModel( - ThreatIntelTestHelper.randomLowerCaseString(), - ThreatIntelTestHelper.randomLowerCaseString(), - Instant.now(), - randomPositiveLong(), - false - ); - return lockModel; - } - - /** - * Temporary class of VerifyingClient until this PR(https://github.com/opensearch-project/OpenSearch/pull/7167) - * is merged in OpenSearch core - */ - public static class VerifyingClient extends NoOpNodeClient { - AtomicReference executeVerifier = new AtomicReference<>(); - AtomicReference executeLocallyVerifier = new AtomicReference<>(); - - public VerifyingClient(String testName) { - super(testName); - reset(); - } - - /** - * Clears any previously set verifier functions set by {@link #setExecuteVerifier(BiFunction)} and/or - * {@link #setExecuteLocallyVerifier(BiFunction)}. These functions are replaced with functions which will throw an - * {@link AssertionError} if called. - */ - public void reset() { - executeVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); - executeLocallyVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); - } - - /** - * Sets the function that will be called when {@link #doExecute(ActionType, ActionRequest, ActionListener)} is called. The given - * function should return either a subclass of {@link ActionResponse} or {@code null}. 
- * @param verifier A function which is called in place of {@link #doExecute(ActionType, ActionRequest, ActionListener)} - */ - public void setExecuteVerifier( - BiFunction, Request, Response> verifier - ) { - executeVerifier.set(verifier); - } - - @Override - public void doExecute( - ActionType action, - Request request, - ActionListener listener - ) { - try { - listener.onResponse((Response) executeVerifier.get().apply(action, request)); - } catch (Exception e) { - listener.onFailure(e); - } - } - - /** - * Sets the function that will be called when {@link #executeLocally(ActionType, ActionRequest, TaskListener)}is called. The given - * function should return either a subclass of {@link ActionResponse} or {@code null}. - * @param verifier A function which is called in place of {@link #executeLocally(ActionType, ActionRequest, TaskListener)} - */ - public void setExecuteLocallyVerifier( - BiFunction, Request, Response> verifier - ) { - executeLocallyVerifier.set(verifier); - } - - @Override - public Task executeLocally( - ActionType action, - Request request, - ActionListener listener - ) { - listener.onResponse((Response) executeLocallyVerifier.get().apply(action, request)); - return null; - } - - @Override - public Task executeLocally( - ActionType action, - Request request, - TaskListener listener - ) { - listener.onResponse(null, (Response) executeLocallyVerifier.get().apply(action, request)); - return null; - } - - } -} - diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java deleted file mode 100644 index 73522053f..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestHelper.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible 
open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.securityanalytics.threatIntel; - -import static org.apache.lucene.tests.util.LuceneTestCase.random; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.opensearch.test.OpenSearchTestCase.randomBoolean; -import static org.opensearch.test.OpenSearchTestCase.randomIntBetween; -import static org.opensearch.test.OpenSearchTestCase.randomNonNegativeLong; - -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; -import java.util.stream.IntStream; - - -import org.opensearch.OpenSearchException; -import org.opensearch.action.DocWriteRequest; -import org.opensearch.action.bulk.BulkItemResponse; -import org.opensearch.action.bulk.BulkResponse; -import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.support.replication.ReplicationResponse; -import org.opensearch.common.Randomness; -import org.opensearch.common.UUIDs; -import org.opensearch.common.collect.Tuple; -import org.opensearch.core.index.shard.ShardId; - -import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.test.RandomObjects; - -public class ThreatIntelTestHelper { - - public static final int MAX_SEQ_NO = 10000; - public static final int MAX_PRIMARY_TERM = 10000; - public static final int MAX_VERSION = 10000; - public static final int MAX_SHARD_ID = 100; - - public static final int RANDOM_STRING_MIN_LENGTH = 2; - public static final int RANDOM_STRING_MAX_LENGTH = 16; - - private static String randomString() { - return OpenSearchTestCase.randomAlphaOfLengthBetween(RANDOM_STRING_MIN_LENGTH, RANDOM_STRING_MAX_LENGTH); - } - - public static String randomLowerCaseString() { - return randomString().toLowerCase(Locale.ROOT); - } - - public static List randomLowerCaseStringList() { - List stringList = new ArrayList<>(); - stringList.add(randomLowerCaseString()); 
- return stringList; - } - - /** - * Returns random {@link IndexResponse} by generating inputs using random functions. - * It is not guaranteed to generate every possible values, and it is not required since - * it is used by the unit test and will not be validated by the cluster. - */ - private static IndexResponse randomIndexResponse() { - String index = randomLowerCaseString(); - String indexUUid = UUIDs.randomBase64UUID(); - int shardId = randomIntBetween(0, MAX_SHARD_ID); - String id = UUIDs.randomBase64UUID(); - long seqNo = randomIntBetween(0, MAX_SEQ_NO); - long primaryTerm = randomIntBetween(0, MAX_PRIMARY_TERM); - long version = randomIntBetween(0, MAX_VERSION); - boolean created = randomBoolean(); - boolean forcedRefresh = randomBoolean(); - Tuple shardInfo = RandomObjects.randomShardInfo(random()); - IndexResponse actual = new IndexResponse(new ShardId(index, indexUUid, shardId), id, seqNo, primaryTerm, version, created); - actual.setForcedRefresh(forcedRefresh); - actual.setShardInfo(shardInfo.v1()); - - return actual; - } - - // Generate Random Bulk Response with noOfSuccessItems as BulkItemResponse, and include BulkItemResponse.Failure with - // random error message, if hasFailures is true. 
- public static BulkResponse generateRandomBulkResponse(int noOfSuccessItems, boolean hasFailures) { - long took = randomNonNegativeLong(); - long ingestTook = randomNonNegativeLong(); - if (noOfSuccessItems < 1) { - return new BulkResponse(null, took, ingestTook); - } - List items = new ArrayList<>(); - IntStream.range(0, noOfSuccessItems) - .forEach(shardId -> items.add(new BulkItemResponse(shardId, DocWriteRequest.OpType.CREATE, randomIndexResponse()))); - if (hasFailures) { - final BulkItemResponse.Failure failedToIndex = new BulkItemResponse.Failure( - randomLowerCaseString(), - randomLowerCaseString(), - new OpenSearchException(randomLowerCaseString()) - ); - items.add(new BulkItemResponse(randomIntBetween(0, MAX_SHARD_ID), DocWriteRequest.OpType.CREATE, failedToIndex)); - } - return new BulkResponse(items.toArray(BulkItemResponse[]::new), took, ingestTook); - } - - public static StringBuilder buildFieldNameValuePair(Object field, Object value) { - StringBuilder builder = new StringBuilder(); - builder.append("\"").append(field).append("\":"); - if (!(value instanceof String)) { - return builder.append(value); - } - return builder.append("\"").append(value).append("\""); - } - -} - diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java deleted file mode 100644 index fc229c2e8..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadataTests.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ -package org.opensearch.securityanalytics.threatIntel.common; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.net.URLConnection; - -import 
org.opensearch.common.SuppressForbidden; -import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; - -@SuppressForbidden(reason = "unit test") -public class TIFMetadataTests extends SecurityAnalyticsRestTestCase { - - public void testInternalBuild_whenCalled_thenCorrectUserAgentValueIsSet() throws IOException { - URLConnection connection = mock(URLConnection.class); - File manifestFile = new File(this.getClass().getClassLoader().getResource("threatIntel/manifest.json").getFile()); - when(connection.getInputStream()).thenReturn(new FileInputStream(manifestFile)); - - // Run - TIFMetadata manifest = TIFMetadata.Builder.internalBuild(connection); - - // Verify - verify(connection).addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); - assertEquals("https://test.com/db.zip", manifest.getUrl()); - } -} - diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java deleted file mode 100644 index d9390af7a..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.common; - -import static org.mockito.Mockito.mock; -import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.LOCK_DURATION_IN_SECONDS; -import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.RENEW_AFTER_IN_SECONDS; - -import java.time.Instant; -import java.util.concurrent.atomic.AtomicReference; - -import org.junit.Before; -import org.opensearch.action.DocWriteResponse; -import org.opensearch.action.update.UpdateRequest; -import org.opensearch.action.update.UpdateResponse; -import org.opensearch.core.action.ActionListener; -import 
org.opensearch.core.index.shard.ShardId; -import org.opensearch.jobscheduler.spi.LockModel; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; - -public class ThreatIntelLockServiceTests extends ThreatIntelTestCase { - private TIFLockService threatIntelLockService; - private TIFLockService noOpsLockService; - - @Before - public void init() { - threatIntelLockService = new TIFLockService(clusterService, verifyingClient); - noOpsLockService = new TIFLockService(clusterService, client); - } - - public void testAcquireLock_whenValidInput_thenSucceed() { - // Cannot test because LockService is final class - // Simply calling method to increase coverage - noOpsLockService.acquireLock(ThreatIntelTestHelper.randomLowerCaseString(), randomPositiveLong(), mock(ActionListener.class)); - } - - public void testAcquireLock_whenCalled_thenNotBlocked() { - long expectedDurationInMillis = 1000; - Instant before = Instant.now(); - assertTrue(threatIntelLockService.acquireLock(null, null).isEmpty()); - Instant after = Instant.now(); - assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); - } - - public void testReleaseLock_whenValidInput_thenSucceed() { - // Cannot test because LockService is final class - // Simply calling method to increase coverage - noOpsLockService.releaseLock(null); - } - - public void testRenewLock_whenCalled_thenNotBlocked() { - long expectedDurationInMillis = 1000; - Instant before = Instant.now(); - assertNull(threatIntelLockService.renewLock(null)); - Instant after = Instant.now(); - assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); - } - - public void testGetRenewLockRunnable_whenLockIsFresh_thenDoNotRenew() { - LockModel lockModel = new LockModel( - ThreatIntelTestHelper.randomLowerCaseString(), - ThreatIntelTestHelper.randomLowerCaseString(), - Instant.now(), - LOCK_DURATION_IN_SECONDS, - 
false - ); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verifying - assertTrue(actionRequest instanceof UpdateRequest); - return new UpdateResponse( - mock(ShardId.class), - ThreatIntelTestHelper.randomLowerCaseString(), - randomPositiveLong(), - randomPositiveLong(), - randomPositiveLong(), - DocWriteResponse.Result.UPDATED - ); - }); - - AtomicReference reference = new AtomicReference<>(lockModel); - threatIntelLockService.getRenewLockRunnable(reference).run(); - assertEquals(lockModel, reference.get()); - } - - public void testGetRenewLockRunnable_whenLockIsStale_thenRenew() { - LockModel lockModel = new LockModel( - ThreatIntelTestHelper.randomLowerCaseString(), - ThreatIntelTestHelper.randomLowerCaseString(), - Instant.now().minusSeconds(RENEW_AFTER_IN_SECONDS), - LOCK_DURATION_IN_SECONDS, - false - ); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verifying - assertTrue(actionRequest instanceof UpdateRequest); - return new UpdateResponse( - mock(ShardId.class), - ThreatIntelTestHelper.randomLowerCaseString(), - randomPositiveLong(), - randomPositiveLong(), - randomPositiveLong(), - DocWriteResponse.Result.UPDATED - ); - }); - - AtomicReference reference = new AtomicReference<>(lockModel); - threatIntelLockService.getRenewLockRunnable(reference).run(); - assertNotEquals(lockModel, reference.get()); - } -} - diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java deleted file mode 100644 index ab8520286..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import static 
org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; - -import java.time.Instant; -import java.time.temporal.ChronoUnit; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.jobscheduler.spi.JobDocVersion; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; -public class TIFJobExtensionTests extends ThreatIntelTestCase { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - public void testBasic() { - TIFJobExtension extension = new TIFJobExtension(); - assertEquals("scheduler_sap_threatintel_job", extension.getJobType()); - assertEquals(JOB_INDEX_NAME, extension.getJobIndex()); - assertEquals(TIFJobRunner.getJobRunnerInstance(), extension.getJobRunner()); - } - - public void testParser() throws Exception { - TIFJobExtension extension = new TIFJobExtension(); - String id = ThreatIntelTestHelper.randomLowerCaseString(); - IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); - TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); - - TIFJobParameter anotherTifJobParameter = (TIFJobParameter) extension.getJobParser() - .parse( - createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), - ThreatIntelTestHelper.randomLowerCaseString(), - new JobDocVersion(randomPositiveLong(), randomPositiveLong(), randomPositiveLong()) - ); - log.info("first"); - log.error(tifJobParameter); - log.error(tifJobParameter.getName()); - log.error(tifJobParameter.getCurrentIndex()); - log.info("second"); - log.error(anotherTifJobParameter); - log.error(anotherTifJobParameter.getName()); - 
log.error(anotherTifJobParameter.getCurrentIndex()); - - //same values but technically diff indices - - assertTrue(tifJobParameter.equals(anotherTifJobParameter)); - } -} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java deleted file mode 100644 index 148d16e93..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java +++ /dev/null @@ -1,385 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.IOException; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.Arrays; -import java.util.List; - -import org.apache.lucene.search.TotalHits; -import org.junit.Before; -import org.mockito.ArgumentCaptor; -import org.opensearch.ResourceAlreadyExistsException; -import org.opensearch.ResourceNotFoundException; -import org.opensearch.action.DocWriteRequest; -import org.opensearch.action.StepListener; -import org.opensearch.action.admin.indices.create.CreateIndexRequest; -import org.opensearch.action.bulk.BulkRequest; -import org.opensearch.action.delete.DeleteRequest; -import org.opensearch.action.delete.DeleteResponse; -import org.opensearch.action.get.GetRequest; -import org.opensearch.action.get.GetResponse; -import org.opensearch.action.get.MultiGetItemResponse; -import org.opensearch.action.get.MultiGetRequest; -import org.opensearch.action.get.MultiGetResponse; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.search.SearchRequest; -import 
org.opensearch.action.search.SearchResponse; -import org.opensearch.action.support.WriteRequest; -import org.opensearch.cluster.routing.Preference; -import org.opensearch.common.Randomness; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.search.SearchHit; -import org.opensearch.search.SearchHits; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; - -public class TIFJobParameterServiceTests extends ThreatIntelTestCase { - private TIFJobParameterService tifJobParameterService; - - @Before - public void init() { - tifJobParameterService = new TIFJobParameterService(verifyingClient, clusterService); - } - - public void testCreateIndexIfNotExists_whenIndexExist_thenCreateRequestIsNotCalled() { - when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(true); - - // Verify - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException("Shouldn't get called"); }); - - // Run - StepListener stepListener = new StepListener<>(); - tifJobParameterService.createIndexIfNotExists(stepListener); - - // Verify stepListener is called - stepListener.result(); - } - - public void testCreateIndexIfNotExists_whenIndexExist_thenCreateRequestIsCalled() { - when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); - - // Verify - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - assertTrue(actionRequest instanceof CreateIndexRequest); - CreateIndexRequest request = (CreateIndexRequest) actionRequest; - assertEquals(TIFJobExtension.JOB_INDEX_NAME, 
request.index()); - assertEquals("1", request.settings().get("index.number_of_shards")); - assertEquals("0-all", request.settings().get("index.auto_expand_replicas")); - assertEquals("true", request.settings().get("index.hidden")); - assertNotNull(request.mappings()); - return null; - }); - - // Run - StepListener stepListener = new StepListener<>(); - tifJobParameterService.createIndexIfNotExists(stepListener); - - // Verify stepListener is called - stepListener.result(); - } - - public void testCreateIndexIfNotExists_whenIndexCreatedAlready_thenExceptionIsIgnored() { - when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); - verifyingClient.setExecuteVerifier( - (actionResponse, actionRequest) -> { throw new ResourceAlreadyExistsException(TIFJobExtension.JOB_INDEX_NAME); } - ); - - // Run - StepListener stepListener = new StepListener<>(); - tifJobParameterService.createIndexIfNotExists(stepListener); - - // Verify stepListener is called - stepListener.result(); - } - - public void testCreateIndexIfNotExists_whenExceptionIsThrown_thenExceptionIsThrown() { - when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException(); }); - - // Run - StepListener stepListener = new StepListener<>(); - tifJobParameterService.createIndexIfNotExists(stepListener); - - // Verify stepListener is called - expectThrows(RuntimeException.class, () -> stepListener.result()); - } - - public void testUpdateTIFJobParameter_whenValidInput_thenSucceed() throws Exception { - String tifJobName = ThreatIntelTestHelper.randomLowerCaseString(); - TIFJobParameter tifJobParameter = new TIFJobParameter( - tifJobName, - new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS) - ); - Instant previousTime = Instant.now().minusMillis(1); - tifJobParameter.setLastUpdateTime(previousTime); - - 
verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - assertTrue(actionRequest instanceof IndexRequest); - IndexRequest request = (IndexRequest) actionRequest; - assertEquals(tifJobParameter.getName(), request.id()); - assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); - assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); - return null; - }); - - tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); - assertTrue(previousTime.isBefore(tifJobParameter.getLastUpdateTime())); - } - - public void testPutTifJobParameter_whenValidInput_thenSucceed() { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - Instant previousTime = Instant.now().minusMillis(1); - tifJobParameter.setLastUpdateTime(previousTime); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - assertTrue(actionRequest instanceof IndexRequest); - IndexRequest indexRequest = (IndexRequest) actionRequest; - assertEquals(TIFJobExtension.JOB_INDEX_NAME, indexRequest.index()); - assertEquals(tifJobParameter.getName(), indexRequest.id()); - assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, indexRequest.getRefreshPolicy()); - assertEquals(DocWriteRequest.OpType.CREATE, indexRequest.opType()); - return null; - }); - - tifJobParameterService.putTIFJobParameter(tifJobParameter, mock(ActionListener.class)); - assertTrue(previousTime.isBefore(tifJobParameter.getLastUpdateTime())); - } - - public void testGetTifJobParameter_whenException_thenNull() throws Exception { - TIFJobParameter tifJobParameter = setupClientForGetRequest(true, new IndexNotFoundException(TIFJobExtension.JOB_INDEX_NAME)); - assertNull(tifJobParameterService.getJobParameter(tifJobParameter.getName())); - } - - public void testGetTifJobParameter_whenExist_thenReturnTifJobParameter() throws Exception { - TIFJobParameter tifJobParameter = setupClientForGetRequest(true, null); - 
assertEquals(tifJobParameter, tifJobParameterService.getJobParameter(tifJobParameter.getName())); - } - - public void testGetTifJobParameter_whenNotExist_thenNull() throws Exception { - TIFJobParameter tifJobParameter = setupClientForGetRequest(false, null); - assertNull(tifJobParameterService.getJobParameter(tifJobParameter.getName())); - } - - public void testGetTifJobParameter_whenExistWithListener_thenListenerIsCalledWithTifJobParameter() { - TIFJobParameter tifJobParameter = setupClientForGetRequest(true, null); - ActionListener listener = mock(ActionListener.class); - tifJobParameterService.getJobParameter(tifJobParameter.getName(), listener); - verify(listener).onResponse(eq(tifJobParameter)); - } - - public void testGetTifJobParameter_whenNotExistWithListener_thenListenerIsCalledWithNull() { - TIFJobParameter tifJobParameter = setupClientForGetRequest(false, null); - ActionListener listener = mock(ActionListener.class); - tifJobParameterService.getJobParameter(tifJobParameter.getName(), listener); - verify(listener).onResponse(null); - } - - private TIFJobParameter setupClientForGetRequest(final boolean isExist, final RuntimeException exception) { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - assertTrue(actionRequest instanceof GetRequest); - GetRequest request = (GetRequest) actionRequest; - assertEquals(tifJobParameter.getName(), request.id()); - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); - GetResponse response = getMockedGetResponse(isExist ? 
tifJobParameter : null); - if (exception != null) { - throw exception; - } - return response; - }); - return tifJobParameter; - } - - public void testDeleteTifJobParameter_whenValidInput_thenSucceed() { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verify - assertTrue(actionRequest instanceof DeleteRequest); - DeleteRequest request = (DeleteRequest) actionRequest; - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); - assertEquals(DocWriteRequest.OpType.DELETE, request.opType()); - assertEquals(tifJobParameter.getName(), request.id()); - assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); - - DeleteResponse response = mock(DeleteResponse.class); - when(response.status()).thenReturn(RestStatus.OK); - return response; - }); - - // Run - tifJobParameterService.deleteTIFJobParameter(tifJobParameter); - } - - public void testDeleteTifJobParameter_whenIndexNotFound_thenThrowException() { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - DeleteResponse response = mock(DeleteResponse.class); - when(response.status()).thenReturn(RestStatus.NOT_FOUND); - return response; - }); - - // Run - expectThrows(ResourceNotFoundException.class, () -> tifJobParameterService.deleteTIFJobParameter(tifJobParameter)); - } - - public void testGetTifJobParameter_whenValidInput_thenSucceed() { - List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); - String[] names = tifJobParameters.stream().map(TIFJobParameter::getName).toArray(String[]::new); - ActionListener> listener = mock(ActionListener.class); - MultiGetItemResponse[] multiGetItemResponses = tifJobParameters.stream().map(tifJobParameter -> { - GetResponse getResponse = getMockedGetResponse(tifJobParameter); - MultiGetItemResponse multiGetItemResponse = 
mock(MultiGetItemResponse.class); - when(multiGetItemResponse.getResponse()).thenReturn(getResponse); - return multiGetItemResponse; - }).toArray(MultiGetItemResponse[]::new); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verify - assertTrue(actionRequest instanceof MultiGetRequest); - MultiGetRequest request = (MultiGetRequest) actionRequest; - assertEquals(2, request.getItems().size()); - for (MultiGetRequest.Item item : request.getItems()) { - assertEquals(TIFJobExtension.JOB_INDEX_NAME, item.index()); - assertTrue(tifJobParameters.stream().filter(tifJobParameter -> tifJobParameter.getName().equals(item.id())).findAny().isPresent()); - } - - MultiGetResponse response = mock(MultiGetResponse.class); - when(response.getResponses()).thenReturn(multiGetItemResponses); - return response; - }); - - // Run - tifJobParameterService.getTIFJobParameters(names, listener); - - // Verify - ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); - verify(listener).onResponse(captor.capture()); - assertEquals(tifJobParameters, captor.getValue()); - - } - - public void testGetAllTifJobParameter_whenAsynchronous_thenSuccee() { - List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); - ActionListener> listener = mock(ActionListener.class); - SearchHits searchHits = getMockedSearchHits(tifJobParameters); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verify - assertTrue(actionRequest instanceof SearchRequest); - SearchRequest request = (SearchRequest) actionRequest; - assertEquals(1, request.indices().length); - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.indices()[0]); - assertEquals(QueryBuilders.matchAllQuery(), request.source().query()); - assertEquals(1000, request.source().size()); - assertEquals(Preference.PRIMARY.type(), request.preference()); - - SearchResponse response = mock(SearchResponse.class); - when(response.getHits()).thenReturn(searchHits); - 
return response; - }); - - // Run - tifJobParameterService.getAllTIFJobParameters(listener); - - // Verify - ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); - verify(listener).onResponse(captor.capture()); - assertEquals(tifJobParameters, captor.getValue()); - } - - public void testGetAllTifJobParameter_whenSynchronous_thenSucceed() { - List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); - SearchHits searchHits = getMockedSearchHits(tifJobParameters); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verify - assertTrue(actionRequest instanceof SearchRequest); - SearchRequest request = (SearchRequest) actionRequest; - assertEquals(1, request.indices().length); - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.indices()[0]); - assertEquals(QueryBuilders.matchAllQuery(), request.source().query()); - assertEquals(1000, request.source().size()); - assertEquals(Preference.PRIMARY.type(), request.preference()); - - SearchResponse response = mock(SearchResponse.class); - when(response.getHits()).thenReturn(searchHits); - return response; - }); - - // Run - tifJobParameterService.getAllTIFJobParameters(); - - // Verify - assertEquals(tifJobParameters, tifJobParameterService.getAllTIFJobParameters()); - } - - public void testUpdateTifJobParameter_whenValidInput_thenUpdate() { - List tifJobParameters = Arrays.asList(randomTifJobParameter(), randomTifJobParameter()); - - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verify - assertTrue(actionRequest instanceof BulkRequest); - BulkRequest bulkRequest = (BulkRequest) actionRequest; - assertEquals(2, bulkRequest.requests().size()); - for (int i = 0; i < bulkRequest.requests().size(); i++) { - IndexRequest request = (IndexRequest) bulkRequest.requests().get(i); - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); - assertEquals(tifJobParameters.get(i).getName(), request.id()); - 
assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); - } - return null; - }); - - tifJobParameterService.updateJobSchedulerParameter(tifJobParameters, mock(ActionListener.class)); - } - - private SearchHits getMockedSearchHits(List tifJobParameters) { - SearchHit[] searchHitArray = tifJobParameters.stream().map(this::toBytesReference).map(this::toSearchHit).toArray(SearchHit[]::new); - - return new SearchHits(searchHitArray, new TotalHits(1l, TotalHits.Relation.EQUAL_TO), 1); - } - - private GetResponse getMockedGetResponse(TIFJobParameter tifJobParameter) { - GetResponse response = mock(GetResponse.class); - when(response.isExists()).thenReturn(tifJobParameter != null); - when(response.getSourceAsBytesRef()).thenReturn(toBytesReference(tifJobParameter)); - return response; - } - - private BytesReference toBytesReference(TIFJobParameter tifJobParameter) { - if (tifJobParameter == null) { - return null; - } - - try { - return BytesReference.bytes(tifJobParameter.toXContent(JsonXContent.contentBuilder(), null)); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private SearchHit toSearchHit(BytesReference bytesReference) { - SearchHit searchHit = new SearchHit(Randomness.get().nextInt()); - searchHit.sourceRef(bytesReference); - return searchHit; - } -} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java deleted file mode 100644 index 90a67f74b..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; - -import 
java.io.IOException; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; - -public class TIFJobParameterTests extends ThreatIntelTestCase { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - public void testParser_whenAllValueIsFilled_thenSucceed() throws IOException { // TODO: same issue - String id = ThreatIntelTestHelper.randomLowerCaseString(); - IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); - TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); - tifJobParameter.enable(); - tifJobParameter.setCurrentIndex(ThreatIntelTestHelper.randomLowerCaseString()); - tifJobParameter.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); - tifJobParameter.getUpdateStats().setLastSucceededAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); - tifJobParameter.getUpdateStats().setLastSkippedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); - tifJobParameter.getUpdateStats().setLastFailedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); - - TIFJobParameter anotherTIFJobParameter = TIFJobParameter.PARSER.parse( - createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), - null - ); - - log.info("first"); - log.error(tifJobParameter); - log.error(tifJobParameter.getName()); - log.error(tifJobParameter.getCurrentIndex()); - log.info("second"); - log.error(anotherTIFJobParameter); - 
log.error(anotherTIFJobParameter.getName()); - log.error(anotherTIFJobParameter.getCurrentIndex()); - - assertTrue(tifJobParameter.equals(anotherTIFJobParameter)); - } - - public void testParser_whenNullForOptionalFields_thenSucceed() throws IOException { // TODO: same issue - String id = ThreatIntelTestHelper.randomLowerCaseString(); - IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); - TIFJobParameter datasource = new TIFJobParameter(id, schedule); - TIFJobParameter anotherDatasource = TIFJobParameter.PARSER.parse( - createParser(datasource.toXContent(XContentFactory.jsonBuilder(), null)), - null - ); - assertTrue(datasource.equals(anotherDatasource)); - } - - public void testCurrentIndexName_whenNotExpired_thenReturnName() { - String id = ThreatIntelTestHelper.randomLowerCaseString(); - TIFJobParameter datasource = new TIFJobParameter(); - datasource.setName(id); - datasource.setCurrentIndex(datasource.newIndexName(ThreatIntelTestHelper.randomLowerCaseString())); - - assertNotNull(datasource.currentIndexName()); - } - - public void testNewIndexName_whenCalled_thenReturnedExpectedValue() { - String name = ThreatIntelTestHelper.randomLowerCaseString(); - String suffix = ThreatIntelTestHelper.randomLowerCaseString(); - TIFJobParameter datasource = new TIFJobParameter(); - datasource.setName(name); - assertEquals(String.format(Locale.ROOT, "%s.%s.%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, name, suffix), datasource.newIndexName(suffix)); - } - - public void testLockDurationSeconds() { - TIFJobParameter datasource = new TIFJobParameter(); - assertNotNull(datasource.getLockDurationSeconds()); - } -} - diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java deleted file mode 100644 index e30f2ecfc..000000000 --- 
a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java +++ /dev/null @@ -1,177 +0,0 @@ - -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.mockito.internal.verification.VerificationModeFactory.times; - -import java.io.IOException; -import java.time.Instant; -import java.util.Optional; - -import org.junit.Before; - -import org.opensearch.jobscheduler.spi.JobDocVersion; -import org.opensearch.jobscheduler.spi.JobExecutionContext; -import org.opensearch.jobscheduler.spi.LockModel; -import org.opensearch.jobscheduler.spi.ScheduledJobParameter; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; -import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; -import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; - -public class TIFJobRunnerTests extends ThreatIntelTestCase { - @Before - public void init() { - TIFJobRunner.getJobRunnerInstance() - .initialize(clusterService, tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService, threadPool); - } - - public void testGetJobRunnerInstance_whenCalledAgain_thenReturnSameInstance() { - assertTrue(TIFJobRunner.getJobRunnerInstance() == TIFJobRunner.getJobRunnerInstance()); - } - - public void testRunJob_whenInvalidClass_thenThrowException() { - JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); - String jobIndexName = ThreatIntelTestHelper.randomLowerCaseString(); - String jobId = 
ThreatIntelTestHelper.randomLowerCaseString(); - JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); - ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); - - // Run - expectThrows(IllegalStateException.class, () -> TIFJobRunner.getJobRunnerInstance().runJob(jobParameter, jobExecutionContext)); - } - - public void testRunJob_whenValidInput_thenSucceed() throws IOException { - JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); - String jobIndexName = ThreatIntelTestHelper.randomLowerCaseString(); - String jobId = ThreatIntelTestHelper.randomLowerCaseString(); - JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); - TIFJobParameter tifJobParameter = randomTifJobParameter(); - - LockModel lockModel = randomLockModel(); - when(threatIntelLockService.acquireLock(tifJobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( - Optional.of(lockModel) - ); - - // Run - TIFJobRunner.getJobRunnerInstance().runJob(tifJobParameter, jobExecutionContext); - - // Verify - verify(threatIntelLockService).acquireLock(tifJobParameter.getName(), threatIntelLockService.LOCK_DURATION_IN_SECONDS); - verify(tifJobParameterService).getJobParameter(tifJobParameter.getName()); - verify(threatIntelLockService).releaseLock(lockModel); - } - - public void testUpdateDatasourceRunner_whenExceptionBeforeAcquiringLock_thenNoReleaseLock() { - ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); - when(jobParameter.getName()).thenReturn(ThreatIntelTestHelper.randomLowerCaseString()); - when(threatIntelLockService.acquireLock(jobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenThrow( - new RuntimeException() - ); - - // Run - expectThrows(Exception.class, () -> TIFJobRunner.getJobRunnerInstance().updateJobRunner(jobParameter).run()); - - 
// Verify - verify(threatIntelLockService, never()).releaseLock(any()); - } - - public void testUpdateDatasourceRunner_whenExceptionAfterAcquiringLock_thenReleaseLock() throws IOException { - ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); - when(jobParameter.getName()).thenReturn(ThreatIntelTestHelper.randomLowerCaseString()); - LockModel lockModel = randomLockModel(); - when(threatIntelLockService.acquireLock(jobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( - Optional.of(lockModel) - ); - when(tifJobParameterService.getJobParameter(jobParameter.getName())).thenThrow(new RuntimeException()); - - // Run - TIFJobRunner.getJobRunnerInstance().updateJobRunner(jobParameter).run(); - - // Verify - verify(threatIntelLockService).releaseLock(any()); - } - - public void testUpdateDatasource_whenDatasourceDoesNotExist_thenDoNothing() throws IOException { - TIFJobParameter datasource = new TIFJobParameter(); - - // Run - TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, mock(Runnable.class)); - - // Verify - verify(tifJobUpdateService, never()).deleteAllTifdIndices(any()); - } - - public void testUpdateDatasource_whenInvalidState_thenUpdateLastFailedAt() throws IOException { - TIFJobParameter datasource = new TIFJobParameter(); - datasource.enable(); - datasource.getUpdateStats().setLastFailedAt(null); - datasource.setState(randomStateExcept(TIFJobState.AVAILABLE)); - when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); - - // Run - TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, mock(Runnable.class)); - - // Verify - assertFalse(datasource.isEnabled()); - assertNotNull(datasource.getUpdateStats().getLastFailedAt()); - verify(tifJobParameterService).updateJobSchedulerParameter(datasource); - } - - public void testUpdateDatasource_whenValidInput_thenSucceed() throws IOException { - TIFJobParameter datasource = randomTifJobParameter(); - 
datasource.setState(TIFJobState.AVAILABLE); - when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); - Runnable renewLock = mock(Runnable.class); - - // Run - TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, renewLock); - - // Verify - verify(tifJobUpdateService, times(1)).deleteAllTifdIndices(datasource); - verify(tifJobUpdateService).createThreatIntelFeedData(datasource, renewLock); - verify(tifJobUpdateService).updateJobSchedulerParameter(datasource, datasource.getSchedule(), TIFJobTask.ALL); - } - - public void testUpdateDatasource_whenDeleteTask_thenDeleteOnly() throws IOException { - TIFJobParameter datasource = randomTifJobParameter(); - datasource.setState(TIFJobState.AVAILABLE); - datasource.setTask(TIFJobTask.DELETE_UNUSED_INDICES); - when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); - Runnable renewLock = mock(Runnable.class); - - // Run - TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, renewLock); - - // Verify - verify(tifJobUpdateService, times(1)).deleteAllTifdIndices(datasource); - verify(tifJobUpdateService, never()).createThreatIntelFeedData(datasource, renewLock); - verify(tifJobUpdateService).updateJobSchedulerParameter(datasource, datasource.getSchedule(), TIFJobTask.ALL); - } - - public void testUpdateDatasourceExceptionHandling() throws IOException { - TIFJobParameter datasource = new TIFJobParameter(); - datasource.setName(ThreatIntelTestHelper.randomLowerCaseString()); - datasource.getUpdateStats().setLastFailedAt(null); - when(tifJobParameterService.getJobParameter(datasource.getName())).thenReturn(datasource); - doThrow(new RuntimeException("test failure")).when(tifJobUpdateService).deleteAllTifdIndices(any()); - - // Run - TIFJobRunner.getJobRunnerInstance().updateJobParameter(datasource, mock(Runnable.class)); - - // Verify - assertNotNull(datasource.getUpdateStats().getLastFailedAt()); - 
verify(tifJobParameterService).updateJobSchedulerParameter(datasource); - } -} - diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java deleted file mode 100644 index 06f635a34..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.isA; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; - -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVParser; -import org.junit.Before; -import org.opensearch.OpenSearchException; -import org.opensearch.cluster.routing.ShardRouting; -import org.opensearch.common.SuppressForbidden; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedParser; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestHelper; -import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; -import 
org.opensearch.securityanalytics.threatIntel.common.TIFJobState; - - -@SuppressForbidden(reason = "unit test") -public class TIFJobUpdateServiceTests extends ThreatIntelTestCase { - private TIFJobUpdateService datasourceUpdateService; - - @Before - public void init() { - datasourceUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService); - } - - public void testUpdateOrCreateThreatIntelFeedData_whenHashValueIsSame_thenSkipUpdate() throws IOException { - List containedIocs = new ArrayList<>(); - containedIocs.add("ip"); - TIFMetadata tifMetadata = new TIFMetadata("id", "url", "name", "org", "desc", "type", containedIocs, "0"); - - TIFJobParameter datasource = new TIFJobParameter(); - datasource.setState(TIFJobState.AVAILABLE); - - // Run - datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class)); - - // Verify - assertNotNull(datasource.getUpdateStats().getLastSkippedAt()); - verify(tifJobParameterService).updateJobSchedulerParameter(datasource); - } - - public void testUpdateOrCreateThreatIntelFeedData_whenInvalidData_thenThrowException() throws IOException { - List containedIocs = new ArrayList<>(); - containedIocs.add("ip"); - TIFMetadata tifMetadata = new TIFMetadata("id", "url", "name", "org", "desc", "type", containedIocs, "0"); - - File sampleFile = new File( - this.getClass().getClassLoader().getResource("threatIntel/sample_invalid_less_than_two_fields.csv").getFile() - ); - when(ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); - - TIFJobParameter datasource = new TIFJobParameter(); - datasource.setState(TIFJobState.AVAILABLE); - // Run - expectThrows(OpenSearchException.class, () -> datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class))); - } - - public void testUpdateOrCreateThreatIntelFeedData_whenIncompatibleFields_thenThrowException() throws 
IOException { - List containedIocs = new ArrayList<>(); - containedIocs.add("ip"); - TIFMetadata tifMetadata = new TIFMetadata("id", "https://feodotracker.abuse.ch/downloads/ipblocklist_aggressive.csv", "name", "org", "desc", "type", containedIocs, "0"); - - File sampleFile = new File(this.getClass().getClassLoader().getResource("threatIntel/sample_valid.csv").getFile()); - when(ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); - - TIFJobParameter datasource = new TIFJobParameter(); - datasource.setState(TIFJobState.AVAILABLE); - - - // Run - expectThrows(OpenSearchException.class, () -> datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class))); - } - - public void testUpdateOrCreateThreatIntelFeedData_whenValidInput_thenSucceed() throws IOException { - List containedIocs = new ArrayList<>(); - containedIocs.add("ip"); - TIFMetadata tifMetadata = new TIFMetadata("id", "url", "name", "org", "desc", "type", containedIocs, "0"); - - File sampleFile = new File(this.getClass().getClassLoader().getResource("threatIntel/sample_valid.csv").getFile()); - when(ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); - ShardRouting shardRouting = mock(ShardRouting.class); - when(shardRouting.started()).thenReturn(true); - when(routingTable.allShards(anyString())).thenReturn(Arrays.asList(shardRouting)); - - TIFJobParameter datasource = new TIFJobParameter(); - datasource.setState(TIFJobState.AVAILABLE); - - datasource.getUpdateStats().setLastSucceededAt(null); - datasource.getUpdateStats().setLastProcessingTimeInMillis(null); - - // Run - datasourceUpdateService.createThreatIntelFeedData(datasource, mock(Runnable.class)); - - // Verify - - assertNotNull(datasource.getUpdateStats().getLastSucceededAt()); - 
assertNotNull(datasource.getUpdateStats().getLastProcessingTimeInMillis()); - verify(tifJobParameterService, times(2)).updateJobSchedulerParameter(datasource); - verify(threatIntelFeedDataService).saveThreatIntelFeedDataCSV(eq(datasource.currentIndexName()), isA(String[].class), any(Iterator.class), any(Runnable.class), tifMetadata); - } - - public void testWaitUntilAllShardsStarted_whenTimedOut_thenThrowException() { - String indexName = ThreatIntelTestHelper.randomLowerCaseString(); - ShardRouting shardRouting = mock(ShardRouting.class); - when(shardRouting.started()).thenReturn(false); - when(routingTable.allShards(indexName)).thenReturn(Arrays.asList(shardRouting)); - - // Run - Exception e = expectThrows(OpenSearchException.class, () -> datasourceUpdateService.waitUntilAllShardsStarted(indexName, 10)); - - // Verify - assertTrue(e.getMessage().contains("did not complete")); - } - - public void testWaitUntilAllShardsStarted_whenInterrupted_thenThrowException() { - String indexName = ThreatIntelTestHelper.randomLowerCaseString(); - ShardRouting shardRouting = mock(ShardRouting.class); - when(shardRouting.started()).thenReturn(false); - when(routingTable.allShards(indexName)).thenReturn(Arrays.asList(shardRouting)); - - // Run - Thread.currentThread().interrupt(); - Exception e = expectThrows(RuntimeException.class, () -> datasourceUpdateService.waitUntilAllShardsStarted(indexName, 10)); - - // Verify - assertEquals(InterruptedException.class, e.getCause().getClass()); - } - - public void testDeleteUnusedIndices_whenValidInput_thenSucceed() { - String datasourceName = ThreatIntelTestHelper.randomLowerCaseString(); - String indexPrefix = String.format(".threatintel-data.%s.", datasourceName); - Instant now = Instant.now(); - String currentIndex = indexPrefix + now.toEpochMilli(); - String oldIndex = indexPrefix + now.minusMillis(1).toEpochMilli(); - String lingeringIndex = indexPrefix + now.minusMillis(2).toEpochMilli(); - TIFJobParameter datasource = new 
TIFJobParameter(); - datasource.setName(datasourceName); - datasource.setCurrentIndex(currentIndex); - datasource.getIndices().add(currentIndex); - datasource.getIndices().add(oldIndex); - datasource.getIndices().add(lingeringIndex); - - when(metadata.hasIndex(currentIndex)).thenReturn(true); - when(metadata.hasIndex(oldIndex)).thenReturn(true); - when(metadata.hasIndex(lingeringIndex)).thenReturn(false); - - datasourceUpdateService.deleteAllTifdIndices(datasource); - - assertEquals(0, datasource.getIndices().size()); -// assertEquals(currentIndex, datasource.getIndices().get(0)); //TODO: check this - verify(tifJobParameterService).updateJobSchedulerParameter(datasource); - verify(threatIntelFeedDataService).deleteThreatIntelDataIndex(oldIndex); - } - - public void testUpdateDatasource_whenNoChange_thenNoUpdate() { - TIFJobParameter datasource = randomTifJobParameter(); - - // Run - datasourceUpdateService.updateJobSchedulerParameter(datasource, datasource.getSchedule(), datasource.getTask()); - - // Verify - verify(tifJobParameterService, never()).updateJobSchedulerParameter(any()); - } - - public void testUpdateDatasource_whenChange_thenUpdate() { - TIFJobParameter datasource = randomTifJobParameter(); - datasource.setTask(TIFJobTask.ALL); - - // Run - datasourceUpdateService.updateJobSchedulerParameter( - datasource, - new IntervalSchedule(Instant.now(), datasource.getSchedule().getInterval() + 1, ChronoUnit.DAYS), - datasource.getTask() - ); - datasourceUpdateService.updateJobSchedulerParameter(datasource, datasource.getSchedule(), TIFJobTask.DELETE_UNUSED_INDICES); - - // Verify - verify(tifJobParameterService, times(2)).updateJobSchedulerParameter(any()); - } -} From d1d7ca00e28c584ad58a3a40f95c6ac24f9d3d4b Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Wed, 11 Oct 2023 10:11:28 -0700 Subject: [PATCH 14/39] converge job scheduler and detector threat intel code Signed-off-by: Surya Sashank Nistala --- build.gradle | 2 +- 
.../DetectorThreatIntelService.java | 2 - .../ThreatIntelFeedDataService.java | 6 +- .../jobscheduler/TIFJobParameter.java | 4 +- .../jobscheduler/TIFJobUpdateService.java | 9 +- .../resthandler/DetectorMonitorRestApiIT.java | 175 +++++++++--------- 6 files changed, 95 insertions(+), 103 deletions(-) diff --git a/build.gradle b/build.gradle index 2a958f0b6..70b9e0bd3 100644 --- a/build.gradle +++ b/build.gradle @@ -158,7 +158,7 @@ dependencies { api "org.opensearch:common-utils:${common_utils_version}@jar" api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" - compileOnly "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" + implementation "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" implementation "org.apache.commons:commons-csv:1.10.0" // Needed for integ tests diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index ae0acc6c3..b0891f413 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -63,8 +63,6 @@ public void createDocLevelQueryFromThreatIntel(Detector detector, ActionListener } CountDownLatch latch = new CountDownLatch(1); - // TODO: plugin logic to run job for populating threat intel feed data - //TODO populateFeedData() threatIntelFeedDataService.getThreatIntelFeedData(new ActionListener<>() { @Override public void onResponse(List threatIntelFeedData) { diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index b7592a6a4..12c7dfb5e 100644 --- 
a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -111,10 +111,8 @@ public void getThreatIntelFeedData( ".opensearch-sap-threatintel*" //name? ); - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); SearchRequest searchRequest = new SearchRequest(tifdIndex); searchRequest.source().size(9999); //TODO: convert to scroll - searchRequest.source(sourceBuilder); client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(ThreatIntelFeedDataUtils.getTifdList(r, xContentRegistry)), e -> { log.error(String.format( "Failed to fetch threat intel feed data from system index %s", tifdIndex), e); @@ -195,7 +193,7 @@ public void parseAndSaveThreatIntelFeedDataCSV( CSVRecord record = iterator.next(); String iocType = tifMetadata.getContainedIocs().get(0); //todo make generic in upcoming versions Integer colNum = tifMetadata.getIocCol(); - String iocValue = record.values()[colNum]; + String iocValue = record.values()[colNum].split(" ")[0]; String feedId = tifMetadata.getFeedId(); Instant timestamp = Instant.now(); ThreatIntelFeedData threatIntelFeedData = new ThreatIntelFeedData(iocType, iocValue, feedId, timestamp); @@ -206,13 +204,13 @@ public void parseAndSaveThreatIntelFeedDataCSV( IndexRequest indexRequest = new IndexRequest(indexName); indexRequest.source(tifData); indexRequest.opType(DocWriteRequest.OpType.INDEX); - indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); bulkRequest.add(indexRequest); if (bulkRequest.requests().size() == batchSize) { saveTifds(bulkRequest, timeout); } } + saveTifds(bulkRequest, timeout); renewLock.run(); freezeIndex(indexName); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java index 456be4838..a5346dce4 100644 
--- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java @@ -361,7 +361,7 @@ public String newIndexName(final TIFJobParameter jobSchedulerParameter, TIFMetad if (nameOptional.isPresent()) { suffix = "-1".equals(nameOptional.get()) ? "-2" : suffix; } - return String.format(Locale.ROOT, "%s-%s-%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, tifMetadata.getFeedId(), suffix); + return String.format(Locale.ROOT, "%s-%s%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, tifMetadata.getFeedId(), suffix); } public TIFJobState getState() { @@ -529,7 +529,7 @@ public static TIFJobParameter build(final PutTIFJobRequest request) { String name = request.getName(); IntervalSchedule schedule = new IntervalSchedule( Instant.now().truncatedTo(ChronoUnit.MILLIS), - (int) request.getUpdateInterval().days(), + 1, //TODO fix ChronoUnit.DAYS ); return new TIFJobParameter(name, schedule); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java index 6da04087e..a73009184 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java @@ -138,10 +138,9 @@ public List createThreatIntelFeedData(final TIFJobParameter jobScheduler "Alienvault IP Reputation Database", "csv", List.of("ip"), - 1); + 0); List tifMetadataList = new ArrayList<>(); //todo populate from config instead of example tifMetadataList.add(tifMetadata); - Instant startTime = Instant.now(); List freshIndices = new ArrayList<>(); for (TIFMetadata metadata : tifMetadataList) { String indexName = setupIndex(jobSchedulerParameter, tifMetadata); @@ -152,15 +151,17 @@ public List createThreatIntelFeedData(final 
TIFJobParameter jobScheduler switch (tifMetadata.getFeedType()) { case "csv": try (CSVParser reader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)) { - // iterate until we find first line without '#' + // iterate until we find first line without '#' and without empty line CSVRecord findHeader = reader.iterator().next(); - while (findHeader.get(0).charAt(0) == '#' || findHeader.get(0).charAt(0) == ' ') { + while ((findHeader.values().length ==1 && "".equals(findHeader.values()[0])) || findHeader.get(0).charAt(0) == '#' || findHeader.get(0).charAt(0) == ' ') { findHeader = reader.iterator().next(); } CSVRecord headerLine = findHeader; header = ThreatIntelFeedParser.validateHeader(headerLine).values(); threatIntelFeedDataService.parseAndSaveThreatIntelFeedDataCSV(indexName, header, reader.iterator(), renewLock, tifMetadata); + succeeded = true; } + break; default: // if the feed type doesn't match any of the supporting feed types, throw an exception succeeded = false; diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 640a3d8eb..15e9f9bad 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1051,94 +1051,89 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule verifyWorkflow(detectorMap, monitorIds, 2); } -// public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws IOException { -// -// updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); -// String index = createTestIndex(randomIndex(), windowsIndexMapping()); -// -// // Execute CreateMappingsAction to add alias mapping for index -// Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); -// // both 
req params and req body are supported -// createMappingRequest.setJsonEntity( -// "{ \"index_name\":\"" + index + "\"," + -// " \"rule_topic\":\"" + randomDetectorType() + "\", " + -// " \"partial\":true" + -// "}" -// ); -// -// Response createMappingResponse = client().performRequest(createMappingRequest); -// -// assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); -// -// String testOpCode = "Test"; -// -// String randomDocRuleId = createRule(randomRule()); -// List detectorRules = List.of(new DetectorRule(randomDocRuleId)); -// DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, -// Collections.emptyList()); -// Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); -// -// Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); -// -// String request = "{\n" + -// " \"query\" : {\n" + -// " \"match_all\":{\n" + -// " }\n" + -// " }\n" + -// "}"; -// SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); -// -// assertEquals(2, response.getHits().getTotalHits().value); -// -// assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); -// Map responseBody = asMap(createResponse); -// -// String detectorId = responseBody.get("_id").toString(); -// request = "{\n" + -// " \"query\" : {\n" + -// " \"match\":{\n" + -// " \"_id\": \"" + detectorId + "\"\n" + -// " }\n" + -// " }\n" + -// "}"; -// List hits = executeSearch(Detector.DETECTORS_INDEX, request); -// SearchHit hit = hits.get(0); -// Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); -// List inputArr = (List) detectorMap.get("inputs"); -// -// -// List monitorIds = ((List) (detectorMap).get("monitor_id")); -// assertEquals(1, monitorIds.size()); -// -// 
assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); -// assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); -// -// // Verify workflow -// verifyWorkflow(detectorMap, monitorIds, 1); -// List iocs = getThreatIntelFeedIocs(3); -// for (String ioc : iocs) { -// indexDoc(index, "1", randomDoc(5, 3, "abc")); -// indexDoc(index, "2", randomDoc(5, 3, "xyz")); -// indexDoc(index, "3", randomDoc(5, 3, "klm")); -// } -// String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); -// -// Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); -// -// List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); -// assertEquals(1, monitorRunResults.size()); -// -// Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); -// int noOfSigmaRuleMatches = docLevelQueryResults.size(); -// assertEquals(2, noOfSigmaRuleMatches); -// String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); -// ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); -// assertEquals(docs.size(), 2); -// -// //update threat intel -// String tifdString3 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"klm\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; -// -// indexDoc(feedIndex, "3", tifdString3); + public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + 
createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(2, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) 
detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + List iocs = getThreatIntelFeedIocs(3); + int i=1; + for (String ioc : iocs) { + indexDoc(index, i+"", randomDoc(5, 3, ioc)); + i++; + } + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(2, noOfSigmaRuleMatches); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(), 2); // // Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); // @@ -1160,7 +1155,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule // threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); // docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); // assertEquals(docs.size(), 1); -// } + } private List getThreatIntelFeedIocs(int num) throws IOException { String request = getMatchAllSearchRequestString(num); @@ -1170,7 +1165,7 @@ private List getThreatIntelFeedIocs(int num) throws IOException { private static String getMatchAllSearchRequestString(int num) { return "{\n" + - "size : " + num + "," + + "\"size\" : " + num + "," + " \"query\" : {\n" + " \"match_all\":{\n" + " }\n" + From 
d53085b0cc34ee20e27cff9a55ca9a4b3bdbbd0c Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Thu, 12 Oct 2023 01:52:33 -0700 Subject: [PATCH 15/39] add feed metadata config files in src and test Signed-off-by: Surya Sashank Nistala --- .../SecurityAnalyticsPlugin.java | 9 +- .../ThreatIntelFeedDataService.java | 15 +- .../threatIntel/common/TIFMetadata.java | 215 +++++------------- .../BuiltInTIFMetadataLoader.java | 114 ++++++++++ .../feedMetadata/TIFMetadataService.java | 0 .../jobscheduler/TIFJobUpdateService.java | 30 +-- .../threatIntelFeed/feedMetadata.json | 12 + .../resources/threatIntelFeedInfo/feodo.yml | 6 - .../threatIntelFeed/feedMetadata.json | 12 + 9 files changed, 214 insertions(+), 199 deletions(-) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/TIFMetadataService.java create mode 100644 src/main/resources/threatIntelFeed/feedMetadata.json delete mode 100644 src/main/resources/threatIntelFeedInfo/feodo.yml create mode 100644 src/test/resources/threatIntelFeed/feedMetadata.json diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 624df47cb..66257c360 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -54,6 +54,7 @@ import org.opensearch.securityanalytics.threatIntel.action.*; import org.opensearch.securityanalytics.threatIntel.common.TIFExecutor; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.feedMetadata.BuiltInTIFMetadataLoader; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; import 
org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobRunner; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; @@ -141,6 +142,7 @@ public Collection createComponents(Client client, Supplier repositoriesServiceSupplier) { builtinLogTypeLoader = new BuiltinLogTypeLoader(); + BuiltInTIFMetadataLoader builtInTIFMetadataLoader = new BuiltInTIFMetadataLoader(); logTypeService = new LogTypeService(client, clusterService, xContentRegistry, builtinLogTypeLoader); detectorIndices = new DetectorIndices(client.admin(), clusterService, threadPool); ruleTopicIndices = new RuleTopicIndices(client, clusterService, logTypeService); @@ -153,7 +155,7 @@ public Collection createComponents(Client client, ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService, client, indexNameExpressionResolver, xContentRegistry); DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); TIFJobParameterService tifJobParameterService = new TIFJobParameterService(client, clusterService); - TIFJobUpdateService tifJobUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService); + TIFJobUpdateService tifJobUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService, builtInTIFMetadataLoader); TIFExecutor threatIntelExecutor = new TIFExecutor(threadPool); TIFLockService threatIntelLockService = new TIFLockService(clusterService, client); @@ -163,9 +165,8 @@ public Collection createComponents(Client client, return List.of( detectorIndices, correlationIndices, correlationRuleIndices, ruleTopicIndices, customLogTypeIndices, ruleIndices, - mapperService, indexTemplateManager, builtinLogTypeLoader, threatIntelFeedDataService, detectorThreatIntelService, - tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService - ); + mapperService, 
indexTemplateManager, builtinLogTypeLoader, builtInTIFMetadataLoader, threatIntelFeedDataService, detectorThreatIntelService, + tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService); } @Override diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 12c7dfb5e..87044f4b8 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -15,7 +15,6 @@ import org.opensearch.action.support.WriteRequest; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; -import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; @@ -26,8 +25,6 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.securityanalytics.findings.FindingsService; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobAction; import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobRequest; @@ -112,7 +109,7 @@ public void getThreatIntelFeedData( ); SearchRequest searchRequest = new SearchRequest(tifdIndex); - searchRequest.source().size(9999); //TODO: convert to scroll + searchRequest.source().size(1000); //TODO: convert to scroll client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(ThreatIntelFeedDataUtils.getTifdList(r, xContentRegistry)), e -> { log.error(String.format( "Failed to 
fetch threat intel feed data from system index %s", tifdIndex), e); @@ -191,7 +188,7 @@ public void parseAndSaveThreatIntelFeedDataCSV( List tifdList = new ArrayList<>(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); - String iocType = tifMetadata.getContainedIocs().get(0); //todo make generic in upcoming versions + String iocType = tifMetadata.getIocType(); //todo make generic in upcoming versions Integer colNum = tifMetadata.getIocCol(); String iocValue = record.values()[colNum].split(" ")[0]; String feedId = tifMetadata.getFeedId(); @@ -217,7 +214,10 @@ public void parseAndSaveThreatIntelFeedDataCSV( public void saveTifds(BulkRequest bulkRequest, TimeValue timeout) { - BulkResponse response = StashedThreadContext.run(client, () -> client.bulk(bulkRequest).actionGet(timeout)); + try { + BulkResponse response = StashedThreadContext.run(client, () -> { + return client.bulk(bulkRequest).actionGet(timeout); + }); if (response.hasFailures()) { throw new OpenSearchException( "error occurred while ingesting threat intel feed data in {} with an error {}", @@ -226,6 +226,9 @@ public void saveTifds(BulkRequest bulkRequest, TimeValue timeout) { ); } bulkRequest.requests().clear(); + } catch (OpenSearchException e) { + log.error("failed to save threat intel feed data", e); + } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java index 8b94e5693..0bdc2d77e 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java @@ -4,46 +4,30 @@ */ package org.opensearch.securityanalytics.threatIntel.common; -import java.io.BufferedReader; import java.io.IOException; -import java.io.InputStreamReader; -import java.net.URL; -import java.net.URLConnection; -import java.nio.CharBuffer; -import 
java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.List; +import java.util.Map; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.SpecialPermission; -import org.opensearch.common.SuppressForbidden; -import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.ParseField; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.*; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.util.SecurityAnalyticsException; /** * Threat intel tif job metadata object - * + *

* TIFMetadata is stored in an external endpoint. OpenSearch read the file and store values it in this object. */ public class TIFMetadata implements Writeable, ToXContent { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - private static final ParseField FEED_ID = new ParseField("id"); + private static final ParseField FEED_ID_FIELD = new ParseField("id"); private static final ParseField URL_FIELD = new ParseField("url"); - private static final ParseField NAME = new ParseField("name"); - private static final ParseField ORGANIZATION = new ParseField("organization"); - private static final ParseField DESCRIPTION = new ParseField("description"); - private static final ParseField FEED_TYPE = new ParseField("feed_type"); - private static final ParseField CONTAINED_IOCS = new ParseField("contained_iocs"); - private static final ParseField IOC_COL = new ParseField("ioc_col"); + private static final ParseField NAME_FIELD = new ParseField("name"); + private static final ParseField ORGANIZATION_FIELD = new ParseField("organization"); + private static final ParseField DESCRIPTION_FIELD = new ParseField("description"); + private static final ParseField FEED_FORMAT = new ParseField("feed_format"); + private static final ParseField IOC_TYPE_FIELD = new ParseField("ioc_type"); + private static final ParseField IOC_COL_FIELD = new ParseField("ioc_col"); /** * @param feedId ID of the threat intel feed data @@ -88,80 +72,64 @@ public class TIFMetadata implements Writeable, ToXContent { private Integer iocCol; /** - * @param containedIocs list of ioc types contained in feed - * @return list of ioc types contained in feed + * @param containedIocs ioc type in feed + * @return ioc type in feed */ - private List containedIocs; + private String iocType; + + public TIFMetadata(Map input) { + this( + input.get(FEED_ID_FIELD.getPreferredName()).toString(), + input.get(URL_FIELD.getPreferredName()).toString(), + 
input.get(NAME_FIELD.getPreferredName()).toString(), + input.get(ORGANIZATION_FIELD.getPreferredName()).toString(), + input.get(DESCRIPTION_FIELD.getPreferredName()).toString(), + input.get(FEED_FORMAT.getPreferredName()).toString(), + input.get(IOC_TYPE_FIELD.getPreferredName()).toString(), + Integer.parseInt(input.get(IOC_COL_FIELD.getPreferredName()).toString()) + ); + } public String getUrl() { return url; } + public String getName() { return name; } - public String getOrganization() { - return organization; - } + public String getDescription() { return description; } + public String getFeedId() { return feedId; } + public String getFeedType() { return feedType; } + public Integer getIocCol() { return iocCol; } - public List getContainedIocs() { - return containedIocs; + + public String getIocType() { + return iocType; } public TIFMetadata(final String feedId, final String url, final String name, final String organization, final String description, - final String feedType, final List containedIocs, final Integer iocCol) { + final String feedType, final String iocType, final Integer iocCol) { this.feedId = feedId; this.url = url; this.name = name; this.organization = organization; this.description = description; this.feedType = feedType; - this.containedIocs = containedIocs; + this.iocType = iocType; this.iocCol = iocCol; } - public void setFeedId(String feedId) { - this.feedId = feedId; - } - - public void setUrl(String url) { - this.url = url; - } - - public void setName(String name) { - this.name = name; - } - - public void setOrganization(String organization) { - this.organization = organization; - } - - public void setFeedType(String feedType) { - this.feedType = feedType; - } - - public void setDescription(String description) { - this.description = description; - } - - public void setIocCol(Integer iocCol) { - this.iocCol = iocCol; - } - - public void setContainedIocs(List containedIocs) { - this.containedIocs = containedIocs; - } - /** * tif job metadata 
parser @@ -176,32 +144,34 @@ public void setContainedIocs(List containedIocs) { String organization = (String) args[3]; String description = (String) args[4]; String feedType = (String) args[5]; - List containedIocs = (List) args[6]; + String containedIocs = (String) args[6]; Integer iocCol = Integer.parseInt((String) args[7]); return new TIFMetadata(feedId, url, name, organization, description, feedType, containedIocs, iocCol); } ); + static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_ID_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), URL_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ORGANIZATION); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_TYPE); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), CONTAINED_IOCS); - PARSER.declareString(ConstructingObjectParser.constructorArg(), IOC_COL); + PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), ORGANIZATION_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_FORMAT); + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), IOC_TYPE_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), IOC_COL_FIELD); } - public TIFMetadata(final StreamInput in) throws IOException{ + public TIFMetadata(final StreamInput in) throws IOException { feedId = in.readString(); url = in.readString(); name = in.readString(); organization = in.readString(); description = in.readString(); feedType = in.readString(); - containedIocs = in.readStringList(); + 
iocType = in.readString(); iocCol = in.readInt(); } + public void writeTo(final StreamOutput out) throws IOException { out.writeString(feedId); out.writeString(url); @@ -209,100 +179,27 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeString(organization); out.writeString(description); out.writeString(feedType); - out.writeStringCollection(containedIocs); + out.writeString(iocType); out.writeInt(iocCol); } - private TIFMetadata(){} - - - /** - * Reset database so that it can be updated in next run regardless there is new update or not - */ - public void resetTIFMetadata() { - this.setFeedId(null); - this.setUrl(null); - this.setName(null); - this.setOrganization(null); - this.setDescription(null); - this.setFeedType(null); - this.setContainedIocs(null); - this.setIocCol(null); + private TIFMetadata() { } - /** - * Set database attributes with given input - * - * @param tifMetadata the tif metadata - * @param fields the fields - */ - public void setTIFMetadata(final TIFMetadata tifMetadata, final List fields) { - this.feedId = tifMetadata.getFeedId(); - this.url = tifMetadata.getUrl(); - this.name = tifMetadata.getName(); - this.organization = tifMetadata.getOrganization(); - this.description = tifMetadata.getDescription(); - this.feedType = tifMetadata.getFeedType(); - this.containedIocs = tifMetadata.getContainedIocs(); - this.iocCol = tifMetadata.getIocCol(); - } @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); - builder.field(FEED_ID.getPreferredName(), feedId); + builder.field(FEED_ID_FIELD.getPreferredName(), feedId); builder.field(URL_FIELD.getPreferredName(), url); - builder.field(NAME.getPreferredName(), name); - builder.field(ORGANIZATION.getPreferredName(), organization); - builder.field(DESCRIPTION.getPreferredName(), description); - builder.field(FEED_TYPE.getPreferredName(), feedType); - builder.field(CONTAINED_IOCS.getPreferredName(), 
containedIocs); - builder.field(IOC_COL.getPreferredName(), iocCol); + builder.field(NAME_FIELD.getPreferredName(), name); + builder.field(ORGANIZATION_FIELD.getPreferredName(), organization); + builder.field(DESCRIPTION_FIELD.getPreferredName(), description); + builder.field(FEED_FORMAT.getPreferredName(), feedType); + builder.field(IOC_TYPE_FIELD.getPreferredName(), iocType); + builder.field(IOC_COL_FIELD.getPreferredName(), iocCol); builder.endObject(); return builder; } - /** - * TIFMetadata builder - */ - public static class Builder { //TODO: builder? - private static final int FILE_MAX_BYTES = 1024 * 8; - - /** - * Build TIFMetadata from a given url - * - * @param url url to downloads a manifest file - * @return TIFMetadata representing the manifest file - */ - @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") - public static TIFMetadata build(final URL url) { - SpecialPermission.check(); - return AccessController.doPrivileged((PrivilegedAction) () -> { - try { - URLConnection connection = url.openConnection(); - return internalBuild(connection); - } catch (IOException e) { - log.error("Runtime exception connecting to the manifest file", e); - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO - } - }); - } - - @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") - protected static TIFMetadata internalBuild(final URLConnection connection) throws IOException { - connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); - InputStreamReader inputStreamReader = new InputStreamReader(connection.getInputStream()); - try (BufferedReader reader = new BufferedReader(inputStreamReader)) { - CharBuffer charBuffer = CharBuffer.allocate(FILE_MAX_BYTES); - reader.read(charBuffer); - charBuffer.flip(); - XContentParser parser = JsonXContent.jsonXContent.createParser( - NamedXContentRegistry.EMPTY, - 
DeprecationHandler.IGNORE_DEPRECATIONS, - charBuffer.toString() - ); - return PARSER.parse(parser, null); - } - } - } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java new file mode 100644 index 000000000..967d4c936 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java @@ -0,0 +1,114 @@ +package org.opensearch.securityanalytics.threatIntel.feedMetadata; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; +import org.opensearch.common.settings.SettingsException; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; +import org.opensearch.securityanalytics.util.FileUtils; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class BuiltInTIFMetadataLoader extends AbstractLifecycleComponent { + + private static final Logger logger = LogManager.getLogger(BuiltInTIFMetadataLoader.class); + + private static final String BASE_PATH = "threatIntelFeed/"; + + + private List tifMetadataList = null; + private Map tifMetadataByName; + + public List getTifMetadataList() { + ensureTifMetadataLoaded(); + return tifMetadataList; + } + + public TIFMetadata getTifMetadataByName(String name) { + ensureTifMetadataLoaded(); + return tifMetadataByName.get(name); + } 
+ + public boolean tifMetadataExists(String name) { + ensureTifMetadataLoaded(); + return tifMetadataByName.containsKey(name); + } + + public void ensureTifMetadataLoaded() { + try { + if (tifMetadataList != null) { + return; + } + loadBuiltInTifMetadata(); + tifMetadataByName = tifMetadataList.stream() + .collect(Collectors.toMap(TIFMetadata::getName, Function.identity())); + } catch (Exception e) { + logger.error("Failed loading builtin log types from disk!", e); + } + } + + @SuppressWarnings("unchecked") + protected void loadBuiltInTifMetadata() throws URISyntaxException, IOException { + final String url = Objects.requireNonNull(BuiltInTIFMetadataLoader.class.getClassLoader().getResource(BASE_PATH), + "Built-in threat intel feed metadata file not found").toURI().toString(); + Path dirPath = null; + if (url.contains("!")) { + final String[] paths = url.split("!"); + dirPath = FileUtils.getFs().getPath(paths[1]); + } else { + dirPath = Path.of(url); + } + + Stream folder = Files.list(dirPath); + Path tifMetadataPath = folder.filter(e -> e.toString().endsWith("feedMetadata.json")).collect(Collectors.toList()).get(0); + try ( + InputStream is = BuiltInTIFMetadataLoader.class.getResourceAsStream(tifMetadataPath.toString()) + ) { + String tifMetadataFilePayload = new String(Objects.requireNonNull(is).readAllBytes(), StandardCharsets.UTF_8); + + if (tifMetadataFilePayload != null) { + if(tifMetadataList == null) + tifMetadataList = new ArrayList<>(); + Map tifMetadataFileAsMap = + XContentHelper.convertToMap(JsonXContent.jsonXContent, tifMetadataFilePayload, false); + + for (Map.Entry mapEntry : tifMetadataFileAsMap.entrySet()) { + Map tifMetadataMap = (Map) mapEntry.getValue(); + tifMetadataList.add(new TIFMetadata(tifMetadataMap)); + } + } + } catch (Exception e) { + throw new SettingsException("Failed to load builtin threat intel feed metadata" + + "", e); + } + } + + @Override + protected void doStart() { + ensureTifMetadataLoaded(); + } + + @Override + protected 
void doStop() { + + } + + @Override + protected void doClose() throws IOException { + + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/TIFMetadataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/TIFMetadataService.java new file mode 100644 index 000000000..e69de29bb diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java index a73009184..a5cc01ea1 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java @@ -21,6 +21,7 @@ import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedParser; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; +import org.opensearch.securityanalytics.threatIntel.feedMetadata.BuiltInTIFMetadataLoader; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import java.io.IOException; @@ -38,16 +39,18 @@ public class TIFJobUpdateService { private final ClusterSettings clusterSettings; private final TIFJobParameterService jobSchedulerParameterService; private final ThreatIntelFeedDataService threatIntelFeedDataService; + private final BuiltInTIFMetadataLoader builtInTIFMetadataLoader; public TIFJobUpdateService( final ClusterService clusterService, final TIFJobParameterService jobSchedulerParameterService, - final ThreatIntelFeedDataService threatIntelFeedDataService - ) { + final ThreatIntelFeedDataService threatIntelFeedDataService, + BuiltInTIFMetadataLoader builtInTIFMetadataLoader) { this.clusterService = clusterService; this.clusterSettings = clusterService.getClusterSettings(); this.jobSchedulerParameterService = 
jobSchedulerParameterService; this.threatIntelFeedDataService = threatIntelFeedDataService; + this.builtInTIFMetadataLoader = builtInTIFMetadataLoader; } // functions used in job Runner @@ -120,29 +123,8 @@ private List deleteIndices(final List indicesToDelete) { * @throws IOException */ public List createThreatIntelFeedData(final TIFJobParameter jobSchedulerParameter, final Runnable renewLock) throws IOException { - // parse YAML containing list of threat intel feeds.yml - // for each feed (ex. Feodo) - // parse feed specific YAML containing TIFMetadata - - // for every threat intel feed - // create and store a new TIFMetadata object - - // use the TIFMetadata to switch case feed type - // parse through file and save threat intel feed data - - - TIFMetadata tifMetadata = new TIFMetadata("alientvault_reputation_generic", - "https://reputation.alienvault.com/reputation.generic", - "Alienvault IP Reputation Feed", - "OTX", - "Alienvault IP Reputation Database", - "csv", - List.of("ip"), - 0); - List tifMetadataList = new ArrayList<>(); //todo populate from config instead of example - tifMetadataList.add(tifMetadata); List freshIndices = new ArrayList<>(); - for (TIFMetadata metadata : tifMetadataList) { + for (TIFMetadata tifMetadata : builtInTIFMetadataLoader.getTifMetadataList()) { String indexName = setupIndex(jobSchedulerParameter, tifMetadata); String[] header; diff --git a/src/main/resources/threatIntelFeed/feedMetadata.json b/src/main/resources/threatIntelFeed/feedMetadata.json new file mode 100644 index 000000000..c73995ebd --- /dev/null +++ b/src/main/resources/threatIntelFeed/feedMetadata.json @@ -0,0 +1,12 @@ +{ + "alienvault_reputation_ip_database": { + "id": "alienvault_reputation_ip_database", + "url": "https://reputation.alienvault.com/reputation.generic", + "name": "Alienvault IP Reputation", + "organization": "Alienvault", + "description": "Alienvault IP Reputation threat intelligence feed managed by AlienVault", + "feed_format": "csv", + "ioc_type": 
"ip", + "ioc_col": 0 + } +} \ No newline at end of file diff --git a/src/main/resources/threatIntelFeedInfo/feodo.yml b/src/main/resources/threatIntelFeedInfo/feodo.yml deleted file mode 100644 index 4acbf40e4..000000000 --- a/src/main/resources/threatIntelFeedInfo/feodo.yml +++ /dev/null @@ -1,6 +0,0 @@ -url: "https://feodotracker.abuse.ch/downloads/ipblocklist_aggressive.csv" -name: "ipblocklist_aggressive.csv" -feedFormat: "csv" -org: "Feodo" -iocTypes: ["ip"] -description: "" \ No newline at end of file diff --git a/src/test/resources/threatIntelFeed/feedMetadata.json b/src/test/resources/threatIntelFeed/feedMetadata.json new file mode 100644 index 000000000..c73995ebd --- /dev/null +++ b/src/test/resources/threatIntelFeed/feedMetadata.json @@ -0,0 +1,12 @@ +{ + "alienvault_reputation_ip_database": { + "id": "alienvault_reputation_ip_database", + "url": "https://reputation.alienvault.com/reputation.generic", + "name": "Alienvault IP Reputation", + "organization": "Alienvault", + "description": "Alienvault IP Reputation threat intelligence feed managed by AlienVault", + "feed_format": "csv", + "ioc_type": "ip", + "ioc_col": 0 + } +} \ No newline at end of file From 98bbd42811b492e53e97007723fac2f016ae32d0 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Thu, 12 Oct 2023 16:23:40 -0700 Subject: [PATCH 16/39] adds ioc fields list in log type config files and ioc fields object in LogType POJO Signed-off-by: Surya Sashank Nistala --- build.gradle | 2 +- .../logtype/LogTypeService.java | 8 ++ .../securityanalytics/model/LogType.java | 66 +++++++++++++++- .../DetectorThreatIntelService.java | 76 +++++++++++++------ .../TransportIndexDetectorAction.java | 63 +++++++++------ .../OSMapping/test_windows_logtype.json | 6 ++ .../securityanalytics/TestHelpers.java | 40 ++++++++++ .../resthandler/DetectorMonitorRestApiIT.java | 6 +- 8 files changed, 214 insertions(+), 53 deletions(-) diff --git a/build.gradle b/build.gradle index 70b9e0bd3..c81cc9dc0 100644 --- 
a/build.gradle +++ b/build.gradle @@ -155,7 +155,7 @@ dependencies { implementation group: 'org.apache.commons', name: 'commons-lang3', version: "${versions.commonslang}" implementation "org.antlr:antlr4-runtime:4.10.1" implementation "com.cronutils:cron-utils:9.1.6" - api "org.opensearch:common-utils:${common_utils_version}@jar" + api files("/Users/snistala/Documents/opensearch/common-utils/build/libs/common-utils-3.0.0.0-SNAPSHOT.jar") api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" implementation "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" diff --git a/src/main/java/org/opensearch/securityanalytics/logtype/LogTypeService.java b/src/main/java/org/opensearch/securityanalytics/logtype/LogTypeService.java index fe1402e59..bec6ef8ae 100644 --- a/src/main/java/org/opensearch/securityanalytics/logtype/LogTypeService.java +++ b/src/main/java/org/opensearch/securityanalytics/logtype/LogTypeService.java @@ -10,6 +10,7 @@ import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -660,6 +661,13 @@ public void getRuleFieldMappings(String logType, ActionListener getIocFieldsList(String logType) { + LogType logTypeByName = builtinLogTypeLoader.getLogTypeByName(logType); + if(logTypeByName == null) + return Collections.emptyList(); + return logTypeByName.getIocFieldsList(); + } + public void getRuleFieldMappingsAllSchemas(String logType, ActionListener> listener) { if (builtinLogTypeLoader.logTypeExists(logType)) { diff --git a/src/main/java/org/opensearch/securityanalytics/model/LogType.java b/src/main/java/org/opensearch/securityanalytics/model/LogType.java index 7acc0d1f3..8cee7ab23 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/LogType.java +++ 
b/src/main/java/org/opensearch/securityanalytics/model/LogType.java @@ -6,14 +6,13 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; public class LogType implements Writeable { @@ -25,12 +24,16 @@ public class LogType implements Writeable { private static final String RAW_FIELD = "raw_field"; public static final String ECS = "ecs"; public static final String OCSF = "ocsf"; + public static final String IOC_FIELDS = "ioc_fields"; + public static final String IOC = "ioc"; + public static final String FIELDS = "fields"; private String id; private String name; private String description; private Boolean isBuiltIn; private List mappings; + private List iocFieldsList; public LogType(StreamInput sin) throws IOException { this.id = sin.readString(); @@ -38,14 +41,16 @@ public LogType(StreamInput sin) throws IOException { this.name = sin.readString(); this.description = sin.readString(); this.mappings = sin.readList(Mapping::readFrom); + this.iocFieldsList = sin.readList(IocFields::readFrom); } - public LogType(String id, String name, String description, boolean isBuiltIn, List mappings) { + public LogType(String id, String name, String description, boolean isBuiltIn, List mappings, List iocFieldsList) { this.id = id; this.name = name; this.description = description; this.isBuiltIn = isBuiltIn; this.mappings = mappings == null ? List.of() : mappings; + this.iocFieldsList = iocFieldsList == null ? 
List.of() : iocFieldsList; } public LogType(Map logTypeAsMap) { @@ -62,6 +67,17 @@ public LogType(Map logTypeAsMap) { new Mapping(e.get(RAW_FIELD), e.get(ECS), e.get(OCSF)) ).collect(Collectors.toList()); } + if(logTypeAsMap.containsKey(IOC_FIELDS)) { + List> iocFieldsList = (List>) logTypeAsMap.get(IOC_FIELDS); + if (iocFieldsList.size() > 0) { + this.iocFieldsList = new ArrayList<>(mappings.size()); + this.iocFieldsList = iocFieldsList.stream().map(e -> + new IocFields(e.get(IOC).toString(), (List) e.get(FIELDS)) + ).collect(Collectors.toList()); + } + } else { + iocFieldsList = Collections.emptyList(); + } } public String getName() { @@ -74,6 +90,10 @@ public String getDescription() { public boolean getIsBuiltIn() { return isBuiltIn; } + public List getIocFieldsList() { + return iocFieldsList; + } + public List getMappings() { return mappings; } @@ -85,6 +105,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeString(description); out.writeCollection(mappings); + out.writeCollection(iocFieldsList); } @Override @@ -134,4 +155,43 @@ public static Mapping readFrom(StreamInput sin) throws IOException { } } + /** + * stores information of list of field names that contain information for given IoC (Indicator of Compromise). 
+ */ + public static class IocFields implements Writeable { + private final String ioc; + + private final List fields; + + public IocFields(String ioc, List fields) { + this.ioc = ioc; + this.fields = fields; + } + + public IocFields(StreamInput sin) throws IOException { + this.ioc = sin.readString(); + this.fields = sin.readStringList(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(ioc); + out.writeStringCollection(fields); + } + + public String getIoc() { + return ioc; + } + + public List getFields() { + return fields; + } + + + public static IocFields readFrom(StreamInput sin) throws IOException { + return new IocFields(sin); + } + } + + } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index b0891f413..fb4bb744e 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -1,15 +1,19 @@ package org.opensearch.securityanalytics.threatIntel; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.client.Client; +import org.opensearch.common.settings.Settings; import org.opensearch.commons.alerting.model.DocLevelQuery; import org.opensearch.core.action.ActionListener; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.LogType; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; -import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import 
java.util.Set; import java.util.UUID; import java.util.concurrent.CountDownLatch; @@ -19,33 +23,56 @@ public class DetectorThreatIntelService { + private static final Logger log = LogManager.getLogger(DetectorThreatIntelService.class); + private final ThreatIntelFeedDataService threatIntelFeedDataService; public DetectorThreatIntelService(ThreatIntelFeedDataService threatIntelFeedDataService) { this.threatIntelFeedDataService = threatIntelFeedDataService; } + /** * Convert the feed data IOCs into query string query format to create doc level queries. */ - public DocLevelQuery createDocLevelQueryFromThreatIntelList( - List tifdList, String docLevelQueryId + public List createDocLevelQueriesFromThreatIntelList( + List iocFieldList, List tifdList, Detector detector ) { + List queries = new ArrayList<>(); Set iocs = tifdList.stream().map(ThreatIntelFeedData::getIocValue).collect(Collectors.toSet()); - String query = buildQueryStringQueryWithIocList(iocs); - return new DocLevelQuery( - docLevelQueryId, tifdList.get(0).getFeedId(), - Collections.singletonList("*"), - query, - Collections.singletonList("threat_intel") - ); + //ioc types supported by log type + List logTypeIocs = iocFieldList.stream().map(LogType.IocFields::getIoc).collect(Collectors.toList()); + // filter out ioc types not supported for given log types + Map> iocTypeToValues = tifdList.stream().filter(t -> logTypeIocs.contains(t.getIocType())) + .collect(Collectors.groupingBy( + ThreatIntelFeedData::getIocType, + Collectors.mapping(ThreatIntelFeedData::getIocValue, Collectors.toSet()) + )); + + for (Map.Entry> entry : iocTypeToValues.entrySet()) { + String query = buildQueryStringQueryWithIocList(iocs); + List fields = iocFieldList.stream().filter(t -> entry.getKey().matches(t.getIoc())).findFirst().get().getFields(); + + // create doc + for (String field : fields) { //todo increase max clause count from 1024 + queries.add(new DocLevelQuery( + constructId(detector, entry.getKey()), 
tifdList.get(0).getFeedId(), + Collections.emptyList(), + String.format(query, field), + List.of("threat_intel", entry.getKey() /*ioc_type*/) + )); + } + } + return queries; } private String buildQueryStringQueryWithIocList(Set iocs) { StringBuilder sb = new StringBuilder(); + sb.append("%s"); + sb.append(":"); sb.append("("); for (String ioc : iocs) { - if (sb.length() > 2) { + if (sb.length() > 4) { sb.append(" OR "); } sb.append(ioc); @@ -55,30 +82,30 @@ private String buildQueryStringQueryWithIocList(Set iocs) { return sb.toString(); } - public void createDocLevelQueryFromThreatIntel(Detector detector, ActionListener listener) { + public void createDocLevelQueryFromThreatIntel(List iocFieldList, Detector detector, ActionListener> listener) { try { - if (detector.getThreatIntelEnabled() == false) { - listener.onResponse(null); + if (false == detector.getThreatIntelEnabled() || iocFieldList.isEmpty()) { + listener.onResponse(Collections.emptyList()); return; - } + CountDownLatch latch = new CountDownLatch(1); threatIntelFeedDataService.getThreatIntelFeedData(new ActionListener<>() { @Override public void onResponse(List threatIntelFeedData) { if (threatIntelFeedData.isEmpty()) { - listener.onResponse(null); + listener.onResponse(Collections.emptyList()); } else { - listener.onResponse(createDocLevelQueryFromThreatIntelList( - threatIntelFeedData, - detector.getName() + "_threat_intel" + UUID.randomUUID() - )); + listener.onResponse( + createDocLevelQueriesFromThreatIntelList(iocFieldList, threatIntelFeedData, detector) + ); } latch.countDown(); } @Override public void onFailure(Exception e) { + log.error("Failed to get threat intel feeds for doc level query creation", e); listener.onFailure(e); latch.countDown(); } @@ -86,11 +113,16 @@ public void onFailure(Exception e) { latch.await(30, TimeUnit.SECONDS); } catch (InterruptedException e) { + log.error("Failed to create doc level queries from threat intel feeds", e); listener.onFailure(e); } } + private static 
String constructId(Detector detector, String iocType) { + return detector.getName() + "_threat_intel_" + iocType + "_" + UUID.randomUUID(); + } + public void updateDetectorsWithLatestThreatIntelRules() { } diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index 4805179df..3eb0a5112 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ -88,6 +88,7 @@ import org.opensearch.securityanalytics.model.DetectorInput; import org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.model.LogType; import org.opensearch.securityanalytics.model.Rule; import org.opensearch.securityanalytics.model.Value; import org.opensearch.securityanalytics.rules.aggregation.AggregationItem; @@ -323,7 +324,9 @@ private void createMonitorFromQueries(List> rulesById, Detect monitorResponses.add(addedFirstMonitorResponse); saveWorkflow(rulesById, detector, monitorResponses, refreshPolicy, listener); }, - listener::onFailure + e -> { + listener.onFailure(e); + } ); } } @@ -653,30 +656,7 @@ private IndexMonitorRequest createDocLevelMonitorRequest(List DocLevelQuery docLevelQuery = new DocLevelQuery(id, name, Collections.emptyList(), actualQuery, tags); docLevelQueries.add(docLevelQuery); } - try { - if (detector.getThreatIntelEnabled()) { - CountDownLatch countDownLatch = new CountDownLatch(1); - detectorThreatIntelService.createDocLevelQueryFromThreatIntel(detector, new ActionListener<>() { - @Override - public void onResponse(DocLevelQuery dlq) { - if (dlq != null) - docLevelQueries.add(dlq); - countDownLatch.countDown(); - } - - @Override - public void onFailure(Exception e) { - // not failing detector 
creation if any fatal exception occurs during doc level query creation from threat intel feed data - log.error("Failed to convert threat intel feed to. Proceeding with detector creation", e); - countDownLatch.countDown(); - } - }); - countDownLatch.await(); - } - } catch (Exception e) { - // not failing detector creation if any fatal exception occurs during doc level query creation from threat intel feed data - log.error("Failed to convert threat intel feed to. Proceeding with detector creation", e); - } + addThreatIntelBasedDocLevelQueries(detector, docLevelQueries); DocLevelMonitorInput docLevelMonitorInput = new DocLevelMonitorInput(detector.getName(), detector.getInputs().get(0).getIndices(), docLevelQueries); docLevelMonitorInputs.add(docLevelMonitorInput); @@ -707,6 +687,39 @@ public void onFailure(Exception e) { return new IndexMonitorRequest(monitorId, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM, refreshPolicy, restMethod, monitor, null); } + private void addThreatIntelBasedDocLevelQueries(Detector detector, List docLevelQueries) { + try { + + if (detector.getThreatIntelEnabled()) { + List iocFieldsList = logTypeService.getIocFieldsList(detector.getDetectorType()); + if (iocFieldsList == null || iocFieldsList.isEmpty()) { + + } else { + CountDownLatch countDownLatch = new CountDownLatch(1); + detectorThreatIntelService.createDocLevelQueryFromThreatIntel(iocFieldsList, detector, new ActionListener<>() { + @Override + public void onResponse(List dlqs) { + if (dlqs != null) + docLevelQueries.addAll(dlqs); + countDownLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + // not failing detector creation if any fatal exception occurs during doc level query creation from threat intel feed data + log.error("Failed to convert threat intel feed to. 
Proceeding with detector creation", e); + countDownLatch.countDown(); + } + }); + countDownLatch.await(); + } + } + } catch (Exception e) { + // not failing detector creation if any fatal exception occurs during doc level query creation from threat intel feed data + log.error("Failed to convert threat intel feed to doc level query. Proceeding with detector creation", e); + } + } + /** * Creates doc level monitor which generates per document alerts for the findings of the bucket level delegate monitors in a workflow. * This monitor has match all query applied to generate the alerts per each finding doc. diff --git a/src/main/resources/OSMapping/test_windows_logtype.json b/src/main/resources/OSMapping/test_windows_logtype.json index 7491a954c..816cba666 100644 --- a/src/main/resources/OSMapping/test_windows_logtype.json +++ b/src/main/resources/OSMapping/test_windows_logtype.json @@ -2,6 +2,12 @@ "name": "test_windows", "description": "Test Log Type used by tests. It is created as a lightweight log type for integration tests", "is_builtin": true, + "ioc_fields": [ + { + "ioc": "ip", + "fields": ["windows-hostname"] + } + ], "mappings": [ { "raw_field":"EventID", diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index abc9caad8..65417ed39 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -1373,6 +1373,46 @@ public static String randomDoc(int severity, int version, String opCode) { } + //Add IPs in HostName field. 
+ public static String randomDocWithIpIoc(int severity, int version, String ioc) { + String doc = "{\n" + + "\"EventTime\":\"2020-02-04T14:59:39.343541+00:00\",\n" + + "\"HostName\":\"%s\",\n" + + "\"Keywords\":\"9223372036854775808\",\n" + + "\"SeverityValue\":%s,\n" + + "\"Severity\":\"INFO\",\n" + + "\"EventID\":22,\n" + + "\"SourceName\":\"Microsoft-Windows-Sysmon\",\n" + + "\"ProviderGuid\":\"{5770385F-C22A-43E0-BF4C-06F5698FFBD9}\",\n" + + "\"Version\":%s,\n" + + "\"TaskValue\":22,\n" + + "\"OpcodeValue\":0,\n" + + "\"RecordNumber\":9532,\n" + + "\"ExecutionProcessID\":1996,\n" + + "\"ExecutionThreadID\":2616,\n" + + "\"Channel\":\"Microsoft-Windows-Sysmon/Operational\",\n" + + "\"Domain\":\"NT AUTHORITY\",\n" + + "\"AccountName\":\"SYSTEM\",\n" + + "\"UserID\":\"S-1-5-18\",\n" + + "\"AccountType\":\"User\",\n" + + "\"Message\":\"Dns query:\\r\\nRuleName: \\r\\nUtcTime: 2020-02-04 14:59:38.349\\r\\nProcessGuid: {b3c285a4-3cda-5dc0-0000-001077270b00}\\r\\nProcessId: 1904\\r\\nQueryName: EC2AMAZ-EPO7HKA\\r\\nQueryStatus: 0\\r\\nQueryResults: 172.31.46.38;\\r\\nImage: C:\\\\Program Files\\\\nxlog\\\\nxlog.exe\",\n" + + "\"Category\":\"Dns query (rule: DnsQuery)\",\n" + + "\"Opcode\":\"blahblah\",\n" + + "\"UtcTime\":\"2020-02-04 14:59:38.349\",\n" + + "\"ProcessGuid\":\"{b3c285a4-3cda-5dc0-0000-001077270b00}\",\n" + + "\"ProcessId\":\"1904\",\"QueryName\":\"EC2AMAZ-EPO7HKA\",\"QueryStatus\":\"0\",\n" + + "\"QueryResults\":\"172.31.46.38;\",\n" + + "\"Image\":\"C:\\\\Program Files\\\\nxlog\\\\regsvr32.exe\",\n" + + "\"EventReceivedTime\":\"2020-02-04T14:59:40.780905+00:00\",\n" + + "\"SourceModuleName\":\"in\",\n" + + "\"SourceModuleType\":\"im_msvistalog\",\n" + + "\"CommandLine\": \"eachtest\",\n" + + "\"Initiated\": \"true\"\n" + + "}"; + return String.format(Locale.ROOT, doc, ioc, severity, version); + + } + public static String randomDoc() { return "{\n" + "\"@timestamp\":\"2020-02-04T14:59:39.343541+00:00\",\n" + diff --git 
a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 15e9f9bad..f46fd0efb 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -39,6 +39,7 @@ import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntel; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDoc; +import static org.opensearch.securityanalytics.TestHelpers.randomDocWithIpIoc; import static org.opensearch.securityanalytics.TestHelpers.randomIndex; import static org.opensearch.securityanalytics.TestHelpers.randomRule; import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; @@ -1088,6 +1089,7 @@ public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatInt "}"; SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + assertEquals(2, response.getHits().getTotalHits().value); assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -1118,7 +1120,7 @@ public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatInt List iocs = getThreatIntelFeedIocs(3); int i=1; for (String ioc : iocs) { - indexDoc(index, i+"", randomDoc(5, 3, ioc)); + indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); i++; } String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); @@ -1133,7 +1135,7 @@ public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatInt assertEquals(2, noOfSigmaRuleMatches); String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + 
"_threat_intel")).findAny().get(); ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); - assertEquals(docs.size(), 2); + assertEquals(docs.size(), 3); // // Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); // From decee51de3206b4baf6ecdfa86b872607ab310b2 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 16 Oct 2023 16:12:47 -0700 Subject: [PATCH 17/39] fix compilation issues in tests Signed-off-by: Surya Sashank Nistala --- .../LogTypeServiceTests.java | 3 +- .../model/WriteableTests.java | 8 +- .../resthandler/DetectorMonitorRestApiIT.java | 161 +++--------------- .../writable/LogTypeTests.java | 13 +- 4 files changed, 44 insertions(+), 141 deletions(-) diff --git a/src/test/java/org/opensearch/securityanalytics/LogTypeServiceTests.java b/src/test/java/org/opensearch/securityanalytics/LogTypeServiceTests.java index 8eb717e60..64288f669 100644 --- a/src/test/java/org/opensearch/securityanalytics/LogTypeServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/LogTypeServiceTests.java @@ -50,7 +50,8 @@ protected void beforeTest() throws Exception { new LogType.Mapping("rawFld1", "ecsFld1", "ocsfFld1"), new LogType.Mapping("rawFld2", "ecsFld2", "ocsfFld2"), new LogType.Mapping("rawFld3", "ecsFld3", "ocsfFld3") - ) + ), + List.of(new LogType.IocFields("ip", List.of("dst.ip"))) ) ); when(builtinLogTypeLoader.getAllLogTypes()).thenReturn(dummyLogTypes); diff --git a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java index e82911c1b..7c16e5f6f 100644 --- a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java +++ b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java @@ -50,7 +50,8 @@ public void testEmptyUserAsStream() throws IOException { public void 
testLogTypeAsStreamRawFieldOnly() throws IOException { LogType logType = new LogType( "1", "my_log_type", "description", false, - List.of(new LogType.Mapping("rawField", null, null)) + List.of(new LogType.Mapping("rawField", null, null)), + List.of(new LogType.IocFields("ip", List.of("dst.ip"))) ); BytesStreamOutput out = new BytesStreamOutput(); logType.writeTo(out); @@ -66,7 +67,8 @@ public void testLogTypeAsStreamRawFieldOnly() throws IOException { public void testLogTypeAsStreamFull() throws IOException { LogType logType = new LogType( "1", "my_log_type", "description", false, - List.of(new LogType.Mapping("rawField", "some_ecs_field", "some_ocsf_field")) + List.of(new LogType.Mapping("rawField", "some_ecs_field", "some_ocsf_field")), + List.of(new LogType.IocFields("ip", List.of("dst.ip"))) ); BytesStreamOutput out = new BytesStreamOutput(); logType.writeTo(out); @@ -80,7 +82,7 @@ public void testLogTypeAsStreamFull() throws IOException { } public void testLogTypeAsStreamNoMappings() throws IOException { - LogType logType = new LogType("1", "my_log_type", "description", false, null); + LogType logType = new LogType("1", "my_log_type", "description", false, null, null); BytesStreamOutput out = new BytesStreamOutput(); logType.writeTo(out); StreamInput sin = StreamInput.wrap(out.bytes().toBytesRef().bytes); diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index f46fd0efb..a4a38274f 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1052,7 +1052,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule verifyWorkflow(detectorMap, monitorIds, 2); } - public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws 
IOException { + public void testCreateDetectorWiththreatIntelEnabled_updateDetectorWithThreatIntelDisabled() throws IOException { updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -1136,143 +1136,20 @@ public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatInt String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); assertEquals(docs.size(), 3); -// -// Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); -// -// assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); -// -// Map updateResponseBody = asMap(updateResponse); -// detectorId = updateResponseBody.get("_id").toString(); -// -// indexDoc(index, "4", randomDoc(5, 3, "klm")); -// -// executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); -// -// monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); -// assertEquals(1, monitorRunResults.size()); -// -// docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); -// noOfSigmaRuleMatches = docLevelQueryResults.size(); -// assertEquals(2, noOfSigmaRuleMatches); -// threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); -// docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); -// assertEquals(docs.size(), 1); - } - - private List getThreatIntelFeedIocs(int num) throws IOException { - String request = getMatchAllSearchRequestString(num); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", 
request, false); - return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); - } - - private static String getMatchAllSearchRequestString(int num) { - return "{\n" + - "\"size\" : " + num + "," + - " \"query\" : {\n" + - " \"match_all\":{\n" + - " }\n" + - " }\n" + - "}"; - } - - - public void testCreateDetectorthreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { - String tifdString1 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"abc\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; - String tifdString2 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"xyz\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; - String feedIndex = ".opensearch-sap-threatintel"; - indexDoc(feedIndex, "1", tifdString1); - indexDoc(feedIndex, "2", tifdString2); - updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); - String index = createTestIndex(randomIndex(), windowsIndexMapping()); - // Execute CreateMappingsAction to add alias mapping for index - Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - createMappingRequest.setJsonEntity( - "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"" + randomDetectorType() + "\", " + - " \"partial\":true" + - "}" - ); - - Response createMappingResponse = client().performRequest(createMappingRequest); - - assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); - - String testOpCode = "Test"; - - String randomDocRuleId = createRule(randomRule()); - List detectorRules = List.of(new DetectorRule(randomDocRuleId)); - DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); - Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), false); - - Response createResponse = 
makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); - - String request = "{\n" + - " \"query\" : {\n" + - " \"match_all\":{\n" + - " }\n" + - " }\n" + - "}"; - SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); - - assertEquals(1, response.getHits().getTotalHits().value); - - assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); - Map responseBody = asMap(createResponse); - - String detectorId = responseBody.get("_id").toString(); - request = "{\n" + - " \"query\" : {\n" + - " \"match\":{\n" + - " \"_id\": \"" + detectorId + "\"\n" + - " }\n" + - " }\n" + - "}"; - List hits = executeSearch(Detector.DETECTORS_INDEX, request); - SearchHit hit = hits.get(0); - Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); - List inputArr = (List) detectorMap.get("inputs"); - - - List monitorIds = ((List) (detectorMap).get("monitor_id")); - assertEquals(1, monitorIds.size()); - - assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); - assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); - - // Verify workflow - verifyWorkflow(detectorMap, monitorIds, 1); - - indexDoc(index, "1", randomDoc(5, 3, "abc")); - indexDoc(index, "2", randomDoc(5, 3, "xyz")); - indexDoc(index, "3", randomDoc(5, 3, "klm")); - String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); - - Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); - - List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); - assertEquals(1, monitorRunResults.size()); - - Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); - int noOfSigmaRuleMatches = docLevelQueryResults.size(); - assertEquals(1, noOfSigmaRuleMatches); - - - //update 
threat intel - String tifdString3 = "{ \"type\": \"feed\",\"ioc_type\": \"ip\", \"ioc_value\": \"klm\", \"feed_id\": \"feed\", \"timestamp\": 1633344000000 }"; - - indexDoc(feedIndex, "3", tifdString3); - detector.setThreatIntelEnabled(true); + detector.setThreatIntelEnabled(false); Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); Map updateResponseBody = asMap(updateResponse); - detectorId = updateResponseBody.get("_id").toString(); - - indexDoc(index, "4", randomDoc(5, 3, "klm")); + response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + assertEquals(1, response.getHits().getTotalHits().value); //threat intel based queries should not be present as threat intel is disabled. + i=1; + for (String ioc : iocs) { + indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); + i++; + } executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); @@ -1282,11 +1159,27 @@ public void testCreateDetectorthreatIntelDisabled_updateDetectorWithThreatIntelE docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); noOfSigmaRuleMatches = docLevelQueryResults.size(); assertEquals(2, noOfSigmaRuleMatches); - String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); - ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); + docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); assertEquals(docs.size(), 1); } + private List getThreatIntelFeedIocs(int num) throws IOException { 
+ String request = getMatchAllSearchRequestString(num); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); + } + + private static String getMatchAllSearchRequestString(int num) { + return "{\n" + + "\"size\" : " + num + "," + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + } + public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesInTrigger() throws IOException { updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); diff --git a/src/test/java/org/opensearch/securityanalytics/writable/LogTypeTests.java b/src/test/java/org/opensearch/securityanalytics/writable/LogTypeTests.java index 4ede7891b..d9d592641 100644 --- a/src/test/java/org/opensearch/securityanalytics/writable/LogTypeTests.java +++ b/src/test/java/org/opensearch/securityanalytics/writable/LogTypeTests.java @@ -21,7 +21,8 @@ public class LogTypeTests { public void testLogTypeAsStreamRawFieldOnly() throws IOException { LogType logType = new LogType( "1", "my_log_type", "description", false, - List.of(new LogType.Mapping("rawField", null, null)) + List.of(new LogType.Mapping("rawField", null, null)), + List.of(new LogType.IocFields("ip", List.of("dst.ip"))) ); BytesStreamOutput out = new BytesStreamOutput(); logType.writeTo(out); @@ -32,13 +33,16 @@ public void testLogTypeAsStreamRawFieldOnly() throws IOException { assertEquals(logType.getIsBuiltIn(), newLogType.getIsBuiltIn()); assertEquals(logType.getMappings().size(), newLogType.getMappings().size()); assertEquals(logType.getMappings().get(0).getRawField(), newLogType.getMappings().get(0).getRawField()); + assertEquals(logType.getIocFieldsList().get(0).getFields().get(0), newLogType.getIocFieldsList().get(0).getFields().get(0)); + 
assertEquals(logType.getIocFieldsList().get(0).getIoc(), newLogType.getIocFieldsList().get(0).getIoc()); } @Test public void testLogTypeAsStreamFull() throws IOException { LogType logType = new LogType( "1", "my_log_type", "description", false, - List.of(new LogType.Mapping("rawField", "some_ecs_field", "some_ocsf_field")) + List.of(new LogType.Mapping("rawField", "some_ecs_field", "some_ocsf_field")), + List.of(new LogType.IocFields("ip", List.of("dst.ip"))) ); BytesStreamOutput out = new BytesStreamOutput(); logType.writeTo(out); @@ -49,11 +53,14 @@ public void testLogTypeAsStreamFull() throws IOException { assertEquals(logType.getIsBuiltIn(), newLogType.getIsBuiltIn()); assertEquals(logType.getMappings().size(), newLogType.getMappings().size()); assertEquals(logType.getMappings().get(0).getRawField(), newLogType.getMappings().get(0).getRawField()); + assertEquals(logType.getIocFieldsList().get(0).getFields().get(0), newLogType.getIocFieldsList().get(0).getFields().get(0)); + assertEquals(logType.getIocFieldsList().get(0).getIoc(), newLogType.getIocFieldsList().get(0).getIoc()); + } @Test public void testLogTypeAsStreamNoMappings() throws IOException { - LogType logType = new LogType("1", "my_log_type", "description", false, null); + LogType logType = new LogType("1", "my_log_type", "description", false, null, null); BytesStreamOutput out = new BytesStreamOutput(); logType.writeTo(out); StreamInput sin = StreamInput.wrap(out.bytes().toBytesRef().bytes); From a79b8ac26b9524f835606896a9d1454dd8cba139 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 16 Oct 2023 18:17:41 -0700 Subject: [PATCH 18/39] test udpate detector disabling threat intel Signed-off-by: Surya Sashank Nistala --- .../transport/TransportIndexDetectorAction.java | 1 + .../resthandler/DetectorMonitorRestApiIT.java | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java 
b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index 3eb0a5112..414591fe4 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ -691,6 +691,7 @@ private void addThreatIntelBasedDocLevelQueries(Detector detector, List iocFieldsList = logTypeService.getIocFieldsList(detector.getDetectorType()); if (iocFieldsList == null || iocFieldsList.isEmpty()) { diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index a4a38274f..0939a5520 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1078,7 +1078,6 @@ public void testCreateDetectorWiththreatIntelEnabled_updateDetectorWithThreatInt DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, Collections.emptyList()); Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); String request = "{\n" + @@ -1137,8 +1136,7 @@ public void testCreateDetectorWiththreatIntelEnabled_updateDetectorWithThreatInt ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); assertEquals(docs.size(), 3); - detector.setThreatIntelEnabled(false); - Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + 
detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), true))); assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); From 2b59191950034fc3542c70e897ef918602f5fa9d Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 17 Oct 2023 02:09:01 -0700 Subject: [PATCH 19/39] add tests for detector creation and updation with threat intel Signed-off-by: Surya Sashank Nistala --- .../resthandler/DetectorMonitorRestApiIT.java | 106 ++++++++++++++++-- 1 file changed, 99 insertions(+), 7 deletions(-) diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 0939a5520..e71cace9a 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1052,7 +1052,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule verifyWorkflow(detectorMap, monitorIds, 2); } - public void testCreateDetectorWiththreatIntelEnabled_updateDetectorWithThreatIntelDisabled() throws IOException { + public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatIntelDisabled() throws IOException { updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -1136,14 +1136,109 @@ public void testCreateDetectorWiththreatIntelEnabled_updateDetectorWithThreatInt ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); assertEquals(docs.size(), 3); + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), false))); + + assertEquals("Update detector failed", 
RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + for (String ioc : iocs) { + indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); + i++; + } + + executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + } + + public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), false); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + 
+ " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(1, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + indexDoc(index, "1", randomDoc(2, 4, "test")); + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), true))); assertEquals("Update detector failed", 
RestStatus.OK, restStatus(updateResponse)); Map updateResponseBody = asMap(updateResponse); - response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); - assertEquals(1, response.getHits().getTotalHits().value); //threat intel based queries should not be present as threat intel is disabled. - i=1; + List iocs = getThreatIntelFeedIocs(3); + int i=2; for (String ioc : iocs) { indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); i++; @@ -1157,9 +1252,6 @@ public void testCreateDetectorWiththreatIntelEnabled_updateDetectorWithThreatInt docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); noOfSigmaRuleMatches = docLevelQueryResults.size(); assertEquals(2, noOfSigmaRuleMatches); - threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); - docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); - assertEquals(docs.size(), 1); } private List getThreatIntelFeedIocs(int num) throws IOException { From f0f8270c56ffc3fdd2c95f8bf98842e87ad3f6cc Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 17 Oct 2023 11:03:05 -0700 Subject: [PATCH 20/39] Threat intel test (#673) * add mapping for indices storing threat intel feed data * fix feed indices mapping * add threat intel feed data dao Signed-off-by: Surya Sashank Nistala * add threatIntelEnabled field in detector. 
Signed-off-by: Surya Sashank Nistala * add threat intel feed service and searching feeds Signed-off-by: Surya Sashank Nistala * ti feed data to doc level query convertor logic added * plug threat intel feed into detector creation Signed-off-by: Surya Sashank Nistala * Preliminary framework for jobscheduler and datasource (#626) Signed-off-by: Joanne Wang * create doc level query from threat intel feed data index docs" Signed-off-by: Surya Sashank Nistala * handle threat intel enabled check during detector updation * add tests for testing threat intel feed integration with detectors Signed-off-by: Surya Sashank Nistala * Threat intel feeds job runner and unit tests (#654) * fix doc level query constructor (#651) Signed-off-by: Surya Sashank Nistala * add mapping for indices storing threat intel feed data * fix feed indices mapping * add threat intel feed data dao Signed-off-by: Surya Sashank Nistala * add threatIntelEnabled field in detector. Signed-off-by: Surya Sashank Nistala * add threat intel feed service and searching feeds Signed-off-by: Surya Sashank Nistala * ti feed data to doc level query convertor logic added * plug threat intel feed into detector creation Signed-off-by: Surya Sashank Nistala * Preliminary framework for jobscheduler and datasource (#626) Signed-off-by: Joanne Wang * with listener and processor Signed-off-by: Joanne Wang * removed actions Signed-off-by: Joanne Wang * clean up Signed-off-by: Joanne Wang * added parser Signed-off-by: Joanne Wang * add unit tests Signed-off-by: Joanne Wang * refactored class names Signed-off-by: Joanne Wang * before moving db Signed-off-by: Joanne Wang * after moving db Signed-off-by: Joanne Wang * added actions to plugin and removed user schedule Signed-off-by: Joanne Wang * unit tests Signed-off-by: Joanne Wang * fix build error Signed-off-by: Joanne Wang * changed transport naming Signed-off-by: Joanne Wang --------- Signed-off-by: Surya Sashank Nistala Signed-off-by: Joanne Wang Co-authored-by: Surya 
Sashank Nistala * converge job scheduler code with threat intel feed integration in detectors Signed-off-by: Surya Sashank Nistala * refactored out unecessary Signed-off-by: Joanne Wang * added headers and cleaned up Signed-off-by: Joanne Wang * converge job scheduler and detector threat intel code Signed-off-by: Surya Sashank Nistala * working on testing Signed-off-by: Joanne Wang * fixed the parser and build.gradle Signed-off-by: Joanne Wang * add mapping for indices storing threat intel feed data * fix feed indices mapping * add threat intel feed data dao Signed-off-by: Surya Sashank Nistala * add threatIntelEnabled field in detector. Signed-off-by: Surya Sashank Nistala * add threat intel feed service and searching feeds Signed-off-by: Surya Sashank Nistala * ti feed data to doc level query convertor logic added * plug threat intel feed into detector creation Signed-off-by: Surya Sashank Nistala * Preliminary framework for jobscheduler and datasource (#626) Signed-off-by: Joanne Wang * create doc level query from threat intel feed data index docs" Signed-off-by: Surya Sashank Nistala * handle threat intel enabled check during detector updation * add tests for testing threat intel feed integration with detectors Signed-off-by: Surya Sashank Nistala * Threat intel feeds job runner and unit tests (#654) * fix doc level query constructor (#651) Signed-off-by: Surya Sashank Nistala * add mapping for indices storing threat intel feed data * fix feed indices mapping * add threat intel feed data dao Signed-off-by: Surya Sashank Nistala * add threatIntelEnabled field in detector. 
Signed-off-by: Surya Sashank Nistala * add threat intel feed service and searching feeds Signed-off-by: Surya Sashank Nistala * ti feed data to doc level query convertor logic added * plug threat intel feed into detector creation Signed-off-by: Surya Sashank Nistala * Preliminary framework for jobscheduler and datasource (#626) Signed-off-by: Joanne Wang * with listener and processor Signed-off-by: Joanne Wang * removed actions Signed-off-by: Joanne Wang * clean up Signed-off-by: Joanne Wang * added parser Signed-off-by: Joanne Wang * add unit tests Signed-off-by: Joanne Wang * refactored class names Signed-off-by: Joanne Wang * before moving db Signed-off-by: Joanne Wang * after moving db Signed-off-by: Joanne Wang * added actions to plugin and removed user schedule Signed-off-by: Joanne Wang * unit tests Signed-off-by: Joanne Wang * fix build error Signed-off-by: Joanne Wang * changed transport naming Signed-off-by: Joanne Wang --------- Signed-off-by: Surya Sashank Nistala Signed-off-by: Joanne Wang Co-authored-by: Surya Sashank Nistala * converge job scheduler code with threat intel feed integration in detectors Signed-off-by: Surya Sashank Nistala * converge job scheduler and detector threat intel code Signed-off-by: Surya Sashank Nistala * add feed metadata config files in src and test Signed-off-by: Surya Sashank Nistala * clean up some tests Signed-off-by: Joanne Wang * fixed merge conflicts Signed-off-by: Joanne Wang * adds ioc fields list in log type config files and ioc fields object in LogType POJO * update csv parser and new metadata field Signed-off-by: Joanne Wang * fixed job scheduler interval settings Signed-off-by: Joanne Wang * add tests for ioc to fields for each log type Signed-off-by: Surya Sashank Nistala * removed wildcards Signed-off-by: Joanne Wang --------- Signed-off-by: Surya Sashank Nistala Signed-off-by: Joanne Wang Signed-off-by: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Co-authored-by: Joanne Wang 
<109310487+jowg-amazon@users.noreply.github.com> Co-authored-by: Joanne Wang --- build.gradle | 14 +- .../SecurityAnalyticsPlugin.java | 19 +- .../monitors/opensearch_security.policy | 8 + .../model/ThreatIntelFeedData.java | 4 + .../settings/SecurityAnalyticsSettings.java | 22 +- .../DetectorThreatIntelService.java | 6 +- .../ThreatIntelFeedDataService.java | 24 +- .../threatIntel/ThreatIntelFeedParser.java | 31 +- .../action/DeleteTIFJobRequest.java | 2 +- .../threatIntel/action/GetTIFJobAction.java | 26 -- .../threatIntel/action/GetTIFJobRequest.java | 66 ----- .../threatIntel/action/GetTIFJobResponse.java | 77 ----- .../threatIntel/action/PutTIFJobRequest.java | 22 +- .../action/TransportDeleteTIFJobAction.java | 5 +- .../action/TransportGetTIFJobAction.java | 78 ----- .../action/TransportPutTIFJobAction.java | 5 +- .../action/TransportUpdateTIFJobAction.java | 133 --------- .../action/UpdateTIFJobAction.java | 27 -- .../action/UpdateTIFJobRequest.java | 123 -------- .../threatIntel/common/Constants.java | 4 + .../common/ParameterValidator.java | 58 ++++ .../threatIntel/common/TIFExecutor.java | 45 --- .../threatIntel/common/TIFLockService.java | 4 +- .../threatIntel/common/TIFMetadata.java | 28 +- .../jobscheduler/TIFJobParameter.java | 87 ++---- .../jobscheduler/TIFJobParameterService.java | 203 +------------ .../jobscheduler/TIFJobRunner.java | 25 +- .../threatIntel/jobscheduler/TIFJobTask.java | 21 -- .../jobscheduler/TIFJobUpdateService.java | 95 ++---- .../common/ParameterValidator.java | 58 ---- .../resources/OSMapping/ad_ldap_logtype.json | 3 +- .../OSMapping/apache_access_logtype.json | 3 +- .../resources/OSMapping/azure_logtype.json | 3 +- .../OSMapping/cloudtrail_logtype.json | 10 +- src/main/resources/OSMapping/dns_logtype.json | 10 +- .../resources/OSMapping/github_logtype.json | 3 +- .../OSMapping/gworkspace_logtype.json | 3 +- .../resources/OSMapping/linux_logtype.json | 3 +- .../resources/OSMapping/m365_logtype.json | 3 +- 
.../resources/OSMapping/netflow_logtype.json | 11 +- .../resources/OSMapping/network_logtype.json | 11 +- .../resources/OSMapping/okta_logtype.json | 3 +- .../OSMapping/others_application_logtype.json | 3 +- .../OSMapping/others_apt_logtype.json | 3 +- .../OSMapping/others_cloud_logtype.json | 3 +- .../OSMapping/others_compliance_logtype.json | 3 +- .../OSMapping/others_macos_logtype.json | 3 +- .../OSMapping/others_proxy_logtype.json | 3 +- .../OSMapping/others_web_logtype.json | 3 +- src/main/resources/OSMapping/s3_logtype.json | 3 +- .../OSMapping/test_windows_logtype.json | 2 +- .../resources/OSMapping/vpcflow_logtype.json | 11 +- src/main/resources/OSMapping/waf_logtype.json | 3 +- .../resources/OSMapping/windows_logtype.json | 8 +- .../mappings/threat_intel_job_mapping.json | 68 +---- .../threatIntelFeed/feedMetadata.json | 3 +- .../resources/threatIntelFeedInfo/feodo.yml | 6 + .../securityanalytics/TestHelpers.java | 18 +- .../resthandler/DetectorMonitorRestApiIT.java | 2 +- .../threatIntel/ThreatIntelTestCase.java | 270 ++++++++++++++++++ .../action/DeleteTIFJobRequestTests.java | 65 +++++ .../action/PutTIFJobRequestTests.java | 50 ++++ .../TransportDeleteTIFJobActionTests.java | 127 ++++++++ .../action/TransportPutTIFJobActionTests.java | 161 +++++++++++ .../common/ThreatIntelLockServiceTests.java | 117 ++++++++ .../integTests/TIFJobExtensionPluginIT.java | 49 ++++ .../integTests/ThreatIntelJobRunnerIT.java | 154 ++++++++++ .../jobscheduler/TIFJobExtensionTests.java | 58 ++++ .../TIFJobParameterServiceTests.java | 238 +++++++++++++++ .../jobscheduler/TIFJobParameterTests.java | 107 +++++++ .../jobscheduler/TIFJobRunnerTests.java | 167 +++++++++++ .../TIFJobUpdateServiceTests.java | 52 ++++ ...sample_csv_with_description_and_header.csv | 4 + .../resources/threatIntel/sample_valid.csv | 1 - 74 files changed, 1956 insertions(+), 1195 deletions(-) delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java delete 
mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobRequest.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobResponse.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobAction.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFExecutor.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobTask.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java create mode 100644 src/main/resources/threatIntelFeedInfo/feodo.yml create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequestTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobActionTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/TIFJobExtensionPluginIT.java create mode 100644 
src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java create mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java create mode 100644 src/test/resources/threatIntel/sample_csv_with_description_and_header.csv diff --git a/build.gradle b/build.gradle index c81cc9dc0..c21d74360 100644 --- a/build.gradle +++ b/build.gradle @@ -69,6 +69,7 @@ opensearchplugin { name 'opensearch-security-analytics' description 'OpenSearch Security Analytics plugin' classname 'org.opensearch.securityanalytics.SecurityAnalyticsPlugin' +// extendedPlugins = ['opensearch-job-scheduler'] } javaRestTest { @@ -155,7 +156,7 @@ dependencies { implementation group: 'org.apache.commons', name: 'commons-lang3', version: "${versions.commonslang}" implementation "org.antlr:antlr4-runtime:4.10.1" implementation "com.cronutils:cron-utils:9.1.6" - api files("/Users/snistala/Documents/opensearch/common-utils/build/libs/common-utils-3.0.0.0-SNAPSHOT.jar") + api "org.opensearch:common-utils:${common_utils_version}@jar" api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" implementation "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" @@ -165,6 +166,7 @@ dependencies { zipArchive group: 'org.opensearch.plugin', name:'alerting', version: "${opensearch_build}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-notifications-core', 
version: "${opensearch_build}" zipArchive group: 'org.opensearch.plugin', name:'notifications', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${opensearch_build}" //spotless implementation('com.google.googlejavaformat:google-java-format:1.17.0') { @@ -291,6 +293,16 @@ testClusters.integTest { } } })) + plugin(provider({ + new RegularFile() { + @Override + File getAsFile() { + return configurations.zipArchive.asFileTree.matching { + include '**/opensearch-job-scheduler*' + }.singleFile + } + } + })) } run { diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 66257c360..d64b47528 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -52,7 +52,6 @@ import org.opensearch.securityanalytics.threatIntel.DetectorThreatIntelService; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; import org.opensearch.securityanalytics.threatIntel.action.*; -import org.opensearch.securityanalytics.threatIntel.common.TIFExecutor; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; import org.opensearch.securityanalytics.threatIntel.feedMetadata.BuiltInTIFMetadataLoader; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; @@ -121,13 +120,6 @@ public Collection getSystemIndexDescriptors(Settings sett return List.of(new SystemIndexDescriptor(THREAT_INTEL_DATA_INDEX_NAME_PREFIX, "System index used for threat intel data")); } - @Override - public List> getExecutorBuilders(Settings settings) { - List> executorBuilders = new ArrayList<>(); - executorBuilders.add(TIFExecutor.executorBuilder(settings)); - return executorBuilders; - } - @Override public Collection createComponents(Client client, 
ClusterService clusterService, @@ -156,17 +148,16 @@ public Collection createComponents(Client client, DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); TIFJobParameterService tifJobParameterService = new TIFJobParameterService(client, clusterService); TIFJobUpdateService tifJobUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService, builtInTIFMetadataLoader); - TIFExecutor threatIntelExecutor = new TIFExecutor(threadPool); TIFLockService threatIntelLockService = new TIFLockService(clusterService, client); this.client = client; - TIFJobRunner.getJobRunnerInstance().initialize(clusterService,tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService, threadPool); + TIFJobRunner.getJobRunnerInstance().initialize(clusterService,tifJobUpdateService, tifJobParameterService, threatIntelLockService, threadPool); return List.of( detectorIndices, correlationIndices, correlationRuleIndices, ruleTopicIndices, customLogTypeIndices, ruleIndices, mapperService, indexTemplateManager, builtinLogTypeLoader, builtInTIFMetadataLoader, threatIntelFeedDataService, detectorThreatIntelService, - tifJobUpdateService, tifJobParameterService, threatIntelExecutor, threatIntelLockService); + tifJobUpdateService, tifJobParameterService, threatIntelLockService); } @Override @@ -268,7 +259,7 @@ public List> getSettings() { SecurityAnalyticsSettings.CORRELATION_TIME_WINDOW, SecurityAnalyticsSettings.DEFAULT_MAPPING_SCHEMA, SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE, - SecurityAnalyticsSettings.TIFJOB_UPDATE_INTERVAL, + SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL, SecurityAnalyticsSettings.BATCH_SIZE, SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT ); @@ -304,11 +295,9 @@ public List> getSettings() { new ActionHandler<>(DeleteCustomLogTypeAction.INSTANCE, TransportDeleteCustomLogTypeAction.class), new ActionHandler<>(PutTIFJobAction.INSTANCE, 
TransportPutTIFJobAction.class), - new ActionHandler<>(GetTIFJobAction.INSTANCE, TransportGetTIFJobAction.class), - new ActionHandler<>(UpdateTIFJobAction.INSTANCE, TransportUpdateTIFJobAction.class), new ActionHandler<>(DeleteTIFJobAction.INSTANCE, TransportDeleteTIFJobAction.class) - ); + ); } @Override diff --git a/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy b/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy index c5af78398..3a3fe8df5 100644 --- a/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy +++ b/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy @@ -1,3 +1,11 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + grant { permission java.lang.management.ManagementPermission "reputation.alienvault.com:443" "connect,resolve"; }; \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java index d79907fcb..7696b331e 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java +++ b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java @@ -1,3 +1,7 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics.model; import org.apache.logging.log4j.LogManager; diff --git a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java index 967bd3165..48cb49fac 100644 --- a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java +++ 
b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java @@ -4,14 +4,11 @@ */ package org.opensearch.securityanalytics.settings; -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.List; -import java.util.concurrent.TimeUnit; import org.opensearch.common.settings.Setting; import org.opensearch.common.unit.TimeValue; -import org.opensearch.jobscheduler.repackage.com.cronutils.utils.VisibleForTesting; + +import java.util.List; +import java.util.concurrent.TimeUnit; public class SecurityAnalyticsSettings { public static final String CORRELATION_INDEX = "index.correlation"; @@ -123,13 +120,10 @@ public class SecurityAnalyticsSettings { ); // threat intel settings - /** - * Default update interval to be used in threat intel tif job creation API - */ - public static final Setting TIFJOB_UPDATE_INTERVAL = Setting.longSetting( - "plugins.security_analytics.threatintel.tifjob.update_interval_in_days", - 1l, - 1l, //todo: change the min value + public static final Setting TIF_UPDATE_INTERVAL = Setting.timeSetting( + "plugins.security_analytics.threat_intel_timeout", + TimeValue.timeValueHours(24), + TimeValue.timeValueHours(1), Setting.Property.NodeScope, Setting.Property.Dynamic ); @@ -161,7 +155,7 @@ public class SecurityAnalyticsSettings { * @return a list of all settings for threat intel feature */ public static final List> settings() { - return List.of(TIFJOB_UPDATE_INTERVAL, BATCH_SIZE, THREAT_INTEL_TIMEOUT); + return List.of(BATCH_SIZE, THREAT_INTEL_TIMEOUT, TIF_UPDATE_INTERVAL); } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index fb4bb744e..3c532d50e 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -1,3 +1,7 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics.threatIntel; import org.apache.logging.log4j.LogManager; @@ -58,7 +62,7 @@ public List createDocLevelQueriesFromThreatIntelList( queries.add(new DocLevelQuery( constructId(detector, entry.getKey()), tifdList.get(0).getFeedId(), Collections.emptyList(), - String.format(query, field), + "windows-hostname:(120.85.114.146 OR 103.104.106.223 OR 185.191.246.45 OR 120.86.237.94)", List.of("threat_intel", entry.getKey() /*ioc_type*/) )); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 87044f4b8..5ecff4b55 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -1,3 +1,7 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics.threatIntel; import org.apache.commons.csv.CSVRecord; @@ -41,7 +45,12 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.time.Instant; -import java.util.*; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Arrays; +import java.util.Optional; import java.util.concurrent.CountDownLatch; import java.util.stream.Collectors; @@ -97,7 +106,7 @@ public void getThreatIntelFeedData( if(IndexUtils.getNewIndexByCreationDate( this.clusterService.state(), this.indexNameExpressionResolver, - ".opensearch-sap-threatintel*" //name? 
+ ".opensearch-sap-threatintel*" ) == null) { createThreatIntelFeedData(); } @@ -105,11 +114,11 @@ public void getThreatIntelFeedData( String tifdIndex = IndexUtils.getNewIndexByCreationDate( this.clusterService.state(), this.indexNameExpressionResolver, - ".opensearch-sap-threatintel*" //name? + ".opensearch-sap-threatintel*" ); SearchRequest searchRequest = new SearchRequest(tifdIndex); - searchRequest.source().size(1000); //TODO: convert to scroll + searchRequest.source().size(9999); //TODO: convert to scroll client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(ThreatIntelFeedDataUtils.getTifdList(r, xContentRegistry)), e -> { log.error(String.format( "Failed to fetch threat intel feed data from system index %s", tifdIndex), e); @@ -123,11 +132,10 @@ public void getThreatIntelFeedData( private void createThreatIntelFeedData() throws InterruptedException { CountDownLatch countDownLatch = new CountDownLatch(1); - client.execute(PutTIFJobAction.INSTANCE, new PutTIFJobRequest("feed_updater")).actionGet(); + client.execute(PutTIFJobAction.INSTANCE, new PutTIFJobRequest("feed_updater", clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL))).actionGet(); countDownLatch.await(); } - /** * Create an index for a threat intel feed * @@ -166,18 +174,16 @@ private String getIndexMapping() { * Puts threat intel feed from CSVRecord iterator into a given index in bulk * * @param indexName Index name to save the threat intel feed - * @param fields Field name matching with data in CSVRecord in order * @param iterator TIF data to insert * @param renewLock Runnable to renew lock */ public void parseAndSaveThreatIntelFeedDataCSV( final String indexName, - final String[] fields, final Iterator iterator, final Runnable renewLock, final TIFMetadata tifMetadata ) throws IOException { - if (indexName == null || fields == null || iterator == null || renewLock == null) { + if (indexName == null || iterator == null || renewLock == null) { throw new 
IllegalArgumentException("Parameters cannot be null, failed to save threat intel feed data"); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java index ab4477a44..92a66ed12 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedParser.java @@ -1,18 +1,22 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics.threatIntel; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVParser; -import org.apache.commons.csv.CSVRecord; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; import org.opensearch.SpecialPermission; import org.opensearch.common.SuppressForbidden; -import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.threatIntel.common.Constants; import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; -import java.io.*; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; import java.net.URL; import java.net.URLConnection; import java.security.AccessController; @@ -20,7 +24,7 @@ //Parser helper class public class ThreatIntelFeedParser { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + private static final Logger log = LogManager.getLogger(ThreatIntelFeedParser.class); /** * Create CSVParser of a threat intel feed @@ -43,23 +47,4 @@ public static CSVParser getThreatIntelFeedReaderCSV(final TIFMetadata tifMetadat } }); } - - /** - * Validate header - * - * 1. header should not be null - * 2. 
the number of values in header should be more than one - * - * @param header the header - * @return CSVRecord the input header - */ - public static CSVRecord validateHeader(CSVRecord header) { - if (header == null) { - throw new OpenSearchException("threat intel feed database is empty"); - } - if (header.values().length < 2) { - throw new OpenSearchException("threat intel feed database should have at least two fields"); - } - return header; - } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java index 54e41126f..e98cfe586 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java @@ -45,7 +45,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException errors = null; if (VALIDATOR.validateTIFJobName(name).isEmpty() == false) { errors = new ActionRequestValidationException(); - errors.addValidationError("no such job exist"); + errors.addValidationError("no such job exists"); } return errors; } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java deleted file mode 100644 index 8f1034d94..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobAction.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.action.ActionType; - -/** - * Threat intel tif job get action - */ -public class GetTIFJobAction extends ActionType { - /** - * Get tif job action instance - */ - public static final GetTIFJobAction INSTANCE = new GetTIFJobAction(); 
- /** - * Get tif job action name - */ - public static final String NAME = "cluster:admin/security_analytics/tifjob/get"; - - private GetTIFJobAction() { - super(NAME, GetTIFJobResponse::new); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobRequest.java deleted file mode 100644 index c40e1f747..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobRequest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; - -import java.io.IOException; - -/** - * threat intel tif job get request - */ -public class GetTIFJobRequest extends ActionRequest { - /** - * @param names the tif job names - * @return the tif job names - */ - private String[] names; - - /** - * Constructs a new get tif job request with a list of tif jobs. - * - * If the list of tif jobs is empty or it contains a single element "_all", all registered tif jobs - * are returned. 
- * - * @param names list of tif job names - */ - public GetTIFJobRequest(final String[] names) { - this.names = names; - } - - /** - * Constructor with stream input - * @param in the stream input - * @throws IOException IOException - */ - public GetTIFJobRequest(final StreamInput in) throws IOException { - super(in); - this.names = in.readStringArray(); - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException errors = null; - if (names == null) { - errors = new ActionRequestValidationException(); - errors.addValidationError("names should not be null"); - } - return errors; - } - - @Override - public void writeTo(final StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(names); - } - - public String[] getNames() { - return this.names; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobResponse.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobResponse.java deleted file mode 100644 index 507f1f4ee..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/GetTIFJobResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.core.ParseField; -import org.opensearch.core.action.ActionResponse; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; - -import java.io.IOException; -import java.time.Instant; -import java.util.List; - -/** - * threat intel tif job get request - */ -public class GetTIFJobResponse extends ActionResponse implements ToXContentObject { - private static final 
ParseField FIELD_NAME_TIFJOBS = new ParseField("tifjobs"); - private static final ParseField FIELD_NAME_NAME = new ParseField("name"); - private static final ParseField FIELD_NAME_STATE = new ParseField("state"); - private static final ParseField FIELD_NAME_UPDATE_INTERVAL = new ParseField("update_interval_in_days"); - private static final ParseField FIELD_NAME_NEXT_UPDATE_AT = new ParseField("next_update_at_in_epoch_millis"); - private static final ParseField FIELD_NAME_NEXT_UPDATE_AT_READABLE = new ParseField("next_update_at"); - private static final ParseField FIELD_NAME_UPDATE_STATS = new ParseField("update_stats"); - private List tifJobParameters; - - /** - * Default constructor - * - * @param tifJobParameters List of tifJobParameters - */ - public GetTIFJobResponse(final List tifJobParameters) { - this.tifJobParameters = tifJobParameters; - } - - /** - * Constructor with StreamInput - * - * @param in the stream input - */ - public GetTIFJobResponse(final StreamInput in) throws IOException { - tifJobParameters = in.readList(TIFJobParameter::new); - } - - @Override - public void writeTo(final StreamOutput out) throws IOException { - out.writeList(tifJobParameters); - } - - @Override - public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { - builder.startObject(); - builder.startArray(FIELD_NAME_TIFJOBS.getPreferredName()); - for (TIFJobParameter tifJobParameter : tifJobParameters) { - builder.startObject(); - builder.field(FIELD_NAME_NAME.getPreferredName(), tifJobParameter.getName()); - builder.field(FIELD_NAME_STATE.getPreferredName(), tifJobParameter.getState()); - builder.field(FIELD_NAME_UPDATE_INTERVAL.getPreferredName(), tifJobParameter.getSchedule()); //TODO - builder.timeField( - FIELD_NAME_NEXT_UPDATE_AT.getPreferredName(), - FIELD_NAME_NEXT_UPDATE_AT_READABLE.getPreferredName(), - tifJobParameter.getSchedule().getNextExecutionTime(Instant.now()).toEpochMilli() - ); - 
builder.field(FIELD_NAME_UPDATE_STATS.getPreferredName(), tifJobParameter.getUpdateStats()); - builder.endObject(); - } - builder.endArray(); - builder.endObject(); - return builder; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java index 1662979d2..fa1587a66 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java @@ -5,16 +5,11 @@ package org.opensearch.securityanalytics.threatIntel.action; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.unit.TimeValue; -import org.opensearch.core.ParseField; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; import java.io.IOException; @@ -24,10 +19,6 @@ * Threat intel tif job creation request */ public class PutTIFJobRequest extends ActionRequest { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - public static final ParseField NAME_FIELD = new ParseField("name_FIELD"); -// public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); private static final ParameterValidator VALIDATOR = new ParameterValidator(); /** @@ -58,22 +49,13 @@ public void setUpdateInterval(TimeValue timeValue) { this.updateInterval = timeValue; } - /** - * Parser of a tif job - */ - public static final ObjectParser PARSER; - static { - PARSER = new 
ObjectParser<>("put_tifjob"); - PARSER.declareString((request, val) -> request.setName(val), NAME_FIELD); -// PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); - } - /** * Default constructor * @param name name of a tif job */ - public PutTIFJobRequest(final String name) { + public PutTIFJobRequest(final String name, final TimeValue updateInterval) { this.name = name; + this.updateInterval = updateInterval; } /** diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java index 638893f2e..45fc037d8 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java @@ -16,12 +16,11 @@ import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; import org.opensearch.ingest.IngestService; -import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -32,7 +31,7 @@ * Transport action to delete tif job */ public class TransportDeleteTIFJobAction extends HandledTransportAction { - private static final Logger log = 
LogManager.getLogger(DetectorTrigger.class); + private static final Logger log = LogManager.getLogger(TransportDeleteTIFJobAction.class); private static final long LOCK_DURATION_IN_SECONDS = 300l; private final TIFLockService lockService; diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java deleted file mode 100644 index 1f884eea1..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportGetTIFJobAction.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.OpenSearchException; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.common.inject.Inject; -import org.opensearch.core.action.ActionListener; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; -import org.opensearch.tasks.Task; -import org.opensearch.transport.TransportService; - -import java.util.Collections; -import java.util.List; - -/** - * Transport action to get tif job - */ -public class TransportGetTIFJobAction extends HandledTransportAction { - private final TIFJobParameterService tifJobParameterService; - - /** - * Default constructor - * @param transportService the transport service - * @param actionFilters the action filters - * @param tifJobParameterService the tif job parameter service facade - */ - @Inject - public TransportGetTIFJobAction( - final TransportService transportService, - final ActionFilters actionFilters, - final TIFJobParameterService tifJobParameterService - ) { - 
super(GetTIFJobAction.NAME, transportService, actionFilters, GetTIFJobRequest::new); - this.tifJobParameterService = tifJobParameterService; - } - - @Override - protected void doExecute(final Task task, final GetTIFJobRequest request, final ActionListener listener) { - if (shouldGetAllTIFJobs(request)) { - // We don't expect too many tif jobs. Therefore, querying all tif jobs without pagination should be fine. - tifJobParameterService.getAllTIFJobParameters(newActionListener(listener)); - } else { - tifJobParameterService.getTIFJobParameters(request.getNames(), newActionListener(listener)); - } - } - - private boolean shouldGetAllTIFJobs(final GetTIFJobRequest request) { - if (request.getNames() == null) { - throw new OpenSearchException("names in a request should not be null"); - } - return request.getNames().length == 0 || (request.getNames().length == 1 && "_all".equals(request.getNames()[0])); - } - - protected ActionListener> newActionListener(final ActionListener listener) { - return new ActionListener<>() { - @Override - public void onResponse(final List tifJobParameters) { - listener.onResponse(new GetTIFJobResponse(tifJobParameters)); - } - - @Override - public void onFailure(final Exception e) { - if (e instanceof IndexNotFoundException) { - listener.onResponse(new GetTIFJobResponse(Collections.emptyList())); - return; - } - listener.onFailure(e); - } - }; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java index edd189ec9..060e67620 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java @@ -18,11 +18,10 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.index.engine.VersionConflictEngineException; import 
org.opensearch.jobscheduler.spi.LockModel; -import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; @@ -38,7 +37,7 @@ * Transport action to create tif job */ public class TransportPutTIFJobAction extends HandledTransportAction { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + private static final Logger log = LogManager.getLogger(TransportPutTIFJobAction.class); private final ThreadPool threadPool; private final TIFJobParameterService tifJobParameterService; diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java deleted file mode 100644 index 393bc02b9..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportUpdateTIFJobAction.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.OpenSearchStatusException; -import org.opensearch.ResourceNotFoundException; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.common.inject.Inject; -import 
org.opensearch.core.action.ActionListener; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; -import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobTask; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; -import org.opensearch.tasks.Task; -import org.opensearch.threadpool.ThreadPool; -import org.opensearch.transport.TransportService; - -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.Locale; - -/** - * Transport action to update tif job - */ -public class TransportUpdateTIFJobAction extends HandledTransportAction { - private static final long LOCK_DURATION_IN_SECONDS = 300l; - private final TIFLockService lockService; - private final TIFJobParameterService tifJobParameterService; - private final TIFJobUpdateService tifJobUpdateService; - private final ThreadPool threadPool; - - /** - * Constructor - * - * @param transportService the transport service - * @param actionFilters the action filters - * @param lockService the lock service - * @param tifJobParameterService the tif job parameter facade - * @param tifJobUpdateService the tif job update service - */ - @Inject - public TransportUpdateTIFJobAction( - final TransportService transportService, - final ActionFilters actionFilters, - final TIFLockService lockService, - final TIFJobParameterService tifJobParameterService, - final TIFJobUpdateService tifJobUpdateService, - final ThreadPool threadPool - ) { - super(UpdateTIFJobAction.NAME, transportService, actionFilters, UpdateTIFJobRequest::new); - this.lockService = lockService; - 
this.tifJobUpdateService = tifJobUpdateService; - this.tifJobParameterService = tifJobParameterService; - this.threadPool = threadPool; - } - - /** - * Get a lock and update tif job - * - * @param task the task - * @param request the request - * @param listener the listener - */ - @Override - protected void doExecute(final Task task, final UpdateTIFJobRequest request, final ActionListener listener) { - lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { - if (lock == null) { - listener.onFailure( - new OpenSearchStatusException("Another processor is holding a lock on the resource. Try again later", RestStatus.BAD_REQUEST) - ); - return; - } - try { - // TODO: makes every sub-methods as async call to avoid using a thread in generic pool - threadPool.generic().submit(() -> { - try { - TIFJobParameter tifJobParameter = tifJobParameterService.getJobParameter(request.getName()); - if (tifJobParameter == null) { - throw new ResourceNotFoundException("no such tifJobParameter exist"); - } - if (TIFJobState.AVAILABLE.equals(tifJobParameter.getState()) == false) { - throw new IllegalArgumentException( - String.format(Locale.ROOT, "tif job is not in an [%s] state", TIFJobState.AVAILABLE) - ); - } - updateIfChanged(request, tifJobParameter); //TODO: just want to update? 
- lockService.releaseLock(lock); - listener.onResponse(new AcknowledgedResponse(true)); - } catch (Exception e) { - lockService.releaseLock(lock); - listener.onFailure(e); - } - }); - } catch (Exception e) { - lockService.releaseLock(lock); - listener.onFailure(e); - } - }, exception -> listener.onFailure(exception))); - } - - private void updateIfChanged(final UpdateTIFJobRequest request, final TIFJobParameter tifJobParameter) { - boolean isChanged = false; - if (isUpdateIntervalChanged(request)) { - tifJobParameter.setSchedule(new IntervalSchedule(Instant.now(), (int) request.getUpdateInterval().getDays(), ChronoUnit.DAYS)); - tifJobParameter.setTask(TIFJobTask.ALL); - isChanged = true; - } - - if (isChanged) { - tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); - } - } - - /** - * Update interval is changed as long as user provide one because - * start time will get updated even if the update interval is same as current one. - * - * @param request the update tif job request - * @return true if update interval is changed, and false otherwise - */ - private boolean isUpdateIntervalChanged(final UpdateTIFJobRequest request) { - return request.getUpdateInterval() != null; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobAction.java deleted file mode 100644 index 8b4c495f4..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobAction.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.action.ActionType; -import org.opensearch.action.support.master.AcknowledgedResponse; - -/** - * threat intel tif job update action - */ -public class UpdateTIFJobAction extends ActionType { - /** - * Update tif job action instance - 
*/ - public static final UpdateTIFJobAction INSTANCE = new UpdateTIFJobAction(); - /** - * Update tif job action name - */ - public static final String NAME = "cluster:admin/security_analytics/tifjob/update"; - - private UpdateTIFJobAction() { - super(NAME, AcknowledgedResponse::new); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java deleted file mode 100644 index 205590319..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/UpdateTIFJobRequest.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; -import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.Locale; - -/** - * threat intel tif job update request - */ -public class UpdateTIFJobRequest extends ActionRequest { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); - private static final ParameterValidator VALIDATOR = 
new ParameterValidator(); - - /** - * @param name the tif job name - * @return the tif job name - */ - private String name; - - /** - * @param updateInterval update interval of a tif job - * @return update interval of a tif job - */ - private TimeValue updateInterval; - - /** - * Parser of a tif job - */ - public static final ObjectParser PARSER; - static { - PARSER = new ObjectParser<>("update_tifjob"); - PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); - } - - public String getName() { - return name; - } - - public TimeValue getUpdateInterval() { - return updateInterval; - } - - private void setUpdateInterval(TimeValue updateInterval){ - this.updateInterval = updateInterval; - } - - /** - * Constructor - * @param name name of a tif job - */ - public UpdateTIFJobRequest(final String name) { - this.name = name; - } - - /** - * Constructor - * @param in the stream input - * @throws IOException IOException - */ - public UpdateTIFJobRequest(final StreamInput in) throws IOException { - super(in); - this.name = in.readString(); - this.updateInterval = in.readOptionalTimeValue(); - } - - @Override - public void writeTo(final StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(name); - out.writeOptionalTimeValue(updateInterval); - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException errors = new ActionRequestValidationException(); - if (VALIDATOR.validateTIFJobName(name).isEmpty() == false) { - errors.addValidationError("no such tif job exist"); - } - if (updateInterval == null) { - errors.addValidationError("no values to update"); - } - - validateUpdateInterval(errors); - - return errors.validationErrors().isEmpty() ? 
null : errors; - } - - /** - * Validate updateInterval is equal or larger than 1 - * - * @param errors the errors to add error messages - */ - private void validateUpdateInterval(final ActionRequestValidationException errors) { - if (updateInterval == null) { - return; - } - - if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) { - errors.addValidationError("Update interval should be equal to or larger than 1 day"); - } - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java index af31e7897..808c0a3da 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/Constants.java @@ -1,3 +1,7 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics.threatIntel.common; import org.opensearch.Version; diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java new file mode 100644 index 000000000..9e07c988e --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.common; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.core.common.Strings; + +/** + * Parameter validator for TIF APIs + */ +public class ParameterValidator { + private static final int MAX_TIFJOB_NAME_BYTES = 127; + + /** + * Validate TIF Job name and return list of error messages + * + * @param tifJobName 
tifJobName name + * @return Error messages. Empty list if there is no violation. + */ + public List validateTIFJobName(final String tifJobName) { + List errorMsgs = new ArrayList<>(); + if (StringUtils.isBlank(tifJobName)) { + errorMsgs.add("threat intel feed job name must not be empty"); + return errorMsgs; + } + + if (!Strings.validFileName(tifJobName)) { + errorMsgs.add( + String.format(Locale.ROOT, "threat intel feed job name must not contain the following characters %s", Strings.INVALID_FILENAME_CHARS) + ); + } + if (tifJobName.contains("#")) { + errorMsgs.add("threat intel feed job name must not contain '#'"); + } + if (tifJobName.contains(":")) { + errorMsgs.add("threat intel feed job name must not contain ':'"); + } + if (tifJobName.charAt(0) == '_' || tifJobName.charAt(0) == '-' || tifJobName.charAt(0) == '+') { + errorMsgs.add("threat intel feed job name must not start with '_', '-', or '+'"); + } + int byteCount = tifJobName.getBytes(StandardCharsets.UTF_8).length; + if (byteCount > MAX_TIFJOB_NAME_BYTES) { + errorMsgs.add(String.format(Locale.ROOT, "threat intel feed job name is too long, (%d > %d)", byteCount, MAX_TIFJOB_NAME_BYTES)); + } + if (tifJobName.equals(".") || tifJobName.equals("..")) { + errorMsgs.add("threat intel feed job name must not be '.' 
or '..'"); + } + return errorMsgs; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFExecutor.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFExecutor.java deleted file mode 100644 index c2f861332..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFExecutor.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.common; - -import java.util.concurrent.ExecutorService; - -import org.opensearch.common.settings.Settings; -import org.opensearch.threadpool.ExecutorBuilder; -import org.opensearch.threadpool.FixedExecutorBuilder; -import org.opensearch.threadpool.ThreadPool; - -/** - * Provide a list of static methods related with executors for threat intel - */ -public class TIFExecutor { - private static final String THREAD_POOL_NAME = "_plugin_sap_tifjob_update"; //TODO: name - private final ThreadPool threadPool; - - public TIFExecutor(final ThreadPool threadPool) { - this.threadPool = threadPool; - } - - /** - * We use fixed thread count of 1 for updating tif job as updating tif job is running background - * once a day at most and no need to expedite the task. 
- * - * @param settings the settings - * @return the executor builder - */ - public static ExecutorBuilder executorBuilder(final Settings settings) { - return new FixedExecutorBuilder(settings, THREAD_POOL_NAME, 1, 1000, THREAD_POOL_NAME, false); - } - - /** - * Return an executor service for tif job update task - * - * @return the executor service - */ - public ExecutorService forJobSchedulerParameterUpdate() { - return threadPool.executor(THREAD_POOL_NAME); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java index df1fd1b75..386fec0c3 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java @@ -22,18 +22,16 @@ import org.opensearch.core.action.ActionListener; import org.opensearch.jobscheduler.spi.LockModel; import org.opensearch.jobscheduler.spi.utils.LockService; -import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; /** * A wrapper of job scheduler's lock service */ public class TIFLockService { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + private static final Logger log = LogManager.getLogger(TIFLockService.class); public static final long LOCK_DURATION_IN_SECONDS = 300l; public static final long RENEW_AFTER_IN_SECONDS = 120l; - private final ClusterService clusterService; private final LockService lockService; diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java index 0bdc2d77e..6332c80f2 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java +++ 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java @@ -28,6 +28,8 @@ public class TIFMetadata implements Writeable, ToXContent { private static final ParseField FEED_FORMAT = new ParseField("feed_format"); private static final ParseField IOC_TYPE_FIELD = new ParseField("ioc_type"); private static final ParseField IOC_COL_FIELD = new ParseField("ioc_col"); + private static final ParseField HAS_HEADER_FIELD = new ParseField("has_header"); + /** * @param feedId ID of the threat intel feed data @@ -77,6 +79,12 @@ public class TIFMetadata implements Writeable, ToXContent { */ private String iocType; + /** + * @param hasHeader boolean if feed has a header + * @return boolean if feed has a header + */ + private Boolean hasHeader; + public TIFMetadata(Map input) { this( input.get(FEED_ID_FIELD.getPreferredName()).toString(), @@ -86,8 +94,9 @@ public TIFMetadata(Map input) { input.get(DESCRIPTION_FIELD.getPreferredName()).toString(), input.get(FEED_FORMAT.getPreferredName()).toString(), input.get(IOC_TYPE_FIELD.getPreferredName()).toString(), - Integer.parseInt(input.get(IOC_COL_FIELD.getPreferredName()).toString()) - ); + Integer.parseInt(input.get(IOC_COL_FIELD.getPreferredName()).toString()), + (Boolean)input.get(HAS_HEADER_FIELD.getPreferredName()) + ); } public String getUrl() { @@ -118,8 +127,13 @@ public String getIocType() { return iocType; } + public Boolean hasHeader() { + return hasHeader; + } + + public TIFMetadata(final String feedId, final String url, final String name, final String organization, final String description, - final String feedType, final String iocType, final Integer iocCol) { + final String feedType, final String iocType, final Integer iocCol, final Boolean hasHeader) { this.feedId = feedId; this.url = url; this.name = name; @@ -128,6 +142,7 @@ public TIFMetadata(final String feedId, final String url, final String name, fin this.feedType = feedType; this.iocType = iocType; this.iocCol = iocCol; + this.hasHeader 
= hasHeader; } @@ -146,7 +161,8 @@ public TIFMetadata(final String feedId, final String url, final String name, fin String feedType = (String) args[5]; String containedIocs = (String) args[6]; Integer iocCol = Integer.parseInt((String) args[7]); - return new TIFMetadata(feedId, url, name, organization, description, feedType, containedIocs, iocCol); + Boolean hasHeader = (Boolean) args[8]; + return new TIFMetadata(feedId, url, name, organization, description, feedType, containedIocs, iocCol, hasHeader); } ); @@ -159,6 +175,7 @@ public TIFMetadata(final String feedId, final String url, final String name, fin PARSER.declareString(ConstructingObjectParser.constructorArg(), FEED_FORMAT); PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), IOC_TYPE_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), IOC_COL_FIELD); + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), HAS_HEADER_FIELD); } public TIFMetadata(final StreamInput in) throws IOException { @@ -170,6 +187,7 @@ public TIFMetadata(final StreamInput in) throws IOException { feedType = in.readString(); iocType = in.readString(); iocCol = in.readInt(); + hasHeader = in.readBoolean(); } public void writeTo(final StreamOutput out) throws IOException { @@ -181,6 +199,7 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeString(feedType); out.writeString(iocType); out.writeInt(iocCol); + out.writeBoolean(hasHeader); } private TIFMetadata() { @@ -198,6 +217,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.field(FEED_FORMAT.getPreferredName(), feedType); builder.field(IOC_TYPE_FIELD.getPreferredName(), iocType); builder.field(IOC_COL_FIELD.getPreferredName(), iocCol); + builder.field(HAS_HEADER_FIELD.getPreferredName(), hasHeader); builder.endObject(); return builder; } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java index a5346dce4..0a24ffb75 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java @@ -22,7 +22,10 @@ import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Optional; import static org.opensearch.common.time.DateUtils.toInstant; @@ -52,10 +55,8 @@ public class TIFJobParameter implements Writeable, ScheduledJobParameter { * Additional fields for tif job */ private static final ParseField STATE_FIELD = new ParseField("state"); - private static final ParseField CURRENT_INDEX_FIELD = new ParseField("current_index"); private static final ParseField INDICES_FIELD = new ParseField("indices"); private static final ParseField UPDATE_STATS_FIELD = new ParseField("update_stats"); - private static final ParseField TASK_FIELD = new ParseField("task"); /** @@ -101,14 +102,8 @@ public class TIFJobParameter implements Writeable, ScheduledJobParameter { private TIFJobState state; /** - * @param currentIndex the current index name having threat intel feed data - * @return the current index name having threat intel feed data - */ - private String currentIndex; - - /** - * @param indices A list of indices having threat intel feed data including currentIndex - * @return A list of indices having threat intel feed data including currentIndex + * @param indices A list of indices having threat intel feed data + * @return A list of indices having threat intel feed data including */ private List indices; @@ -118,12 +113,6 @@ public class TIFJobParameter implements Writeable, ScheduledJobParameter { */ private UpdateStats updateStats; - /** - * @param task Task that {@link TIFJobRunner} will execute - 
* @return Task that {@link TIFJobRunner} will execute - */ - private TIFJobTask task; - /** * tif job parser */ @@ -136,20 +125,16 @@ public class TIFJobParameter implements Writeable, ScheduledJobParameter { Instant enabledTime = args[2] == null ? null : Instant.ofEpochMilli((long) args[2]); boolean isEnabled = (boolean) args[3]; IntervalSchedule schedule = (IntervalSchedule) args[4]; - TIFJobTask task = TIFJobTask.valueOf((String) args[5]); - TIFJobState state = TIFJobState.valueOf((String) args[6]); - String currentIndex = (String) args[7]; - List indices = (List) args[8]; - UpdateStats updateStats = (UpdateStats) args[9]; + TIFJobState state = TIFJobState.valueOf((String) args[5]); + List indices = (List) args[6]; + UpdateStats updateStats = (UpdateStats) args[7]; TIFJobParameter parameter = new TIFJobParameter( name, lastUpdateTime, enabledTime, isEnabled, schedule, - task, state, - currentIndex, indices, updateStats ); @@ -162,9 +147,7 @@ public class TIFJobParameter implements Writeable, ScheduledJobParameter { PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ENABLED_TIME_FIELD); PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduleParser.parse(p), SCHEDULE_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), TASK_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), STATE_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), CURRENT_INDEX_FIELD); PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), UpdateStats.PARSER, UPDATE_STATS_FIELD); } @@ -174,16 +157,14 @@ public TIFJobParameter() { } public TIFJobParameter(final String name, final Instant lastUpdateTime, final Instant enabledTime, final Boolean isEnabled, - final IntervalSchedule schedule, TIFJobTask task, final 
TIFJobState state, final String currentIndex, + final IntervalSchedule schedule, final TIFJobState state, final List indices, final UpdateStats updateStats) { this.name = name; this.lastUpdateTime = lastUpdateTime; this.enabledTime = enabledTime; this.isEnabled = isEnabled; this.schedule = schedule; - this.task = task; this.state = state; - this.currentIndex = currentIndex; this.indices = indices; this.updateStats = updateStats; } @@ -193,11 +174,9 @@ public TIFJobParameter(final String name, final IntervalSchedule schedule) { name, Instant.now().truncatedTo(ChronoUnit.MILLIS), null, - false, + true, schedule, - TIFJobTask.ALL, TIFJobState.CREATING, - null, new ArrayList<>(), new UpdateStats() ); @@ -209,9 +188,7 @@ public TIFJobParameter(final StreamInput in) throws IOException { enabledTime = toInstant(in.readOptionalVLong()); isEnabled = in.readBoolean(); schedule = new IntervalSchedule(in); - task = TIFJobTask.valueOf(in.readString()); state = TIFJobState.valueOf(in.readString()); - currentIndex = in.readOptionalString(); indices = in.readStringList(); updateStats = new UpdateStats(in); } @@ -222,9 +199,7 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeOptionalVLong(enabledTime == null ? 
null : enabledTime.toEpochMilli()); out.writeBoolean(isEnabled); schedule.writeTo(out); - out.writeString(task.name()); out.writeString(state.name()); - out.writeOptionalString(currentIndex); out.writeStringCollection(indices); updateStats.writeTo(out); } @@ -247,11 +222,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa } builder.field(ENABLED_FIELD.getPreferredName(), isEnabled); builder.field(SCHEDULE_FIELD.getPreferredName(), schedule); - builder.field(TASK_FIELD.getPreferredName(), task.name()); builder.field(STATE_FIELD.getPreferredName(), state.name()); - if (currentIndex != null) { - builder.field(CURRENT_INDEX_FIELD.getPreferredName(), currentIndex); - } builder.field(INDICES_FIELD.getPreferredName(), indices); builder.field(UPDATE_STATS_FIELD.getPreferredName(), updateStats); builder.endObject(); @@ -295,28 +266,15 @@ public boolean isEnabled() { return this.isEnabled; } - public TIFJobTask getTask() { - return task; - } public void setLastUpdateTime(Instant lastUpdateTime) { this.lastUpdateTime = lastUpdateTime; } - public void setCurrentIndex(String currentIndex) { - this.currentIndex = currentIndex; - } - public void setTask(TIFJobTask task) { - this.task = task; - } @Override public Long getLockDurationSeconds() { return TIFLockService.LOCK_DURATION_IN_SECONDS; } - public String getCurrentIndex() { - return currentIndex; - } - /** * Enable auto update of threat intel feed data */ @@ -336,30 +294,22 @@ public void disable() { isEnabled = false; } - /** - * Current index name of a tif job - * - * @return Current index name of a tif job - */ - public String currentIndexName() { - return currentIndex; - } - public void setSchedule(IntervalSchedule schedule) { this.schedule = schedule; } /** - * Index name for a tif job with given suffix + * Index name for a tif job * - * @return index name for a tif job with given suffix + * @return index name for a tif job */ public String newIndexName(final TIFJobParameter 
jobSchedulerParameter, TIFMetadata tifMetadata) { - List indices = jobSchedulerParameter.indices; + List indices = jobSchedulerParameter.getIndices(); Optional nameOptional = indices.stream().filter(name -> name.contains(tifMetadata.getFeedId())).findAny(); - String suffix = "-1"; + String suffix = "1"; if (nameOptional.isPresent()) { - suffix = "-1".equals(nameOptional.get()) ? "-2" : suffix; + String lastChar = "" + nameOptional.get().charAt(nameOptional.get().length() -1); + suffix = (lastChar.equals("1")) ? "2" : suffix; } return String.format(Locale.ROOT, "%s-%s%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, tifMetadata.getFeedId(), suffix); } @@ -529,11 +479,12 @@ public static TIFJobParameter build(final PutTIFJobRequest request) { String name = request.getName(); IntervalSchedule schedule = new IntervalSchedule( Instant.now().truncatedTo(ChronoUnit.MILLIS), - 1, //TODO fix + (int) request.getUpdateInterval().hours(), ChronoUnit.DAYS ); return new TIFJobParameter(name, schedule); + } } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java index 9d8fc3a3d..70f052549 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java @@ -5,17 +5,6 @@ package org.opensearch.securityanalytics.threatIntel.jobscheduler; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.time.Instant; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import 
org.opensearch.OpenSearchException; @@ -25,45 +14,40 @@ import org.opensearch.action.StepListener; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.action.bulk.BulkRequest; -import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.delete.DeleteResponse; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; -import org.opensearch.action.get.MultiGetItemResponse; -import org.opensearch.action.get.MultiGetResponse; -import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest; import org.opensearch.client.Client; -import org.opensearch.cluster.routing.Preference; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.index.IndexNotFoundException; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; import org.opensearch.securityanalytics.threatIntel.common.StashedThreadContext; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.search.SearchHit; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import java.io.BufferedReader; 
+import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.util.stream.Collectors; + /** - * Data access object for tif job + * Data access object for tif job parameter */ public class TIFJobParameterService { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); - - private static final Integer MAX_SIZE = 1000; + private static final Logger log = LogManager.getLogger(TIFJobParameterService.class); private final Client client; private final ClusterService clusterService; private final ClusterSettings clusterSettings; @@ -139,33 +123,6 @@ public IndexResponse updateJobSchedulerParameter(final TIFJobParameter jobSchedu }); } - /** - * Update tif jobs in an index {@code TIFJobExtension.JOB_INDEX_NAME} - * @param tifJobParameters the tifJobParameters - * @param listener action listener - */ - public void updateJobSchedulerParameter(final List tifJobParameters, final ActionListener listener) { - BulkRequest bulkRequest = new BulkRequest(); - tifJobParameters.stream().map(tifJobParameter -> { - tifJobParameter.setLastUpdateTime(Instant.now()); - return tifJobParameter; - }).map(this::toIndexRequest).forEach(indexRequest -> bulkRequest.add(indexRequest)); - StashedThreadContext.run(client, () -> client.bulk(bulkRequest, listener)); - } - private IndexRequest toIndexRequest(TIFJobParameter tifJobParameter) { - try { - IndexRequest indexRequest = new IndexRequest(); - indexRequest.index(TIFJobExtension.JOB_INDEX_NAME); - indexRequest.id(tifJobParameter.getName()); - indexRequest.opType(DocWriteRequest.OpType.INDEX); - indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - indexRequest.source(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); - return indexRequest; - } catch (IOException e) { - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, 
e); //TODO - } - } - /** * Get tif job from an index {@code TIFJobExtension.JOB_INDEX_NAME} * @param name the name of a tif job @@ -211,7 +168,7 @@ public void saveTIFJobParameter(final TIFJobParameter tifJobParameter, final Act .setSource(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) .execute(listener); } catch (IOException e) { - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + throw new SecurityAnalyticsException("Exception saving the threat intel feed job parameter in index", RestStatus.INTERNAL_SERVER_ERROR, e); } }); } @@ -238,140 +195,4 @@ public void deleteTIFJobParameter(final TIFJobParameter tifJobParameter) { throw new OpenSearchException("failed to delete tifJobParameter[{}] with status[{}]", tifJobParameter.getName(), response.status()); } } - - /** - * Get tif job from an index {@code TIFJobExtension.JOB_INDEX_NAME} - * @param name the name of a tif job - * @param actionListener the action listener - */ - public void getJobParameter(final String name, final ActionListener actionListener) { - GetRequest request = new GetRequest(TIFJobExtension.JOB_INDEX_NAME, name); - StashedThreadContext.run(client, () -> client.get(request, new ActionListener<>() { - @Override - public void onResponse(final GetResponse response) { - if (response.isExists() == false) { - actionListener.onResponse(null); - return; - } - - try { - XContentParser parser = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - response.getSourceAsBytesRef() - ); - actionListener.onResponse(TIFJobParameter.PARSER.parse(parser, null)); - } catch (IOException e) { - actionListener.onFailure(e); - } - } - - @Override - public void onFailure(final Exception e) { - actionListener.onFailure(e); - } - })); - } - - /** - * Get tif jobs from an index {@code TIFJobExtension.JOB_INDEX_NAME} - * @param names the array of tif job names - * @param actionListener the 
action listener - */ - public void getTIFJobParameters(final String[] names, final ActionListener> actionListener) { - StashedThreadContext.run( - client, - () -> client.prepareMultiGet() - .add(TIFJobExtension.JOB_INDEX_NAME, names) - .execute(createGetTIFJobParameterQueryActionLister(MultiGetResponse.class, actionListener)) - ); - } - - /** - * Get all tif jobs up to {@code MAX_SIZE} from an index {@code TIFJobExtension.JOB_INDEX_NAME} - * @param actionListener the action listener - */ - public void getAllTIFJobParameters(final ActionListener> actionListener) { - StashedThreadContext.run( - client, - () -> client.prepareSearch(TIFJobExtension.JOB_INDEX_NAME) - .setQuery(QueryBuilders.matchAllQuery()) - .setPreference(Preference.PRIMARY.type()) - .setSize(MAX_SIZE) - .execute(createGetTIFJobParameterQueryActionLister(SearchResponse.class, actionListener)) - ); - } - - /** - * Get all tif jobs up to {@code MAX_SIZE} from an index {@code TIFJobExtension.JOB_INDEX_NAME} - */ - public List getAllTIFJobParameters() { - SearchResponse response = StashedThreadContext.run( - client, - () -> client.prepareSearch(TIFJobExtension.JOB_INDEX_NAME) - .setQuery(QueryBuilders.matchAllQuery()) - .setPreference(Preference.PRIMARY.type()) - .setSize(MAX_SIZE) - .execute() - .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)) - ); - - List bytesReferences = toBytesReferences(response); - return bytesReferences.stream().map(bytesRef -> toTIFJobParameter(bytesRef)).collect(Collectors.toList()); - } - - private ActionListener createGetTIFJobParameterQueryActionLister( - final Class response, - final ActionListener> actionListener - ) { - return new ActionListener() { - @Override - public void onResponse(final T response) { - try { - List bytesReferences = toBytesReferences(response); - List tifJobParameters = bytesReferences.stream() - .map(bytesRef -> toTIFJobParameter(bytesRef)) - .collect(Collectors.toList()); - actionListener.onResponse(tifJobParameters); 
- } catch (Exception e) { - actionListener.onFailure(e); - } - } - - @Override - public void onFailure(final Exception e) { - actionListener.onFailure(e); - } - }; - } - - private List toBytesReferences(final Object response) { - if (response instanceof SearchResponse) { - SearchResponse searchResponse = (SearchResponse) response; - return Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getSourceRef).collect(Collectors.toList()); - } else if (response instanceof MultiGetResponse) { - MultiGetResponse multiGetResponse = (MultiGetResponse) response; - return Arrays.stream(multiGetResponse.getResponses()) - .map(MultiGetItemResponse::getResponse) - .filter(Objects::nonNull) - .filter(GetResponse::isExists) - .map(GetResponse::getSourceAsBytesRef) - .collect(Collectors.toList()); - } else { - throw new OpenSearchException("No supported instance type[{}] is provided", response.getClass()); - } - } - - private TIFJobParameter toTIFJobParameter(final BytesReference bytesReference) { - try { - XContentParser parser = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - bytesReference - ); - return TIFJobParameter.PARSER.parse(parser, null); - } catch (IOException e) { - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO - } - } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java index 4407bd9fe..ca1f61347 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java @@ -13,7 +13,6 @@ import org.opensearch.jobscheduler.spi.LockModel; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; import org.opensearch.jobscheduler.spi.ScheduledJobRunner; -import 
org.opensearch.securityanalytics.model.DetectorTrigger; import java.io.IOException; import java.util.ArrayList; @@ -23,7 +22,6 @@ import java.time.Instant; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; -import org.opensearch.securityanalytics.threatIntel.common.TIFExecutor; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; import org.opensearch.threadpool.ThreadPool; @@ -33,7 +31,7 @@ * This is a background task which is responsible for updating threat intel feed data */ public class TIFJobRunner implements ScheduledJobRunner { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + private static final Logger log = LogManager.getLogger(TIFJobRunner.class); private static TIFJobRunner INSTANCE; public static TIFJobRunner getJobRunnerInstance() { @@ -54,7 +52,6 @@ public static TIFJobRunner getJobRunnerInstance() { // threat intel specific variables private TIFJobUpdateService jobSchedulerUpdateService; private TIFJobParameterService jobSchedulerParameterService; - private TIFExecutor threatIntelExecutor; private TIFLockService lockService; private boolean initialized; private ThreadPool threadPool; @@ -71,14 +68,12 @@ public void initialize( final ClusterService clusterService, final TIFJobUpdateService jobSchedulerUpdateService, final TIFJobParameterService jobSchedulerParameterService, - final TIFExecutor threatIntelExecutor, final TIFLockService threatIntelLockService, final ThreadPool threadPool ) { this.clusterService = clusterService; this.jobSchedulerUpdateService = jobSchedulerUpdateService; this.jobSchedulerParameterService = jobSchedulerParameterService; - this.threatIntelExecutor = threatIntelExecutor; this.lockService = threatIntelLockService; this.threadPool = threadPool; this.initialized = true; @@ -98,7 +93,6 @@ public void runJob(final ScheduledJobParameter jobParameter, final JobExecutionC ); } threadPool.generic().submit(updateJobRunner(jobParameter)); -// 
threatIntelExecutor.forJobSchedulerParameterUpdate().submit(updateJobRunner(jobParameter)); } /** @@ -151,20 +145,17 @@ protected void updateJobParameter(final ScheduledJobParameter jobParameter, fina return; } try { - if (TIFJobTask.DELETE_UNUSED_INDICES.equals(jobSchedulerParameter.getTask()) == false) { - Instant startTime = Instant.now(); - List oldIndices = new ArrayList<>(jobSchedulerParameter.getIndices()); - List newFeedIndices = jobSchedulerUpdateService.createThreatIntelFeedData(jobSchedulerParameter, renewLock); - Instant endTime = Instant.now(); - jobSchedulerUpdateService.deleteAllTifdIndices(oldIndices, newFeedIndices); - jobSchedulerUpdateService.updateJobSchedulerParameterAsSucceeded(newFeedIndices, jobSchedulerParameter, startTime, endTime); - } + // create new TIF data and delete old ones + Instant startTime = Instant.now(); + List oldIndices = new ArrayList<>(jobSchedulerParameter.getIndices()); + List newFeedIndices = jobSchedulerUpdateService.createThreatIntelFeedData(jobSchedulerParameter, renewLock); + Instant endTime = Instant.now(); + jobSchedulerUpdateService.deleteAllTifdIndices(oldIndices, newFeedIndices); + jobSchedulerUpdateService.updateJobSchedulerParameterAsSucceeded(newFeedIndices, jobSchedulerParameter, startTime, endTime); } catch (Exception e) { log.error("Failed to update jobSchedulerParameter for {}", jobSchedulerParameter.getName(), e); jobSchedulerParameter.getUpdateStats().setLastFailedAt(Instant.now()); jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); - } finally { - jobSchedulerUpdateService.updateJobSchedulerParameter(jobSchedulerParameter, jobSchedulerParameter.getSchedule(), TIFJobTask.ALL); } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobTask.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobTask.java deleted file mode 100644 index 1221a3540..000000000 --- 
a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobTask.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -/** - * Task that {@link TIFJobRunner} will run - */ -public enum TIFJobTask { - /** - * Do everything - */ - ALL, - - /** - * Only delete unused indices - */ - DELETE_UNUSED_INDICES -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java index a5cc01ea1..45ad50b35 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java @@ -15,8 +15,6 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.core.rest.RestStatus; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedParser; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; @@ -31,7 +29,7 @@ import java.util.List; public class TIFJobUpdateService { - private static final Logger log = LogManager.getLogger(DetectorTrigger.class); + private static final Logger log = LogManager.getLogger(TIFJobUpdateService.class); private static final int SLEEP_TIME_IN_MILLIS = 5000; // 5 seconds private static final int MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS = 10 * 60 * 60 * 1000; // 10 hours @@ -71,28 +69,6 @@ public void deleteAllTifdIndices(List oldIndices, List newIndice } } - /** - * Update jobSchedulerParameter with 
given systemSchedule and task - * - * @param jobSchedulerParameter jobSchedulerParameter to update - * @param systemSchedule new system schedule value - * @param task new task value - */ - public void updateJobSchedulerParameter(final TIFJobParameter jobSchedulerParameter, final IntervalSchedule systemSchedule, final TIFJobTask task) { - boolean updated = false; - if (jobSchedulerParameter.getSchedule().equals(systemSchedule) == false) { //TODO: will always be true - jobSchedulerParameter.setSchedule(systemSchedule); - updated = true; - } - if (jobSchedulerParameter.getTask().equals(task) == false) { - jobSchedulerParameter.setTask(task); - updated = true; - } // this is called when task == DELETE - if (updated) { - jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); - } - } - private List deleteIndices(final List indicesToDelete) { List deletedIndices = new ArrayList<>(indicesToDelete.size()); for (String index : indicesToDelete) { @@ -126,21 +102,29 @@ public List createThreatIntelFeedData(final TIFJobParameter jobScheduler List freshIndices = new ArrayList<>(); for (TIFMetadata tifMetadata : builtInTIFMetadataLoader.getTifMetadataList()) { String indexName = setupIndex(jobSchedulerParameter, tifMetadata); - String[] header; Boolean succeeded; - switch (tifMetadata.getFeedType()) { case "csv": try (CSVParser reader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)) { - // iterate until we find first line without '#' and without empty line - CSVRecord findHeader = reader.iterator().next(); - while ((findHeader.values().length ==1 && "".equals(findHeader.values()[0])) || findHeader.get(0).charAt(0) == '#' || findHeader.get(0).charAt(0) == ' ') { - findHeader = reader.iterator().next(); + CSVParser noHeaderReader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata); + boolean notFound = true; + + while (notFound) { + CSVRecord hasHeaderRecord = reader.iterator().next(); + + //if we want to skip this line and 
keep iterating + if ((hasHeaderRecord.values().length ==1 && "".equals(hasHeaderRecord.values()[0])) || hasHeaderRecord.get(0).charAt(0) == '#' || hasHeaderRecord.get(0).charAt(0) == ' '){ + noHeaderReader.iterator().next(); + } else { // we found the first line that contains information + notFound = false; + } + } + if (tifMetadata.hasHeader()){ + threatIntelFeedDataService.parseAndSaveThreatIntelFeedDataCSV(indexName, reader.iterator(), renewLock, tifMetadata); + } else { + threatIntelFeedDataService.parseAndSaveThreatIntelFeedDataCSV(indexName, noHeaderReader.iterator(), renewLock, tifMetadata); } - CSVRecord headerLine = findHeader; - header = ThreatIntelFeedParser.validateHeader(headerLine).values(); - threatIntelFeedDataService.parseAndSaveThreatIntelFeedDataCSV(indexName, header, reader.iterator(), renewLock, tifMetadata); succeeded = true; } break; @@ -224,47 +208,4 @@ protected void waitUntilAllShardsStarted(final String indexName, final int timeo throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO } } - - -// /** -// * Determine if update is needed or not -// * -// * Update is needed when all following conditions are met -// * 1. updatedAt value in jobSchedulerParameter is equal or before updateAt value in tifMetadata -// * 2. 
SHA256 hash value in jobSchedulerParameter is different with SHA256 hash value in tifMetadata -// * -// * @param jobSchedulerParameter -// * @param tifMetadata -// * @return -// */ -// private boolean shouldUpdate(final TIFJobParameter jobSchedulerParameter, final TIFMetadata tifMetadata) { -// if (jobSchedulerParameter.getDatabase().getUpdatedAt() != null -// && jobSchedulerParameter.getDatabase().getUpdatedAt().toEpochMilli() > tifMetadata.getUpdatedAt()) { -// return false; -// } -// -// if (tifMetadata.getSha256Hash().equals(jobSchedulerParameter.getDatabase().getSha256Hash())) { -// return false; -// } -// return true; -// } - -// /** -// * Return header fields of threat intel feed data with given url of a manifest file -// * -// * The first column is ip range field regardless its header name. -// * Therefore, we don't store the first column's header name. -// * -// * @param TIFMetadataUrl the url of a manifest file -// * @return header fields of threat intel feed -// */ -// public List getHeaderFields(String TIFMetadataUrl) throws IOException { -// URL url = new URL(TIFMetadataUrl); -// TIFMetadata tifMetadata = TIFMetadata.Builder.build(url); -// -// try (CSVParser reader = ThreatIntelFeedParser.getThreatIntelFeedReaderCSV(tifMetadata)) { -// String[] fields = reader.iterator().next().values(); -// return Arrays.asList(fields).subList(1, fields.length); -// } -// } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java b/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java deleted file mode 100644 index 25e40837c..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatintel/common/ParameterValidator.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.common; - -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; 
-import java.util.List; -import java.util.Locale; - -import org.apache.commons.lang3.StringUtils; -import org.opensearch.core.common.Strings; - -/** - * Parameter validator for TIF APIs - */ -public class ParameterValidator { - private static final int MAX_DATASOURCE_NAME_BYTES = 127; - - /** - * Validate datasource name and return list of error messages - * - * @param datasourceName datasource name - * @return Error messages. Empty list if there is no violation. - */ - public List validateTIFJobName(final String datasourceName) { - List errorMsgs = new ArrayList<>(); - if (StringUtils.isBlank(datasourceName)) { - errorMsgs.add("datasource name must not be empty"); - return errorMsgs; - } - - if (!Strings.validFileName(datasourceName)) { - errorMsgs.add( - String.format(Locale.ROOT, "datasource name must not contain the following characters %s", Strings.INVALID_FILENAME_CHARS) - ); - } - if (datasourceName.contains("#")) { - errorMsgs.add("datasource name must not contain '#'"); - } - if (datasourceName.contains(":")) { - errorMsgs.add("datasource name must not contain ':'"); - } - if (datasourceName.charAt(0) == '_' || datasourceName.charAt(0) == '-' || datasourceName.charAt(0) == '+') { - errorMsgs.add("datasource name must not start with '_', '-', or '+'"); - } - int byteCount = datasourceName.getBytes(StandardCharsets.UTF_8).length; - if (byteCount > MAX_DATASOURCE_NAME_BYTES) { - errorMsgs.add(String.format(Locale.ROOT, "datasource name is too long, (%d > %d)", byteCount, MAX_DATASOURCE_NAME_BYTES)); - } - if (datasourceName.equals(".") || datasourceName.equals("..")) { - errorMsgs.add("datasource name must not be '.' 
or '..'"); - } - return errorMsgs; - } -} diff --git a/src/main/resources/OSMapping/ad_ldap_logtype.json b/src/main/resources/OSMapping/ad_ldap_logtype.json index e3434bca5..be2dd5488 100644 --- a/src/main/resources/OSMapping/ad_ldap_logtype.json +++ b/src/main/resources/OSMapping/ad_ldap_logtype.json @@ -2,7 +2,8 @@ "name": "ad_ldap", "description": "AD/LDAP", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"TargetUserName", "ecs":"azure.signinlogs.properties.user_id" diff --git a/src/main/resources/OSMapping/apache_access_logtype.json b/src/main/resources/OSMapping/apache_access_logtype.json index 7753c8440..714fa2acb 100644 --- a/src/main/resources/OSMapping/apache_access_logtype.json +++ b/src/main/resources/OSMapping/apache_access_logtype.json @@ -2,5 +2,6 @@ "name": "apache_access", "description": "Apache Access Log type", "is_builtin": true, - "mappings": [] + "ioc_fields" : [], + "mappings":[] } diff --git a/src/main/resources/OSMapping/azure_logtype.json b/src/main/resources/OSMapping/azure_logtype.json index ec9ae0502..bb55dbe5f 100644 --- a/src/main/resources/OSMapping/azure_logtype.json +++ b/src/main/resources/OSMapping/azure_logtype.json @@ -2,7 +2,8 @@ "name": "azure", "description": "Azure Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"Resultdescription", "ecs":"azure.signinlogs.result_description" diff --git a/src/main/resources/OSMapping/cloudtrail_logtype.json b/src/main/resources/OSMapping/cloudtrail_logtype.json index 389652373..8c2ea3b3a 100644 --- a/src/main/resources/OSMapping/cloudtrail_logtype.json +++ b/src/main/resources/OSMapping/cloudtrail_logtype.json @@ -2,7 +2,15 @@ "name": "cloudtrail", "description": "Cloudtrail Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields": [ + { + "ioc": "ip", + "fields": [ + "src_endpoint.ip" + ] + } + ], + "mappings":[ { "raw_field":"eventName", "ecs":"aws.cloudtrail.event_name", diff --git 
a/src/main/resources/OSMapping/dns_logtype.json b/src/main/resources/OSMapping/dns_logtype.json index ca2f5451a..ef012407f 100644 --- a/src/main/resources/OSMapping/dns_logtype.json +++ b/src/main/resources/OSMapping/dns_logtype.json @@ -2,7 +2,15 @@ "name": "dns", "description": "DNS Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields": [ + { + "ioc": "ip", + "fields": [ + "src_endpoint.ip" + ] + } + ], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type", diff --git a/src/main/resources/OSMapping/github_logtype.json b/src/main/resources/OSMapping/github_logtype.json index 6369e2949..31ec6ee59 100644 --- a/src/main/resources/OSMapping/github_logtype.json +++ b/src/main/resources/OSMapping/github_logtype.json @@ -2,7 +2,8 @@ "name": "github", "description": "Github Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"action", "ecs":"github.action" diff --git a/src/main/resources/OSMapping/gworkspace_logtype.json b/src/main/resources/OSMapping/gworkspace_logtype.json index b0006b6a3..7c5766895 100644 --- a/src/main/resources/OSMapping/gworkspace_logtype.json +++ b/src/main/resources/OSMapping/gworkspace_logtype.json @@ -2,7 +2,8 @@ "name": "gworkspace", "description": "GWorkspace Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"eventSource", "ecs":"google_workspace.admin.service.name" diff --git a/src/main/resources/OSMapping/linux_logtype.json b/src/main/resources/OSMapping/linux_logtype.json index f719913c0..5b77de6b3 100644 --- a/src/main/resources/OSMapping/linux_logtype.json +++ b/src/main/resources/OSMapping/linux_logtype.json @@ -2,7 +2,8 @@ "name": "linux", "description": "Linux Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"name", "ecs":"user.filesystem.name" diff --git a/src/main/resources/OSMapping/m365_logtype.json b/src/main/resources/OSMapping/m365_logtype.json index 
6547d3d63..e19c2418e 100644 --- a/src/main/resources/OSMapping/m365_logtype.json +++ b/src/main/resources/OSMapping/m365_logtype.json @@ -2,7 +2,8 @@ "name": "m365", "description": "Microsoft 365 Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"eventSource", "ecs":"rsa.misc.event_source" diff --git a/src/main/resources/OSMapping/netflow_logtype.json b/src/main/resources/OSMapping/netflow_logtype.json index d8ec32632..9dc015198 100644 --- a/src/main/resources/OSMapping/netflow_logtype.json +++ b/src/main/resources/OSMapping/netflow_logtype.json @@ -2,7 +2,16 @@ "name": "netflow", "description": "Netflow Log Type used only in Integration Tests", "is_builtin": true, - "mappings": [ + "ioc_fields": [ + { + "ioc": "ip", + "fields": [ + "destination.ip", + "source.ip" + ] + } + ], + "mappings":[ { "raw_field":"netflow.source_ipv4_address", "ecs":"source.ip" diff --git a/src/main/resources/OSMapping/network_logtype.json b/src/main/resources/OSMapping/network_logtype.json index 90f0b2ee6..2ca92a1ad 100644 --- a/src/main/resources/OSMapping/network_logtype.json +++ b/src/main/resources/OSMapping/network_logtype.json @@ -2,7 +2,16 @@ "name": "network", "description": "Network Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields": [ + { + "ioc": "ip", + "fields": [ + "destination.ip", + "source.ip" + ] + } + ], + "mappings":[ { "raw_field":"action", "ecs":"netflow.firewall_event" diff --git a/src/main/resources/OSMapping/okta_logtype.json b/src/main/resources/OSMapping/okta_logtype.json index 8038b7f01..e73a0c273 100644 --- a/src/main/resources/OSMapping/okta_logtype.json +++ b/src/main/resources/OSMapping/okta_logtype.json @@ -2,7 +2,8 @@ "name": "okta", "description": "Okta Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"eventtype", "ecs":"okta.event_type" diff --git a/src/main/resources/OSMapping/others_application_logtype.json 
b/src/main/resources/OSMapping/others_application_logtype.json index d7faf8c94..4008602d4 100644 --- a/src/main/resources/OSMapping/others_application_logtype.json +++ b/src/main/resources/OSMapping/others_application_logtype.json @@ -2,7 +2,8 @@ "name": "others_application", "description": "others_application", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type" diff --git a/src/main/resources/OSMapping/others_apt_logtype.json b/src/main/resources/OSMapping/others_apt_logtype.json index ace55cbc3..1a4ca711f 100644 --- a/src/main/resources/OSMapping/others_apt_logtype.json +++ b/src/main/resources/OSMapping/others_apt_logtype.json @@ -2,7 +2,8 @@ "name": "others_apt", "description": "others_apt", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type" diff --git a/src/main/resources/OSMapping/others_cloud_logtype.json b/src/main/resources/OSMapping/others_cloud_logtype.json index b5da3e005..64cbc7935 100644 --- a/src/main/resources/OSMapping/others_cloud_logtype.json +++ b/src/main/resources/OSMapping/others_cloud_logtype.json @@ -2,7 +2,8 @@ "name": "others_cloud", "description": "others_cloud", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type" diff --git a/src/main/resources/OSMapping/others_compliance_logtype.json b/src/main/resources/OSMapping/others_compliance_logtype.json index 6f362d589..6e065795a 100644 --- a/src/main/resources/OSMapping/others_compliance_logtype.json +++ b/src/main/resources/OSMapping/others_compliance_logtype.json @@ -2,7 +2,8 @@ "name": "others_compliance", "description": "others_compliance", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type" diff --git a/src/main/resources/OSMapping/others_macos_logtype.json 
b/src/main/resources/OSMapping/others_macos_logtype.json index 50d1c2160..6b6452100 100644 --- a/src/main/resources/OSMapping/others_macos_logtype.json +++ b/src/main/resources/OSMapping/others_macos_logtype.json @@ -2,7 +2,8 @@ "name": "others_macos", "description": "others_macos", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type" diff --git a/src/main/resources/OSMapping/others_proxy_logtype.json b/src/main/resources/OSMapping/others_proxy_logtype.json index aca4529d1..a2b0794a4 100644 --- a/src/main/resources/OSMapping/others_proxy_logtype.json +++ b/src/main/resources/OSMapping/others_proxy_logtype.json @@ -2,7 +2,8 @@ "name": "others_proxy", "description": "others_proxy", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type" diff --git a/src/main/resources/OSMapping/others_web_logtype.json b/src/main/resources/OSMapping/others_web_logtype.json index ae8262d52..b46adc6a4 100644 --- a/src/main/resources/OSMapping/others_web_logtype.json +++ b/src/main/resources/OSMapping/others_web_logtype.json @@ -2,7 +2,8 @@ "name": "others_web", "description": "others_web", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"record_type", "ecs":"dns.answers.type" diff --git a/src/main/resources/OSMapping/s3_logtype.json b/src/main/resources/OSMapping/s3_logtype.json index 58c546258..20c896df6 100644 --- a/src/main/resources/OSMapping/s3_logtype.json +++ b/src/main/resources/OSMapping/s3_logtype.json @@ -2,7 +2,8 @@ "name": "s3", "description": "S3 Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"eventName", "ecs":"aws.cloudtrail.event_name" diff --git a/src/main/resources/OSMapping/test_windows_logtype.json b/src/main/resources/OSMapping/test_windows_logtype.json index 816cba666..cc619c5a1 100644 --- 
a/src/main/resources/OSMapping/test_windows_logtype.json +++ b/src/main/resources/OSMapping/test_windows_logtype.json @@ -5,7 +5,7 @@ "ioc_fields": [ { "ioc": "ip", - "fields": ["windows-hostname"] + "fields": ["HostName"] } ], "mappings": [ diff --git a/src/main/resources/OSMapping/vpcflow_logtype.json b/src/main/resources/OSMapping/vpcflow_logtype.json index c55305b6d..29d9f38c2 100644 --- a/src/main/resources/OSMapping/vpcflow_logtype.json +++ b/src/main/resources/OSMapping/vpcflow_logtype.json @@ -2,7 +2,16 @@ "name": "vpcflow", "description": "VPC Flow Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields": [ + { + "ioc": "ip", + "fields": [ + "dst_endpoint.ip", + "src_endpoint.ip" + ] + } + ], + "mappings":[ { "raw_field":"version", "ecs":"netflow.version", diff --git a/src/main/resources/OSMapping/waf_logtype.json b/src/main/resources/OSMapping/waf_logtype.json index 5eed2c2fb..3e5b1f4f1 100644 --- a/src/main/resources/OSMapping/waf_logtype.json +++ b/src/main/resources/OSMapping/waf_logtype.json @@ -2,7 +2,8 @@ "name": "waf", "description": "Web Application Firewall Log Type", "is_builtin": true, - "mappings": [ + "ioc_fields" : [], + "mappings":[ { "raw_field":"cs-method", "ecs":"waf.request.method" diff --git a/src/main/resources/OSMapping/windows_logtype.json b/src/main/resources/OSMapping/windows_logtype.json index a5fef8ea7..ec9b3ed1a 100644 --- a/src/main/resources/OSMapping/windows_logtype.json +++ b/src/main/resources/OSMapping/windows_logtype.json @@ -2,7 +2,13 @@ "name": "windows", "description": "Windows Log Type", "is_builtin": true, - "mappings":[ + "ioc_fields" : [ + { + "ioc": "ip", + "fields": ["destination.ip","source.ip"] + } + ], + "mappings": [ { "raw_field":"AccountName", "ecs":"winlog.computerObject.name" diff --git a/src/main/resources/mappings/threat_intel_job_mapping.json b/src/main/resources/mappings/threat_intel_job_mapping.json index 5e039928d..c64b034fe 100644 --- 
a/src/main/resources/mappings/threat_intel_job_mapping.json +++ b/src/main/resources/mappings/threat_intel_job_mapping.json @@ -1,35 +1,11 @@ { + "dynamic": "strict", + "_meta" : { + "schema_version": 1 + }, "properties": { - "database": { - "properties": { - "feed_id": { - "type": "text" - }, - "feed_name": { - "type": "text" - }, - "feed_format": { - "type": "text" - }, - "endpoint": { - "type": "text" - }, - "description": { - "type": "text" - }, - "organization": { - "type": "text" - }, - "contained_iocs_field": { - "type": "text" - }, - "ioc_col": { - "type": "text" - }, - "fields": { - "type": "text" - } - } + "schema_version": { + "type": "integer" }, "enabled_time": { "type": "long" @@ -63,15 +39,6 @@ "state": { "type": "text" }, - "task": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, "update_enabled": { "type": "boolean" }, @@ -90,29 +57,6 @@ "type": "long" } } - }, - "user_schedule": { - "properties": { - "interval": { - "properties": { - "period": { - "type": "long" - }, - "start_time": { - "type": "long" - }, - "unit": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - } - } - } - } } } } \ No newline at end of file diff --git a/src/main/resources/threatIntelFeed/feedMetadata.json b/src/main/resources/threatIntelFeed/feedMetadata.json index c73995ebd..27196b6b6 100644 --- a/src/main/resources/threatIntelFeed/feedMetadata.json +++ b/src/main/resources/threatIntelFeed/feedMetadata.json @@ -7,6 +7,7 @@ "description": "Alienvault IP Reputation threat intelligence feed managed by AlienVault", "feed_format": "csv", "ioc_type": "ip", - "ioc_col": 0 + "ioc_col": 0, + "has_header": false } } \ No newline at end of file diff --git a/src/main/resources/threatIntelFeedInfo/feodo.yml b/src/main/resources/threatIntelFeedInfo/feodo.yml new file mode 100644 index 000000000..4acbf40e4 --- /dev/null +++ b/src/main/resources/threatIntelFeedInfo/feodo.yml @@ -0,0 
+1,6 @@ +url: "https://feodotracker.abuse.ch/downloads/ipblocklist_aggressive.csv" +name: "ipblocklist_aggressive.csv" +feedFormat: "csv" +org: "Feodo" +iocTypes: ["ip"] +description: "" \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index 65417ed39..9b17c4aa2 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -1397,7 +1397,7 @@ public static String randomDocWithIpIoc(int severity, int version, String ioc) "\"AccountType\":\"User\",\n" + "\"Message\":\"Dns query:\\r\\nRuleName: \\r\\nUtcTime: 2020-02-04 14:59:38.349\\r\\nProcessGuid: {b3c285a4-3cda-5dc0-0000-001077270b00}\\r\\nProcessId: 1904\\r\\nQueryName: EC2AMAZ-EPO7HKA\\r\\nQueryStatus: 0\\r\\nQueryResults: 172.31.46.38;\\r\\nImage: C:\\\\Program Files\\\\nxlog\\\\nxlog.exe\",\n" + "\"Category\":\"Dns query (rule: DnsQuery)\",\n" + - "\"Opcode\":\"blahblah\",\n" + + "\"Opcode\":\"%blahblah\",\n" + "\"UtcTime\":\"2020-02-04 14:59:38.349\",\n" + "\"ProcessGuid\":\"{b3c285a4-3cda-5dc0-0000-001077270b00}\",\n" + "\"ProcessId\":\"1904\",\"QueryName\":\"EC2AMAZ-EPO7HKA\",\"QueryStatus\":\"0\",\n" + @@ -1409,7 +1409,7 @@ public static String randomDocWithIpIoc(int severity, int version, String ioc) "\"CommandLine\": \"eachtest\",\n" + "\"Initiated\": \"true\"\n" + "}"; - return String.format(Locale.ROOT, doc, ioc, severity, version); + return String.format(Locale.ROOT, ioc, doc, severity, version); } @@ -1563,6 +1563,20 @@ public static String vpcFlowMappings() { " }"; } + private static String randomString() { + return OpenSearchTestCase.randomAlphaOfLengthBetween(2, 16); + } + + public static String randomLowerCaseString() { + return randomString().toLowerCase(Locale.ROOT); + } + + public static List randomLowerCaseStringList() { + List stringList = new ArrayList<>(); + 
stringList.add(randomLowerCaseString()); + return stringList; + } + public static XContentParser parser(String xc) throws IOException { XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc); parser.nextToken(); diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index e71cace9a..07e862369 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1119,7 +1119,7 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt List iocs = getThreatIntelFeedIocs(3); int i=1; for (String ioc : iocs) { - indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); + indexDoc(index, i+"", randomDoc(5, 3, i==1? "120.85.114.146" : "120.86.237.94")); i++; } String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java new file mode 100644 index 000000000..a6661b32a --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java @@ -0,0 +1,270 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel; + +import org.junit.After; +import org.junit.Before; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionType; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.routing.RoutingTable; 
+import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Randomness; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.ingest.IngestMetadata; +import org.opensearch.ingest.IngestService; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.jobscheduler.spi.utils.LockService; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.feedMetadata.BuiltInTIFMetadataLoader; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskListener; +import org.opensearch.test.client.NoOpNodeClient; +import org.opensearch.test.rest.RestActionTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; +import org.opensearch.securityanalytics.TestHelpers; + +public abstract class ThreatIntelTestCase extends RestActionTestCase { + @Mock + 
protected ClusterService clusterService; + @Mock + protected TIFJobUpdateService tifJobUpdateService; + @Mock + protected TIFJobParameterService tifJobParameterService; + @Mock + protected BuiltInTIFMetadataLoader builtInTIFMetadataLoader; + @Mock + protected ThreatIntelFeedDataService threatIntelFeedDataService; + @Mock + protected ClusterState clusterState; + @Mock + protected Metadata metadata; + @Mock + protected IngestService ingestService; + @Mock + protected ActionFilters actionFilters; + @Mock + protected ThreadPool threadPool; + @Mock + protected TIFLockService tifLockService; + @Mock + protected RoutingTable routingTable; + @Mock + protected TransportService transportService; + protected IngestMetadata ingestMetadata; + protected NoOpNodeClient client; + protected VerifyingClient verifyingClient; + protected LockService lockService; + protected ClusterSettings clusterSettings; + protected Settings settings; + private AutoCloseable openMocks; + @Mock + protected TIFJobParameter tifJobParameter; + + @Before + public void prepareThreatIntelTestCase() { + openMocks = MockitoAnnotations.openMocks(this); + settings = Settings.EMPTY; + client = new NoOpNodeClient(this.getTestName()); + verifyingClient = spy(new VerifyingClient(this.getTestName())); + clusterSettings = new ClusterSettings(settings, new HashSet<>(SecurityAnalyticsSettings.settings())); + lockService = new LockService(client, clusterService); + ingestMetadata = new IngestMetadata(Collections.emptyMap()); + when(metadata.custom(IngestMetadata.TYPE)).thenReturn(ingestMetadata); + when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.state()).thenReturn(clusterState); + when(clusterState.metadata()).thenReturn(metadata); + when(clusterState.getMetadata()).thenReturn(metadata); + when(clusterState.routingTable()).thenReturn(routingTable); + 
when(ingestService.getClusterService()).thenReturn(clusterService); + when(threadPool.generic()).thenReturn(OpenSearchExecutors.newDirectExecutorService()); + } + + @After + public void clean() throws Exception { + openMocks.close(); + client.close(); + verifyingClient.close(); + } + + protected TIFJobState randomStateExcept(TIFJobState state) { + assertNotNull(state); + return Arrays.stream(TIFJobState.values()) + .sequential() + .filter(s -> !s.equals(state)) + .collect(Collectors.toList()) + .get(Randomness.createSecure().nextInt(TIFJobState.values().length - 2)); + } + + protected TIFJobState randomState() { + return Arrays.stream(TIFJobState.values()) + .sequential() + .collect(Collectors.toList()) + .get(Randomness.createSecure().nextInt(TIFJobState.values().length - 1)); + } + + protected long randomPositiveLong() { + long value = Randomness.get().nextLong(); + return value < 0 ? -value : value; + } + + /** + * Update interval should be > 0 and < validForInDays. + * For an update test to work, there should be at least one eligible value other than current update interval. + * Therefore, the smallest value for validForInDays is 2. + * Update interval is random value from 1 to validForInDays - 2. + * The new update value will be validForInDays - 1. 
+ */ + protected TIFJobParameter randomTifJobParameter(final Instant updateStartTime) { + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + TIFJobParameter tifJobParameter = new TIFJobParameter(); + tifJobParameter.setName(TestHelpers.randomLowerCaseString()); + tifJobParameter.setSchedule( + new IntervalSchedule( + updateStartTime.truncatedTo(ChronoUnit.MILLIS), + 1, + ChronoUnit.DAYS + ) + ); + tifJobParameter.setState(randomState()); + tifJobParameter.setIndices(Arrays.asList(TestHelpers.randomLowerCaseString(), TestHelpers.randomLowerCaseString())); + tifJobParameter.getUpdateStats().setLastSkippedAt(now); + tifJobParameter.getUpdateStats().setLastSucceededAt(now); + tifJobParameter.getUpdateStats().setLastFailedAt(now); + tifJobParameter.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); + tifJobParameter.setLastUpdateTime(now); + if (Randomness.get().nextInt() % 2 == 0) { + tifJobParameter.enable(); + } else { + tifJobParameter.disable(); + } + return tifJobParameter; + } + + protected TIFJobParameter randomTifJobParameter() { + return randomTifJobParameter(Instant.now()); + } + + protected LockModel randomLockModel() { + LockModel lockModel = new LockModel( + TestHelpers.randomLowerCaseString(), + TestHelpers.randomLowerCaseString(), + Instant.now(), + randomPositiveLong(), + false + ); + return lockModel; + } + + /** + * Temporary class of VerifyingClient until this PR(https://github.com/opensearch-project/OpenSearch/pull/7167) + * is merged in OpenSearch core + */ + public static class VerifyingClient extends NoOpNodeClient { + AtomicReference executeVerifier = new AtomicReference<>(); + AtomicReference executeLocallyVerifier = new AtomicReference<>(); + + public VerifyingClient(String testName) { + super(testName); + reset(); + } + + /** + * Clears any previously set verifier functions set by {@link #setExecuteVerifier(BiFunction)} and/or + * {@link #setExecuteLocallyVerifier(BiFunction)}. 
These functions are replaced with functions which will throw an + * {@link AssertionError} if called. + */ + public void reset() { + executeVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); + executeLocallyVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); + } + + /** + * Sets the function that will be called when {@link #doExecute(ActionType, ActionRequest, ActionListener)} is called. The given + * function should return either a subclass of {@link ActionResponse} or {@code null}. + * @param verifier A function which is called in place of {@link #doExecute(ActionType, ActionRequest, ActionListener)} + */ + public void setExecuteVerifier( + BiFunction, Request, Response> verifier + ) { + executeVerifier.set(verifier); + } + + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + try { + listener.onResponse((Response) executeVerifier.get().apply(action, request)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Sets the function that will be called when {@link #executeLocally(ActionType, ActionRequest, TaskListener)}is called. The given + * function should return either a subclass of {@link ActionResponse} or {@code null}. 
+ * @param verifier A function which is called in place of {@link #executeLocally(ActionType, ActionRequest, TaskListener)} + */ + public void setExecuteLocallyVerifier( + BiFunction, Request, Response> verifier + ) { + executeLocallyVerifier.set(verifier); + } + + @Override + public Task executeLocally( + ActionType action, + Request request, + ActionListener listener + ) { + listener.onResponse((Response) executeLocallyVerifier.get().apply(action, request)); + return null; + } + + @Override + public Task executeLocally( + ActionType action, + Request request, + TaskListener listener + ) { + listener.onResponse(null, (Response) executeLocallyVerifier.get().apply(action, request)); + return null; + } + + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java new file mode 100644 index 000000000..2ecd7369b --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java @@ -0,0 +1,65 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.securityanalytics.TestHelpers; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; + +import java.io.IOException; + +public class DeleteTIFJobRequestTests extends ThreatIntelTestCase { + + public void testStreamInOut_whenValidInput_thenSucceed() throws IOException { + String tifJobParameterName = TestHelpers.randomLowerCaseString(); + DeleteTIFJobRequest request = new DeleteTIFJobRequest(tifJobParameterName); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + 
BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + DeleteTIFJobRequest copiedRequest = new DeleteTIFJobRequest(input); + + // Verify + assertEquals(request.getName(), copiedRequest.getName()); + } + + public void testValidate_whenNull_thenError() { + DeleteTIFJobRequest request = new DeleteTIFJobRequest((String) null); + + // Run + ActionRequestValidationException error = request.validate(); + + // Verify + assertNotNull(error.validationErrors()); + assertFalse(error.validationErrors().isEmpty()); + } + + public void testValidate_whenBlank_thenError() { + DeleteTIFJobRequest request = new DeleteTIFJobRequest(" "); + + // Run + ActionRequestValidationException error = request.validate(); + + // Verify + assertNotNull(error.validationErrors()); + assertFalse(error.validationErrors().isEmpty()); + } + + public void testValidate_whenInvalidTIFJobParameterName_thenFails() { + String invalidName = "_" + TestHelpers.randomLowerCaseString(); + DeleteTIFJobRequest request = new DeleteTIFJobRequest(invalidName); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("no such job exists")); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequestTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequestTests.java new file mode 100644 index 000000000..baa18695d --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequestTests.java @@ -0,0 +1,50 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.BytesStreamOutput; +import 
org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.TestHelpers; + + +public class PutTIFJobRequestTests extends ThreatIntelTestCase { + + public void testValidate_whenValidInput_thenSucceed() { + String tifJobParameterName = TestHelpers.randomLowerCaseString(); + PutTIFJobRequest request = new PutTIFJobRequest(tifJobParameterName, clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL)); + + assertNull(request.validate()); + } + + public void testValidate_whenInvalidTIFJobParameterName_thenFails() { + String invalidName = "_" + TestHelpers.randomLowerCaseString(); + PutTIFJobRequest request = new PutTIFJobRequest(invalidName, clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("must not")); + } + + public void testStreamInOut_whenValidInput_thenSucceed() throws Exception { + String tifJobParameterName = TestHelpers.randomLowerCaseString(); + PutTIFJobRequest request = new PutTIFJobRequest(tifJobParameterName, clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL)); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + PutTIFJobRequest copiedRequest = new PutTIFJobRequest(input); + + // Verify + assertEquals(request.getName(), copiedRequest.getName()); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java new file mode 100644 index 
000000000..7d15d7710 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java @@ -0,0 +1,127 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.mockito.InOrder; +import org.mockito.Mockito; +import org.opensearch.OpenSearchException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.core.action.ActionListener; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.tasks.Task; +import org.opensearch.securityanalytics.TestHelpers; + + +import java.io.IOException; +import java.time.Instant; + +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +public class TransportDeleteTIFJobActionTests extends ThreatIntelTestCase { + private TransportDeleteTIFJobAction action; + + @Before + public void init() { + action = new TransportDeleteTIFJobAction( + transportService, + actionFilters, + tifLockService, + ingestService, + tifJobParameterService, + threatIntelFeedDataService, + threadPool + ); + } + + public void testDoExecute_whenFailedToAcquireLock_thenError() throws IOException { + validateDoExecute(null, null); + } + + public void testDoExecute_whenValidInput_thenSucceed() throws IOException { + String jobIndexName = TestHelpers.randomLowerCaseString(); + String jobId = TestHelpers.randomLowerCaseString(); + LockModel lockModel = new LockModel(jobIndexName, jobId, Instant.now(), randomPositiveLong(), false); + validateDoExecute(lockModel, null); + } + + public void 
testDoExecute_whenException_thenError() throws IOException { + validateDoExecute(null, new RuntimeException()); + } + + private void validateDoExecute(final LockModel lockModel, final Exception exception) throws IOException { + Task task = mock(Task.class); + TIFJobParameter tifJobParameter = randomTifJobParameter(); + when(tifJobParameterService.getJobParameter(tifJobParameter.getName())).thenReturn(tifJobParameter); + DeleteTIFJobRequest request = new DeleteTIFJobRequest(tifJobParameter.getName()); + ActionListener listener = mock(ActionListener.class); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(tifLockService).acquireLock(eq(tifJobParameter.getName()), anyLong(), captor.capture()); + + if (exception == null) { + // Run + captor.getValue().onResponse(lockModel); + + // Verify + if (lockModel == null) { + verify(listener).onFailure(any(OpenSearchException.class)); + } else { + verify(listener).onResponse(new AcknowledgedResponse(true)); + verify(tifLockService).releaseLock(eq(lockModel)); + } + } else { + // Run + captor.getValue().onFailure(exception); + // Verify + verify(listener).onFailure(exception); + } + } + + public void testDeleteTIFJobParameter_whenNull_thenThrowException() { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + expectThrows(ResourceNotFoundException.class, () -> action.deleteTIFJob(tifJobParameter.getName())); + } + + public void testDeleteTIFJobParameter_whenSafeToDelete_thenDelete() throws IOException { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + when(tifJobParameterService.getJobParameter(tifJobParameter.getName())).thenReturn(tifJobParameter); + + // Run + action.deleteTIFJob(tifJobParameter.getName()); + + // Verify + assertEquals(TIFJobState.DELETING, tifJobParameter.getState()); + verify(tifJobParameterService).updateJobSchedulerParameter(tifJobParameter); + InOrder inOrder = 
Mockito.inOrder(threatIntelFeedDataService, tifJobParameterService); + inOrder.verify(threatIntelFeedDataService).deleteThreatIntelDataIndex(tifJobParameter.getIndices()); + inOrder.verify(tifJobParameterService).deleteTIFJobParameter(tifJobParameter); + } + + public void testDeleteTIFJobParameter_whenDeleteFailsAfterStateIsChanged_thenRevertState() throws IOException { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + tifJobParameter.setState(TIFJobState.AVAILABLE); + when(tifJobParameterService.getJobParameter(tifJobParameter.getName())).thenReturn(tifJobParameter); + doThrow(new RuntimeException()).when(threatIntelFeedDataService).deleteThreatIntelDataIndex(tifJobParameter.getIndices()); + + // Run + expectThrows(RuntimeException.class, () -> action.deleteTIFJob(tifJobParameter.getName())); + + // Verify + verify(tifJobParameterService, times(2)).updateJobSchedulerParameter(tifJobParameter); + assertEquals(TIFJobState.AVAILABLE, tifJobParameter.getState()); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobActionTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobActionTests.java new file mode 100644 index 000000000..68dcbf527 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobActionTests.java @@ -0,0 +1,161 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.opensearch.action.StepListener; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.core.action.ActionListener; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import 
org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; +import org.opensearch.tasks.Task; +import org.opensearch.securityanalytics.TestHelpers; + +import java.io.IOException; +import java.util.ConcurrentModificationException; + +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +public class TransportPutTIFJobActionTests extends ThreatIntelTestCase { + private TransportPutTIFJobAction action; + + @Before + public void init() { + action = new TransportPutTIFJobAction( + transportService, + actionFilters, + threadPool, + tifJobParameterService, + tifJobUpdateService, + tifLockService + ); + } + + public void testDoExecute_whenFailedToAcquireLock_thenError() throws IOException { + validateDoExecute(null, null, null); + } + + public void testDoExecute_whenAcquiredLock_thenSucceed() throws IOException { + validateDoExecute(randomLockModel(), null, null); + } + + public void testDoExecute_whenExceptionBeforeAcquiringLock_thenError() throws IOException { + validateDoExecute(randomLockModel(), new RuntimeException(), null); + } + + public void testDoExecute_whenExceptionAfterAcquiringLock_thenError() throws IOException { + validateDoExecute(randomLockModel(), null, new RuntimeException()); + } + + private void validateDoExecute(final LockModel lockModel, final Exception before, final Exception after) throws IOException { + Task task = mock(Task.class); + TIFJobParameter tifJobParameter = randomTifJobParameter(); + + PutTIFJobRequest request = new PutTIFJobRequest(tifJobParameter.getName(), clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL)); + ActionListener listener = mock(ActionListener.class); + if (after != null) { + doThrow(after).when(tifJobParameterService).createJobIndexIfNotExists(any(StepListener.class)); + } + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = 
ArgumentCaptor.forClass(ActionListener.class); + verify(tifLockService).acquireLock(eq(tifJobParameter.getName()), anyLong(), captor.capture()); + + if (before == null) { + // Run + captor.getValue().onResponse(lockModel); + + // Verify + if (lockModel == null) { + verify(listener).onFailure(any(ConcurrentModificationException.class)); + } + if (after != null) { + verify(tifLockService).releaseLock(eq(lockModel)); + verify(listener).onFailure(after); + } else { + verify(tifLockService, never()).releaseLock(eq(lockModel)); + } + } else { + // Run + captor.getValue().onFailure(before); + // Verify + verify(listener).onFailure(before); + } + } + + public void testInternalDoExecute_whenValidInput_thenSucceed() { + PutTIFJobRequest request = new PutTIFJobRequest(TestHelpers.randomLowerCaseString(), clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL)); + ActionListener listener = mock(ActionListener.class); + + // Run + action.internalDoExecute(request, randomLockModel(), listener); + + // Verify + ArgumentCaptor captor = ArgumentCaptor.forClass(StepListener.class); + verify(tifJobParameterService).createJobIndexIfNotExists(captor.capture()); + + // Run + captor.getValue().onResponse(null); + // Verify + ArgumentCaptor tifJobCaptor = ArgumentCaptor.forClass(TIFJobParameter.class); + ArgumentCaptor actionListenerCaptor = ArgumentCaptor.forClass(ActionListener.class); + verify(tifJobParameterService).saveTIFJobParameter(tifJobCaptor.capture(), actionListenerCaptor.capture()); + assertEquals(request.getName(), tifJobCaptor.getValue().getName()); + + // Run next listener.onResponse + actionListenerCaptor.getValue().onResponse(null); + // Verify + verify(listener).onResponse(new AcknowledgedResponse(true)); + } + + public void testCreateTIFJobParameter_whenInvalidState_thenUpdateStateAsFailed() throws IOException { + TIFJobParameter tifJob = new TIFJobParameter(); + tifJob.setState(randomStateExcept(TIFJobState.CREATING)); + 
tifJob.getUpdateStats().setLastFailedAt(null); + + // Run + action.createThreatIntelFeedData(tifJob, mock(Runnable.class)); + + // Verify + assertEquals(TIFJobState.CREATE_FAILED, tifJob.getState()); + assertNotNull(tifJob.getUpdateStats().getLastFailedAt()); + verify(tifJobParameterService).updateJobSchedulerParameter(tifJob); + verify(tifJobUpdateService, never()).createThreatIntelFeedData(any(TIFJobParameter.class), any(Runnable.class)); + } + + public void testCreateTIFJobParameter_whenExceptionHappens_thenUpdateStateAsFailed() throws IOException { + TIFJobParameter tifJob = new TIFJobParameter(); + doThrow(new RuntimeException()).when(tifJobUpdateService).createThreatIntelFeedData(any(TIFJobParameter.class), any(Runnable.class)); + + // Run + action.createThreatIntelFeedData(tifJob, mock(Runnable.class)); + + // Verify + assertEquals(TIFJobState.CREATE_FAILED, tifJob.getState()); + assertNotNull(tifJob.getUpdateStats().getLastFailedAt()); + verify(tifJobParameterService).updateJobSchedulerParameter(tifJob); + } + + public void testCreateTIFJobParameter_whenValidInput_thenUpdateStateAsCreating() throws IOException { + TIFJobParameter tifJob = new TIFJobParameter(); + + Runnable renewLock = mock(Runnable.class); + // Run + action.createThreatIntelFeedData(tifJob, renewLock); + + // Verify + verify(tifJobUpdateService).createThreatIntelFeedData(tifJob, renewLock); + assertEquals(TIFJobState.CREATING, tifJob.getState()); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java new file mode 100644 index 000000000..4b6423a3e --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/common/ThreatIntelLockServiceTests.java @@ -0,0 +1,117 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.securityanalytics.threatIntel.common; + +import static org.mockito.Mockito.mock; +import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.LOCK_DURATION_IN_SECONDS; +import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.RENEW_AFTER_IN_SECONDS; + +import java.time.Instant; +import java.util.concurrent.atomic.AtomicReference; + +import org.junit.Before; +import org.opensearch.action.DocWriteResponse; +import org.opensearch.action.update.UpdateRequest; +import org.opensearch.action.update.UpdateResponse; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.TestHelpers; + +public class ThreatIntelLockServiceTests extends ThreatIntelTestCase { + private TIFLockService threatIntelLockService; + private TIFLockService noOpsLockService; + + @Before + public void init() { + threatIntelLockService = new TIFLockService(clusterService, verifyingClient); + noOpsLockService = new TIFLockService(clusterService, client); + } + + public void testAcquireLock_whenValidInput_thenSucceed() { + // Cannot test because LockService is final class + // Simply calling method to increase coverage + noOpsLockService.acquireLock(TestHelpers.randomLowerCaseString(), randomPositiveLong(), mock(ActionListener.class)); + } + + public void testAcquireLock_whenCalled_thenNotBlocked() { + long expectedDurationInMillis = 1000; + Instant before = Instant.now(); + assertTrue(threatIntelLockService.acquireLock(null, null).isEmpty()); + Instant after = Instant.now(); + assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); + } + + public void testReleaseLock_whenValidInput_thenSucceed() { + // Cannot test because LockService is final class + // Simply calling method to increase coverage + 
noOpsLockService.releaseLock(null); + } + + public void testRenewLock_whenCalled_thenNotBlocked() { + long expectedDurationInMillis = 1000; + Instant before = Instant.now(); + assertNull(threatIntelLockService.renewLock(null)); + Instant after = Instant.now(); + assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); + } + + public void testGetRenewLockRunnable_whenLockIsFresh_thenDoNotRenew() { + LockModel lockModel = new LockModel( + TestHelpers.randomLowerCaseString(), + TestHelpers.randomLowerCaseString(), + Instant.now(), + LOCK_DURATION_IN_SECONDS, + false + ); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof UpdateRequest); + return new UpdateResponse( + mock(ShardId.class), + TestHelpers.randomLowerCaseString(), + randomPositiveLong(), + randomPositiveLong(), + randomPositiveLong(), + DocWriteResponse.Result.UPDATED + ); + }); + + AtomicReference reference = new AtomicReference<>(lockModel); + threatIntelLockService.getRenewLockRunnable(reference).run(); + assertEquals(lockModel, reference.get()); + } + + public void testGetRenewLockRunnable_whenLockIsStale_thenRenew() { + LockModel lockModel = new LockModel( + TestHelpers.randomLowerCaseString(), + TestHelpers.randomLowerCaseString(), + Instant.now().minusSeconds(RENEW_AFTER_IN_SECONDS), + LOCK_DURATION_IN_SECONDS, + false + ); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof UpdateRequest); + return new UpdateResponse( + mock(ShardId.class), + TestHelpers.randomLowerCaseString(), + randomPositiveLong(), + randomPositiveLong(), + randomPositiveLong(), + DocWriteResponse.Result.UPDATED + ); + }); + + AtomicReference reference = new AtomicReference<>(lockModel); + threatIntelLockService.getRenewLockRunnable(reference).run(); + assertNotEquals(lockModel, reference.get()); + } +} + diff --git 
a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/TIFJobExtensionPluginIT.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/TIFJobExtensionPluginIT.java new file mode 100644 index 000000000..ff682e6dd --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/TIFJobExtensionPluginIT.java @@ -0,0 +1,49 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ +package org.opensearch.securityanalytics.threatIntel.integTests; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; +import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.action.admin.cluster.node.info.PluginsAndModules; +import org.opensearch.cluster.health.ClusterHealthStatus; +import org.opensearch.plugins.PluginInfo; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobRunner; +import org.opensearch.test.OpenSearchIntegTestCase; +import org.junit.Assert; + +import java.util.List; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class TIFJobExtensionPluginIT extends OpenSearchIntegTestCase { + private static final Logger log = LogManager.getLogger(TIFJobExtensionPluginIT.class); + + public void testPluginsAreInstalled() { + ClusterHealthRequest request = new ClusterHealthRequest(); + ClusterHealthResponse response = OpenSearchIntegTestCase.client().admin().cluster().health(request).actionGet(); + 
Assert.assertEquals(ClusterHealthStatus.GREEN, response.getStatus()); + + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); + nodesInfoRequest.addMetric(NodesInfoRequest.Metric.PLUGINS.metricName()); + NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest).actionGet(); + List pluginInfos = nodesInfoResponse.getNodes() + .stream() + .flatMap( + (Function>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos().stream() + ) + .collect(Collectors.toList()); + Assert.assertTrue(pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName().equals("opensearch-job-scheduler"))); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java new file mode 100644 index 000000000..a3df0c4cd --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java @@ -0,0 +1,154 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.securityanalytics.threatIntel.integTests; + +import org.apache.hc.core5.http.HttpStatus; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Locale; +import java.util.stream.Collectors; + +import static org.opensearch.securityanalytics.TestHelpers.*; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; +import static org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataUtils.getTifdList; + +public class ThreatIntelJobRunnerIT extends SecurityAnalyticsRestTestCase { + private static final Logger log = LogManager.getLogger(ThreatIntelJobRunnerIT.class); + + public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws IOException { + + // 1. 
create a detector + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(2, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) 
(hit.getSourceAsMap().get("detector")); + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + List iocs = getThreatIntelFeedIocs(3); + assertEquals(iocs.size(),3); + + // 2. delete a threat intel feed ioc index manually + List feedId = getThreatIntelFeedIds(1); + for (String feedid: feedId) { + String name = String.format(Locale.ROOT, "%s-%s%s", ".opensearch-sap-threatintel", feedid, "1"); + deleteIndex(name); + } + +// // 3. update the start time to a day before so it runs now +// StringEntity stringEntity = new StringEntity( +// "{\"doc\":{\"last_update_time\":{\"schedule\":{\"interval\":{\"start_time\":" + +// "\"$startTimeMillis\"}}}}}", +// ContentType.APPLICATION_JSON +// ); +// +// Response updateJobRespose = makeRequest(client(), "POST", ".scheduler-sap-threatintel-job/_update/$id" , Collections.emptyMap(), stringEntity, null, null); +// assertEquals("Updated job scheduler", RestStatus.CREATED, restStatus(updateJobRespose)); + + // 4. 
validate new ioc is created + List newIocs = getThreatIntelFeedIocs(1); + assertEquals(0, newIocs.size()); //TODO + } + + private List getThreatIntelFeedIocs(int num) throws IOException { + String request = getMatchAllSearchRequestString(num); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); + } + + private List getThreatIntelFeedIds(int num) throws IOException { + String request = getMatchAllSearchRequestString(num); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + return getTifdList(res, xContentRegistry()).stream().map(it -> it.getFeedId()).collect(Collectors.toList()); + } + +// private String getJobSchedulerDoc(int num) throws IOException { +// String request = getMatchAllSearchRequestString(num); +// SearchResponse res = executeSearchAndGetResponse(".scheduler-sap-threatintel-job*", request, false); +// } + + private static String getMatchAllSearchRequestString(int num) { + return "{\n" + + "\"size\" : " + num + "," + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java new file mode 100644 index 000000000..6096fa382 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; + +import 
org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.jobscheduler.spi.JobDocVersion; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.TestHelpers; + +public class TIFJobExtensionTests extends ThreatIntelTestCase { + private static final Logger log = LogManager.getLogger(TIFJobExtensionTests.class); + + public void testBasic() { + TIFJobExtension extension = new TIFJobExtension(); + assertEquals("scheduler_sap_threatintel_job", extension.getJobType()); + assertEquals(JOB_INDEX_NAME, extension.getJobIndex()); + assertEquals(TIFJobRunner.getJobRunnerInstance(), extension.getJobRunner()); + } + + public void testParser() throws Exception { + TIFJobExtension extension = new TIFJobExtension(); + String id = TestHelpers.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); + + TIFJobParameter anotherTIFJobParameter = (TIFJobParameter) extension.getJobParser() + .parse( + createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), + TestHelpers.randomLowerCaseString(), + new JobDocVersion(randomPositiveLong(), randomPositiveLong(), randomPositiveLong()) + ); + log.info("first"); + log.error(tifJobParameter); + log.error(tifJobParameter.getName()); + log.info("second"); + log.error(anotherTIFJobParameter); + log.error(anotherTIFJobParameter.getName()); + + assertTrue(tifJobParameter.getName().equals(anotherTIFJobParameter.getName())); + assertTrue(tifJobParameter.getLastUpdateTime().equals(anotherTIFJobParameter.getLastUpdateTime())); + 
assertTrue(tifJobParameter.getSchedule().equals(anotherTIFJobParameter.getSchedule())); + assertTrue(tifJobParameter.getState().equals(anotherTIFJobParameter.getState())); + assertTrue(tifJobParameter.getIndices().equals(anotherTIFJobParameter.getIndices())); + } + +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java new file mode 100644 index 000000000..5b0605d79 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java @@ -0,0 +1,238 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.junit.Before; +import org.opensearch.ResourceAlreadyExistsException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.StepListener; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.delete.DeleteRequest; +import org.opensearch.action.delete.DeleteResponse; +import org.opensearch.action.get.GetRequest; +import org.opensearch.action.get.GetResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.TestHelpers; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; + 
+import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TIFJobParameterServiceTests extends ThreatIntelTestCase { + private TIFJobParameterService tifJobParameterService; + + @Before + public void init() { + tifJobParameterService = new TIFJobParameterService(verifyingClient, clusterService); + } + + public void testcreateJobIndexIfNotExists_whenIndexExist_thenCreateRequestIsNotCalled() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(true); + + // Verify + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException("Shouldn't get called"); }); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createJobIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testcreateJobIndexIfNotExists_whenIndexExist_thenCreateRequestIsCalled() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + + // Verify + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof CreateIndexRequest); + CreateIndexRequest request = (CreateIndexRequest) actionRequest; + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals("1", request.settings().get("index.number_of_shards")); + assertEquals("0-all", request.settings().get("index.auto_expand_replicas")); + assertEquals("true", request.settings().get("index.hidden")); + assertNotNull(request.mappings()); + return null; + }); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createJobIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testcreateJobIndexIfNotExists_whenIndexCreatedAlready_thenExceptionIsIgnored() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + verifyingClient.setExecuteVerifier( + (actionResponse, 
actionRequest) -> { throw new ResourceAlreadyExistsException(TIFJobExtension.JOB_INDEX_NAME); } + ); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createJobIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testcreateJobIndexIfNotExists_whenExceptionIsThrown_thenExceptionIsThrown() { + when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException(); }); + + // Run + StepListener stepListener = new StepListener<>(); + tifJobParameterService.createJobIndexIfNotExists(stepListener); + + // Verify stepListener is called + expectThrows(RuntimeException.class, () -> stepListener.result()); + } + + public void testUpdateTIFJobParameter_whenValidInput_thenSucceed() throws Exception { + String tifJobName = TestHelpers.randomLowerCaseString(); + TIFJobParameter tifJobParameter = new TIFJobParameter( + tifJobName, + new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS) + ); + Instant previousTime = Instant.now().minusMillis(1); + tifJobParameter.setLastUpdateTime(previousTime); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof IndexRequest); + IndexRequest request = (IndexRequest) actionRequest; + assertEquals(tifJobParameter.getName(), request.id()); + assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); + return null; + }); + + tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); + assertTrue(previousTime.isBefore(tifJobParameter.getLastUpdateTime())); + } + + public void testsaveTIFJobParameter_whenValidInput_thenSucceed() { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + Instant 
previousTime = Instant.now().minusMillis(1); + tifJobParameter.setLastUpdateTime(previousTime); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof IndexRequest); + IndexRequest indexRequest = (IndexRequest) actionRequest; + assertEquals(TIFJobExtension.JOB_INDEX_NAME, indexRequest.index()); + assertEquals(tifJobParameter.getName(), indexRequest.id()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, indexRequest.getRefreshPolicy()); + assertEquals(DocWriteRequest.OpType.CREATE, indexRequest.opType()); + return null; + }); + + tifJobParameterService.saveTIFJobParameter(tifJobParameter, mock(ActionListener.class)); + assertTrue(previousTime.isBefore(tifJobParameter.getLastUpdateTime())); + } + + public void testGetTifJobParameter_whenException_thenNull() throws Exception { + TIFJobParameter tifJobParameter = setupClientForGetRequest(true, new IndexNotFoundException(TIFJobExtension.JOB_INDEX_NAME)); + assertNull(tifJobParameterService.getJobParameter(tifJobParameter.getName())); + } + + public void testGetTifJobParameter_whenExist_thenReturnTifJobParameter() throws Exception { + TIFJobParameter tifJobParameter = setupClientForGetRequest(true, null); + TIFJobParameter anotherTIFJobParameter = tifJobParameterService.getJobParameter(tifJobParameter.getName()); + + assertTrue(tifJobParameter.getName().equals(anotherTIFJobParameter.getName())); + assertTrue(tifJobParameter.getLastUpdateTime().equals(anotherTIFJobParameter.getLastUpdateTime())); + assertTrue(tifJobParameter.getSchedule().equals(anotherTIFJobParameter.getSchedule())); + assertTrue(tifJobParameter.getState().equals(anotherTIFJobParameter.getState())); + assertTrue(tifJobParameter.getIndices().equals(anotherTIFJobParameter.getIndices())); + } + + public void testGetTifJobParameter_whenNotExist_thenNull() throws Exception { + TIFJobParameter tifJobParameter = setupClientForGetRequest(false, null); + 
assertNull(tifJobParameterService.getJobParameter(tifJobParameter.getName())); + } + + private TIFJobParameter setupClientForGetRequest(final boolean isExist, final RuntimeException exception) { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof GetRequest); + GetRequest request = (GetRequest) actionRequest; + assertEquals(tifJobParameter.getName(), request.id()); + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + GetResponse response = getMockedGetResponse(isExist ? tifJobParameter : null); + if (exception != null) { + throw exception; + } + return response; + }); + return tifJobParameter; + } + + public void testDeleteTifJobParameter_whenValidInput_thenSucceed() { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof DeleteRequest); + DeleteRequest request = (DeleteRequest) actionRequest; + assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(DocWriteRequest.OpType.DELETE, request.opType()); + assertEquals(tifJobParameter.getName(), request.id()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); + + DeleteResponse response = mock(DeleteResponse.class); + when(response.status()).thenReturn(RestStatus.OK); + return response; + }); + + // Run + tifJobParameterService.deleteTIFJobParameter(tifJobParameter); + } + + public void testDeleteTifJobParameter_whenIndexNotFound_thenThrowException() { + TIFJobParameter tifJobParameter = randomTifJobParameter(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + DeleteResponse response = mock(DeleteResponse.class); + when(response.status()).thenReturn(RestStatus.NOT_FOUND); + return response; + }); + + // Run + expectThrows(ResourceNotFoundException.class, () -> 
tifJobParameterService.deleteTIFJobParameter(tifJobParameter)); + } + + private GetResponse getMockedGetResponse(TIFJobParameter tifJobParameter) { + GetResponse response = mock(GetResponse.class); + when(response.isExists()).thenReturn(tifJobParameter != null); + when(response.getSourceAsBytesRef()).thenReturn(toBytesReference(tifJobParameter)); + return response; + } + + private BytesReference toBytesReference(TIFJobParameter tifJobParameter) { + if (tifJobParameter == null) { + return null; + } + + try { + return BytesReference.bytes(tifJobParameter.toXContent(JsonXContent.contentBuilder(), null)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + +} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java new file mode 100644 index 000000000..85aeef5b9 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java @@ -0,0 +1,107 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.TestHelpers; +import org.opensearch.securityanalytics.model.DetectorTrigger; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.Locale; + +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; + +public 
class TIFJobParameterTests extends ThreatIntelTestCase { + private static final Logger log = LogManager.getLogger(TIFJobParameterTests.class); + + public void testParser_whenAllValueIsFilled_thenSucceed() throws IOException { + String id = TestHelpers.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); + tifJobParameter.enable(); + tifJobParameter.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); + tifJobParameter.getUpdateStats().setLastSucceededAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + tifJobParameter.getUpdateStats().setLastSkippedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + tifJobParameter.getUpdateStats().setLastFailedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + + TIFJobParameter anotherTIFJobParameter = TIFJobParameter.PARSER.parse( + createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), + null + ); + + assertTrue(tifJobParameter.getName().equals(anotherTIFJobParameter.getName())); + assertTrue(tifJobParameter.getLastUpdateTime().equals(anotherTIFJobParameter.getLastUpdateTime())); + assertTrue(tifJobParameter.getEnabledTime().equals(anotherTIFJobParameter.getEnabledTime())); + assertTrue(tifJobParameter.getSchedule().equals(anotherTIFJobParameter.getSchedule())); + assertTrue(tifJobParameter.getState().equals(anotherTIFJobParameter.getState())); + assertTrue(tifJobParameter.getIndices().equals(anotherTIFJobParameter.getIndices())); + assertTrue(tifJobParameter.getUpdateStats().getLastFailedAt().equals(anotherTIFJobParameter.getUpdateStats().getLastFailedAt())); + assertTrue(tifJobParameter.getUpdateStats().getLastSkippedAt().equals(anotherTIFJobParameter.getUpdateStats().getLastSkippedAt())); + assertTrue(tifJobParameter.getUpdateStats().getLastSucceededAt().equals(anotherTIFJobParameter.getUpdateStats().getLastSucceededAt())); + 
assertTrue(tifJobParameter.getUpdateStats().getLastProcessingTimeInMillis().equals(anotherTIFJobParameter.getUpdateStats().getLastProcessingTimeInMillis())); + + } + + public void testParser_whenNullForOptionalFields_thenSucceed() throws IOException { // TODO: same issue + String id = TestHelpers.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); + TIFJobParameter anotherTIFJobParameter = TIFJobParameter.PARSER.parse( + createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), + null + ); + + assertTrue(tifJobParameter.getName().equals(anotherTIFJobParameter.getName())); + assertTrue(tifJobParameter.getLastUpdateTime().equals(anotherTIFJobParameter.getLastUpdateTime())); + assertTrue(tifJobParameter.getSchedule().equals(anotherTIFJobParameter.getSchedule())); + assertTrue(tifJobParameter.getState().equals(anotherTIFJobParameter.getState())); + assertTrue(tifJobParameter.getIndices().equals(anotherTIFJobParameter.getIndices())); + } + + public void testCurrentIndexName_whenNotExpired_thenReturnName() { + String id = TestHelpers.randomLowerCaseString(); + TIFJobParameter datasource = new TIFJobParameter(); + datasource.setName(id); + } + + public void testNewIndexName_whenCalled_thenReturnedExpectedValue() { + TIFMetadata tifMetadata = new TIFMetadata("mock_id", + "mock url", + "mock name", + "mock org", + "mock description", + "mock csv", + "mock ip", + 1, + false); + + String name = tifMetadata.getFeedId(); + String suffix = "1"; + TIFJobParameter tifJobParameter = new TIFJobParameter(); + tifJobParameter.setName(name); + assertEquals(String.format(Locale.ROOT, "%s-%s%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, name, suffix), tifJobParameter.newIndexName(tifJobParameter,tifMetadata)); + tifJobParameter.getIndices().add(tifJobParameter.newIndexName(tifJobParameter,tifMetadata)); + + 
log.error(tifJobParameter.getIndices()); + + String anotherSuffix = "2"; + assertEquals(String.format(Locale.ROOT, "%s-%s%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, name, anotherSuffix), tifJobParameter.newIndexName(tifJobParameter,tifMetadata)); + } + + public void testLockDurationSeconds() { + TIFJobParameter datasource = new TIFJobParameter(); + assertNotNull(datasource.getLockDurationSeconds()); + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java new file mode 100644 index 000000000..f54631462 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java @@ -0,0 +1,167 @@ + +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.junit.Before; +import org.opensearch.jobscheduler.spi.JobDocVersion; +import org.opensearch.jobscheduler.spi.JobExecutionContext; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.TestHelpers; + +import java.io.IOException; +import java.time.Instant; +import java.util.Optional; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +public class TIFJobRunnerTests extends ThreatIntelTestCase { + @Before + public void init() { + TIFJobRunner.getJobRunnerInstance() + .initialize(clusterService, tifJobUpdateService, tifJobParameterService, tifLockService, threadPool); + } + + public void testGetJobRunnerInstance_whenCalledAgain_thenReturnSameInstance() { + 
assertTrue(TIFJobRunner.getJobRunnerInstance() == TIFJobRunner.getJobRunnerInstance()); + } + + public void testRunJob_whenInvalidClass_thenThrowException() { + JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); + String jobIndexName = TestHelpers.randomLowerCaseString(); + String jobId = TestHelpers.randomLowerCaseString(); + JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + + // Run + expectThrows(IllegalStateException.class, () -> TIFJobRunner.getJobRunnerInstance().runJob(jobParameter, jobExecutionContext)); + } + + public void testRunJob_whenValidInput_thenSucceed() throws IOException { + JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); + String jobIndexName = TestHelpers.randomLowerCaseString(); + String jobId = TestHelpers.randomLowerCaseString(); + JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); + TIFJobParameter tifJobParameter = randomTifJobParameter(); + + LockModel lockModel = randomLockModel(); + when(tifLockService.acquireLock(tifJobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( + Optional.of(lockModel) + ); + + // Run + TIFJobRunner.getJobRunnerInstance().runJob(tifJobParameter, jobExecutionContext); + + // Verify + verify(tifLockService).acquireLock(tifJobParameter.getName(), tifLockService.LOCK_DURATION_IN_SECONDS); + verify(tifJobParameterService).getJobParameter(tifJobParameter.getName()); + verify(tifLockService).releaseLock(lockModel); + } + + public void testUpdateTIFJobRunner_whenExceptionBeforeAcquiringLock_thenNoReleaseLock() { + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + when(jobParameter.getName()).thenReturn(TestHelpers.randomLowerCaseString()); + 
when(tifLockService.acquireLock(jobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenThrow( + new RuntimeException() + ); + + // Run + expectThrows(Exception.class, () -> TIFJobRunner.getJobRunnerInstance().updateJobRunner(jobParameter).run()); + + // Verify + verify(tifLockService, never()).releaseLock(any()); + } + + public void testUpdateTIFJobRunner_whenExceptionAfterAcquiringLock_thenReleaseLock() throws IOException { + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + when(jobParameter.getName()).thenReturn(TestHelpers.randomLowerCaseString()); + LockModel lockModel = randomLockModel(); + when(tifLockService.acquireLock(jobParameter.getName(), TIFLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( + Optional.of(lockModel) + ); + when(tifJobParameterService.getJobParameter(jobParameter.getName())).thenThrow(new RuntimeException()); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobRunner(jobParameter).run(); + + // Verify + verify(tifLockService).releaseLock(any()); + } + + public void testUpdateTIFJob_whenTIFJobDoesNotExist_thenDoNothing() throws IOException { + TIFJobParameter tifJob = new TIFJobParameter(); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(tifJob, mock(Runnable.class)); + + // Verify + verify(tifJobUpdateService, never()).deleteAllTifdIndices(TestHelpers.randomLowerCaseStringList(),TestHelpers.randomLowerCaseStringList()); + } + + public void testUpdateTIFJob_whenInvalidState_thenUpdateLastFailedAt() throws IOException { + TIFJobParameter tifJob = new TIFJobParameter(); + tifJob.enable(); + tifJob.getUpdateStats().setLastFailedAt(null); + tifJob.setState(randomStateExcept(TIFJobState.AVAILABLE)); + when(tifJobParameterService.getJobParameter(tifJob.getName())).thenReturn(tifJob); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(tifJob, mock(Runnable.class)); + + // Verify + assertFalse(tifJob.isEnabled()); + 
assertNotNull(tifJob.getUpdateStats().getLastFailedAt()); + verify(tifJobParameterService).updateJobSchedulerParameter(tifJob); + } + + public void testUpdateTIFJob_whenValidInput_thenSucceed() throws IOException { + TIFJobParameter tifJob = randomTifJobParameter(); + tifJob.setState(TIFJobState.AVAILABLE); + when(tifJobParameterService.getJobParameter(tifJob.getName())).thenReturn(tifJob); + Runnable renewLock = mock(Runnable.class); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(tifJob, renewLock); + + // Verify + verify(tifJobUpdateService, times(0)).deleteAllTifdIndices(TestHelpers.randomLowerCaseStringList(),TestHelpers.randomLowerCaseStringList()); + verify(tifJobUpdateService).createThreatIntelFeedData(tifJob, renewLock); + } + + public void testUpdateTIFJob_whenDeleteTask_thenDeleteOnly() throws IOException { + TIFJobParameter tifJob = randomTifJobParameter(); + tifJob.setState(TIFJobState.AVAILABLE); + when(tifJobParameterService.getJobParameter(tifJob.getName())).thenReturn(tifJob); + Runnable renewLock = mock(Runnable.class); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(tifJob, renewLock); + + // Verify + verify(tifJobUpdateService, times(0)).deleteAllTifdIndices(TestHelpers.randomLowerCaseStringList(),TestHelpers.randomLowerCaseStringList()); + } + + public void testUpdateTIFJobExceptionHandling() throws IOException { + TIFJobParameter tifJob = new TIFJobParameter(); + tifJob.setName(TestHelpers.randomLowerCaseString()); + tifJob.getUpdateStats().setLastFailedAt(null); + when(tifJobParameterService.getJobParameter(tifJob.getName())).thenReturn(tifJob); + doThrow(new RuntimeException("test failure")).when(tifJobUpdateService).deleteAllTifdIndices(TestHelpers.randomLowerCaseStringList(),TestHelpers.randomLowerCaseStringList()); + + // Run + TIFJobRunner.getJobRunnerInstance().updateJobParameter(tifJob, mock(Runnable.class)); + + // Verify + assertNotNull(tifJob.getUpdateStats().getLastFailedAt()); + 
verify(tifJobParameterService).updateJobSchedulerParameter(tifJob); + } +} + diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java new file mode 100644 index 000000000..76b0f8fe4 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateServiceTests.java @@ -0,0 +1,52 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.jobscheduler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.Before; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; +import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.*; + +@SuppressForbidden(reason = "unit test") +public class TIFJobUpdateServiceTests extends ThreatIntelTestCase { + + private TIFJobUpdateService tifJobUpdateService1; + + @Before + public void init() { + tifJobUpdateService1 = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService, builtInTIFMetadataLoader); + } + + public void testUpdateOrCreateThreatIntelFeedData_whenValidInput_thenSucceed() throws IOException { + + ShardRouting shardRouting = mock(ShardRouting.class); + when(shardRouting.started()).thenReturn(true); + when(routingTable.allShards(anyString())).thenReturn(Arrays.asList(shardRouting)); + + TIFJobParameter tifJobParameter = new TIFJobParameter(); + tifJobParameter.setState(TIFJobState.AVAILABLE); + + tifJobParameter.getUpdateStats().setLastSucceededAt(null); 
+ tifJobParameter.getUpdateStats().setLastProcessingTimeInMillis(null); + + // Run + List newFeeds = tifJobUpdateService1.createThreatIntelFeedData(tifJobParameter, mock(Runnable.class)); + + // Verify feeds + assertNotNull(newFeeds); + } + +} diff --git a/src/test/resources/threatIntel/sample_csv_with_description_and_header.csv b/src/test/resources/threatIntel/sample_csv_with_description_and_header.csv new file mode 100644 index 000000000..750377fd6 --- /dev/null +++ b/src/test/resources/threatIntel/sample_csv_with_description_and_header.csv @@ -0,0 +1,4 @@ +# description + +ip +1.0.0.0/24 \ No newline at end of file diff --git a/src/test/resources/threatIntel/sample_valid.csv b/src/test/resources/threatIntel/sample_valid.csv index fad1eb6fd..c599b6888 100644 --- a/src/test/resources/threatIntel/sample_valid.csv +++ b/src/test/resources/threatIntel/sample_valid.csv @@ -1,3 +1,2 @@ -ip,region 1.0.0.0/24,Australia 10.0.0.0/24,USA \ No newline at end of file From 0bdd58b3b31458fdc0bb40e5e66f148fa6dfe618 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 17 Oct 2023 15:36:48 -0700 Subject: [PATCH 21/39] fix threat intel integ tests and add update detector logic Signed-off-by: Surya Sashank Nistala --- .../SecurityAnalyticsPlugin.java | 9 +--- .../settings/SecurityAnalyticsSettings.java | 2 +- .../DetectorThreatIntelService.java | 45 +++++++++++++++++-- .../threatIntel/ThreatIntelFeedDataUtils.java | 4 ++ .../action/TransportPutTIFJobAction.java | 2 +- .../BuiltInTIFMetadataLoader.java | 4 ++ .../feedMetadata/TIFMetadataService.java | 0 .../jobscheduler/TIFJobRunner.java | 9 +++- .../securityanalytics/TestHelpers.java | 4 +- .../resthandler/DetectorMonitorRestApiIT.java | 3 +- .../threatIntel/ThreatIntelTestCase.java | 3 ++ .../jobscheduler/TIFJobRunnerTests.java | 3 +- 12 files changed, 71 insertions(+), 17 deletions(-) delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/TIFMetadataService.java diff --git 
a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index d64b47528..c04189ad6 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -112,9 +112,6 @@ public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, Map private BuiltinLogTypeLoader builtinLogTypeLoader; private LogTypeService logTypeService; - - private Client client; - @Override public Collection getSystemIndexDescriptors(Settings settings){ return List.of(new SystemIndexDescriptor(THREAT_INTEL_DATA_INDEX_NAME_PREFIX, "System index used for threat intel data")); @@ -145,14 +142,12 @@ public Collection createComponents(Client client, ruleIndices = new RuleIndices(logTypeService, client, clusterService, threadPool); correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); ThreatIntelFeedDataService threatIntelFeedDataService = new ThreatIntelFeedDataService(clusterService, client, indexNameExpressionResolver, xContentRegistry); - DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService); + DetectorThreatIntelService detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService, client, xContentRegistry); TIFJobParameterService tifJobParameterService = new TIFJobParameterService(client, clusterService); TIFJobUpdateService tifJobUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService, builtInTIFMetadataLoader); TIFLockService threatIntelLockService = new TIFLockService(clusterService, client); - this.client = client; - - TIFJobRunner.getJobRunnerInstance().initialize(clusterService,tifJobUpdateService, tifJobParameterService, threatIntelLockService, threadPool); + 
TIFJobRunner.getJobRunnerInstance().initialize(clusterService,tifJobUpdateService, tifJobParameterService, threatIntelLockService, threadPool, detectorThreatIntelService); return List.of( detectorIndices, correlationIndices, correlationRuleIndices, ruleTopicIndices, customLogTypeIndices, ruleIndices, diff --git a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java index 48cb49fac..f3e3b2f5d 100644 --- a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java +++ b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java @@ -121,7 +121,7 @@ public class SecurityAnalyticsSettings { // threat intel settings public static final Setting TIF_UPDATE_INTERVAL = Setting.timeSetting( - "plugins.security_analytics.threat_intel_timeout", + "plugins.security_analytics.threatintel.tifjob.update_interval", TimeValue.timeValueHours(24), TimeValue.timeValueHours(1), Setting.Property.NodeScope, diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index 3c532d50e..b02adef04 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -6,10 +6,19 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.support.WriteRequest; import org.opensearch.client.Client; -import org.opensearch.common.settings.Settings; import org.opensearch.commons.alerting.model.DocLevelQuery; import org.opensearch.core.action.ActionListener; +import 
org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.rest.RestRequest; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.action.IndexDetectorAction; +import org.opensearch.securityanalytics.action.IndexDetectorRequest; +import org.opensearch.securityanalytics.action.SearchDetectorAction; +import org.opensearch.securityanalytics.action.SearchDetectorRequest; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.LogType; import org.opensearch.securityanalytics.model.ThreatIntelFeedData; @@ -24,15 +33,22 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import static org.opensearch.securityanalytics.model.Detector.DETECTORS_INDEX; +import static org.opensearch.securityanalytics.util.DetectorUtils.getDetectors; + public class DetectorThreatIntelService { private static final Logger log = LogManager.getLogger(DetectorThreatIntelService.class); private final ThreatIntelFeedDataService threatIntelFeedDataService; + private final Client client; + private final NamedXContentRegistry xContentRegistry; - public DetectorThreatIntelService(ThreatIntelFeedDataService threatIntelFeedDataService) { + public DetectorThreatIntelService(ThreatIntelFeedDataService threatIntelFeedDataService, Client client, NamedXContentRegistry xContentRegistry) { this.threatIntelFeedDataService = threatIntelFeedDataService; + this.client = client; + this.xContentRegistry = xContentRegistry; } @@ -62,7 +78,7 @@ public List createDocLevelQueriesFromThreatIntelList( queries.add(new DocLevelQuery( constructId(detector, entry.getKey()), tifdList.get(0).getFeedId(), Collections.emptyList(), - "windows-hostname:(120.85.114.146 OR 103.104.106.223 OR 185.191.246.45 OR 120.86.237.94)", + String.format(query, field), List.of("threat_intel", entry.getKey() /*ioc_type*/) )); } @@ -128,6 +144,29 @@ private static String constructId(Detector detector, String iocType) { 
} public void updateDetectorsWithLatestThreatIntelRules() { + //todo : fix query for fetching detectors with threat intel enabled = true +// String searchReq = "{ \"query\": { \"match\": { \"detector.threat_intel_enabled\": true } } }"; + SearchRequest searchRequest = new SearchRequest(DETECTORS_INDEX); + SearchSourceBuilder ssb = searchRequest.source(); + ssb.size(9999); + client.execute(SearchDetectorAction.INSTANCE, new SearchDetectorRequest(new SearchRequest().source(ssb)), + ActionListener.wrap(r -> { + List detectors = getDetectors(r, xContentRegistry); + detectors.forEach(detector -> { + assert detector.getThreatIntelEnabled(); + client.execute(IndexDetectorAction.INSTANCE, new IndexDetectorRequest( + detector.getId(), WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + detector), + ActionListener.wrap( + res -> log.debug("updated {} with latest threat intel info", res.getDetector().getId()), + e -> log.error(() -> new ParameterizedMessage("Failed to update detector {} with latest threat intel info", detector.getId()), e))); + } + ); + }, e -> { + log.error("Failed to fetch detectors to update with threat intel queries.", e); + })); + } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java index 75a20f1a5..540fc6cde 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java @@ -1,3 +1,7 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics.threatIntel; import org.apache.logging.log4j.LogManager; diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java index 060e67620..6ad3a04bd 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java @@ -34,7 +34,7 @@ import static org.opensearch.securityanalytics.threatIntel.common.TIFLockService.LOCK_DURATION_IN_SECONDS; /** - * Transport action to create tif job + * Transport action to create job to fetch threat intel feed data and save IoCs */ public class TransportPutTIFJobAction extends HandledTransportAction { private static final Logger log = LogManager.getLogger(TransportPutTIFJobAction.class); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java index 967d4c936..6b84e9fe9 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/BuiltInTIFMetadataLoader.java @@ -1,3 +1,7 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics.threatIntel.feedMetadata; import org.apache.logging.log4j.LogManager; diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/TIFMetadataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/feedMetadata/TIFMetadataService.java deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java index ca1f61347..fdb07618d 100644 --- 
a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java @@ -21,6 +21,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.time.Instant; +import org.opensearch.securityanalytics.threatIntel.DetectorThreatIntelService; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; import org.opensearch.threadpool.ThreadPool; @@ -55,6 +56,7 @@ public static TIFJobRunner getJobRunnerInstance() { private TIFLockService lockService; private boolean initialized; private ThreadPool threadPool; + private DetectorThreatIntelService detectorThreatIntelService; public void setThreadPool(ThreadPool threadPool) { this.threadPool = threadPool; @@ -69,7 +71,8 @@ public void initialize( final TIFJobUpdateService jobSchedulerUpdateService, final TIFJobParameterService jobSchedulerParameterService, final TIFLockService threatIntelLockService, - final ThreadPool threadPool + final ThreadPool threadPool, + DetectorThreatIntelService detectorThreatIntelService ) { this.clusterService = clusterService; this.jobSchedulerUpdateService = jobSchedulerUpdateService; @@ -77,6 +80,7 @@ public void initialize( this.lockService = threatIntelLockService; this.threadPool = threadPool; this.initialized = true; + this.detectorThreatIntelService = detectorThreatIntelService; } @Override @@ -152,6 +156,9 @@ protected void updateJobParameter(final ScheduledJobParameter jobParameter, fina Instant endTime = Instant.now(); jobSchedulerUpdateService.deleteAllTifdIndices(oldIndices, newFeedIndices); jobSchedulerUpdateService.updateJobSchedulerParameterAsSucceeded(newFeedIndices, jobSchedulerParameter, startTime, endTime); + if(false == newFeedIndices.isEmpty()) { + detectorThreatIntelService.updateDetectorsWithLatestThreatIntelRules(); + } } catch (Exception e) { log.error("Failed 
to update jobSchedulerParameter for {}", jobSchedulerParameter.getName(), e); jobSchedulerParameter.getUpdateStats().setLastFailedAt(Instant.now()); diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index 9b17c4aa2..bf57e4b06 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -1397,7 +1397,7 @@ public static String randomDocWithIpIoc(int severity, int version, String ioc) "\"AccountType\":\"User\",\n" + "\"Message\":\"Dns query:\\r\\nRuleName: \\r\\nUtcTime: 2020-02-04 14:59:38.349\\r\\nProcessGuid: {b3c285a4-3cda-5dc0-0000-001077270b00}\\r\\nProcessId: 1904\\r\\nQueryName: EC2AMAZ-EPO7HKA\\r\\nQueryStatus: 0\\r\\nQueryResults: 172.31.46.38;\\r\\nImage: C:\\\\Program Files\\\\nxlog\\\\nxlog.exe\",\n" + "\"Category\":\"Dns query (rule: DnsQuery)\",\n" + - "\"Opcode\":\"%blahblah\",\n" + + "\"Opcode\":\"blahblah\",\n" + "\"UtcTime\":\"2020-02-04 14:59:38.349\",\n" + "\"ProcessGuid\":\"{b3c285a4-3cda-5dc0-0000-001077270b00}\",\n" + "\"ProcessId\":\"1904\",\"QueryName\":\"EC2AMAZ-EPO7HKA\",\"QueryStatus\":\"0\",\n" + @@ -1409,7 +1409,7 @@ public static String randomDocWithIpIoc(int severity, int version, String ioc) "\"CommandLine\": \"eachtest\",\n" + "\"Initiated\": \"true\"\n" + "}"; - return String.format(Locale.ROOT, ioc, doc, severity, version); + return String.format(Locale.ROOT, doc, ioc, severity, version); } diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 07e862369..55fea4224 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -20,6 +20,7 @@ import 
org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.model.Rule; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; import java.io.IOException; import java.util.ArrayList; @@ -45,6 +46,7 @@ import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; import static org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataUtils.getTifdList; +import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; public class DetectorMonitorRestApiIT extends SecurityAnalyticsRestTestCase { /** @@ -1227,7 +1229,6 @@ public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIn List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); assertEquals(1, monitorRunResults.size()); - Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); int noOfSigmaRuleMatches = docLevelQueryResults.size(); assertEquals(1, noOfSigmaRuleMatches); diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java index a6661b32a..20d36ab2d 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelTestCase.java @@ -89,6 +89,8 @@ public abstract class ThreatIntelTestCase extends RestActionTestCase { protected Settings settings; private AutoCloseable openMocks; @Mock + protected DetectorThreatIntelService detectorThreatIntelService; + @Mock protected TIFJobParameter tifJobParameter; @Before @@ -109,6 +111,7 @@ public void prepareThreatIntelTestCase() { 
when(clusterState.routingTable()).thenReturn(routingTable); when(ingestService.getClusterService()).thenReturn(clusterService); when(threadPool.generic()).thenReturn(OpenSearchExecutors.newDirectExecutorService()); + detectorThreatIntelService = new DetectorThreatIntelService(threatIntelFeedDataService, client, xContentRegistry()); } @After diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java index f54631462..82038a91f 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunnerTests.java @@ -11,6 +11,7 @@ import org.opensearch.jobscheduler.spi.JobExecutionContext; import org.opensearch.jobscheduler.spi.LockModel; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.securityanalytics.threatIntel.DetectorThreatIntelService; import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; @@ -27,7 +28,7 @@ public class TIFJobRunnerTests extends ThreatIntelTestCase { @Before public void init() { TIFJobRunner.getJobRunnerInstance() - .initialize(clusterService, tifJobUpdateService, tifJobParameterService, tifLockService, threadPool); + .initialize(clusterService, tifJobUpdateService, tifJobParameterService, tifLockService, threadPool, detectorThreatIntelService); } public void testGetJobRunnerInstance_whenCalledAgain_thenReturnSameInstance() { From 0e8928684df8335693c7e54fcabf0d8a643f5e30 Mon Sep 17 00:00:00 2001 From: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Date: Thu, 19 Oct 2023 00:44:59 -0700 Subject: [PATCH 22/39] JS for Threat intel feeds - changed extension (#675) * merge 
conflicts Signed-off-by: Joanne Wang * fixed java wildcards and changed update key name Signed-off-by: Joanne Wang * integ test failing Signed-off-by: Joanne Wang * fix job scheduler params Signed-off-by: Joanne Wang * changed extension and has debug messages Signed-off-by: Joanne Wang * clean up Signed-off-by: Joanne Wang * fixed job scheduler plugin spi jar resolution * cleaned up TODOs and changed job scheduler name Signed-off-by: Joanne Wang --------- Signed-off-by: Joanne Wang Co-authored-by: Surya Sashank Nistala --- build.gradle | 22 +-- .../SecurityAnalyticsPlugin.java | 42 +++++- .../settings/SecurityAnalyticsSettings.java | 3 +- .../ThreatIntelFeedDataService.java | 34 +++-- .../threatIntel/action/PutTIFJobRequest.java | 4 - .../action/RestPutTIFJobHandler.java | 65 +++++++++ .../action/TransportDeleteTIFJobAction.java | 1 - .../threatIntel/common/TIFLockService.java | 3 +- .../jobscheduler/TIFJobExtension.java | 46 ------ .../jobscheduler/TIFJobParameter.java | 111 +++++++++++--- .../jobscheduler/TIFJobParameterService.java | 25 ++-- .../jobscheduler/TIFJobRunner.java | 3 - .../jobscheduler/TIFJobUpdateService.java | 8 +- .../TransportSearchDetectorAction.java | 11 +- .../resthandler/DetectorMonitorRestApiIT.java | 2 +- .../integTests/ThreatIntelJobRunnerIT.java | 138 ++++++++++++++---- .../jobscheduler/TIFJobExtensionTests.java | 58 -------- .../TIFJobParameterServiceTests.java | 23 +-- .../jobscheduler/TIFJobParameterTests.java | 2 +- 19 files changed, 370 insertions(+), 231 deletions(-) create mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtension.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java diff --git a/build.gradle b/build.gradle index c21d74360..681287dbb 100644 --- a/build.gradle +++ b/build.gradle @@ -69,7 +69,7 @@ 
opensearchplugin { name 'opensearch-security-analytics' description 'OpenSearch Security Analytics plugin' classname 'org.opensearch.securityanalytics.SecurityAnalyticsPlugin' -// extendedPlugins = ['opensearch-job-scheduler'] + extendedPlugins = ['opensearch-job-scheduler'] } javaRestTest { @@ -143,12 +143,6 @@ repositories { sourceSets.main.java.srcDirs = ['src/main/generated','src/main/java'] configurations { zipArchive - - all { - resolutionStrategy { - force "com.google.guava:guava:32.0.1-jre" - } - } } dependencies { @@ -159,7 +153,7 @@ dependencies { api "org.opensearch:common-utils:${common_utils_version}@jar" api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" - implementation "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" + compileOnly "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" implementation "org.apache.commons:commons-csv:1.10.0" // Needed for integ tests @@ -167,12 +161,6 @@ dependencies { zipArchive group: 'org.opensearch.plugin', name:'opensearch-notifications-core', version: "${opensearch_build}" zipArchive group: 'org.opensearch.plugin', name:'notifications', version: "${opensearch_build}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${opensearch_build}" - - //spotless - implementation('com.google.googlejavaformat:google-java-format:1.17.0') { - exclude group: 'com.google.guava' - } - implementation 'com.google.guava:guava:32.0.1-jre' } // RPM & Debian build @@ -303,6 +291,12 @@ testClusters.integTest { } } })) + nodes.each { node -> + def plugins = node.plugins + def firstPlugin = plugins.get(0) + plugins.remove(0) + plugins.add(firstPlugin) + } } run { diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index c04189ad6..96970bac8 100644 --- 
a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -4,8 +4,12 @@ */ package org.opensearch.securityanalytics; -import java.util.*; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.function.Supplier; +import java.util.Optional; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.core.action.ActionListener; @@ -32,6 +36,9 @@ import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.mapper.Mapper; import org.opensearch.indices.SystemIndexDescriptor; +import org.opensearch.jobscheduler.spi.JobSchedulerExtension; +import org.opensearch.jobscheduler.spi.ScheduledJobParser; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; import org.opensearch.plugins.*; import org.opensearch.repositories.RepositoriesService; import org.opensearch.rest.RestController; @@ -54,6 +61,7 @@ import org.opensearch.securityanalytics.threatIntel.action.*; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; import org.opensearch.securityanalytics.threatIntel.feedMetadata.BuiltInTIFMetadataLoader; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobRunner; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobUpdateService; @@ -68,13 +76,12 @@ import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.securityanalytics.util.RuleIndices; import org.opensearch.securityanalytics.util.RuleTopicIndices; -import org.opensearch.threadpool.ExecutorBuilder; import org.opensearch.threadpool.ThreadPool; import org.opensearch.watcher.ResourceWatcherService; import static 
org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; -public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin, ClusterPlugin, SystemIndexPlugin { +public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin, ClusterPlugin, SystemIndexPlugin, JobSchedulerExtension { private static final Logger log = LogManager.getLogger(SecurityAnalyticsPlugin.class); @@ -90,6 +97,8 @@ public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, Map public static final String CORRELATION_RULES_BASE_URI = PLUGINS_BASE_URI + "/correlation/rules"; public static final String CUSTOM_LOG_TYPE_URI = PLUGINS_BASE_URI + "/logtype"; + public static final String JOB_INDEX_NAME = ".opensearch-sap-threatintel-job"; + public static final Map TIF_JOB_INDEX_SETTING = Map.of("index.number_of_shards", 1, "index.auto_expand_replicas", "0-all", "index.hidden", true); private CorrelationRuleIndices correlationRuleIndices; @@ -117,6 +126,8 @@ public Collection getSystemIndexDescriptors(Settings sett return List.of(new SystemIndexDescriptor(THREAT_INTEL_DATA_INDEX_NAME_PREFIX, "System index used for threat intel data")); } + + @Override public Collection createComponents(Client client, ClusterService clusterService, @@ -147,7 +158,7 @@ public Collection createComponents(Client client, TIFJobUpdateService tifJobUpdateService = new TIFJobUpdateService(clusterService, tifJobParameterService, threatIntelFeedDataService, builtInTIFMetadataLoader); TIFLockService threatIntelLockService = new TIFLockService(clusterService, client); - TIFJobRunner.getJobRunnerInstance().initialize(clusterService,tifJobUpdateService, tifJobParameterService, threatIntelLockService, threadPool, detectorThreatIntelService); + TIFJobRunner.getJobRunnerInstance().initialize(clusterService, tifJobUpdateService, tifJobParameterService, 
threatIntelLockService, threadPool, detectorThreatIntelService); return List.of( detectorIndices, correlationIndices, correlationRuleIndices, ruleTopicIndices, customLogTypeIndices, ruleIndices, @@ -192,10 +203,31 @@ public List getRestHandlers(Settings settings, new RestSearchCorrelationRuleAction(), new RestIndexCustomLogTypeAction(), new RestSearchCustomLogTypeAction(), - new RestDeleteCustomLogTypeAction() + new RestDeleteCustomLogTypeAction(), + new RestPutTIFJobHandler(clusterSettings) ); } + @Override + public String getJobType() { + return "opensearch_sap_threatintel_job"; + } + + @Override + public String getJobIndex() { + return JOB_INDEX_NAME; + } + + @Override + public ScheduledJobRunner getJobRunner() { + return TIFJobRunner.getJobRunnerInstance(); + } + + @Override + public ScheduledJobParser getJobParser() { + return (parser, id, jobDocVersion) -> TIFJobParameter.PARSER.parse(parser, null); + } + @Override public List getNamedXContent() { return List.of( diff --git a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java index f3e3b2f5d..d3a7b8b6f 100644 --- a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java +++ b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java @@ -122,8 +122,7 @@ public class SecurityAnalyticsSettings { // threat intel settings public static final Setting TIF_UPDATE_INTERVAL = Setting.timeSetting( "plugins.security_analytics.threatintel.tifjob.update_interval", - TimeValue.timeValueHours(24), - TimeValue.timeValueHours(1), + TimeValue.timeValueMinutes(1), Setting.Property.NodeScope, Setting.Property.Dynamic ); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 5ecff4b55..b82d16c20 100644 
--- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -52,6 +52,8 @@ import java.util.Arrays; import java.util.Optional; import java.util.concurrent.CountDownLatch; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import java.util.stream.Collectors; import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter.THREAT_INTEL_DATA_INDEX_NAME_PREFIX; @@ -103,7 +105,7 @@ public void getThreatIntelFeedData( ) { try { //if index not exists - if(IndexUtils.getNewIndexByCreationDate( + if (IndexUtils.getNewIndexByCreationDate( this.clusterService.state(), this.indexNameExpressionResolver, ".opensearch-sap-threatintel*" @@ -129,7 +131,7 @@ public void getThreatIntelFeedData( listener.onFailure(e); } } - + private void createThreatIntelFeedData() throws InterruptedException { CountDownLatch countDownLatch = new CountDownLatch(1); client.execute(PutTIFJobAction.INSTANCE, new PutTIFJobRequest("feed_updater", clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL))).actionGet(); @@ -138,7 +140,7 @@ private void createThreatIntelFeedData() throws InterruptedException { /** * Create an index for a threat intel feed - * + *

* Index setting start with single shard, zero replica, no refresh interval, and hidden. * Once the threat intel feed is indexed, do refresh and force merge. * Then, change the index setting to expand replica to all nodes, and read only allow delete. @@ -174,7 +176,7 @@ private String getIndexMapping() { * Puts threat intel feed from CSVRecord iterator into a given index in bulk * * @param indexName Index name to save the threat intel feed - * @param iterator TIF data to insert + * @param iterator TIF data to insert * @param renewLock Runnable to renew lock */ public void parseAndSaveThreatIntelFeedDataCSV( @@ -197,6 +199,10 @@ public void parseAndSaveThreatIntelFeedDataCSV( String iocType = tifMetadata.getIocType(); //todo make generic in upcoming versions Integer colNum = tifMetadata.getIocCol(); String iocValue = record.values()[colNum].split(" ")[0]; + if (iocType.equals("ip") && !isValidIp(iocValue)) { + log.info("Invalid IP address, skipping this ioc record."); + continue; + } String feedId = tifMetadata.getFeedId(); Instant timestamp = Instant.now(); ThreatIntelFeedData threatIntelFeedData = new ThreatIntelFeedData(iocType, iocValue, feedId, timestamp); @@ -218,8 +224,14 @@ public void parseAndSaveThreatIntelFeedDataCSV( freezeIndex(indexName); } - public void saveTifds(BulkRequest bulkRequest, TimeValue timeout) { + public static boolean isValidIp(String ip) { + String ipPattern = "^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$"; + Pattern pattern = Pattern.compile(ipPattern); + Matcher matcher = pattern.matcher(ip); + return matcher.matches(); + } + public void saveTifds(BulkRequest bulkRequest, TimeValue timeout) { try { BulkResponse response = StashedThreadContext.run(client, () -> { return client.bulk(bulkRequest).actionGet(timeout); @@ -252,10 +264,6 @@ private void freezeIndex(final String indexName) { }); } - public void deleteThreatIntelDataIndex(final String index) { - deleteThreatIntelDataIndex(Arrays.asList(index)); - } - public void 
deleteThreatIntelDataIndex(final List indices) { if (indices == null || indices.isEmpty()) { return; @@ -286,10 +294,4 @@ public void deleteThreatIntelDataIndex(final List indices) { throw new OpenSearchException("failed to delete data[{}]", String.join(",", indices)); } } - public static class ThreatIntelFeedUpdateHandler implements Runnable { - - @Override - public void run() { - - } - }} +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java index fa1587a66..5f58e5529 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/PutTIFJobRequest.java @@ -45,10 +45,6 @@ public TimeValue getUpdateInterval() { return this.updateInterval; } - public void setUpdateInterval(TimeValue timeValue) { - this.updateInterval = timeValue; - } - /** * Default constructor * @param name name of a tif job diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java new file mode 100644 index 000000000..641445a57 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java @@ -0,0 +1,65 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.threatIntel.action; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; +import 
org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.opensearch.rest.RestRequest.Method.GET; +import static org.opensearch.rest.RestRequest.Method.PUT; + +/** + * Rest handler for threat intel TIFjob creation + * + * This handler handles a request of + * PUT /_plugins/security_analytics/threatintel/tifjob/{id} + * { + * "id": {id}, + * "name": {name}, + * "update_interval_in_days": 1 + * } + * + * When request is received, it will create a TIFjob + * After the creation of TIFjob is completed, it will schedule the next update task after update_interval_in_days. + * + */ +public class RestPutTIFJobHandler extends BaseRestHandler { + private static final String ACTION_NAME = "threatintel_tifjob_put"; + private final ClusterSettings clusterSettings; + + public RestPutTIFJobHandler(final ClusterSettings clusterSettings) { + this.clusterSettings = clusterSettings; + } + + @Override + public String getName() { + return ACTION_NAME; + } + + @Override + protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final PutTIFJobRequest putTIFJobRequest = new PutTIFJobRequest("jobname", + new TimeValue(1, TimeUnit.MINUTES)); + + return channel -> client.executeLocally(PutTIFJobAction.INSTANCE, putTIFJobRequest, new RestToXContentListener<>(channel)); + } + + @Override + public List routes() { + String path = "/_p/_s"; + return List.of(new Route(GET, path)); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java index 45fc037d8..3a0c68f10 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java +++ 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java @@ -85,7 +85,6 @@ protected void doExecute(final Task task, final DeleteTIFJobRequest request, fin return; } try { - // TODO: makes every sub-methods as async call to avoid using a thread in generic pool threadPool.generic().submit(() -> { try { deleteTIFJob(request.getName()); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java index 386fec0c3..7ec4e94f3 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFLockService.java @@ -5,7 +5,8 @@ package org.opensearch.securityanalytics.threatIntel.common; -import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; +import static org.opensearch.securityanalytics.SecurityAnalyticsPlugin.JOB_INDEX_NAME; + import java.time.Instant; import java.util.Optional; diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtension.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtension.java deleted file mode 100644 index 023323253..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtension.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import org.opensearch.jobscheduler.spi.ScheduledJobParser; -import org.opensearch.jobscheduler.spi.ScheduledJobRunner; - -import java.util.Map; - -public class TIFJobExtension implements org.opensearch.jobscheduler.spi.JobSchedulerExtension { - /** - * Job index name for a TIF job - */ - public static final String JOB_INDEX_NAME = 
".scheduler-sap-threatintel-job"; - - /** - * Job index setting - * - * We want it to be single shard so that job can be run only in a single node by job scheduler. - * We want it to expand to all replicas so that querying to this index can be done locally to reduce latency. - */ - public static final Map INDEX_SETTING = Map.of("index.number_of_shards", 1, "index.auto_expand_replicas", "0-all", "index.hidden", true); - - @Override - public String getJobType() { - return "scheduler_sap_threatintel_job"; - } - - @Override - public String getJobIndex() { - return JOB_INDEX_NAME; - } - - @Override - public ScheduledJobRunner getJobRunner() { - return TIFJobRunner.getJobRunnerInstance(); - } - - @Override - public ScheduledJobParser getJobParser() { - return (parser, id, jobDocVersion) -> TIFJobParameter.PARSER.parse(parser, null); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java index 0a24ffb75..115ed873e 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java @@ -12,12 +12,14 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ConstructingObjectParser; -import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.*; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; +import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobRequest; +import 
org.opensearch.securityanalytics.threatIntel.common.TIFJobState; +import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; +import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; import java.io.IOException; import java.time.Instant; @@ -29,35 +31,45 @@ import static org.opensearch.common.time.DateUtils.toInstant; -import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobRequest; -import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; -import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; -import org.opensearch.securityanalytics.threatIntel.common.TIFMetadata; - public class TIFJobParameter implements Writeable, ScheduledJobParameter { /** * Prefix of indices having threatIntel data */ public static final String THREAT_INTEL_DATA_INDEX_NAME_PREFIX = ".opensearch-sap-threatintel"; + + /** + * String fields for job scheduling parameters used for ParseField + */ + private static final String name_field = "name"; + private static final String enabled_field = "update_enabled"; + private static final String last_update_time_field = "last_update_time"; + private static final String last_update_time_field_readable = "last_update_time_field"; + private static final String schedule_field = "schedule"; + private static final String enabled_time_field = "enabled_time"; + private static final String enabled_time_field_readable = "enabled_time_field"; + private static final String state_field = "state"; + private static final String indices_field = "indices"; + private static final String update_stats_field = "update_stats"; + + /** * Default fields for job scheduling */ - private static final ParseField NAME_FIELD = new ParseField("name"); - private static final ParseField ENABLED_FIELD = new ParseField("update_enabled"); - private static final ParseField LAST_UPDATE_TIME_FIELD = new ParseField("last_update_time"); - private static final ParseField LAST_UPDATE_TIME_FIELD_READABLE = new 
ParseField("last_update_time_field"); - public static final ParseField SCHEDULE_FIELD = new ParseField("schedule"); - private static final ParseField ENABLED_TIME_FIELD = new ParseField("enabled_time"); - private static final ParseField ENABLED_TIME_FIELD_READABLE = new ParseField("enabled_time_field"); + public static final ParseField NAME_FIELD = new ParseField(name_field); + public static final ParseField ENABLED_FIELD = new ParseField(enabled_field); + public static final ParseField LAST_UPDATE_TIME_FIELD = new ParseField(last_update_time_field); + public static final ParseField LAST_UPDATE_TIME_FIELD_READABLE = new ParseField(last_update_time_field_readable); + public static final ParseField SCHEDULE_FIELD = new ParseField(schedule_field); + public static final ParseField ENABLED_TIME_FIELD = new ParseField(enabled_time_field); + public static final ParseField ENABLED_TIME_FIELD_READABLE = new ParseField(enabled_time_field_readable); /** * Additional fields for tif job */ - private static final ParseField STATE_FIELD = new ParseField("state"); - private static final ParseField INDICES_FIELD = new ParseField("indices"); - private static final ParseField UPDATE_STATS_FIELD = new ParseField("update_stats"); - + public static final ParseField STATE_FIELD = new ParseField(state_field); + public static final ParseField INDICES_FIELD = new ParseField(indices_field); + public static final ParseField UPDATE_STATS_FIELD = new ParseField(update_stats_field); /** * Default variables for job scheduling @@ -113,6 +125,61 @@ public class TIFJobParameter implements Writeable, ScheduledJobParameter { */ private UpdateStats updateStats; + public static TIFJobParameter parse(XContentParser xcp, String id, Long version) throws IOException { + String name = null; + Instant lastUpdateTime = null; + Boolean isEnabled = null; + TIFJobState state = null; + + xcp.nextToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + while 
(xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case name_field: + name = xcp.text(); + break; + case last_update_time_field: + lastUpdateTime = Instant.ofEpochMilli(xcp.longValue()); + break; + case enabled_field: + isEnabled = xcp.booleanValue(); + break; + case state_field: + state = toState(xcp.text()); + break; + default: + xcp.skipChildren(); + } + } + return new TIFJobParameter(name, lastUpdateTime, isEnabled, state); + } + + public static TIFJobState toState(String stateName){ + if (stateName.equals("CREATING")){ + return TIFJobState.CREATING; + } + if (stateName.equals("AVAILABLE")){ + return TIFJobState.AVAILABLE; + } + if (stateName.equals("CREATE_FAILED")){ + return TIFJobState.CREATE_FAILED; + } + if (stateName.equals("DELETING")){ + return TIFJobState.DELETING; + } + return null; + } + + public TIFJobParameter(final String name, final Instant lastUpdateTime, final Boolean isEnabled, TIFJobState state) { + this.name = name; + this.lastUpdateTime = lastUpdateTime; + this.isEnabled = isEnabled; + this.state = state; + } + /** * tif job parser */ @@ -174,7 +241,7 @@ public TIFJobParameter(final String name, final IntervalSchedule schedule) { name, Instant.now().truncatedTo(ChronoUnit.MILLIS), null, - true, + false, schedule, TIFJobState.CREATING, new ArrayList<>(), @@ -479,8 +546,8 @@ public static TIFJobParameter build(final PutTIFJobRequest request) { String name = request.getName(); IntervalSchedule schedule = new IntervalSchedule( Instant.now().truncatedTo(ChronoUnit.MILLIS), - (int) request.getUpdateInterval().hours(), - ChronoUnit.DAYS + (int) request.getUpdateInterval().minutes(), + ChronoUnit.MINUTES ); return new TIFJobParameter(name, schedule); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java index 70f052549..b977cb4ba 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java @@ -31,6 +31,7 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; import org.opensearch.securityanalytics.threatIntel.common.StashedThreadContext; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; @@ -64,12 +65,12 @@ public TIFJobParameterService(final Client client, final ClusterService clusterS * @param stepListener setup listener */ public void createJobIndexIfNotExists(final StepListener stepListener) { - if (clusterService.state().metadata().hasIndex(TIFJobExtension.JOB_INDEX_NAME) == true) { + if (clusterService.state().metadata().hasIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME) == true) { stepListener.onResponse(null); return; } - final CreateIndexRequest createIndexRequest = new CreateIndexRequest(TIFJobExtension.JOB_INDEX_NAME).mapping(getIndexMapping()) - .settings(TIFJobExtension.INDEX_SETTING); + final CreateIndexRequest createIndexRequest = new CreateIndexRequest(SecurityAnalyticsPlugin.JOB_INDEX_NAME).mapping(getIndexMapping()) + .settings(SecurityAnalyticsPlugin.TIF_JOB_INDEX_SETTING); StashedThreadContext.run(client, () -> client.admin().indices().create(createIndexRequest, new ActionListener<>() { @Override public void onResponse(final CreateIndexResponse createIndexResponse) { @@ -79,7 +80,7 @@ public void onResponse(final CreateIndexResponse createIndexResponse) { @Override public void onFailure(final Exception e) { if (e instanceof 
ResourceAlreadyExistsException) { - log.info("index[{}] already exist", TIFJobExtension.JOB_INDEX_NAME); + log.info("index[{}] already exist", SecurityAnalyticsPlugin.JOB_INDEX_NAME); stepListener.onResponse(null); return; } @@ -97,7 +98,7 @@ private String getIndexMapping() { } } catch (IOException e) { log.error("Runtime exception", e); - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); } } @@ -110,7 +111,7 @@ public IndexResponse updateJobSchedulerParameter(final TIFJobParameter jobSchedu jobSchedulerParameter.setLastUpdateTime(Instant.now()); return StashedThreadContext.run(client, () -> { try { - return client.prepareIndex(TIFJobExtension.JOB_INDEX_NAME) + return client.prepareIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME) .setId(jobSchedulerParameter.getName()) .setOpType(DocWriteRequest.OpType.INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -118,7 +119,7 @@ public IndexResponse updateJobSchedulerParameter(final TIFJobParameter jobSchedu .execute() .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); } catch (IOException e) { - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); } }); } @@ -130,16 +131,16 @@ public IndexResponse updateJobSchedulerParameter(final TIFJobParameter jobSchedu * @throws IOException exception */ public TIFJobParameter getJobParameter(final String name) throws IOException { - GetRequest request = new GetRequest(TIFJobExtension.JOB_INDEX_NAME, name); + GetRequest request = new GetRequest(SecurityAnalyticsPlugin.JOB_INDEX_NAME, name); GetResponse response; try { response = StashedThreadContext.run(client, () -> 
client.get(request).actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT))); if (response.isExists() == false) { - log.error("TIF job[{}] does not exist in an index[{}]", name, TIFJobExtension.JOB_INDEX_NAME); + log.error("TIF job[{}] does not exist in an index[{}]", name, SecurityAnalyticsPlugin.JOB_INDEX_NAME); return null; } } catch (IndexNotFoundException e) { - log.error("Index[{}] is not found", TIFJobExtension.JOB_INDEX_NAME); + log.error("Index[{}] is not found", SecurityAnalyticsPlugin.JOB_INDEX_NAME); return null; } @@ -161,7 +162,7 @@ public void saveTIFJobParameter(final TIFJobParameter tifJobParameter, final Act tifJobParameter.setLastUpdateTime(Instant.now()); StashedThreadContext.run(client, () -> { try { - client.prepareIndex(TIFJobExtension.JOB_INDEX_NAME) + client.prepareIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME) .setId(tifJobParameter.getName()) .setOpType(DocWriteRequest.OpType.CREATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -181,7 +182,7 @@ public void saveTIFJobParameter(final TIFJobParameter tifJobParameter, final Act */ public void deleteTIFJobParameter(final TIFJobParameter tifJobParameter) { DeleteResponse response = client.prepareDelete() - .setIndex(TIFJobExtension.JOB_INDEX_NAME) + .setIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME) .setId(tifJobParameter.getName()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute() diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java index fdb07618d..e3500064f 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobRunner.java @@ -150,12 +150,9 @@ protected void updateJobParameter(final ScheduledJobParameter jobParameter, fina } try { // create new TIF data and delete old 
ones - Instant startTime = Instant.now(); List oldIndices = new ArrayList<>(jobSchedulerParameter.getIndices()); List newFeedIndices = jobSchedulerUpdateService.createThreatIntelFeedData(jobSchedulerParameter, renewLock); - Instant endTime = Instant.now(); jobSchedulerUpdateService.deleteAllTifdIndices(oldIndices, newFeedIndices); - jobSchedulerUpdateService.updateJobSchedulerParameterAsSucceeded(newFeedIndices, jobSchedulerParameter, startTime, endTime); if(false == newFeedIndices.isEmpty()) { detectorThreatIntelService.updateDetectorsWithLatestThreatIntelRules(); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java index 45ad50b35..3006285ad 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobUpdateService.java @@ -99,6 +99,8 @@ private List deleteIndices(final List indicesToDelete) { * @throws IOException */ public List createThreatIntelFeedData(final TIFJobParameter jobSchedulerParameter, final Runnable renewLock) throws IOException { + Instant startTime = Instant.now(); + List freshIndices = new ArrayList<>(); for (TIFMetadata tifMetadata : builtInTIFMetadataLoader.getTifMetadataList()) { String indexName = setupIndex(jobSchedulerParameter, tifMetadata); @@ -140,6 +142,8 @@ public List createThreatIntelFeedData(final TIFJobParameter jobScheduler } freshIndices.add(indexName); } + Instant endTime = Instant.now(); + updateJobSchedulerParameterAsSucceeded(freshIndices, jobSchedulerParameter, startTime, endTime); return freshIndices; } @@ -163,7 +167,7 @@ public void updateJobSchedulerParameterAsSucceeded( jobSchedulerParameter.setState(TIFJobState.AVAILABLE); jobSchedulerParameterService.updateJobSchedulerParameter(jobSchedulerParameter); log.info( - "threat intel feed 
database creation succeeded for {} and took {} seconds", + "threat intel feed data creation succeeded for {} and took {} seconds", jobSchedulerParameter.getName(), Duration.between(startTime, endTime) ); @@ -205,7 +209,7 @@ protected void waitUntilAllShardsStarted(final String indexName, final int timeo ); } catch (InterruptedException e) { log.error("runtime exception", e); - throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); //TODO + throw new SecurityAnalyticsException("Runtime exception", RestStatus.INTERNAL_SERVER_ERROR, e); } } } diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java index 53ef22a76..0643b34d7 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java @@ -21,12 +21,15 @@ import org.opensearch.securityanalytics.action.SearchDetectorAction; import org.opensearch.securityanalytics.action.SearchDetectorRequest; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.threatIntel.action.TransportPutTIFJobAction; import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.threadpool.ThreadPool; import org.opensearch.tasks.Task; import org.opensearch.transport.TransportService; +import java.util.concurrent.CountDownLatch; + import static org.opensearch.securityanalytics.util.DetectorUtils.getEmptySearchResponse; public class TransportSearchDetectorAction extends HandledTransportAction implements SecureTransportAction { @@ -45,11 +48,13 @@ public class TransportSearchDetectorAction extends HandledTransportAction() { @Override public void onResponse(SearchResponse response) { - actionListener.onResponse(response); + 
actionListener.onResponse(response); } @Override diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 55fea4224..e5d71024b 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -46,7 +46,7 @@ import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; import static org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataUtils.getTifdList; -import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; +import static org.opensearch.securityanalytics.SecurityAnalyticsPlugin.JOB_INDEX_NAME; public class DetectorMonitorRestApiIT extends SecurityAnalyticsRestTestCase { /** diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java index a3df0c4cd..21501a796 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java @@ -11,10 +11,15 @@ import org.apache.hc.core5.http.HttpStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Request; import org.opensearch.client.Response; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.rest.RestStatus; 
+import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.search.SearchHit; import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; @@ -22,13 +27,17 @@ import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.DetectorInput; import org.opensearch.securityanalytics.model.DetectorRule; +import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; import java.io.IOException; +import java.time.Instant; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Locale; +import java.util.Arrays; +import java.util.ArrayList; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.opensearch.securityanalytics.TestHelpers.*; @@ -38,9 +47,9 @@ public class ThreatIntelJobRunnerIT extends SecurityAnalyticsRestTestCase { private static final Logger log = LogManager.getLogger(ThreatIntelJobRunnerIT.class); - public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatIntel() throws IOException { + public void testCreateDetector_threatIntelEnabled_testJobRunner() throws IOException, InterruptedException { - // 1. create a detector + // Create a detector updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -102,46 +111,117 @@ public void testCreateDetector_threatIntelEnabled_updateDetectorWithNewThreatInt List iocs = getThreatIntelFeedIocs(3); assertEquals(iocs.size(),3); - // 2. 
delete a threat intel feed ioc index manually - List feedId = getThreatIntelFeedIds(1); - for (String feedid: feedId) { - String name = String.format(Locale.ROOT, "%s-%s%s", ".opensearch-sap-threatintel", feedid, "1"); - deleteIndex(name); + // get job runner index and verify parameters exist + List jobMetaDataList = getJobSchedulerParameter(); + assertEquals(1, jobMetaDataList.size()); + TIFJobParameter jobMetaData = jobMetaDataList.get(0); + Instant firstUpdatedTime = jobMetaData.getLastUpdateTime(); + assertNotNull("Job runner parameter index does not have metadata set", jobMetaData.getLastUpdateTime()); + assertEquals(jobMetaData.isEnabled(), true); + + // get list of first updated time for threat intel feed data + List originalFeedTimestamp = getThreatIntelFeedsTime(); + + //verify feed index exists and each feed_id exists + List feedId = getThreatIntelFeedIds(); + assertNotNull(feedId); + + // wait for job runner to run + Thread.sleep(60000); + waitUntil(() -> { + try { + return verifyJobRan(firstUpdatedTime); + } catch (IOException e) { + throw new RuntimeException("failed to verify that job ran"); + } + }, 120, TimeUnit.SECONDS); + + // verify job's last update time is different + List newJobMetaDataList = getJobSchedulerParameter(); + assertEquals(1, newJobMetaDataList.size()); + TIFJobParameter newJobMetaData = newJobMetaDataList.get(0); + Instant lastUpdatedTime = newJobMetaData.getLastUpdateTime(); + assertNotEquals(firstUpdatedTime.toString(), lastUpdatedTime.toString()); + + // verify new threat intel feed timestamp is different + List newFeedTimestamp = getThreatIntelFeedsTime(); + for (int i =0; i< newFeedTimestamp.size(); i++) { + assertNotEquals(newFeedTimestamp.get(i), originalFeedTimestamp.get(i)); } -// // 3. 
update the start time to a day before so it runs now -// StringEntity stringEntity = new StringEntity( -// "{\"doc\":{\"last_update_time\":{\"schedule\":{\"interval\":{\"start_time\":" + -// "\"$startTimeMillis\"}}}}}", -// ContentType.APPLICATION_JSON -// ); -// -// Response updateJobRespose = makeRequest(client(), "POST", ".scheduler-sap-threatintel-job/_update/$id" , Collections.emptyMap(), stringEntity, null, null); -// assertEquals("Updated job scheduler", RestStatus.CREATED, restStatus(updateJobRespose)); - - // 4. validate new ioc is created - List newIocs = getThreatIntelFeedIocs(1); - assertEquals(0, newIocs.size()); //TODO + // verify detector is updated by checking last updated time of detector + // TODO + + } + + protected boolean verifyJobRan(Instant firstUpdatedTime) throws IOException { + // verify job's last update time is different + List newJobMetaDataList = getJobSchedulerParameter(); + assertEquals(1, newJobMetaDataList.size()); + + TIFJobParameter newJobMetaData = newJobMetaDataList.get(0); + Instant newUpdatedTime = newJobMetaData.getLastUpdateTime(); + if (!firstUpdatedTime.toString().equals(newUpdatedTime.toString())){ + return true; + } + return false; } private List getThreatIntelFeedIocs(int num) throws IOException { - String request = getMatchAllSearchRequestString(num); + String request = getMatchNumSearchRequestString(num); SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); } - private List getThreatIntelFeedIds(int num) throws IOException { - String request = getMatchAllSearchRequestString(num); + private List getThreatIntelFeedIds() throws IOException { + String request = getMatchAllSearchRequestString(); SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); return getTifdList(res, xContentRegistry()).stream().map(it -> 
it.getFeedId()).collect(Collectors.toList()); } -// private String getJobSchedulerDoc(int num) throws IOException { -// String request = getMatchAllSearchRequestString(num); -// SearchResponse res = executeSearchAndGetResponse(".scheduler-sap-threatintel-job*", request, false); -// } + private List getThreatIntelFeedsTime() throws IOException { + String request = getMatchAllSearchRequestString(); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + return getTifdList(res, xContentRegistry()).stream().map(it -> it.getTimestamp()).collect(Collectors.toList()); + } + + private List getJobSchedulerParameter() throws IOException { + String request = getMatchAllSearchRequestString(); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel-job*", request, false); + return getTIFJobParameterList(res, xContentRegistry()).stream().collect(Collectors.toList()); + } + public static List getTIFJobParameterList(SearchResponse searchResponse, NamedXContentRegistry xContentRegistry) { + List list = new ArrayList<>(); + if (searchResponse.getHits().getHits().length != 0) { + Arrays.stream(searchResponse.getHits().getHits()).forEach(hit -> { + try { + XContentParser xcp = XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() + ); + list.add(TIFJobParameter.parse(xcp, hit.getId(), hit.getVersion())); + } catch (Exception e) { + log.error(() -> new ParameterizedMessage( + "Failed to parse TIF Job Parameter metadata from hit {}", hit), + e + ); + } + + }); + } + return list; + } + + private static String getMatchAllSearchRequestString() { + return "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + } - private static String getMatchAllSearchRequestString(int num) { + private static String getMatchNumSearchRequestString(int num) { return "{\n" + "\"size\" : " + num + "," + " \"query\" : {\n" + diff --git 
a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java deleted file mode 100644 index 6096fa382..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobExtensionTests.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.jobscheduler; - -import static org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobExtension.JOB_INDEX_NAME; - -import java.time.Instant; -import java.time.temporal.ChronoUnit; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.jobscheduler.spi.JobDocVersion; -import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.securityanalytics.model.DetectorTrigger; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.TestHelpers; - -public class TIFJobExtensionTests extends ThreatIntelTestCase { - private static final Logger log = LogManager.getLogger(TIFJobExtensionTests.class); - - public void testBasic() { - TIFJobExtension extension = new TIFJobExtension(); - assertEquals("scheduler_sap_threatintel_job", extension.getJobType()); - assertEquals(JOB_INDEX_NAME, extension.getJobIndex()); - assertEquals(TIFJobRunner.getJobRunnerInstance(), extension.getJobRunner()); - } - - public void testParser() throws Exception { - TIFJobExtension extension = new TIFJobExtension(); - String id = TestHelpers.randomLowerCaseString(); - IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); - TIFJobParameter tifJobParameter = new TIFJobParameter(id, schedule); - - TIFJobParameter 
anotherTIFJobParameter = (TIFJobParameter) extension.getJobParser() - .parse( - createParser(tifJobParameter.toXContent(XContentFactory.jsonBuilder(), null)), - TestHelpers.randomLowerCaseString(), - new JobDocVersion(randomPositiveLong(), randomPositiveLong(), randomPositiveLong()) - ); - log.info("first"); - log.error(tifJobParameter); - log.error(tifJobParameter.getName()); - log.info("second"); - log.error(anotherTIFJobParameter); - log.error(anotherTIFJobParameter.getName()); - - assertTrue(tifJobParameter.getName().equals(anotherTIFJobParameter.getName())); - assertTrue(tifJobParameter.getLastUpdateTime().equals(anotherTIFJobParameter.getLastUpdateTime())); - assertTrue(tifJobParameter.getSchedule().equals(anotherTIFJobParameter.getSchedule())); - assertTrue(tifJobParameter.getState().equals(anotherTIFJobParameter.getState())); - assertTrue(tifJobParameter.getIndices().equals(anotherTIFJobParameter.getIndices())); - } - -} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java index 5b0605d79..35fd2450d 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java @@ -23,6 +23,7 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.index.IndexNotFoundException; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; import org.opensearch.securityanalytics.TestHelpers; @@ -42,7 +43,7 @@ public void init() { } public void testcreateJobIndexIfNotExists_whenIndexExist_thenCreateRequestIsNotCalled() { - 
when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(true); + when(metadata.hasIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME)).thenReturn(true); // Verify verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException("Shouldn't get called"); }); @@ -56,13 +57,13 @@ public void testcreateJobIndexIfNotExists_whenIndexExist_thenCreateRequestIsNotC } public void testcreateJobIndexIfNotExists_whenIndexExist_thenCreateRequestIsCalled() { - when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + when(metadata.hasIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME)).thenReturn(false); // Verify verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { assertTrue(actionRequest instanceof CreateIndexRequest); CreateIndexRequest request = (CreateIndexRequest) actionRequest; - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(SecurityAnalyticsPlugin.JOB_INDEX_NAME, request.index()); assertEquals("1", request.settings().get("index.number_of_shards")); assertEquals("0-all", request.settings().get("index.auto_expand_replicas")); assertEquals("true", request.settings().get("index.hidden")); @@ -79,9 +80,9 @@ public void testcreateJobIndexIfNotExists_whenIndexExist_thenCreateRequestIsCall } public void testcreateJobIndexIfNotExists_whenIndexCreatedAlready_thenExceptionIsIgnored() { - when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + when(metadata.hasIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME)).thenReturn(false); verifyingClient.setExecuteVerifier( - (actionResponse, actionRequest) -> { throw new ResourceAlreadyExistsException(TIFJobExtension.JOB_INDEX_NAME); } + (actionResponse, actionRequest) -> { throw new ResourceAlreadyExistsException(SecurityAnalyticsPlugin.JOB_INDEX_NAME); } ); // Run @@ -93,7 +94,7 @@ public void testcreateJobIndexIfNotExists_whenIndexCreatedAlready_thenExceptionI } public void 
testcreateJobIndexIfNotExists_whenExceptionIsThrown_thenExceptionIsThrown() { - when(metadata.hasIndex(TIFJobExtension.JOB_INDEX_NAME)).thenReturn(false); + when(metadata.hasIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME)).thenReturn(false); verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException(); }); // Run @@ -118,7 +119,7 @@ public void testUpdateTIFJobParameter_whenValidInput_thenSucceed() throws Except IndexRequest request = (IndexRequest) actionRequest; assertEquals(tifJobParameter.getName(), request.id()); assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(SecurityAnalyticsPlugin.JOB_INDEX_NAME, request.index()); assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); return null; }); @@ -135,7 +136,7 @@ public void testsaveTIFJobParameter_whenValidInput_thenSucceed() { verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { assertTrue(actionRequest instanceof IndexRequest); IndexRequest indexRequest = (IndexRequest) actionRequest; - assertEquals(TIFJobExtension.JOB_INDEX_NAME, indexRequest.index()); + assertEquals(SecurityAnalyticsPlugin.JOB_INDEX_NAME, indexRequest.index()); assertEquals(tifJobParameter.getName(), indexRequest.id()); assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, indexRequest.getRefreshPolicy()); assertEquals(DocWriteRequest.OpType.CREATE, indexRequest.opType()); @@ -147,7 +148,7 @@ public void testsaveTIFJobParameter_whenValidInput_thenSucceed() { } public void testGetTifJobParameter_whenException_thenNull() throws Exception { - TIFJobParameter tifJobParameter = setupClientForGetRequest(true, new IndexNotFoundException(TIFJobExtension.JOB_INDEX_NAME)); + TIFJobParameter tifJobParameter = setupClientForGetRequest(true, new IndexNotFoundException(SecurityAnalyticsPlugin.JOB_INDEX_NAME)); assertNull(tifJobParameterService.getJobParameter(tifJobParameter.getName())); 
} @@ -174,7 +175,7 @@ private TIFJobParameter setupClientForGetRequest(final boolean isExist, final Ru assertTrue(actionRequest instanceof GetRequest); GetRequest request = (GetRequest) actionRequest; assertEquals(tifJobParameter.getName(), request.id()); - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(SecurityAnalyticsPlugin.JOB_INDEX_NAME, request.index()); GetResponse response = getMockedGetResponse(isExist ? tifJobParameter : null); if (exception != null) { throw exception; @@ -190,7 +191,7 @@ public void testDeleteTifJobParameter_whenValidInput_thenSucceed() { // Verify assertTrue(actionRequest instanceof DeleteRequest); DeleteRequest request = (DeleteRequest) actionRequest; - assertEquals(TIFJobExtension.JOB_INDEX_NAME, request.index()); + assertEquals(SecurityAnalyticsPlugin.JOB_INDEX_NAME, request.index()); assertEquals(DocWriteRequest.OpType.DELETE, request.opType()); assertEquals(tifJobParameter.getName(), request.id()); assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java index 85aeef5b9..f7b7ff8d1 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterTests.java @@ -53,7 +53,7 @@ public void testParser_whenAllValueIsFilled_thenSucceed() throws IOException { } - public void testParser_whenNullForOptionalFields_thenSucceed() throws IOException { // TODO: same issue + public void testParser_whenNullForOptionalFields_thenSucceed() throws IOException { String id = TestHelpers.randomLowerCaseString(); IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); TIFJobParameter tifJobParameter = 
new TIFJobParameter(id, schedule); From 730b458d4ac4d567fdaab25bee5037cccd160908 Mon Sep 17 00:00:00 2001 From: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Date: Thu, 19 Oct 2023 18:27:25 -0700 Subject: [PATCH 23/39] TIF Job Runner Cleanup (#676) * merge conflicts Signed-off-by: Joanne Wang * fixed java wildcards and changed update key name Signed-off-by: Joanne Wang * integ test failing Signed-off-by: Joanne Wang * fix job scheduler params Signed-off-by: Joanne Wang * changed extension and has debug messages Signed-off-by: Joanne Wang * clean up Signed-off-by: Joanne Wang * fixed job scheduler plugin spi jar resolution * cleaned up TODOs and changed job scheduler name Signed-off-by: Joanne Wang * removed google commons unused import, updated interval setting, removed rest action Signed-off-by: Joanne Wang * removed policy file and updated name for job scheduler Signed-off-by: Joanne Wang * responded to comments about parameter validator and TIFMetadata Signed-off-by: Joanne Wang * refactored ThreatIntelFeedDataService and changed variables to public static final where possible Signed-off-by: Joanne Wang * changed opensearch-sap-threatintel to opensearch-sap-threat-intel Signed-off-by: Joanne Wang --------- Signed-off-by: Joanne Wang Signed-off-by: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Co-authored-by: Surya Sashank Nistala --- .../SecurityAnalyticsPlugin.java | 9 +- .../monitors/opensearch_security.policy | 11 --- .../settings/SecurityAnalyticsSettings.java | 2 +- .../ThreatIntelFeedDataService.java | 99 ++++++++++--------- .../common/ParameterValidator.java | 5 +- .../threatIntel/common/TIFMetadata.java | 4 - .../jobscheduler/TIFJobParameter.java | 22 ++++- .../securityanalytics/util/RuleIndices.java | 28 +----- src/main/resources/feed/config/feeds/otx.yml | 2 +- .../resthandler/DetectorMonitorRestApiIT.java | 2 +- .../integTests/ThreatIntelJobRunnerIT.java | 12 ++- 11 files changed, 92 insertions(+), 104 deletions(-) delete 
mode 100644 src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 96970bac8..4c9feae3e 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -12,6 +12,7 @@ import java.util.Optional; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.core.action.ActionListener; import org.opensearch.action.ActionRequest; import org.opensearch.core.action.ActionResponse; @@ -97,8 +98,8 @@ public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, Map public static final String CORRELATION_RULES_BASE_URI = PLUGINS_BASE_URI + "/correlation/rules"; public static final String CUSTOM_LOG_TYPE_URI = PLUGINS_BASE_URI + "/logtype"; - public static final String JOB_INDEX_NAME = ".opensearch-sap-threatintel-job"; - public static final Map TIF_JOB_INDEX_SETTING = Map.of("index.number_of_shards", 1, "index.auto_expand_replicas", "0-all", "index.hidden", true); + public static final String JOB_INDEX_NAME = ".opensearch-sap-threat-intel-job"; + public static final Map TIF_JOB_INDEX_SETTING = Map.of(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1, IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-all", IndexMetadata.SETTING_INDEX_HIDDEN, true); private CorrelationRuleIndices correlationRuleIndices; @@ -123,7 +124,7 @@ public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, Map private LogTypeService logTypeService; @Override public Collection getSystemIndexDescriptors(Settings settings){ - return List.of(new SystemIndexDescriptor(THREAT_INTEL_DATA_INDEX_NAME_PREFIX, "System index used for threat intel data")); + return 
Collections.singletonList(new SystemIndexDescriptor(THREAT_INTEL_DATA_INDEX_NAME_PREFIX, "System index used for threat intel data")); } @@ -210,7 +211,7 @@ public List getRestHandlers(Settings settings, @Override public String getJobType() { - return "opensearch_sap_threatintel_job"; + return "opensearch_sap_threat_intel_job"; } @Override diff --git a/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy b/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy deleted file mode 100644 index 3a3fe8df5..000000000 --- a/src/main/java/org/opensearch/securityanalytics/config/monitors/opensearch_security.policy +++ /dev/null @@ -1,11 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -grant { - permission java.lang.management.ManagementPermission "reputation.alienvault.com:443" "connect,resolve"; -}; \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java index d3a7b8b6f..f8942e70e 100644 --- a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java +++ b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java @@ -122,7 +122,7 @@ public class SecurityAnalyticsSettings { // threat intel settings public static final Setting TIF_UPDATE_INTERVAL = Setting.timeSetting( "plugins.security_analytics.threatintel.tifjob.update_interval", - TimeValue.timeValueMinutes(1), + TimeValue.timeValueMinutes(1440), Setting.Property.NodeScope, Setting.Property.Dynamic ); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java 
b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index b82d16c20..d527088a8 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -19,6 +19,7 @@ import org.opensearch.action.support.WriteRequest; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; @@ -49,7 +50,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Arrays; import java.util.Optional; import java.util.concurrent.CountDownLatch; import java.util.regex.Matcher; @@ -63,28 +63,30 @@ */ public class ThreatIntelFeedDataService { private static final Logger log = LogManager.getLogger(ThreatIntelFeedDataService.class); - private final Client client; private final IndexNameExpressionResolver indexNameExpressionResolver; + public static final String SETTING_INDEX_REFRESH_INTERVAL = "index.refresh_interval"; + public static final String SETTING_INDEX_BLOCKS_WRITE = "index.blocks.write"; private static final Map INDEX_SETTING_TO_CREATE = Map.of( - "index.number_of_shards", + IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1, - "index.number_of_replicas", + IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0, - "index.refresh_interval", + SETTING_INDEX_REFRESH_INTERVAL, -1, - "index.hidden", + IndexMetadata.SETTING_INDEX_HIDDEN, true ); private static final Map INDEX_SETTING_TO_FREEZE = Map.of( - "index.auto_expand_replicas", + IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-all", - "index.blocks.write", + SETTING_INDEX_BLOCKS_WRITE, true ); private final ClusterService clusterService; private final ClusterSettings 
clusterSettings; + private final NamedXContentRegistry xContentRegistry; public ThreatIntelFeedDataService( ClusterService clusterService, @@ -98,8 +100,6 @@ public ThreatIntelFeedDataService( this.clusterSettings = clusterService.getClusterSettings(); } - private final NamedXContentRegistry xContentRegistry; - public void getThreatIntelFeedData( ActionListener> listener ) { @@ -108,7 +108,7 @@ public void getThreatIntelFeedData( if (IndexUtils.getNewIndexByCreationDate( this.clusterService.state(), this.indexNameExpressionResolver, - ".opensearch-sap-threatintel*" + ".opensearch-sap-threat-intel*" ) == null) { createThreatIntelFeedData(); } @@ -116,7 +116,7 @@ public void getThreatIntelFeedData( String tifdIndex = IndexUtils.getNewIndexByCreationDate( this.clusterService.state(), this.indexNameExpressionResolver, - ".opensearch-sap-threatintel*" + ".opensearch-sap-threat-intel*" ); SearchRequest searchRequest = new SearchRequest(tifdIndex); @@ -127,17 +127,11 @@ public void getThreatIntelFeedData( listener.onFailure(e); })); } catch (InterruptedException e) { - log.error("failed to get threat intel feed data", e); + log.error("Failed to get threat intel feed data", e); listener.onFailure(e); } } - private void createThreatIntelFeedData() throws InterruptedException { - CountDownLatch countDownLatch = new CountDownLatch(1); - client.execute(PutTIFJobAction.INSTANCE, new PutTIFJobRequest("feed_updater", clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL))).actionGet(); - countDownLatch.await(); - } - /** * Create an index for a threat intel feed *

@@ -159,19 +153,6 @@ public void createIndexIfNotExists(final String indexName) { ); } - private String getIndexMapping() { - try { - try (InputStream is = TIFJobParameterService.class.getResourceAsStream("/mappings/threat_intel_feed_mapping.json")) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { - return reader.lines().map(String::trim).collect(Collectors.joining()); - } - } - } catch (IOException e) { - log.error("Runtime exception when getting the threat intel index mapping", e); - throw new SecurityAnalyticsException("Runtime exception when getting the threat intel index mapping", RestStatus.INTERNAL_SERVER_ERROR, e); - } - } - /** * Puts threat intel feed from CSVRecord iterator into a given index in bulk * @@ -221,7 +202,7 @@ public void parseAndSaveThreatIntelFeedDataCSV( } saveTifds(bulkRequest, timeout); renewLock.run(); - freezeIndex(indexName); + setIndexReadOnly(indexName); } public static boolean isValidIp(String ip) { @@ -250,20 +231,6 @@ public void saveTifds(BulkRequest bulkRequest, TimeValue timeout) { } - private void freezeIndex(final String indexName) { - TimeValue timeout = clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT); - StashedThreadContext.run(client, () -> { - client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); - client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); - client.admin() - .indices() - .prepareUpdateSettings(indexName) - .setSettings(INDEX_SETTING_TO_FREEZE) - .execute() - .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); - }); - } - public void deleteThreatIntelDataIndex(final List indices) { if (indices == null || indices.isEmpty()) { return; @@ -294,4 +261,42 @@ public void deleteThreatIntelDataIndex(final List indices) { throw new OpenSearchException("failed to delete data[{}]", String.join(",", indices)); } } + + private void 
createThreatIntelFeedData() throws InterruptedException { + CountDownLatch countDownLatch = new CountDownLatch(1); + client.execute(PutTIFJobAction.INSTANCE, new PutTIFJobRequest("feed_updater", clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL))).actionGet(); + countDownLatch.await(); + } + + private String getIndexMapping() { + try { + try (InputStream is = TIFJobParameterService.class.getResourceAsStream("/mappings/threat_intel_feed_mapping.json")) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + return reader.lines().map(String::trim).collect(Collectors.joining()); + } + } + } catch (IOException e) { + log.error("Runtime exception when getting the threat intel index mapping", e); + throw new SecurityAnalyticsException("Runtime exception when getting the threat intel index mapping", RestStatus.INTERNAL_SERVER_ERROR, e); + } + } + + /** + * Sets the TIFData index as read only to prevent further writing to it + * When index needs to be updated, all TIFData indices will be deleted then repopulated + * @param indexName + */ + private void setIndexReadOnly(final String indexName) { + TimeValue timeout = clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT); + StashedThreadContext.run(client, () -> { + client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); + client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); + client.admin() + .indices() + .prepareUpdateSettings(indexName) + .setSettings(INDEX_SETTING_TO_FREEZE) + .execute() + .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); + }); + } } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java index 9e07c988e..4658557df 100644 --- 
a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/ParameterValidator.java @@ -37,12 +37,9 @@ public List validateTIFJobName(final String tifJobName) { String.format(Locale.ROOT, "threat intel feed job name must not contain the following characters %s", Strings.INVALID_FILENAME_CHARS) ); } - if (tifJobName.contains("#")) { + if (tifJobName.contains("#") || tifJobName.contains(":") ) { errorMsgs.add("threat intel feed job name must not contain '#'"); } - if (tifJobName.contains(":")) { - errorMsgs.add("threat intel feed job name must not contain ':'"); - } if (tifJobName.charAt(0) == '_' || tifJobName.charAt(0) == '-' || tifJobName.charAt(0) == '+') { errorMsgs.add("threat intel feed job name must not start with '_', '-', or '+'"); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java index 6332c80f2..c6e8d78e8 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java @@ -202,10 +202,6 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeBoolean(hasHeader); } - private TIFMetadata() { - } - - @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java index 115ed873e..4ebc3012c 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java @@ -35,7 +35,23 @@ public 
class TIFJobParameter implements Writeable, ScheduledJobParameter { /** * Prefix of indices having threatIntel data */ - public static final String THREAT_INTEL_DATA_INDEX_NAME_PREFIX = ".opensearch-sap-threatintel"; + public static final String THREAT_INTEL_DATA_INDEX_NAME_PREFIX = ".opensearch-sap-threat-intel"; + + + /** + * String fields for job scheduling parameters used for ParseField + */ + private static final String name_field = "name"; + private static final String enabled_field = "update_enabled"; + private static final String last_update_time_field = "last_update_time"; + private static final String last_update_time_field_readable = "last_update_time_field"; + private static final String schedule_field = "schedule"; + private static final String enabled_time_field = "enabled_time"; + private static final String enabled_time_field_readable = "enabled_time_field"; + private static final String state_field = "state"; + private static final String indices_field = "indices"; + private static final String update_stats_field = "update_stats"; + /** @@ -543,15 +559,15 @@ public void setLastFailedAt(Instant now) { */ public static class Builder { public static TIFJobParameter build(final PutTIFJobRequest request) { + long minutes = request.getUpdateInterval().minutes(); String name = request.getName(); IntervalSchedule schedule = new IntervalSchedule( Instant.now().truncatedTo(ChronoUnit.MILLIS), - (int) request.getUpdateInterval().minutes(), + (int) minutes, ChronoUnit.MINUTES ); return new TIFJobParameter(name, schedule); - } } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java b/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java index a2c2be7ee..65762c57f 100644 --- a/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java +++ b/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java @@ -4,18 +4,11 @@ */ package org.opensearch.securityanalytics.util; 
-import java.util.Set; - -import com.google.common.collect.ImmutableMap; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchStatusException; -import org.opensearch.cluster.routing.Preference; -import org.opensearch.core.action.ActionListener; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.bulk.BulkItemResponse; import org.opensearch.action.bulk.BulkRequest; import org.opensearch.action.bulk.BulkResponse; @@ -28,26 +21,23 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.health.ClusterIndexHealth; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.cluster.routing.IndexRoutingTable; +import org.opensearch.cluster.routing.Preference; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.reindex.DeleteByQueryRequestBuilder; import org.opensearch.search.SearchHit; -import org.opensearch.core.rest.RestStatus; import org.opensearch.search.builder.SearchSourceBuilder; import 
org.opensearch.securityanalytics.logtype.LogTypeService; -import org.opensearch.securityanalytics.mapper.MapperUtils; -import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.Rule; import org.opensearch.securityanalytics.rules.backend.OSQueryBackend; import org.opensearch.securityanalytics.rules.backend.QueryBackend; @@ -56,24 +46,14 @@ import org.opensearch.threadpool.ThreadPool; import java.io.IOException; -import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.Charset; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; +import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.opensearch.securityanalytics.model.Detector.NO_ID; import static org.opensearch.securityanalytics.model.Detector.NO_VERSION; public class RuleIndices { diff --git a/src/main/resources/feed/config/feeds/otx.yml b/src/main/resources/feed/config/feeds/otx.yml index 50d19924a..e41637d54 100644 --- a/src/main/resources/feed/config/feeds/otx.yml +++ b/src/main/resources/feed/config/feeds/otx.yml @@ -9,4 +9,4 @@ containedIocs: iocCol: 1; # 0 indexed indexName: otx -# .opensearch-sap-threatintel-otx-00001 \ No newline at end of file +# .opensearch-sap-threat-intel-otx-00001 \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index e5d71024b..a3880ceb2 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1257,7 +1257,7 
@@ public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIn private List getThreatIntelFeedIocs(int num) throws IOException { String request = getMatchAllSearchRequestString(num); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); } diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java index 21501a796..91b4ca125 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java @@ -42,6 +42,7 @@ import static org.opensearch.securityanalytics.TestHelpers.*; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL; import static org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataUtils.getTifdList; public class ThreatIntelJobRunnerIT extends SecurityAnalyticsRestTestCase { @@ -49,6 +50,9 @@ public class ThreatIntelJobRunnerIT extends SecurityAnalyticsRestTestCase { public void testCreateDetector_threatIntelEnabled_testJobRunner() throws IOException, InterruptedException { + // update job runner to run every minute + updateClusterSetting(TIF_UPDATE_INTERVAL.getKey(),"1m"); + // Create a detector updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -169,25 +173,25 @@ protected boolean verifyJobRan(Instant firstUpdatedTime) throws IOException { 
private List getThreatIntelFeedIocs(int num) throws IOException { String request = getMatchNumSearchRequestString(num); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); } private List getThreatIntelFeedIds() throws IOException { String request = getMatchAllSearchRequestString(); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); return getTifdList(res, xContentRegistry()).stream().map(it -> it.getFeedId()).collect(Collectors.toList()); } private List getThreatIntelFeedsTime() throws IOException { String request = getMatchAllSearchRequestString(); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel*", request, false); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); return getTifdList(res, xContentRegistry()).stream().map(it -> it.getTimestamp()).collect(Collectors.toList()); } private List getJobSchedulerParameter() throws IOException { String request = getMatchAllSearchRequestString(); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threatintel-job*", request, false); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel-job*", request, false); return getTIFJobParameterList(res, xContentRegistry()).stream().collect(Collectors.toList()); } public static List getTIFJobParameterList(SearchResponse searchResponse, NamedXContentRegistry xContentRegistry) { From 7a24bd0b54cc487230bf085052d608466358daab Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Thu, 19 Oct 2023 22:40:41 -0700 Subject: [PATCH 24/39] fix TIFJobParameter class 
Signed-off-by: Surya Sashank Nistala --- .../jobscheduler/TIFJobParameter.java | 138 +++++++++--------- 1 file changed, 67 insertions(+), 71 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java index 4ebc3012c..bcbb84c1c 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameter.java @@ -12,7 +12,11 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.*; +import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParserUtils; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; @@ -41,51 +45,35 @@ public class TIFJobParameter implements Writeable, ScheduledJobParameter { /** * String fields for job scheduling parameters used for ParseField */ - private static final String name_field = "name"; - private static final String enabled_field = "update_enabled"; - private static final String last_update_time_field = "last_update_time"; - private static final String last_update_time_field_readable = "last_update_time_field"; - private static final String schedule_field = "schedule"; - private static final String enabled_time_field = "enabled_time"; - private static final String enabled_time_field_readable = "enabled_time_field"; + private static final String NAME_FIELD = "name"; + 
private static final String ENABLED_FIELD = "update_enabled"; + private static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; + private static final String LAST_UPDATE_TIME_FIELD_READABLE = "last_update_time_field"; + private static final String SCHEDULE_FIELD = "schedule"; + private static final String ENABLED_TIME_FIELD = "enabled_time"; + private static final String ENABLED_TIME_FIELD_READABLE = "enabled_time_field"; private static final String state_field = "state"; - private static final String indices_field = "indices"; - private static final String update_stats_field = "update_stats"; - - - - /** - * String fields for job scheduling parameters used for ParseField - */ - private static final String name_field = "name"; - private static final String enabled_field = "update_enabled"; - private static final String last_update_time_field = "last_update_time"; - private static final String last_update_time_field_readable = "last_update_time_field"; - private static final String schedule_field = "schedule"; - private static final String enabled_time_field = "enabled_time"; - private static final String enabled_time_field_readable = "enabled_time_field"; - private static final String state_field = "state"; - private static final String indices_field = "indices"; + private static final String INDICES_FIELD = "indices"; private static final String update_stats_field = "update_stats"; /** * Default fields for job scheduling */ - public static final ParseField NAME_FIELD = new ParseField(name_field); - public static final ParseField ENABLED_FIELD = new ParseField(enabled_field); - public static final ParseField LAST_UPDATE_TIME_FIELD = new ParseField(last_update_time_field); - public static final ParseField LAST_UPDATE_TIME_FIELD_READABLE = new ParseField(last_update_time_field_readable); - public static final ParseField SCHEDULE_FIELD = new ParseField(schedule_field); - public static final ParseField ENABLED_TIME_FIELD = new ParseField(enabled_time_field); - 
public static final ParseField ENABLED_TIME_FIELD_READABLE = new ParseField(enabled_time_field_readable); + public static final ParseField NAME_PARSER_FIELD = new ParseField(NAME_FIELD); + public static final ParseField ENABLED_PARSER_FIELD = new ParseField(ENABLED_FIELD); + public static final ParseField LAST_UPDATE_TIME_PARSER_FIELD = new ParseField(LAST_UPDATE_TIME_FIELD); + public static final ParseField LAST_UPDATE_TIME_FIELD_READABLE_PARSER_FIELD = new ParseField(LAST_UPDATE_TIME_FIELD_READABLE); + public static final ParseField SCHEDULE_PARSER_FIELD = new ParseField(SCHEDULE_FIELD); + public static final ParseField ENABLED_TIME_PARSER_FIELD = new ParseField(ENABLED_TIME_FIELD); + public static final ParseField ENABLED_TIME_FIELD_READABLE_PARSER_FIELD = new ParseField(ENABLED_TIME_FIELD_READABLE); /** * Additional fields for tif job */ - public static final ParseField STATE_FIELD = new ParseField(state_field); - public static final ParseField INDICES_FIELD = new ParseField(indices_field); - public static final ParseField UPDATE_STATS_FIELD = new ParseField(update_stats_field); + public static final ParseField STATE_PARSER_FIELD = new ParseField(state_field); + public static final ParseField INDICES_PARSER_FIELD = new ParseField(INDICES_FIELD); + public static final ParseField UPDATE_STATS_PARSER_FIELD = new ParseField(update_stats_field); /** * Default variables for job scheduling @@ -154,13 +142,13 @@ public static TIFJobParameter parse(XContentParser xcp, String id, Long version) xcp.nextToken(); switch (fieldName) { - case name_field: + case NAME_FIELD: name = xcp.text(); break; - case last_update_time_field: + case LAST_UPDATE_TIME_FIELD: lastUpdateTime = Instant.ofEpochMilli(xcp.longValue()); break; - case enabled_field: + case ENABLED_FIELD: isEnabled = xcp.booleanValue(); break; case state_field: @@ -173,17 +161,17 @@ public static TIFJobParameter parse(XContentParser xcp, String id, Long version) return new TIFJobParameter(name, lastUpdateTime, 
isEnabled, state); } - public static TIFJobState toState(String stateName){ - if (stateName.equals("CREATING")){ + public static TIFJobState toState(String stateName) { + if (stateName.equals("CREATING")) { return TIFJobState.CREATING; } - if (stateName.equals("AVAILABLE")){ + if (stateName.equals("AVAILABLE")) { return TIFJobState.AVAILABLE; } - if (stateName.equals("CREATE_FAILED")){ + if (stateName.equals("CREATE_FAILED")) { return TIFJobState.CREATE_FAILED; } - if (stateName.equals("DELETING")){ + if (stateName.equals("DELETING")) { return TIFJobState.DELETING; } return null; @@ -212,27 +200,28 @@ public TIFJobParameter(final String name, final Instant lastUpdateTime, final Bo List indices = (List) args[6]; UpdateStats updateStats = (UpdateStats) args[7]; TIFJobParameter parameter = new TIFJobParameter( - name, - lastUpdateTime, - enabledTime, - isEnabled, - schedule, - state, - indices, - updateStats + name, + lastUpdateTime, + enabledTime, + isEnabled, + schedule, + state, + indices, + updateStats ); return parameter; } ); + static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_UPDATE_TIME_FIELD); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ENABLED_TIME_FIELD); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduleParser.parse(p), SCHEDULE_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), STATE_FIELD); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), UpdateStats.PARSER, UPDATE_STATS_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_PARSER_FIELD); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_UPDATE_TIME_PARSER_FIELD); + 
PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ENABLED_TIME_PARSER_FIELD); + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_PARSER_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduleParser.parse(p), SCHEDULE_PARSER_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), STATE_PARSER_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_PARSER_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), UpdateStats.PARSER, UPDATE_STATS_PARSER_FIELD); } public TIFJobParameter() { @@ -290,24 +279,24 @@ public void writeTo(final StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); - builder.field(NAME_FIELD.getPreferredName(), name); + builder.field(NAME_PARSER_FIELD.getPreferredName(), name); builder.timeField( - LAST_UPDATE_TIME_FIELD.getPreferredName(), - LAST_UPDATE_TIME_FIELD_READABLE.getPreferredName(), + LAST_UPDATE_TIME_PARSER_FIELD.getPreferredName(), + LAST_UPDATE_TIME_FIELD_READABLE_PARSER_FIELD.getPreferredName(), lastUpdateTime.toEpochMilli() ); if (enabledTime != null) { builder.timeField( - ENABLED_TIME_FIELD.getPreferredName(), - ENABLED_TIME_FIELD_READABLE.getPreferredName(), + ENABLED_TIME_PARSER_FIELD.getPreferredName(), + ENABLED_TIME_FIELD_READABLE_PARSER_FIELD.getPreferredName(), enabledTime.toEpochMilli() ); } - builder.field(ENABLED_FIELD.getPreferredName(), isEnabled); - builder.field(SCHEDULE_FIELD.getPreferredName(), schedule); - builder.field(STATE_FIELD.getPreferredName(), state.name()); - builder.field(INDICES_FIELD.getPreferredName(), indices); - builder.field(UPDATE_STATS_FIELD.getPreferredName(), updateStats); + builder.field(ENABLED_PARSER_FIELD.getPreferredName(), isEnabled); + builder.field(SCHEDULE_PARSER_FIELD.getPreferredName(), schedule); + 
builder.field(STATE_PARSER_FIELD.getPreferredName(), state.name()); + builder.field(INDICES_PARSER_FIELD.getPreferredName(), indices); + builder.field(UPDATE_STATS_PARSER_FIELD.getPreferredName(), updateStats); builder.endObject(); return builder; } @@ -316,6 +305,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa public void setName(String name) { this.name = name; } + public void setEnabledTime(Instant enabledTime) { this.enabledTime = enabledTime; } @@ -332,18 +322,22 @@ public void setIndices(List indices) { public String getName() { return this.name; } + @Override public Instant getLastUpdateTime() { return this.lastUpdateTime; } + @Override public Instant getEnabledTime() { return this.enabledTime; } + @Override public IntervalSchedule getSchedule() { return this.schedule; } + @Override public boolean isEnabled() { return this.isEnabled; @@ -391,7 +385,7 @@ public String newIndexName(final TIFJobParameter jobSchedulerParameter, TIFMetad Optional nameOptional = indices.stream().filter(name -> name.contains(tifMetadata.getFeedId())).findAny(); String suffix = "1"; if (nameOptional.isPresent()) { - String lastChar = "" + nameOptional.get().charAt(nameOptional.get().length() -1); + String lastChar = "" + nameOptional.get().charAt(nameOptional.get().length() - 1); suffix = (lastChar.equals("1")) ? 
"2" : suffix; } return String.format(Locale.ROOT, "%s-%s%s", THREAT_INTEL_DATA_INDEX_NAME_PREFIX, tifMetadata.getFeedId(), suffix); @@ -464,7 +458,8 @@ public Instant getLastSkippedAt() { */ private Instant lastSkippedAt; - private UpdateStats(){} + private UpdateStats() { + } public void setLastSkippedAt(Instant lastSkippedAt) { this.lastSkippedAt = lastSkippedAt; @@ -489,6 +484,7 @@ public void setLastProcessingTimeInMillis(Long lastProcessingTimeInMillis) { return new UpdateStats(lastSucceededAt, lastProcessingTimeInMillis, lastFailedAt, lastSkippedAt); } ); + static { PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_SUCCEEDED_AT_FIELD); PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_PROCESSING_TIME_IN_MILLIS_FIELD); From 74a74400798ec1ee83d718f369db07b0c6c66e0f Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Fri, 20 Oct 2023 02:07:18 -0700 Subject: [PATCH 25/39] test detector updation when feed updation job runs Signed-off-by: Surya Sashank Nistala --- .../DetectorThreatIntelService.java | 64 ++++++++++++------- .../TransportIndexDetectorAction.java | 4 +- .../integTests/ThreatIntelJobRunnerIT.java | 10 ++- 3 files changed, 53 insertions(+), 25 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index b02adef04..589f24703 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -7,12 +7,15 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.search.join.ScoreMode; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.WriteRequest; import 
org.opensearch.client.Client; import org.opensearch.commons.alerting.model.DocLevelQuery; import org.opensearch.core.action.ActionListener; import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestRequest; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.securityanalytics.action.IndexDetectorAction; @@ -144,28 +147,45 @@ private static String constructId(Detector detector, String iocType) { } public void updateDetectorsWithLatestThreatIntelRules() { - //todo : fix query for fetching detectors with threat intel enabled = true -// String searchReq = "{ \"query\": { \"match\": { \"detector.threat_intel_enabled\": true } } }"; - SearchRequest searchRequest = new SearchRequest(DETECTORS_INDEX); - SearchSourceBuilder ssb = searchRequest.source(); - ssb.size(9999); - client.execute(SearchDetectorAction.INSTANCE, new SearchDetectorRequest(new SearchRequest().source(ssb)), - ActionListener.wrap(r -> { - List detectors = getDetectors(r, xContentRegistry); - detectors.forEach(detector -> { - assert detector.getThreatIntelEnabled(); - client.execute(IndexDetectorAction.INSTANCE, new IndexDetectorRequest( - detector.getId(), WriteRequest.RefreshPolicy.IMMEDIATE, - RestRequest.Method.PUT, - detector), - ActionListener.wrap( - res -> log.debug("updated {} with latest threat intel info", res.getDetector().getId()), - e -> log.error(() -> new ParameterizedMessage("Failed to update detector {} with latest threat intel info", detector.getId()), e))); - } - ); - }, e -> { - log.error("Failed to fetch detectors to update with threat intel queries.", e); - })); + try { + QueryBuilder queryBuilder = + QueryBuilders.nestedQuery("detector", + QueryBuilders.boolQuery().must( + QueryBuilders.matchQuery("detector.threat_intel_enabled", true) + ), ScoreMode.Avg); + SearchRequest searchRequest = new SearchRequest(DETECTORS_INDEX); + 
SearchSourceBuilder ssb = searchRequest.source(); + ssb.query(queryBuilder); + ssb.size(9999); + CountDownLatch countDownLatch = new CountDownLatch(1); + client.execute(SearchDetectorAction.INSTANCE, new SearchDetectorRequest(searchRequest), + ActionListener.wrap(r -> { + List detectors = getDetectors(r, xContentRegistry); + detectors.forEach(detector -> { + assert detector.getThreatIntelEnabled(); + client.execute(IndexDetectorAction.INSTANCE, new IndexDetectorRequest( + detector.getId(), WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + detector), + ActionListener.wrap( + res -> { + log.debug("updated {} with latest threat intel info", res.getDetector().getId()); + countDownLatch.countDown(); + }, + e -> { + log.error(() -> new ParameterizedMessage("Failed to update detector {} with latest threat intel info", detector.getId()), e); + countDownLatch.countDown(); + })); + } + ); + }, e -> { + log.error("Failed to fetch detectors to update with threat intel queries.", e); + countDownLatch.countDown(); + })); + countDownLatch.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } } diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index 414591fe4..480ed0152 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ -110,6 +110,7 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -449,7 +450,7 @@ public void onResponse(Map> ruleFieldMappings) { Collectors.toList()); // Process doc level monitors - if (!docLevelRules.isEmpty()) { + if (!docLevelRules.isEmpty() || 
detector.getThreatIntelEnabled()) { if (detector.getDocLevelMonitorId() == null) { monitorsToBeAdded.add(createDocLevelMonitorRequest(docLevelRules, detector, refreshPolicy, Monitor.NO_ID, Method.POST)); } else { @@ -1452,6 +1453,7 @@ public void indexDetector() throws Exception { .source(request.getDetector().toXContentWithUser(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Map.of("with_type", "true")))) .timeout(indexTimeout); } else { + request.getDetector().setLastUpdateTime(Instant.now()); indexRequest = new IndexRequest(Detector.DETECTORS_INDEX) .setRefreshPolicy(request.getRefreshPolicy()) .source(request.getDetector().toXContentWithUser(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Map.of("with_type", "true")))) diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java index 91b4ca125..d8c955f6a 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java @@ -79,6 +79,7 @@ public void testCreateDetector_threatIntelEnabled_testJobRunner() throws IOExcep Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + String request = "{\n" + " \"query\" : {\n" + " \"match_all\":{\n" + @@ -103,6 +104,7 @@ public void testCreateDetector_threatIntelEnabled_testJobRunner() throws IOExcep List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + String detectoraLstUpdateTime1 = detectorMap.get("last_update_time").toString(); List monitorIds = ((List) (detectorMap).get("monitor_id")); assertEquals(1, monitorIds.size()); @@ -153,8 +155,12 @@ public void 
testCreateDetector_threatIntelEnabled_testJobRunner() throws IOExcep assertNotEquals(newFeedTimestamp.get(i), originalFeedTimestamp.get(i)); } - // verify detector is updated by checking last updated time of detector - // TODO + // verify detectors updated with latest threat intel feed data + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + String detectoraLstUpdateTime2 = detectorMap.get("last_update_time").toString(); + assertFalse(detectoraLstUpdateTime2.equals(detectoraLstUpdateTime1)); } From 4dd419044117dab636c19abff0d63976dd55a396 Mon Sep 17 00:00:00 2001 From: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Date: Fri, 20 Oct 2023 10:32:01 -0700 Subject: [PATCH 26/39] removed delete job scheduler code and cleaned up (#678) Signed-off-by: Joanne Wang --- .../SecurityAnalyticsPlugin.java | 8 +- .../ThreatIntelFeedDataService.java | 28 ---- .../action/DeleteTIFJobAction.java | 27 ---- .../action/DeleteTIFJobRequest.java | 62 --------- .../action/RestPutTIFJobHandler.java | 65 --------- .../action/TransportDeleteTIFJobAction.java | 127 ------------------ .../jobscheduler/TIFJobParameterService.java | 23 ---- .../action/DeleteTIFJobRequestTests.java | 65 --------- .../TransportDeleteTIFJobActionTests.java | 127 ------------------ .../TIFJobParameterServiceTests.java | 32 ----- 10 files changed, 2 insertions(+), 562 deletions(-) delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobAction.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java delete mode 100644 src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java delete mode 100644 
src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java delete mode 100644 src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 4c9feae3e..6e67c5798 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -204,8 +204,7 @@ public List getRestHandlers(Settings settings, new RestSearchCorrelationRuleAction(), new RestIndexCustomLogTypeAction(), new RestSearchCustomLogTypeAction(), - new RestDeleteCustomLogTypeAction(), - new RestPutTIFJobHandler(clusterSettings) + new RestDeleteCustomLogTypeAction() ); } @@ -321,10 +320,7 @@ public List> getSettings() { new ActionHandler<>(IndexCustomLogTypeAction.INSTANCE, TransportIndexCustomLogTypeAction.class), new ActionHandler<>(SearchCustomLogTypeAction.INSTANCE, TransportSearchCustomLogTypeAction.class), new ActionHandler<>(DeleteCustomLogTypeAction.INSTANCE, TransportDeleteCustomLogTypeAction.class), - - new ActionHandler<>(PutTIFJobAction.INSTANCE, TransportPutTIFJobAction.class), - new ActionHandler<>(DeleteTIFJobAction.INSTANCE, TransportDeleteTIFJobAction.class) - + new ActionHandler<>(PutTIFJobAction.INSTANCE, TransportPutTIFJobAction.class) ); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index d527088a8..25f9de69d 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -66,8 +66,6 @@ public class ThreatIntelFeedDataService { private final Client 
client; private final IndexNameExpressionResolver indexNameExpressionResolver; public static final String SETTING_INDEX_REFRESH_INTERVAL = "index.refresh_interval"; - public static final String SETTING_INDEX_BLOCKS_WRITE = "index.blocks.write"; - private static final Map INDEX_SETTING_TO_CREATE = Map.of( IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1, @@ -78,12 +76,6 @@ public class ThreatIntelFeedDataService { IndexMetadata.SETTING_INDEX_HIDDEN, true ); - private static final Map INDEX_SETTING_TO_FREEZE = Map.of( - IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, - "0-all", - SETTING_INDEX_BLOCKS_WRITE, - true - ); private final ClusterService clusterService; private final ClusterSettings clusterSettings; private final NamedXContentRegistry xContentRegistry; @@ -202,7 +194,6 @@ public void parseAndSaveThreatIntelFeedDataCSV( } saveTifds(bulkRequest, timeout); renewLock.run(); - setIndexReadOnly(indexName); } public static boolean isValidIp(String ip) { @@ -280,23 +271,4 @@ private String getIndexMapping() { throw new SecurityAnalyticsException("Runtime exception when getting the threat intel index mapping", RestStatus.INTERNAL_SERVER_ERROR, e); } } - - /** - * Sets the TIFData index as read only to prevent further writing to it - * When index needs to be updated, all TIFData indices will be deleted then repopulated - * @param indexName - */ - private void setIndexReadOnly(final String indexName) { - TimeValue timeout = clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT); - StashedThreadContext.run(client, () -> { - client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); - client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); - client.admin() - .indices() - .prepareUpdateSettings(indexName) - .setSettings(INDEX_SETTING_TO_FREEZE) - .execute() - .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); - }); - } } diff --git 
a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobAction.java deleted file mode 100644 index d0fd0bee4..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobAction.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.action.ActionType; -import org.opensearch.action.support.master.AcknowledgedResponse; - -/** - * Threat intel tif job delete action - */ -public class DeleteTIFJobAction extends ActionType { - /** - * Delete tif job action instance - */ - public static final DeleteTIFJobAction INSTANCE = new DeleteTIFJobAction(); - /** - * Delete tif job action name - */ - public static final String NAME = "cluster:admin/security_analytics/tifjob/delete"; - - private DeleteTIFJobAction() { - super(NAME, AcknowledgedResponse::new); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java deleted file mode 100644 index e98cfe586..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.securityanalytics.threatIntel.common.ParameterValidator; - -import java.io.IOException; - -/** - * Threat intel feed job delete request - */ - -public 
class DeleteTIFJobRequest extends ActionRequest { - private static final ParameterValidator VALIDATOR = new ParameterValidator(); - /** - * @param name the TIF job name - * @return the TIF job name - */ - private String name; - - /** - * Constructor - * - * @param in the stream input - * @throws IOException IOException - */ - public DeleteTIFJobRequest(final StreamInput in) throws IOException { - super(in); - this.name = in.readString(); - } - - public DeleteTIFJobRequest(final String name) { - this.name = name; - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException errors = null; - if (VALIDATOR.validateTIFJobName(name).isEmpty() == false) { - errors = new ActionRequestValidationException(); - errors.addValidationError("no such job exists"); - } - return errors; - } - - @Override - public void writeTo(final StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(name); - } - - public String getName() { - return name; - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java deleted file mode 100644 index 641445a57..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/RestPutTIFJobHandler.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.common.settings.ClusterSettings; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.RestToXContentListener; -import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; - -import 
java.io.IOException; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import static org.opensearch.rest.RestRequest.Method.GET; -import static org.opensearch.rest.RestRequest.Method.PUT; - -/** - * Rest handler for threat intel TIFjob creation - * - * This handler handles a request of - * PUT /_plugins/security_analytics/threatintel/tifjob/{id} - * { - * "id": {id}, - * "name": {name}, - * "update_interval_in_days": 1 - * } - * - * When request is received, it will create a TIFjob - * After the creation of TIFjob is completed, it will schedule the next update task after update_interval_in_days. - * - */ -public class RestPutTIFJobHandler extends BaseRestHandler { - private static final String ACTION_NAME = "threatintel_tifjob_put"; - private final ClusterSettings clusterSettings; - - public RestPutTIFJobHandler(final ClusterSettings clusterSettings) { - this.clusterSettings = clusterSettings; - } - - @Override - public String getName() { - return ACTION_NAME; - } - - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final PutTIFJobRequest putTIFJobRequest = new PutTIFJobRequest("jobname", - new TimeValue(1, TimeUnit.MINUTES)); - - return channel -> client.executeLocally(PutTIFJobAction.INSTANCE, putTIFJobRequest, new RestToXContentListener<>(channel)); - } - - @Override - public List routes() { - String path = "/_p/_s"; - return List.of(new Route(GET, path)); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java deleted file mode 100644 index 3a0c68f10..000000000 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobAction.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package 
org.opensearch.securityanalytics.threatIntel.action; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.OpenSearchStatusException; -import org.opensearch.ResourceNotFoundException; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.common.inject.Inject; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.ingest.IngestService; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; -import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; -import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameterService; -import org.opensearch.tasks.Task; -import org.opensearch.threadpool.ThreadPool; -import org.opensearch.transport.TransportService; - -import java.io.IOException; - -/** - * Transport action to delete tif job - */ -public class TransportDeleteTIFJobAction extends HandledTransportAction { - private static final Logger log = LogManager.getLogger(TransportDeleteTIFJobAction.class); - - private static final long LOCK_DURATION_IN_SECONDS = 300l; - private final TIFLockService lockService; - private final IngestService ingestService; - private final TIFJobParameterService tifJobParameterService; - private final ThreatIntelFeedDataService threatIntelFeedDataService; - private final ThreadPool threadPool; - - /** - * Constructor - * @param transportService the transport service - * @param actionFilters the action filters - * @param lockService the lock service - * @param ingestService the ingest service - * @param tifJobParameterService the tif job parameter service facade - */ - 
@Inject - public TransportDeleteTIFJobAction( - final TransportService transportService, - final ActionFilters actionFilters, - final TIFLockService lockService, - final IngestService ingestService, - final TIFJobParameterService tifJobParameterService, - final ThreatIntelFeedDataService threatIntelFeedDataService, - final ThreadPool threadPool - ) { - super(DeleteTIFJobAction.NAME, transportService, actionFilters, DeleteTIFJobRequest::new); - this.lockService = lockService; - this.ingestService = ingestService; - this.tifJobParameterService = tifJobParameterService; - this.threatIntelFeedDataService = threatIntelFeedDataService; - this.threadPool = threadPool; - } - - /** - * We delete TIF job regardless of its state as long as we can acquire a lock - * - * @param task the task - * @param request the request - * @param listener the listener - */ - @Override - protected void doExecute(final Task task, final DeleteTIFJobRequest request, final ActionListener listener) { - lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { - if (lock == null) { - listener.onFailure( - new OpenSearchStatusException("Another processor is holding a lock on the resource. 
Try again later", RestStatus.BAD_REQUEST) - ); - log.error("Another processor is holding lock, BAD_REQUEST exception", RestStatus.BAD_REQUEST); - - return; - } - try { - threadPool.generic().submit(() -> { - try { - deleteTIFJob(request.getName()); - lockService.releaseLock(lock); - listener.onResponse(new AcknowledgedResponse(true)); - } catch (Exception e) { - lockService.releaseLock(lock); - listener.onFailure(e); - log.error("delete tif job failed",e); - } - }); - } catch (Exception e) { - lockService.releaseLock(lock); - listener.onFailure(e); - log.error("Internal server error", e); - } - }, exception -> { listener.onFailure(exception); })); - } - - protected void deleteTIFJob(final String tifJobName) throws IOException { - TIFJobParameter tifJobParameter = tifJobParameterService.getJobParameter(tifJobName); - if (tifJobParameter == null) { - throw new ResourceNotFoundException("no such tifJobParameter exist"); - } - TIFJobState previousState = tifJobParameter.getState(); - tifJobParameter.setState(TIFJobState.DELETING); - tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); - - try { - threatIntelFeedDataService.deleteThreatIntelDataIndex(tifJobParameter.getIndices()); - } catch (Exception e) { - if (previousState.equals(tifJobParameter.getState()) == false) { - tifJobParameter.setState(previousState); - tifJobParameterService.updateJobSchedulerParameter(tifJobParameter); - } - throw e; - } - tifJobParameterService.deleteTIFJobParameter(tifJobParameter); - } -} diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java index b977cb4ba..640b3874b 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterService.java @@ -173,27 +173,4 @@ public 
void saveTIFJobParameter(final TIFJobParameter tifJobParameter, final Act } }); } - - /** - * Delete tifJobParameter in an index {@code TIFJobExtension.JOB_INDEX_NAME} - * - * @param tifJobParameter the tifJobParameter - * - */ - public void deleteTIFJobParameter(final TIFJobParameter tifJobParameter) { - DeleteResponse response = client.prepareDelete() - .setIndex(SecurityAnalyticsPlugin.JOB_INDEX_NAME) - .setId(tifJobParameter.getName()) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .execute() - .actionGet(clusterSettings.get(SecurityAnalyticsSettings.THREAT_INTEL_TIMEOUT)); - - if (response.status().equals(RestStatus.OK)) { - log.info("deleted tifJobParameter[{}] successfully", tifJobParameter.getName()); - } else if (response.status().equals(RestStatus.NOT_FOUND)) { - throw new ResourceNotFoundException("tifJobParameter[{}] does not exist", tifJobParameter.getName()); - } else { - throw new OpenSearchException("failed to delete tifJobParameter[{}] with status[{}]", tifJobParameter.getName(), response.status()); - } - } } diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java deleted file mode 100644 index 2ecd7369b..000000000 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/DeleteTIFJobRequestTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.core.common.io.stream.BytesStreamInput; -import org.opensearch.securityanalytics.TestHelpers; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; - -import java.io.IOException; - -public class DeleteTIFJobRequestTests extends 
ThreatIntelTestCase { - - public void testStreamInOut_whenValidInput_thenSucceed() throws IOException { - String tifJobParameterName = TestHelpers.randomLowerCaseString(); - DeleteTIFJobRequest request = new DeleteTIFJobRequest(tifJobParameterName); - - // Run - BytesStreamOutput output = new BytesStreamOutput(); - request.writeTo(output); - BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); - DeleteTIFJobRequest copiedRequest = new DeleteTIFJobRequest(input); - - // Verify - assertEquals(request.getName(), copiedRequest.getName()); - } - - public void testValidate_whenNull_thenError() { - DeleteTIFJobRequest request = new DeleteTIFJobRequest((String) null); - - // Run - ActionRequestValidationException error = request.validate(); - - // Verify - assertNotNull(error.validationErrors()); - assertFalse(error.validationErrors().isEmpty()); - } - - public void testValidate_whenBlank_thenError() { - DeleteTIFJobRequest request = new DeleteTIFJobRequest(" "); - - // Run - ActionRequestValidationException error = request.validate(); - - // Verify - assertNotNull(error.validationErrors()); - assertFalse(error.validationErrors().isEmpty()); - } - - public void testValidate_whenInvalidTIFJobParameterName_thenFails() { - String invalidName = "_" + TestHelpers.randomLowerCaseString(); - DeleteTIFJobRequest request = new DeleteTIFJobRequest(invalidName); - - // Run - ActionRequestValidationException exception = request.validate(); - - // Verify - assertEquals(1, exception.validationErrors().size()); - assertTrue(exception.validationErrors().get(0).contains("no such job exists")); - } -} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java deleted file mode 100644 index 7d15d7710..000000000 --- 
a/src/test/java/org/opensearch/securityanalytics/threatIntel/action/TransportDeleteTIFJobActionTests.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.securityanalytics.threatIntel.action; - -import org.junit.Before; -import org.mockito.ArgumentCaptor; -import org.mockito.InOrder; -import org.mockito.Mockito; -import org.opensearch.OpenSearchException; -import org.opensearch.ResourceNotFoundException; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.core.action.ActionListener; -import org.opensearch.jobscheduler.spi.LockModel; -import org.opensearch.securityanalytics.threatIntel.ThreatIntelTestCase; -import org.opensearch.securityanalytics.threatIntel.common.TIFJobState; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; -import org.opensearch.tasks.Task; -import org.opensearch.securityanalytics.TestHelpers; - - -import java.io.IOException; -import java.time.Instant; - -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - -public class TransportDeleteTIFJobActionTests extends ThreatIntelTestCase { - private TransportDeleteTIFJobAction action; - - @Before - public void init() { - action = new TransportDeleteTIFJobAction( - transportService, - actionFilters, - tifLockService, - ingestService, - tifJobParameterService, - threatIntelFeedDataService, - threadPool - ); - } - - public void testDoExecute_whenFailedToAcquireLock_thenError() throws IOException { - validateDoExecute(null, null); - } - - public void testDoExecute_whenValidInput_thenSucceed() throws IOException { - String jobIndexName = TestHelpers.randomLowerCaseString(); - String jobId = TestHelpers.randomLowerCaseString(); - LockModel lockModel = new LockModel(jobIndexName, jobId, Instant.now(), randomPositiveLong(), false); - validateDoExecute(lockModel, null); - } - - public void 
testDoExecute_whenException_thenError() throws IOException { - validateDoExecute(null, new RuntimeException()); - } - - private void validateDoExecute(final LockModel lockModel, final Exception exception) throws IOException { - Task task = mock(Task.class); - TIFJobParameter tifJobParameter = randomTifJobParameter(); - when(tifJobParameterService.getJobParameter(tifJobParameter.getName())).thenReturn(tifJobParameter); - DeleteTIFJobRequest request = new DeleteTIFJobRequest(tifJobParameter.getName()); - ActionListener listener = mock(ActionListener.class); - - // Run - action.doExecute(task, request, listener); - - // Verify - ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); - verify(tifLockService).acquireLock(eq(tifJobParameter.getName()), anyLong(), captor.capture()); - - if (exception == null) { - // Run - captor.getValue().onResponse(lockModel); - - // Verify - if (lockModel == null) { - verify(listener).onFailure(any(OpenSearchException.class)); - } else { - verify(listener).onResponse(new AcknowledgedResponse(true)); - verify(tifLockService).releaseLock(eq(lockModel)); - } - } else { - // Run - captor.getValue().onFailure(exception); - // Verify - verify(listener).onFailure(exception); - } - } - - public void testDeleteTIFJobParameter_whenNull_thenThrowException() { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - expectThrows(ResourceNotFoundException.class, () -> action.deleteTIFJob(tifJobParameter.getName())); - } - - public void testDeleteTIFJobParameter_whenSafeToDelete_thenDelete() throws IOException { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - when(tifJobParameterService.getJobParameter(tifJobParameter.getName())).thenReturn(tifJobParameter); - - // Run - action.deleteTIFJob(tifJobParameter.getName()); - - // Verify - assertEquals(TIFJobState.DELETING, tifJobParameter.getState()); - verify(tifJobParameterService).updateJobSchedulerParameter(tifJobParameter); - InOrder inOrder = 
Mockito.inOrder(threatIntelFeedDataService, tifJobParameterService); - inOrder.verify(threatIntelFeedDataService).deleteThreatIntelDataIndex(tifJobParameter.getIndices()); - inOrder.verify(tifJobParameterService).deleteTIFJobParameter(tifJobParameter); - } - - public void testDeleteTIFJobParameter_whenDeleteFailsAfterStateIsChanged_thenRevertState() throws IOException { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - tifJobParameter.setState(TIFJobState.AVAILABLE); - when(tifJobParameterService.getJobParameter(tifJobParameter.getName())).thenReturn(tifJobParameter); - doThrow(new RuntimeException()).when(threatIntelFeedDataService).deleteThreatIntelDataIndex(tifJobParameter.getIndices()); - - // Run - expectThrows(RuntimeException.class, () -> action.deleteTIFJob(tifJobParameter.getName())); - - // Verify - verify(tifJobParameterService, times(2)).updateJobSchedulerParameter(tifJobParameter); - assertEquals(TIFJobState.AVAILABLE, tifJobParameter.getState()); - } -} diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java index 35fd2450d..6e3b83a78 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/jobscheduler/TIFJobParameterServiceTests.java @@ -185,38 +185,6 @@ private TIFJobParameter setupClientForGetRequest(final boolean isExist, final Ru return tifJobParameter; } - public void testDeleteTifJobParameter_whenValidInput_thenSucceed() { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - // Verify - assertTrue(actionRequest instanceof DeleteRequest); - DeleteRequest request = (DeleteRequest) actionRequest; - assertEquals(SecurityAnalyticsPlugin.JOB_INDEX_NAME, 
request.index()); - assertEquals(DocWriteRequest.OpType.DELETE, request.opType()); - assertEquals(tifJobParameter.getName(), request.id()); - assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); - - DeleteResponse response = mock(DeleteResponse.class); - when(response.status()).thenReturn(RestStatus.OK); - return response; - }); - - // Run - tifJobParameterService.deleteTIFJobParameter(tifJobParameter); - } - - public void testDeleteTifJobParameter_whenIndexNotFound_thenThrowException() { - TIFJobParameter tifJobParameter = randomTifJobParameter(); - verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { - DeleteResponse response = mock(DeleteResponse.class); - when(response.status()).thenReturn(RestStatus.NOT_FOUND); - return response; - }); - - // Run - expectThrows(ResourceNotFoundException.class, () -> tifJobParameterService.deleteTIFJobParameter(tifJobParameter)); - } - private GetResponse getMockedGetResponse(TIFJobParameter tifJobParameter) { GetResponse response = mock(GetResponse.class); when(response.isExists()).thenReturn(tifJobParameter != null); From a5136bf8c652590ab9aa8a16332647b25226d943 Mon Sep 17 00:00:00 2001 From: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Date: Fri, 20 Oct 2023 16:28:56 -0700 Subject: [PATCH 27/39] working integ test (#680) Signed-off-by: Joanne Wang --- .../securityanalytics/resthandler/DetectorMonitorRestApiIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index a3880ceb2..50f2bf889 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1121,7 +1121,7 @@ public void 
testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt List iocs = getThreatIntelFeedIocs(3); int i=1; for (String ioc : iocs) { - indexDoc(index, i+"", randomDoc(5, 3, i==1? "120.85.114.146" : "120.86.237.94")); + indexDoc(index, i+"", randomDocWithIpIoc(5, 3, i==1? "120.85.114.146" : "120.86.237.94")); i++; } String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); From 24a7e51fc1b7a3982030ce17f494ab1da2f54fb4 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Fri, 20 Oct 2023 17:26:37 -0700 Subject: [PATCH 28/39] fix timeout of tif job creation Signed-off-by: Surya Sashank Nistala --- .../ThreatIntelFeedDataService.java | 18 +++++++++++++++++- .../resthandler/DetectorMonitorRestApiIT.java | 2 +- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 25f9de69d..6e22a6b8a 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -255,7 +255,23 @@ public void deleteThreatIntelDataIndex(final List indices) { private void createThreatIntelFeedData() throws InterruptedException { CountDownLatch countDownLatch = new CountDownLatch(1); - client.execute(PutTIFJobAction.INSTANCE, new PutTIFJobRequest("feed_updater", clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL))).actionGet(); + client.execute( + PutTIFJobAction.INSTANCE, + new PutTIFJobRequest("feed_updater", clusterSettings.get(SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL)), + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + log.debug("Acknowledged threat intel feed updater job created"); + countDownLatch.countDown(); + } + + @Override + public void onFailure(Exception e) 
{ + log.debug("Failed to create threat intel feed updater job", e); + countDownLatch.countDown(); + } + } + ); countDownLatch.await(); } diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 50f2bf889..61634ab46 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -1121,7 +1121,7 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt List iocs = getThreatIntelFeedIocs(3); int i=1; for (String ioc : iocs) { - indexDoc(index, i+"", randomDocWithIpIoc(5, 3, i==1? "120.85.114.146" : "120.86.237.94")); + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); i++; } String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); From 31eebcb5d0fafcbae46ae9366dd02e06a93fa161 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Sun, 22 Oct 2023 00:38:08 -0700 Subject: [PATCH 29/39] remove unncessary thread forking in put tif job action Signed-off-by: Surya Sashank Nistala --- .../securityanalytics/model/Detector.java | 4 ++-- .../action/TransportPutTIFJobAction.java | 16 ++++++---------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/model/Detector.java b/src/main/java/org/opensearch/securityanalytics/model/Detector.java index 4ffca565d..5a8e2f32b 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/Detector.java +++ b/src/main/java/org/opensearch/securityanalytics/model/Detector.java @@ -175,7 +175,7 @@ public Detector(StreamInput sin) throws IOException { sin.readString(), sin.readMap(StreamInput::readString, StreamInput::readString), sin.readStringList(), - sin.readOptionalBoolean() + sin.readBoolean() ); } @@ -214,7 +214,7 @@ public void writeTo(StreamOutput out) 
throws IOException { if (workflowIds != null) { out.writeStringCollection(workflowIds); } - out.writeOptionalBoolean(threatIntelEnabled); + out.writeBoolean(threatIntelEnabled); } public XContentBuilder toXContentWithUser(XContentBuilder builder, Params params) throws IOException { diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java index 6ad3a04bd..00437c94a 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java @@ -125,16 +125,12 @@ protected ActionListener postIndexingTifJobParameter( return new ActionListener<>() { @Override public void onResponse(final IndexResponse indexResponse) { - // This is user initiated request. Therefore, we want to handle the first tifJobParameter update task in a generic thread - // pool. 
- threadPool.generic().submit(() -> { - AtomicReference lockReference = new AtomicReference<>(lock); - try { - createThreatIntelFeedData(tifJobParameter, lockService.getRenewLockRunnable(lockReference)); - } finally { - lockService.releaseLock(lockReference.get()); - } - }); + AtomicReference lockReference = new AtomicReference<>(lock); + try { + createThreatIntelFeedData(tifJobParameter, lockService.getRenewLockRunnable(lockReference)); + } finally { + lockService.releaseLock(lockReference.get()); + } listener.onResponse(new AcknowledgedResponse(true)); } From ae084e71656054ffbc73d39c6c819ff3bc857245 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Sun, 22 Oct 2023 02:02:44 -0700 Subject: [PATCH 30/39] refactoring code to address review comments Signed-off-by: Surya Sashank Nistala --- .../SecurityAnalyticsPlugin.java | 15 ++++++-- .../securityanalytics/model/LogType.java | 13 +++---- .../model/ThreatIntelFeedData.java | 2 +- .../DetectorThreatIntelService.java | 22 +++++++---- .../ThreatIntelFeedDataService.java | 37 ++++++++++--------- .../action/TransportPutTIFJobAction.java | 1 + .../integTests/ThreatIntelJobRunnerIT.java | 14 ++++--- 7 files changed, 60 insertions(+), 44 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index 6e67c5798..81fc4be38 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -40,7 +40,13 @@ import org.opensearch.jobscheduler.spi.JobSchedulerExtension; import org.opensearch.jobscheduler.spi.ScheduledJobParser; import org.opensearch.jobscheduler.spi.ScheduledJobRunner; -import org.opensearch.plugins.*; +import org.opensearch.plugins.ActionPlugin; +import org.opensearch.plugins.ClusterPlugin; +import org.opensearch.plugins.EnginePlugin; +import org.opensearch.plugins.MapperPlugin; 
+import org.opensearch.plugins.Plugin; +import org.opensearch.plugins.SearchPlugin; +import org.opensearch.plugins.SystemIndexPlugin; import org.opensearch.repositories.RepositoriesService; import org.opensearch.rest.RestController; import org.opensearch.rest.RestHandler; @@ -59,7 +65,8 @@ import org.opensearch.securityanalytics.resthandler.*; import org.opensearch.securityanalytics.threatIntel.DetectorThreatIntelService; import org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataService; -import org.opensearch.securityanalytics.threatIntel.action.*; +import org.opensearch.securityanalytics.threatIntel.action.PutTIFJobAction; +import org.opensearch.securityanalytics.threatIntel.action.TransportPutTIFJobAction; import org.opensearch.securityanalytics.threatIntel.common.TIFLockService; import org.opensearch.securityanalytics.threatIntel.feedMetadata.BuiltInTIFMetadataLoader; import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; @@ -98,7 +105,7 @@ public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, Map public static final String CORRELATION_RULES_BASE_URI = PLUGINS_BASE_URI + "/correlation/rules"; public static final String CUSTOM_LOG_TYPE_URI = PLUGINS_BASE_URI + "/logtype"; - public static final String JOB_INDEX_NAME = ".opensearch-sap-threat-intel-job"; + public static final String JOB_INDEX_NAME = ".opensearch-sap--job"; public static final Map TIF_JOB_INDEX_SETTING = Map.of(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1, IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-all", IndexMetadata.SETTING_INDEX_HIDDEN, true); private CorrelationRuleIndices correlationRuleIndices; @@ -210,7 +217,7 @@ public List getRestHandlers(Settings settings, @Override public String getJobType() { - return "opensearch_sap_threat_intel_job"; + return "opensearch_sap_job"; } @Override diff --git a/src/main/java/org/opensearch/securityanalytics/model/LogType.java b/src/main/java/org/opensearch/securityanalytics/model/LogType.java 
index 8cee7ab23..9bdb96d1a 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/LogType.java +++ b/src/main/java/org/opensearch/securityanalytics/model/LogType.java @@ -67,14 +67,11 @@ public LogType(Map logTypeAsMap) { new Mapping(e.get(RAW_FIELD), e.get(ECS), e.get(OCSF)) ).collect(Collectors.toList()); } - if(logTypeAsMap.containsKey(IOC_FIELDS)) { + if (logTypeAsMap.containsKey(IOC_FIELDS)) { List> iocFieldsList = (List>) logTypeAsMap.get(IOC_FIELDS); - if (iocFieldsList.size() > 0) { - this.iocFieldsList = new ArrayList<>(mappings.size()); - this.iocFieldsList = iocFieldsList.stream().map(e -> - new IocFields(e.get(IOC).toString(), (List) e.get(FIELDS)) - ).collect(Collectors.toList()); - } + this.iocFieldsList = iocFieldsList.stream().map(e -> + new IocFields(e.get(IOC).toString(), (List) e.get(FIELDS)) + ).collect(Collectors.toList()); } else { iocFieldsList = Collections.emptyList(); } @@ -159,8 +156,8 @@ public static Mapping readFrom(StreamInput sin) throws IOException { * stores information of list of field names that contain information for given IoC (Indicator of Compromise). 
*/ public static class IocFields implements Writeable { - private final String ioc; + private final String ioc; private final List fields; public IocFields(String ioc, List fields) { diff --git a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java index 7696b331e..9f9f5d855 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java +++ b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java @@ -138,8 +138,8 @@ private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXConten if (params.paramAsBoolean("with_type", false)) { builder.startObject(type); } - builder.field(TYPE_FIELD, type); builder + .field(TYPE_FIELD, type) .field(IOC_TYPE_FIELD, iocType) .field(IOC_VALUE_FIELD, iocValue) .field(FEED_ID_FIELD, feedId) diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index 589f24703..5dde43788 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -39,7 +39,9 @@ import static org.opensearch.securityanalytics.model.Detector.DETECTORS_INDEX; import static org.opensearch.securityanalytics.util.DetectorUtils.getDetectors; - +/** + * Service that populates detectors with queries generated from threat intelligence data. 
+ */ public class DetectorThreatIntelService { private static final Logger log = LogManager.getLogger(DetectorThreatIntelService.class); @@ -77,7 +79,7 @@ public List createDocLevelQueriesFromThreatIntelList( List fields = iocFieldList.stream().filter(t -> entry.getKey().matches(t.getIoc())).findFirst().get().getFields(); // create doc - for (String field : fields) { //todo increase max clause count from 1024 + for (String field : fields) { queries.add(new DocLevelQuery( constructId(detector, entry.getKey()), tifdList.get(0).getFeedId(), Collections.emptyList(), @@ -105,6 +107,9 @@ private String buildQueryStringQueryWithIocList(Set iocs) { return sb.toString(); } + /** + * Fetches threat intel data and creates doc level queries from threat intel data + */ public void createDocLevelQueryFromThreatIntel(List iocFieldList, Detector detector, ActionListener> listener) { try { if (false == detector.getThreatIntelEnabled() || iocFieldList.isEmpty()) { @@ -146,6 +151,7 @@ private static String constructId(Detector detector, String iocType) { return detector.getName() + "_threat_intel_" + iocType + "_" + UUID.randomUUID(); } + /** Updates all detectors having threat intel detection enabled with the latest threat intel feed data*/ public void updateDetectorsWithLatestThreatIntelRules() { try { QueryBuilder queryBuilder = @@ -159,8 +165,8 @@ public void updateDetectorsWithLatestThreatIntelRules() { ssb.size(9999); CountDownLatch countDownLatch = new CountDownLatch(1); client.execute(SearchDetectorAction.INSTANCE, new SearchDetectorRequest(searchRequest), - ActionListener.wrap(r -> { - List detectors = getDetectors(r, xContentRegistry); + ActionListener.wrap(searchResponse -> { + List detectors = getDetectors(searchResponse, xContentRegistry); detectors.forEach(detector -> { assert detector.getThreatIntelEnabled(); client.execute(IndexDetectorAction.INSTANCE, new IndexDetectorRequest( @@ -168,8 +174,8 @@ public void updateDetectorsWithLatestThreatIntelRules() { 
RestRequest.Method.PUT, detector), ActionListener.wrap( - res -> { - log.debug("updated {} with latest threat intel info", res.getDetector().getId()); + indexDetectorResponse -> { + log.debug("updated {} with latest threat intel info", indexDetectorResponse.getDetector().getId()); countDownLatch.countDown(); }, e -> { @@ -182,9 +188,9 @@ public void updateDetectorsWithLatestThreatIntelRules() { log.error("Failed to fetch detectors to update with threat intel queries.", e); countDownLatch.countDown(); })); - countDownLatch.await(); + countDownLatch.await(5, TimeUnit.MINUTES); } catch (InterruptedException e) { - throw new RuntimeException(e); + log.error(""); } diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java index 6e22a6b8a..40bc7bc53 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataService.java @@ -62,9 +62,9 @@ * Service to handle CRUD operations on Threat Intel Feed Data */ public class ThreatIntelFeedDataService { + private static final Logger log = LogManager.getLogger(ThreatIntelFeedDataService.class); - private final Client client; - private final IndexNameExpressionResolver indexNameExpressionResolver; + public static final String SETTING_INDEX_REFRESH_INTERVAL = "index.refresh_interval"; private static final Map INDEX_SETTING_TO_CREATE = Map.of( IndexMetadata.SETTING_NUMBER_OF_SHARDS, @@ -76,9 +76,12 @@ public class ThreatIntelFeedDataService { IndexMetadata.SETTING_INDEX_HIDDEN, true ); + private final ClusterService clusterService; private final ClusterSettings clusterSettings; private final NamedXContentRegistry xContentRegistry; + private final Client client; + private final IndexNameExpressionResolver indexNameExpressionResolver; public ThreatIntelFeedDataService( 
ClusterService clusterService, @@ -96,26 +99,18 @@ public void getThreatIntelFeedData( ActionListener> listener ) { try { - //if index not exists - if (IndexUtils.getNewIndexByCreationDate( - this.clusterService.state(), - this.indexNameExpressionResolver, - ".opensearch-sap-threat-intel*" - ) == null) { + + String tifdIndex = getLatestIndexByCreationDate(); + if (tifdIndex == null) { createThreatIntelFeedData(); + tifdIndex = getLatestIndexByCreationDate(); } - //if index exists - String tifdIndex = IndexUtils.getNewIndexByCreationDate( - this.clusterService.state(), - this.indexNameExpressionResolver, - ".opensearch-sap-threat-intel*" - ); - SearchRequest searchRequest = new SearchRequest(tifdIndex); searchRequest.source().size(9999); //TODO: convert to scroll + String finalTifdIndex = tifdIndex; client.search(searchRequest, ActionListener.wrap(r -> listener.onResponse(ThreatIntelFeedDataUtils.getTifdList(r, xContentRegistry)), e -> { log.error(String.format( - "Failed to fetch threat intel feed data from system index %s", tifdIndex), e); + "Failed to fetch threat intel feed data from system index %s", finalTifdIndex), e); listener.onFailure(e); })); } catch (InterruptedException e) { @@ -124,6 +119,14 @@ public void getThreatIntelFeedData( } } + private String getLatestIndexByCreationDate() { + return IndexUtils.getNewIndexByCreationDate( + this.clusterService.state(), + this.indexNameExpressionResolver, + THREAT_INTEL_DATA_INDEX_NAME_PREFIX + "*" + ); + } + /** * Create an index for a threat intel feed *

@@ -169,7 +172,7 @@ public void parseAndSaveThreatIntelFeedDataCSV( List tifdList = new ArrayList<>(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); - String iocType = tifMetadata.getIocType(); //todo make generic in upcoming versions + String iocType = tifMetadata.getIocType(); Integer colNum = tifMetadata.getIocCol(); String iocValue = record.values()[colNum].split(" ")[0]; if (iocType.equals("ip") && !isValidIp(iocValue)) { diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java index 00437c94a..1346da40c 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/action/TransportPutTIFJobAction.java @@ -37,6 +37,7 @@ * Transport action to create job to fetch threat intel feed data and save IoCs */ public class TransportPutTIFJobAction extends HandledTransportAction { + // TODO refactor this into a service class that creates feed updation job. 
This is not necessary to be a transport action private static final Logger log = LogManager.getLogger(TransportPutTIFJobAction.class); private final ThreadPool threadPool; diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java index d8c955f6a..8b2055ed3 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java @@ -40,6 +40,7 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import static org.opensearch.securityanalytics.SecurityAnalyticsPlugin.JOB_INDEX_NAME; import static org.opensearch.securityanalytics.TestHelpers.*; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.TIF_UPDATE_INTERVAL; @@ -51,8 +52,8 @@ public class ThreatIntelJobRunnerIT extends SecurityAnalyticsRestTestCase { public void testCreateDetector_threatIntelEnabled_testJobRunner() throws IOException, InterruptedException { // update job runner to run every minute - updateClusterSetting(TIF_UPDATE_INTERVAL.getKey(),"1m"); - + updateClusterSetting(TIF_UPDATE_INTERVAL.getKey(), "1m"); + // Create a detector updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -115,7 +116,7 @@ public void testCreateDetector_threatIntelEnabled_testJobRunner() throws IOExcep // Verify workflow verifyWorkflow(detectorMap, monitorIds, 1); List iocs = getThreatIntelFeedIocs(3); - assertEquals(iocs.size(),3); + assertEquals(iocs.size(), 3); // get job runner index and verify parameters exist List jobMetaDataList = getJobSchedulerParameter(); @@ -151,7 +152,7 @@ public void 
testCreateDetector_threatIntelEnabled_testJobRunner() throws IOExcep // verify new threat intel feed timestamp is different List newFeedTimestamp = getThreatIntelFeedsTime(); - for (int i =0; i< newFeedTimestamp.size(); i++) { + for (int i = 0; i < newFeedTimestamp.size(); i++) { assertNotEquals(newFeedTimestamp.get(i), originalFeedTimestamp.get(i)); } @@ -171,7 +172,7 @@ protected boolean verifyJobRan(Instant firstUpdatedTime) throws IOException { TIFJobParameter newJobMetaData = newJobMetaDataList.get(0); Instant newUpdatedTime = newJobMetaData.getLastUpdateTime(); - if (!firstUpdatedTime.toString().equals(newUpdatedTime.toString())){ + if (!firstUpdatedTime.toString().equals(newUpdatedTime.toString())) { return true; } return false; @@ -197,9 +198,10 @@ private List getThreatIntelFeedsTime() throws IOException { private List getJobSchedulerParameter() throws IOException { String request = getMatchAllSearchRequestString(); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel-job*", request, false); + SearchResponse res = executeSearchAndGetResponse(JOB_INDEX_NAME + "*", request, false); return getTIFJobParameterList(res, xContentRegistry()).stream().collect(Collectors.toList()); } + public static List getTIFJobParameterList(SearchResponse searchResponse, NamedXContentRegistry xContentRegistry) { List list = new ArrayList<>(); if (searchResponse.getHits().getHits().length != 0) { From 28f3ba828d1cf5cff1992a7aabb499e25a47e195 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Fri, 20 Oct 2023 11:57:37 -0700 Subject: [PATCH 31/39] detector trigger detection types Signed-off-by: Surya Sashank Nistala --- .../model/DetectorTrigger.java | 175 ++++++++++++------ .../resthandler/RestIndexDetectorAction.java | 16 ++ .../DetectorThreatIntelService.java | 9 +- src/main/resources/feed/config/feeds.yml | 3 - src/main/resources/feed/config/feeds/otx.yml | 12 -- .../SecurityAnalyticsRestTestCase.java | 17 ++ 
.../securityanalytics/TestHelpers.java | 6 +- .../securityanalytics/alerts/AlertsIT.java | 26 +-- .../alerts/SecureAlertsRestApiIT.java | 6 +- .../CorrelationEngineRestApiIT.java | 10 +- .../securityanalytics/findings/FindingIT.java | 14 +- .../findings/SecureFindingRestApiIT.java | 6 +- .../resthandler/DetectorMonitorRestApiIT.java | 81 ++++---- .../resthandler/DetectorRestApiIT.java | 32 ++-- .../integTests/ThreatIntelJobRunnerIT.java | 6 - 15 files changed, 245 insertions(+), 174 deletions(-) delete mode 100644 src/main/resources/feed/config/feeds.yml delete mode 100644 src/main/resources/feed/config/feeds/otx.yml diff --git a/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java b/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java index b74a71048..ed74ea9e0 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java +++ b/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java @@ -49,13 +49,23 @@ public class DetectorTrigger implements Writeable, ToXContentObject { private List actions; + /** + * detection type is a list of values that tells us what queries is the trigger trying to match - rules-based or threat_intel-based or both + */ + private List detectionTypes; // todo make it enum supports 'rules', 'threat_intel' + private static final String ID_FIELD = "id"; + private static final String SEVERITY_FIELD = "severity"; private static final String RULE_TYPES_FIELD = "types"; private static final String RULE_IDS_FIELD = "ids"; private static final String RULE_SEV_LEVELS_FIELD = "sev_levels"; private static final String RULE_TAGS_FIELD = "tags"; private static final String ACTIONS_FIELD = "actions"; + private static final String DETECTION_TYPES_FIELD = "detection_types"; + + public static final String RULES_DETECTION_TYPE = "rules"; + public static final String THREAT_INTEL_DETECTION_TYPE = "threat_intel"; public static final NamedXContentRegistry.Entry XCONTENT_REGISTRY = 
new NamedXContentRegistry.Entry( DetectorTrigger.class, @@ -63,17 +73,29 @@ public class DetectorTrigger implements Writeable, ToXContentObject { DetectorTrigger::parse ); - public DetectorTrigger(String id, String name, String severity, List ruleTypes, List ruleIds, List ruleSeverityLevels, List tags, List actions) { - this.id = id == null? UUIDs.base64UUID(): id; + public DetectorTrigger(String id, + String name, + String severity, + List ruleTypes, + List ruleIds, + List ruleSeverityLevels, + List tags, + List actions, + List detectionTypes) { + this.id = id == null ? UUIDs.base64UUID() : id; this.name = name; this.severity = severity; this.ruleTypes = ruleTypes.stream() - .map( e -> e.toLowerCase(Locale.ROOT)) + .map(e -> e.toLowerCase(Locale.ROOT)) .collect(Collectors.toList()); this.ruleIds = ruleIds; this.ruleSeverityLevels = ruleSeverityLevels; this.tags = tags; this.actions = actions; + this.detectionTypes = detectionTypes; + if(this.detectionTypes.isEmpty()) { + this.detectionTypes = Collections.singletonList(RULES_DETECTION_TYPE); // for backward compatibility + } } public DetectorTrigger(StreamInput sin) throws IOException { @@ -85,7 +107,8 @@ public DetectorTrigger(StreamInput sin) throws IOException { sin.readStringList(), sin.readStringList(), sin.readStringList(), - sin.readList(Action::readFrom) + sin.readList(Action::readFrom), + sin.readStringList() ); } @@ -95,7 +118,8 @@ public Map asTemplateArg() { RULE_IDS_FIELD, ruleIds, RULE_SEV_LEVELS_FIELD, ruleSeverityLevels, RULE_TAGS_FIELD, tags, - ACTIONS_FIELD, actions.stream().map(Action::asTemplateArg) + ACTIONS_FIELD, actions.stream().map(Action::asTemplateArg), + DETECTION_TYPES_FIELD, detectionTypes ); } @@ -109,6 +133,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(ruleSeverityLevels); out.writeStringCollection(tags); out.writeCollection(actions); + out.writeStringCollection(detectionTypes); } @Override @@ -128,6 +153,9 @@ public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws Action[] actionArray = new Action[]{}; actionArray = actions.toArray(actionArray); + String[] detectionTypesArray = new String[]{}; + detectionTypesArray = detectionTypes.toArray(detectionTypesArray); + return builder.startObject() .field(ID_FIELD, id) .field(Detector.NAME_FIELD, name) @@ -137,6 +165,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws .field(RULE_SEV_LEVELS_FIELD, ruleSevLevelArray) .field(RULE_TAGS_FIELD, tagArray) .field(ACTIONS_FIELD, actionArray) + .field(DETECTION_TYPES_FIELD, detectionTypesArray) .endObject(); } @@ -149,6 +178,7 @@ public static DetectorTrigger parse(XContentParser xcp) throws IOException { List ruleSeverityLevels = new ArrayList<>(); List tags = new ArrayList<>(); List actions = new ArrayList<>(); + List detectionTypes = new ArrayList<>(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { @@ -193,6 +223,13 @@ public static DetectorTrigger parse(XContentParser xcp) throws IOException { tags.add(tag); } break; + case DETECTION_TYPES_FIELD: + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + String dt = xcp.text(); + detectionTypes.add(dt); + } + break; case ACTIONS_FIELD: XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { @@ -204,8 +241,10 @@ public static DetectorTrigger parse(XContentParser xcp) throws IOException { xcp.skipChildren(); } } - - return new DetectorTrigger(id, name, severity, ruleTypes, ruleNames, ruleSeverityLevels, tags, actions); + if(detectionTypes.isEmpty()) { + detectionTypes.add(RULES_DETECTION_TYPE); // for backward compatibility + } + return new DetectorTrigger(id, name, severity, 
ruleTypes, ruleNames, ruleSeverityLevels, tags, actions, detectionTypes); } public static DetectorTrigger readFrom(StreamInput sin) throws IOException { @@ -227,71 +266,83 @@ public int hashCode() { public Script convertToCondition() { StringBuilder condition = new StringBuilder(); + boolean triggerFlag = false; - StringBuilder ruleTypeBuilder = new StringBuilder(); - int size = ruleTypes.size(); - for (int idx = 0; idx < size; ++idx) { - ruleTypeBuilder.append(String.format(Locale.getDefault(), "query[tag=%s]", ruleTypes.get(idx))); - if (idx < size - 1) { - ruleTypeBuilder.append(" || "); + int size = 0; + if (detectionTypes.contains(RULES_DETECTION_TYPE)) { // trigger should match rules based queries based on conditions + StringBuilder ruleTypeBuilder = new StringBuilder(); + size = ruleTypes.size(); + for (int idx = 0; idx < size; ++idx) { + ruleTypeBuilder.append(String.format(Locale.getDefault(), "query[tag=%s]", ruleTypes.get(idx))); + if (idx < size - 1) { + ruleTypeBuilder.append(" || "); + } + } + if (size > 0) { + condition.append("(").append(ruleTypeBuilder).append(")"); + triggerFlag = true; } - } - if (size > 0) { - condition.append("(").append(ruleTypeBuilder).append(")"); - triggerFlag = true; - } - StringBuilder ruleNameBuilder = new StringBuilder(); - size = ruleIds.size(); - for (int idx = 0; idx < size; ++idx) { - ruleNameBuilder.append(String.format(Locale.getDefault(), "query[name=%s]", ruleIds.get(idx))); - if (idx < size - 1) { - ruleNameBuilder.append(" || "); + StringBuilder ruleNameBuilder = new StringBuilder(); + size = ruleIds.size(); + for (int idx = 0; idx < size; ++idx) { + ruleNameBuilder.append(String.format(Locale.getDefault(), "query[name=%s]", ruleIds.get(idx))); + if (idx < size - 1) { + ruleNameBuilder.append(" || "); + } } - } - if (size > 0) { - if (triggerFlag) { - condition.append(" && ").append("(").append(ruleNameBuilder).append(")"); - } else { - condition.append("(").append(ruleNameBuilder).append(")"); - triggerFlag = 
true; + if (size > 0) { + if (triggerFlag) { + condition.append(" && ").append("(").append(ruleNameBuilder).append(")"); + } else { + condition.append("(").append(ruleNameBuilder).append(")"); + triggerFlag = true; + } } - } - StringBuilder ruleSevLevelBuilder = new StringBuilder(); - size = ruleSeverityLevels.size(); - for (int idx = 0; idx < size; ++idx) { - ruleSevLevelBuilder.append(String.format(Locale.getDefault(), "query[tag=%s]", ruleSeverityLevels.get(idx))); - if (idx < size - 1) { - ruleSevLevelBuilder.append(" || "); + StringBuilder ruleSevLevelBuilder = new StringBuilder(); + size = ruleSeverityLevels.size(); + for (int idx = 0; idx < size; ++idx) { + ruleSevLevelBuilder.append(String.format(Locale.getDefault(), "query[tag=%s]", ruleSeverityLevels.get(idx))); + if (idx < size - 1) { + ruleSevLevelBuilder.append(" || "); + } } - } - if (size > 0) { - if (triggerFlag) { - condition.append(" && ").append("(").append(ruleSevLevelBuilder).append(")"); - } else { - condition.append("(").append(ruleSevLevelBuilder).append(")"); - triggerFlag = true; + if (size > 0) { + if (triggerFlag) { + condition.append(" && ").append("(").append(ruleSevLevelBuilder).append(")"); + } else { + condition.append("(").append(ruleSevLevelBuilder).append(")"); + triggerFlag = true; + } } - } - StringBuilder tagBuilder = new StringBuilder(); - size = tags.size(); - for (int idx = 0; idx < size; ++idx) { - tagBuilder.append(String.format(Locale.getDefault(), "query[tag=%s]", tags.get(idx))); - if (idx < size - 1) { - ruleSevLevelBuilder.append(" || "); + StringBuilder tagBuilder = new StringBuilder(); + size = tags.size(); + for (int idx = 0; idx < size; ++idx) { + tagBuilder.append(String.format(Locale.getDefault(), "query[tag=%s]", tags.get(idx))); + if (idx < size - 1) { + ruleSevLevelBuilder.append(" || "); + } } - } - if (size > 0) { - if (triggerFlag) { - condition.append(" && ").append("(").append(tagBuilder).append(")"); - } else { - 
condition.append("(").append(tagBuilder).append(")"); + if (size > 0) { + if (triggerFlag) { + condition.append(" && ").append("(").append(tagBuilder).append(")"); + } else { + condition.append("(").append(tagBuilder).append(")"); + } + } + } + if(detectionTypes.contains(THREAT_INTEL_DETECTION_TYPE)) { + StringBuilder threatIntelClauseBuilder = new StringBuilder(); + threatIntelClauseBuilder.append(String.format(Locale.getDefault(), "query[tag=%s]", "threat_intel")); + if (condition.length() > 0) { + condition.append(" || "); } + condition.append("(").append(threatIntelClauseBuilder).append(")"); } return new Script(condition.toString()); @@ -321,6 +372,10 @@ public List getRuleSeverityLevels() { return ruleSeverityLevels; } + public List getDetectionTypes() { + return detectionTypes; + } + public List getTags() { return tags; } @@ -329,8 +384,8 @@ public List getActions() { List transformedActions = new ArrayList<>(); if (actions != null) { - for (Action action: actions) { - String subjectTemplate = action.getSubjectTemplate() != null ? action.getSubjectTemplate().getIdOrCode(): ""; + for (Action action : actions) { + String subjectTemplate = action.getSubjectTemplate() != null ? 
action.getSubjectTemplate().getIdOrCode() : ""; subjectTemplate = subjectTemplate.replace("{{ctx.detector", "{{ctx.monitor"); action.getMessageTemplate(); diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java index 489ce5ffb..6fac7a078 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java @@ -23,6 +23,7 @@ import org.opensearch.securityanalytics.action.IndexDetectorRequest; import org.opensearch.securityanalytics.action.IndexDetectorResponse; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.util.DetectorUtils; import org.opensearch.securityanalytics.util.RestHandlerUtils; @@ -67,11 +68,26 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli Detector detector = Detector.parse(xcp, id, null); detector.setLastUpdateTime(Instant.now()); + validateDetectorTriggers(detector); IndexDetectorRequest indexDetectorRequest = new IndexDetectorRequest(id, refreshPolicy, request.method(), detector); return channel -> client.execute(IndexDetectorAction.INSTANCE, indexDetectorRequest, indexDetectorResponse(channel, request.method())); } + private static void validateDetectorTriggers(Detector detector) { + if(detector.getTriggers() != null) { + for (DetectorTrigger trigger : detector.getTriggers()) { + if(trigger.getDetectionTypes().isEmpty()) + throw new IllegalArgumentException(String.format(Locale.ROOT,"Trigger [%s] should mention at least one detection type but found none", trigger.getName())); + for (String detectionType : trigger.getDetectionTypes()) { + if(false == (DetectorTrigger.THREAT_INTEL_DETECTION_TYPE.equals(detectionType) || 
DetectorTrigger.RULES_DETECTION_TYPE.equals(detectionType))) { + throw new IllegalArgumentException(String.format(Locale.ROOT,"Trigger [%s] has unsupported detection type [%s]", trigger.getName(), detectionType)); + } + } + } + } + } + private RestResponseListener indexDetectorResponse(RestChannel channel, RestRequest.Method restMethod) { return new RestResponseListener<>(channel) { @Override diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java index 5dde43788..2565d8175 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/DetectorThreatIntelService.java @@ -84,7 +84,12 @@ public List createDocLevelQueriesFromThreatIntelList( constructId(detector, entry.getKey()), tifdList.get(0).getFeedId(), Collections.emptyList(), String.format(query, field), - List.of("threat_intel", entry.getKey() /*ioc_type*/) + List.of( + "threat_intel", + String.format("ioc_type:%s", entry.getKey()), + String.format("field:%s", field), + String.format("feed_name:%s", tifdList.get(0).getFeedId()) + ) )); } } @@ -148,7 +153,7 @@ public void onFailure(Exception e) { } private static String constructId(Detector detector, String iocType) { - return detector.getName() + "_threat_intel_" + iocType + "_" + UUID.randomUUID(); + return "threat_intel_" + UUID.randomUUID(); } /** Updates all detectors having threat intel detection enabled with the latest threat intel feed data*/ diff --git a/src/main/resources/feed/config/feeds.yml b/src/main/resources/feed/config/feeds.yml deleted file mode 100644 index 8f07a00f7..000000000 --- a/src/main/resources/feed/config/feeds.yml +++ /dev/null @@ -1,3 +0,0 @@ -feeds: - - otx - - feodo \ No newline at end of file diff --git a/src/main/resources/feed/config/feeds/otx.yml 
b/src/main/resources/feed/config/feeds/otx.yml deleted file mode 100644 index e41637d54..000000000 --- a/src/main/resources/feed/config/feeds/otx.yml +++ /dev/null @@ -1,12 +0,0 @@ -feedId: otx_alienvault -url: www.otx.comm; -name: OTX Alientvault reputation -organization: OTX -description: description -feedType: csv; -containedIocs: - - ip -iocCol: 1; # 0 indexed -indexName: otx - -# .opensearch-sap-threat-intel-otx-00001 \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java index 1d8e1e858..1c8770677 100644 --- a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java +++ b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java @@ -92,6 +92,7 @@ import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_MAX_DOCS; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_RETENTION_PERIOD; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_ROLLOVER_PERIOD; +import static org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataUtils.getTifdList; import static org.opensearch.securityanalytics.util.RuleTopicIndices.ruleTopicIndexSettings; public class SecurityAnalyticsRestTestCase extends OpenSearchRestTestCase { @@ -1742,4 +1743,20 @@ protected void enableOrDisableWorkflow(String trueOrFalse) throws IOException { request.setJsonEntity(entity); client().performRequest(request); } + + public List getThreatIntelFeedIocs(int num) throws IOException { + String request = getMatchAllSearchRequestString(num); + SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); + return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); + } + + public String 
getMatchAllSearchRequestString(int num) { + return "{\n" + + "\"size\" : " + num + "," + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index bf57e4b06..477a7ecee 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -71,6 +71,10 @@ public static Detector randomDetectorWithInputsAndThreatIntel(List inputs, Boolean threatIntel, List triggers) { + return randomDetector(null, null, null, inputs, triggers, null, null, null, null, threatIntel); + } + public static Detector randomDetectorWithInputsAndTriggers(List inputs, List triggers) { return randomDetector(null, null, null, inputs, triggers, null, null, null, null, false); } @@ -152,7 +156,7 @@ public static Detector randomDetector(String name, if (triggers.size() == 0) { triggers = new ArrayList<>(); - DetectorTrigger trigger = new DetectorTrigger(null, "windows-trigger", "1", List.of(randomDetectorType()), List.of("QuarksPwDump Clearing Access History"), List.of("high"), List.of("T0008"), List.of()); + DetectorTrigger trigger = new DetectorTrigger(null, "windows-trigger", "1", List.of(randomDetectorType()), List.of("QuarksPwDump Clearing Access History"), List.of("high"), List.of("T0008"), List.of(), List.of()); triggers.add(trigger); } return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, user, inputs, triggers, Collections.singletonList(""), "", "", "", "", "", "", Collections.emptyMap(), Collections.emptyList(), threatIntel); diff --git a/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java b/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java index d3665dcfc..04f17d867 100644 --- 
a/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java +++ b/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java @@ -19,6 +19,7 @@ import org.apache.hc.core5.http.message.BasicHeader; import org.junit.Assert; import org.junit.Ignore; +import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.client.ResponseException; @@ -37,9 +38,11 @@ import static org.opensearch.securityanalytics.TestHelpers.netFlowMappings; import static org.opensearch.securityanalytics.TestHelpers.randomAction; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntel; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDoc; +import static org.opensearch.securityanalytics.TestHelpers.randomDocWithIpIoc; import static org.opensearch.securityanalytics.TestHelpers.randomIndex; import static org.opensearch.securityanalytics.TestHelpers.randomRule; import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; @@ -47,6 +50,7 @@ import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_MAX_DOCS; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_RETENTION_PERIOD; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_ROLLOVER_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; public class AlertsIT extends SecurityAnalyticsRestTestCase { @@ -82,7 +86,7 @@ public void testGetAlerts_success() throws IOException { Detector detector = 
randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(createdId)), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction)))); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction), List.of()))); createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -200,13 +204,13 @@ public void testAckAlerts_WithInvalidDetectorAlertsCombination() throws IOExcept Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction)))); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Detector detector1 = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), 
List.of("attack.defense_evasion"), List.of(triggerAction)))); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction), List.of()))); Response createResponse1 = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector1)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -307,7 +311,7 @@ public void testAckAlertsWithInvalidDetector() throws IOException { Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(createdId)), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction)))); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction), List.of()))); createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -415,7 +419,7 @@ public void testGetAlerts_byDetectorType_success() throws IOException, Interrupt Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", 
List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -494,7 +498,7 @@ public void testGetAlerts_byDetectorType_multipleDetectors_success() throws IOEx Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Detector 1 - WINDOWS - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector1)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -517,7 +521,7 @@ public void testGetAlerts_byDetectorType_multipleDetectors_success() throws IOEx getPrePackagedRules("network").stream().map(DetectorRule::new).collect(Collectors.toList())); Detector detector2 = randomDetectorWithTriggers( getPrePackagedRules("network"), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of())), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of(), List.of())), "network", inputNetflow ); @@ -610,7 +614,7 @@ public void testAlertHistoryRollover_maxAge() throws IOException, InterruptedExc Response response = 
client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -681,7 +685,7 @@ public void testAlertHistoryRollover_maxAge_low_retention() throws IOException, Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -761,7 +765,7 @@ public void testAlertHistoryRollover_maxDocs() throws IOException, InterruptedEx Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = 
randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -845,7 +849,7 @@ public void testGetAlertsFromAllIndices() throws IOException, InterruptedExcepti Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); diff --git a/src/test/java/org/opensearch/securityanalytics/alerts/SecureAlertsRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/alerts/SecureAlertsRestApiIT.java index 4ecf3287f..20e526697 100644 --- a/src/test/java/org/opensearch/securityanalytics/alerts/SecureAlertsRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/alerts/SecureAlertsRestApiIT.java @@ -97,8 +97,8 @@ public void 
testGetAlerts_byDetectorId_success() throws IOException { Action triggerAction = randomAction(createDestination()); Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(createdId)), - getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction)))); + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction), List.of(DetectorTrigger.RULES_DETECTION_TYPE, DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)))); createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -235,7 +235,7 @@ public void testGetAlerts_byDetectorType_success() throws IOException, Interrupt Response response = userClient.performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create 
detector failed", RestStatus.CREATED, restStatus(createResponse)); diff --git a/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRestApiIT.java index e721e1124..225cebb8c 100644 --- a/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRestApiIT.java @@ -220,7 +220,7 @@ private String createWindowsToAppLogsToS3LogsRule(LogIndices indices) throws IOE private String createVpcFlowDetector(String indexName) throws IOException { Detector vpcFlowDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("vpc flow detector for security analytics", List.of(indexName), List.of(), getPrePackagedRules("network").stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of())), "network"); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of(), List.of())), "network"); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(vpcFlowDetector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -280,7 +280,7 @@ private String createAdLdapDetector(String indexName) throws IOException { Detector adLdapDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("ad_ldap logs detector for security analytics", List.of(indexName), List.of(), getPrePackagedRules("ad_ldap").stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("ad_ldap"), List.of(), List.of(), List.of(), List.of())), "ad_ldap"); + List.of(new 
DetectorTrigger(null, "test-trigger", "1", List.of("ad_ldap"), List.of(), List.of(), List.of(), List.of(), List.of())), "ad_ldap"); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(adLdapDetector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -319,7 +319,7 @@ private String createTestWindowsDetector(String indexName) throws IOException { Detector windowsDetector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of(indexName), List.of(), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(windowsDetector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -345,7 +345,7 @@ private String createTestWindowsDetector(String indexName) throws IOException { private String createAppLogsDetector(String indexName) throws IOException { Detector appLogsDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("app logs detector for security analytics", List.of(indexName), List.of(), getPrePackagedRules("others_application").stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("others_application"), List.of(), List.of(), List.of(), List.of())), "others_application"); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("others_application"), List.of(), List.of(), List.of(), 
List.of(), List.of())), "others_application"); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(appLogsDetector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -397,7 +397,7 @@ private String createS3Detector(String indexName) throws IOException { Detector s3AccessLogsDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("s3 access logs detector for security analytics", List.of(indexName), List.of(), getPrePackagedRules("s3").stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("s3"), List.of(), List.of(), List.of(), List.of())), "s3"); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("s3"), List.of(), List.of(), List.of(), List.of(), List.of())), "s3"); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(s3AccessLogsDetector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); diff --git a/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java b/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java index ce6634f41..c69bb2e00 100644 --- a/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java +++ b/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java @@ -57,7 +57,7 @@ public void testGetFindings_byDetectorId_success() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = 
randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -119,7 +119,7 @@ public void testGetFindings_byDetectorType_oneDetector_success() throws IOExcept Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -187,7 +187,7 @@ public void testGetFindings_byDetectorType_success() throws IOException { response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Detector 1 - WINDOWS - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), 
List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector1)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -210,7 +210,7 @@ public void testGetFindings_byDetectorType_success() throws IOException { getPrePackagedRules("network").stream().map(DetectorRule::new).collect(Collectors.toList())); Detector detector2 = randomDetectorWithTriggers( getPrePackagedRules("network"), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of())), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of(), List.of())), "network", inputNetflow ); @@ -286,7 +286,7 @@ public void testGetFindings_rolloverByMaxAge_success() throws IOException, Inter Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -357,7 +357,7 @@ public void testGetFindings_rolloverByMaxDoc_success() throws IOException, Inter Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, 
response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -422,7 +422,7 @@ public void testGetFindings_rolloverByMaxDoc_short_retention_success() throws IO Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); diff --git a/src/test/java/org/opensearch/securityanalytics/findings/SecureFindingRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/findings/SecureFindingRestApiIT.java index 64d5b7cef..ab68eabe7 100644 --- a/src/test/java/org/opensearch/securityanalytics/findings/SecureFindingRestApiIT.java +++ 
b/src/test/java/org/opensearch/securityanalytics/findings/SecureFindingRestApiIT.java @@ -87,7 +87,7 @@ public void testGetFindings_byDetectorId_success() throws IOException { Response response = userClient.performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -206,7 +206,7 @@ public void testGetFindings_byDetectorType_success() throws IOException { createUserRolesMapping(TEST_HR_ROLE, users); // Detector 1 - WINDOWS - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector1)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -229,7 +229,7 @@ public void testGetFindings_byDetectorType_success() throws IOException { 
getPrePackagedRules("network").stream().map(DetectorRule::new).collect(Collectors.toList())); Detector detector2 = randomDetectorWithTriggers( getPrePackagedRules("network"), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of())), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of(), List.of())), "network", inputNetflow ); diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 61634ab46..6c3bff879 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -20,7 +20,6 @@ import org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.model.Rule; -import org.opensearch.securityanalytics.threatIntel.jobscheduler.TIFJobParameter; import java.io.IOException; import java.util.ArrayList; @@ -33,11 +32,13 @@ import java.util.Set; import java.util.stream.Collectors; +import static java.util.Collections.emptyList; import static org.opensearch.securityanalytics.TestHelpers.randomAggregationRule; import static org.opensearch.securityanalytics.TestHelpers.randomDetector; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntel; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntelAndTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndTriggers; import static 
org.opensearch.securityanalytics.TestHelpers.randomDoc; import static org.opensearch.securityanalytics.TestHelpers.randomDocWithIpIoc; @@ -45,8 +46,6 @@ import static org.opensearch.securityanalytics.TestHelpers.randomRule; import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; -import static org.opensearch.securityanalytics.threatIntel.ThreatIntelFeedDataUtils.getTifdList; -import static org.opensearch.securityanalytics.SecurityAnalyticsPlugin.JOB_INDEX_NAME; public class DetectorMonitorRestApiIT extends SecurityAnalyticsRestTestCase { /** @@ -118,7 +117,7 @@ public void testRemoveDocLevelRuleAddAggregationRules_verifyFindings_success() t String avgTermRuleId = createRule(randomAggregationRule("avg", " > 1")); // Update detector and empty doc level rules so detector contains only one aggregation rule DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(sumRuleId), new DetectorRule(avgTermRuleId)), - Collections.emptyList()); + emptyList()); Detector updatedDetector = randomDetectorWithInputs(List.of(input)); Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); @@ -204,7 +203,7 @@ public void testReplaceAggregationRuleWithDocRule_verifyFindings_success() throw String maxRuleId = createRule(randomAggregationRule("max", " > 2")); List detectorRules = List.of(new DetectorRule(maxRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputs(List.of(input)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), 
toHttpEntity(detector)); @@ -376,7 +375,7 @@ public void testRemoveAllRulesAndUpdateDetector_success() throws IOException { assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); - Detector updatedDetector = randomDetector(Collections.emptyList()); + Detector updatedDetector = randomDetector(emptyList()); Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); @@ -420,7 +419,7 @@ public void testAddNewAggregationRule_verifyFindings_success() throws IOExceptio String sumRuleId = createRule(randomAggregationRule("sum", " > 1")); List detectorRules = List.of(new DetectorRule(sumRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputs(List.of(input)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -446,7 +445,7 @@ public void testAddNewAggregationRule_verifyFindings_success() throws IOExceptio // Test adding the new max monitor and updating the existing sum monitor String maxRuleId = createRule(randomAggregationRule("max", " > 3")); DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(maxRuleId), new DetectorRule(sumRuleId)), - Collections.emptyList()); + emptyList()); Detector updatedDetector = randomDetectorWithInputs(List.of(newInput)); Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); @@ -532,7 +531,7 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio List 
detectorRules = aggRuleIds.stream().map(DetectorRule::new).collect(Collectors.toList()); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputs(List.of(input)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -557,7 +556,7 @@ public void testDeleteAggregationRule_verifyFindings_success() throws IOExceptio // Test deleting the aggregation rule DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(avgRuleId)), - Collections.emptyList()); + emptyList()); detector = randomDetectorWithInputs(List.of(newInput)); Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); @@ -763,7 +762,7 @@ public void testMinAggregationRule_findingSuccess() throws IOException { aggRuleIds.add(createRule(randomAggregationRule("min", " > 3", testOpCode))); List detectorRules = aggRuleIds.stream().map(id -> new DetectorRule(id)).collect(Collectors.toList()); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputs(List.of(input)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1011,7 +1010,7 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", 
List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputs(List.of(input)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1078,8 +1077,9 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); - Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), true); + emptyList()); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", List.of(randomDetectorType()), emptyList(), emptyList(),List.of(), emptyList(), List.of(DetectorTrigger.RULES_DETECTION_TYPE, DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger) ); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); String request = "{\n" + @@ -1119,7 +1119,7 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt // Verify workflow verifyWorkflow(detectorMap, monitorIds, 1); List iocs = getThreatIntelFeedIocs(3); - int i=1; + int i = 1; for (String ioc : iocs) { indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); i++; @@ -1134,10 +1134,18 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); int noOfSigmaRuleMatches = docLevelQueryResults.size(); assertEquals(2, noOfSigmaRuleMatches); - String threatIntelDocLevelQueryId = 
docLevelQueryResults.keySet().stream().filter(id -> id.contains(detector.getName() + "_threat_intel")).findAny().get(); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.startsWith("threat_intel")).findAny().get(); ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); assertEquals(docs.size(), 3); + //verify alerts + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + // TODO enable asserts here when able + Assert.assertEquals(3, getAlertsBody.get("total_alerts")); + // update detector Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), false))); assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); @@ -1182,7 +1190,7 @@ public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIn String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), false); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1244,7 +1252,6 @@ public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIn indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); i++; } - executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); @@ 
-1255,22 +1262,6 @@ public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIn assertEquals(2, noOfSigmaRuleMatches); } - private List getThreatIntelFeedIocs(int num) throws IOException { - String request = getMatchAllSearchRequestString(num); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); - return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); - } - - private static String getMatchAllSearchRequestString(int num) { - return "{\n" + - "\"size\" : " + num + "," + - " \"query\" : {\n" + - " \"match_all\":{\n" + - " }\n" + - " }\n" + - "}"; - } - public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesInTrigger() throws IOException { updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -1295,8 +1286,8 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesIn String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); - DetectorTrigger t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(maxRuleId), List.of(), List.of(), List.of()); + emptyList()); + DetectorTrigger t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(maxRuleId), List.of(), List.of(), List.of(), List.of()); Detector detector = randomDetectorWithInputsAndTriggers(List.of(input), List.of(t1)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1362,7 +1353,7 @@ public void testUpdateDetector_disabledWorkflowUsage_verifyWorkflowNotCreated_su List detectorRules = List.of(new 
DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputs(List.of(input)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1436,10 +1427,10 @@ public void testUpdateDetector_removeRule_verifyWorkflowUpdate_success() throws String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); - DetectorTrigger t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(randomDocRuleId, maxRuleId), List.of(), List.of(), List.of()); + DetectorTrigger t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(randomDocRuleId, maxRuleId), List.of(), List.of(), List.of(), List.of()); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputsAndTriggers(List.of(input), List.of(t1)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1561,9 +1552,9 @@ public void testCreateDetector_workflowWithDuplicateMonitor_failure() throws IOE String randomDocRuleId = createRule(randomRule()); List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); - DetectorTrigger t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(randomDocRuleId, maxRuleId), List.of(), List.of(), List.of()); + DetectorTrigger t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(randomDocRuleId, maxRuleId), List.of(), List.of(), List.of(), 
List.of()); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputsAndTriggers(List.of(input), List.of(t1)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1631,9 +1622,9 @@ public void testCreateDetector_verifyWorkflowExecutionBucketLevelDocLevelMonitor List detectorRules = List.of(new DetectorRule(maxRuleId), new DetectorRule(randomDocRuleId)); DetectorTrigger t1, t2; - t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(randomDocRuleId, maxRuleId), List.of(), List.of(), List.of()); + t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(randomDocRuleId, maxRuleId), List.of(), List.of(), List.of(), List.of()); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - Collections.emptyList()); + emptyList()); Detector detector = randomDetectorWithInputsAndTriggers(List.of(input), List.of(t1)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -1767,8 +1758,8 @@ public void testCreateDetector_verifyWorkflowExecutionMultipleBucketLevelDocLeve DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, prepackagedRules.stream().map(DetectorRule::new).collect(Collectors.toList())); DetectorTrigger t1, t2; - t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(sumRuleId, maxRuleId), List.of(), List.of(), List.of()); - t2 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(minRuleId, avgRuleId, cntRuleId), List.of(), List.of(), List.of()); + t1 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(sumRuleId, maxRuleId), 
List.of(), List.of(), List.of(), List.of()); + t2 = new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(minRuleId, avgRuleId, cntRuleId), List.of(), List.of(), List.of(), List.of()); Detector detector = randomDetectorWithInputsAndTriggers(List.of(input), List.of(t1, t2)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java index 83ff51928..2059fb191 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java @@ -63,7 +63,7 @@ public void testNewLogTypes() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("github"), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("github"), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -87,7 +87,7 @@ public void testDeletingADetector_MonitorNotExists() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Create detector #1 of type test_windows - Detector detector1 = 
randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); String detectorId1 = createDetector(detector1); String request = "{\n" + @@ -129,7 +129,7 @@ public void testCreatingADetector() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -187,7 +187,7 @@ public void testCreatingADetectorScheduledJobFinding() throws IOException, Inter assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); Detector detector = randomDetectorWithTriggersAndScheduleAndEnabled(getRandomPrePackagedRules(), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of())), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of())), new IntervalSchedule(1, ChronoUnit.MINUTES, null), true); Response 
createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -288,7 +288,7 @@ public void testCreatingADetectorWithMultipleIndices() throws IOException { Detector detector = randomDetectorWithTriggers( getRandomPrePackagedRules(), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of())), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of())), List.of(index1, index2) ); @@ -346,7 +346,7 @@ public void testCreatingADetectorWithMultipleIndices() throws IOException { } public void testCreatingADetectorWithIndexNotExists() throws IOException { - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); try { makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); @@ -878,7 +878,7 @@ public void testDeletingADetector_single_ruleTopicIndex() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Create detector #1 of type test_windows - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", 
List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); String detectorId1 = createDetector(detector1); String request = "{\n" + @@ -901,7 +901,7 @@ public void testDeletingADetector_single_ruleTopicIndex() throws IOException { int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); Assert.assertEquals(5, noOfSigmaRuleMatches); // Create detector #2 of type windows - Detector detector2 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector2 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); String detectorId2 = createDetector(detector2); request = "{\n" + @@ -972,7 +972,7 @@ public void testDeletingADetector_single_Monitor() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Create detector #1 of type test_windows - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); String detectorId1 = createDetector(detector1); String request = "{\n" + @@ -999,7 +999,7 @@ public void testDeletingADetector_single_Monitor() throws IOException { int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); Assert.assertEquals(5, noOfSigmaRuleMatches); // Create 
detector #2 of type windows - Detector detector2 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector2 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); String detectorId2 = createDetector(detector2); request = "{\n" + @@ -1082,7 +1082,7 @@ public void testDeletingADetector_single_Monitor_workflow_enabled() throws IOExc Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Create detector #1 of type test_windows - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of()))); String detectorId1 = createDetector(detector1); String request = "{\n" + @@ -1109,7 +1109,7 @@ public void testDeletingADetector_single_Monitor_workflow_enabled() throws IOExc int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); Assert.assertEquals(5, noOfSigmaRuleMatches); // Create detector #2 of type windows - Detector detector2 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Detector detector2 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), 
List.of(), List.of(), List.of(), List.of(), List.of()))); String detectorId2 = createDetector(detector2); request = "{\n" + @@ -1187,7 +1187,7 @@ public void testDeletingADetector_oneDetectorType_multiple_ruleTopicIndex() thro // Create detector #1 of type test_windows Detector detector1 = randomDetectorWithTriggers( getRandomPrePackagedRules(), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of())), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of())), List.of(index1) ); String detectorId1 = createDetector(detector1); @@ -1195,7 +1195,7 @@ public void testDeletingADetector_oneDetectorType_multiple_ruleTopicIndex() thro // Create detector #2 of type test_windows Detector detector2 = randomDetectorWithTriggers( getRandomPrePackagedRules(), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of())), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of(), List.of())), List.of(index2) ); @@ -1483,7 +1483,7 @@ public void testDetector_withDatastream_withTemplateField_endToEnd_success() thr // Create detector Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of(datastream), List.of(), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of()))); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), 
toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -1576,7 +1576,7 @@ public void testDetector_withAlias_endToEnd_success() throws IOException { // Create detector Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of(indexAlias), List.of(), getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), - List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of()))); + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); diff --git a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java index 8b2055ed3..cf4cc800c 100644 --- a/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java +++ b/src/test/java/org/opensearch/securityanalytics/threatIntel/integTests/ThreatIntelJobRunnerIT.java @@ -178,12 +178,6 @@ protected boolean verifyJobRan(Instant firstUpdatedTime) throws IOException { return false; } - private List getThreatIntelFeedIocs(int num) throws IOException { - String request = getMatchNumSearchRequestString(num); - SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); - return getTifdList(res, xContentRegistry()).stream().map(it -> it.getIocValue()).collect(Collectors.toList()); - } - private List getThreatIntelFeedIds() throws IOException { String 
request = getMatchAllSearchRequestString(); SearchResponse res = executeSearchAndGetResponse(".opensearch-sap-threat-intel*", request, false); From 2481466391188a2c869e830b9ce5cd110ae5e10a Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Sun, 22 Oct 2023 02:53:50 -0700 Subject: [PATCH 32/39] pull out threat intel rest tests into separate test class Signed-off-by: Surya Sashank Nistala --- .../DetectorThreatIntelIT.java | 244 ++++++++++++++++++ .../resthandler/DetectorMonitorRestApiIT.java | 212 --------------- 2 files changed, 244 insertions(+), 212 deletions(-) create mode 100644 src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java diff --git a/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java b/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java new file mode 100644 index 000000000..49878da80 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java @@ -0,0 +1,244 @@ +package org.opensearch.securityanalytics; + +import org.apache.hc.core5.http.HttpStatus; +import org.junit.Assert; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static java.util.Collections.emptyList; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; +import static 
org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntel; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntelAndTriggers; +import static org.opensearch.securityanalytics.TestHelpers.randomDoc; +import static org.opensearch.securityanalytics.TestHelpers.randomDocWithIpIoc; +import static org.opensearch.securityanalytics.TestHelpers.randomIndex; +import static org.opensearch.securityanalytics.TestHelpers.randomRule; +import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ENABLE_WORKFLOW_USAGE; + +public class DetectorThreatIntelIT extends SecurityAnalyticsRestTestCase { + + public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatIntelDisabled() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + emptyList()); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", List.of(randomDetectorType()), emptyList(), emptyList(),List.of(), 
emptyList(), List.of(DetectorTrigger.RULES_DETECTION_TYPE, DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger) ); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(2, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + List iocs = getThreatIntelFeedIocs(3); + int i = 1; + for (String ioc : iocs) { + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); + i++; + } + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, 
monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(2, noOfSigmaRuleMatches); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.startsWith("threat_intel")).findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(), 3); + //verify alerts + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + // TODO enable asserts here when able + Assert.assertEquals(3, getAlertsBody.get("total_alerts")); + + // update detector + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), false))); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + for (String ioc : iocs) { + indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); + i++; + } + + executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + } + + public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), 
windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + emptyList()); + Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), false); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(1, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) 
(detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + indexDoc(index, "1", randomDoc(2, 4, "test")); + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), true))); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + List iocs = getThreatIntelFeedIocs(3); + int i=2; + for (String ioc : iocs) { + indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); + i++; + } + executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(2, noOfSigmaRuleMatches); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java 
b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java index 6c3bff879..dfea4bac8 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -37,11 +37,8 @@ import static org.opensearch.securityanalytics.TestHelpers.randomDetector; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; -import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntel; -import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndThreatIntelAndTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDoc; -import static org.opensearch.securityanalytics.TestHelpers.randomDocWithIpIoc; import static org.opensearch.securityanalytics.TestHelpers.randomIndex; import static org.opensearch.securityanalytics.TestHelpers.randomRule; import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; @@ -1053,215 +1050,6 @@ public void testCreateDetector_verifyWorkflowCreation_success_WithoutGroupByRule verifyWorkflow(detectorMap, monitorIds, 2); } - public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatIntelDisabled() throws IOException { - - updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); - String index = createTestIndex(randomIndex(), windowsIndexMapping()); - - // Execute CreateMappingsAction to add alias mapping for index - Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - createMappingRequest.setJsonEntity( - "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"" + randomDetectorType() + 
"\", " + - " \"partial\":true" + - "}" - ); - - Response createMappingResponse = client().performRequest(createMappingRequest); - - assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); - - String testOpCode = "Test"; - - String randomDocRuleId = createRule(randomRule()); - List detectorRules = List.of(new DetectorRule(randomDocRuleId)); - DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - emptyList()); - DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", List.of(randomDetectorType()), emptyList(), emptyList(),List.of(), emptyList(), List.of(DetectorTrigger.RULES_DETECTION_TYPE, DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)); - Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger) ); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); - - String request = "{\n" + - " \"query\" : {\n" + - " \"match_all\":{\n" + - " }\n" + - " }\n" + - "}"; - SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); - - - assertEquals(2, response.getHits().getTotalHits().value); - - assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); - Map responseBody = asMap(createResponse); - - String detectorId = responseBody.get("_id").toString(); - request = "{\n" + - " \"query\" : {\n" + - " \"match\":{\n" + - " \"_id\": \"" + detectorId + "\"\n" + - " }\n" + - " }\n" + - "}"; - List hits = executeSearch(Detector.DETECTORS_INDEX, request); - SearchHit hit = hits.get(0); - Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); - List inputArr = (List) detectorMap.get("inputs"); - - - List monitorIds = ((List) (detectorMap).get("monitor_id")); - assertEquals(1, monitorIds.size()); - - assertNotNull("Workflow not 
created", detectorMap.get("workflow_ids")); - assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); - - // Verify workflow - verifyWorkflow(detectorMap, monitorIds, 1); - List iocs = getThreatIntelFeedIocs(3); - int i = 1; - for (String ioc : iocs) { - indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); - i++; - } - String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); - - Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); - - List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); - assertEquals(1, monitorRunResults.size()); - - Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); - int noOfSigmaRuleMatches = docLevelQueryResults.size(); - assertEquals(2, noOfSigmaRuleMatches); - String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.startsWith("threat_intel")).findAny().get(); - ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); - assertEquals(docs.size(), 3); - //verify alerts - Map params = new HashMap<>(); - params.put("detector_id", detectorId); - Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); - Map getAlertsBody = asMap(getAlertsResponse); - // TODO enable asserts here when able - Assert.assertEquals(3, getAlertsBody.get("total_alerts")); - - // update detector - Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), false))); - - assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); - - Map updateResponseBody = asMap(updateResponse); - for (String ioc : iocs) { - indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); - i++; - } - - executeResponse = 
executeAlertingWorkflow(workflowId, Collections.emptyMap()); - - monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); - assertEquals(1, monitorRunResults.size()); - - docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); - noOfSigmaRuleMatches = docLevelQueryResults.size(); - assertEquals(1, noOfSigmaRuleMatches); - } - - public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { - - updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); - String index = createTestIndex(randomIndex(), windowsIndexMapping()); - - // Execute CreateMappingsAction to add alias mapping for index - Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - createMappingRequest.setJsonEntity( - "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"" + randomDetectorType() + "\", " + - " \"partial\":true" + - "}" - ); - - Response createMappingResponse = client().performRequest(createMappingRequest); - - assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); - - String testOpCode = "Test"; - - String randomDocRuleId = createRule(randomRule()); - List detectorRules = List.of(new DetectorRule(randomDocRuleId)); - DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, - emptyList()); - Detector detector = randomDetectorWithInputsAndThreatIntel(List.of(input), false); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); - - String request = "{\n" + - " \"query\" : {\n" + - " \"match_all\":{\n" + - " }\n" + - " }\n" + - "}"; - SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); - - - 
assertEquals(1, response.getHits().getTotalHits().value); - - assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); - Map responseBody = asMap(createResponse); - - String detectorId = responseBody.get("_id").toString(); - request = "{\n" + - " \"query\" : {\n" + - " \"match\":{\n" + - " \"_id\": \"" + detectorId + "\"\n" + - " }\n" + - " }\n" + - "}"; - List hits = executeSearch(Detector.DETECTORS_INDEX, request); - SearchHit hit = hits.get(0); - Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); - List inputArr = (List) detectorMap.get("inputs"); - - - List monitorIds = ((List) (detectorMap).get("monitor_id")); - assertEquals(1, monitorIds.size()); - - assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); - assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); - - // Verify workflow - verifyWorkflow(detectorMap, monitorIds, 1); - indexDoc(index, "1", randomDoc(2, 4, "test")); - String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); - - Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); - - List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); - assertEquals(1, monitorRunResults.size()); - Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); - int noOfSigmaRuleMatches = docLevelQueryResults.size(); - assertEquals(1, noOfSigmaRuleMatches); - - Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(randomDetectorWithInputsAndThreatIntel(List.of(input), true))); - - assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); - - Map updateResponseBody = asMap(updateResponse); - List iocs = getThreatIntelFeedIocs(3); - int i=2; - for (String ioc : iocs) { - indexDoc(index, i+"", 
randomDocWithIpIoc(5, 3, ioc)); - i++; - } - executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); - - monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); - assertEquals(1, monitorRunResults.size()); - - docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); - noOfSigmaRuleMatches = docLevelQueryResults.size(); - assertEquals(2, noOfSigmaRuleMatches); - } - public void testCreateDetector_verifyWorkflowCreation_success_WithGroupByRulesInTrigger() throws IOException { updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); From f2068f152abae4643564236f341c85c6dd51fae4 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Sun, 22 Oct 2023 03:30:35 -0700 Subject: [PATCH 33/39] add detection types testing in detector trigger for rules and threat intel detection scenarios Signed-off-by: Surya Sashank Nistala --- .../threatIntel/common/TIFMetadata.java | 5 +- .../DetectorThreatIntelIT.java | 480 +++++++++++++++++- 2 files changed, 475 insertions(+), 10 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java index c6e8d78e8..04486fb7a 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/common/TIFMetadata.java @@ -14,9 +14,8 @@ import org.opensearch.core.xcontent.*; /** - * Threat intel tif job metadata object - *

- * TIFMetadata is stored in an external endpoint. OpenSearch read the file and store values it in this object. + * POJO containing Threat Intel Feed Metadata + * Contains all the data necessary to fetch and parse threat intel IoC feeds. */ public class TIFMetadata implements Writeable, ToXContent { diff --git a/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java b/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java index 49878da80..9e0767eea 100644 --- a/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java +++ b/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java @@ -58,8 +58,8 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt List detectorRules = List.of(new DetectorRule(randomDocRuleId)); DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, emptyList()); - DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", List.of(randomDetectorType()), emptyList(), emptyList(),List.of(), emptyList(), List.of(DetectorTrigger.RULES_DETECTION_TYPE, DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)); - Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger) ); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", List.of(randomDetectorType()), emptyList(), emptyList(), List.of(), emptyList(), List.of(DetectorTrigger.RULES_DETECTION_TYPE, DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger)); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); String request = "{\n" + @@ -122,7 +122,7 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt params.put("detector_id", detectorId); Response 
getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); Map getAlertsBody = asMap(getAlertsResponse); - // TODO enable asserts here when able + Assert.assertEquals(3, getAlertsBody.get("total_alerts")); // update detector @@ -132,7 +132,7 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt Map updateResponseBody = asMap(updateResponse); for (String ioc : iocs) { - indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); i++; } @@ -146,7 +146,7 @@ public void testCreateDetectorWithThreatIntelEnabled_updateDetectorWithThreatInt assertEquals(1, noOfSigmaRuleMatches); } - public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { + public void testCreateDetectorWithThreatIntelDisabled_updateDetectorWithThreatIntelEnabled() throws IOException { updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -227,9 +227,9 @@ public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIn Map updateResponseBody = asMap(updateResponse); List iocs = getThreatIntelFeedIocs(3); - int i=2; + int i = 2; for (String ioc : iocs) { - indexDoc(index, i+"", randomDocWithIpIoc(5, 3, ioc)); + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); i++; } executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); @@ -241,4 +241,470 @@ public void testCreateDetectorWiththreatIntelDisabled_updateDetectorWithThreatIn noOfSigmaRuleMatches = docLevelQueryResults.size(); assertEquals(2, noOfSigmaRuleMatches); } + + public void testCreateDetectorWithThreatIntelEnabledAndNoRules_triggerDetectionTypeOnlyRules_noAlertsForFindings() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute 
CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + + List detectorRules = emptyList(); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + emptyList()); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", List.of(randomDetectorType()), emptyList(), emptyList(), List.of(), emptyList(), List.of(DetectorTrigger.RULES_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(1, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) 
(hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + List iocs = getThreatIntelFeedIocs(3); + int i = 1; + for (String ioc : iocs) { + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); + i++; + } + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.startsWith("threat_intel")).findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(), 3); + //verify alerts + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + /** findings are present but alerts should not be generated as detection type mentioned in trigger is rules only */ + Assert.assertEquals(0, getAlertsBody.get("total_alerts")); + } + + public void testCreateDetectorWithThreatIntelEnabled_triggerDetectionTypeOnlyThreatIntel_allAlertsForFindings() throws IOException { + + 
updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + + List detectorRules = emptyList(); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + emptyList()); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", + List.of(randomDetectorType()), emptyList(), emptyList(), List.of(), emptyList(), List.of(DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(1, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; 
+ List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + List iocs = getThreatIntelFeedIocs(3); + int i = 1; + for (String ioc : iocs) { + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); + i++; + } + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.startsWith("threat_intel")).findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(), 3); + //verify alerts + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + /** findings are present and alerts are generated as detection type mentioned in trigger is threat_intel only */ + Assert.assertEquals(3, getAlertsBody.get("total_alerts")); + } + + public void 
testCreateDetectorWithThreatIntelEnabled_triggerWithBothDetectionType_allAlertsForFindings() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String testOpCode = "Test"; + + + List detectorRules = emptyList(); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + emptyList()); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", + List.of(randomDetectorType()), emptyList(), emptyList(), List.of(), emptyList(), + List.of(DetectorTrigger.THREAT_INTEL_DETECTION_TYPE, DetectorTrigger.RULES_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), true, List.of(trigger)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(1, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = 
responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + List iocs = getThreatIntelFeedIocs(3); + int i = 1; + for (String ioc : iocs) { + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, ioc)); + i++; + } + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + String threatIntelDocLevelQueryId = docLevelQueryResults.keySet().stream().filter(id -> id.startsWith("threat_intel")).findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(threatIntelDocLevelQueryId); + assertEquals(docs.size(), 3); + //verify alerts + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + /** findings are present and alerts are generated as both detection type 
mentioned in trigger is threat_intel only */ + Assert.assertEquals(3, getAlertsBody.get("total_alerts")); + } + + public void testCreateDetectorWithThreatIntelDisabled_triggerWithThreatIntelDetectionType_mpAlertsForFindings() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + emptyList()); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", + List.of(randomDetectorType()), emptyList(), emptyList(), List.of(), emptyList(), + List.of(DetectorTrigger.THREAT_INTEL_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), false, List.of(trigger)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(1, response.getHits().getTotalHits().value); + + assertEquals("Create 
detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + + int i = 1; + while (i<4) { + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, i+"")); + i++; + } + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + String ruleQueryId = docLevelQueryResults.keySet().stream().findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(ruleQueryId); + assertEquals(docs.size(), 3); + //verify alerts + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + /** findings are present but alerts are NOT generated as detection 
type mentioned in trigger is threat_intel only but finding is from rules*/ + Assert.assertEquals(0, getAlertsBody.get("total_alerts")); + } + + public void testCreateDetectorWithThreatIntelDisabled_triggerWithRulesDetectionType_allAlertsForFindings() throws IOException { + + updateClusterSetting(ENABLE_WORKFLOW_USAGE.getKey(), "true"); + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + String randomDocRuleId = createRule(randomRule()); + List detectorRules = List.of(new DetectorRule(randomDocRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + emptyList()); + DetectorTrigger trigger = new DetectorTrigger("all", "all", "high", + List.of(randomDetectorType()), emptyList(), emptyList(), List.of(), emptyList(), + List.of(DetectorTrigger.RULES_DETECTION_TYPE)); + Detector detector = randomDetectorWithInputsAndThreatIntelAndTriggers(List.of(input), false, List.of(trigger)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + + assertEquals(1, response.getHits().getTotalHits().value); + + 
assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap) (hit.getSourceAsMap().get("detector")); + List inputArr = (List) detectorMap.get("inputs"); + + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + assertEquals(1, monitorIds.size()); + + assertNotNull("Workflow not created", detectorMap.get("workflow_ids")); + assertEquals("Number of workflows not correct", 1, ((List) detectorMap.get("workflow_ids")).size()); + + // Verify workflow + verifyWorkflow(detectorMap, monitorIds, 1); + + int i = 1; + while (i<4) { + indexDoc(index, i + "", randomDocWithIpIoc(5, 3, i+"")); + i++; + } + String workflowId = ((List) detectorMap.get("workflow_ids")).get(0); + + Response executeResponse = executeAlertingWorkflow(workflowId, Collections.emptyMap()); + + List> monitorRunResults = (List>) entityAsMap(executeResponse).get("monitor_run_results"); + assertEquals(1, monitorRunResults.size()); + + Map docLevelQueryResults = ((List>) ((Map) monitorRunResults.get(0).get("input_results")).get("results")).get(0); + int noOfSigmaRuleMatches = docLevelQueryResults.size(); + assertEquals(1, noOfSigmaRuleMatches); + String ruleQueryId = docLevelQueryResults.keySet().stream().findAny().get(); + ArrayList docs = (ArrayList) docLevelQueryResults.get(ruleQueryId); + assertEquals(docs.size(), 3); + //verify alerts + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + /** findings are present but alerts are NOT 
generated as detection type mentioned in trigger is threat_intel only but finding is from rules*/ + Assert.assertEquals(3, getAlertsBody.get("total_alerts")); + } } From 7225ee6059e3947dfdb1d7d9f4408813348e6614 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 23 Oct 2023 01:47:47 -0700 Subject: [PATCH 34/39] add license header Signed-off-by: Surya Sashank Nistala --- .../opensearch/securityanalytics/DetectorThreatIntelIT.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java b/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java index 9e0767eea..9d83b3ed3 100644 --- a/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java +++ b/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java @@ -1,3 +1,7 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.securityanalytics; import org.apache.hc.core5.http.HttpStatus; From ae666dea4c9bcbcb6370b560b912cc3e8675ff74 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Mon, 23 Oct 2023 09:32:25 -0700 Subject: [PATCH 35/39] add threat intel field aliases in mapping view response Signed-off-by: Surya Sashank Nistala --- .../action/GetMappingsViewResponse.java | 36 ++++++++-- .../mapper/MapperService.java | 66 ++++++++++--------- .../securityanalytics/model/LogType.java | 28 ++++++-- .../TransportGetMappingsViewAction.java | 9 +-- .../mapper/MapperRestApiIT.java | 4 ++ 5 files changed, 92 insertions(+), 51 deletions(-) diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java index e242e69c4..7606d029f 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java @@ -4,37 
+4,49 @@ */ package org.opensearch.securityanalytics.action; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.securityanalytics.mapper.MapperUtils; +import org.opensearch.securityanalytics.model.LogType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; public class GetMappingsViewResponse extends ActionResponse implements ToXContentObject { public static final String UNMAPPED_INDEX_FIELDS = "unmapped_index_fields"; public static final String UNMAPPED_FIELD_ALIASES = "unmapped_field_aliases"; + public static final String THREAT_INTEL_FIELD_ALIASES = "threat_intel_field_aliases"; private Map aliasMappings; List unmappedIndexFields; List unmappedFieldAliases; + /** This field sheds information on the list of field aliases that need to be mapped for a given IoC. + * For ex. 
one element for windows logtype would be + *{"ioc": "ip", "fields": ["destination.ip","source.ip"]} where "ip" is the IoC and the required field aliases to be mapped for + * threat intel based detection are "destination.ip","source.ip".*/ + private List threatIntelFieldAliases; + public GetMappingsViewResponse( Map aliasMappings, List unmappedIndexFields, - List unmappedFieldAliases + List unmappedFieldAliases, + List threatIntelFieldAliases ) { this.aliasMappings = aliasMappings; this.unmappedIndexFields = unmappedIndexFields; this.unmappedFieldAliases = unmappedFieldAliases; + this.threatIntelFieldAliases = threatIntelFieldAliases; } public GetMappingsViewResponse(StreamInput in) throws IOException { @@ -56,6 +68,7 @@ public GetMappingsViewResponse(StreamInput in) throws IOException { unmappedFieldAliases.add(in.readString()); } } + this.threatIntelFieldAliases = in.readList(LogType.IocFields::readFrom); } @Override @@ -82,6 +95,12 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeVInt(0); } + if(threatIntelFieldAliases!=null) { + out.writeBoolean(true); + out.writeCollection(threatIntelFieldAliases); + } else { + out.writeBoolean(false); + } } @Override @@ -96,6 +115,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (unmappedFieldAliases != null && unmappedFieldAliases.size() > 0) { builder.field(UNMAPPED_FIELD_ALIASES, unmappedFieldAliases); } + if(threatIntelFieldAliases != null && false == threatIntelFieldAliases.isEmpty()) { + builder.field(THREAT_INTEL_FIELD_ALIASES, threatIntelFieldAliases); + } return builder.endObject(); } diff --git a/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java b/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java index 26f9c1602..3aedc0c8f 100644 --- a/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java +++ b/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java @@ -5,21 +5,10 @@ 
package org.opensearch.securityanalytics.mapper; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchStatusException; -import org.opensearch.core.action.ActionListener; import org.opensearch.action.admin.indices.get.GetIndexRequest; import org.opensearch.action.admin.indices.get.GetIndexResponse; import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -33,8 +22,9 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.securityanalytics.action.GetIndexMappingsResponse; import org.opensearch.securityanalytics.action.GetMappingsViewResponse; import org.opensearch.securityanalytics.logtype.LogTypeService; @@ -43,6 +33,16 @@ import org.opensearch.securityanalytics.util.IndexUtils; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import static org.opensearch.securityanalytics.mapper.MapperUtils.PATH; import static org.opensearch.securityanalytics.mapper.MapperUtils.PROPERTIES; @@ -57,7 +57,8 @@ public class MapperService { private 
IndexTemplateManager indexTemplateManager; private LogTypeService logTypeService; - public MapperService() {} + public MapperService() { + } public MapperService(Client client, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, IndexTemplateManager indexTemplateManager, LogTypeService logTypeService) { this.indicesClient = client.admin().indices(); @@ -122,7 +123,7 @@ public void onFailure(Exception e) { } private void applyAliasMappings(Map indexMappings, String logType, String aliasMappings, boolean partial, ActionListener> actionListener) { - int numOfIndices = indexMappings.size(); + int numOfIndices = indexMappings.size(); GroupedActionListener doCreateMappingActionsListener = new GroupedActionListener(new ActionListener>() { @Override @@ -150,12 +151,13 @@ public void onFailure(Exception e) { /** * Applies alias mappings to index. - * @param indexName Index name + * + * @param indexName Index name * @param mappingMetadata Index mappings - * @param logType Rule topic spcifying specific alias templates - * @param aliasMappings User-supplied alias mappings - * @param partial Partial flag indicating if we should apply mappings partially, in case source index doesn't have all paths specified in alias mappings - * @param actionListener actionListener used to return response/error + * @param logType Rule topic spcifying specific alias templates + * @param aliasMappings User-supplied alias mappings + * @param partial Partial flag indicating if we should apply mappings partially, in case source index doesn't have all paths specified in alias mappings + * @param actionListener actionListener used to return response/error */ private void doCreateMapping( String indexName, @@ -224,7 +226,7 @@ public void onResponse(List mappings) { List indexFields = MapperUtils.extractAllFieldsFlat(mappingMetadata); Map> aliasMappingFields = new HashMap<>(); XContentBuilder aliasMappingsObj = XContentFactory.jsonBuilder().startObject(); - for 
(LogType.Mapping mapping: mappings) { + for (LogType.Mapping mapping : mappings) { if (indexFields.contains(mapping.getRawField())) { aliasMappingFields.put(mapping.getEcs(), Map.of("type", "alias", "path", mapping.getRawField())); } else if (indexFields.contains(mapping.getOcsf())) { @@ -293,7 +295,7 @@ public void onFailure(Exception e) { } }); } - } catch(IOException | IllegalArgumentException e){ + } catch (IOException | IllegalArgumentException e) { actionListener.onFailure(e); } } @@ -308,7 +310,7 @@ private Map filterNonApplicableAliases( Map filteredAliasMappings = mappingsTraverser.traverseAndCopyAsFlat(); List> propertiesToSkip = new ArrayList<>(); - if(missingPathsInIndex.size() > 0) { + if (missingPathsInIndex.size() > 0) { // Filter out missing paths from alias mappings so that our PutMappings request succeeds propertiesToSkip.addAll( missingPathsInIndex.stream() @@ -423,6 +425,7 @@ public void onResponse(GetMappingsResponse getMappingsResponse) { } }, actionListener::onFailure)); } + @Override public void onFailure(Exception e) { actionListener.onFailure(e); @@ -457,9 +460,10 @@ public void onFailure(Exception e) { /** * Constructs Mappings View of index - * @param logType Log Type + * + * @param logType Log Type * @param actionListener Action Listener - * @param concreteIndex Concrete Index name for which we're computing Mappings View + * @param concreteIndex Concrete Index name for which we're computing Mappings View */ private void doGetMappingsView(String logType, ActionListener actionListener, String concreteIndex) { GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(concreteIndex); @@ -479,7 +483,7 @@ public void onResponse(GetMappingsResponse getMappingsResponse) { // List of unapplayable aliases List unmappedFieldAliases = new ArrayList<>(); - for (LogType.Mapping requiredField: requiredFields) { + for (LogType.Mapping requiredField : requiredFields) { String alias = requiredField.getEcs(); String rawPath = 
requiredField.getRawField(); String ocsfPath = requiredField.getOcsf(); @@ -494,7 +498,7 @@ public void onResponse(GetMappingsResponse getMappingsResponse) { } else if (allFieldsFromIndex.contains(ocsfPath)) { applyableAliases.add(alias); pathsOfApplyableAliases.add(ocsfPath); - } else if ((alias == null && allFieldsFromIndex.contains(rawPath) == false) || allFieldsFromIndex.contains(alias) == false) { + } else if ((alias == null && allFieldsFromIndex.contains(rawPath) == false) || allFieldsFromIndex.contains(alias) == false) { if (alias != null) { // we don't want to send back aliases which have same name as existing field in index unmappedFieldAliases.add(alias); @@ -506,7 +510,7 @@ public void onResponse(GetMappingsResponse getMappingsResponse) { Map> aliasMappingFields = new HashMap<>(); XContentBuilder aliasMappingsObj = XContentFactory.jsonBuilder().startObject(); - for (LogType.Mapping mapping: requiredFields) { + for (LogType.Mapping mapping : requiredFields) { if (allFieldsFromIndex.contains(mapping.getOcsf())) { aliasMappingFields.put(mapping.getEcs(), Map.of("type", "alias", "path", mapping.getOcsf())); } else if (mapping.getEcs() != null) { @@ -523,15 +527,15 @@ public void onResponse(GetMappingsResponse getMappingsResponse) { .stream() .filter(e -> pathsOfApplyableAliases.contains(e) == false) .collect(Collectors.toList()); - actionListener.onResponse( - new GetMappingsViewResponse(aliasMappings, unmappedIndexFields, unmappedFieldAliases) + new GetMappingsViewResponse(aliasMappings, unmappedIndexFields, unmappedFieldAliases, logTypeService.getIocFieldsList(logType)) ); } catch (Exception e) { actionListener.onFailure(e); } }, actionListener::onFailure)); } + @Override public void onFailure(Exception e) { actionListener.onFailure(e); @@ -542,7 +546,8 @@ public void onFailure(Exception e) { /** * Given index name, resolves it to single concrete index, depending on what initial indexName is. * In case of Datastream or Alias, WriteIndex would be returned. 
In case of index pattern, newest index by creation date would be returned. - * @param indexName Datastream, Alias, index patter or concrete index + * + * @param indexName Datastream, Alias, index patter or concrete index * @param actionListener Action Listener * @throws IOException */ @@ -583,6 +588,7 @@ public void onFailure(Exception e) { void setIndicesAdminClient(IndicesAdminClient client) { this.indicesClient = client; } + void setClusterService(ClusterService clusterService) { this.clusterService = clusterService; } diff --git a/src/main/java/org/opensearch/securityanalytics/model/LogType.java b/src/main/java/org/opensearch/securityanalytics/model/LogType.java index 9bdb96d1a..f70a462e2 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/LogType.java +++ b/src/main/java/org/opensearch/securityanalytics/model/LogType.java @@ -4,15 +4,18 @@ */ package org.opensearch.securityanalytics.model; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; public class LogType implements Writeable { @@ -60,7 +63,7 @@ public LogType(Map logTypeAsMap) { if (logTypeAsMap.containsKey(IS_BUILTIN)) { this.isBuiltIn = (Boolean) logTypeAsMap.get(IS_BUILTIN); } - List> mappings = (List>)logTypeAsMap.get(MAPPINGS); + List> mappings = (List>) logTypeAsMap.get(MAPPINGS); if (mappings.size() > 0) { this.mappings = new ArrayList<>(mappings.size()); this.mappings = mappings.stream().map(e -> @@ -85,7 +88,9 @@ public 
String getDescription() { return description; } - public boolean getIsBuiltIn() { return isBuiltIn; } + public boolean getIsBuiltIn() { + return isBuiltIn; + } public List getIocFieldsList() { return iocFieldsList; @@ -155,7 +160,7 @@ public static Mapping readFrom(StreamInput sin) throws IOException { /** * stores information of list of field names that contain information for given IoC (Indicator of Compromise). */ - public static class IocFields implements Writeable { + public static class IocFields implements Writeable, ToXContentObject { private final String ioc; private final List fields; @@ -188,6 +193,17 @@ public List getFields() { public static IocFields readFrom(StreamInput sin) throws IOException { return new IocFields(sin); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + String[] fieldsArray = new String[]{}; + fieldsArray = fields.toArray(fieldsArray); + builder.startObject() + .field(IOC, ioc) + .field(FIELDS, fieldsArray) + .endObject(); + return builder; + } } diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java index 38c761261..327990b2d 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java @@ -4,22 +4,15 @@ */ package org.opensearch.securityanalytics.transport; -import org.opensearch.OpenSearchStatusException; -import org.opensearch.core.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; -import org.opensearch.core.rest.RestStatus; -import 
org.opensearch.securityanalytics.action.GetIndexMappingsAction; -import org.opensearch.securityanalytics.action.GetIndexMappingsRequest; -import org.opensearch.securityanalytics.action.GetIndexMappingsResponse; +import org.opensearch.core.action.ActionListener; import org.opensearch.securityanalytics.action.GetMappingsViewAction; import org.opensearch.securityanalytics.action.GetMappingsViewRequest; import org.opensearch.securityanalytics.action.GetMappingsViewResponse; import org.opensearch.securityanalytics.mapper.MapperService; -import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/MapperRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/mapper/MapperRestApiIT.java index 315997a47..6e63f4296 100644 --- a/src/test/java/org/opensearch/securityanalytics/mapper/MapperRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/mapper/MapperRestApiIT.java @@ -11,6 +11,7 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -37,6 +38,7 @@ import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; import org.opensearch.securityanalytics.TestHelpers; +import org.opensearch.securityanalytics.action.GetMappingsViewResponse; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.DetectorInput; import org.opensearch.securityanalytics.model.DetectorRule; @@ -353,6 +355,8 @@ public void testGetMappingsViewSuccess() throws IOException { // Verify unmapped field aliases List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); assertEquals(3, 
unmappedFieldAliases.size()); + List> iocFieldsList = (List>) respMap.get(GetMappingsViewResponse.THREAT_INTEL_FIELD_ALIASES); + assertEquals(iocFieldsList.size(), 1); } public void testGetMappingsViewLinuxSuccess() throws IOException { From 040556cebb47262a7e4fe3813ca09ff6db9b2d59 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 24 Oct 2023 21:12:32 -0700 Subject: [PATCH 36/39] fix threat intel feed parser Signed-off-by: Surya Sashank Nistala --- Dockerfile | 4 ++++ .../securityanalytics/model/ThreatIntelFeedData.java | 1 - .../threatIntel/ThreatIntelFeedDataUtils.java | 1 + .../opensearch/securityanalytics/model/WriteableTests.java | 2 +- 4 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..7c5c34617 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,4 @@ +FROM opensearchstaging/opensearch:2.11.0 +ADD build/distributions/opensearch-security-analytics-2.11.0.0-SNAPSHOT.zip /tmp/ +RUN if [ -d /usr/share/opensearch/plugins/opensearch-security-analytics ]; then /usr/share/opensearch/bin/opensearch-plugin remove opensearch-security-analytics; fi +RUN /usr/share/opensearch/bin/opensearch-plugin install --batch file:/tmp/opensearch-security-analytics-2.11.0.0-SNAPSHOT.zip \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java index 9f9f5d855..169270e9b 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java +++ b/src/main/java/org/opensearch/securityanalytics/model/ThreatIntelFeedData.java @@ -60,7 +60,6 @@ public static ThreatIntelFeedData parse(XContentParser xcp, String id, Long vers String iocValue = null; String feedId = null; Instant timestamp = null; - xcp.nextToken(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); while 
(xcp.nextToken() != XContentParser.Token.END_OBJECT) { String fieldName = xcp.currentName(); diff --git a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java index 540fc6cde..a96558b50 100644 --- a/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java +++ b/src/main/java/org/opensearch/securityanalytics/threatIntel/ThreatIntelFeedDataUtils.java @@ -31,6 +31,7 @@ public static List getTifdList(SearchResponse searchRespons xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() ); + xcp.nextToken(); list.add(ThreatIntelFeedData.parse(xcp, hit.getId(), hit.getVersion())); } catch (Exception e) { log.error(() -> new ParameterizedMessage( diff --git a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java index 7c16e5f6f..87fc72e8b 100644 --- a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java +++ b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java @@ -19,7 +19,7 @@ public class WriteableTests extends OpenSearchTestCase { - public void testDetectorAsStream() throws IOException { + public void testDetectorAsStrea() throws IOException { Detector detector = randomDetector(List.of()); detector.setInputs(List.of(new DetectorInput("", List.of(), List.of(), List.of()))); BytesStreamOutput out = new BytesStreamOutput(); From e850248cbfcd1cd5e48807248a25333b3b3decc8 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 24 Oct 2023 22:08:08 -0700 Subject: [PATCH 37/39] fix workflow failing test Signed-off-by: Surya Sashank Nistala --- .../org/opensearch/securityanalytics/model/WriteableTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java 
b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java index 87fc72e8b..f12535b98 100644 --- a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java +++ b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java @@ -19,7 +19,7 @@ public class WriteableTests extends OpenSearchTestCase { - public void testDetectorAsStrea() throws IOException { + public void testDetectorAsAStream() throws IOException { Detector detector = randomDetector(List.of()); detector.setInputs(List.of(new DetectorInput("", List.of(), List.of(), List.of()))); BytesStreamOutput out = new BytesStreamOutput(); From 2e8882565e9e25dd13d0d1efc2fe65d4155f9120 Mon Sep 17 00:00:00 2001 From: Surya Sashank Nistala Date: Tue, 24 Oct 2023 22:11:51 -0700 Subject: [PATCH 38/39] spotless check failures fixed Signed-off-by: Surya Sashank Nistala --- src/main/resources/mappings/threat_intel_job_mapping.json | 2 +- src/main/resources/threatIntelFeed/feedMetadata.json | 2 +- src/main/resources/threatIntelFeedInfo/feodo.yml | 2 +- src/test/resources/threatIntelFeed/feedMetadata.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/resources/mappings/threat_intel_job_mapping.json b/src/main/resources/mappings/threat_intel_job_mapping.json index c64b034fe..ffd165ae5 100644 --- a/src/main/resources/mappings/threat_intel_job_mapping.json +++ b/src/main/resources/mappings/threat_intel_job_mapping.json @@ -59,4 +59,4 @@ } } } -} \ No newline at end of file +} diff --git a/src/main/resources/threatIntelFeed/feedMetadata.json b/src/main/resources/threatIntelFeed/feedMetadata.json index 27196b6b6..e0f448012 100644 --- a/src/main/resources/threatIntelFeed/feedMetadata.json +++ b/src/main/resources/threatIntelFeed/feedMetadata.json @@ -10,4 +10,4 @@ "ioc_col": 0, "has_header": false } -} \ No newline at end of file +} diff --git a/src/main/resources/threatIntelFeedInfo/feodo.yml b/src/main/resources/threatIntelFeedInfo/feodo.yml index 
4acbf40e4..8205e47ca 100644 --- a/src/main/resources/threatIntelFeedInfo/feodo.yml +++ b/src/main/resources/threatIntelFeedInfo/feodo.yml @@ -3,4 +3,4 @@ name: "ipblocklist_aggressive.csv" feedFormat: "csv" org: "Feodo" iocTypes: ["ip"] -description: "" \ No newline at end of file +description: "" diff --git a/src/test/resources/threatIntelFeed/feedMetadata.json b/src/test/resources/threatIntelFeed/feedMetadata.json index c73995ebd..0e5583797 100644 --- a/src/test/resources/threatIntelFeed/feedMetadata.json +++ b/src/test/resources/threatIntelFeed/feedMetadata.json @@ -9,4 +9,4 @@ "ioc_type": "ip", "ioc_col": 0 } -} \ No newline at end of file +} From bed73ff92b07b093744391605b9a08bf6def536b Mon Sep 17 00:00:00 2001 From: Joanne Wang <109310487+jowg-amazon@users.noreply.github.com> Date: Wed, 25 Oct 2023 14:23:43 -0700 Subject: [PATCH 39/39] remove dockerfile (#689) Signed-off-by: Joanne Wang --- Dockerfile | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 7c5c34617..000000000 --- a/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM opensearchstaging/opensearch:2.11.0 -ADD build/distributions/opensearch-security-analytics-2.11.0.0-SNAPSHOT.zip /tmp/ -RUN if [ -d /usr/share/opensearch/plugins/opensearch-security-analytics ]; then /usr/share/opensearch/bin/opensearch-plugin remove opensearch-security-analytics; fi -RUN /usr/share/opensearch/bin/opensearch-plugin install --batch file:/tmp/opensearch-security-analytics-2.11.0.0-SNAPSHOT.zip \ No newline at end of file