Sigma Aggregation rule fixes (#622) (#640)
Signed-off-by: Subhobrata Dey <[email protected]>
opensearch-trigger-bot[bot] authored Oct 4, 2023
1 parent 49481ec commit 088a4aa
Showing 16 changed files with 240 additions and 35 deletions.
@@ -59,7 +59,6 @@ public void getFindingsByDetectorId(String detectorId, Table table, ActionListen
  public void onResponse(GetDetectorResponse getDetectorResponse) {
  // Get all monitor ids from detector
  Detector detector = getDetectorResponse.getDetector();
- List<String> monitorIds = detector.getMonitorIds();
  ActionListener<GetFindingsResponse> getFindingsResponseListener = new ActionListener<>() {
  @Override
  public void onResponse(GetFindingsResponse resp) {
@@ -87,12 +86,20 @@ public void onFailure(Exception e) {
  // monitor --> detectorId mapping
  Map<String, Detector> monitorToDetectorMapping = new HashMap<>();
  detector.getMonitorIds().forEach(
- monitorId -> monitorToDetectorMapping.put(monitorId, detector)
+ monitorId -> {
+ if (detector.getRuleIdMonitorIdMap().containsKey("chained_findings_monitor")) {
+ if (!detector.getRuleIdMonitorIdMap().get("chained_findings_monitor").equals(monitorId)) {
+ monitorToDetectorMapping.put(monitorId, detector);
+ }
+ } else {
+ monitorToDetectorMapping.put(monitorId, detector);
+ }
+ }
  );
  // Get findings for all monitor ids
  FindingsService.this.getFindingsByMonitorIds(
  monitorToDetectorMapping,
- monitorIds,
+ new ArrayList<>(monitorToDetectorMapping.keySet()),
  DetectorMonitorConfig.getAllFindingsIndicesPattern(detector.getDetectorType()),
  table,
  getFindingsResponseListener
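
A self-contained sketch of the idea behind the hunk above, not part of the change set: the monitor recorded under the detector's "chained_findings_monitor" key is left out of the monitor-to-detector mapping, and the ids queried for findings are then derived from the mapping's keys so the two cannot drift apart. Types are simplified (detector ids stand in for Detector objects) and the helper name is invented for illustration.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class MonitorMappingSketch {

    // Hypothetical helper; the real code builds Map<String, Detector> inside FindingsService.
    static Map<String, String> monitorToDetectorMapping(List<String> monitorIds,
                                                        Map<String, String> ruleIdMonitorIdMap,
                                                        String detectorId) {
        Map<String, String> mapping = new HashMap<>();
        // Null when the detector has no chained-findings monitor; every monitor is kept then.
        String chainedFindingsMonitorId = ruleIdMonitorIdMap.get("chained_findings_monitor");
        for (String monitorId : monitorIds) {
            // Skip the chained-findings monitor so its findings are not fetched twice.
            if (!monitorId.equals(chainedFindingsMonitorId)) {
                mapping.put(monitorId, detectorId);
            }
        }
        return mapping;
    }

    public static void main(String[] args) {
        Map<String, String> mapping = monitorToDetectorMapping(
                List.of("m1", "m2", "m3"),
                Map.of("chained_findings_monitor", "m3"),
                "detector-1");
        // Monitor ids are taken from the mapping keys, exactly as in the changed call site.
        List<String> monitorIdsToQuery = new ArrayList<>(mapping.keySet());
        System.out.println(monitorIdsToQuery); // [m1, m2] (order not guaranteed)
    }
}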
@@ -481,6 +481,7 @@ public List<AggregationItem> getAggregationItemsFromRule () throws SigmaError {
  for (SigmaCondition condition: sigmaRule.getDetection().getParsedCondition()) {
  Pair<ConditionItem, AggregationItem> parsedItems = condition.parsed();
  AggregationItem aggItem = parsedItems.getRight();
+ aggItem.setTimeframe(sigmaRule.getDetection().getTimeframe());
  aggregationItems.add(aggItem);
  }
  return aggregationItems;
@@ -20,6 +20,8 @@ public class AggregationItem implements Serializable {

  private Double threshold;

+ private String timeframe;
+
  public void setAggFunction(String aggFunction) {
  this.aggFunction = aggFunction;
  }
@@ -59,4 +61,12 @@ public void setThreshold(Double threshold) {
  public Double getThreshold() {
  return threshold;
  }
+
+ public void setTimeframe(String timeframe) {
+ this.timeframe = timeframe;
+ }
+
+ public String getTimeframe() {
+ return timeframe;
+ }
  }
@@ -381,10 +381,10 @@ public AggregationQueries convertAggregation(AggregationItem aggregation) {
  fmtAggQuery = String.format(Locale.getDefault(), aggCountQuery, "result_agg", aggregation.getGroupByField());
  }
  aggBuilder.field(fieldName);
- fmtBucketTriggerQuery = String.format(Locale.getDefault(), bucketTriggerQuery, "_cnt", "_cnt", "result_agg", "_cnt", aggregation.getCompOperator(), aggregation.getThreshold());
+ fmtBucketTriggerQuery = String.format(Locale.getDefault(), bucketTriggerQuery, "_cnt", "_count", "result_agg", "_cnt", aggregation.getCompOperator(), aggregation.getThreshold());

  Script script = new Script(String.format(Locale.getDefault(), bucketTriggerScript, "_cnt", aggregation.getCompOperator(), aggregation.getThreshold()));
- condition = new BucketSelectorExtAggregationBuilder(bucketTriggerSelectorId, Collections.singletonMap("_cnt", "_cnt"), script, "result_agg", null);
+ condition = new BucketSelectorExtAggregationBuilder(bucketTriggerSelectorId, Collections.singletonMap("_cnt", "_count"), script, "result_agg", null);
  } else {
  fmtAggQuery = String.format(Locale.getDefault(), aggQuery, "result_agg", aggregation.getGroupByField(), aggregation.getAggField(), aggregation.getAggFunction(), aggregation.getAggField());
  fmtBucketTriggerQuery = String.format(Locale.getDefault(), bucketTriggerQuery, aggregation.getAggField(), aggregation.getAggField(), "result_agg", aggregation.getAggField(), aggregation.getCompOperator(), aggregation.getThreshold());
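
For context on the `_cnt` to `_count` change: in OpenSearch bucket-selector aggregations the reserved buckets_path `_count` resolves to each bucket's document count, so a `count(*) by field` condition has to wire its script variable to `_count` rather than to a metric named `_cnt`, which does not exist. A minimal sketch, not part of the commit, using the stock bucket_selector builder (the plugin itself uses the BucketSelectorExt variant shown above, but the buckets_path convention is the same; names like "count_filter" are illustrative):

import java.util.Map;

import org.opensearch.script.Script;
import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.aggregations.PipelineAggregatorBuilders;
import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.opensearch.search.aggregations.pipeline.BucketSelectorPipelineAggregationBuilder;

class CountConditionSketch {

    // Builds a terms aggregation that keeps only buckets whose doc count exceeds the threshold.
    static TermsAggregationBuilder countCondition(String groupByField, double threshold) {
        // "params._cnt" is the script variable; the buckets_path map wires it to the
        // bucket's built-in "_count" value.
        Script script = new Script("params._cnt > " + threshold);
        BucketSelectorPipelineAggregationBuilder selector =
                PipelineAggregatorBuilders.bucketSelector("count_filter", Map.of("_cnt", "_count"), script);
        return AggregationBuilders.terms("result_agg")
                .field(groupByField)
                .subAggregation(selector);
    }
}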
@@ -97,6 +97,7 @@ public List<Object> convertRule(SigmaRule rule) throws SigmaError {
  }
  queries.add(query);
  if (aggItem != null) {
+ aggItem.setTimeframe(rule.getDetection().getTimeframe());
  queries.add(convertAggregation(aggItem));
  }
  }
@@ -21,11 +21,14 @@ public class SigmaDetections {

  private List<String> condition;

+ private String timeframe;
+
  private List<SigmaCondition> parsedCondition;

- public SigmaDetections(Map<String, SigmaDetection> detections, List<String> condition) throws SigmaDetectionError {
+ public SigmaDetections(Map<String, SigmaDetection> detections, List<String> condition, String timeframe) throws SigmaDetectionError {
  this.detections = detections;
  this.condition = condition;
+ this.timeframe = timeframe;

  if (this.detections.isEmpty()) {
  throw new SigmaDetectionError("No detections defined in Sigma rule");
@@ -55,7 +58,12 @@ protected static SigmaDetections fromDict(Map<String, Object> detectionMap) thro
  }
  }

- return new SigmaDetections(detections, conditionList);
+ String timeframe = null;
+ if (detectionMap.containsKey("timeframe")) {
+ timeframe = detectionMap.get("timeframe").toString();
+ }
+
+ return new SigmaDetections(detections, conditionList, timeframe);
  }

  public Map<String, SigmaDetection> getDetections() {
@@ -69,4 +77,8 @@ public List<String> getCondition() {
  public List<SigmaCondition> getParsedCondition() {
  return parsedCondition;
  }
+
+ public String getTimeframe() {
+ return timeframe;
+ }
  }
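
A minimal sketch, not part of the commit, of how the new field surfaces to callers. It assumes the SigmaRule.fromYaml factory used elsewhere in this plugin (import paths per the repository's rules packages) and reuses the productIndexCountAggRule test helper added further down in this commit, whose detection section carries "timeframe: 5m":

import org.opensearch.securityanalytics.TestHelpers;
import org.opensearch.securityanalytics.rules.exceptions.SigmaError;
import org.opensearch.securityanalytics.rules.objects.SigmaRule;

class TimeframeParsingSketch {
    public static void main(String[] args) throws SigmaError {
        // Parse the count-aggregation test rule; its detection block contains
        // "timeframe: 5m" alongside "condition: sel | count(*) by name > 2".
        SigmaRule rule = SigmaRule.fromYaml(TestHelpers.productIndexCountAggRule(), true);
        System.out.println(rule.getDetection().getTimeframe()); // 5m
    }
}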
@@ -90,6 +90,7 @@
  import org.opensearch.securityanalytics.model.DetectorTrigger;
  import org.opensearch.securityanalytics.model.Rule;
  import org.opensearch.securityanalytics.model.Value;
+ import org.opensearch.securityanalytics.rules.aggregation.AggregationItem;
  import org.opensearch.securityanalytics.rules.backend.OSQueryBackend;
  import org.opensearch.securityanalytics.rules.backend.OSQueryBackend.AggregationQueries;
  import org.opensearch.securityanalytics.rules.backend.QueryBackend;
@@ -784,7 +785,8 @@ private IndexMonitorRequest createBucketLevelMonitorRequest(

  List<String> indices = detector.getInputs().get(0).getIndices();

- AggregationQueries aggregationQueries = queryBackend.convertAggregation(rule.getAggregationItemsFromRule().get(0));
+ AggregationItem aggItem = rule.getAggregationItemsFromRule().get(0);
+ AggregationQueries aggregationQueries = queryBackend.convertAggregation(aggItem);

  SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
  .seqNoAndPrimaryTerm(true)
@@ -814,7 +816,7 @@
  ? new BoolQueryBuilder()
  : QueryBuilders.boolQuery().must(searchSourceBuilder.query());
  RangeQueryBuilder timeRangeFilter = QueryBuilders.rangeQuery(TIMESTAMP_FIELD_ALIAS)
- .gt("{{period_end}}||-1h")
+ .gt("{{period_end}}||-" + (aggItem.getTimeframe() != null? aggItem.getTimeframe(): "1h"))
  .lte("{{period_end}}")
  .format("epoch_millis");
  boolQueryBuilder.must(timeRangeFilter);
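
A short sketch, not part of the commit, of the effective time filter for a rule whose detection carries "timeframe: 5m". The field name parameter and the {{period_end}} mustache variable mirror the hunk above; the helper itself is illustrative, not the plugin's API:

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.RangeQueryBuilder;

class TimeRangeFilterSketch {

    // Restrict the bucket-level monitor's input to the rule's sliding window,
    // falling back to the previous fixed 1h window when no timeframe is set.
    static RangeQueryBuilder timeRangeFilter(String timestampField, String timeframe) {
        String window = timeframe != null ? timeframe : "1h";
        return QueryBuilders.rangeQuery(timestampField)
                .gt("{{period_end}}||-" + window)   // e.g. "{{period_end}}||-5m"
                .lte("{{period_end}}")
                .format("epoch_millis");
    }
}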
@@ -300,7 +300,7 @@ private List<Rule> getQueries(QueryBackend backend, String category, List<String

  Rule ruleModel = new Rule(
  rule.getId().toString(), NO_VERSION, rule, category,
- ruleQueries,
+ ruleQueries.stream().map(Object::toString).collect(Collectors.toList()),
  new ArrayList<>(queryFieldNames),
  ruleStr
  );
30 changes: 30 additions & 0 deletions src/test/java/org/opensearch/securityanalytics/TestHelpers.java
@@ -88,6 +88,11 @@ public static Detector randomDetectorWithTriggers(List<String> rules, List<Detec
  rules.stream().map(DetectorRule::new).collect(Collectors.toList()));
  return randomDetector(null, null, null, List.of(input), triggers, null, null, null, null);
  }
+ public static Detector randomDetectorWithTriggersAndScheduleAndEnabled(List<String> rules, List<DetectorTrigger> triggers, Schedule schedule, boolean enabled) {
+ DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), Collections.emptyList(),
+ rules.stream().map(DetectorRule::new).collect(Collectors.toList()));
+ return randomDetector(null, null, null, List.of(input), triggers, schedule, enabled, null, null);
+ }

  public static Detector randomDetectorWithTriggers(List<String> rules, List<DetectorTrigger> triggers, String detectorType, DetectorInput input) {
  return randomDetector(null, detectorType, null, List.of(input), triggers, null, null, null, null)
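
Hypothetical usage of the new helper, not part of the commit: building a detector that is enabled and runs every minute. The IntervalSchedule class from the alerting common utils is assumed here, and the rule id and empty trigger list are placeholders.

import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.List;

import org.opensearch.commons.alerting.model.IntervalSchedule;
import org.opensearch.securityanalytics.TestHelpers;
import org.opensearch.securityanalytics.model.Detector;

class DetectorHelperUsageSketch {
    static Detector enabledDetectorRunningEveryMinute(String ruleId) {
        // Placeholder rule id and no triggers; the schedule and enabled flag are the point here.
        return TestHelpers.randomDetectorWithTriggersAndScheduleAndEnabled(
                List.of(ruleId),
                Collections.emptyList(),
                new IntervalSchedule(1, ChronoUnit.MINUTES, null),
                true);
    }
}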
@@ -352,6 +357,7 @@ public static String productIndexMaxAggRule() {

  public static String randomProductDocument(){
  return "{\n" +
+ " \"name\": \"laptop\",\n" +
  " \"fieldA\": 123,\n" +
  " \"mappedB\": 111,\n" +
  " \"fieldC\": \"valueC\"\n" +
@@ -563,6 +569,9 @@ public static String netFlowMappings() {

  public static String productIndexMapping(){
  return "\"properties\":{\n" +
+ " \"name\":{\n" +
+ " \"type\":\"keyword\"\n" +
+ " },\n" +
  " \"fieldA\":{\n" +
  " \"type\":\"long\"\n" +
  " },\n" +
@@ -591,13 +600,32 @@ public static String productIndexAvgAggRule(){
  " category: test_category\n" +
  " product: test_product\n" +
  " detection:\n" +
+ " timeframe: 5m\n" +
  " sel:\n" +
  " fieldA: 123\n" +
  " fieldB: 111\n" +
  " fieldC: valueC\n" +
  " condition: sel | avg(fieldA) by fieldC > 110";
  }

+ public static String productIndexCountAggRule(){
+ return " title: Test\n" +
+ " id: 39f918f3-981b-4e6f-a975-8af7e507ef2b\n" +
+ " status: test\n" +
+ " level: critical\n" +
+ " description: Detects QuarksPwDump clearing access history in hive\n" +
+ " author: Florian Roth\n" +
+ " date: 2017/05/15\n" +
+ " logsource:\n" +
+ " category: test_category\n" +
+ " product: test_product\n" +
+ " detection:\n" +
+ " timeframe: 5m\n" +
+ " sel:\n" +
+ " name: laptop\n" +
+ " condition: sel | count(*) by name > 2";
+ }
+
  public static String randomAggregationRule(String aggFunction, String signAndValue) {
  String rule = "title: Remote Encrypting File System Abuse\n" +
  "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" +
@@ -619,6 +647,7 @@ public static String randomAggregationRule(String aggFunction, String signAndVa
  " category: application\n" +
  " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" +
  "detection:\n" +
+ " timeframe: 5m\n" +
  " sel:\n" +
  " Opcode: Info\n" +
  " condition: sel | %s(SeverityValue) by Version %s\n" +
@@ -649,6 +678,7 @@
  " category: application\n" +
  " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" +
  "detection:\n" +
+ " timeframe: 5m\n" +
  " sel:\n" +
  " Opcode: %s\n" +
  " condition: sel | %s(SeverityValue) by Version %s\n" +
@@ -1483,7 +1483,7 @@ public void testCreateDetector_verifyWorkflowExecutionBucketLevelDocLevelMonitor
  Map<String, Object> getFindingsBody = entityAsMap(getFindingsResponse);

  assertNotNull(getFindingsBody);
- assertEquals(10, getFindingsBody.get("total_findings"));
+ assertEquals(6, getFindingsBody.get("total_findings"));

  String findingDetectorId = ((Map<String, Object>)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString();
  assertEquals(detectorId, findingDetectorId);
@@ -1495,7 +1495,6 @@ public void testCreateDetector_verifyWorkflowExecutionBucketLevelDocLevelMonitor
  List<Map<String, Object>> findings = (List)getFindingsBody.get("findings");

  Set<String> docLevelRules = new HashSet<>(List.of(randomDocRuleId));
- List<String> bucketLevelMonitorFindingDocs = new ArrayList<>();
  for(Map<String, Object> finding : findings) {
  List<Map<String, Object>> queries = (List<Map<String, Object>>) finding.get("queries");
  Set<String> findingRules = queries.stream().map(it -> it.get("id").toString()).collect(Collectors.toSet());
@@ -1504,16 +1503,10 @@ public void testCreateDetector_verifyWorkflowExecutionBucketLevelDocLevelMonitor
  docLevelFinding.addAll((List<String>) finding.get("related_doc_ids"));
  } else {
  List<String> findingDocs = (List<String>) finding.get("related_doc_ids");
- if (((Map<String, Object>) ((List<Object>) finding.get("queries")).get(0)).get("query").equals("_id:*")) {
- Assert.assertEquals(1, findingDocs.size());
- bucketLevelMonitorFindingDocs.addAll(findingDocs);
- } else {
- Assert.assertEquals(4, findingDocs.size());
- assertTrue(Arrays.asList("1", "2", "3", "4").containsAll(findingDocs));
- }
+ Assert.assertEquals(4, findingDocs.size());
+ assertTrue(Arrays.asList("1", "2", "3", "4").containsAll(findingDocs));
  }
  }
- assertTrue(bucketLevelMonitorFindingDocs.containsAll(Arrays.asList("1", "2", "3", "4")));
  // Verify doc level finding
  assertTrue(Arrays.asList("1", "2", "3", "4", "5").containsAll(docLevelFinding));
  }
@@ -1652,7 +1645,7 @@ public void testCreateDetector_verifyWorkflowExecutionMultipleBucketLevelDocLeve

  // Assert findings
  assertNotNull(getFindingsBody);
- assertEquals(33, getFindingsBody.get("total_findings"));
+ assertEquals(19, getFindingsBody.get("total_findings"));
  }