From 52c48e9728aae1cfbb711037fcdb47f3a35accf8 Mon Sep 17 00:00:00 2001 From: Justin Henderson Date: Wed, 27 Jun 2018 02:46:00 -0500 Subject: [PATCH] c --- .../1_parsing.conf | 36 ----- .../parsing.conf | 132 ++++++++++++++++++ .../sid_translation.ps1 | 9 ++ 3 files changed, 141 insertions(+), 36 deletions(-) delete mode 100644 High Fidelity Alerts_Context for your context/1_parsing.conf create mode 100644 High Fidelity Alerts_Context for your context/parsing.conf create mode 100644 High Fidelity Alerts_Context for your context/sid_translation.ps1 diff --git a/High Fidelity Alerts_Context for your context/1_parsing.conf b/High Fidelity Alerts_Context for your context/1_parsing.conf deleted file mode 100644 index 2234202..0000000 --- a/High Fidelity Alerts_Context for your context/1_parsing.conf +++ /dev/null @@ -1,36 +0,0 @@ -input { - stdin {} -} - -filter { - # This is used to capture the current time before filter section processing takes place - ruby { - code => "event.set('task_start',Time.now.to_f)" - } - # This is the initial parsing of the log - grok { - match => ["message", "(%{INT:step:int})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s+%{IPV4:source_ip}:%{INT:source_port}\s+->\s+%{IPV4:destination_ip}:%{INT:destination_port}", - "message", "(%{INT:step:int})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s%{IPV4:source_ip}\s+->\s+%{IPV4:destination_ip}", - "message", "(%{INT:step:int})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s+%{IPV4:source_ip}:%{INT:source_port}\s+->\s+%{IPV4:destination_ip}:%{INT:destination_port}", - "message", 
"(%{INT:step:int})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s%{IPV4:source_ip}\s+->\s+%{IPV4:destination_ip}"] - } - # This will capture the finish time of the filter processing section and then use it with the - # start time to calculate how long the log took to process. - ruby { - code => "event.set('task_end',Time.now.to_f)" - } - ruby { - code => "event.set('logstash_time',(event.get('task_end') - event.get('task_start')).round(8))" - } - mutate { - remove_field => [ 'task_start', 'task_end' ] - } -} - -output { - stdout { codec => rubydebug } - file { - path => "/tmp/1_parsing.log" - codec => "json" - } -} \ No newline at end of file diff --git a/High Fidelity Alerts_Context for your context/parsing.conf b/High Fidelity Alerts_Context for your context/parsing.conf new file mode 100644 index 0000000..5bdf590 --- /dev/null +++ b/High Fidelity Alerts_Context for your context/parsing.conf @@ -0,0 +1,132 @@ +input { + stdin {} +} + +filter { + # This is used to capture the current time before filter section processing takes place + ruby { + code => "event.set('task_start',Time.now.to_f)" + } + # This is the initial parsing of the log + grok { + match => ["message", "(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s+%{IPV4:source_ip}:%{INT:source_port}\s+->\s+%{IPV4:destination_ip}:%{INT:destination_port}", + "message", "(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s%{IPV4:source_ip}\s+->\s+%{IPV4:destination_ip}", + "message", 
"(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s+%{IPV4:source_ip}:%{INT:source_port}\s+->\s+%{IPV4:destination_ip}:%{INT:destination_port}", + "message", "(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s%{IPV4:source_ip}\s+->\s+%{IPV4:destination_ip}"] + } + if [step] { + if [step] >= 1 { + if [alert] =~ "GPL " { + # This will parse out the category type from the alert + grok { + match => { "alert" => "GPL\s+%{DATA:category}\s" } + } + # This will store the category + mutate { + add_field => { "rule_type" => "Snort GPL" } + lowercase => [ "category"] + } + } + # If the alert is an Emerging Threat alert break it apart for easier reading and categorization + if [alert] =~ "ET " { + # This will parse out the category type from the alert + grok { + match => { "alert" => "ET\s+%{DATA:category}\s" } + } + # This will store the category + mutate { + add_field => { "rule_type" => "Emerging Threats" } + lowercase => [ "category"] + } + } + # I recommend changing the field types below to integer so searches can do greater than or less than + # and also so math functions can be ran against them + mutate { + convert => [ "source_port", "integer" ] + convert => [ "destination_port", "integer" ] + convert => [ "gid", "integer" ] + convert => [ "sid", "integer" ] + # remove_field => [ "message"] + } + # This will translate the priority field into a severity field of either High, Medium, or Low + if [priority] == 1 { + mutate { + add_field => { "severity" => "High" } + } + } + if [priority] == 2 { + mutate { + add_field => { "severity" => "Medium" } + } + } + if [priority] == 3 { + mutate { + add_field => { "severity" => "Low" } + } + } + } + } + # Step 2 and up + if [step] { + if [step] >= 2 { + translate { + field => "sid" + destination => 
"rule" + dictionary_path => "/home/jhenderson/rules.csv" + } + } + # Step 3 and up + if [step] { + if [step] >= 3 { + # This will perform a standard geoip lookup on the source and destination IP addresses + geoip { + source => "destination_ip" + target => "destination_geo" + tag_on_failure => [] + } + geoip { + source => "source_ip" + target => "source_geo" + tag_on_failure => [] + } + } + } + # Step 4 and up + if [step] { + if [step] >= 4 { + # This will perform a geoip ASN lookup on the source and destination IP addresses + geoip { + source => "destination_ip" + target => "destination_geo" + default_database_type => "ASN" + tag_on_failure => [] + } + geoip { + source => "source_ip" + target => "source_geo" + default_database_type => "ASN" + tag_on_failure => [] + } + } + } + # This will capture the finish time of the filter processing section and then use it with the + # start time to calculate how long the log took to process. + ruby { + code => "event.set('task_end',Time.now.to_f)" + } + ruby { + code => "event.set('logstash_time',(event.get('task_end') - event.get('task_start')).round(8))" + } + mutate { + remove_field => [ 'task_start', 'task_end' ] + } +} + +output { + stdout { codec => rubydebug } +# file { +# path => "/tmp/1_parsing.log" +# codec => "json" +# } +} diff --git a/High Fidelity Alerts_Context for your context/sid_translation.ps1 b/High Fidelity Alerts_Context for your context/sid_translation.ps1 new file mode 100644 index 0000000..e5cb090 --- /dev/null +++ b/High Fidelity Alerts_Context for your context/sid_translation.ps1 @@ -0,0 +1,9 @@ +#$rules = get-content -Path C:\Users\jhenderson\Downloads\rules\*.rules | Where-Object { $_ -notmatch "^#" } +$output = "{" + +foreach($rule in $rules){ + if($sid = [regex]::match($rule, "sid:(?<sid>[0-9]+);").Groups[1].Value){ + $modified_rule = $rule -replace "'","" -replace '"','' -replace ",","" + "$($sid),$($modified_rule)" | Out-File -FilePath rules.csv -Append -Encoding utf8 + } +} \ No newline at end of file