
Commit

SMAPPER committed Jun 27, 2018
1 parent 8f53caa commit 52c48e9
Showing 3 changed files with 141 additions and 36 deletions.
36 changes: 0 additions & 36 deletions High Fidelity Alerts_Context for your context/1_parsing.conf

This file was deleted.

132 changes: 132 additions & 0 deletions High Fidelity Alerts_Context for your context/parsing.conf
@@ -0,0 +1,132 @@
input {
stdin {}
}

filter {
# This is used to capture the current time before filter section processing takes place
ruby {
code => "event.set('task_start',Time.now.to_f)"
}
# This is the initial parsing of the log
grok {
match => ["message", "(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s+%{IPV4:source_ip}:%{INT:source_port}\s+->\s+%{IPV4:destination_ip}:%{INT:destination_port}",
"message", "(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s%{IPV4:source_ip}\s+->\s+%{IPV4:destination_ip}",
"message", "(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s+%{IPV4:source_ip}:%{INT:source_port}\s+->\s+%{IPV4:destination_ip}:%{INT:destination_port}",
"message", "(%{INT:step})?\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s%{IPV4:source_ip}\s+->\s+%{IPV4:destination_ip}"]
}
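# For reference, these patterns are written for alert text roughly like the following line,
# optionally prefixed with a step number (all values in this example are hypothetical):
#   1[1:2019401:3] ET POLICY Example Alert [Classification: Potentially Bad Traffic] [Priority: 2]: {TCP} 192.0.2.10:49152 -> 198.51.100.5:80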
if [step] {
if [step] >= 1 {
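# If the alert is a Snort GPL alert, break it apart for easier reading and categorization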
if [alert] =~ "GPL " {
# This will parse out the category type from the alert
grok {
match => { "alert" => "GPL\s+%{DATA:category}\s" }
}
# This will store the category
mutate {
add_field => { "rule_type" => "Snort GPL" }
lowercase => [ "category"]
}
}
# If the alert is an Emerging Threats alert, break it apart for easier reading and categorization
if [alert] =~ "ET " {
# This will parse out the category type from the alert
grok {
match => { "alert" => "ET\s+%{DATA:category}\s" }
}
# This will store the category
mutate {
add_field => { "rule_type" => "Emerging Threats" }
lowercase => [ "category"]
}
}
# I recommend converting the field types below to integer so searches can use greater-than or
# less-than comparisons and so math functions can be run against them
mutate {
convert => [ "source_port", "integer" ]
convert => [ "destination_port", "integer" ]
convert => [ "gid", "integer" ]
convert => [ "sid", "integer" ]
# remove_field => [ "message"]
}
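# Note: priority is captured as a string by grok; depending on the Logstash version it may also
# need converting to integer here for the numeric comparisons below to behave as intended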
# This will translate the priority field into a severity field of either High, Medium, or Low
if [priority] == 1 {
mutate {
add_field => { "severity" => "High" }
}
}
if [priority] == 2 {
mutate {
add_field => { "severity" => "Medium" }
}
}
if [priority] == 3 {
mutate {
add_field => { "severity" => "Low" }
}
}
}
}
# Step 2 and up
if [step] {
if [step] >= 2 {
translate {
field => "sid"
destination => "rule"
dictionary_path => "/home/jhenderson/rules.csv"
}
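# The dictionary file is expected to be a plain two-column CSV of "sid,rule text" pairs, one per
# line; the PowerShell helper included in this commit writes rules.csv in that shape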
}
}
# Step 3 and up
if [step] {
if [step] >= 3 {
# This will perform a standard geoip lookup on the source and destination IP addresses
geoip {
source => "destination_ip"
target => "destination_geo"
tag_on_failure => []
}
geoip {
source => "source_ip"
target => "source_geo"
tag_on_failure => []
}
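# With the default City database these lookups typically nest fields such as country_name,
# city_name, and location under source_geo and destination_geo (exact fields depend on the database)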
}
}
# Step 4 and up
if [step] {
if [step] >= 4 {
# This will perform a geoip ASN lookup on the source and destination IP addresses
geoip {
source => "destination_ip"
target => "destination_geo"
default_database_type => "ASN"
tag_on_failure => []
}
geoip {
source => "source_ip"
target => "source_geo"
default_database_type => "ASN"
tag_on_failure => []
}
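# The ASN lookups typically add autonomous system number and organization fields (e.g. asn and
# as_org) under the same source_geo and destination_geo targets used in step 3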
}
}
# This will capture the finish time of the filter processing section and then use it with the
# start time to calculate how long the log took to process.
ruby {
code => "event.set('task_end',Time.now.to_f)"
}
ruby {
code => "event.set('logstash_time',(event.get('task_end') - event.get('task_start')).round(8))"
}
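# logstash_time now holds the filter-section processing time in seconds for this event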
mutate {
remove_field => [ 'task_start', 'task_end' ]
}
}

output {
stdout { codec => rubydebug }
# file {
# path => "/tmp/1_parsing.log"
# codec => "json"
# }
}
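# A quick way to exercise this config is to pipe a single alert line into Logstash on stdin,
# e.g. (the binary path and sample alert below are hypothetical):
#   echo '<alert line>' | bin/logstash -f parsing.conf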
@@ -0,0 +1,9 @@
#$rules = get-content -Path C:\Users\jhenderson\Downloads\rules\*.rules | Where-Object { $_ -notmatch "^#" }
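# NOTE: the rule-loading line above is left commented out; point it at a local directory of
# *.rules files and uncomment it before running the loop below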
$output = "{"

foreach($rule in $rules){
if($sid = [regex]::match($rule, "sid:(?<sid>[0-9]+);").Groups[1].Value){
$modified_rule = $rule -replace "'","" -replace '"','' -replace ",",""
"$($sid),$($modified_rule)" | Out-File -FilePath rules.csv -Append -Encoding utf8
}
}
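# The resulting rules.csv contains one "sid,rule text" pair per line (quotes and commas stripped
# from the rule), matching the format the translate filter's dictionary_path in parsing.conf reads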
