diff --git a/High Fidelity Alerts_Context for your context/parsing.conf b/High Fidelity Alerts_Context for your context/parsing.conf
index 23a4e01..c36d2b1 100644
--- a/High Fidelity Alerts_Context for your context/parsing.conf
+++ b/High Fidelity Alerts_Context for your context/parsing.conf
@@ -135,6 +135,35 @@ filter {
       }
     }
   }
+  # Step 6 Top 1 Million check
+  if [step] {
+    if [step] >= 6 {
+      if [source_highest_registered_domain] {
+        translate {
+          field => "source_highest_registered_domain"
+          destination => "source_fqdn_top1m"
+          dictionary_path => "/home/jhenderson/top1m.csv"
+        }
+      }
+      if [destination_highest_registered_domain] {
+        translate {
+          field => "destination_highest_registered_domain"
+          destination => "destination_fqdn_top1m"
+          dictionary_path => "/home/jhenderson/top1m.csv"
+        }
+      }
+      if [source_fqdn_top1m] {
+        mutate {
+          add_tag => [ "top-1m", "source_top1m" ]
+        }
+      }
+      if [destination_fqdn_top1m] {
+        mutate {
+          add_tag => [ "top-1m", "destination_top1m" ]
+        }
+      }
+    }
+  }
   # This will capture the finish time of the filter processing section and then use it with the
   # start time to calculate how long the log took to process.
   ruby {
diff --git a/High Fidelity Alerts_Context for your context/top-1m_translation.ps1 b/High Fidelity Alerts_Context for your context/top-1m_translation.ps1
new file mode 100644
index 0000000..55bcbba
--- /dev/null
+++ b/High Fidelity Alerts_Context for your context/top-1m_translation.ps1
@@ -0,0 +1,10 @@
+$path = "C:\Users\jhenderson\Downloads"
+
+Add-Type -AssemblyName System.IO.Compression.FileSystem
+Invoke-WebRequest -Uri http://s3.amazonaws.com/alexa-static/top-1m.csv.zip -OutFile "$path\top-1m.csv.zip"
+Remove-Item "$path\top-1m.csv" -Force
+[System.IO.Compression.ZipFile]::ExtractToDirectory("$path\top-1m.csv.zip", $path)
+$top1m = Import-Csv -Delimiter "," -Header "rank","site" -Path "$path\top-1m.csv"
+foreach($record in $top1m){
+  "$($record.site),$($record.rank)" | Out-File -FilePath top1m.csv -Append -Encoding utf8
+}
\ No newline at end of file
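
Note on the dictionary format (not part of the diff above): the Logstash translate filter treats the first CSV column as the lookup key and the second column as the value it copies into the destination field, which is why the PowerShell script flips the Alexa list from rank,site to site,rank before writing top1m.csv. A minimal sketch of what /home/jhenderson/top1m.csv would look like after conversion, using hypothetical example rows:

google.com,1
youtube.com,2
facebook.com,3

With a dictionary like this in place, an event whose source_highest_registered_domain matches a listed domain gets that domain's rank written to source_fqdn_top1m, and the following mutate block then tags the event with top-1m and source_top1m (likewise for the destination fields).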