diff --git a/filebeat/module/auditd/log/ingest/pipeline.json b/filebeat/module/auditd/log/ingest/pipeline.json
index e2a97600389..f4911f86335 100644
--- a/filebeat/module/auditd/log/ingest/pipeline.json
+++ b/filebeat/module/auditd/log/ingest/pipeline.json
@@ -71,7 +71,7 @@
     },
     {
       "convert": {
-        "field" : "auditd.log.sequence",
+        "field": "auditd.log.sequence",
         "type": "integer",
         "ignore_missing": true
       }
@@ -79,7 +79,11 @@
     {
       "script": {
         "lang": "painless",
-        "inline": " String trimQuotes(def v) {\n if (v.startsWith(\"'\") || v.startsWith('\"')) {\n v = v.substring(1, v.length());\n }\n if (v.endsWith(\"'\") || v.endsWith('\"')) {\n v = v.substring(0, v.length()-1);\n } \n return v;\n }\n \n boolean isHexAscii(String v) {\n def len = v.length();\n if (len == 0 || len % 2 != 0) {\n return false; \n }\n \n for (int i = 0 ; i < len ; i++) {\n if (Character.digit(v.charAt(i), 16) == -1) {\n return false;\n }\n }\n\n return true;\n }\n \n String convertHexToString(String hex) {\n\t StringBuilder sb = new StringBuilder();\n\n for (int i=0; i < hex.length() - 1; i+=2) {\n String output = hex.substring(i, (i + 2));\n int decimal = Integer.parseInt(output, 16);\n sb.append((char)decimal);\n }\n\n return sb.toString();\n }\n \n def possibleHexKeys = ['exe', 'cmd'];\n \n def audit = ctx.auditd.get(\"log\");\n Iterator entries = audit.entrySet().iterator();\n while (entries.hasNext()) {\n def e = entries.next();\n def k = e.getKey();\n def v = e.getValue(); \n\n // Remove entries whose value is ?\n if (v == \"?\" || v == \"(null)\" || v == \"\") {\n entries.remove();\n continue;\n }\n \n // Convert hex values to ASCII.\n if (possibleHexKeys.contains(k) && isHexAscii(v)) {\n v = convertHexToString(v);\n audit.put(k, v);\n }\n \n // Trim quotes.\n if (v instanceof String) {\n v = trimQuotes(v);\n audit.put(k, v);\n }\n \n // Convert arch.\n if (k == \"arch\" && v == \"c000003e\") {\n audit.put(k, \"x86_64\");\n }\n }"
+        "source": " String trimQuotes(def singleQuote, def doubleQuote, def v) {\n if (v.startsWith(singleQuote) || v.startsWith(doubleQuote)) {\n v = v.substring(1, v.length());\n }\n if (v.endsWith(singleQuote) || v.endsWith(doubleQuote)) {\n v = v.substring(0, v.length()-1);\n } \n return v;\n }\n \n boolean isHexAscii(String v) {\n def len = v.length();\n if (len == 0 || len % 2 != 0) {\n return false; \n }\n \n for (int i = 0 ; i < len ; i++) {\n if (Character.digit(v.charAt(i), 16) == -1) {\n return false;\n }\n }\n\n return true;\n }\n \n String convertHexToString(String hex) {\n\t StringBuilder sb = new StringBuilder();\n\n for (int i=0; i < hex.length() - 1; i+=2) {\n String output = hex.substring(i, (i + 2));\n int decimal = Integer.parseInt(output, 16);\n sb.append((char)decimal);\n }\n\n return sb.toString();\n }\n \n def possibleHexKeys = ['exe', 'cmd'];\n \n def audit = ctx.auditd.get(\"log\");\n Iterator entries = audit.entrySet().iterator();\n while (entries.hasNext()) {\n def e = entries.next();\n def k = e.getKey();\n def v = e.getValue(); \n\n // Remove entries whose value is ?\n if (v == \"?\" || v == \"(null)\" || v == \"\") {\n entries.remove();\n continue;\n }\n \n // Convert hex values to ASCII.\n if (possibleHexKeys.contains(k) && isHexAscii(v)) {\n v = convertHexToString(v);\n audit.put(k, v);\n }\n \n // Trim quotes.\n if (v instanceof String) {\n v = trimQuotes(params.single_quote, params.double_quote, v);\n audit.put(k, v);\n }\n \n // Convert arch.\n if (k == \"arch\" && v == \"c000003e\") {\n audit.put(k, \"x86_64\");\n }\n }",
+        "params": {
+          "single_quote": "'",
+          "double_quote": "\""
+        }
       }
     },
     {
diff --git a/filebeat/module/nginx/access/ingest/default.json b/filebeat/module/nginx/access/ingest/default.json
index d4c789343ef..bbc209bdaa7 100644
--- a/filebeat/module/nginx/access/ingest/default.json
+++ b/filebeat/module/nginx/access/ingest/default.json
@@ -1,112 +1,139 @@
 {
-    "description": "Pipeline for parsing Nginx access logs. Requires the geoip and user_agent plugins.",
-    "processors": [{
-        "grok": {
-            "field": "message",
-            "patterns":[
-                "\"?%{IP_LIST:network.forwarded_ip} - %{DATA:user.name} \\[%{HTTPDATE:nginx.access.time}\\] \"%{GREEDYDATA:nginx.access.info}\" %{NUMBER:http.response.status_code:long} %{NUMBER:nginx.access.body_sent.bytes:long} \"%{DATA:http.request.referrer}\" \"%{DATA:nginx.access.agent}\""
-                ],
-            "pattern_definitions": {
-                "IP_LIST": "%{IP}(\"?,?\\s*%{IP})*"
-            },
-            "ignore_missing": true
-        }
-    }, {
-        "grok": {
-            "field": "nginx.access.info",
-            "patterns": [
-                "%{WORD:http.request.method} %{DATA:url.original} HTTP/%{NUMBER:http.version}",
-                ""
-            ],
-            "ignore_missing": true
-        }
-    }, {
-        "remove": {
-            "field": "nginx.access.info"
-        }
-    }, {
-        "split": {
-            "field": "network.forwarded_ip",
-            "separator": "\"?,?\\s+"
-        }
-    }, {
-        "set": {
-            "field": "source.ip",
-            "value": ""
-        }
-    }, {
-        "script": {
-            "lang": "painless",
-            "inline": "boolean isPrivate(def ip) { try { StringTokenizer tok = new StringTokenizer(ip, '.'); int firstByte = Integer.parseInt(tok.nextToken()); int secondByte = Integer.parseInt(tok.nextToken()); if (firstByte == 10) { return true; } if (firstByte == 192 && secondByte == 168) { return true; } if (firstByte == 172 && secondByte >= 16 && secondByte <= 31) { return true; } if (firstByte == 127) { return true; } return false; } catch (Exception e) { return false; } } def found = false; for (def item : ctx.network.forwarded_ip) { if (!isPrivate(item)) { ctx.source.ip = item; found = true; break; } } if (!found) { ctx.source.ip = ctx.network.forwarded_ip[0]; }"
-        }
-    }, {
-        "remove":{
-            "field": "message"
-        }
-    }, {
-        "rename": {
-            "field": "@timestamp",
-            "target_field": "read_timestamp"
-        }
-    }, {
-        "date": {
-            "field": "nginx.access.time",
-            "target_field": "@timestamp",
-            "formats": ["dd/MMM/YYYY:H:m:s Z"]
-        }
-    }, {
-        "remove": {
-            "field": "nginx.access.time"
-        }
-
-    }, { "user_agent": { "field": "nginx.access.agent" }
-    }, {
-        "rename": {
-            "field": "nginx.access.agent",
-            "target_field": "user_agent.original"
-        }
-    }, {
-        "rename": {
-            "field": "user_agent.os",
-            "target_field": "user_agent.os.full_name",
-            "ignore_missing": true
-        }
-    }, {
-        "rename": {
-            "field": "user_agent.os_name",
-            "target_field": "user_agent.os.name",
-            "ignore_missing": true
-        }
-    }, {
-        "rename": {
-            "field": "user_agent.os_major",
-            "target_field": "user_agent.os.major",
-            "ignore_missing": true
-        }
-    }, {
-        "rename": {
-            "field": "user_agent.os_minor",
-            "target_field": "user_agent.os.minor",
-            "ignore_missing": true
-        }
-    }, {
-        "rename": {
-            "field": "user_agent.os_patch",
-            "target_field": "user_agent.os.patch",
-            "ignore_missing": true
-        }
-
-    },{
-        "geoip": {
-            "field": "source.ip",
-            "target_field": "source.geo",
-            "ignore_missing": true
-        }
-    }],
-    "on_failure" : [{
-        "set" : {
-            "field" : "error.message",
-            "value" : "{{ _ingest.on_failure_message }}"
-        }
-    }]
+  "description": "Pipeline for parsing Nginx access logs. Requires the geoip and user_agent plugins.",
+  "processors": [
+    {
+      "grok": {
+        "field": "message",
+        "patterns": [
+          "\"?%{IP_LIST:network.forwarded_ip} - %{DATA:user.name} \\[%{HTTPDATE:nginx.access.time}\\] \"%{GREEDYDATA:nginx.access.info}\" %{NUMBER:http.response.status_code:long} %{NUMBER:nginx.access.body_sent.bytes:long} \"%{DATA:http.request.referrer}\" \"%{DATA:nginx.access.agent}\""
+        ],
+        "pattern_definitions": {
+          "IP_LIST": "%{IP}(\"?,?\\s*%{IP})*"
+        },
+        "ignore_missing": true
+      }
+    },
+    {
+      "grok": {
+        "field": "nginx.access.info",
+        "patterns": [
+          "%{WORD:http.request.method} %{DATA:url.original} HTTP/%{NUMBER:http.version}",
+          ""
+        ],
+        "ignore_missing": true
+      }
+    },
+    {
+      "remove": {
+        "field": "nginx.access.info"
+      }
+    },
+    {
+      "split": {
+        "field": "network.forwarded_ip",
+        "separator": "\"?,?\\s+"
+      }
+    },
+    {
+      "set": {
+        "field": "source.ip",
+        "value": ""
+      }
+    },
+    {
+      "script": {
+        "lang": "painless",
+        "source": "boolean isPrivate(def dot, def ip) { try { StringTokenizer tok = new StringTokenizer(ip, dot); int firstByte = Integer.parseInt(tok.nextToken()); int secondByte = Integer.parseInt(tok.nextToken()); if (firstByte == 10) { return true; } if (firstByte == 192 && secondByte == 168) { return true; } if (firstByte == 172 && secondByte >= 16 && secondByte <= 31) { return true; } if (firstByte == 127) { return true; } return false; } catch (Exception e) { return false; } } def found = false; for (def item : ctx.network.forwarded_ip) { if (!isPrivate(params.dot, item)) { ctx.source.ip = item; found = true; break; } } if (!found) { ctx.source.ip = ctx.network.forwarded_ip[0]; }",
+        "params": {
+          "dot": "."
+        }
+      }
+    },
+    {
+      "remove": {
+        "field": "message"
+      }
+    },
+    {
+      "rename": {
+        "field": "@timestamp",
+        "target_field": "read_timestamp"
+      }
+    },
+    {
+      "date": {
+        "field": "nginx.access.time",
+        "target_field": "@timestamp",
+        "formats": [
+          "dd/MMM/YYYY:H:m:s Z"
+        ]
+      }
+    },
+    {
+      "remove": {
+        "field": "nginx.access.time"
+      }
+    },
+    {
+      "user_agent": {
+        "field": "nginx.access.agent"
+      }
+    },
+    {
+      "rename": {
+        "field": "nginx.access.agent",
+        "target_field": "user_agent.original"
+      }
+    },
+    {
+      "rename": {
+        "field": "user_agent.os",
+        "target_field": "user_agent.os.full_name",
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "user_agent.os_name",
+        "target_field": "user_agent.os.name",
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "user_agent.os_major",
+        "target_field": "user_agent.os.major",
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "user_agent.os_minor",
+        "target_field": "user_agent.os.minor",
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "user_agent.os_patch",
+        "target_field": "user_agent.os.patch",
+        "ignore_missing": true
+      }
+    },
+    {
+      "geoip": {
+        "field": "source.ip",
+        "target_field": "source.geo",
+        "ignore_missing": true
+      }
+    }
+  ],
+  "on_failure": [
+    {
+      "set": {
+        "field": "error.message",
+        "value": "{{ _ingest.on_failure_message }}"
+      }
+    }
+  ]
 }
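The nginx script keeps its original logic (walk the X-Forwarded-For list, take the first non-private address, fall back to the first entry) and only moves the "." delimiter into params.dot; the rest of the file change is re-indentation. As an illustration of the parameterised tokenizing, the sketch below runs a cut-down isPrivate (it only treats 10.x and 127.x as private) through the _simulate API; the forwarded and client_ip field names are invented for the example and are not the module's fields.

POST _ingest/pipeline/_simulate
{
  "pipeline": {
    "processors": [
      {
        "script": {
          "lang": "painless",
          "source": "boolean isPrivate(def dot, def ip) { StringTokenizer tok = new StringTokenizer(ip, dot); int first = Integer.parseInt(tok.nextToken()); return first == 10 || first == 127; } for (def item : ctx.forwarded) { if (!isPrivate(params.dot, item)) { ctx.client_ip = item; break; } }",
          "params": { "dot": "." }
        }
      }
    ]
  },
  "docs": [
    { "_source": { "forwarded": ["10.0.0.5", "203.0.113.7"] } }
  ]
}

With that document the script should skip 10.0.0.5 and set client_ip to 203.0.113.7, which is the same selection the module performs on network.forwarded_ip.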
%{GREEDYDATA:message}", - "%{POSINT:process.pid:long}:signal-handler \\(%{POSINT:redis.log.timestamp}\\) %{GREEDYDATA:message}" - ], - "pattern_definitions": { - "CHAR": "[a-zA-Z]", - "REDISLEVEL": "[.\\-*#]" + "description": "Pipeline for parsing redis logs", + "processors": [ + { + "grok": { + "field": "message", + "patterns": [ + "(%{POSINT:process.pid:long}:%{CHAR:redis.log.role} )?%{REDISTIMESTAMP:redis.log.timestamp} %{REDISLEVEL:log.level} %{GREEDYDATA:message}", + "%{POSINT:process.pid:long}:signal-handler \\(%{POSINT:redis.log.timestamp}\\) %{GREEDYDATA:message}" + ], + "pattern_definitions": { + "CHAR": "[a-zA-Z]", + "REDISLEVEL": "[.\\-*#]" + } + } + }, + { + "script": { + "lang": "painless", + "source": "if (ctx.log.level == params.dot) {\n ctx.log.level = params.debug;\n } else if (ctx.log.level == params.dash) {\n ctx.log.level = params.verbose;\n } else if (ctx.log.level == params.asterisk) {\n ctx.log.level = params.notice;\n } else if (ctx.log.level == params.hash) {\n ctx.log.level = params.warning;\n }", + "params": { + "dot": ".", + "debug": "debug", + "dash": "-", + "verbose": "verbose", + "asterisk": "*", + "notice": "notice", + "hash": "#", + "warning": "warning" + } + } + }, + { + "script": { + "lang": "painless", + "source": "if (ctx.redis.log.role == params.master_abbrev) {\n ctx.redis.log.role = params.master;\n } else if (ctx.redis.log.role == params.slave_abbrev) {\n ctx.redis.log.role = params.slave;\n } else if (ctx.redis.log.role == params.child_abbrev) {\n ctx.redis.log.role = params.child;\n } else if (ctx.redis.log.role == params.sentinel_abbrev) {\n ctx.redis.log.role = params.sentinel;\n }\n ", + "params": { + "master_abbrev": "M", + "master": "master", + "slave_abbrev": "S", + "slave": "slave", + "child_abbrev": "C", + "child": "child", + "sentinel_abbrev": "X", + "sentinel": "sentinel" + } + } + }, + { + "rename": { + "field": "@timestamp", + "target_field": "read_timestamp" + } + }, + { + "date": { + "field": "redis.log.timestamp", + "target_field": "@timestamp", + "formats": [ + "dd MMM H:m:s.SSS", + "dd MMM H:m:s", + "UNIX" + ], + "ignore_failure": true + } + }, + { + "remove": { + "field": "redis.log.timestamp", + "ignore_failure": true + } } - } - }, { - "script": { - "lang": "painless", - "inline": "if (ctx.log.level == '.') {\n ctx.log.level = 'debug';\n } else if (ctx.log.level == '-') {\n ctx.log.level = 'verbose';\n } else if (ctx.log.level == '*') {\n ctx.log.level = 'notice';\n } else if (ctx.log.level == '#') {\n ctx.log.level = 'warning';\n }" - } - }, { - "script": { - "lang": "painless", - "inline": "if (ctx.redis.log.role == 'M') {\n ctx.redis.log.role = 'master';\n } else if (ctx.redis.log.role == 'S') {\n ctx.redis.log.role = 'slave';\n } else if (ctx.redis.log.role == 'C') {\n ctx.redis.log.role = 'child';\n } else if (ctx.redis.log.role == 'X') {\n ctx.redis.log.role = 'sentinel';\n }\n " - } - }, { - "rename": { - "field": "@timestamp", - "target_field": "read_timestamp" - } - }, { - "date": { - "field": "redis.log.timestamp", - "target_field": "@timestamp", - "formats": ["dd MMM H:m:s.SSS", "dd MMM H:m:s", "UNIX"], - "ignore_failure": true - } - }, { - "remove": { - "field": "redis.log.timestamp", - "ignore_failure": true - } - } - ], - "on_failure" : [{ - "set" : { - "field" : "error.message", - "value" : "{{ _ingest.on_failure_message }}" - } - }] + ], + "on_failure": [ + { + "set": { + "field": "error.message", + "value": "{{ _ingest.on_failure_message }}" + } + } + ] }