diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 422663f5..9663c471 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -27,33 +27,36 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have support built in for these alert types: +- Alerta +- AWS SES +- AWS SNS +- Chatwork - Command +- Datadog +- Debug +- Dingtalk +- Discord - Email +- Exotel +- Gitter +- GoogleChat +- HTTP POST - JIRA -- OpsGenie -- AWS SNS -- MS Teams -- Slack +- Line Notify - Mattermost -- Telegram -- GoogleChat +- Microsoft Teams +- OpsGenie - PagerDuty - PagerTree -- Exotel -- Twilio -- Splunk On-Call (Formerly VictorOps) -- Gitter +- Squadcast - ServiceNow -- Debug +- Slack +- Splunk On-Call (Formerly VictorOps) - Stomp -- Alerta -- HTTP POST -- Line Notify +- Telegram - TheHive +- Twilio - Zabbix -- Discord -- Dingtalk -- Chatwork Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index f100866d..bb446206 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1480,6 +1480,220 @@ come from an individual event, usually the one which triggers the alert. When using ``alert_text_args``, you can access nested fields and index into arrays. For example, if your match was ``{"data": {"ips": ["127.0.0.1", "12.34.56.78"]}}``, then by using ``"data.ips[1]"`` in ``alert_text_args``, it would replace value with ``"12.34.56.78"``. This can go arbitrarily deep into fields and will still work on keys that contain dots themselves. +Alerter +~~~~~~~ + +For all Alerter subclasses, you may reference values from a top-level rule property in your Alerter fields by referring to the property name surrounded by dollar signs. This can be useful when you have rule-level properties that you would like to reference many times in your alert. For example: + +Example usage:: + + jira_priority: $priority$ + jira_alert_owner: $owner$ + +Alerta +~~~~~~ + +Alerta alerter will post an alert in the Alerta server instance through the alert API endpoint. +See https://docs.alerta.io/en/latest/api/alert.html for more details on the Alerta JSON format. + +For Alerta 5.0 + +Required: + +``alerta_api_url``: API server URL. + +Optional: + +``alerta_api_key``: This is the api key for alerta server, sent in an ``Authorization`` HTTP header. If not defined, no Authorization header is sent. + +``alerta_use_qk_as_resource``: If true and query_key is present, this will override ``alerta_resource`` field with the ``query_key value`` (Can be useful if ``query_key`` is a hostname). + +``alerta_use_match_timestamp``: If true, it will use the timestamp of the first match as the ``createTime`` of the alert. otherwise, the current server time is used. + +``alerta_api_skip_ssl``: Defaults to False. + +``alert_missing_value``: Text to replace any match field not found when formating strings. Defaults to ````. + +The following options dictate the values of the API JSON payload: + +``alerta_severity``: Defaults to "warning". + +``alerta_timeout``: Defaults 84600 (1 Day). + +``alerta_type``: Defaults to "elastalert". + +The following options use Python-like string syntax ``{}`` or ``%()s`` to access parts of the match, similar to the CommandAlerter. Ie: "Alert for {clientip}". +If the referenced key is not found in the match, it is replaced by the text indicated by the option ``alert_missing_value``. 
+ +``alerta_resource``: Defaults to "elastalert". + +``alerta_service``: Defaults to "elastalert". + +``alerta_origin``: Defaults to "elastalert". + +``alerta_environment``: Defaults to "Production". + +``alerta_group``: Defaults to "". + +``alerta_correlate``: Defaults to an empty list. + +``alerta_tags``: Defaults to an empty list. + +``alerta_event``: Defaults to the rule's name. + +``alerta_text``: Defaults to the rule's text according to its type. + +``alerta_value``: Defaults to "". + +The ``attributes`` dictionary is built by joining the lists from ``alerta_attributes_keys`` and ``alerta_attributes_values``, considered in order. + + +Example usage using old-style format:: + + alert: + - alerta + alerta_api_url: "http://youralertahost/api/alert" + alerta_attributes_keys: ["hostname", "TimestampEvent", "senderIP" ] + alerta_attributes_values: ["%(key)s", "%(logdate)s", "%(sender_ip)s" ] + alerta_correlate: ["ProbeUP","ProbeDOWN"] + alerta_event: "ProbeUP" + alerta_text: "Probe %(hostname)s is UP at %(logdate)s GMT" + alerta_value: "UP" + +Example usage using new-style format:: + + alert: + - alerta + alerta_attributes_values: ["{key}", "{logdate}", "{sender_ip}" ] + alerta_text: "Probe {hostname} is UP at {logdate} GMT" + +AWS SES +~~~~~~~ + +The AWS SES alerter is similar to Email alerter but uses AWS SES to send emails. The AWS SES alerter can use AWS credentials +from the rule yaml, standard AWS config files or environment variables. + +AWS SES requires one option: + +``ses_email``: An address or list of addresses to sent the alert to. + +``ses_from_addr``: This sets the From header in the email. + +Optional: + +``ses_aws_access_key``: An access key to connect to AWS SES with. + +``ses_aws_secret_key``: The secret key associated with the access key. + +``ses_aws_region``: The AWS region in which the AWS SES resource is located. Default is us-east-1 + +``ses_aws_profile``: The AWS profile to use. If none specified, the default will be used. + +``ses_email_reply_to``: This sets the Reply-To header in the email. + +``ses_cc``: This adds the CC emails to the list of recipients. By default, this is left empty. + +``ses_bcc``: This adds the BCC emails to the list of recipients but does not show up in the email message. By default, this is left empty. + +Example When not using aws_profile usage:: + + alert: + - "ses" + ses_aws_access_key_id: "XXXXXXXXXXXXXXXXXX'" + ses_aws_secret_access_key: "YYYYYYYYYYYYYYYYYYYY" + ses_aws_region: "us-east-1" + ses_from_addr: "xxxx1@xxx.com" + ses_email: "xxxx1@xxx.com" + +Example When to use aws_profile usage:: + + # Create ~/.aws/credentials + + [default] + aws_access_key_id = xxxxxxxxxxxxxxxxxxxx + aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy + + # Create ~/.aws/config + + [default] + region = us-east-1 + + # alert rule setting + + alert: + - "ses" + ses_aws_profile: "default" + ses_from_addr: "xxxx1@xxx.com" + ses_email: "xxxx1@xxx.com" + +AWS SNS +~~~~~~~ + +The AWS SNS alerter will send an AWS SNS notification. The body of the notification is formatted the same as with other alerters. +The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or +via environment variables. See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html for details. + +AWS SNS requires one option: + +``sns_topic_arn``: The SNS topic's ARN. 
For example, ``arn:aws:sns:us-east-1:123456789:somesnstopic``
+
+Optional:
+
+``sns_aws_access_key_id``: An access key to connect to SNS with.
+
+``sns_aws_secret_access_key``: The secret key associated with the access key.
+
+``sns_aws_region``: The AWS region in which the SNS resource is located. Defaults to us-east-1.
+
+``sns_aws_profile``: The AWS profile to use. If none specified, the default will be used.
+
+Example usage when not using aws_profile::
+
+    alert:
+      - sns
+    sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic'
+    sns_aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'
+    sns_aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY'
+    sns_aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests.
+
+Example usage when using aws_profile::
+
+    # Create ~/.aws/credentials
+
+    [default]
+    aws_access_key_id = xxxxxxxxxxxxxxxxxxxx
+    aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
+
+    # Create ~/.aws/config
+
+    [default]
+    region = us-east-1
+
+    # alert rule setting
+
+    alert:
+      - sns
+    sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic'
+    sns_aws_profile: 'default'
+
+Chatwork
+~~~~~~~~
+
+Chatwork will send a notification to a Chatwork application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``chatwork_apikey``: Chatwork API key.
+
+``chatwork_room_id``: The ID of the Chatwork room to post to. The room ID is the number that appears after "rid" at the end of the room's URL in your browser.
+
+Example usage::
+
+    alert:
+      - "chatwork"
+    chatwork_apikey: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    chatwork_room_id: "xxxxxxxxx"
+
 Command
 ~~~~~~~
@@ -1523,7 +1737,6 @@ Example usage using new-style format::
 
       - command
     command: ["/bin/send_alert", "--username", "{match[username]}"]
 
-
 Datadog
 ~~~~~~~
@@ -1532,9 +1745,116 @@ a message that is longer than 4000 characters, only his first 4000 characters wi
 
 This alert requires two additional options:
 
-``datadog_api_key``: [Datadog API key](https://docs.datadoghq.com/account_management/api-app-keys/#api-keys)
+``datadog_api_key``: `Datadog API key <https://docs.datadoghq.com/account_management/api-app-keys/#api-keys>`_
+
+``datadog_app_key``: `Datadog application key <https://docs.datadoghq.com/account_management/api-app-keys/#application-keys>`_
+
+Example usage::
+
+    alert:
+      - "datadog"
+    datadog_api_key: "Datadog API Key"
+    datadog_app_key: "Datadog APP Key"
+
+Debug
+~~~~~
+
+The debug alerter will log the alert information using the Python logger at the info level. It is logged into a Python Logger object with the name ``elastalert`` that can be easily accessed using the ``getLogger`` command.
+
+Dingtalk
+~~~~~~~~
+
+Dingtalk will send a notification to a Dingtalk application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``dingtalk_access_token``: Dingtalk access token.
+
+``dingtalk_msgtype``: The Dingtalk message type. One of ``text``, ``markdown``, ``single_action_card``, or ``action_card``.
+
+Required when ``dingtalk_msgtype`` is ``single_action_card``:
+
+``dingtalk_single_title``: The title of a single button.
+
+``dingtalk_single_url``: Jump link for a single button.
+
+Required when ``dingtalk_msgtype`` is ``action_card``:
+
+``dingtalk_btns``: The list of buttons for the action card.
+
+Optional when ``dingtalk_msgtype`` is ``action_card``:
+
+``dingtalk_btn_orientation``: "0": buttons are arranged vertically; "1": buttons are arranged horizontally. 
+
+Example msgtype ``text``::
+
+    alert:
+      - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "text"
+
+
+Example msgtype ``markdown``::
+
+    alert:
+      - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "markdown"
+
+
+Example msgtype ``single_action_card``::
+
+    alert:
+      - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "single_action_card"
+    dingtalk_single_title: "test3"
+    dingtalk_single_url: "https://xxxx.xxx"
+
+
+Example msgtype ``action_card``::
+
+    alert:
+      - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "action_card"
+    dingtalk_btn_orientation: "0"
+    dingtalk_btns: [{"title": "a", "actionURL": "https://xxxx1.xxx"}, {"title": "b", "actionURL": "https://xxxx2.xxx"}]
+
+Discord
+~~~~~~~
+
+Discord will send a notification to a Discord application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``discord_webhook_url``: The webhook URL.
+
+Optional:
+
+``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If the discord_embed_icon_url parameter is provided, the emoji is ignored.
+
+``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy.
+
+``discord_proxy_login``: The Discord proxy auth username.
+
+``discord_proxy_password``: The Discord proxy auth password.
+
+``discord_embed_color``: The color of the embed. Defaults to ``0xffffff``.
+
+``discord_embed_footer``: The footer text of the embed.
+
+``discord_embed_icon_url``: You can provide an icon_url to use a custom image. Provide the absolute address of the picture.
+
+Example usage::
-``datadog_app_key``: [Datadog application key](https://docs.datadoghq.com/account_management/api-app-keys/#application-keys)
+
+    alert:
+      - "discord"
+    discord_webhook_url: "Your discord webhook url"
+    discord_emoji_title: ":lock:"
+    discord_embed_color: 0xE24D42
+    discord_embed_footer: "Message sent by ElastAlert from your computer"
+    discord_embed_icon_url: "https://humancoders-formations.s3.amazonaws.com/uploads/course/logo/38/thumb_bigger_formation-elasticsearch.png"
 
 Email
 ~~~~~
@@ -1588,341 +1908,239 @@ by the smtp server.
 
 ``email_format``: If set to ``html``, the email's MIME type will be set to HTML, and HTML content should correctly render. If you use this, you need to put your own HTML into ``alert_text`` and use ``alert_text_type: alert_text_jinja`` Or ``alert_text_type: alert_text_only``.
 
-Jira
-~~~~
-
-The JIRA alerter will open a ticket on jira whenever an alert is triggered. You must have a service account for ElastAlert to connect with.
-The credentials of the service account are loaded from a separate file. The ticket number will be written to the alert pipeline, and if it
-is followed by an email alerter, a link will be included in the email.
-
-This alert requires four additional options:
-
-``jira_server``: The hostname of the JIRA server.
-
-``jira_project``: The project to open the ticket under.
-
-``jira_issuetype``: The type of issue that the ticket will be filed as. Note that this is case sensitive.
-
-``jira_account_file``: The path to the file which contains JIRA account credentials.
-
-For an example JIRA account file, see ``example_rules/jira_acct.yaml``. 
The account file is also yaml formatted and must contain two fields: - -``user``: The username. - -``password``: The password. - -Optional: - -``jira_assignee``: Assigns an issue to a user. - -``jira_component``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_components`` instead. - -``jira_components``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. - -``jira_description``: Similar to ``alert_text``, this text is prepended to the JIRA description. - -``jira_label``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_labels`` instead. - -``jira_labels``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. - -``jira_priority``: The index of the priority to set the issue to. In the JIRA dropdown for priorities, 0 would represent the first priority, -1 the 2nd, etc. - -``jira_watchers``: A list of user names to add as watchers on a JIRA ticket. This can be a single string or a list of strings. - -``jira_bump_tickets``: If true, ElastAlert search for existing tickets newer than ``jira_max_age`` and comment on the ticket with -information about the alert instead of opening another ticket. ElastAlert finds the existing ticket by searching by summary. If the -summary has changed or contains special characters, it may fail to find the ticket. If you are using a custom ``alert_subject``, -the two summaries must be exact matches, except by setting ``jira_ignore_in_title``, you can ignore the value of a field when searching. -For example, if the custom subject is "foo occured at bar", and "foo" is the value field X in the match, you can set ``jira_ignore_in_title`` -to "X" and it will only bump tickets with "bar" in the subject. Defaults to false. - -``jira_ignore_in_title``: ElastAlert will attempt to remove the value for this field from the JIRA subject when searching for tickets to bump. -See ``jira_bump_tickets`` description above for an example. - -``jira_max_age``: If ``jira_bump_tickets`` is true, the maximum age of a ticket, in days, such that ElastAlert will comment on the ticket -instead of opening a new one. Default is 30 days. - -``jira_bump_not_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket must **not** be in for ElastAlert to comment on -the ticket instead of opening a new one. For example, to prevent comments being added to resolved or closed tickets, set this to 'Resolved' -and 'Closed'. This option should not be set if the ``jira_bump_in_statuses`` option is set. +Exotel +~~~~~~ -Example usage:: +Developers in India can use Exotel alerter, it will trigger an incident to a mobile phone as sms from your exophone. Alert name along with the message body will be sent as an sms. - jira_bump_not_in_statuses: - - Resolved - - Closed +The alerter requires the following option: -``jira_bump_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket *must be in* for ElastAlert to comment on -the ticket instead of opening a new one. For example, to only comment on 'Open' tickets -- and thus not 'In Progress', 'Analyzing', -'Resolved', etc. tickets -- set this to 'Open'. 
This option should not be set if the ``jira_bump_not_in_statuses`` option is set. +``exotel_account_sid``: This is sid of your Exotel account. -Example usage:: +``exotel_auth_token``: Auth token assosiated with your Exotel account. - jira_bump_in_statuses: - - Open +If you don't know how to find your accound sid and auth token, refer - https://support.exotel.com/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid -``jira_bump_only``: Only update if a ticket is found to bump. This skips ticket creation for rules where you only want to affect existing tickets. +``exotel_to_number``: The phone number where you would like send the notification. -Example usage:: +``exotel_from_number``: Your exophone number from which message will be sent. - jira_bump_only: true +The alerter has one optional argument: -``jira_transition_to``: If ``jira_bump_tickets`` is true, Transition this ticket to the given Status when bumping. Must match the text of your JIRA implementation's Status field. +``exotel_message_body``: Message you want to send in the sms, is you don't specify this argument only the rule name is sent Example usage:: - jira_transition_to: 'Fixed' - + alert: + - "exotel" + exotel_account_sid: "Exotel Account sid" + exotel_auth_token: "Exotel Auth token" + exotel_to_number: "Exotel to Number" + exotel_from_number: "Exotel from Numbeer" +Gitter +~~~~~~ -``jira_bump_after_inactivity``: If this is set, ElastAlert will only comment on tickets that have been inactive for at least this many days. -It only applies if ``jira_bump_tickets`` is true. Default is 0 days. +Gitter alerter will send a notification to a predefined Gitter channel. The body of the notification is formatted the same as with other alerters. -Arbitrary Jira fields: +The alerter requires the following option: -ElastAlert supports setting any arbitrary JIRA field that your jira issue supports. For example, if you had a custom field, called "Affected User", you can set it by providing that field name in ``snake_case`` prefixed with ``jira_``. These fields can contain primitive strings or arrays of strings. Note that when you create a custom field in your JIRA server, internally, the field is represented as ``customfield_1111``. In elastalert, you may refer to either the public facing name OR the internal representation. +``gitter_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Integration Settings +of the channel https://gitter.im/ORGA/CHANNEL#integrations , click 'CUSTOM' and copy the resulting URL. -In addition, if you would like to use a field in the alert as the value for a custom JIRA field, use the field name plus a # symbol in front. For example, if you wanted to set a custom JIRA field called "user" to the value of the field "username" from the match, you would use the following. +Optional: -Example:: +``gitter_msg_level``: By default the alert will be posted with the 'error' level. You can use 'info' if you want the messages to be black instead of red. - jira_user: "#username" +``gitter_proxy``: By default ElastAlert will not use a network proxy to send notifications to Gitter. Set this option using ``hostname:port`` if you need to use a proxy. 
Example usage:: - jira_arbitrary_singular_field: My Name - jira_arbitrary_multivalue_field: - - Name 1 - - Name 2 - jira_customfield_12345: My Custom Value - jira_customfield_9999: - - My Custom Value 1 - - My Custom Value 2 - -OpsGenie -~~~~~~~~ - -OpsGenie alerter will create an alert which can be used to notify Operations people of issues or log information. An OpsGenie ``API`` -integration must be created in order to acquire the necessary ``opsgenie_key`` rule variable. Currently the OpsGenieAlerter only creates -an alert, however it could be extended to update or close existing alerts. + alert: + - "gitter" + gitter_webhook_url: "Your Gitter Webhook URL" + gitter_msg_level: "error" -It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts. +GoogleChat +~~~~~~~~~~ +GoogleChat alerter will send a notification to a predefined GoogleChat channel. The body of the notification is formatted the same as with other alerters. -The OpsGenie alert requires one option: +The alerter requires the following options: -``opsgenie_key``: The randomly generated API Integration key created by OpsGenie. +``googlechat_webhook_url``: The webhook URL that includes the channel (room) you want to post to. Go to the Google Chat website https://chat.google.com and choose the channel in which you wish to receive the notifications. Select 'Configure Webhooks' to create a new webhook or to copy the URL from an existing one. You can use a list of URLs to send to multiple channels. Optional: -``opsgenie_account``: The OpsGenie account to integrate with. -``opsgenie_addr``: The OpsGenie URL to to connect against, default is ``https://api.opsgenie.com/v2/alerts`` -``opsgenie_recipients``: A list OpsGenie recipients who will be notified by the alert. -``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients. -``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. -``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation). -``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data) -``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccesful. -``opsgenie_tags``: A list of tags for this alert. - -``opsgenie_message``: Set the OpsGenie message to something other than the rule name. The message can be formatted with fields from the first match e.g. "Error occurred for {app_name} at {timestamp}.". - -``opsgenie_alias``: Set the OpsGenie alias. The alias can be formatted with fields from the first match e.g "{app_name} error". - -``opsgenie_subject``: A string used to create the title of the OpsGenie alert. Can use Python string formatting. - -``opsgenie_subject_args``: A list of fields to use to format ``opsgenie_subject`` if it contains formaters. - -``opsgenie_priority``: Set the OpsGenie priority level. Possible values are P1, P2, P3, P4, P5. - -``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can sourced from either fields in the first match, environment variables, or a constant value. +``googlechat_format``: Formatting for the notification. Can be either 'card' or 'basic' (default). -``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. 
+``googlechat_header_title``: Sets the text for the card header title. (Only used if format=card) -Example usage:: +``googlechat_header_subtitle``: Sets the text for the card header subtitle. (Only used if format=card) - opsgenie_details: - Author: 'Bob Smith' # constant value - Environment: '$VAR' # environment variable - Message: { field: message } # field in the first match +``googlechat_header_image``: URL for the card header icon. (Only used if format=card) -AWS SES -~~~~~~~ +``googlechat_footer_kibanalink``: URL to Kibana to include in the card footer. (Only used if format=card) -The AWS SES alerter is similar to Email alerter but uses AWS SES to send emails. The AWS SES alerter can use AWS credentials -from the rule yaml, standard AWS config files or environment variables. +HTTP POST +~~~~~~~~~ -AWS SES requires one option: +This alert type will send results to a JSON endpoint using HTTP POST. The key names are configurable so this is compatible with almost any endpoint. By default, the JSON will contain all the items from the match, unless you specify http_post_payload, in which case it will only contain those items. -``ses_email``: An address or list of addresses to sent the alert to. +Required: -``ses_from_addr``: This sets the From header in the email. +``http_post_url``: The URL to POST. Optional: -``ses_aws_access_key``: An access key to connect to AWS SES with. - -``ses_aws_secret_key``: The secret key associated with the access key. - -``ses_aws_region``: The AWS region in which the AWS SES resource is located. Default is us-east-1 +``http_post_payload``: List of keys:values to use as the content of the POST. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip. If not defined, all the Elasticsearch keys will be sent. -``ses_aws_profile``: The AWS profile to use. If none specified, the default will be used. +``http_post_static_payload``: Key:value pairs of static parameters to be sent, along with the Elasticsearch results. Put your authentication or other information here. -``ses_email_reply_to``: This sets the Reply-To header in the email. +``http_post_headers``: Key:value pairs of headers to be sent as part of the request. -``ses_cc``: This adds the CC emails to the list of recipients. By default, this is left empty. +``http_post_proxy``: URL of proxy, if required. -``ses_bcc``: This adds the BCC emails to the list of recipients but does not show up in the email message. By default, this is left empty. +``http_post_all_values``: Boolean of whether or not to include every key value pair from the match in addition to those in http_post_payload and http_post_static_payload. Defaults to True if http_post_payload is not specified, otherwise False. -Example When not using aws_profile usage:: +``http_post_timeout``: The timeout value, in seconds, for making the post. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. 
- alert: - - "ses" - ses_aws_access_key_id: "XXXXXXXXXXXXXXXXXX'" - ses_aws_secret_access_key: "YYYYYYYYYYYYYYYYYYYY" - ses_aws_region: "us-east-1" - ses_from_addr: "xxxx1@xxx.com" - ses_email: "xxxx1@xxx.com" +Example usage:: -Example When to use aws_profile usage:: + alert: post + http_post_url: "http://example.com/api" + http_post_payload: + ip: clientip + http_post_static_payload: + apikey: abc123 + http_post_headers: + authorization: Basic 123dr3234 - # Create ~/.aws/credentials +JIRA +~~~~ - [default] - aws_access_key_id = xxxxxxxxxxxxxxxxxxxx - aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +The JIRA alerter will open a ticket on jira whenever an alert is triggered. You must have a service account for ElastAlert to connect with. +The credentials of the service account are loaded from a separate file. The ticket number will be written to the alert pipeline, and if it +is followed by an email alerter, a link will be included in the email. - # Create ~/.aws/config +This alert requires four additional options: - [default] - region = us-east-1 +``jira_server``: The hostname of the JIRA server. - # alert rule setting +``jira_project``: The project to open the ticket under. - alert: - - "ses" - ses_aws_profile: "default" - ses_from_addr: "xxxx1@xxx.com" - ses_email: "xxxx1@xxx.com" +``jira_issuetype``: The type of issue that the ticket will be filed as. Note that this is case sensitive. -AWS SNS -~~~~~~~ +``jira_account_file``: The path to the file which contains JIRA account credentials. -The AWS SNS alerter will send an AWS SNS notification. The body of the notification is formatted the same as with other alerters. -The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or -via environment variables. See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html for details. +For an example JIRA account file, see ``example_rules/jira_acct.yaml``. The account file is also yaml formatted and must contain two fields: -AWS SNS requires one option: +``user``: The username. -``sns_topic_arn``: The SNS topic's ARN. For example, ``arn:aws:sns:us-east-1:123456789:somesnstopic`` +``password``: The password. Optional: -``sns_aws_access_key_id``: An access key to connect to SNS with. - -``sns_aws_secret_access_key``: The secret key associated with the access key. - -``sns_aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 - -``sns_aws_profile``: The AWS profile to use. If none specified, the default will be used. - -Example When not using aws_profile usage:: +``jira_assignee``: Assigns an issue to a user. - alert: - - sns - sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' - sns_aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' - sns_aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' - sns_aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. - -Example When to use aws_profile usage:: +``jira_component``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_components`` instead. - # Create ~/.aws/credentials +``jira_components``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. 
- [default] - aws_access_key_id = xxxxxxxxxxxxxxxxxxxx - aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +``jira_description``: Similar to ``alert_text``, this text is prepended to the JIRA description. - # Create ~/.aws/config +``jira_label``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_labels`` instead. - [default] - region = us-east-1 +``jira_labels``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. - # alert rule setting +``jira_priority``: The index of the priority to set the issue to. In the JIRA dropdown for priorities, 0 would represent the first priority, +1 the 2nd, etc. - alert: - - sns - sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' - sns_aws_profile: 'default' +``jira_watchers``: A list of user names to add as watchers on a JIRA ticket. This can be a single string or a list of strings. -MS Teams -~~~~~~~~ +``jira_bump_tickets``: If true, ElastAlert search for existing tickets newer than ``jira_max_age`` and comment on the ticket with +information about the alert instead of opening another ticket. ElastAlert finds the existing ticket by searching by summary. If the +summary has changed or contains special characters, it may fail to find the ticket. If you are using a custom ``alert_subject``, +the two summaries must be exact matches, except by setting ``jira_ignore_in_title``, you can ignore the value of a field when searching. +For example, if the custom subject is "foo occured at bar", and "foo" is the value field X in the match, you can set ``jira_ignore_in_title`` +to "X" and it will only bump tickets with "bar" in the subject. Defaults to false. -MS Teams alerter will send a notification to a predefined Microsoft Teams channel. +``jira_ignore_in_title``: ElastAlert will attempt to remove the value for this field from the JIRA subject when searching for tickets to bump. +See ``jira_bump_tickets`` description above for an example. -The alerter requires the following options: +``jira_max_age``: If ``jira_bump_tickets`` is true, the maximum age of a ticket, in days, such that ElastAlert will comment on the ticket +instead of opening a new one. Default is 30 days. -``ms_teams_webhook_url``: The webhook URL that includes your auth data and the ID of the channel you want to post to. Go to the Connectors -menu in your channel and configure an Incoming Webhook, then copy the resulting URL. You can use a list of URLs to send to multiple channels. +``jira_bump_not_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket must **not** be in for ElastAlert to comment on +the ticket instead of opening a new one. For example, to prevent comments being added to resolved or closed tickets, set this to 'Resolved' +and 'Closed'. This option should not be set if the ``jira_bump_in_statuses`` option is set. -``ms_teams_alert_summary``: Summary should be configured according to `MS documentation `_, although it seems not displayed by Teams currently. +Example usage:: -Optional: + jira_bump_not_in_statuses: + - Resolved + - Closed -``ms_teams_theme_color``: By default the alert will be posted without any color line. To add color, set this attribute to a HTML color value e.g. ``#ff0000`` for red. 
+``jira_bump_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket *must be in* for ElastAlert to comment on +the ticket instead of opening a new one. For example, to only comment on 'Open' tickets -- and thus not 'In Progress', 'Analyzing', +'Resolved', etc. tickets -- set this to 'Open'. This option should not be set if the ``jira_bump_not_in_statuses`` option is set. -``ms_teams_proxy``: By default ElastAlert will not use a network proxy to send notifications to MS Teams. Set this option using ``hostname:port`` if you need to use a proxy. +Example usage:: -``ms_teams_alert_fixed_width``: By default this is ``False`` and the notification will be sent to MS Teams as-is. Teams supports a partial Markdown implementation, which means asterisk, underscore and other characters may be interpreted as Markdown. Currenlty, Teams does not fully implement code blocks. Setting this attribute to ``True`` will enable line by line code blocks. It is recommended to enable this to get clearer notifications in Teams. + jira_bump_in_statuses: + - Open -Slack -~~~~~ +``jira_bump_only``: Only update if a ticket is found to bump. This skips ticket creation for rules where you only want to affect existing tickets. -Slack alerter will send a notification to a predefined Slack channel. The body of the notification is formatted the same as with other alerters. +Example usage:: -The alerter requires the following option: + jira_bump_only: true -``slack_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Incoming Webhooks -section in your Slack account https://XXXXX.slack.com/services/new/incoming-webhook , choose the channel, click 'Add Incoming Webhooks Integration' -and copy the resulting URL. You can use a list of URLs to send to multiple channels. +``jira_transition_to``: If ``jira_bump_tickets`` is true, Transition this ticket to the given Status when bumping. Must match the text of your JIRA implementation's Status field. -Optional: +Example usage:: -``slack_username_override``: By default Slack will use your username when posting to the channel. Use this option to change it (free text). + jira_transition_to: 'Fixed' -``slack_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified "#other-channel", and a Direct Message with "@username". -``slack_emoji_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can use a different emoji per -ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. -``slack_icon_url_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can provide icon_url to use custom image. -Provide absolute address of the pciture. +``jira_bump_after_inactivity``: If this is set, ElastAlert will only comment on tickets that have been inactive for at least this many days. +It only applies if ``jira_bump_tickets`` is true. Default is 0 days. -``slack_msg_color``: By default the alert will be posted with the 'danger' color. You can also use 'good' or 'warning' colors. +Arbitrary Jira fields: -``slack_parse_override``: By default the notification message is escaped 'none'. You can also use 'full'. +ElastAlert supports setting any arbitrary JIRA field that your jira issue supports. 
For example, if you had a custom field, called "Affected User", you can set it by providing that field name in ``snake_case`` prefixed with ``jira_``. These fields can contain primitive strings or arrays of strings. Note that when you create a custom field in your JIRA server, internally, the field is represented as ``customfield_1111``. In elastalert, you may refer to either the public facing name OR the internal representation. -``slack_text_string``: Notification message you want to add. +In addition, if you would like to use a field in the alert as the value for a custom JIRA field, use the field name plus a # symbol in front. For example, if you wanted to set a custom JIRA field called "user" to the value of the field "username" from the match, you would use the following. -``slack_proxy``: By default ElastAlert will not use a network proxy to send notifications to Slack. Set this option using ``hostname:port`` if you need to use a proxy. +Example:: -``slack_alert_fields``: You can add additional fields to your slack alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. + jira_user: "#username" -``slack_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to False if you want to ignore SSL errors. +Example usage:: -``slack_title``: Sets a title for the message, this shows up as a blue text at the start of the message + jira_arbitrary_singular_field: My Name + jira_arbitrary_multivalue_field: + - Name 1 + - Name 2 + jira_customfield_12345: My Custom Value + jira_customfield_9999: + - My Custom Value 1 + - My Custom Value 2 -``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set. +Line Notify +~~~~~~~~~~~ -``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. +Line Notify will send notification to a Line application. The body of the notification is formatted the same as with other alerters. -``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. +Required: -``slack_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``. +``linenotify_access_token``: The access token that you got from https://notify-bot.line.me/my/ -``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. +Example usage:: -``slack_ca_certs``: path to a CA cert bundle to use to verify SSL connections. + alert: + - "linenotify" + linenotify_access_token: "Your linenotify access token" Mattermost ~~~~~~~~~~ @@ -1953,47 +2171,90 @@ Provide absolute address of the picture or Base64 data url. ``mattermost_msg_fields``: You can add fields to your Mattermost alerts using this option. You can specify the title using `title` and the text value using `value`. Additionally you can specify whether this field should be a `short` field using `short: true`. If you set `args` and `value` is a formattable string, ElastAlert will format the incident key based on the provided array of fields from the rule or match. 
See https://docs.mattermost.com/developer/message-attachments.html#fields for more information. +Example mattermost_msg_fields:: -Telegram -~~~~~~~~ -Telegram alerter will send a notification to a predefined Telegram username or channel. The body of the notification is formatted the same as with other alerters. + mattermost_msg_fields: + - title: Stack + value: "{0} {1}" # interpolate fields mentioned in args + short: false + args: ["type", "msg.status_code"] # fields from doc + - title: Name + value: static field + short: false -The alerter requires the following two options: +Microsoft Teams +~~~~~~~~~~~~~~~ -``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#6-botfather +Microsoft Teams alerter will send a notification to a predefined Microsoft Teams channel. -``telegram_room_id``: Unique identifier for the target chat or username of the target channel using telegram chat_id (in the format "-xxxxxxxx") +The alerter requires the following options: + +``ms_teams_webhook_url``: The webhook URL that includes your auth data and the ID of the channel you want to post to. Go to the Connectors +menu in your channel and configure an Incoming Webhook, then copy the resulting URL. You can use a list of URLs to send to multiple channels. + +``ms_teams_alert_summary``: Summary should be configured according to `MS documentation `_, although it seems not displayed by Teams currently. Optional: -``telegram_api_url``: Custom domain to call Telegram Bot API. Default to api.telegram.org +``ms_teams_theme_color``: By default the alert will be posted without any color line. To add color, set this attribute to a HTML color value e.g. ``#ff0000`` for red. -``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy. +``ms_teams_proxy``: By default ElastAlert will not use a network proxy to send notifications to MS Teams. Set this option using ``hostname:port`` if you need to use a proxy. -``telegram_proxy_login``: The Telegram proxy auth username. +``ms_teams_alert_fixed_width``: By default this is ``False`` and the notification will be sent to MS Teams as-is. Teams supports a partial Markdown implementation, which means asterisk, underscore and other characters may be interpreted as Markdown. Currenlty, Teams does not fully implement code blocks. Setting this attribute to ``True`` will enable line by line code blocks. It is recommended to enable this to get clearer notifications in Teams. -``telegram_proxy_pass``: The Telegram proxy auth password. +Example usage:: -GoogleChat -~~~~~~~~~~ -GoogleChat alerter will send a notification to a predefined GoogleChat channel. The body of the notification is formatted the same as with other alerters. + alert: + - "ms_teams" + ms_teams_alert_summary: "Alert" + ms_teams_theme_color: "#6600ff" + ms_teams_webhook_url: "MS Teams Webhook URL" -The alerter requires the following options: +OpsGenie +~~~~~~~~ -``googlechat_webhook_url``: The webhook URL that includes the channel (room) you want to post to. Go to the Google Chat website https://chat.google.com and choose the channel in which you wish to receive the notifications. Select 'Configure Webhooks' to create a new webhook or to copy the URL from an existing one. 
You can use a list of URLs to send to multiple channels.
 
+OpsGenie alerter will create an alert which can be used to notify Operations people of issues or log information. An OpsGenie ``API``
+integration must be created in order to acquire the necessary ``opsgenie_key`` rule variable. Currently the OpsGenieAlerter only creates
+an alert; however, it could be extended to update or close existing alerts.
+
+It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts.
+
+The OpsGenie alert requires one option:
+
+``opsgenie_key``: The randomly generated API Integration key created by OpsGenie.
 
 Optional:
 
-``googlechat_format``: Formatting for the notification. Can be either 'card' or 'basic' (default).
+``opsgenie_account``: The OpsGenie account to integrate with.
+``opsgenie_addr``: The OpsGenie URL to connect against, default is ``https://api.opsgenie.com/v2/alerts``.
+``opsgenie_recipients``: A list of OpsGenie recipients who will be notified by the alert.
+``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients.
+``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccessful.
+``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation).
+``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data).
+``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccessful.
+``opsgenie_tags``: A list of tags for this alert.
 
-``googlechat_header_title``: Sets the text for the card header title. (Only used if format=card)
+``opsgenie_message``: Set the OpsGenie message to something other than the rule name. The message can be formatted with fields from the first match e.g. "Error occurred for {app_name} at {timestamp}.".
 
-``googlechat_header_subtitle``: Sets the text for the card header subtitle. (Only used if format=card)
+``opsgenie_alias``: Set the OpsGenie alias. The alias can be formatted with fields from the first match e.g. "{app_name} error".
 
-``googlechat_header_image``: URL for the card header icon. (Only used if format=card)
+``opsgenie_subject``: A string used to create the title of the OpsGenie alert. Can use Python string formatting.
 
-``googlechat_footer_kibanalink``: URL to Kibana to include in the card footer. (Only used if format=card)
+``opsgenie_subject_args``: A list of fields to use to format ``opsgenie_subject`` if it contains formatters.
+
+``opsgenie_priority``: Set the OpsGenie priority level. Possible values are P1, P2, P3, P4, P5.
+
+``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can be sourced from either fields in the first match, environment variables, or a constant value.
+
+``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy.
+
+Example usage::
+
+    opsgenie_details:
+      Author: 'Bob Smith'          # constant value
+      Environment: '$VAR'          # environment variable
+      Message: { field: message }  # field in the first match
 
 PagerDuty
 ~~~~~~~~~
@@ -2051,7 +2312,6 @@ See https://developer.pagerduty.com/docs/events-api-v2/trigger-events/
 
 ``pagerduty_v2_payload_include_all_info``: If True, this will include the entire Elasticsearch document as a custom detail field called "information" in the PagerDuty alert.
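+
+Example usage (a minimal sketch; it assumes the standard ``pagerduty_service_key`` and ``pagerduty_client_name`` options documented earlier in this section)::
+
+    alert:
+      - "pagerduty"
+    pagerduty_service_key: "PagerDuty Integration Key"  # key from the PagerDuty service's Events API integration
+    pagerduty_client_name: "ElastAlert"
+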
- PagerTree ~~~~~~~~~ @@ -2061,107 +2321,28 @@ The alerter requires the following options: ``pagertree_integration_url``: URL generated by PagerTree for the integration. -``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using hostname:port if you need to use a proxy. - -Exotel -~~~~~~ - -Developers in India can use Exotel alerter, it will trigger an incident to a mobile phone as sms from your exophone. Alert name along with the message body will be sent as an sms. - -The alerter requires the following option: - -``exotel_account_sid``: This is sid of your Exotel account. - -``exotel_auth_token``: Auth token assosiated with your Exotel account. - -If you don't know how to find your accound sid and auth token, refer - https://support.exotel.com/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid - -``exotel_to_number``: The phone number where you would like send the notification. - -``exotel_from_number``: Your exophone number from which message will be sent. - -The alerter has one optional argument: - -``exotel_message_body``: Message you want to send in the sms, is you don't specify this argument only the rule name is sent - - -Twilio -~~~~~~ - -Twilio alerter will trigger an incident to a mobile phone as an sms from your twilio phone number. The sms will contain the alert name. You may use either twilio SMS or twilio copilot -to send the message, controlled by the ``twilio_use_copilot`` option. - -Note that when twilio copilot *is* used the ``twilio_message_service_sid`` option is required. Likewise, when *not* using twilio copilot, the ``twilio_from_number`` option is required. - -The alerter requires the following options: - -``twilio_account_sid``: This is sid of your twilio account. - -``twilio_auth_token``: Auth token assosiated with your twilio account. - -``twilio_to_number``: The phone number where you would like send the notification. - -Either one of - * ``twilio_from_number``: Your twilio phone number from which message will be sent. - * ``twilio_message_service_sid``: The SID of your twilio message service. - -Optional: - -``twilio_use_copilot``: Whether or not to use twilio copilot, False by default. +``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using ``hostname:port`` if you need to use a proxy. Example usage:: alert: - - twilio # With Copilot - twilio_use_copilot: True - twilio_to_number: "0123456789" - twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" - twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" - twilio_message_service_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" - - - twilio # With Legacy SMS - twilio_use_copilot: False - twilio_to_number: "0123456789" - twilio_from_number: "9876543210" - twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" - twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" - -Splunk On-Call (Formerly VictorOps) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Splunk On-Call (Formerly VictorOps) alerter will trigger an incident to a predefined Splunk On-Call (Formerly VictorOps) routing key. The body of the notification is formatted the same as with other alerters. - -The alerter requires the following options: - -``victorops_api_key``: API key generated under the 'REST Endpoint' in the Integrations settings. - -``victorops_routing_key``: Splunk On-Call (Formerly VictorOps) routing key to route the alert to. 
- -``victorops_message_type``: Splunk On-Call (Formerly VictorOps) field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY - -Optional: - -``victorops_entity_id``: The identity of the incident used by Splunk On-Call (Formerly VictorOps) to correlate incidents throughout the alert lifecycle. If not defined, Splunk On-Call (Formerly VictorOps) will assign a random string to each alert. - -``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow. - -``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy. - -Gitter -~~~~~~ - -Gitter alerter will send a notification to a predefined Gitter channel. The body of the notification is formatted the same as with other alerters. + - "pagertree" + pagertree_integration_url: "PagerTree Integration URL" -The alerter requires the following option: +Squadcast +~~~~~~~~~ -``gitter_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Integration Settings -of the channel https://gitter.im/ORGA/CHANNEL#integrations , click 'CUSTOM' and copy the resulting URL. +Alerts can be sent to Squadcast using the `http post` method described above and Squadcast will process it and send Phone, SMS, Email and Push notifications to the relevant person(s) and let them take actions. -Optional: +Configuration variables in rules YAML file:: -``gitter_msg_level``: By default the alert will be posted with the 'error' level. You can use 'info' if you want the messages to be black instead of red. + alert: post + http_post_url: + http_post_static_payload: + Title: + http_post_all_values: true -``gitter_proxy``: By default ElastAlert will not use a network proxy to send notifications to Gitter. Set this option using ``hostname:port`` if you need to use a proxy. +For more details, you can refer the `Squadcast documentation `_. ServiceNow ~~~~~~~~~~ @@ -2195,186 +2376,174 @@ Optional: ``servicenow_proxy``: By default ElastAlert will not use a network proxy to send notifications to ServiceNow. Set this option using ``hostname:port`` if you need to use a proxy. +Example usage:: -Debug -~~~~~ - -The debug alerter will log the alert information using the Python logger at the info level. It is logged into a Python Logger object with the name ``elastalert`` that can be easily accessed using the ``getLogger`` command. + alert: + - "servicenow" + servicenow_rest_url: "servicenow rest url" + username: "user" + password: "password" + short_description: "xxxxxx" + comments: "xxxxxx" + assignment_group: "xxxxxx" + category: "xxxxxx" + subcategory: "xxxxxx" + cmdb_ci: "xxxxxx" + caller_id: "xxxxxx" -Stomp +Slack ~~~~~ -This alert type will use the STOMP protocol in order to push a message to a broker like ActiveMQ or RabbitMQ. The message body is a JSON string containing the alert details. -The default values will work with a pristine ActiveMQ installation. - -The alerter requires the following options: - -``stomp_hostname``: The STOMP host to use, defaults to localhost. - -``stomp_hostport``: The STOMP port to use, defaults to 61613. +Slack alerter will send a notification to a predefined Slack channel. The body of the notification is formatted the same as with other alerters. -``stomp_login``: The STOMP login to use, defaults to admin. 
+The alerter requires the following option: -``stomp_password``: The STOMP password to use, defaults to admin. +``slack_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Incoming Webhooks +section in your Slack account https://XXXXX.slack.com/services/new/incoming-webhook , choose the channel, click 'Add Incoming Webhooks Integration' +and copy the resulting URL. You can use a list of URLs to send to multiple channels. Optional: -``stomp_ssl``: Connect the STOMP host using TLS, defaults to False. - -``stomp_destination``: The STOMP destination to use, defaults to /queue/ALERT - -The stomp_destination field depends on the broker, the /queue/ALERT example is the nomenclature used by ActiveMQ. Each broker has its own logic. +``slack_username_override``: By default Slack will use your username when posting to the channel. Use this option to change it (free text). -Alerta -~~~~~~ +``slack_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified "#other-channel", and a Direct Message with "@username". -Alerta alerter will post an alert in the Alerta server instance through the alert API endpoint. -See https://docs.alerta.io/en/latest/api/alert.html for more details on the Alerta JSON format. +``slack_emoji_override``: By default ElastAlert will use the ``:ghost:`` emoji when posting to the channel. You can use a different emoji per +ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. -For Alerta 5.0 +``slack_icon_url_override``: By default ElastAlert will use the ``:ghost:`` emoji when posting to the channel. You can provide icon_url to use custom image. +Provide absolute address of the pciture. -Required: +``slack_msg_color``: By default the alert will be posted with the 'danger' color. You can also use 'good' or 'warning' colors. -``alerta_api_url``: API server URL. +``slack_parse_override``: By default the notification message is escaped 'none'. You can also use 'full'. -Optional: +``slack_text_string``: Notification message you want to add. -``alerta_api_key``: This is the api key for alerta server, sent in an ``Authorization`` HTTP header. If not defined, no Authorization header is sent. +``slack_proxy``: By default ElastAlert will not use a network proxy to send notifications to Slack. Set this option using ``hostname:port`` if you need to use a proxy. -``alerta_use_qk_as_resource``: If true and query_key is present, this will override ``alerta_resource`` field with the ``query_key value`` (Can be useful if ``query_key`` is a hostname). +``slack_alert_fields``: You can add additional fields to your slack alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. -``alerta_use_match_timestamp``: If true, it will use the timestamp of the first match as the ``createTime`` of the alert. otherwise, the current server time is used. +Example slack_alert_fields:: -``alerta_api_skip_ssl``: Defaults to False. + slack_alert_fields: + - title: Host + value: monitor.host + short: true + - title: Status + value: monitor.status + short: true + - title: Zone + value: beat.name + short: true -``alert_missing_value``: Text to replace any match field not found when formating strings. Defaults to ````. 
+``slack_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. -The following options dictate the values of the API JSON payload: +``slack_title``: Sets a title for the message, this shows up as a blue text at the start of the message -``alerta_severity``: Defaults to "warning". +``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set. -``alerta_timeout``: Defaults 84600 (1 Day). +``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. -``alerta_type``: Defaults to "elastalert". +``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. -The following options use Python-like string syntax ``{}`` or ``%()s`` to access parts of the match, similar to the CommandAlerter. Ie: "Alert for {clientip}". -If the referenced key is not found in the match, it is replaced by the text indicated by the option ``alert_missing_value``. +``slack_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``. -``alerta_resource``: Defaults to "elastalert". +``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. -``alerta_service``: Defaults to "elastalert". +``slack_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. -``alerta_origin``: Defaults to "elastalert". +Splunk On-Call (Formerly VictorOps) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -``alerta_environment``: Defaults to "Production". +Splunk On-Call (Formerly VictorOps) alerter will trigger an incident to a predefined Splunk On-Call (Formerly VictorOps) routing key. The body of the notification is formatted the same as with other alerters. -``alerta_group``: Defaults to "". +The alerter requires the following options: -``alerta_correlate``: Defaults to an empty list. +``victorops_api_key``: API key generated under the 'REST Endpoint' in the Integrations settings. -``alerta_tags``: Defaults to an empty list. +``victorops_routing_key``: Splunk On-Call (Formerly VictorOps) routing key to route the alert to. -``alerta_event``: Defaults to the rule's name. +``victorops_message_type``: Splunk On-Call (Formerly VictorOps) field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY -``alerta_text``: Defaults to the rule's text according to its type. +Optional: -``alerta_value``: Defaults to "". +``victorops_entity_id``: The identity of the incident used by Splunk On-Call (Formerly VictorOps) to correlate incidents throughout the alert lifecycle. If not defined, Splunk On-Call (Formerly VictorOps) will assign a random string to each alert. -The ``attributes`` dictionary is built by joining the lists from ``alerta_attributes_keys`` and ``alerta_attributes_values``, considered in order. +``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow. +``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). 
Set this option using ``hostname:port`` if you need to use a proxy. -Example usage using old-style format:: +Example usage:: alert: - - alerta - alerta_api_url: "http://youralertahost/api/alert" - alerta_attributes_keys: ["hostname", "TimestampEvent", "senderIP" ] - alerta_attributes_values: ["%(key)s", "%(logdate)s", "%(sender_ip)s" ] - alerta_correlate: ["ProbeUP","ProbeDOWN"] - alerta_event: "ProbeUP" - alerta_text: "Probe %(hostname)s is UP at %(logdate)s GMT" - alerta_value: "UP" + - "victorops" + victorops_api_key: "VictorOps API Key" + victorops_routing_key: "VictorOps routing Key" + victorops_message_type: "INFO" -Example usage using new-style format:: - - alert: - - alerta - alerta_attributes_values: ["{key}", "{logdate}", "{sender_ip}" ] - alerta_text: "Probe {hostname} is UP at {logdate} GMT" +Stomp +~~~~~ +This alert type will use the STOMP protocol in order to push a message to a broker like ActiveMQ or RabbitMQ. The message body is a JSON string containing the alert details. +The default values will work with a pristine ActiveMQ installation. +The alerter requires the following options: -HTTP POST -~~~~~~~~~ +``stomp_hostname``: The STOMP host to use, defaults to ``localhost``. -This alert type will send results to a JSON endpoint using HTTP POST. The key names are configurable so this is compatible with almost any endpoint. By default, the JSON will contain all the items from the match, unless you specify http_post_payload, in which case it will only contain those items. +``stomp_hostport``: The STOMP port to use, defaults to ``61613``. -Required: +``stomp_login``: The STOMP login to use, defaults to ``admin``. -``http_post_url``: The URL to POST. +``stomp_password``: The STOMP password to use, defaults to ``admin``. Optional: -``http_post_payload``: List of keys:values to use as the content of the POST. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip. If not defined, all the Elasticsearch keys will be sent. - -``http_post_static_payload``: Key:value pairs of static parameters to be sent, along with the Elasticsearch results. Put your authentication or other information here. - -``http_post_headers``: Key:value pairs of headers to be sent as part of the request. - -``http_post_proxy``: URL of proxy, if required. +``stomp_ssl``: Connect the STOMP host using TLS, defaults to ``False``. -``http_post_all_values``: Boolean of whether or not to include every key value pair from the match in addition to those in http_post_payload and http_post_static_payload. Defaults to True if http_post_payload is not specified, otherwise False. +``stomp_destination``: The STOMP destination to use, defaults to ``/queue/ALERT`` -``http_post_timeout``: The timeout value, in seconds, for making the post. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. +The stomp_destination field depends on the broker, the /queue/ALERT example is the nomenclature used by ActiveMQ. Each broker has its own logic. Example usage:: - alert: post - http_post_url: "http://example.com/api" - http_post_payload: - ip: clientip - http_post_static_payload: - apikey: abc123 - http_post_headers: - authorization: Basic 123dr3234 - -Squadcast -~~~~~~~~~ - -Alerts can be sent to Squadcast using the `http post` method described above and Squadcast will process it and send Phone, SMS, Email and Push notifications to the relevant person(s) and let them take actions. 
- -Configuration variables in rules YAML file:: - - alert: post - http_post_url: - http_post_static_payload: - Title: - http_post_all_values: true + alert: + - "stomp" + stomp_hostname: "localhost" + stomp_hostport: "61613" + stomp_login: "admin" + stomp_password: "admin" + stomp_destination: "/queue/ALERT" -For more details, you can refer the `Squadcast documentation `_. +Telegram +~~~~~~~~ +Telegram alerter will send a notification to a predefined Telegram username or channel. The body of the notification is formatted the same as with other alerters. -Alerter -~~~~~~~ +The alerter requires the following two options: -For all Alerter subclasses, you may reference values from a top-level rule property in your Alerter fields by referring to the property name surrounded by dollar signs. This can be useful when you have rule-level properties that you would like to reference many times in your alert. For example: +``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#6-botfather -Example usage:: +``telegram_room_id``: Unique identifier for the target chat or username of the target channel using telegram chat_id (in the format "-xxxxxxxx") - jira_priority: $priority$ - jira_alert_owner: $owner$ +Optional: +``telegram_api_url``: Custom domain to call Telegram Bot API. Default to api.telegram.org +``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy. -Line Notify -~~~~~~~~~~~ +``telegram_proxy_login``: The Telegram proxy auth username. -Line Notify will send notification to a Line application. The body of the notification is formatted the same as with other alerters. +``telegram_proxy_pass``: The Telegram proxy auth password. -Required: +Example usage:: -``linenotify_access_token``: The access token that you got from https://notify-bot.line.me/my/ + alert: + - "telegram" + telegram_bot_token: "bot_token" + telegram_room_id: "chat_id" -theHive +TheHive ~~~~~~~ theHive alert type will send JSON request to theHive (Security Incident Response Platform) with TheHive4py API. Sent request will be stored like Hive Alert with description and observables. 
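As a supplement to the Telegram example above, a rule that has to reach Telegram through a proxy could combine the proxy options roughly as in the following sketch (the proxy host, port and credentials are placeholders)::

    alert:
      - "telegram"
    telegram_bot_token: "bot_token"
    telegram_room_id: "chat_id"
    telegram_proxy: "proxyhost:3128"
    telegram_proxy_login: "proxy_user"
    telegram_proxy_pass: "proxy_password"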
@@ -2397,143 +2566,90 @@ Example usage:: alert: hivealerter - hive_connection: - hive_host: http://localhost - hive_port: - hive_apikey: - hive_proxies: - http: '' - https: '' - - hive_alert_config: - title: 'Title' ## This will default to {rule[index]_rule[name]} if not provided - type: 'external' - source: 'elastalert' - description: '{match[field1]} {rule[name]} Sample description' - severity: 2 - tags: ['tag1', 'tag2 {rule[name]}'] - tlp: 3 - status: 'New' - follow: True + hive_connection: + hive_host: http://localhost + hive_port: + hive_apikey: + hive_proxies: + http: '' + https: '' + + hive_alert_config: + title: 'Title' ## This will default to {rule[index]_rule[name]} if not provided + type: 'external' + source: 'elastalert' + description: '{match[field1]} {rule[name]} Sample description' + severity: 2 + tags: ['tag1', 'tag2 {rule[name]}'] + tlp: 3 + status: 'New' + follow: True hive_observable_data_mapping: - - domain: "{match[field1]}_{rule[name]}" - - domain: "{match[field]}" - - ip: "{match[ip_field]}" - - -Zabbix -~~~~~~~~~~~ + - domain: "{match[field1]}_{rule[name]}" + - domain: "{match[field]}" + - ip: "{match[ip_field]}" -Zabbix will send notification to a Zabbix server. The item in the host specified receive a 1 value for each hit. For example, if the elastic query produce 3 hits in the last execution of elastalert, three '1' (integer) values will be send from elastalert to Zabbix Server. If the query have 0 hits, any value will be sent. +Twilio +~~~~~~ -Required: +Twilio alerter will trigger an incident to a mobile phone as an SMS from your twilio phone number. The SMS will contain the alert name. You may use either twilio SMS or twilio copilot +to send the message, controlled by the ``twilio_use_copilot`` option. -``zbx_sender_host``: The address where zabbix server is running. -``zbx_sender_port``: The port where zabbix server is listenning. -``zbx_host``: This field setup the host in zabbix that receives the value sent by ElastAlert 2. -``zbx_key``: This field setup the key in the host that receives the value sent by ElastAlert 2. +Note that when twilio copilot *is* used, the ``twilio_message_service_sid`` option is required. Likewise, when *not* using twilio copilot, the ``twilio_from_number`` option is required. +The alerter requires the following options: -Discord -~~~~~~~ +``twilio_account_sid``: This is the SID of your twilio account. -Discord will send notification to a Discord application. The body of the notification is formatted the same as with other alerters. +``twilio_auth_token``: Auth token associated with your twilio account. -Required: +``twilio_to_number``: The phone number to which you would like to send the notification. -``discord_webhook_url``: The webhook URL. +Either one of + * ``twilio_from_number``: Your twilio phone number from which the message will be sent. + * ``twilio_message_service_sid``: The SID of your twilio message service. Optional: -``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. - -``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using hostname:port if you need to use a proxy. - -``discord_proxy_login``: The Discord proxy auth username. - -``discord_proxy_password``: The Discord proxy auth username.
- -``discord_embed_color``: embed color. By default ``0xffffff``. - -``discord_embed_footer``: embed footer. - -``discord_embed_icon_url``: You can provide icon_url to use custom image. Provide absolute address of the pciture. - -Dingtalk -~~~~~~~~ - -Dingtalk will send notification to a Dingtalk application. The body of the notification is formatted the same as with other alerters. - -Required: - -``dingtalk_access_token``: Dingtalk access token. - -``dingtalk_msgtype``: Dingtalk msgtype. ``text``, ``markdown``, ``single_action_card``, ``action_card``. - -dingtalk_msgtype single_action_card Required: - -``dingtalk_single_title``: The title of a single button.. - -``dingtalk_single_url``: Jump link for a single button. - -dingtalk_msgtype action_card Required: - -``dingtalk_btns``: Button. - -dingtalk_msgtype action_card Optional: - -``dingtalk_btn_orientation``: "0": Buttons are arranged vertically "1": Buttons are arranged horizontally. - -Example msgtype : text:: - - alert: - - dingtalk - dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - dingtalk_msgtype: 'text' - - -Example msgtype : markdown:: - - alert: - - dingtalk - dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - dingtalk_msgtype: 'markdown' - +``twilio_use_copilot``: Whether or not to use twilio copilot, False by default. -Example msgtype : single_action_card:: +Example With Copilot usage:: alert: - - dingtalk - dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - dingtalk_msgtype: 'single_action_card' - dingtalk_single_title: 'test3' - dingtalk_single_url: 'https://xxxx.xxx' - + - "twilio" + twilio_use_copilot: True + twilio_to_number: "0123456789" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + twilio_message_service_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" -Example msgtype : action_card:: +Example With SMS usage:: alert: - - dingtalk - dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - dingtalk_msgtype: 'action_card' - dingtalk_btn_orientation: '0' - dingtalk_btns: [{'title': 'a', 'actionURL': 'https://xxxx1.xxx'}, {'title': 'b', 'actionURL': 'https://xxxx2.xxx'}] + - "twilio" + twilio_to_number: "0123456789" + twilio_from_number: "9876543210" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" -Chatwork -~~~~~~~~ +Zabbix +~~~~~~ -Chatwork will send notification to a Chatwork application. The body of the notification is formatted the same as with other alerters. +Zabbix will send notification to a Zabbix server. The item in the specified host receives a value of 1 for each hit. For example, if the Elasticsearch query produces 3 hits in the last execution of elastalert, three '1' (integer) values will be sent from elastalert to the Zabbix server. If the query has 0 hits, no value will be sent. Required: -``chatwork_apikey``: ChatWork API KEY. - -``chatwork_room_id``: The ID of the room you are talking to in Chatwork. How to find the room ID is the part of the number after "rid" at the end of the URL of the browser. +``zbx_sender_host``: The address where the zabbix server is running. +``zbx_sender_port``: The port where the zabbix server is listening. +``zbx_host``: This field sets up the host in zabbix that receives the value sent by ElastAlert 2. +``zbx_key``: This field sets up the key in the host that receives the value sent by ElastAlert 2.
Example usage:: alert: - - chatwork - chatwork_apikey: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - chatwork_room_id: 'xxxxxxxxx' + - "zabbix" + zbx_sender_host: "zabbix-server" + zbx_sender_port: 10051 + zbx_host: "test001" + zbx_key: "sender_load1" \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 66bcc386..a64fe430 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,4 +8,4 @@ pylint<2.9 pytest<3.7.0 setuptools sphinx_rtd_theme -tox==3.23.0 +tox==3.23.1 diff --git a/requirements.txt b/requirements.txt index 478c7556..878d0355 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,6 @@ aws-requests-auth>=0.3.0 sortedcontainers>=2.2.2 boto3>=1.4.4 cffi>=1.11.5 -configparser>=3.5.0 croniter>=0.3.16 elasticsearch==7.0.0 envparse>=0.2.0 diff --git a/setup.py b/setup.py index c1dcc7bc..5423daba 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,6 @@ 'aws-requests-auth>=0.3.0', 'sortedcontainers>=2.2.2', 'boto3>=1.4.4', - 'configparser>=3.5.0', 'croniter>=0.3.16', 'elasticsearch==7.0.0', 'envparse>=0.2.0', diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 28aa5074..3af230a1 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -9,6 +9,7 @@ import mock import pytest from jira.exceptions import JIRAError +from requests.auth import HTTPProxyAuth from elastalert.alerts import AlertaAlerter from elastalert.alerts import Alerter @@ -723,315 +724,844 @@ def test_opsgenie_details_with_environment_variable_replacement(environ): assert expected_json == actual_json -def test_jira(): - description_txt = "Description stuff goes here like a runbook link." +def test_opsgenie_tags(): rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', + 'name': 'Opsgenie Details', 'type': mock_rule(), - 'jira_project': 'testproject', - 'jira_priority': 0, - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': description_txt, - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'timestamp_field': '@timestamp', - 'alert_subject': 'Issue {0} occurred at {1}', - 'alert_subject_args': ['test_term', '@timestamp'], - 'rule_file': '/tmp/foo.yaml' + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_tags': ['test1', 'test2'] + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' } + alert = OpsGenieAlerter(rule) - mock_priority = mock.Mock(id='5') + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - expected = [ - mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), - mock.call().priorities(), - mock.call().fields(), - mock.call().create_issue( - issuetype={'name': 'testtype'}, - priority={'id': '5'}, - project={'key': 'testproject'}, - 
labels=['testlabel'], - components=[{'name': 'testcomponent'}], - description=mock.ANY, - summary='Issue test_value occurred at 2014-10-31T00:00:00', - ), - mock.call().add_watcher(mock.ANY, 'testwatcher1'), - mock.call().add_watcher(mock.ANY, 'testwatcher2'), - ] + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['test1', 'test2', 'ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - # We don't care about additional calls to mock_jira, such as __str__ - assert mock_jira.mock_calls[:6] == expected - assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) - # Search called if jira_bump_tickets - rule['jira_bump_tickets'] = True - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] +def test_opsgenie_message(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_message': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - expected.insert(3, mock.call().search_issues(mock.ANY)) - assert mock_jira.mock_calls == expected + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - # Remove a field if jira_ignore_in_title set - rule['jira_ignore_in_title'] = 'test_term' - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test1', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert 'test_value' not in mock_jira.mock_calls[3][1][0] +def test_opsgenie_alias(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_alias': 'test1' + } + match = { + 
'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - # Issue is still created if search_issues throws an exception - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.side_effect = JIRAError - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - assert mock_jira.mock_calls == expected + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies', + 'alias': 'test1' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - # Only bump after 3d of inactivity - rule['jira_bump_after_inactivity'] = 3 - mock_issue = mock.Mock() - # Check ticket is bumped if it is updated 4 days ago - mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] +def test_opsgenie_subject(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_subject': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - # Check add_comment is called - assert len(mock_jira.mock_calls) == 5 - assert '().add_comment' == mock_jira.mock_calls[4][0] + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # Check ticket is bumped is not bumped if ticket is updated right now - mock_issue.fields.updated = str(ts_now()) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - alert = JiraAlerter(rule) 
- alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - # Only 4 calls for mock_jira since add_comment is not called - assert len(mock_jira.mock_calls) == 4 + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test1', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - # Test match resolved values - rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', - 'type': mock_rule(), - 'owner': 'the_owner', - 'jira_project': 'testproject', - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': "DESC", - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'timestamp_field': '@timestamp', - 'jira_affected_user': "#gmail.the_user", - 'rule_file': '/tmp/foo.yaml' - } - mock_issue = mock.Mock() - mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) - mock_fields = [ - {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}} - ] - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.fields.return_value = mock_fields - mock_jira.return_value.priorities.return_value = [mock_priority] - alert = JiraAlerter(rule) - alert.alert([{'gmail.the_user': 'jdoe', '@timestamp': '2014-10-31T00:00:00'}]) - assert mock_jira.mock_calls[4][2]['affected_user_id'] == "jdoe" - -def test_jira_arbitrary_field_support(): - description_txt = "Description stuff goes here like a runbook link." 
+def test_opsgenie_subject_args(): rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', + 'name': 'Opsgenie Details', 'type': mock_rule(), - 'owner': 'the_owner', - 'jira_project': 'testproject', - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': description_txt, - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'jira_arbitrary_reference_string_field': '$owner$', - 'jira_arbitrary_string_field': 'arbitrary_string_value', - 'jira_arbitrary_string_array_field': ['arbitrary_string_value1', 'arbitrary_string_value2'], - 'jira_arbitrary_string_array_field_provided_as_single_value': 'arbitrary_string_value_in_array_field', - 'jira_arbitrary_number_field': 1, - 'jira_arbitrary_number_array_field': [2, 3], - 'jira_arbitrary_number_array_field_provided_as_single_value': 1, - 'jira_arbitrary_complex_field': 'arbitrary_complex_value', - 'jira_arbitrary_complex_array_field': ['arbitrary_complex_value1', 'arbitrary_complex_value2'], - 'jira_arbitrary_complex_array_field_provided_as_single_value': 'arbitrary_complex_value_in_array_field', - 'timestamp_field': '@timestamp', - 'alert_subject': 'Issue {0} occurred at {1}', - 'alert_subject_args': ['test_term', '@timestamp'], - 'rule_file': '/tmp/foo.yaml' + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_subject': 'test', + 'opsgenie_subject_args': ['Testing', 'message'] + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' } + alert = OpsGenieAlerter(rule) - mock_priority = mock.MagicMock(id='5') + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - mock_fields = [ - {'name': 'arbitrary reference string field', 'id': 'arbitrary_reference_string_field', 'schema': {'type': 'string'}}, - {'name': 'arbitrary string field', 'id': 'arbitrary_string_field', 'schema': {'type': 'string'}}, - {'name': 'arbitrary string array field', 'id': 'arbitrary_string_array_field', 'schema': {'type': 'array', 'items': 'string'}}, - { - 'name': 'arbitrary string array field provided as single value', - 'id': 'arbitrary_string_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'string'} - }, - {'name': 'arbitrary number field', 'id': 'arbitrary_number_field', 'schema': {'type': 'number'}}, - {'name': 'arbitrary number array field', 'id': 'arbitrary_number_array_field', 'schema': {'type': 'array', 'items': 'number'}}, - { - 'name': 'arbitrary number array field provided as single value', - 'id': 'arbitrary_number_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'number'} - }, - {'name': 'arbitrary complex field', 'id': 'arbitrary_complex_field', 'schema': {'type': 'ArbitraryType'}}, - { - 'name': 'arbitrary complex array field', - 'id': 'arbitrary_complex_array_field', - 'schema': {'type': 'array', 'items': 'ArbitraryType'} - }, - { - 'name': 'arbitrary complex array field provided as single value', - 'id': 'arbitrary_complex_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'ArbitraryType'} + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' }, - ] + json=mock.ANY, + proxies=None + ) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as 
mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - expected = [ - mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), - mock.call().priorities(), - mock.call().fields(), - mock.call().create_issue( - issuetype={'name': 'testtype'}, - project={'key': 'testproject'}, - labels=['testlabel'], - components=[{'name': 'testcomponent'}], - description=mock.ANY, - summary='Issue test_value occurred at 2014-10-31T00:00:00', - arbitrary_reference_string_field='the_owner', - arbitrary_string_field='arbitrary_string_value', - arbitrary_string_array_field=['arbitrary_string_value1', 'arbitrary_string_value2'], - arbitrary_string_array_field_provided_as_single_value=['arbitrary_string_value_in_array_field'], - arbitrary_number_field=1, - arbitrary_number_array_field=[2, 3], - arbitrary_number_array_field_provided_as_single_value=[1], - arbitrary_complex_field={'name': 'arbitrary_complex_value'}, - arbitrary_complex_array_field=[{'name': 'arbitrary_complex_value1'}, {'name': 'arbitrary_complex_value2'}], - arbitrary_complex_array_field_provided_as_single_value=[{'name': 'arbitrary_complex_value_in_array_field'}], - ), - mock.call().add_watcher(mock.ANY, 'testwatcher1'), - mock.call().add_watcher(mock.ANY, 'testwatcher2'), - ] - # We don't care about additional calls to mock_jira, such as __str__ - assert mock_jira.mock_calls[:6] == expected - assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) +def test_opsgenie_priority_p1(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - # Reference an arbitrary string field that is not defined on the JIRA server - rule['jira_nonexistent_field'] = 'nonexistent field value' + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - with pytest.raises(Exception) as exception: - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert "Could not find a definition for the jira field 'nonexistent field'" in str(exception) + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 
'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P1', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - del rule['jira_nonexistent_field'] - # Reference a watcher that does not exist - rule['jira_watchers'] = 'invalid_watcher' +def test_opsgenie_priority_p2(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P2' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # Cause add_watcher to raise, which most likely means that the user did not exist - mock_jira.return_value.add_watcher.side_effect = Exception() + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - with pytest.raises(Exception) as exception: - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" 
in str(exception) + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P2', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json -def test_kibana(ea): - rule = {'filter': [{'query': {'query_string': {'query': 'xy:z'}}}], - 'name': 'Test rule!', - 'es_host': 'test.testing', - 'es_port': 12345, - 'timeframe': datetime.timedelta(hours=1), - 'index': 'logstash-test', - 'include': ['@timestamp'], - 'timestamp_field': '@timestamp'} - match = {'@timestamp': '2014-10-10T00:00:00'} - with mock.patch("elastalert.elastalert.elasticsearch_client") as mock_es: +def test_opsgenie_priority_p3(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P3' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P3', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p4(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P4' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P4', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p5(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P5' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 
'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P5', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_none(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'abc' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_proxy(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_proxy': 'https://proxy.url' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies={'https': 'https://proxy.url'} + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_jira(): + description_txt = "Description stuff goes here like a runbook link." 
+ rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'jira_project': 'testproject', + 'jira_priority': 0, + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': description_txt, + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + + mock_priority = mock.Mock(id='5') + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected = [ + mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), + mock.call().priorities(), + mock.call().fields(), + mock.call().create_issue( + issuetype={'name': 'testtype'}, + priority={'id': '5'}, + project={'key': 'testproject'}, + labels=['testlabel'], + components=[{'name': 'testcomponent'}], + description=mock.ANY, + summary='Issue test_value occurred at 2014-10-31T00:00:00', + ), + mock.call().add_watcher(mock.ANY, 'testwatcher1'), + mock.call().add_watcher(mock.ANY, 'testwatcher2'), + ] + + # We don't care about additional calls to mock_jira, such as __str__ + assert mock_jira.mock_calls[:6] == expected + assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) + + # Search called if jira_bump_tickets + rule['jira_bump_tickets'] = True + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected.insert(3, mock.call().search_issues(mock.ANY)) + assert mock_jira.mock_calls == expected + + # Remove a field if jira_ignore_in_title set + rule['jira_ignore_in_title'] = 'test_term' + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + assert 'test_value' not in mock_jira.mock_calls[3][1][0] + + # Issue is still created if search_issues throws an exception + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.side_effect = JIRAError + mock_jira.return_value.priorities.return_value = [mock_priority] + 
mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + assert mock_jira.mock_calls == expected + + # Only bump after 3d of inactivity + rule['jira_bump_after_inactivity'] = 3 + mock_issue = mock.Mock() + + # Check ticket is bumped if it is updated 4 days ago + mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [mock_issue] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + # Check add_comment is called + assert len(mock_jira.mock_calls) == 5 + assert '().add_comment' == mock_jira.mock_calls[4][0] + + # Check ticket is bumped is not bumped if ticket is updated right now + mock_issue.fields.updated = str(ts_now()) + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [mock_issue] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + # Only 4 calls for mock_jira since add_comment is not called + assert len(mock_jira.mock_calls) == 4 + + # Test match resolved values + rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'owner': 'the_owner', + 'jira_project': 'testproject', + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': "DESC", + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'timestamp_field': '@timestamp', + 'jira_affected_user': "#gmail.the_user", + 'rule_file': '/tmp/foo.yaml' + } + mock_issue = mock.Mock() + mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) + mock_fields = [ + {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}} + ] + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [mock_issue] + mock_jira.return_value.fields.return_value = mock_fields + mock_jira.return_value.priorities.return_value = [mock_priority] + alert = JiraAlerter(rule) + alert.alert([{'gmail.the_user': 'jdoe', '@timestamp': '2014-10-31T00:00:00'}]) + assert mock_jira.mock_calls[4][2]['affected_user_id'] == "jdoe" + + +def test_jira_arbitrary_field_support(): + description_txt = "Description stuff goes here like a runbook link." 
+ rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'owner': 'the_owner', + 'jira_project': 'testproject', + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': description_txt, + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'jira_arbitrary_reference_string_field': '$owner$', + 'jira_arbitrary_string_field': 'arbitrary_string_value', + 'jira_arbitrary_string_array_field': ['arbitrary_string_value1', 'arbitrary_string_value2'], + 'jira_arbitrary_string_array_field_provided_as_single_value': 'arbitrary_string_value_in_array_field', + 'jira_arbitrary_number_field': 1, + 'jira_arbitrary_number_array_field': [2, 3], + 'jira_arbitrary_number_array_field_provided_as_single_value': 1, + 'jira_arbitrary_complex_field': 'arbitrary_complex_value', + 'jira_arbitrary_complex_array_field': ['arbitrary_complex_value1', 'arbitrary_complex_value2'], + 'jira_arbitrary_complex_array_field_provided_as_single_value': 'arbitrary_complex_value_in_array_field', + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + + mock_priority = mock.MagicMock(id='5') + + mock_fields = [ + {'name': 'arbitrary reference string field', 'id': 'arbitrary_reference_string_field', 'schema': {'type': 'string'}}, + {'name': 'arbitrary string field', 'id': 'arbitrary_string_field', 'schema': {'type': 'string'}}, + {'name': 'arbitrary string array field', 'id': 'arbitrary_string_array_field', 'schema': {'type': 'array', 'items': 'string'}}, + { + 'name': 'arbitrary string array field provided as single value', + 'id': 'arbitrary_string_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'string'} + }, + {'name': 'arbitrary number field', 'id': 'arbitrary_number_field', 'schema': {'type': 'number'}}, + {'name': 'arbitrary number array field', 'id': 'arbitrary_number_array_field', 'schema': {'type': 'array', 'items': 'number'}}, + { + 'name': 'arbitrary number array field provided as single value', + 'id': 'arbitrary_number_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'number'} + }, + {'name': 'arbitrary complex field', 'id': 'arbitrary_complex_field', 'schema': {'type': 'ArbitraryType'}}, + { + 'name': 'arbitrary complex array field', + 'id': 'arbitrary_complex_array_field', + 'schema': {'type': 'array', 'items': 'ArbitraryType'} + }, + { + 'name': 'arbitrary complex array field provided as single value', + 'id': 'arbitrary_complex_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'ArbitraryType'} + }, + ] + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected = [ + mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), + mock.call().priorities(), + mock.call().fields(), + mock.call().create_issue( + issuetype={'name': 'testtype'}, + project={'key': 'testproject'}, + labels=['testlabel'], + components=[{'name': 'testcomponent'}], + description=mock.ANY, + summary='Issue test_value occurred at 
2014-10-31T00:00:00', + arbitrary_reference_string_field='the_owner', + arbitrary_string_field='arbitrary_string_value', + arbitrary_string_array_field=['arbitrary_string_value1', 'arbitrary_string_value2'], + arbitrary_string_array_field_provided_as_single_value=['arbitrary_string_value_in_array_field'], + arbitrary_number_field=1, + arbitrary_number_array_field=[2, 3], + arbitrary_number_array_field_provided_as_single_value=[1], + arbitrary_complex_field={'name': 'arbitrary_complex_value'}, + arbitrary_complex_array_field=[{'name': 'arbitrary_complex_value1'}, {'name': 'arbitrary_complex_value2'}], + arbitrary_complex_array_field_provided_as_single_value=[{'name': 'arbitrary_complex_value_in_array_field'}], + ), + mock.call().add_watcher(mock.ANY, 'testwatcher1'), + mock.call().add_watcher(mock.ANY, 'testwatcher2'), + ] + + # We don't care about additional calls to mock_jira, such as __str__ + assert mock_jira.mock_calls[:6] == expected + assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) + + # Reference an arbitrary string field that is not defined on the JIRA server + rule['jira_nonexistent_field'] = 'nonexistent field value' + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + + with pytest.raises(Exception) as exception: + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert "Could not find a definition for the jira field 'nonexistent field'" in str(exception) + + del rule['jira_nonexistent_field'] + + # Reference a watcher that does not exist + rule['jira_watchers'] = 'invalid_watcher' + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + + # Cause add_watcher to raise, which most likely means that the user did not exist + mock_jira.return_value.add_watcher.side_effect = Exception() + + with pytest.raises(Exception) as exception: + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" 
in str(exception) + + +def test_kibana(ea): + rule = {'filter': [{'query': {'query_string': {'query': 'xy:z'}}}], + 'name': 'Test rule!', + 'es_host': 'test.testing', + 'es_port': 12345, + 'timeframe': datetime.timedelta(hours=1), + 'index': 'logstash-test', + 'include': ['@timestamp'], + 'timestamp_field': '@timestamp'} + match = {'@timestamp': '2014-10-10T00:00:00'} + with mock.patch("elastalert.elastalert.elasticsearch_client") as mock_es: mock_create = mock.Mock(return_value={'_id': 'ABCDEFGH'}) mock_es_inst = mock.Mock() mock_es_inst.index = mock_create @@ -1040,654 +1570,1862 @@ def test_kibana(ea): mock_es.return_value = mock_es_inst link = ea.generate_kibana_db(rule, match) - assert 'http://test.testing:12345/_plugin/kibana/#/dashboard/temp/ABCDEFGH' == link + assert 'http://test.testing:12345/_plugin/kibana/#/dashboard/temp/ABCDEFGH' == link + + # Name and index + dashboard = json.loads(mock_create.call_args_list[0][1]['body']['dashboard']) + assert dashboard['index']['default'] == 'logstash-test' + assert 'Test rule!' in dashboard['title'] + + # Filters and time range + filters = dashboard['services']['filter']['list'] + assert 'xy:z' in filters['1']['query'] + assert filters['1']['type'] == 'querystring' + time_range = filters['0'] + assert time_range['from'] == ts_add(match['@timestamp'], -rule['timeframe']) + assert time_range['to'] == ts_add(match['@timestamp'], datetime.timedelta(minutes=10)) + + # Included fields active in table + assert dashboard['rows'][1]['panels'][0]['fields'] == ['@timestamp'] + + +def test_command(): + # Test command as list with a formatted arg + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'nested': {'field': 1}} + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + + # Test command as string with formatted arg (old-style string format) + rule = {'command': '/bin/test/ --arg %(somefield)s'} + alert = CommandAlerter(rule) + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + alert.alert([match]) + assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) + + # Test command as string without formatted arg (old-style string format) + rule = {'command': '/bin/test/foo.sh'} + alert = CommandAlerter(rule) + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + alert.alert([match]) + assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) + + # Test command with pipe_match_json + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'pipe_match_json': True} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + mock_subprocess = mock.Mock() + mock_popen.return_value = mock_subprocess + mock_subprocess.communicate.return_value = (None, None) + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + assert mock_subprocess.communicate.called_with(input=json.dumps(match)) + + # Test command with pipe_alert_text + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} + alert = CommandAlerter(rule) + match = {'@timestamp': 
'2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + alert_text = str(BasicMatchString(rule, match)) + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + mock_subprocess = mock.Mock() + mock_popen.return_value = mock_subprocess + mock_subprocess.communicate.return_value = (None, None) + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + assert mock_subprocess.communicate.called_with(input=alert_text.encode()) + + # Test command with fail_on_non_zero_exit + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'fail_on_non_zero_exit': True} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + with pytest.raises(Exception) as exception: + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + mock_subprocess = mock.Mock() + mock_popen.return_value = mock_subprocess + mock_subprocess.wait.return_value = 1 + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + assert "Non-zero exit code while running command" in str(exception) + + +def test_ms_teams(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__() + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_ms_teams_uses_color_and_fixed_width_text(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'ms_teams_alert_fixed_width': True, + 'ms_teams_theme_color': '#124578', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + body = BasicMatchString(rule, match).__str__() + body = body.replace('`', "'") + body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'themeColor': '#124578', + 'text': body + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_ms_teams_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 
'ms_teams_alert_summary': 'Alert from ElastAlert', + 'ms_teams_proxy': 'https://test.proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__() + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['ms_teams_proxy']} + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_timeout(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert_subject': 'Cool subject', + 'alert': [], + 'slack_timeout': 20 + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=20 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_rule_name_when_custom_title_is_not_provided(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 
'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none', + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_slack_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'slack_channel_override': '#test-alert', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none', + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_list_of_custom_slack_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'slack_channel_override': ['#test-alert', '#test-alert2'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data1 = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + expected_data2 = { + 'username': 'elastalert', + 'channel': '#test-alert2', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + + +def test_slack_attach_kibana_discover_url_when_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 
'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_attach_kibana_discover_url_when_not_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_kibana_discover_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_kibana_discover_title': 'Click to discover in Kibana', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Click to discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_kibana_discover_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_kibana_discover_color': 'blue', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + 
match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': 'blue', + 'title': 'Discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_ignore_ssl_errors(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_ignore_ssl_errors': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=False, + timeout=10 + ) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_proxy': 'http://proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['slack_proxy']}, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_username_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'test elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { 
+ 'username': 'test elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_title_link(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_title_link': 'http://slack.title.link', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'title_link': 'http://slack.title.link' + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_title': 'slack title', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'slack title', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_icon_url_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_icon_url_override': 'http://slack.icon.url.override', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_url': 'http://slack.icon.url.override', + 
'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_msg_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_color': 'good', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'good', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_parse_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_parse_override': 'full', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'full' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + - # Name and index - dashboard = json.loads(mock_create.call_args_list[0][1]['body']['dashboard']) - assert dashboard['index']['default'] == 'logstash-test' - assert 'Test rule!' 
in dashboard['title'] +def test_slack_text_string(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_text_string': 'text str', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # Filters and time range - filters = dashboard['services']['filter']['list'] - assert 'xy:z' in filters['1']['query'] - assert filters['1']['type'] == 'querystring' - time_range = filters['0'] - assert time_range['from'] == ts_add(match['@timestamp'], -rule['timeframe']) - assert time_range['to'] == ts_add(match['@timestamp'], datetime.timedelta(minutes=10)) + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': 'text str', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - # Included fields active in table - assert dashboard['rows'][1]['panels'][0]['fields'] == ['@timestamp'] +def test_slack_alert_fields(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_alert_fields': [ + { + 'title': 'Host', + 'value': 'somefield', + 'short': 'true' + }, + { + 'title': 'Sensors', + 'value': '@timestamp', + 'short': 'true' + } + ], + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) -def test_command(): - # Test command as list with a formatted arg - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'nested': {'field': 1}} - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': + [ + { + 'short': 'true', + 'title': 'Host', + 'value': 'foobarbaz' + }, + { + 'short': 'true', + 'title': 'Sensors', + 'value': '2016-01-01T00:00:00' + } + ], + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_ca_certs(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 
'elastalert', + 'slack_ca_certs': True, + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - # Test command as string with formatted arg (old-style string format) - rule = {'command': '/bin/test/ --arg %(somefield)s'} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_with_payload(): + rule = { + 'name': 'Test HTTP Post Alerter With Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, + 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) + expected_data = { + 'posted_name': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - # Test command as string without formatted arg (old-style string format) - rule = {'command': '/bin/test/foo.sh'} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + +def test_http_alerter_with_payload_all_values(): + rule = { + 'name': 'Test HTTP Post Alerter With Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_all_values': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'posted_name': 'foobarbaz', + 'name': 'somestaticname', + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == 
json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_without_payload(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - # Test command with pipe_match_json - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'pipe_match_json': True} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.communicate.return_value = (None, None) - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - assert mock_subprocess.communicate.called_with(input=json.dumps(match)) - # Test command with fail_on_non_zero_exit - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'fail_on_non_zero_exit': True} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - with pytest.raises(Exception) as exception: - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.wait.return_value = 1 - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - assert "Non-zero exit code while running command" in str(exception) +def test_http_alerter_proxy(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies={'https': 'http://proxy.url'}, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_ms_teams(): +def test_http_alerter_timeout(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 
'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'alert_subject': 'Cool subject', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_timeout': 20, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': rule['ms_teams_alert_summary'], - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__() + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['ms_teams_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=20, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_ms_teams_uses_color_and_fixed_width_text(): +def test_http_alerter_headers(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'ms_teams_alert_fixed_width': True, - 'ms_teams_theme_color': '#124578', - 'alert_subject': 'Cool subject', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_headers': {'authorization': 'Basic 123dr3234'}, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - body = BasicMatchString(rule, match).__str__() - body = body.replace('`', "'") - body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') expected_data = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': rule['ms_teams_alert_summary'], - 'title': rule['alert_subject'], - 'themeColor': '#124578', - 'text': body + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['ms_teams_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8', 'authorization': 'Basic 123dr3234'}, + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_custom_title(): +def test_http_alerter_post_ca_certs_true(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert_subject': 'Cool subject', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 
'http_post_ca_certs': True, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - verify=True, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_custom_timeout(): +def test_http_alerter_post_ca_certs_false(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert_subject': 'Cool subject', - 'alert': [], - 'slack_timeout': 20 + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': False, + 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - verify=True, - timeout=20 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_rule_name_when_custom_title_is_not_provided(): +def test_pagerduty_alerter(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 
2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': '', + 'service_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/generic/2010-04-15/create_event.json', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none', + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_custom_slack_channel(): +def test_pagerduty_alerter_v2_payload_class_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], - 'slack_channel_override': '#test-alert', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'somefield', + 'pagerduty_v2_payload_class_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '#test-alert', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], 
- 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none', + 'client': 'ponies inc.', + 'payload': { + 'class': 'somefield', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_list_of_custom_slack_channel(): +def test_pagerduty_alerter_v2_payload_component_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], - 'slack_channel_override': ['#test-alert', '#test-alert2'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'somefield', + 'pagerduty_v2_payload_component_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - - expected_data1 = { - 'username': 'elastalert', - 'channel': '#test-alert', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - expected_data2 = { - 'username': 'elastalert', - 'channel': '#test-alert2', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'somefield', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + 
mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_attach_kibana_discover_url_when_generated(): +def test_pagerduty_alerter_v2_payload_group_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'somefield', + 'pagerduty_v2_payload_group_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'somefield', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' }, - { - 'color': '#ec4b98', - 'title': 'Discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_attach_kibana_discover_url_when_not_generated(): +def test_pagerduty_alerter_v2_payload_source_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'somefield', + 'pagerduty_v2_payload_source_args': ['@timestamp', 'somefield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) 
+ alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'somefield', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_kibana_discover_title(): +def test_pagerduty_alerter_v2_payload_custom_details(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_kibana_discover_title': 'Click to discover in Kibana', - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_v2_payload_custom_details': {'a': 'somefield', 'c': 'f'}, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'a': 'foobarbaz', + 'c': None, + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' }, - { - 'color': '#ec4b98', - 'title': 'Click to discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - 
mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_kibana_discover_color(): +def test_pagerduty_alerter_v2_payload_include_all_info(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_kibana_discover_color': 'blue', - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_v2_payload_include_all_info': False, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - }, - { - 'color': 'blue', - 'title': 'Discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': {}, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_ignore_ssl_errors(): +def test_pagerduty_alerter_custom_incident_key(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_ignore_ssl_errors': True, + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom key', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00' + '@timestamp': 
'2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=False, - timeout=10 - ) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom key', + 'service_key': 'magicalbadgers', } + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_with_payload(): +def test_pagerduty_alerter_custom_incident_key_with_args(): rule = { - 'name': 'Test HTTP Post Alerter With Payload', + 'name': 'Test PD Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['somefield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -1695,33 +3433,33 @@ def test_http_alerter_with_payload(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname' + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_with_payload_all_values(): +def test_pagerduty_alerter_custom_alert_subject(): rule = { - 'name': 'Test HTTP Post Alerter With Payload', + 'name': 'Test PD Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_all_values': True, + 'alert_subject': 'Hungry kittens', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['somefield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = PagerDutyAlerter(rule) match = { '@timestamp': 
'2017-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -1729,61 +3467,66 @@ def test_http_alerter_with_payload_all_values(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname', - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'client': 'ponies inc.', + 'description': 'Hungry kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_without_payload(): +def test_pagerduty_alerter_custom_alert_subject_with_args(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test PD Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'somefield': 'Stinky', + 'someotherfield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' + 'client': 'ponies inc.', + 'description': 'Stinky kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinky\nsomeotherfield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter(): +def test_pagerduty_alerter_custom_alert_subject_with_args_specifying_trigger(): rule = { 'name': 'Test PD Rule', 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], 'alert': [] } rules_loader = FileRulesLoader({}) @@ -1791,37 +3534,37 @@ def test_pagerduty_alerter(): alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' } with 
mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { 'client': 'ponies inc.', - 'description': 'Test PD Rule', + 'description': 'Stinkiest kittens', 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' }, 'event_type': 'trigger', - 'incident_key': '', + 'incident_key': 'custom foobarbaz', 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with('https://events.pagerduty.com/generic/2010-04-15/create_event.json', - data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_v2(): +def test_pagerduty_alerter_proxy(): rule = { 'name': 'Test PD Rule', 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_api_version': 'v2', - 'pagerduty_v2_payload_class': 'ping failure', - 'pagerduty_v2_payload_component': 'mysql', - 'pagerduty_v2_payload_group': 'app-stack', - 'pagerduty_v2_payload_severity': 'error', - 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], + 'pagerduty_proxy': 'http://proxy.url', 'alert': [] } rules_loader = FileRulesLoader({}) @@ -1829,322 +3572,557 @@ def test_pagerduty_alerter_v2(): alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { 'client': 'ponies inc.', - 'payload': { - 'class': 'ping failure', - 'component': 'mysql', - 'group': 'app-stack', - 'severity': 'error', - 'source': 'mysql.host.name', - 'summary': 'Test PD Rule', - 'custom_details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'timestamp': '2017-01-01T00:00:00' + 'description': 'Stinkiest kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' }, - 'event_action': 'trigger', - 'dedup_key': '', - 'routing_key': 'magicalbadgers', + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', - data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_incident_key(): +def test_alert_text_kw(ea): + rule = ea.rules[0].copy() + rule['alert_text'] = '{field} at {time}' + rule['alert_text_kw'] = { + '@timestamp': 'time', + 'field': 'field', + } + match = {'@timestamp': '1918-01-17', 'field': 'value'} + alert_text = str(BasicMatchString(rule, match)) + body = '{field} at {@timestamp}'.format(**match) + assert body in alert_text + + +def 
test_alert_text_global_substitution(ea): + rule = ea.rules[0].copy() + rule['owner'] = 'the owner from rule' + rule['priority'] = 'priority from rule' + rule['abc'] = 'abc from rule' + rule['alert_text'] = 'Priority: {0}; Owner: {1}; Abc: {2}' + rule['alert_text_args'] = ['priority', 'owner', 'abc'] + + match = { + '@timestamp': '2016-01-01', + 'field': 'field_value', + 'abc': 'abc from match', + } + + alert_text = str(BasicMatchString(rule, match)) + assert 'Priority: priority from rule' in alert_text + assert 'Owner: the owner from rule' in alert_text + + # When the key exists in both places, it will come from the match + assert 'Abc: abc from match' in alert_text + + +def test_alert_text_kw_global_substitution(ea): + rule = ea.rules[0].copy() + rule['foo_rule'] = 'foo from rule' + rule['owner'] = 'the owner from rule' + rule['abc'] = 'abc from rule' + rule['alert_text'] = 'Owner: {owner}; Foo: {foo}; Abc: {abc}' + rule['alert_text_kw'] = { + 'owner': 'owner', + 'foo_rule': 'foo', + 'abc': 'abc', + } + + match = { + '@timestamp': '2016-01-01', + 'field': 'field_value', + 'abc': 'abc from match', + } + + alert_text = str(BasicMatchString(rule, match)) + assert 'Owner: the owner from rule' in alert_text + assert 'Foo: foo from rule' in alert_text + + # When the key exists in both places, it will come from the match + assert 'Abc: abc from match' in alert_text + + +def test_resolving_rule_references(): rule = { - 'name': 'Test PD Rule', + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'list_of_things': [ + '1', + '$owner$', + [ + '11', + '$owner$', + ], + ], + 'nested_dict': { + 'nested_one': '1', + 'nested_owner': '$owner$', + }, + 'resolved_string_reference': '$owner$', + 'resolved_int_reference': '$priority$', + 'unresolved_reference': '$foo$', + } + alert = Alerter(rule) + assert 'the_owner' == alert.rule['resolved_string_reference'] + assert 2 == alert.rule['resolved_int_reference'] + assert '$foo$' == alert.rule['unresolved_reference'] + assert 'the_owner' == alert.rule['list_of_things'][1] + assert 'the_owner' == alert.rule['list_of_things'][2][1] + assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] + + +def test_alerta_no_auth(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_api_skip_ssl': True, + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT", + 'alerta_value': "UP", 'type': 'any', - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom key', - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + # 'key': ---- missing field on purpose, to verify that simply the text is left empty + # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Test PD Rule', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom key', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + headers={ + 'content-type': 'application/json'}, + verify=False + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_incident_key_with_args(): +def test_alerta_auth(): rule = { - 'name': 'Test PD Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'alerta_api_key': '123456789ABCDEF', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_severity': "debug", 'type': 'any', - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 'pagerduty_incident_key_args': ['somefield'], - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'client': 'ponies inc.', - 'description': 'Test PD Rule', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', - } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json', + 'Authorization': 'Key {}'.format(rule['alerta_api_key'])}) -def test_pagerduty_alerter_custom_alert_subject(): +def test_alerta_new_style(): rule = { - 'name': 'Test PD Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': 
["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", 'type': 'any', - 'alert_subject': 'Hungry kittens', - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 'pagerduty_incident_key_args': ['somefield'], - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + # 'key': ---- missing field on purpose, to verify that simply the text is left empty + # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Hungry kittens', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_alert_subject_with_args(): +def test_alerta_use_qk_as_resource(): rule = { - 'name': 'Test PD Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", 'type': 'any', - 'alert_subject': '{0} kittens', - 'alert_subject_args': ['somefield'], - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 
'pagerduty_incident_key_args': ['someotherfield'], - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alerta_use_qk_as_resource': True, + 'query_key': 'hostname', + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'Stinky', - 'someotherfield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Stinky kittens', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinky\nsomeotherfield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "aProbe", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_alert_subject_with_args_specifying_trigger(): +def test_alerta_timeout(): rule = { - 'name': 'Test PD Rule', - 'type': 'any', - 'alert_subject': '{0} kittens', - 'alert_subject_args': ['somefield'], - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_event_type': 'trigger', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 'pagerduty_incident_key_args': ['someotherfield'], - 'alert': [] + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_timeout': 86450, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'Stinkiest', - 'someotherfield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = 
AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Stinkiest kittens', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86450, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - -def test_alert_text_kw(ea): - rule = ea.rules[0].copy() - rule['alert_text'] = '{field} at {time}' - rule['alert_text_kw'] = { - '@timestamp': 'time', - 'field': 'field', - } - match = {'@timestamp': '1918-01-17', 'field': 'value'} - alert_text = str(BasicMatchString(rule, match)) - body = '{field} at {@timestamp}'.format(**match) - assert body in alert_text + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alert_text_global_substitution(ea): - rule = ea.rules[0].copy() - rule['owner'] = 'the owner from rule' - rule['priority'] = 'priority from rule' - rule['abc'] = 'abc from rule' - rule['alert_text'] = 'Priority: {0}; Owner: {1}; Abc: {2}' - rule['alert_text_args'] = ['priority', 'owner', 'abc'] +def test_alerta_type(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_type': 'elastalert2', + 'alert': 'alerta' + } match = { - '@timestamp': '2016-01-01', - 'field': 'field_value', - 'abc': 'abc from match', + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } - alert_text = str(BasicMatchString(rule, match)) - assert 'Priority: priority from rule' in alert_text - assert 'Owner: the owner from rule' in alert_text + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # When the key exists in both places, it will come from the match - assert 'Abc: abc from match' in alert_text + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": 
"debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert2", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alert_text_kw_global_substitution(ea): - rule = ea.rules[0].copy() - rule['foo_rule'] = 'foo from rule' - rule['owner'] = 'the owner from rule' - rule['abc'] = 'abc from rule' - rule['alert_text'] = 'Owner: {owner}; Foo: {foo}; Abc: {abc}' - rule['alert_text_kw'] = { - 'owner': 'owner', - 'foo_rule': 'foo', - 'abc': 'abc', +def test_alerta_resource(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_resource': 'elastalert2', + 'alert': 'alerta' } match = { - '@timestamp': '2016-01-01', - 'field': 'field_value', - 'abc': 'abc from match', + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } - alert_text = str(BasicMatchString(rule, match)) - assert 'Owner: the owner from rule' in alert_text - assert 'Foo: foo from rule' in alert_text - - # When the key exists in both places, it will come from the match - assert 'Abc: abc from match' in alert_text - + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) -def test_resolving_rule_references(ea): - rule = { - 'name': 'test_rule', - 'type': mock_rule(), - 'owner': 'the_owner', - 'priority': 2, - 'list_of_things': [ - '1', - '$owner$', - [ - '11', - '$owner$', - ], - ], - 'nested_dict': { - 'nested_one': '1', - 'nested_owner': '$owner$', - }, - 'resolved_string_reference': '$owner$', - 'resolved_int_reference': '$priority$', - 'unresolved_reference': '$foo$', + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert2", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - alert = Alerter(rule) - assert 'the_owner' == alert.rule['resolved_string_reference'] - assert 2 == 
alert.rule['resolved_int_reference'] - assert '$foo$' == alert.rule['unresolved_reference'] - assert 'the_owner' == alert.rule['list_of_things'][1] - assert 'the_owner' == alert.rule['list_of_things'][2][1] - assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alerta_no_auth(ea): +def test_alerta_service(): rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', 'timeframe': datetime.timedelta(hours=1), 'timestamp_field': '@timestamp', - 'alerta_api_skip_ssl': True, 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], - 'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], 'alerta_event': "ProbeUP", 'alerta_group': "Health", 'alerta_origin': "ElastAlert 2", 'alerta_severity': "debug", - 'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", 'alerta_value': "UP", 'type': 'any', 'alerta_use_match_timestamp': True, + 'alerta_service': ['elastalert2'], 'alert': 'alerta' } match = { '@timestamp': '2014-10-10T00:00:00', - # 'key': ---- missing field on purpose, to verify that simply the text is left empty - # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty 'sender_ip': '1.1.1.1', 'hostname': 'aProbe' } @@ -2159,7 +4137,7 @@ def test_alerta_no_auth(ea): "origin": "ElastAlert 2", "resource": "elastalert", "severity": "debug", - "service": ["elastalert"], + "service": ["elastalert2"], "tags": [], "text": "Probe aProbe is UP at GMT", "value": "UP", @@ -2169,7 +4147,7 @@ def test_alerta_no_auth(ea): "timeout": 86400, "correlate": ["ProbeUP", "ProbeDOWN"], "group": "Health", - "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""}, + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, "type": "elastalert", "event": "ProbeUP" } @@ -2177,24 +4155,32 @@ def test_alerta_no_auth(ea): mock_post_request.assert_called_once_with( alert.url, data=mock.ANY, + verify=True, headers={ - 'content-type': 'application/json'}, - verify=False + 'content-type': 'application/json'} ) assert expected_data == json.loads( mock_post_request.call_args_list[0][1]['data']) -def test_alerta_auth(ea): +def test_alerta_environment(): rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', - 'alerta_api_key': '123456789ABCDEF', 'timeframe': datetime.timedelta(hours=1), 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", 'type': 'any', 'alerta_use_match_timestamp': True, + 'alerta_environment': 'Production2', 'alert': 'alerta' } @@ -2210,16 +4196,37 @@ def test_alerta_auth(ea): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { + "origin": "ElastAlert 2", + "resource": 
"elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production2", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + mock_post_request.assert_called_once_with( alert.url, data=mock.ANY, verify=True, headers={ - 'content-type': 'application/json', - 'Authorization': 'Key {}'.format(rule['alerta_api_key'])}) + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alerta_new_style(ea): +def test_alerta_tags(): rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', @@ -2236,13 +4243,12 @@ def test_alerta_new_style(ea): 'alerta_value': "UP", 'type': 'any', 'alerta_use_match_timestamp': True, + 'alerta_tags': ['elastalert2'], 'alert': 'alerta' } match = { '@timestamp': '2014-10-10T00:00:00', - # 'key': ---- missing field on purpose, to verify that simply the text is left empty - # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty 'sender_ip': '1.1.1.1', 'hostname': 'aProbe' } @@ -2258,7 +4264,7 @@ def test_alerta_new_style(ea): "resource": "elastalert", "severity": "debug", "service": ["elastalert"], - "tags": [], + "tags": ['elastalert2'], "text": "Probe aProbe is UP at GMT", "value": "UP", "createTime": "2014-10-10T00:00:00.000000Z", @@ -2283,7 +4289,7 @@ def test_alerta_new_style(ea): mock_post_request.call_args_list[0][1]['data']) -def test_alert_subject_size_limit_no_args(ea): +def test_alert_subject_size_limit_no_args(): rule = { 'name': 'test_rule', 'type': mock_rule(), @@ -2297,6 +4303,39 @@ def test_alert_subject_size_limit_no_args(ea): assert 5 == len(alertSubject) +def test_alert_error(): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 + } + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + alert = Alerter(rule) + try: + alert.alert([match]) + except NotImplementedError: + assert True + + +def test_alert_get_aggregation_summary_text__maximum_width(): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 + } + alert = Alerter(rule) + assert 80 == alert.get_aggregation_summary_text__maximum_width() + + def test_alert_subject_size_limit_with_args(ea): rule = { 'name': 'test_rule', @@ -2388,6 +4427,47 @@ def test_pagertree(): assert expected_data["Description"] == actual_data['Description'] +def test_pagertree_proxy(): + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx', + 'pagertree_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'event_type': 'create', + 'Id': str(uuid.uuid4()), 
+ 'Title': 'Test PagerTree Rule', + 'Description': 'Test PagerTree Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + rule['pagertree_integration_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + uuid4hex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) + match = uuid4hex.match(actual_data['Id']) + assert bool(match) is True + assert expected_data["event_type"] == actual_data['event_type'] + assert expected_data["Title"] == actual_data['Title'] + assert expected_data["Description"] == actual_data['Description'] + + def test_line_notify(): rule = { 'name': 'Test LineNotify Rule', @@ -2525,6 +4605,41 @@ def test_gitter_msg_level_error(): assert 'error' in actual_data['level'] +def test_gitter_proxy(): + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'error', + 'gitter_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'level': 'error' + } + + mock_post_request.assert_called_once_with( + rule['gitter_webhook_url'], + mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + assert expected_data == actual_data + assert 'error' in actual_data['level'] + + def test_chatwork(): rule = { 'name': 'Test Chatwork Rule', @@ -2554,47 +4669,211 @@ def test_chatwork(): auth=None ) - actual_data = mock_post_request.call_args_list[0][1]['params'] + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data + + +def test_chatwork_proxy(): + rule = { + 'name': 'Test Chatwork Rule', + 'type': 'any', + 'chatwork_apikey': 'xxxx1', + 'chatwork_room_id': 'xxxx2', + 'chatwork_proxy': 'http://proxy.url', + 'chatwork_proxy_login': 'admin', + 'chatwork_proxy_pass': 'password', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + } + + mock_post_request.assert_called_once_with( + 'https://api.chatwork.com/v2/rooms/xxxx2/messages', + params=mock.ANY, + headers={'X-ChatWorkToken': 'xxxx1'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data + + +def test_telegram(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 
'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_telegram_proxy(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'telegram_proxy': 'http://proxy.url', + 'telegram_proxy_login': 'admin', + 'telegram_proxy_pass': 'password', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_telegram_text_maxlength(): + rule = { + 'name': 'Test Telegram Rule' + ('a' * 3985), + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule' + ('a' * 3979) + + '\n⚠ *message was cropped according to telegram limits!* ⚠ ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_telegram(): +def test_service_now(): rule = { - 'name': 'Test Telegram Rule', + 'name': 'Test ServiceNow Rule', 'type': 'any', - 'telegram_bot_token': 'xxxxx1', - 'telegram_room_id': 'xxxxx2', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - 
alert = TelegramAlerter(rule) + alert = ServiceNowAlerter(rule) match = { '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'chat_id': rule['telegram_room_id'], - 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', - 'parse_mode': 'markdown', - 'disable_web_page_preview': True + 'description': 'Test ServiceNow Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'short_description': rule['short_description'], + 'comments': rule['comments'], + 'assignment_group': rule['assignment_group'], + 'category': rule['category'], + 'subcategory': rule['subcategory'], + 'cmdb_ci': rule['cmdb_ci'], + 'caller_id': rule['caller_id'] } mock_post_request.assert_called_once_with( - 'https://api.telegram.org/botxxxxx1/sendMessage', + rule['servicenow_rest_url'], + auth=(rule['username'], rule['password']), + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - auth=None + proxies=None ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_service_now(): +def test_service_now_proxy(): rule = { 'name': 'Test ServiceNow Rule', 'type': 'any', @@ -2608,6 +4887,7 @@ def test_service_now(): 'subcategory': 'ServiceNow subcategory', 'cmdb_ci': 'ServiceNow cmdb_ci', 'caller_id': 'ServiceNow caller_id', + 'servicenow_proxy': 'http://proxy.url', 'alert': [] } rules_loader = FileRulesLoader({}) @@ -2639,7 +4919,7 @@ def test_service_now(): 'Accept': 'application/json;charset=utf-8' }, data=mock.ANY, - proxies=None + proxies={'https': 'http://proxy.url'} ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -2684,6 +4964,45 @@ def test_victor_ops(): assert expected_data == actual_data +def test_victor_ops_proxy(): + rule = { + 'name': 'Test VictorOps Rule', + 'type': 'any', + 'victorops_api_key': 'xxxx1', + 'victorops_routing_key': 'xxxx2', + 'victorops_message_type': 'INFO', + 'victorops_entity_display_name': 'no entity display name', + 'victorops_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = VictorOpsAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message_type': rule['victorops_message_type'], + 'entity_display_name': rule['victorops_entity_display_name'], + 'monitoring_tool': 'ElastAlert', + 'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + 'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + def test_google_chat_basic(): rule = { 'name': 'Test GoogleChat Rule', @@ -2869,6 +5188,92 @@ def test_discord_not_footer(): assert expected_data == actual_data +def test_discord_proxy(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 
'discord_embed_color': 0xffffff, + 'discord_proxy': 'http://proxy.url', + 'discord_proxy_login': 'admin', + 'discord_proxy_password': 'password', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_description_maxlength(): + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule' + ('a' * 1933) + + '\n *message was cropped according to discord embed description limits!* ```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + def test_dingtalk_text(): rule = { 'name': 'Test DingTalk Rule', @@ -3053,7 +5458,114 @@ def test_dingtalk_action_card(): assert expected_data == actual_data -def test_Mattermost_alert_text_only(): +def test_dingtalk_proxy(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'dingtalk_proxy': 'http://proxy.url', + 'dingtalk_proxy_login': 'admin', + 'dingtalk_proxy_pass': 'password', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'btnOrientation': rule['dingtalk_btn_orientation'], + 'btns': rule['dingtalk_btns'] + } + } + + mock_post_request.assert_called_once_with( + 
'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_proxy(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_proxy': 'https://proxy.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies={'https': 'https://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_alert_text_only(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3099,7 +5611,7 @@ def test_Mattermost_alert_text_only(): assert expected_data == actual_data -def test_Mattermost_not_alert_text_only(): +def test_mattermost_not_alert_text_only(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3143,11 +5655,10 @@ def test_Mattermost_not_alert_text_only(): ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - print(actual_data) assert expected_data == actual_data -def test_Mattermost_msg_fields(): +def test_mattermost_msg_fields(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3209,7 +5720,7 @@ def test_Mattermost_msg_fields(): assert expected_data == actual_data -def test_Mattermost_icon_url_override(): +def test_mattermost_icon_url_override(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3258,7 +5769,7 @@ def test_Mattermost_icon_url_override(): assert expected_data == actual_data -def test_Mattermost_channel_override(): +def test_mattermost_channel_override(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3307,7 +5818,7 @@ def test_Mattermost_channel_override(): assert expected_data == actual_data -def test_Mattermost_ignore_ssl_errors(): +def test_mattermost_ignore_ssl_errors(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', diff --git a/tests/util_test.py b/tests/util_test.py index 55a2f9c8..2a24446d 100644 --- a/tests/util_test.py +++ b/tests/util_test.py @@ -7,14 +7,18 @@ from dateutil.parser import parse as dt from elastalert.util import add_raw_postfix +from elastalert.util import dt_to_ts_with_format +from elastalert.util import flatten_dict from elastalert.util import format_index from elastalert.util import lookup_es_key from elastalert.util import parse_deadline from elastalert.util import parse_duration +from elastalert.util import pytzfy from elastalert.util import replace_dots_in_field_names from elastalert.util 
import resolve_string from elastalert.util import set_es_key from elastalert.util import should_scrolling_continue +from elastalert.util import ts_to_dt_with_format @pytest.mark.parametrize('spec, expected_delta', [ @@ -228,3 +232,22 @@ def test_should_scrolling_continue(): assert should_scrolling_continue(rule_before_first_run) is True assert should_scrolling_continue(rule_before_max_scrolling) is True assert should_scrolling_continue(rule_over_max_scrolling) is False + + +def test_ts_to_dt_with_format(): + assert ts_to_dt_with_format('2021/02/01 12:30:00', '%Y/%m/%d %H:%M:%S') == dt('2021-02-01 12:30:00+00:00') + assert ts_to_dt_with_format('01/02/2021 12:30:00', '%d/%m/%Y %H:%M:%S') == dt('2021-02-01 12:30:00+00:00') + + +def test_dt_to_ts_with_format(): + assert dt_to_ts_with_format(dt('2021-02-01 12:30:00+00:00'), '%Y/%m/%d %H:%M:%S') == '2021/02/01 12:30:00' + assert dt_to_ts_with_format(dt('2021-02-01 12:30:00+00:00'), '%d/%m/%Y %H:%M:%S') == '01/02/2021 12:30:00' + + +def test_flatten_dict(): + assert flatten_dict({'test': 'value1', 'test2': 'value2'}) == {'test': 'value1', 'test2': 'value2'} + + +def test_pytzfy(): + assert pytzfy(dt('2021-02-01 12:30:00+00:00')) == dt('2021-02-01 12:30:00+00:00') + assert pytzfy(datetime(2018, 12, 31, 5, 0, 30, 1000)) == dt('2018-12-31 05:00:30.001000')
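# Illustrative sketch (an assumption, not the elastalert.util implementation):
# the new util tests above only assert behaviour — that ts_to_dt_with_format
# parses a timestamp string with a caller-supplied strptime pattern and yields
# a UTC-aware datetime, and that dt_to_ts_with_format is its strftime inverse.
# The minimal standard-library re-derivation below reproduces exactly those
# asserted conversions, assuming naive parse results are treated as UTC; the
# *_sketch names are hypothetical and exist only for this example.
from datetime import datetime, timezone


def ts_to_dt_with_format_sketch(timestamp, ts_format):
    # Parse with the given strptime pattern; pin naive results to UTC so the
    # result compares equal to dt('2021-02-01 12:30:00+00:00') as in the test.
    parsed = datetime.strptime(timestamp, ts_format)
    return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)


def dt_to_ts_with_format_sketch(dt_value, ts_format):
    # Render the datetime back to a string using the same strftime pattern.
    return dt_value.strftime(ts_format)


if __name__ == '__main__':
    # Mirrors the assertions in test_ts_to_dt_with_format / test_dt_to_ts_with_format.
    parsed = ts_to_dt_with_format_sketch('2021/02/01 12:30:00', '%Y/%m/%d %H:%M:%S')
    assert parsed == datetime(2021, 2, 1, 12, 30, tzinfo=timezone.utc)
    assert dt_to_ts_with_format_sketch(parsed, '%d/%m/%Y %H:%M:%S') == '01/02/2021 12:30:00'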