diff --git a/hdfs_datanode/README.md b/hdfs_datanode/README.md index 2abd2e78cfa27..114bfa88ae4ce 100644 --- a/hdfs_datanode/README.md +++ b/hdfs_datanode/README.md @@ -67,6 +67,30 @@ For containerized environments, see the [Autodiscovery Integration Templates][2] | `` | blank or `{}` | | `` | `{"hdfs_datanode_jmx_uri": "http://%%host%%:50075"}` | +#### Log collection + +**Available for Agent >6.0** + +1. Collecting logs is disabled by default in the Datadog Agent. Enable it in the `datadog.yaml` file with: + + ```yaml + logs_enabled: true + ``` + +2. Add this configuration block to your `hdfs_datanode.d/conf.yaml` file to start collecting your DataNode logs: + + ```yaml + logs: + - type: file + path: /var/log/hadoop-hdfs/*.log + source: hdfs_datanode + service: + ``` + + Change the `path` and `service` parameter values and configure them for your environment. + +3. [Restart the Agent][6]. + ### Validation [Run the Agent's status subcommand][7] and look for `hdfs_datanode` under the Checks section. diff --git a/hdfs_datanode/datadog_checks/hdfs_datanode/data/conf.yaml.example b/hdfs_datanode/datadog_checks/hdfs_datanode/data/conf.yaml.example index 68eb46f3e35a8..9092744df9f80 100644 --- a/hdfs_datanode/datadog_checks/hdfs_datanode/data/conf.yaml.example +++ b/hdfs_datanode/datadog_checks/hdfs_datanode/data/conf.yaml.example @@ -209,3 +209,20 @@ instances: ## Whether or not to persist cookies and use connection pooling for increased performance. # # persist_connections: false + +## Log Section (Available for Agent >=6.0) +## +## type - mandatory - Type of log input source (tcp / udp / file / windows_event) +## port / path / channel_path - mandatory - Set port if type is tcp or udp. Set path if type is file. Set channel_path if type is windows_event +## service - mandatory - Name of the service that generated the log +## source - mandatory - Attribute that defines which Integration sent the logs +## sourcecategory - optional - Multiple value attribute. Used to refine the source attribute +## tags: - optional - Add tags to the collected logs +## +## Discover Datadog log collection: https://docs.datadoghq.com/logs/log_collection/ +# +# logs: +# - type: file
#     path: /var/log/hadoop-hdfs/*.log
#     source: hdfs_datanode
#     service: diff --git a/hdfs_datanode/manifest.json b/hdfs_datanode/manifest.json index 63e4e83b18b0e..f8183de03f972 100644 --- a/hdfs_datanode/manifest.json +++ b/hdfs_datanode/manifest.json @@ -2,6 +2,7 @@ "categories": [ "processing", "os & system", + "log collection", "autodiscovery" ], "creates_events": false, diff --git a/hdfs_namenode/README.md b/hdfs_namenode/README.md index bcfca5de28033..e4bfaa4a22654 100644 --- a/hdfs_namenode/README.md +++ b/hdfs_namenode/README.md @@ -67,6 +67,30 @@ For containerized environments, see the [Autodiscovery Integration Templates][11 | `` | blank or `{}` | | `` | `{"hdfs_namenode_jmx_uri": "https://%%host%%:50070"}` | +#### Log collection + +**Available for Agent >6.0** + +1. Collecting logs is disabled by default in the Datadog Agent. Enable it in the `datadog.yaml` file with: + + ```yaml + logs_enabled: true + ``` + +2. Add this configuration block to your `hdfs_namenode.d/conf.yaml` file to start collecting your NameNode logs: + + ```yaml + logs: + - type: file + path: /var/log/hadoop-hdfs/*.log + source: hdfs_namenode + service: + ``` + + Change the `path` and `service` parameter values and configure them for your environment. + +3. [Restart the Agent][6]. + ### Validation [Run the Agent's status subcommand][117] and look for `hdfs_namenode` under the Checks section.
diff --git a/hdfs_namenode/datadog_checks/hdfs_namenode/data/conf.yaml.example b/hdfs_namenode/datadog_checks/hdfs_namenode/data/conf.yaml.example index f84350e0cffa7..45230ebf34a6f 100644 --- a/hdfs_namenode/datadog_checks/hdfs_namenode/data/conf.yaml.example +++ b/hdfs_namenode/datadog_checks/hdfs_namenode/data/conf.yaml.example @@ -209,3 +209,20 @@ instances: ## Whether or not to persist cookies and use connection pooling for increased performance. # # persist_connections: false + +## Log Section (Available for Agent >=6.0) +## +## type - mandatory - Type of log input source (tcp / udp / file / windows_event) +## port / path / channel_path - mandatory - Set port if type is tcp or udp. Set path if type is file. Set channel_path if type is windows_event +## service - mandatory - Name of the service that generated the log +## source - mandatory - Attribute that defines which Integration sent the logs +## sourcecategory - optional - Multiple value attribute. Used to refine the source attribute +## tags: - optional - Add tags to the collected logs +## +## Discover Datadog log collection: https://docs.datadoghq.com/logs/log_collection/ +# +# logs: +# - type: file +# path: /var/log/hadoop-hdfs/*.log +# source: hdfs_namenode +# service: diff --git a/hdfs_namenode/manifest.json b/hdfs_namenode/manifest.json index 163434dd4cb04..ce6040b2509a3 100644 --- a/hdfs_namenode/manifest.json +++ b/hdfs_namenode/manifest.json @@ -2,6 +2,7 @@ "categories": [ "processing", "os & system", + "log collection", "autodiscovery" ], "creates_events": false,