diff --git a/.gitignore b/.gitignore index c733e317ce1a7..7c3fbd21c3535 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,4 @@ /vendor .DS_Store process.yml +/.vscode diff --git a/.golangci.yml b/.golangci.yml index 23218a5c7ff1b..47bfdae26e95f 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -50,7 +50,7 @@ linters-settings: - name: error-return - name: error-strings - name: errorf - - name: flag-parameter +# - name: flag-parameter #disable for now - name: function-result-limit arguments: [ 3 ] - name: identical-branches diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a40e857433be..8f0a59529a9ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## v1.19.0-rc1 [2021-06-10] +## v1.19.0 [2021-06-17] #### Release Notes @@ -13,6 +13,14 @@ - [#9051](https://github.com/influxdata/telegraf/pull/9051) `outputs.kafka` Don't prevent telegraf from starting when there's a connection error - [#8795](https://github.com/influxdata/telegraf/pull/8795) `parsers.prometheusremotewrite` Update prometheus dependency to v2.21.0 - [#9295](https://github.com/influxdata/telegraf/pull/9295) `outputs.dynatrace` Use dynatrace-metric-utils +- [#9368](https://github.com/influxdata/telegraf/pull/9368) `parsers.json_v2` Update json_v2 parser to handle null types +- [#9359](https://github.com/influxdata/telegraf/pull/9359) `inputs.sql` Fix import of sqlite and ignore it on all platforms that require CGO. +- [#9329](https://github.com/influxdata/telegraf/pull/9329) `inputs.kube_inventory` Fix connecting to the wrong url +- [#9358](https://github.com/influxdata/telegraf/pull/9358) upgrade denisenkom go-mssql to v0.10.0 +- [#9283](https://github.com/influxdata/telegraf/pull/9283) `processors.parser` Fix segfault +- [#9243](https://github.com/influxdata/telegraf/pull/9243) `inputs.docker` Close all idle connections +- [#9338](https://github.com/influxdata/telegraf/pull/9338) `inputs.suricata` Support new JSON format +- [#9296](https://github.com/influxdata/telegraf/pull/9296) `outputs.influxdb` Fix endless retries #### Features @@ -54,6 +62,7 @@ - [OpenTelemetry](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/opentelemetry) - contributed by @jacobmarble - [Intel Data Plane Development Kit (DPDK)](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/dpdk) - contributed by @p-zak - [KNX](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/knx_listener) - contributed by @DocLambda +- [SQL](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/sql) - contributed by @srebhan #### New Output Plugins diff --git a/build_version.txt b/build_version.txt index 815d5ca06d530..3989355915568 100644 --- a/build_version.txt +++ b/build_version.txt @@ -1 +1 @@ -1.19.0 +1.20.0 diff --git a/docs/LICENSE_OF_DEPENDENCIES.md b/docs/LICENSE_OF_DEPENDENCIES.md index 92965418ebe32..fad0787c02428 100644 --- a/docs/LICENSE_OF_DEPENDENCIES.md +++ b/docs/LICENSE_OF_DEPENDENCIES.md @@ -54,7 +54,7 @@ following works: - github.com/containerd/containerd [Apache License 2.0](https://github.com/containerd/containerd/blob/master/LICENSE) - github.com/couchbase/go-couchbase [MIT License](https://github.com/couchbase/go-couchbase/blob/master/LICENSE) - github.com/couchbase/gomemcached [MIT License](https://github.com/couchbase/gomemcached/blob/master/LICENSE) -- github.com/couchbase/goutils [COUCHBASE INC. 
COMMUNITY EDITION LICENSE](https://github.com/couchbase/goutils/blob/master/LICENSE.md)
+- github.com/couchbase/goutils [Apache License 2.0](https://github.com/couchbase/goutils/blob/master/LICENSE.md)
- github.com/davecgh/go-spew [ISC License](https://github.com/davecgh/go-spew/blob/master/LICENSE)
- github.com/denisenkom/go-mssqldb [BSD 3-Clause "New" or "Revised" License](https://github.com/denisenkom/go-mssqldb/blob/master/LICENSE.txt)
- github.com/devigned/tab [MIT License](https://github.com/devigned/tab/blob/master/LICENSE)
diff --git a/etc/telegraf.conf b/etc/telegraf.conf
index 42e7d22b54b8f..492bf704087db 100644
--- a/etc/telegraf.conf
+++ b/etc/telegraf.conf
@@ -8046,6 +8046,84 @@
# # content_encoding = "identity"
+# # Read metrics from SQL queries
+# [[inputs.sql]]
+# ## Database Driver
+# ## See https://github.com/influxdata/telegraf/blob/master/docs/SQL_DRIVERS_INPUT.md for
+# ## a list of supported drivers.
+# driver = "mysql"
+#
+# ## Data source name for connecting
+# ## The syntax and supported options depend on the selected driver.
+# dsn = "username:password@mysqlserver:3307/dbname?param=value"
+#
+# ## Timeout for any operation
+# # timeout = "5s"
+#
+# ## Connection time limits
+# ## By default the maximum idle time and maximum lifetime of a connection are unlimited, i.e. the connections
+# ## will not be closed automatically. If you specify a positive time, the connections will be closed after
+# ## idling or existing for at least that amount of time, respectively.
+# # connection_max_idle_time = "0s"
+# # connection_max_life_time = "0s"
+#
+# ## Connection count limits
+# ## By default the number of open connections is not limited and the number of maximum idle connections
+# ## will be inferred from the number of queries specified. If you specify a positive number for either of the
+# ## two options, connections will be closed when reaching the specified limit. The number of idle connections
+# ## will be clipped to the maximum number of connections limit if any.
+# # connection_max_open = 0
+# # connection_max_idle = auto
+#
+# [[inputs.sql.query]]
+# ## Query to perform on the server
+# query="SELECT user,state,latency,score FROM Scoreboard WHERE application > 0"
+# ## As an alternative to specifying the query directly, you can select a file containing the SQL query here.
+# ## Only one of 'query' and 'query_script' can be specified!
+# # query_script = "/path/to/sql/script.sql"
+#
+# ## Name of the measurement
+# ## In case both 'measurement' and 'measurement_column' are given, the latter takes precedence.
+# # measurement = "sql"
+#
+# ## Column name containing the name of the measurement
+# ## If given, this will take precedence over the 'measurement' setting. In case a query result
+# ## does not contain the specified column, we fall back to the 'measurement' setting.
+# # measurement_column = ""
+#
+# ## Column name containing the time of the measurement
+# ## If omitted, the time of the query will be used.
+# # time_column = ""
+#
+# ## Format of the time contained in 'time_column'
+# ## The time must be 'unix', 'unix_ms', 'unix_us', 'unix_ns', or a golang time format.
+# ## See https://golang.org/pkg/time/#Time.Format for details.
+# # time_format = "unix"
+#
+# ## Column names containing tags
+# ## An empty include list will reject all columns and an empty exclude list will not exclude any column.
+# ## I.e. by default no columns will be returned as tags and the tag set will be empty.
+# # tag_columns_include = [] +# # tag_columns_exclude = [] +# +# ## Column names containing fields (explicit types) +# ## Convert the given columns to the corresponding type. Explicit type conversions take precedence over +# ## the automatic (driver-based) conversion below. +# ## NOTE: Columns should not be specified for multiple types or the resulting type is undefined. +# # field_columns_float = [] +# # field_columns_int = [] +# # field_columns_uint = [] +# # field_columns_bool = [] +# # field_columns_string = [] +# +# ## Column names containing fields (automatic types) +# ## An empty include list is equivalent to '[*]' and all returned columns will be accepted. An empty +# ## exclude list will not exclude any column. I.e. by default all columns will be returned as fields. +# ## NOTE: We rely on the database driver to perform automatic datatype conversion. +# # field_columns_include = [] +# # field_columns_exclude = [] + + # # Read metrics from Microsoft SQL Server # [[inputs.sqlserver]] # ## Specify instances to monitor with a list of connection strings. diff --git a/go.mod b/go.mod index 6d11a213eac73..0d21185c3d966 100644 --- a/go.mod +++ b/go.mod @@ -38,15 +38,16 @@ require ( github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 github.com/caio/go-tdigest v3.1.0+incompatible github.com/cisco-ie/nx-telemetry-proto v0.0.0-20190531143454-82441e232cf6 - github.com/couchbase/go-couchbase v0.0.0-20180501122049-16db1f1fe037 - github.com/couchbase/gomemcached v0.0.0-20180502221210-0da75df14530 // indirect - github.com/couchbase/goutils v0.0.0-20180530154633-e865a1461c8a // indirect + github.com/couchbase/go-couchbase v0.1.0 + github.com/couchbase/gomemcached v0.1.3 // indirect + github.com/couchbase/goutils v0.1.0 // indirect github.com/denisenkom/go-mssqldb v0.10.0 github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1 github.com/dimchansky/utfbom v1.1.1 github.com/docker/docker v20.10.6+incompatible github.com/dynatrace-oss/dynatrace-metric-utils-go v0.1.0 github.com/eclipse/paho.mqtt.golang v1.3.0 + github.com/form3tech-oss/jwt-go v3.2.3+incompatible // indirect github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 github.com/go-logfmt/logfmt v0.5.0 github.com/go-ping/ping v0.0.0-20210201095549-52eed920f98c @@ -93,6 +94,7 @@ require ( github.com/moby/ipvs v1.0.1 github.com/multiplay/go-ts3 v1.0.0 github.com/naoina/go-stringutil v0.1.0 // indirect + github.com/nats-io/jwt v1.2.2 // indirect github.com/nats-io/nats-server/v2 v2.1.4 github.com/nats-io/nats.go v1.10.0 github.com/newrelic/newrelic-telemetry-sdk-go v0.5.1 @@ -110,7 +112,7 @@ require ( github.com/sensu/sensu-go/api/core/v2 v2.6.0 github.com/shirou/gopsutil v3.21.3+incompatible github.com/shopspring/decimal v0.0.0-20200105231215-408a2507e114 // indirect - github.com/signalfx/golib/v3 v3.3.0 + github.com/signalfx/golib/v3 v3.3.34 github.com/sirupsen/logrus v1.7.0 github.com/sleepinggenius2/gosmi v0.4.3 github.com/snowflakedb/gosnowflake v1.5.0 @@ -118,7 +120,7 @@ require ( github.com/stretchr/testify v1.7.0 github.com/tbrandon/mbserver v0.0.0-20170611213546-993e1772cc62 github.com/testcontainers/testcontainers-go v0.11.0 - github.com/tidwall/gjson v1.6.0 + github.com/tidwall/gjson v1.8.0 github.com/tinylib/msgp v1.1.5 github.com/tklauser/go-sysconf v0.3.5 // indirect github.com/vapourismo/knx-go v0.0.0-20201122213738-75fe09ace330 @@ -134,7 +136,7 @@ require ( golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d golang.org/x/sync v0.0.0-20201207232520-09787c993a3a - 
golang.org/x/sys v0.0.0-20210324051608-47abb6519492 + golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 golang.org/x/text v0.3.4 golang.org/x/tools v0.1.0 golang.zx2c4.com/wireguard/wgctrl v0.0.0-20200205215550-e35592f146e4 @@ -150,7 +152,7 @@ require ( gopkg.in/yaml.v2 v2.4.0 gotest.tools v2.2.0+incompatible k8s.io/api v0.20.4 - k8s.io/apimachinery v0.20.4 + k8s.io/apimachinery v0.21.1 k8s.io/client-go v0.20.4 modernc.org/sqlite v1.10.8 ) diff --git a/go.sum b/go.sum index 98ee98e484110..cef47cfccb2c2 100644 --- a/go.sum +++ b/go.sum @@ -399,12 +399,12 @@ github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+ github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/couchbase/go-couchbase v0.0.0-20180501122049-16db1f1fe037 h1:Dbz60fpCq04vRxVVVJLbQuL0G7pRt0Gyo2BkozFc4SQ= -github.com/couchbase/go-couchbase v0.0.0-20180501122049-16db1f1fe037/go.mod h1:TWI8EKQMs5u5jLKW/tsb9VwauIrMIxQG1r5fMsswK5U= -github.com/couchbase/gomemcached v0.0.0-20180502221210-0da75df14530 h1:F8nmbiuX+gCz9xvWMi6Ak8HQntB4ATFXP46gaxifbp4= -github.com/couchbase/gomemcached v0.0.0-20180502221210-0da75df14530/go.mod h1:srVSlQLB8iXBVXHgnqemxUXqN6FCvClgCMPCsjBDR7c= -github.com/couchbase/goutils v0.0.0-20180530154633-e865a1461c8a h1:Y5XsLCEhtEI8qbD9RP3Qlv5FXdTDHxZM9UPUnMRgBp8= -github.com/couchbase/goutils v0.0.0-20180530154633-e865a1461c8a/go.mod h1:BQwMFlJzDjFDG3DJUdU0KORxn88UlsOULuxLExMh3Hs= +github.com/couchbase/go-couchbase v0.1.0 h1:g4bCvDwRL+ZL6HLhYeRlXxEYP31Wpy0VFxnFw6efEp8= +github.com/couchbase/go-couchbase v0.1.0/go.mod h1:+/bddYDxXsf9qt0xpDUtRR47A2GjaXmGGAqQ/k3GJ8A= +github.com/couchbase/gomemcached v0.1.3 h1:HIc5qMYNbuhB7zNaiEtj61DCYkquAwrQlf64q7JzdEY= +github.com/couchbase/gomemcached v0.1.3/go.mod h1:mxliKQxOv84gQ0bJWbI+w9Wxdpt9HjDvgW9MjCym5Vo= +github.com/couchbase/goutils v0.1.0 h1:0WLlKJilu7IBm98T8nS9+J36lBFVLRUSIUtyD/uWpAE= +github.com/couchbase/goutils v0.1.0/go.mod h1:BQwMFlJzDjFDG3DJUdU0KORxn88UlsOULuxLExMh3Hs= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= @@ -506,8 +506,9 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= -github.com/form3tech-oss/jwt-go v3.2.2+incompatible h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/form3tech-oss/jwt-go v3.2.3+incompatible h1:7ZaBxOI7TMoYBfyA3cQHErNNyAWIKUMIwqxEtgHOs5c= +github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/franela/goblin 
v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= @@ -540,8 +541,9 @@ github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V github.com/go-logfmt/logfmt v0.5.0 h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= -github.com/go-logr/logr v0.2.0 h1:QvGt2nLcHH0WK9orKa+ppBPAxREcH364nPUedEpK0TY= github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-logr/logr v0.4.0 h1:K7/B1jt6fIBQVd4Owv2MqGQClcgf0R266+7C/QjRcLc= +github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= github.com/go-ole/go-ole v1.2.4 h1:nNBDSCOigTSiarFpYE9J/KtEA1IOW4CNeqT9TQDqCxI= github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM= github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI= @@ -1088,6 +1090,7 @@ github.com/moby/ipvs v1.0.1 h1:aoZ7fhLTXgDbzVrAnvV+XbKOU8kOET7B3+xULDF/1o0= github.com/moby/ipvs v1.0.1/go.mod h1:2pngiyseZbIKXNv7hsKj3O9UEz30c53MT9005gt2hxQ= github.com/moby/ipvs v1.0.1/go.mod h1:2pngiyseZbIKXNv7hsKj3O9UEz30c53MT9005gt2hxQ= github.com/moby/ipvs v1.0.1/go.mod h1:2pngiyseZbIKXNv7hsKj3O9UEz30c53MT9005gt2hxQ= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= github.com/moby/sys/mount v0.2.0 h1:WhCW5B355jtxndN5ovugJlMFJawbUODuW8fSnEH6SSM= github.com/moby/sys/mount v0.2.0 h1:WhCW5B355jtxndN5ovugJlMFJawbUODuW8fSnEH6SSM= github.com/moby/sys/mount v0.2.0/go.mod h1:aAivFE2LB3W4bACsUXChRHQ0qKWsetY4Y9V7sxOougM= @@ -1124,8 +1127,9 @@ github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+ github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hzifhks= github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= -github.com/nats-io/jwt v0.3.2 h1:+RB5hMpXUUA2dfxuhBTEkMOrYmM+gKIZYS1KjSostMI= github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/jwt v1.2.2 h1:w3GMTO969dFg+UOKTmmyuu7IGdusK+7Ytlt//OYH/uU= +github.com/nats-io/jwt v1.2.2/go.mod h1:/xX356yQA6LuXI9xWW7mZNpxgF2mBmGecH+Fj34sP5Q= github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= github.com/nats-io/nats-server/v2 v2.1.4 h1:BILRnsJ2Yb/fefiFbBWADpViGF69uh4sxe8poVDQ06g= github.com/nats-io/nats-server/v2 v2.1.4/go.mod h1:Jw1Z28soD/QasIA2uWjXyM9El1jly3YwyFOuR8tH1rg= @@ -1134,8 +1138,9 @@ github.com/nats-io/nats.go v1.10.0 h1:L8qnKaofSfNFbXg0C5F71LdjPRnmQwSsA4ukmkt1Tv github.com/nats-io/nats.go v1.10.0/go.mod h1:AjGArbfyR50+afOUotNX2Xs5SYHf+CoOa5HH1eEl2HE= github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= -github.com/nats-io/nkeys v0.1.4 h1:aEsHIssIk6ETN5m2/MD8Y4B2X7FfXrBAUdkyRvbVYzA= github.com/nats-io/nkeys v0.1.4/go.mod h1:XdZpAbhgyyODYqjTawOnIOI7VlbKSarI9Gfy1tqEu/s= +github.com/nats-io/nkeys v0.2.0 h1:WXKF7diOaPU9cJdLD7nuzwasQy9vT1tBqzXZZf3AMJM= +github.com/nats-io/nkeys v0.2.0/go.mod h1:XdZpAbhgyyODYqjTawOnIOI7VlbKSarI9Gfy1tqEu/s= github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= github.com/nats-io/nuid 
v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= @@ -1348,12 +1353,12 @@ github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJ github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= github.com/shurcooL/vfsgen v0.0.0-20200627165143-92b8a710ab6c/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= -github.com/signalfx/com_signalfx_metrics_protobuf v0.0.0-20190222193949-1fb69526e884 h1:KgLGEw137KEUtQnWBGzneCetphBj4+kKHRnhpAkXJC0= -github.com/signalfx/com_signalfx_metrics_protobuf v0.0.0-20190222193949-1fb69526e884/go.mod h1:muYA2clvwCdj7nzAJ5vJIXYpJsUumhAl4Uu1wUNpWzA= +github.com/signalfx/com_signalfx_metrics_protobuf v0.0.2 h1:X886QgwZH5qr9HIQkk3mWcNEhUxx6D8rUZumzLV4Wiw= +github.com/signalfx/com_signalfx_metrics_protobuf v0.0.2/go.mod h1:tCQQqyJAVF1+mxNdqOi18sS/zaSrE6EMyWwRA2QTl70= github.com/signalfx/gohistogram v0.0.0-20160107210732-1ccfd2ff5083 h1:WsShHmu12ZztYPfh9b+I+VjYD1o8iOHhB67WZCMEEE8= github.com/signalfx/gohistogram v0.0.0-20160107210732-1ccfd2ff5083/go.mod h1:adPDS6s7WaajdFBV9mQ7i0dKfQ8xiDnF9ZNETVPpp7c= -github.com/signalfx/golib/v3 v3.3.0 h1:vSXsAb73bdrlnjk5rnZ7y3t09Qzu9qfBEbXdcyBHsmE= -github.com/signalfx/golib/v3 v3.3.0/go.mod h1:GzjWpV0skAXZn7+u9LnkOkiXAx9KKd5XZcd5r+RoF5o= +github.com/signalfx/golib/v3 v3.3.34 h1:s78S24+exS0jH21oeSB1qPeiekIKkeXGv0hg7f67HvU= +github.com/signalfx/golib/v3 v3.3.34/go.mod h1:PB7OovVijH7OGhzMewarEcIZG3eG6akWMDucIb5Jnb4= github.com/signalfx/gomemcache v0.0.0-20180823214636-4f7ef64c72a9/go.mod h1:Ytb8KfCSyuwy/VILnROdgCvbQLA5ch0nkbG7lKT0BXw= github.com/signalfx/sapm-proto v0.4.0 h1:5lQX++6FeIjUZEIcnSgBqhOpmSjMkRBW3y/4ZiKMo5E= github.com/signalfx/sapm-proto v0.4.0/go.mod h1:x3gtwJ1GRejtkghB4nYpwixh2zqJrLbPU959ZNhM0Fk= @@ -1431,12 +1436,13 @@ github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ github.com/tedsuo/ifrit v0.0.0-20180802180643-bea94bb476cc/go.mod h1:eyZnKCc955uh98WQvzOm0dgAeLnf2O0Rz0LPoC5ze+0= github.com/testcontainers/testcontainers-go v0.11.0 h1:HO5YOx2DYBHqcg4MzVWPj3FuHAv7USWVu94vCSsgiaM= github.com/testcontainers/testcontainers-go v0.11.0/go.mod h1:HztBCODzuA+YpMXGK8amjO8j50jz2gcT0BOzSKUiYIs= -github.com/tidwall/gjson v1.6.0 h1:9VEQWz6LLMUsUl6PueE49ir4Ka6CzLymOAZDxpFsTDc= -github.com/tidwall/gjson v1.6.0/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJHhxOls= -github.com/tidwall/match v1.0.1 h1:PnKP62LPNxHKTwvHHZZzdOAOCtsJTjo6dZLCwpKm5xc= -github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E= -github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= +github.com/tidwall/gjson v1.8.0 h1:Qt+orfosKn0rbNTZqHYDqBrmm3UDA4KRkv70fDzG+PQ= +github.com/tidwall/gjson v1.8.0/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk= +github.com/tidwall/match v1.0.3 h1:FQUVvBImDutD8wJLN6c5eMzWtjgONK9MwIBCOrUJKeE= +github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.1.0 h1:K3hMW5epkdAVwibsQEfR/7Zj0Qgt4DxtNumTq/VloO8= +github.com/tidwall/pretty v1.1.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= github.com/tinylib/msgp v1.1.5 
h1:2gXmtWueD2HefZHQe1QOy9HVzmFrLOVvsXwXBQ0ayy0= github.com/tinylib/msgp v1.1.5/go.mod h1:eQsjooMTnV42mHu917E26IogZ2930nFyBQdofk10Udg= @@ -1503,6 +1509,7 @@ github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPS github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= @@ -1657,6 +1664,7 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210224082022-3d97a244fca7/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1774,10 +1782,12 @@ golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210316164454-77fc1eacc6aa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210324051608-47abb6519492 h1:Paq34FxTluEPvVyayQqMPgHm+vTOrIifmcYxFBx9TLg= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 h1:dXfMednGJh/SUUFjTLsWJz3P+TQt9qnR11GgeI3vWKs= +golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -2060,8 +2070,9 @@ k8s.io/api v0.20.4 h1:xZjKidCirayzX6tHONRQyTNDVIR55TYVqgATqo6ZULY= k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= k8s.io/apimachinery v0.18.8/go.mod h1:6sQd+iHEqmOtALqOFjSWp2KZ9F0wlU/nWm0ZgsYWMig= k8s.io/apimachinery v0.20.1/go.mod 
h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= -k8s.io/apimachinery v0.20.4 h1:vhxQ0PPUUU2Ns1b9r4/UFp13UPs8cw2iOoTjnY9faa0= k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.21.1 h1:Q6XuHGlj2xc+hlMCvqyYfbv3H7SRGn2c8NycxJquDVs= +k8s.io/apimachinery v0.21.1/go.mod h1:jbreFvJo3ov9rj7eWT7+sYiRx+qZuCYXwWT1bcDswPY= k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU= k8s.io/client-go v0.18.8/go.mod h1:HqFqMllQ5NnQJNwjro9k5zMyfhZlOwpuTLVrxjkYSxU= k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y= @@ -2077,10 +2088,12 @@ k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= -k8s.io/klog/v2 v2.4.0 h1:7+X0fUguPyrKEC4WjH8iGDg3laWgMo5tMnRTIGTTxGQ= k8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= +k8s.io/klog/v2 v2.8.0 h1:Q3gmuM9hKEjefWFFYF0Mat+YyFJvsUyYuwyNNJ5C9Ts= +k8s.io/klog/v2 v2.8.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= k8s.io/kube-openapi v0.0.0-20200410145947-61e04a5be9a6/go.mod h1:GRQhZsXIAJ1xR0C9bd8UpWHZ5plfAS9fzPjJuQ6JL3E= k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= +k8s.io/kube-openapi v0.0.0-20210305001622-591a79e4bda7/go.mod h1:wXW5VT87nVfh/iLV8FpR2uDvrFyomxbtb1KivDbvPTE= k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= k8s.io/utils v0.0.0-20200414100711-2df71ebbae66/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= @@ -2122,8 +2135,9 @@ rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= sigs.k8s.io/structured-merge-diff/v3 v3.0.0-20200116222232-67a7b8c61874/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= sigs.k8s.io/structured-merge-diff/v3 v3.0.0/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= -sigs.k8s.io/structured-merge-diff/v4 v4.0.2 h1:YHQV7Dajm86OuqnIR6zAelnDWBRjo+YhYV9PmGrh1s8= sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.1.0 h1:C4r9BgJ98vrKnnVCjwCSXcWjWe0NKcUQkmzDXZXGwH8= +sigs.k8s.io/structured-merge-diff/v4 v4.1.0/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= diff --git a/plugins/inputs/aliyuncms/README.md b/plugins/inputs/aliyuncms/README.md index c239baa63e05c..4e351ea6d8b37 100644 --- a/plugins/inputs/aliyuncms/README.md +++ b/plugins/inputs/aliyuncms/README.md @@ -1,4 +1,4 @@ -# Alibaba (aka Aliyun) CloudMonitor Service Statistics Input +# Alibaba (Aliyun) CloudMonitor Service Statistics Input Plugin Here and after we use `Aliyun` instead `Alibaba` as it is default naming across web console and docs. This plugin will pull Metric Statistics from Aliyun CMS. 
diff --git a/plugins/inputs/all/all.go b/plugins/inputs/all/all.go index 7c4e0bcf45c76..95cfcf6626444 100644 --- a/plugins/inputs/all/all.go +++ b/plugins/inputs/all/all.go @@ -44,6 +44,7 @@ import ( _ "github.com/influxdata/telegraf/plugins/inputs/dpdk" _ "github.com/influxdata/telegraf/plugins/inputs/ecs" _ "github.com/influxdata/telegraf/plugins/inputs/elasticsearch" + _ "github.com/influxdata/telegraf/plugins/inputs/elasticsearch_query" _ "github.com/influxdata/telegraf/plugins/inputs/ethtool" _ "github.com/influxdata/telegraf/plugins/inputs/eventhub_consumer" _ "github.com/influxdata/telegraf/plugins/inputs/exec" diff --git a/plugins/inputs/ceph/README.md b/plugins/inputs/ceph/README.md index dc58adb0ffe6b..5d5afadc19fad 100644 --- a/plugins/inputs/ceph/README.md +++ b/plugins/inputs/ceph/README.md @@ -2,7 +2,7 @@ Collects performance metrics from the MON and OSD nodes in a Ceph storage cluster. -Ceph has introduced a Telegraf and Influx plugin in the 13.x Mimic release. The Telegraf module sends to a Telegraf configured with a socket_listener. [Learn more in their docs](http://docs.ceph.com/docs/mimic/mgr/telegraf/) +Ceph has introduced a Telegraf and Influx plugin in the 13.x Mimic release. The Telegraf module sends to a Telegraf configured with a socket_listener. [Learn more in their docs](https://docs.ceph.com/en/latest/mgr/telegraf/) *Admin Socket Stats* diff --git a/plugins/inputs/cisco_telemetry_mdt/README.md b/plugins/inputs/cisco_telemetry_mdt/README.md index d15f122081d05..f4ca7243b8cde 100644 --- a/plugins/inputs/cisco_telemetry_mdt/README.md +++ b/plugins/inputs/cisco_telemetry_mdt/README.md @@ -1,4 +1,4 @@ -# Cisco model-driven telemetry (MDT) +# Cisco Model-Driven Telemetry (MDT) Input Plugin Cisco model-driven telemetry (MDT) is an input plugin that consumes telemetry data from Cisco IOS XR, IOS XE and NX-OS platforms. It supports TCP & GRPC dialout transports. 
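Since the hunk above mentions TCP and gRPC dialout transports, a minimal listener sketch is shown below. The `transport` and `service_address` option names are assumptions taken from the plugin's sample configuration; consult the plugin README for the authoritative list.

```toml
# Minimal sketch of a gRPC dialout listener; option names are assumptions.
[[inputs.cisco_telemetry_mdt]]
  ## Transport to listen on, e.g. "tcp" or "grpc" (assumed values)
  transport = "grpc"

  ## Address and port for incoming dialout sessions
  service_address = ":57000"
```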
diff --git a/plugins/inputs/dcos/client.go b/plugins/inputs/dcos/client.go index 534c2fcb1eab7..fcb976e311ccf 100644 --- a/plugins/inputs/dcos/client.go +++ b/plugins/inputs/dcos/client.go @@ -156,7 +156,7 @@ func (c *ClusterClient) Login(ctx context.Context, sa *ServiceAccount) (*AuthTok return nil, err } - loc := c.url("/acs/api/v1/auth/login") + loc := c.toURL("/acs/api/v1/auth/login") req, err := http.NewRequest("POST", loc, bytes.NewBuffer(octets)) if err != nil { return nil, err @@ -208,7 +208,7 @@ func (c *ClusterClient) Login(ctx context.Context, sa *ServiceAccount) (*AuthTok func (c *ClusterClient) GetSummary(ctx context.Context) (*Summary, error) { summary := &Summary{} - err := c.doGet(ctx, c.url("/mesos/master/state-summary"), summary) + err := c.doGet(ctx, c.toURL("/mesos/master/state-summary"), summary) if err != nil { return nil, err } @@ -220,7 +220,7 @@ func (c *ClusterClient) GetContainers(ctx context.Context, node string) ([]Conta list := []string{} path := fmt.Sprintf("/system/v1/agent/%s/metrics/v0/containers", node) - err := c.doGet(ctx, c.url(path), &list) + err := c.doGet(ctx, c.toURL(path), &list) if err != nil { return nil, err } @@ -233,10 +233,10 @@ func (c *ClusterClient) GetContainers(ctx context.Context, node string) ([]Conta return containers, nil } -func (c *ClusterClient) getMetrics(ctx context.Context, url string) (*Metrics, error) { +func (c *ClusterClient) getMetrics(ctx context.Context, address string) (*Metrics, error) { metrics := &Metrics{} - err := c.doGet(ctx, url, metrics) + err := c.doGet(ctx, address, metrics) if err != nil { return nil, err } @@ -246,21 +246,21 @@ func (c *ClusterClient) getMetrics(ctx context.Context, url string) (*Metrics, e func (c *ClusterClient) GetNodeMetrics(ctx context.Context, node string) (*Metrics, error) { path := fmt.Sprintf("/system/v1/agent/%s/metrics/v0/node", node) - return c.getMetrics(ctx, c.url(path)) + return c.getMetrics(ctx, c.toURL(path)) } func (c *ClusterClient) GetContainerMetrics(ctx context.Context, node, container string) (*Metrics, error) { path := fmt.Sprintf("/system/v1/agent/%s/metrics/v0/containers/%s", node, container) - return c.getMetrics(ctx, c.url(path)) + return c.getMetrics(ctx, c.toURL(path)) } func (c *ClusterClient) GetAppMetrics(ctx context.Context, node, container string) (*Metrics, error) { path := fmt.Sprintf("/system/v1/agent/%s/metrics/v0/containers/%s/app", node, container) - return c.getMetrics(ctx, c.url(path)) + return c.getMetrics(ctx, c.toURL(path)) } -func createGetRequest(url string, token string) (*http.Request, error) { - req, err := http.NewRequest("GET", url, nil) +func createGetRequest(address string, token string) (*http.Request, error) { + req, err := http.NewRequest("GET", address, nil) if err != nil { return nil, err } @@ -273,8 +273,8 @@ func createGetRequest(url string, token string) (*http.Request, error) { return req, nil } -func (c *ClusterClient) doGet(ctx context.Context, url string, v interface{}) error { - req, err := createGetRequest(url, c.token) +func (c *ClusterClient) doGet(ctx context.Context, address string, v interface{}) error { + req, err := createGetRequest(address, c.token) if err != nil { return err } @@ -304,7 +304,7 @@ func (c *ClusterClient) doGet(ctx context.Context, url string, v interface{}) er if resp.StatusCode < 200 || resp.StatusCode >= 300 { return &APIError{ - URL: url, + URL: address, StatusCode: resp.StatusCode, Title: resp.Status, } @@ -318,10 +318,10 @@ func (c *ClusterClient) doGet(ctx context.Context, url string, v 
interface{}) er return err } -func (c *ClusterClient) url(path string) string { - url := *c.clusterURL - url.Path = path - return url.String() +func (c *ClusterClient) toURL(path string) string { + clusterURL := *c.clusterURL + clusterURL.Path = path + return clusterURL.String() } func (c *ClusterClient) createLoginToken(sa *ServiceAccount) (string, error) { diff --git a/plugins/inputs/dcos/dcos.go b/plugins/inputs/dcos/dcos.go index 25e4e4755cc30..8fcb321ff36cf 100644 --- a/plugins/inputs/dcos/dcos.go +++ b/plugins/inputs/dcos/dcos.go @@ -10,6 +10,7 @@ import ( "time" jwt "github.com/dgrijalva/jwt-go/v4" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/filter" @@ -352,13 +353,13 @@ func (d *DCOS) createClient() (Client, error) { return nil, err } - url, err := url.Parse(d.ClusterURL) + address, err := url.Parse(d.ClusterURL) if err != nil { return nil, err } client := NewClusterClient( - url, + address, time.Duration(d.ResponseTimeout), d.MaxConnections, tlsCfg, diff --git a/plugins/inputs/directory_monitor/directory_monitor.go b/plugins/inputs/directory_monitor/directory_monitor.go index d8ed8acf04764..45acd1c062ba9 100644 --- a/plugins/inputs/directory_monitor/directory_monitor.go +++ b/plugins/inputs/directory_monitor/directory_monitor.go @@ -2,27 +2,27 @@ package directory_monitor import ( "bufio" + "compress/gzip" "context" "errors" "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" "regexp" + "sync" "time" + "golang.org/x/sync/semaphore" + "gopkg.in/djherbis/times.v1" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/plugins/inputs" "github.com/influxdata/telegraf/plugins/parsers" "github.com/influxdata/telegraf/plugins/parsers/csv" "github.com/influxdata/telegraf/selfstat" - "golang.org/x/sync/semaphore" - "gopkg.in/djherbis/times.v1" - - "compress/gzip" - "io" - "io/ioutil" - "os" - "path/filepath" - "sync" ) const sampleConfig = ` @@ -263,9 +263,7 @@ func (monitor *DirectoryMonitor) parseFile(parser parsers.Parser, reader io.Read if err != nil { return err } - if firstLine { - firstLine = false - } + firstLine = false if err := monitor.sendMetrics(metrics); err != nil { return err diff --git a/plugins/inputs/directory_monitor/directory_monitor_test.go b/plugins/inputs/directory_monitor/directory_monitor_test.go index 3cad4ee6857b9..2ad504637c6c2 100644 --- a/plugins/inputs/directory_monitor/directory_monitor_test.go +++ b/plugins/inputs/directory_monitor/directory_monitor_test.go @@ -8,9 +8,10 @@ import ( "path/filepath" "testing" + "github.com/stretchr/testify/require" + "github.com/influxdata/telegraf/plugins/parsers" "github.com/influxdata/telegraf/testutil" - "github.com/stretchr/testify/require" ) func TestCSVGZImport(t *testing.T) { @@ -77,8 +78,9 @@ func TestCSVGZImport(t *testing.T) { // File should have gone back to the test directory, as we configured. 
_, err = os.Stat(filepath.Join(finishedDirectory, testCsvFile)) - _, err = os.Stat(filepath.Join(finishedDirectory, testCsvGzFile)) + require.NoError(t, err) + _, err = os.Stat(filepath.Join(finishedDirectory, testCsvGzFile)) require.NoError(t, err) } diff --git a/plugins/inputs/disk/disk.go b/plugins/inputs/disk/disk.go index 0ceea27167389..0a0fbf6f728a3 100644 --- a/plugins/inputs/disk/disk.go +++ b/plugins/inputs/disk/disk.go @@ -13,7 +13,7 @@ type DiskStats struct { ps system.PS // Legacy support - Mountpoints []string `toml:"mountpoints"` + LegacyMountPoints []string `toml:"mountpoints"` MountPoints []string `toml:"mount_points"` IgnoreFS []string `toml:"ignore_fs"` @@ -38,8 +38,8 @@ func (ds *DiskStats) SampleConfig() string { func (ds *DiskStats) Gather(acc telegraf.Accumulator) error { // Legacy support: - if len(ds.Mountpoints) != 0 { - ds.MountPoints = ds.Mountpoints + if len(ds.LegacyMountPoints) != 0 { + ds.MountPoints = ds.LegacyMountPoints } disks, partitions, err := ds.ps.DiskUsage(ds.MountPoints, ds.IgnoreFS) diff --git a/plugins/inputs/disk/disk_test.go b/plugins/inputs/disk/disk_test.go index 13180fffb1c37..47a822b4410bf 100644 --- a/plugins/inputs/disk/disk_test.go +++ b/plugins/inputs/disk/disk_test.go @@ -5,12 +5,12 @@ import ( "os" "testing" - "github.com/influxdata/telegraf/plugins/inputs/system" - "github.com/influxdata/telegraf/testutil" - "github.com/shirou/gopsutil/disk" - "github.com/stretchr/testify/assert" + diskUtil "github.com/shirou/gopsutil/disk" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" + + "github.com/influxdata/telegraf/plugins/inputs/system" + "github.com/influxdata/telegraf/testutil" ) type MockFileInfo struct { @@ -25,7 +25,7 @@ func TestDiskUsage(t *testing.T) { var acc testutil.Accumulator var err error - psAll := []disk.PartitionStat{ + psAll := []diskUtil.PartitionStat{ { Device: "/dev/sda", Mountpoint: "/", @@ -39,7 +39,7 @@ func TestDiskUsage(t *testing.T) { Opts: "rw,noatime,nodiratime,errors=remount-ro", }, } - duAll := []disk.UsageStat{ + duAll := []diskUtil.UsageStat{ { Path: "/", Fstype: "ext4", @@ -72,7 +72,7 @@ func TestDiskUsage(t *testing.T) { numDiskMetrics := acc.NFields() expectedAllDiskMetrics := 14 - assert.Equal(t, expectedAllDiskMetrics, numDiskMetrics) + require.Equal(t, expectedAllDiskMetrics, numDiskMetrics) tags1 := map[string]string{ "path": string(os.PathSeparator), @@ -111,26 +111,28 @@ func TestDiskUsage(t *testing.T) { // We expect 6 more DiskMetrics to show up with an explicit match on "/" // and /home not matching the /dev in MountPoints err = (&DiskStats{ps: &mps, MountPoints: []string{"/", "/dev"}}).Gather(&acc) - assert.Equal(t, expectedAllDiskMetrics+7, acc.NFields()) + require.NoError(t, err) + require.Equal(t, expectedAllDiskMetrics+7, acc.NFields()) // We should see all the diskpoints as MountPoints includes both // / and /home err = (&DiskStats{ps: &mps, MountPoints: []string{"/", "/home"}}).Gather(&acc) - assert.Equal(t, 2*expectedAllDiskMetrics+7, acc.NFields()) + require.NoError(t, err) + require.Equal(t, 2*expectedAllDiskMetrics+7, acc.NFields()) } func TestDiskUsageHostMountPrefix(t *testing.T) { tests := []struct { name string - partitionStats []disk.PartitionStat - usageStats []*disk.UsageStat + partitionStats []diskUtil.PartitionStat + usageStats []*diskUtil.UsageStat hostMountPrefix string expectedTags map[string]string expectedFields map[string]interface{} }{ { name: "no host mount prefix", - partitionStats: []disk.PartitionStat{ + partitionStats: 
[]diskUtil.PartitionStat{ { Device: "/dev/sda", Mountpoint: "/", @@ -138,7 +140,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) { Opts: "ro", }, }, - usageStats: []*disk.UsageStat{ + usageStats: []*diskUtil.UsageStat{ { Path: "/", Total: 42, @@ -162,7 +164,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) { }, { name: "host mount prefix", - partitionStats: []disk.PartitionStat{ + partitionStats: []diskUtil.PartitionStat{ { Device: "/dev/sda", Mountpoint: "/hostfs/var", @@ -170,7 +172,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) { Opts: "ro", }, }, - usageStats: []*disk.UsageStat{ + usageStats: []*diskUtil.UsageStat{ { Path: "/hostfs/var", Total: 42, @@ -195,7 +197,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) { }, { name: "host mount prefix exact match", - partitionStats: []disk.PartitionStat{ + partitionStats: []diskUtil.PartitionStat{ { Device: "/dev/sda", Mountpoint: "/hostfs", @@ -203,7 +205,7 @@ func TestDiskUsageHostMountPrefix(t *testing.T) { Opts: "ro", }, }, - usageStats: []*disk.UsageStat{ + usageStats: []*diskUtil.UsageStat{ { Path: "/hostfs", Total: 42, @@ -259,7 +261,7 @@ func TestDiskStats(t *testing.T) { var acc testutil.Accumulator var err error - duAll := []*disk.UsageStat{ + duAll := []*diskUtil.UsageStat{ { Path: "/", Fstype: "ext4", @@ -281,7 +283,7 @@ func TestDiskStats(t *testing.T) { InodesUsed: 2000, }, } - duFiltered := []*disk.UsageStat{ + duFiltered := []*diskUtil.UsageStat{ { Path: "/", Fstype: "ext4", @@ -294,7 +296,7 @@ func TestDiskStats(t *testing.T) { }, } - psAll := []*disk.PartitionStat{ + psAll := []*diskUtil.PartitionStat{ { Device: "/dev/sda", Mountpoint: "/", @@ -309,7 +311,7 @@ func TestDiskStats(t *testing.T) { }, } - psFiltered := []*disk.PartitionStat{ + psFiltered := []*diskUtil.PartitionStat{ { Device: "/dev/sda", Mountpoint: "/", @@ -327,7 +329,7 @@ func TestDiskStats(t *testing.T) { numDiskMetrics := acc.NFields() expectedAllDiskMetrics := 14 - assert.Equal(t, expectedAllDiskMetrics, numDiskMetrics) + require.Equal(t, expectedAllDiskMetrics, numDiskMetrics) tags1 := map[string]string{ "path": "/", @@ -366,10 +368,12 @@ func TestDiskStats(t *testing.T) { // We expect 6 more DiskMetrics to show up with an explicit match on "/" // and /home not matching the /dev in MountPoints err = (&DiskStats{ps: &mps, MountPoints: []string{"/", "/dev"}}).Gather(&acc) - assert.Equal(t, expectedAllDiskMetrics+7, acc.NFields()) + require.NoError(t, err) + require.Equal(t, expectedAllDiskMetrics+7, acc.NFields()) // We should see all the diskpoints as MountPoints includes both // / and /home err = (&DiskStats{ps: &mps, MountPoints: []string{"/", "/home"}}).Gather(&acc) - assert.Equal(t, 2*expectedAllDiskMetrics+7, acc.NFields()) + require.NoError(t, err) + require.Equal(t, 2*expectedAllDiskMetrics+7, acc.NFields()) } diff --git a/plugins/inputs/diskio/diskio.go b/plugins/inputs/diskio/diskio.go index c347e90a36526..9458b2af7a68f 100644 --- a/plugins/inputs/diskio/diskio.go +++ b/plugins/inputs/diskio/diskio.go @@ -74,11 +74,11 @@ func hasMeta(s string) bool { func (d *DiskIO) init() error { for _, device := range d.Devices { if hasMeta(device) { - filter, err := filter.Compile(d.Devices) + deviceFilter, err := filter.Compile(d.Devices) if err != nil { return fmt.Errorf("error compiling device pattern: %s", err.Error()) } - d.deviceFilter = filter + d.deviceFilter = deviceFilter } } d.initialized = true diff --git a/plugins/inputs/diskio/diskio_linux_test.go b/plugins/inputs/diskio/diskio_linux_test.go index 222cb783f1870..ede35b5befead 
100644 --- a/plugins/inputs/diskio/diskio_linux_test.go +++ b/plugins/inputs/diskio/diskio_linux_test.go @@ -92,12 +92,14 @@ func TestDiskIOStats_diskName(t *testing.T) { } for _, tc := range tests { - s := DiskIO{ - NameTemplates: tc.templates, - } - defer setupNullDisk(t, &s, "null")() - name, _ := s.diskName("null") - require.Equal(t, tc.expected, name, "Templates: %#v", tc.templates) + func() { + s := DiskIO{ + NameTemplates: tc.templates, + } + defer setupNullDisk(t, &s, "null")() //nolint:revive // done on purpose, cleaning will be executed properly + name, _ := s.diskName("null") + require.Equal(t, tc.expected, name, "Templates: %#v", tc.templates) + }() } } @@ -107,7 +109,7 @@ func TestDiskIOStats_diskTags(t *testing.T) { s := &DiskIO{ DeviceTags: []string{"MY_PARAM_2"}, } - defer setupNullDisk(t, s, "null")() + defer setupNullDisk(t, s, "null")() //nolint:revive // done on purpose, cleaning will be executed properly dt := s.diskTags("null") require.Equal(t, map[string]string{"MY_PARAM_2": "myval2"}, dt) } diff --git a/plugins/inputs/disque/disque.go b/plugins/inputs/disque/disque.go index 6c2606af4ad94..6fa63ec8bd874 100644 --- a/plugins/inputs/disque/disque.go +++ b/plugins/inputs/disque/disque.go @@ -65,10 +65,10 @@ var ErrProtocolError = errors.New("disque protocol error") // Returns one of the errors encountered while gather stats (if any). func (d *Disque) Gather(acc telegraf.Accumulator) error { if len(d.Servers) == 0 { - url := &url.URL{ + address := &url.URL{ Host: ":7711", } - return d.gatherServer(url, acc) + return d.gatherServer(address, acc) } var wg sync.WaitGroup diff --git a/plugins/inputs/dns_query/dns_query.go b/plugins/inputs/dns_query/dns_query.go index 4c721a0964776..a3b2f262ba7e0 100644 --- a/plugins/inputs/dns_query/dns_query.go +++ b/plugins/inputs/dns_query/dns_query.go @@ -16,9 +16,9 @@ import ( type ResultType uint64 const ( - Success ResultType = 0 - Timeout = 1 - Error = 2 + Success ResultType = iota + Timeout + Error ) type DNSQuery struct { diff --git a/plugins/inputs/docker/client.go b/plugins/inputs/docker/client.go index 14e4396980b9a..6abba44c549d6 100644 --- a/plugins/inputs/docker/client.go +++ b/plugins/inputs/docker/client.go @@ -7,7 +7,7 @@ import ( "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/swarm" - docker "github.com/docker/docker/client" + dockerClient "github.com/docker/docker/client" ) var ( @@ -27,7 +27,7 @@ type Client interface { } func NewEnvClient() (Client, error) { - client, err := docker.NewClientWithOpts(docker.FromEnv) + client, err := dockerClient.NewClientWithOpts(dockerClient.FromEnv) if err != nil { return nil, err } @@ -40,11 +40,11 @@ func NewClient(host string, tlsConfig *tls.Config) (Client, error) { } httpClient := &http.Client{Transport: transport} - client, err := docker.NewClientWithOpts( - docker.WithHTTPHeaders(defaultHeaders), - docker.WithHTTPClient(httpClient), - docker.WithVersion(version), - docker.WithHost(host)) + client, err := dockerClient.NewClientWithOpts( + dockerClient.WithHTTPHeaders(defaultHeaders), + dockerClient.WithHTTPClient(httpClient), + dockerClient.WithVersion(version), + dockerClient.WithHost(host)) if err != nil { return nil, err } @@ -53,7 +53,7 @@ func NewClient(host string, tlsConfig *tls.Config) (Client, error) { } type SocketClient struct { - client *docker.Client + client *dockerClient.Client } func (c *SocketClient) Info(ctx context.Context) (types.Info, error) { diff --git a/plugins/inputs/docker/docker.go b/plugins/inputs/docker/docker.go index 
47eab7ce2430e..4e6dc5ad4d221 100644 --- a/plugins/inputs/docker/docker.go +++ b/plugins/inputs/docker/docker.go @@ -15,11 +15,12 @@ import ( "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/swarm" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/filter" "github.com/influxdata/telegraf/internal/choice" - "github.com/influxdata/telegraf/internal/docker" + dockerint "github.com/influxdata/telegraf/internal/docker" tlsint "github.com/influxdata/telegraf/plugins/common/tls" "github.com/influxdata/telegraf/plugins/inputs" ) @@ -513,7 +514,7 @@ func (d *Docker) gatherContainer( return nil } - imageName, imageVersion := docker.ParseImage(container.Image) + imageName, imageVersion := dockerint.ParseImage(container.Image) tags := map[string]string{ "engine_host": d.engineHost, @@ -628,18 +629,16 @@ func (d *Docker) gatherContainerInspect( } } - parseContainerStats(v, acc, tags, container.ID, d.PerDeviceInclude, d.TotalInclude, daemonOSType) + d.parseContainerStats(v, acc, tags, container.ID, daemonOSType) return nil } -func parseContainerStats( +func (d *Docker) parseContainerStats( stat *types.StatsJSON, acc telegraf.Accumulator, tags map[string]string, id string, - perDeviceInclude []string, - totalInclude []string, daemonOSType string, ) { tm := stat.Read @@ -708,7 +707,7 @@ func parseContainerStats( acc.AddFields("docker_container_mem", memfields, tags, tm) - if choice.Contains("cpu", totalInclude) { + if choice.Contains("cpu", d.TotalInclude) { cpufields := map[string]interface{}{ "usage_total": stat.CPUStats.CPUUsage.TotalUsage, "usage_in_usermode": stat.CPUStats.CPUUsage.UsageInUsermode, @@ -735,7 +734,7 @@ func parseContainerStats( acc.AddFields("docker_container_cpu", cpufields, cputags, tm) } - if choice.Contains("cpu", perDeviceInclude) && len(stat.CPUStats.CPUUsage.PercpuUsage) > 0 { + if choice.Contains("cpu", d.PerDeviceInclude) && len(stat.CPUStats.CPUUsage.PercpuUsage) > 0 { // If we have OnlineCPUs field, then use it to restrict stats gathering to only Online CPUs // (https://github.com/moby/moby/commit/115f91d7575d6de6c7781a96a082f144fd17e400) var percpuusage []uint64 @@ -770,12 +769,12 @@ func parseContainerStats( "container_id": id, } // Create a new network tag dictionary for the "network" tag - if choice.Contains("network", perDeviceInclude) { + if choice.Contains("network", d.PerDeviceInclude) { nettags := copyTags(tags) nettags["network"] = network acc.AddFields("docker_container_net", netfields, nettags, tm) } - if choice.Contains("network", totalInclude) { + if choice.Contains("network", d.TotalInclude) { for field, value := range netfields { if field == "container_id" { continue @@ -802,17 +801,14 @@ func parseContainerStats( } // totalNetworkStatMap could be empty if container is running with --net=host. 
- if choice.Contains("network", totalInclude) && len(totalNetworkStatMap) != 0 { + if choice.Contains("network", d.TotalInclude) && len(totalNetworkStatMap) != 0 { nettags := copyTags(tags) nettags["network"] = "total" totalNetworkStatMap["container_id"] = id acc.AddFields("docker_container_net", totalNetworkStatMap, nettags, tm) } - perDeviceBlkio := choice.Contains("blkio", perDeviceInclude) - totalBlkio := choice.Contains("blkio", totalInclude) - - gatherBlockIOMetrics(stat, acc, tags, tm, id, perDeviceBlkio, totalBlkio) + d.gatherBlockIOMetrics(acc, stat, tags, tm, id) } // Make a map of devices to their block io stats @@ -877,27 +873,27 @@ func getDeviceStatMap(blkioStats types.BlkioStats) map[string]map[string]interfa return deviceStatMap } -func gatherBlockIOMetrics( - stat *types.StatsJSON, +func (d *Docker) gatherBlockIOMetrics( acc telegraf.Accumulator, + stat *types.StatsJSON, tags map[string]string, tm time.Time, id string, - perDevice bool, - total bool, ) { + perDeviceBlkio := choice.Contains("blkio", d.PerDeviceInclude) + totalBlkio := choice.Contains("blkio", d.TotalInclude) blkioStats := stat.BlkioStats deviceStatMap := getDeviceStatMap(blkioStats) totalStatMap := make(map[string]interface{}) for device, fields := range deviceStatMap { fields["container_id"] = id - if perDevice { + if perDeviceBlkio { iotags := copyTags(tags) iotags["device"] = device acc.AddFields("docker_container_blkio", fields, iotags, tm) } - if total { + if totalBlkio { for field, value := range fields { if field == "container_id" { continue @@ -922,7 +918,7 @@ func gatherBlockIOMetrics( } } } - if total { + if totalBlkio { totalStatMap["container_id"] = id iotags := copyTags(tags) iotags["device"] = "total" @@ -965,20 +961,20 @@ func (d *Docker) createContainerFilters() error { d.ContainerInclude = append(d.ContainerInclude, d.ContainerNames...) 
} - filter, err := filter.NewIncludeExcludeFilter(d.ContainerInclude, d.ContainerExclude) + containerFilter, err := filter.NewIncludeExcludeFilter(d.ContainerInclude, d.ContainerExclude) if err != nil { return err } - d.containerFilter = filter + d.containerFilter = containerFilter return nil } func (d *Docker) createLabelFilters() error { - filter, err := filter.NewIncludeExcludeFilter(d.LabelInclude, d.LabelExclude) + labelFilter, err := filter.NewIncludeExcludeFilter(d.LabelInclude, d.LabelExclude) if err != nil { return err } - d.labelFilter = filter + d.labelFilter = labelFilter return nil } @@ -986,11 +982,11 @@ func (d *Docker) createContainerStateFilters() error { if len(d.ContainerStateInclude) == 0 && len(d.ContainerStateExclude) == 0 { d.ContainerStateInclude = []string{"running"} } - filter, err := filter.NewIncludeExcludeFilter(d.ContainerStateInclude, d.ContainerStateExclude) + stateFilter, err := filter.NewIncludeExcludeFilter(d.ContainerStateInclude, d.ContainerStateExclude) if err != nil { return err } - d.stateFilter = filter + d.stateFilter = stateFilter return nil } diff --git a/plugins/inputs/docker/docker_test.go b/plugins/inputs/docker/docker_test.go index f5a8ff7a89b83..599adae409e99 100644 --- a/plugins/inputs/docker/docker_test.go +++ b/plugins/inputs/docker/docker_test.go @@ -12,10 +12,11 @@ import ( "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/swarm" + "github.com/stretchr/testify/require" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/internal/choice" "github.com/influxdata/telegraf/testutil" - "github.com/stretchr/testify/require" ) type MockClient struct { @@ -120,7 +121,12 @@ func TestDockerGatherContainerStats(t *testing.T) { "container_image": "redis/image", } - parseContainerStats(stats, &acc, tags, "123456789", containerMetricClasses, containerMetricClasses, "linux") + d := &Docker{ + Log: testutil.Logger{}, + PerDeviceInclude: containerMetricClasses, + TotalInclude: containerMetricClasses, + } + d.parseContainerStats(stats, &acc, tags, "123456789", "linux") // test docker_container_net measurement netfields := map[string]interface{}{ @@ -1270,8 +1276,12 @@ func Test_parseContainerStatsPerDeviceAndTotal(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var acc testutil.Accumulator - parseContainerStats(tt.args.stat, &acc, tt.args.tags, tt.args.id, tt.args.perDeviceInclude, - tt.args.totalInclude, tt.args.daemonOSType) + d := &Docker{ + Log: testutil.Logger{}, + PerDeviceInclude: tt.args.perDeviceInclude, + TotalInclude: tt.args.totalInclude, + } + d.parseContainerStats(tt.args.stat, &acc, tt.args.tags, tt.args.id, tt.args.daemonOSType) actual := FilterMetrics(acc.GetTelegrafMetrics(), func(m telegraf.Metric) bool { return choice.Contains(m.Name(), diff --git a/plugins/inputs/docker_log/docker_log.go b/plugins/inputs/docker_log/docker_log.go index f877961ba2676..622f9924e4236 100644 --- a/plugins/inputs/docker_log/docker_log.go +++ b/plugins/inputs/docker_log/docker_log.go @@ -15,6 +15,7 @@ import ( "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/filters" "github.com/docker/docker/pkg/stdcopy" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/filter" @@ -307,8 +308,7 @@ func (d *DockerLogs) tailContainerLogs( func parseLine(line []byte) (time.Time, string, error) { parts := bytes.SplitN(line, []byte(" "), 2) - switch len(parts) { - case 1: + if len(parts) == 1 { parts = append(parts, []byte("")) } 
@@ -421,20 +421,20 @@ func (d *DockerLogs) Stop() { // Following few functions have been inherited from telegraf docker input plugin func (d *DockerLogs) createContainerFilters() error { - filter, err := filter.NewIncludeExcludeFilter(d.ContainerInclude, d.ContainerExclude) + containerFilter, err := filter.NewIncludeExcludeFilter(d.ContainerInclude, d.ContainerExclude) if err != nil { return err } - d.containerFilter = filter + d.containerFilter = containerFilter return nil } func (d *DockerLogs) createLabelFilters() error { - filter, err := filter.NewIncludeExcludeFilter(d.LabelInclude, d.LabelExclude) + labelFilter, err := filter.NewIncludeExcludeFilter(d.LabelInclude, d.LabelExclude) if err != nil { return err } - d.labelFilter = filter + d.labelFilter = labelFilter return nil } @@ -442,11 +442,11 @@ func (d *DockerLogs) createContainerStateFilters() error { if len(d.ContainerStateInclude) == 0 && len(d.ContainerStateExclude) == 0 { d.ContainerStateInclude = []string{"running"} } - filter, err := filter.NewIncludeExcludeFilter(d.ContainerStateInclude, d.ContainerStateExclude) + stateFilter, err := filter.NewIncludeExcludeFilter(d.ContainerStateInclude, d.ContainerStateExclude) if err != nil { return err } - d.stateFilter = filter + d.stateFilter = stateFilter return nil } diff --git a/plugins/inputs/dpdk/README.md b/plugins/inputs/dpdk/README.md index bd98af050427d..00398760d2e9d 100644 --- a/plugins/inputs/dpdk/README.md +++ b/plugins/inputs/dpdk/README.md @@ -1,4 +1,4 @@ -# DPDK Input Plugin +# Data Plane Development Kit (DPDK) Input Plugin The `dpdk` plugin collects metrics exposed by applications built with [Data Plane Development Kit](https://www.dpdk.org/) which is an extensive set of open source libraries designed for accelerating packet processing workloads. 
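For the DPDK plugin referenced above, a minimal configuration sketch follows; the `socket_path` and `device_types` option names, and the default socket location, are assumptions to be checked against the plugin's README.

```toml
# Minimal sketch; option names and the socket path are assumptions.
[[inputs.dpdk]]
  ## Path to the DPDK telemetry socket exposed by the monitored application
  socket_path = "/var/run/dpdk/rte/dpdk_telemetry.v2"

  ## Device classes to collect metrics for
  device_types = ["ethdev"]
```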
diff --git a/plugins/inputs/ecs/ecs.go b/plugins/inputs/ecs/ecs.go index d563fef5038d5..f044e8d2cb7fe 100644 --- a/plugins/inputs/ecs/ecs.go +++ b/plugins/inputs/ecs/ecs.go @@ -220,20 +220,20 @@ func mergeTags(a map[string]string, b map[string]string) map[string]string { } func (ecs *Ecs) createContainerNameFilters() error { - filter, err := filter.NewIncludeExcludeFilter(ecs.ContainerNameInclude, ecs.ContainerNameExclude) + containerNameFilter, err := filter.NewIncludeExcludeFilter(ecs.ContainerNameInclude, ecs.ContainerNameExclude) if err != nil { return err } - ecs.containerNameFilter = filter + ecs.containerNameFilter = containerNameFilter return nil } func (ecs *Ecs) createLabelFilters() error { - filter, err := filter.NewIncludeExcludeFilter(ecs.LabelInclude, ecs.LabelExclude) + labelFilter, err := filter.NewIncludeExcludeFilter(ecs.LabelInclude, ecs.LabelExclude) if err != nil { return err } - ecs.labelFilter = filter + ecs.labelFilter = labelFilter return nil } @@ -250,11 +250,11 @@ func (ecs *Ecs) createContainerStatusFilters() error { ecs.ContainerStatusExclude[i] = strings.ToUpper(exclude) } - filter, err := filter.NewIncludeExcludeFilter(ecs.ContainerStatusInclude, ecs.ContainerStatusExclude) + statusFilter, err := filter.NewIncludeExcludeFilter(ecs.ContainerStatusInclude, ecs.ContainerStatusExclude) if err != nil { return err } - ecs.statusFilter = filter + ecs.statusFilter = statusFilter return nil } diff --git a/plugins/inputs/elasticsearch/elasticsearch.go b/plugins/inputs/elasticsearch/elasticsearch.go index aac23d707edba..0bd4ce677cd9e 100644 --- a/plugins/inputs/elasticsearch/elasticsearch.go +++ b/plugins/inputs/elasticsearch/elasticsearch.go @@ -644,7 +644,8 @@ func (e *Elasticsearch) gatherSingleIndexStats(name string, index indexStat, now // determine shard tag and primary/replica designation shardType := "replica" - if flattened.Fields["routing_primary"] == true { + routingPrimary, _ := flattened.Fields["routing_primary"].(bool) + if routingPrimary { shardType = "primary" } delete(flattened.Fields, "routing_primary") diff --git a/plugins/inputs/elasticsearch/elasticsearch_test.go b/plugins/inputs/elasticsearch/elasticsearch_test.go index 1a24d3caaf66e..8248d063b6883 100644 --- a/plugins/inputs/elasticsearch/elasticsearch_test.go +++ b/plugins/inputs/elasticsearch/elasticsearch_test.go @@ -6,9 +6,9 @@ import ( "strings" "testing" - "github.com/influxdata/telegraf/testutil" - "github.com/stretchr/testify/require" + + "github.com/influxdata/telegraf/testutil" ) func defaultTags() map[string]string { @@ -206,8 +206,8 @@ func TestGatherClusterStatsMaster(t *testing.T) { info.masterID = masterID es.serverInfo["http://example.com:9200"] = info - IsMasterResultTokens := strings.Split(string(IsMasterResult), " ") - require.Equal(t, masterID, IsMasterResultTokens[0], "catmaster is incorrect") + isMasterResultTokens := strings.Split(IsMasterResult, " ") + require.Equal(t, masterID, isMasterResultTokens[0], "catmaster is incorrect") // now get node status, which determines whether we're master var acc testutil.Accumulator @@ -244,8 +244,8 @@ func TestGatherClusterStatsNonMaster(t *testing.T) { masterID, err := es.getCatMaster("junk") require.NoError(t, err) - IsNotMasterResultTokens := strings.Split(string(IsNotMasterResult), " ") - require.Equal(t, masterID, IsNotMasterResultTokens[0], "catmaster is incorrect") + isNotMasterResultTokens := strings.Split(IsNotMasterResult, " ") + require.Equal(t, masterID, isNotMasterResultTokens[0], "catmaster is incorrect") // now get node 
status, which determines whether we're master var acc testutil.Accumulator diff --git a/plugins/inputs/elasticsearch_query/README.md b/plugins/inputs/elasticsearch_query/README.md new file mode 100755 index 0000000000000..881cb6609b5b0 --- /dev/null +++ b/plugins/inputs/elasticsearch_query/README.md @@ -0,0 +1,158 @@ +# Elasticsearch query input plugin + +This [elasticsearch](https://www.elastic.co/) query plugin queries endpoints to obtain metrics from data stored in an Elasticsearch cluster. + +The following is supported: + +- return number of hits for a search query +- calculate the avg/max/min/sum for a numeric field, filtered by a query, aggregated per tag +- count number of terms for a particular field + +## Elasticsearch support + +This plugin is tested against Elasticsearch 5.x and 6.x releases. +It is currently known to break on 7.x and later versions. + +## Configuration + +```toml +[[inputs.elasticsearch_query]] + ## The full HTTP endpoint URL for your Elasticsearch instance + ## Multiple urls can be specified as part of the same cluster, + ## this means that only ONE of the urls will be written to each interval. + urls = [ "http://node1.es.example.com:9200" ] # required. + + ## Elasticsearch client timeout, defaults to "5s". + # timeout = "5s" + + ## Set to true to ask Elasticsearch for a list of all cluster nodes, + ## thus it is not necessary to list all nodes in the urls config option + # enable_sniffer = false + + ## Set the interval to check if the Elasticsearch nodes are available + ## This option is only used if enable_sniffer is also set (0s to disable it) + # health_check_interval = "10s" + + ## HTTP basic authentication details (eg. when using x-pack) + # username = "telegraf" + # password = "mypassword" + + ## Optional TLS Config + # tls_ca = "/etc/telegraf/ca.pem" + # tls_cert = "/etc/telegraf/cert.pem" + # tls_key = "/etc/telegraf/key.pem" + ## Use TLS but skip chain & host verification + # insecure_skip_verify = false + + [[inputs.elasticsearch_query.aggregation]] + ## measurement name for the results of the aggregation query + measurement_name = "measurement" + + ## Elasticsearch indexes to query (accept wildcards). + index = "index-*" + + ## The date/time field in the Elasticsearch index (mandatory). + date_field = "@timestamp" + + ## Time window to query (eg. "1m" to query documents from last minute). + ## Normally should be set to same as collection interval + query_period = "1m" + + ## Lucene query to filter results + # filter_query = "*" + + ## Fields to aggregate values (must be numeric fields) + # metric_fields = ["metric"] + + ## Aggregation function to use on the metric fields + ## Must be set if 'metric_fields' is set + ## Valid values are: avg, sum, min, max + # metric_function = "avg" + + ## Fields to be used as tags + ## Must be text, non-analyzed fields. Metric aggregations are performed per tag + # tags = ["field.keyword", "field2.keyword"] + + ## Set to true to not ignore documents when the tag(s) above are missing + # include_missing_tag = false + + ## String value of the tag when the tag does not exist + ## Used when include_missing_tag is true + # missing_tag_value = "null" +``` + +## Examples + +Please note that the `[[inputs.elasticsearch_query]]` section is still required for all of the examples below.
+ +### Search the average response time, per URI and per response status code + +```toml +[[inputs.elasticsearch_query.aggregation]] + measurement_name = "http_logs" + index = "my-index-*" + filter_query = "*" + metric_fields = ["response_time"] + metric_function = "avg" + tags = ["URI.keyword", "response.keyword"] + include_missing_tag = true + missing_tag_value = "null" + date_field = "@timestamp" + query_period = "1m" +``` + +### Search the maximum response time per method and per URI + +```toml +[[inputs.elasticsearch_query.aggregation]] + measurement_name = "http_logs" + index = "my-index-*" + filter_query = "*" + metric_fields = ["response_time"] + metric_function = "max" + tags = ["method.keyword","URI.keyword"] + include_missing_tag = false + missing_tag_value = "null" + date_field = "@timestamp" + query_period = "1m" +``` + +### Search number of documents matching a filter query in all indices + +```toml +[[inputs.elasticsearch_query.aggregation]] + measurement_name = "http_logs" + index = "*" + filter_query = "product_1 AND HEAD" + query_period = "1m" + date_field = "@timestamp" +``` + +### Search number of documents matching a filter query, returning per response status code + +```toml +[[inputs.elasticsearch_query.aggregation]] + measurement_name = "http_logs" + index = "*" + filter_query = "downloads" + tags = ["response.keyword"] + include_missing_tag = false + date_field = "@timestamp" + query_period = "1m" +``` + +### Required parameters + +- `measurement_name`: The target measurement in which to store the results of the aggregation query. +- `index`: The index name to query on Elasticsearch +- `query_period`: The time window to query (eg. "1m" to query documents from last minute). Normally this should be set to the same value as the collection interval +- `date_field`: The date/time field in the Elasticsearch index + +### Optional parameters + +- `filter_query`: Lucene query to filter the results (default: "\*") +- `metric_fields`: The list of fields to perform metric aggregation (these must be indexed as numeric fields) +- `metric_function`: The single-value metric aggregation function to be performed on the `metric_fields` defined. Currently supported aggregations are "avg", "min", "max", "sum". (see [https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics.html](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics.html)) +- `tags`: The list of fields to be used as tags (these must be indexed as non-analyzed fields). A "terms aggregation" will be done per tag defined +- `include_missing_tag`: Set to true to not ignore documents where the tag(s) specified above do not exist. (If false, documents without the specified tag field will be ignored in `doc_count` and in the metric aggregation) +- `missing_tag_value`: The value of the tag that will be set for documents in which the tag field does not exist. Only used when `include_missing_tag` is set to `true`.
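Behind the scenes, each `[[inputs.elasticsearch_query.aggregation]]` block is translated into Elasticsearch aggregations via the olivere/elastic v5 client (see `buildAggregationQuery` and `runAggregationQuery` later in this diff): every field in `metric_fields` becomes a single-value metric aggregation, those are nested under one terms aggregation per field in `tags`, and everything is filtered by a bool query combining `filter_query` with the `query_period` window on `date_field`. The following is a rough, standalone sketch of what the first example above boils down to; it uses only one tag field for brevity and is not the plugin's actual code:

```go
package main

import (
	"context"
	"fmt"
	"time"

	elastic5 "gopkg.in/olivere/elastic.v5"
)

func main() {
	client, err := elastic5.NewClient(
		elastic5.SetURL("http://node1.es.example.com:9200"),
		elastic5.SetSniff(false),
	)
	if err != nil {
		panic(err)
	}

	now := time.Now().UTC()
	from := now.Add(-1 * time.Minute) // query_period = "1m"

	// filter_query plus the query_period window on date_field
	query := elastic5.NewBoolQuery().
		Filter(elastic5.NewQueryStringQuery("*")).
		Filter(elastic5.NewRangeQuery("@timestamp").From(from).To(now))

	// metric_fields + metric_function become a single-value metric aggregation ...
	avgAgg := elastic5.NewAvgAggregation().Field("response_time")
	// ... nested under a terms aggregation per tag field
	termsAgg := elastic5.NewTermsAggregation().Field("URI.keyword").Size(1000).
		SubAggregation("response_time_avg", avgAgg)

	// Size(0) suppresses raw hits; only the aggregation results are needed
	result, err := client.Search().Index("my-index-*").Query(query).Size(0).
		Aggregation("URI_keyword", termsAgg).Do(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println("matching documents:", result.Hits.TotalHits)
}
```

The terms bucket size of 1000 mirrors the limit hard-coded in `buildAggregationQuery`; tag values beyond the first 1000 buckets are not returned by Elasticsearch.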
diff --git a/plugins/inputs/elasticsearch_query/aggregation_parser.go b/plugins/inputs/elasticsearch_query/aggregation_parser.go new file mode 100644 index 0000000000000..c4dff05ee6fee --- /dev/null +++ b/plugins/inputs/elasticsearch_query/aggregation_parser.go @@ -0,0 +1,153 @@ +package elasticsearch_query + +import ( + "fmt" + + "github.com/influxdata/telegraf" + elastic5 "gopkg.in/olivere/elastic.v5" +) + +type resultMetric struct { + name string + fields map[string]interface{} + tags map[string]string +} + +func parseSimpleResult(acc telegraf.Accumulator, measurement string, searchResult *elastic5.SearchResult) { + fields := make(map[string]interface{}) + tags := make(map[string]string) + + fields["doc_count"] = searchResult.Hits.TotalHits + + acc.AddFields(measurement, fields, tags) +} + +func parseAggregationResult(acc telegraf.Accumulator, aggregationQueryList []aggregationQueryData, searchResult *elastic5.SearchResult) error { + measurements := map[string]map[string]string{} + + // organize the aggregation query data by measurement + for _, aggregationQuery := range aggregationQueryList { + if measurements[aggregationQuery.measurement] == nil { + measurements[aggregationQuery.measurement] = map[string]string{ + aggregationQuery.name: aggregationQuery.function, + } + } else { + t := measurements[aggregationQuery.measurement] + t[aggregationQuery.name] = aggregationQuery.function + measurements[aggregationQuery.measurement] = t + } + } + + // recurse over query aggregation results per measurement + for measurement, aggNameFunction := range measurements { + var m resultMetric + + m.fields = make(map[string]interface{}) + m.tags = make(map[string]string) + m.name = measurement + + _, err := recurseResponse(acc, aggNameFunction, searchResult.Aggregations, m) + if err != nil { + return err + } + } + return nil +} + +func recurseResponse(acc telegraf.Accumulator, aggNameFunction map[string]string, bucketResponse elastic5.Aggregations, m resultMetric) (resultMetric, error) { + var err error + + aggNames := getAggNames(bucketResponse) + if len(aggNames) == 0 { + // we've reached a single bucket or response without aggregation, nothing here + return m, nil + } + + // metrics aggregations response can contain multiple field values, so we iterate over them + for _, aggName := range aggNames { + aggFunction, found := aggNameFunction[aggName] + if !found { + return m, fmt.Errorf("child aggregation function '%s' not found %v", aggName, aggNameFunction) + } + + resp := getResponseAggregation(aggFunction, aggName, bucketResponse) + if resp == nil { + return m, fmt.Errorf("child aggregation '%s' not found", aggName) + } + + switch resp := resp.(type) { + case *elastic5.AggregationBucketKeyItems: + // we've found a terms aggregation, iterate over the buckets and try to retrieve the inner aggregation values + for _, bucket := range resp.Buckets { + var s string + var ok bool + m.fields["doc_count"] = bucket.DocCount + if s, ok = bucket.Key.(string); !ok { + return m, fmt.Errorf("bucket key is not a string (%s, %s)", aggName, aggFunction) + } + m.tags[aggName] = s + + // we need to recurse down through the buckets, as it may contain another terms aggregation + m, err = recurseResponse(acc, aggNameFunction, bucket.Aggregations, m) + if err != nil { + return m, err + } + + // if there are fields present after finishing the bucket, it is a complete metric + // store it and clean the fields to start a new metric + if len(m.fields) > 0 { + acc.AddFields(m.name, m.fields, m.tags) + m.fields = 
make(map[string]interface{}) + } + + // after finishing the bucket, remove its tag from the tags map + delete(m.tags, aggName) + } + + case *elastic5.AggregationValueMetric: + if resp.Value != nil { + m.fields[aggName] = *resp.Value + } else { + m.fields[aggName] = float64(0) + } + + default: + return m, fmt.Errorf("aggregation type %T not supported", resp) + } + } + + // if there are fields here it comes from a metrics aggregation without a parent terms aggregation + if len(m.fields) > 0 { + acc.AddFields(m.name, m.fields, m.tags) + m.fields = make(map[string]interface{}) + } + return m, nil +} + +func getResponseAggregation(function string, aggName string, aggs elastic5.Aggregations) (agg interface{}) { + switch function { + case "avg": + agg, _ = aggs.Avg(aggName) + case "sum": + agg, _ = aggs.Sum(aggName) + case "min": + agg, _ = aggs.Min(aggName) + case "max": + agg, _ = aggs.Max(aggName) + case "terms": + agg, _ = aggs.Terms(aggName) + } + + return agg +} + +// getAggNames returns the aggregation names from a response aggregation +func getAggNames(agg elastic5.Aggregations) (aggs []string) { + for k := range agg { + if (k != "key") && (k != "doc_count") { + aggs = append(aggs, k) + } + } + + return aggs +} diff --git a/plugins/inputs/elasticsearch_query/aggregation_query.go b/plugins/inputs/elasticsearch_query/aggregation_query.go new file mode 100644 index 0000000000000..b5fa9db3c667a --- /dev/null +++ b/plugins/inputs/elasticsearch_query/aggregation_query.go @@ -0,0 +1,206 @@ +package elasticsearch_query + +import ( + "context" + "fmt" + "strings" + "time" + + elastic5 "gopkg.in/olivere/elastic.v5" +) + +type aggKey struct { + measurement string + name string + function string + field string +} + +type aggregationQueryData struct { + aggKey + isParent bool + aggregation elastic5.Aggregation +} + +func (e *ElasticsearchQuery) runAggregationQuery(ctx context.Context, aggregation esAggregation) (*elastic5.SearchResult, error) { + now := time.Now().UTC() + from := now.Add(time.Duration(-aggregation.QueryPeriod)) + filterQuery := aggregation.FilterQuery + if filterQuery == "" { + filterQuery = "*" + } + + query := elastic5.NewBoolQuery() + query = query.Filter(elastic5.NewQueryStringQuery(filterQuery)) + query = query.Filter(elastic5.NewRangeQuery(aggregation.DateField).From(from).To(now)) + + search := e.esClient.Search().Index(aggregation.Index).Query(query).Size(0) + + // add only parent elastic.Aggregations to the search request, all the rest are subaggregations of these + for _, v := range aggregation.aggregationQueryList { + if v.isParent && v.aggregation != nil { + search.Aggregation(v.aggKey.name, v.aggregation) + } + } + + searchResult, err := search.Do(ctx) + if err != nil && searchResult != nil { + return searchResult, fmt.Errorf("%s - %s", searchResult.Error.Type, searchResult.Error.Reason) + } + + return searchResult, err +} + +// getMetricFields function returns a map of fields and field types on Elasticsearch that matches field.MetricFields +func (e *ElasticsearchQuery) getMetricFields(ctx context.Context, aggregation esAggregation) (map[string]string, error) { + mapMetricFields := make(map[string]string) + + for _, metricField := range aggregation.MetricFields { + resp, err := e.esClient.GetFieldMapping().Index(aggregation.Index).Field(metricField).Do(ctx) + if err != nil { + return mapMetricFields, fmt.Errorf("error retrieving field mappings for %s: %s", aggregation.Index, err.Error()) + } + + for _, index := range resp { + var ok bool + var mappings interface{} + if 
mappings, ok = index.(map[string]interface{})["mappings"]; !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected map[string]interface{}, got %T)", index) + } + + var types map[string]interface{} + if types, ok = mappings.(map[string]interface{}); !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected map[string]interface{}, got %T)", mappings) + } + + var fields map[string]interface{} + for _, _type := range types { + if fields, ok = _type.(map[string]interface{}); !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected map[string]interface{}, got %T)", _type) + } + + var field map[string]interface{} + for _, _field := range fields { + if field, ok = _field.(map[string]interface{}); !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected map[string]interface{}, got %T)", _field) + } + + fullname := field["full_name"] + mapping := field["mapping"] + + var fname string + if fname, ok = fullname.(string); !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected string, got %T)", fullname) + } + + var fieldTypes map[string]interface{} + if fieldTypes, ok = mapping.(map[string]interface{}); !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected map[string]interface{}, got %T)", mapping) + } + + var fieldType interface{} + for _, _fieldType := range fieldTypes { + if fieldType, ok = _fieldType.(map[string]interface{})["type"]; !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected map[string]interface{}, got %T)", _fieldType) + } + + var ftype string + if ftype, ok = fieldType.(string); !ok { + return nil, fmt.Errorf("assertion error, wrong type (expected string, got %T)", fieldType) + } + mapMetricFields[fname] = ftype + } + } + } + } + } + + return mapMetricFields, nil +} + +func (aggregation *esAggregation) buildAggregationQuery() error { + // create one aggregation per metric field found & function defined for numeric fields + for k, v := range aggregation.mapMetricFields { + switch v { + case "long": + case "float": + case "integer": + case "short": + case "double": + case "scaled_float": + default: + continue + } + + agg, err := getFunctionAggregation(aggregation.MetricFunction, k) + if err != nil { + return err + } + + aggregationQuery := aggregationQueryData{ + aggKey: aggKey{ + measurement: aggregation.MeasurementName, + function: aggregation.MetricFunction, + field: k, + name: strings.Replace(k, ".", "_", -1) + "_" + aggregation.MetricFunction, + }, + isParent: true, + aggregation: agg, + } + + aggregation.aggregationQueryList = append(aggregation.aggregationQueryList, aggregationQuery) + } + + // create a terms aggregation per tag + for _, term := range aggregation.Tags { + agg := elastic5.NewTermsAggregation() + if aggregation.IncludeMissingTag && aggregation.MissingTagValue != "" { + agg.Missing(aggregation.MissingTagValue) + } + + agg.Field(term).Size(1000) + + // add each previous parent aggregations as subaggregations of this terms aggregation + for key, aggMap := range aggregation.aggregationQueryList { + if aggMap.isParent { + agg.Field(term).SubAggregation(aggMap.name, aggMap.aggregation).Size(1000) + // update subaggregation map with parent information + aggregation.aggregationQueryList[key].isParent = false + } + } + + aggregationQuery := aggregationQueryData{ + aggKey: aggKey{ + measurement: aggregation.MeasurementName, + function: "terms", + field: term, + name: strings.Replace(term, ".", "_", -1), + }, + isParent: true, + aggregation: agg, + } + + 
aggregation.aggregationQueryList = append(aggregation.aggregationQueryList, aggregationQuery) + } + + return nil +} + +func getFunctionAggregation(function string, aggfield string) (elastic5.Aggregation, error) { + var agg elastic5.Aggregation + + switch function { + case "avg": + agg = elastic5.NewAvgAggregation().Field(aggfield) + case "sum": + agg = elastic5.NewSumAggregation().Field(aggfield) + case "min": + agg = elastic5.NewMinAggregation().Field(aggfield) + case "max": + agg = elastic5.NewMaxAggregation().Field(aggfield) + default: + return nil, fmt.Errorf("aggregation function '%s' not supported", function) + } + + return agg, nil +} diff --git a/plugins/inputs/elasticsearch_query/elasticsearch_query.go b/plugins/inputs/elasticsearch_query/elasticsearch_query.go new file mode 100644 index 0000000000000..3c04f952b5bee --- /dev/null +++ b/plugins/inputs/elasticsearch_query/elasticsearch_query.go @@ -0,0 +1,314 @@ +package elasticsearch_query + +import ( + "context" + "fmt" + "net/http" + "strconv" + "strings" + "sync" + "time" + + elastic5 "gopkg.in/olivere/elastic.v5" + + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/config" + "github.com/influxdata/telegraf/plugins/common/tls" + "github.com/influxdata/telegraf/plugins/inputs" +) + +const sampleConfig = ` + ## The full HTTP endpoint URL for your Elasticsearch instance + ## Multiple urls can be specified as part of the same cluster, + ## this means that only ONE of the urls will be written to each interval. + urls = [ "http://node1.es.example.com:9200" ] # required. + + ## Elasticsearch client timeout, defaults to "5s". + # timeout = "5s" + + ## Set to true to ask Elasticsearch for a list of all cluster nodes, + ## thus it is not necessary to list all nodes in the urls config option + # enable_sniffer = false + + ## Set the interval to check if the Elasticsearch nodes are available + ## This option is only used if enable_sniffer is also set (0s to disable it) + # health_check_interval = "10s" + + ## HTTP basic authentication details (eg. when using x-pack) + # username = "telegraf" + # password = "mypassword" + + ## Optional TLS Config + # tls_ca = "/etc/telegraf/ca.pem" + # tls_cert = "/etc/telegraf/cert.pem" + # tls_key = "/etc/telegraf/key.pem" + ## Use TLS but skip chain & host verification + # insecure_skip_verify = false + + [[inputs.elasticsearch_query.aggregation]] + ## measurement name for the results of the aggregation query + measurement_name = "measurement" + + ## Elasticsearch indexes to query (accept wildcards). + index = "index-*" + + ## The date/time field in the Elasticsearch index (mandatory). + date_field = "@timestamp" + + ## Time window to query (eg. "1m" to query documents from last minute). + ## Normally should be set to same as collection interval + query_period = "1m" + + ## Lucene query to filter results + # filter_query = "*" + + ## Fields to aggregate values (must be numeric fields) + # metric_fields = ["metric"] + + ## Aggregation function to use on the metric fields + ## Must be set if 'metric_fields' is set + ## Valid values are: avg, sum, min, max + # metric_function = "avg" + + ## Fields to be used as tags + ## Must be text, non-analyzed fields.
Metric aggregations are performed per tag + # tags = ["field.keyword", "field2.keyword"] + + ## Set to true to not ignore documents when the tag(s) above are missing + # include_missing_tag = false + + ## String value of the tag when the tag does not exist + ## Used when include_missing_tag is true + # missing_tag_value = "null" +` + +// ElasticsearchQuery struct +type ElasticsearchQuery struct { + URLs []string `toml:"urls"` + Username string `toml:"username"` + Password string `toml:"password"` + EnableSniffer bool `toml:"enable_sniffer"` + Timeout config.Duration `toml:"timeout"` + HealthCheckInterval config.Duration `toml:"health_check_interval"` + Aggregations []esAggregation `toml:"aggregation"` + + Log telegraf.Logger `toml:"-"` + + tls.ClientConfig + httpclient *http.Client + esClient *elastic5.Client +} + +// esAggregation struct +type esAggregation struct { + Index string `toml:"index"` + MeasurementName string `toml:"measurement_name"` + DateField string `toml:"date_field"` + QueryPeriod config.Duration `toml:"query_period"` + FilterQuery string `toml:"filter_query"` + MetricFields []string `toml:"metric_fields"` + MetricFunction string `toml:"metric_function"` + Tags []string `toml:"tags"` + IncludeMissingTag bool `toml:"include_missing_tag"` + MissingTagValue string `toml:"missing_tag_value"` + mapMetricFields map[string]string + aggregationQueryList []aggregationQueryData +} + +// SampleConfig returns sample configuration for this plugin. +func (e *ElasticsearchQuery) SampleConfig() string { + return sampleConfig +} + +// Description returns the plugin description. +func (e *ElasticsearchQuery) Description() string { + return `Derive metrics from aggregating Elasticsearch query results` +} + +// Init the plugin. +func (e *ElasticsearchQuery) Init() error { + if e.URLs == nil { + return fmt.Errorf("elasticsearch urls is not defined") + } + + err := e.connectToES() + if err != nil { + e.Log.Errorf("E! 
error connecting to elasticsearch: %s", err) + return nil + } + + ctx, cancel := context.WithTimeout(context.Background(), time.Duration(e.Timeout)) + defer cancel() + + for i, agg := range e.Aggregations { + if agg.MeasurementName == "" { + return fmt.Errorf("field 'measurement_name' is not set") + } + if agg.DateField == "" { + return fmt.Errorf("field 'date_field' is not set") + } + err = e.initAggregation(ctx, agg, i) + if err != nil { + e.Log.Errorf("%s", err) + return nil + } + } + return nil +} + +func (e *ElasticsearchQuery) initAggregation(ctx context.Context, agg esAggregation, i int) (err error) { + // retrieve field mapping and build queries only once + agg.mapMetricFields, err = e.getMetricFields(ctx, agg) + if err != nil { + return fmt.Errorf("not possible to retrieve fields: %v", err.Error()) + } + + for _, metricField := range agg.MetricFields { + if _, ok := agg.mapMetricFields[metricField]; !ok { + return fmt.Errorf("metric field '%s' not found on index '%s'", metricField, agg.Index) + } + } + + err = agg.buildAggregationQuery() + if err != nil { + return err + } + + e.Aggregations[i] = agg + return nil +} + +func (e *ElasticsearchQuery) connectToES() error { + var clientOptions []elastic5.ClientOptionFunc + + if e.esClient != nil { + if e.esClient.IsRunning() { + return nil + } + } + + if e.httpclient == nil { + httpclient, err := e.createHTTPClient() + if err != nil { + return err + } + e.httpclient = httpclient + } + + clientOptions = append(clientOptions, + elastic5.SetHttpClient(e.httpclient), + elastic5.SetSniff(e.EnableSniffer), + elastic5.SetURL(e.URLs...), + elastic5.SetHealthcheckInterval(time.Duration(e.HealthCheckInterval)), + ) + + if e.Username != "" { + clientOptions = append(clientOptions, elastic5.SetBasicAuth(e.Username, e.Password)) + } + + if time.Duration(e.HealthCheckInterval) == 0 { + clientOptions = append(clientOptions, elastic5.SetHealthcheck(false)) + } + + client, err := elastic5.NewClient(clientOptions...) + if err != nil { + return err + } + + // check for ES version on first node + esVersion, err := client.ElasticsearchVersion(e.URLs[0]) + if err != nil { + return fmt.Errorf("elasticsearch version check failed: %s", err) + } + + esVersionSplit := strings.Split(esVersion, ".") + + // quit if ES version is not supported + if len(esVersionSplit) == 0 { + return fmt.Errorf("elasticsearch version check failed") + } + + i, err := strconv.Atoi(esVersionSplit[0]) + if err != nil || i < 5 || i > 6 { + return fmt.Errorf("elasticsearch version %s not supported (currently supported versions are 5.x and 6.x)", esVersion) + } + + e.esClient = client + return nil +} + +// Gather writes the results of the queries from Elasticsearch to the Accumulator. 
+func (e *ElasticsearchQuery) Gather(acc telegraf.Accumulator) error { + var wg sync.WaitGroup + + err := e.connectToES() + if err != nil { + return err + } + + for i, agg := range e.Aggregations { + wg.Add(1) + go func(agg esAggregation, i int) { + defer wg.Done() + err := e.esAggregationQuery(acc, agg, i) + if err != nil { + acc.AddError(fmt.Errorf("elasticsearch query aggregation %s: %s ", agg.MeasurementName, err.Error())) + } + }(agg, i) + } + + wg.Wait() + return nil +} + +func (e *ElasticsearchQuery) createHTTPClient() (*http.Client, error) { + tlsCfg, err := e.ClientConfig.TLSConfig() + if err != nil { + return nil, err + } + tr := &http.Transport{ + ResponseHeaderTimeout: time.Duration(e.Timeout), + TLSClientConfig: tlsCfg, + } + httpclient := &http.Client{ + Transport: tr, + Timeout: time.Duration(e.Timeout), + } + + return httpclient, nil +} + +func (e *ElasticsearchQuery) esAggregationQuery(acc telegraf.Accumulator, aggregation esAggregation, i int) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Duration(e.Timeout)) + defer cancel() + + // try to init the aggregation query if it is not done already + if aggregation.aggregationQueryList == nil { + err := e.initAggregation(ctx, aggregation, i) + if err != nil { + return err + } + aggregation = e.Aggregations[i] + } + + searchResult, err := e.runAggregationQuery(ctx, aggregation) + if err != nil { + return err + } + + if searchResult.Aggregations == nil { + parseSimpleResult(acc, aggregation.MeasurementName, searchResult) + return nil + } + + return parseAggregationResult(acc, aggregation.aggregationQueryList, searchResult) +} + +func init() { + inputs.Add("elasticsearch_query", func() telegraf.Input { + return &ElasticsearchQuery{ + Timeout: config.Duration(time.Second * 5), + HealthCheckInterval: config.Duration(time.Second * 10), + } + }) +} diff --git a/plugins/inputs/elasticsearch_query/elasticsearch_query_test.go b/plugins/inputs/elasticsearch_query/elasticsearch_query_test.go new file mode 100644 index 0000000000000..6a89dc8eea617 --- /dev/null +++ b/plugins/inputs/elasticsearch_query/elasticsearch_query_test.go @@ -0,0 +1,713 @@ +package elasticsearch_query + +import ( + "bufio" + "context" + "os" + "strconv" + "strings" + "sync" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/config" + "github.com/influxdata/telegraf/testutil" + "github.com/stretchr/testify/require" + elastic5 "gopkg.in/olivere/elastic.v5" +) + +var ( + testindex = "test-elasticsearch_query-" + strconv.Itoa(int(time.Now().Unix())) + setupOnce sync.Once +) + +type esAggregationQueryTest struct { + queryName string + testAggregationQueryInput esAggregation + testAggregationQueryData []aggregationQueryData + expectedMetrics []telegraf.Metric + wantBuildQueryErr bool + wantGetMetricFieldsErr bool + wantQueryResErr bool +} + +var queryPeriod = config.Duration(time.Second * 600) + +var testEsAggregationData = []esAggregationQueryTest{ + { + "query 1", + esAggregation{ + Index: testindex, + MeasurementName: "measurement1", + MetricFields: []string{"size"}, + FilterQuery: "product_1", + MetricFunction: "avg", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{"URI.keyword"}, + mapMetricFields: map[string]string{"size": "long"}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement1", name: "size_avg", function: "avg", field: "size"}, + isParent: false, + }, + { + aggKey: 
aggKey{measurement: "measurement1", name: "URI_keyword", function: "terms", field: "URI.keyword"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement1", + map[string]string{"URI_keyword": "/downloads/product_1"}, + map[string]interface{}{"size_avg": float64(202.30038022813687), "doc_count": int64(263)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 2", + esAggregation{ + Index: testindex, + MeasurementName: "measurement2", + MetricFields: []string{"size"}, + FilterQuery: "downloads", + MetricFunction: "max", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{"URI.keyword"}, + mapMetricFields: map[string]string{"size": "long"}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement2", name: "size_max", function: "max", field: "size"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement2", name: "URI_keyword", function: "terms", field: "URI.keyword"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement2", + map[string]string{"URI_keyword": "/downloads/product_1"}, + map[string]interface{}{"size_max": float64(3301), "doc_count": int64(263)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement2", + map[string]string{"URI_keyword": "/downloads/product_2"}, + map[string]interface{}{"size_max": float64(3318), "doc_count": int64(237)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 3", + esAggregation{ + Index: testindex, + MeasurementName: "measurement3", + MetricFields: []string{"size"}, + FilterQuery: "downloads", + MetricFunction: "sum", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{"response.keyword"}, + mapMetricFields: map[string]string{"size": "long"}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement3", name: "size_sum", function: "sum", field: "size"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement3", name: "response_keyword", function: "terms", field: "response.keyword"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement3", + map[string]string{"response_keyword": "200"}, + map[string]interface{}{"size_sum": float64(22790), "doc_count": int64(22)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement3", + map[string]string{"response_keyword": "304"}, + map[string]interface{}{"size_sum": float64(0), "doc_count": int64(219)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement3", + map[string]string{"response_keyword": "404"}, + map[string]interface{}{"size_sum": float64(86932), "doc_count": int64(259)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 4", + esAggregation{ + Index: testindex, + MeasurementName: "measurement4", + MetricFields: []string{"size", "response_time"}, + FilterQuery: "downloads", + MetricFunction: "min", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + IncludeMissingTag: true, + MissingTagValue: "missing", + Tags: []string{"response.keyword", "URI.keyword", "method.keyword"}, + mapMetricFields: map[string]string{"size": "long", "response_time": "long"}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement4", name: "size_min", function: "min", field: 
"size"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement4", name: "response_time_min", function: "min", field: "response_time"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement4", name: "response_keyword", function: "terms", field: "response.keyword"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement4", name: "URI_keyword", function: "terms", field: "URI.keyword"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement4", name: "method_keyword", function: "terms", field: "method.keyword"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "404", "URI_keyword": "/downloads/product_1", "method_keyword": "GET"}, + map[string]interface{}{"size_min": float64(318), "response_time_min": float64(126), "doc_count": int64(146)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "304", "URI_keyword": "/downloads/product_1", "method_keyword": "GET"}, + map[string]interface{}{"size_min": float64(0), "response_time_min": float64(71), "doc_count": int64(113)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "200", "URI_keyword": "/downloads/product_1", "method_keyword": "GET"}, + map[string]interface{}{"size_min": float64(490), "response_time_min": float64(1514), "doc_count": int64(3)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "404", "URI_keyword": "/downloads/product_2", "method_keyword": "GET"}, + map[string]interface{}{"size_min": float64(318), "response_time_min": float64(237), "doc_count": int64(113)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "304", "URI_keyword": "/downloads/product_2", "method_keyword": "GET"}, + map[string]interface{}{"size_min": float64(0), "response_time_min": float64(134), "doc_count": int64(106)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "200", "URI_keyword": "/downloads/product_2", "method_keyword": "GET"}, + map[string]interface{}{"size_min": float64(490), "response_time_min": float64(2), "doc_count": int64(13)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "200", "URI_keyword": "/downloads/product_1", "method_keyword": "HEAD"}, + map[string]interface{}{"size_min": float64(0), "response_time_min": float64(8479), "doc_count": int64(1)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement4", + map[string]string{"response_keyword": "200", "URI_keyword": "/downloads/product_2", "method_keyword": "HEAD"}, + map[string]interface{}{"size_min": float64(0), "response_time_min": float64(1059), "doc_count": int64(5)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 5", + esAggregation{ + Index: testindex, + MeasurementName: "measurement5", + FilterQuery: "product_2", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{"URI.keyword"}, + mapMetricFields: map[string]string{}, + }, + []aggregationQueryData{ + 
{ + aggKey: aggKey{measurement: "measurement5", name: "URI_keyword", function: "terms", field: "URI.keyword"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement5", + map[string]string{"URI_keyword": "/downloads/product_2"}, + map[string]interface{}{"doc_count": int64(237)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 6", + esAggregation{ + Index: testindex, + MeasurementName: "measurement6", + FilterQuery: "response: 200", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{"URI.keyword", "response.keyword"}, + mapMetricFields: map[string]string{}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement6", name: "URI_keyword", function: "terms", field: "URI.keyword"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement6", name: "response_keyword", function: "terms", field: "response.keyword"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement6", + map[string]string{"response_keyword": "200", "URI_keyword": "/downloads/product_1"}, + map[string]interface{}{"doc_count": int64(4)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + testutil.MustMetric( + "measurement6", + map[string]string{"response_keyword": "200", "URI_keyword": "/downloads/product_2"}, + map[string]interface{}{"doc_count": int64(18)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 7 - simple query", + esAggregation{ + Index: testindex, + MeasurementName: "measurement7", + FilterQuery: "response: 200", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{}, + mapMetricFields: map[string]string{}, + }, + nil, + []telegraf.Metric{ + testutil.MustMetric( + "measurement7", + map[string]string{}, + map[string]interface{}{"doc_count": int64(22)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 8", + esAggregation{ + Index: testindex, + MeasurementName: "measurement8", + MetricFields: []string{"size"}, + FilterQuery: "downloads", + MetricFunction: "max", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{}, + mapMetricFields: map[string]string{"size": "long"}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement8", name: "size_max", function: "max", field: "size"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement8", + map[string]string{}, + map[string]interface{}{"size_max": float64(3318)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 9 - invalid function", + esAggregation{ + Index: testindex, + MeasurementName: "measurement9", + MetricFields: []string{"size"}, + FilterQuery: "downloads", + MetricFunction: "average", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{}, + mapMetricFields: map[string]string{"size": "long"}, + }, + nil, + nil, + true, + false, + true, + }, + { + "query 10 - non-existing metric field", + esAggregation{ + Index: testindex, + MeasurementName: "measurement10", + MetricFields: []string{"none"}, + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{}, + mapMetricFields: map[string]string{}, + }, + nil, + nil, + false, + false, + true, + }, + { + "query 11 - non-existing index field", + esAggregation{ + Index: "notanindex", + MeasurementName: 
"measurement11", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + Tags: []string{}, + mapMetricFields: map[string]string{}, + }, + nil, + nil, + false, + false, + true, + }, + { + "query 12 - non-existing timestamp field", + esAggregation{ + Index: testindex, + MeasurementName: "measurement12", + MetricFields: []string{"size"}, + MetricFunction: "avg", + DateField: "@notatimestamp", + QueryPeriod: queryPeriod, + Tags: []string{}, + mapMetricFields: map[string]string{"size": "long"}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement12", name: "size_avg", function: "avg", field: "size"}, + isParent: true, + }, + }, + []telegraf.Metric{ + testutil.MustMetric( + "measurement12", + map[string]string{}, + map[string]interface{}{"size_avg": float64(0)}, + time.Date(2018, 6, 14, 5, 51, 53, 266176036, time.UTC), + ), + }, + false, + false, + false, + }, + { + "query 13 - non-existing tag field", + esAggregation{ + Index: testindex, + MeasurementName: "measurement13", + MetricFields: []string{"size"}, + MetricFunction: "avg", + DateField: "@timestamp", + QueryPeriod: queryPeriod, + IncludeMissingTag: false, + Tags: []string{"nothere"}, + mapMetricFields: map[string]string{"size": "long"}, + }, + []aggregationQueryData{ + { + aggKey: aggKey{measurement: "measurement13", name: "size_avg", function: "avg", field: "size"}, + isParent: false, + }, + { + aggKey: aggKey{measurement: "measurement13", name: "nothere", function: "terms", field: "nothere"}, + isParent: true, + }, + }, + nil, + false, + false, + false, + }, +} + +func setupIntegrationTest() error { + type nginxlog struct { + IPaddress string `json:"IP"` + Timestamp time.Time `json:"@timestamp"` + Method string `json:"method"` + URI string `json:"URI"` + Httpversion string `json:"http_version"` + Response string `json:"response"` + Size float64 `json:"size"` + ResponseTime float64 `json:"response_time"` + } + + e := &ElasticsearchQuery{ + URLs: []string{"http://" + testutil.GetLocalHost() + ":9200"}, + Timeout: config.Duration(time.Second * 30), + Log: testutil.Logger{}, + } + + err := e.connectToES() + if err != nil { + return err + } + + bulkRequest := e.esClient.Bulk() + + // populate elasticsearch with nginx_logs test data file + file, err := os.Open("testdata/nginx_logs") + if err != nil { + return err + } + + defer file.Close() + + scanner := bufio.NewScanner(file) + + for scanner.Scan() { + parts := strings.Split(scanner.Text(), " ") + size, _ := strconv.Atoi(parts[9]) + responseTime, _ := strconv.Atoi(parts[len(parts)-1]) + + logline := nginxlog{ + IPaddress: parts[0], + Timestamp: time.Now().UTC(), + Method: strings.Replace(parts[5], `"`, "", -1), + URI: parts[6], + Httpversion: strings.Replace(parts[7], `"`, "", -1), + Response: parts[8], + Size: float64(size), + ResponseTime: float64(responseTime), + } + + bulkRequest.Add(elastic5.NewBulkIndexRequest(). + Index(testindex). + Type("testquery_data"). 
+ Doc(logline)) + } + if scanner.Err() != nil { + return err + } + + _, err = bulkRequest.Do(context.Background()) + if err != nil { + return err + } + + // wait 5s (default) for Elasticsearch to index, so results are consistent + time.Sleep(time.Second * 5) + return nil +} + +func TestElasticsearchQuery(t *testing.T) { + if testing.Short() { + t.Skip("Skipping integration test in short mode") + } + + setupOnce.Do(func() { + err := setupIntegrationTest() + require.NoError(t, err) + }) + + var acc testutil.Accumulator + e := &ElasticsearchQuery{ + URLs: []string{"http://" + testutil.GetLocalHost() + ":9200"}, + Timeout: config.Duration(time.Second * 30), + Log: testutil.Logger{}, + } + + err := e.connectToES() + require.NoError(t, err) + + var aggs []esAggregation + var aggsErr []esAggregation + + for _, agg := range testEsAggregationData { + if !agg.wantQueryResErr { + aggs = append(aggs, agg.testAggregationQueryInput) + } + } + e.Aggregations = aggs + + require.NoError(t, e.Init()) + require.NoError(t, e.Gather(&acc)) + + if len(acc.Errors) > 0 { + t.Errorf("%s", acc.Errors) + } + + var expectedMetrics []telegraf.Metric + for _, result := range testEsAggregationData { + expectedMetrics = append(expectedMetrics, result.expectedMetrics...) + } + testutil.RequireMetricsEqual(t, expectedMetrics, acc.GetTelegrafMetrics(), testutil.SortMetrics(), testutil.IgnoreTime()) + + // aggregations that should return an error + for _, agg := range testEsAggregationData { + if agg.wantQueryResErr { + aggsErr = append(aggsErr, agg.testAggregationQueryInput) + } + } + e.Aggregations = aggsErr + require.NoError(t, e.Init()) + require.NoError(t, e.Gather(&acc)) + + if len(acc.Errors) != len(aggsErr) { + t.Errorf("expecting %v query result errors, got %v: %s", len(aggsErr), len(acc.Errors), acc.Errors) + } +} + +func TestElasticsearchQuery_getMetricFields(t *testing.T) { + if testing.Short() { + t.Skip("Skipping integration test in short mode") + } + + setupOnce.Do(func() { + err := setupIntegrationTest() + require.NoError(t, err) + }) + + type args struct { + ctx context.Context + aggregation esAggregation + } + + e := &ElasticsearchQuery{ + URLs: []string{"http://" + testutil.GetLocalHost() + ":9200"}, + Timeout: config.Duration(time.Second * 30), + Log: testutil.Logger{}, + } + + err := e.connectToES() + require.NoError(t, err) + + type test struct { + name string + e *ElasticsearchQuery + args args + want map[string]string + wantErr bool + } + + var tests []test + + for _, d := range testEsAggregationData { + tests = append(tests, test{ + "getMetricFields " + d.queryName, + e, + args{context.Background(), d.testAggregationQueryInput}, + d.testAggregationQueryInput.mapMetricFields, + d.wantGetMetricFieldsErr, + }) + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.e.getMetricFields(tt.args.ctx, tt.args.aggregation) + if (err != nil) != tt.wantErr { + t.Errorf("ElasticsearchQuery.buildAggregationQuery() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !cmp.Equal(got, tt.want) { + t.Errorf("ElasticsearchQuery.getMetricFields() = error = %s", cmp.Diff(got, tt.want)) + } + }) + } +} + +func TestElasticsearchQuery_buildAggregationQuery(t *testing.T) { + type test struct { + name string + aggregation esAggregation + want []aggregationQueryData + wantErr bool + } + var tests []test + + for _, d := range testEsAggregationData { + tests = append(tests, test{ + "build " + d.queryName, + d.testAggregationQueryInput, + d.testAggregationQueryData, + 
d.wantBuildQueryErr, + }) + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.aggregation.buildAggregationQuery() + if (err != nil) != tt.wantErr { + t.Errorf("ElasticsearchQuery.buildAggregationQuery() error = %v, wantErr %v", err, tt.wantErr) + return + } + + opts := []cmp.Option{ + cmp.AllowUnexported(aggKey{}, aggregationQueryData{}), + cmpopts.IgnoreFields(aggregationQueryData{}, "aggregation"), + cmpopts.SortSlices(func(x, y aggregationQueryData) bool { return x.aggKey.name > y.aggKey.name }), + } + + if !cmp.Equal(tt.aggregation.aggregationQueryList, tt.want, opts...) { + t.Errorf("ElasticsearchQuery.buildAggregationQuery(): %s error = %s ", tt.name, cmp.Diff(tt.aggregation.aggregationQueryList, tt.want, opts...)) + } + }) + } +} diff --git a/plugins/inputs/elasticsearch_query/testdata/nginx_logs b/plugins/inputs/elasticsearch_query/testdata/nginx_logs new file mode 100644 index 0000000000000..f6e9c8a110226 --- /dev/null +++ b/plugins/inputs/elasticsearch_query/testdata/nginx_logs @@ -0,0 +1,500 @@ +93.180.71.3 - - [17/May/2015:08:05:32 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 12060 +93.180.71.3 - - [17/May/2015:08:05:23 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 12355 +80.91.33.133 - - [17/May/2015:08:05:24 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 26272 +217.168.17.5 - - [17/May/2015:08:05:34 +0000] "GET /downloads/product_1 HTTP/1.1" 200 490 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 1514 +217.168.17.5 - - [17/May/2015:08:05:09 +0000] "GET /downloads/product_2 HTTP/1.1" 200 490 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 2204 +93.180.71.3 - - [17/May/2015:08:05:57 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 6012 +217.168.17.5 - - [17/May/2015:08:05:02 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 11220 +217.168.17.5 - - [17/May/2015:08:05:42 +0000] "GET /downloads/product_1 HTTP/1.1" 404 332 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 17843 +80.91.33.133 - - [17/May/2015:08:05:01 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 22599 +93.180.71.3 - - [17/May/2015:08:05:27 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 24828 +217.168.17.5 - - [17/May/2015:08:05:12 +0000] "GET /downloads/product_2 HTTP/1.1" 200 3316 "-" "-" 6947 +188.138.60.101 - - [17/May/2015:08:05:49 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 28288 +80.91.33.133 - - [17/May/2015:08:05:14 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 23182 +46.4.66.76 - - [17/May/2015:08:05:45 +0000] "GET /downloads/product_1 HTTP/1.1" 404 318 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 16302 +93.180.71.3 - - [17/May/2015:08:05:26 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 16102 +91.234.194.89 - - [17/May/2015:08:05:22 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 20268 +80.91.33.133 - - [17/May/2015:08:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 2794 +37.26.93.214 - - [17/May/2015:08:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 319 "-" "Go 1.1 package http" 22809 +188.138.60.101 - - 
[17/May/2015:08:05:25 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8807 +93.180.71.3 - - [17/May/2015:08:05:11 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 30172 +46.4.66.76 - - [17/May/2015:08:05:02 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 1973 +62.75.198.179 - - [17/May/2015:08:05:06 +0000] "GET /downloads/product_2 HTTP/1.1" 200 490 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10182 +80.91.33.133 - - [17/May/2015:08:05:55 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 14307 +173.203.139.108 - - [17/May/2015:08:05:53 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10828 +210.245.80.75 - - [17/May/2015:08:05:32 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 21956 +46.4.83.163 - - [17/May/2015:08:05:52 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5726 +91.234.194.89 - - [17/May/2015:08:05:18 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10841 +31.22.86.126 - - [17/May/2015:08:05:24 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 18132 +217.168.17.5 - - [17/May/2015:08:05:25 +0000] "GET /downloads/product_1 HTTP/1.1" 200 3301 "-" "-" 10094 +80.91.33.133 - - [17/May/2015:08:05:50 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 12355 +173.203.139.108 - - [17/May/2015:08:05:03 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 27325 +80.91.33.133 - - [17/May/2015:08:05:35 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 14101 +5.83.131.103 - - [17/May/2015:08:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 200 490 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 20175 +80.91.33.133 - - [17/May/2015:08:05:59 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 21384 +200.6.73.40 - - [17/May/2015:08:05:42 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 6570 +80.91.33.133 - - [17/May/2015:08:05:48 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 26145 +93.180.71.3 - - [17/May/2015:08:05:58 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 32705 +62.75.198.179 - - [17/May/2015:08:05:39 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18865 +50.57.209.92 - - [17/May/2015:08:05:41 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21639 +188.138.60.101 - - [17/May/2015:08:05:48 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 31242 +46.4.66.76 - - [17/May/2015:08:05:02 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 5910 +50.57.209.92 - - [17/May/2015:08:05:25 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 22900 +91.239.186.133 - - [17/May/2015:08:05:04 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23919 +173.203.139.108 - - [17/May/2015:08:05:08 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 25169 +80.91.33.133 - - 
[17/May/2015:08:05:04 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 24395 +93.190.71.150 - - [17/May/2015:08:05:33 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 25750 +91.234.194.89 - - [17/May/2015:08:05:57 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 26673 +46.4.83.163 - - [17/May/2015:08:05:20 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 32509 +173.203.139.108 - - [17/May/2015:08:05:39 +0000] "GET /downloads/product_1 HTTP/1.1" 404 335 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 32714 +54.187.216.43 - - [17/May/2015:08:05:07 +0000] "GET /downloads/product_2 HTTP/1.1" 200 951 "-" "urlgrabber/3.9.1 yum/3.4.3" 5016 +50.57.209.92 - - [17/May/2015:08:05:59 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 14449 +80.91.33.133 - - [17/May/2015:08:05:02 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 13183 +173.203.139.108 - - [17/May/2015:08:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 404 332 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 7791 +5.83.131.103 - - [17/May/2015:08:05:31 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 586 +173.203.139.108 - - [17/May/2015:08:05:14 +0000] "GET /downloads/product_1 HTTP/1.1" 404 334 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5036 +80.91.33.133 - - [17/May/2015:08:05:46 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 20358 +50.57.209.92 - - [17/May/2015:08:05:01 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2106 +80.91.33.133 - - [17/May/2015:08:05:41 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 9757 +37.26.93.214 - - [17/May/2015:08:05:52 +0000] "GET /downloads/product_2 HTTP/1.1" 200 3318 "-" "Go 1.1 package http" 6222 +23.23.226.37 - - [17/May/2015:08:05:19 +0000] "GET /downloads/product_2 HTTP/1.1" 200 2578 "-" "urlgrabber/3.9.1 yum/3.4.3" 9523 +93.180.71.3 - - [17/May/2015:08:05:20 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 7228 +173.203.139.108 - - [17/May/2015:08:05:56 +0000] "GET /downloads/product_1 HTTP/1.1" 404 331 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 31464 +62.75.198.179 - - [17/May/2015:08:05:13 +0000] "GET /downloads/product_2 HTTP/1.1" 404 346 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 22462 +31.22.86.126 - - [17/May/2015:08:05:10 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 29906 +50.57.209.92 - - [17/May/2015:08:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 16217 +91.239.186.133 - - [17/May/2015:08:05:11 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18335 +46.4.66.76 - - [17/May/2015:08:05:00 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 27375 +200.6.73.40 - - [17/May/2015:08:05:23 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 32073 +173.203.139.108 - - [17/May/2015:08:05:13 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 31071 +93.190.71.150 - - [17/May/2015:08:05:35 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 1200 +91.234.194.89 - - 
[17/May/2015:08:05:26 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 13143 +173.203.139.108 - - [17/May/2015:08:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 404 333 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 16138 +80.91.33.133 - - [17/May/2015:08:05:23 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 21432 +217.168.17.5 - - [17/May/2015:08:05:27 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 1419 +46.4.83.163 - - [17/May/2015:08:05:54 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 28449 +80.91.33.133 - - [17/May/2015:08:05:25 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 25906 +50.57.209.92 - - [17/May/2015:08:05:56 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 27099 +173.203.139.108 - - [17/May/2015:08:05:52 +0000] "GET /downloads/product_1 HTTP/1.1" 404 334 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 32238 +188.138.60.101 - - [17/May/2015:08:05:04 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 237 +80.91.33.133 - - [17/May/2015:08:05:11 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 7103 +134.119.20.172 - - [17/May/2015:08:05:26 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5423 +173.203.139.108 - - [17/May/2015:08:05:29 +0000] "GET /downloads/product_1 HTTP/1.1" 404 331 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 6373 +80.91.33.133 - - [17/May/2015:08:05:44 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 22230 +91.121.161.213 - - [17/May/2015:08:05:14 +0000] "GET /downloads/product_2 HTTP/1.1" 200 490 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 14196 +80.91.33.133 - - [17/May/2015:08:05:17 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 17820 +80.91.33.133 - - [17/May/2015:08:05:27 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 9097 +37.26.93.214 - - [17/May/2015:08:05:03 +0000] "GET /downloads/product_2 HTTP/1.1" 200 490 "-" "Go 1.1 package http" 27632 +5.83.131.103 - - [17/May/2015:08:05:57 +0000] "GET /downloads/product_1 HTTP/1.1" 404 346 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 14609 +50.57.209.92 - - [17/May/2015:08:05:39 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21926 +173.203.139.108 - - [17/May/2015:08:05:52 +0000] "GET /downloads/product_1 HTTP/1.1" 404 331 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4915 +54.64.16.235 - - [17/May/2015:08:05:13 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 12816 +93.180.71.3 - - [17/May/2015:08:05:28 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 30742 +202.143.95.26 - - [17/May/2015:08:05:55 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 24544 +202.143.95.26 - - [17/May/2015:08:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 25819 +202.143.95.26 - - [17/May/2015:08:05:01 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 26831 +80.91.33.133 - - [17/May/2015:08:05:14 +0000] "GET /downloads/product_1 
HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 1344 +91.239.186.133 - - [17/May/2015:08:05:03 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4987 +173.203.139.108 - - [17/May/2015:08:05:35 +0000] "GET /downloads/product_1 HTTP/1.1" 404 328 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 13419 +80.91.33.133 - - [17/May/2015:08:05:39 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 12879 +87.233.156.242 - - [17/May/2015:08:05:37 +0000] "GET /downloads/product_2 HTTP/1.1" 404 318 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 20611 +62.75.198.179 - - [17/May/2015:08:05:33 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 1387 +50.57.209.92 - - [17/May/2015:08:05:16 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 31286 +80.91.33.133 - - [17/May/2015:08:05:53 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 15247 +93.190.71.150 - - [17/May/2015:08:05:34 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 134 +46.4.66.76 - - [17/May/2015:08:05:38 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 23909 +80.91.33.133 - - [17/May/2015:08:05:09 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 15771 +91.234.194.89 - - [17/May/2015:08:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4641 +217.168.17.5 - - [17/May/2015:08:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 6382 +46.4.83.163 - - [17/May/2015:08:05:23 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 14599 +50.57.209.92 - - [17/May/2015:08:05:19 +0000] "GET /downloads/product_1 HTTP/1.1" 404 335 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8263 +200.6.73.40 - - [17/May/2015:08:05:46 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23514 +91.121.161.213 - - [17/May/2015:08:05:28 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29473 +80.91.33.133 - - [17/May/2015:08:05:52 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 26659 +188.138.60.101 - - [17/May/2015:08:05:22 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5147 +144.76.151.58 - - [17/May/2015:08:05:54 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21698 +134.119.20.172 - - [17/May/2015:09:05:28 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21077 +80.91.33.133 - - [17/May/2015:09:05:26 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 7173 +80.91.33.133 - - [17/May/2015:09:05:55 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 1878 +5.83.131.103 - - [17/May/2015:09:05:08 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 24451 +93.180.71.3 - - [17/May/2015:09:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 30170 +80.91.33.133 - - [17/May/2015:09:05:05 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 13156 +50.57.209.92 - - 
[17/May/2015:09:05:25 +0000] "GET /downloads/product_1 HTTP/1.1" 404 332 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 306 +5.83.131.103 - - [17/May/2015:09:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 404 345 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 24862 +62.75.167.106 - - [17/May/2015:09:05:56 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10227 +37.26.93.214 - - [17/May/2015:09:05:42 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Go 1.1 package http" 28504 +93.64.134.186 - - [17/May/2015:09:05:29 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 27681 +87.233.156.242 - - [17/May/2015:09:05:36 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 1502 +80.91.33.133 - - [17/May/2015:09:05:26 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 18177 +80.91.33.133 - - [17/May/2015:09:05:15 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 7934 +54.193.30.212 - - [17/May/2015:09:05:23 +0000] "GET /downloads/product_2 HTTP/1.1" 200 951 "-" "urlgrabber/3.9.1 yum/3.4.3" 2 +62.75.198.179 - - [17/May/2015:09:05:09 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23920 +91.239.186.133 - - [17/May/2015:09:05:46 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9333 +83.161.14.106 - - [17/May/2015:09:05:09 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 19640 +80.91.33.133 - - [17/May/2015:09:05:54 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 11061 +80.91.33.133 - - [17/May/2015:09:05:46 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 24501 +93.190.71.150 - - [17/May/2015:09:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 15895 +50.57.209.92 - - [17/May/2015:09:05:40 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 20558 +80.91.33.133 - - [17/May/2015:09:05:49 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 2338 +80.91.33.133 - - [17/May/2015:09:05:25 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 12192 +217.168.17.5 - - [17/May/2015:09:05:09 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 9824 +80.91.33.133 - - [17/May/2015:09:05:59 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 2246 +54.191.136.177 - - [17/May/2015:09:05:08 +0000] "GET /downloads/product_2 HTTP/1.1" 200 951 "-" "urlgrabber/3.9.1 yum/3.4.3" 7239 +80.91.33.133 - - [17/May/2015:09:05:27 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 21154 +91.234.194.89 - - [17/May/2015:09:05:57 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2966 +80.91.33.133 - - [17/May/2015:09:05:05 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 10715 +80.91.33.133 - - [17/May/2015:09:05:22 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 14856 +46.4.83.163 - - [17/May/2015:09:05:12 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" 
"Debian APT-HTTP/1.3 (0.9.7.9)" 17717 +91.121.161.213 - - [17/May/2015:09:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 404 346 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9951 +188.138.60.101 - - [17/May/2015:09:05:57 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 25787 +144.76.151.58 - - [17/May/2015:09:05:33 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4930 +195.154.77.170 - - [17/May/2015:09:05:00 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21921 +50.57.209.92 - - [17/May/2015:09:05:19 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29773 +31.22.86.126 - - [17/May/2015:09:05:41 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 7593 +54.64.16.235 - - [17/May/2015:09:05:51 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 26867 +202.143.95.26 - - [17/May/2015:09:05:20 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 31361 +202.143.95.26 - - [17/May/2015:09:05:28 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 13167 +87.233.156.242 - - [17/May/2015:09:05:47 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 22554 +62.75.167.106 - - [17/May/2015:09:05:37 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29795 +152.90.220.17 - - [17/May/2015:09:05:01 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18753 +80.91.33.133 - - [17/May/2015:09:05:02 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 27083 +93.180.71.3 - - [17/May/2015:09:05:38 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 28187 +80.91.33.133 - - [17/May/2015:09:05:03 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 25595 +5.83.131.103 - - [17/May/2015:09:05:15 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 26070 +5.83.131.103 - - [17/May/2015:09:05:56 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 27724 +200.6.73.40 - - [17/May/2015:09:05:33 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8086 +46.4.88.134 - - [17/May/2015:09:05:49 +0000] "GET /downloads/product_1 HTTP/1.1" 404 318 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 4853 +50.57.209.92 - - [17/May/2015:09:05:34 +0000] "GET /downloads/product_1 HTTP/1.1" 404 334 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9464 +93.64.134.186 - - [17/May/2015:09:05:28 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 12194 +80.91.33.133 - - [17/May/2015:09:05:50 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 26621 +62.75.198.180 - - [17/May/2015:09:05:55 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29857 +80.91.33.133 - - [17/May/2015:09:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 20514 +80.91.33.133 - - [17/May/2015:09:05:36 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 5526 
+62.75.198.179 - - [17/May/2015:09:05:46 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 14143 +80.91.33.133 - - [17/May/2015:09:05:17 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 20873 +91.239.186.133 - - [17/May/2015:09:05:16 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23230 +80.91.33.133 - - [17/May/2015:09:05:25 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 25246 +83.161.14.106 - - [17/May/2015:09:05:45 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 19052 +80.91.33.133 - - [17/May/2015:09:05:31 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 12362 +195.154.77.170 - - [17/May/2015:09:05:35 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10153 +93.190.71.150 - - [17/May/2015:09:05:56 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 22418 +80.91.33.133 - - [17/May/2015:09:05:43 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 6565 +80.91.33.133 - - [17/May/2015:09:05:44 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 9883 +144.76.160.62 - - [17/May/2015:09:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 318 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 2564 +91.121.161.213 - - [17/May/2015:09:05:34 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 17140 +46.4.83.163 - - [17/May/2015:09:05:10 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 22794 +91.234.194.89 - - [17/May/2015:09:05:42 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 17718 +50.57.209.92 - - [17/May/2015:09:05:40 +0000] "GET /downloads/product_1 HTTP/1.1" 404 331 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5434 +188.138.60.101 - - [17/May/2015:09:05:41 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 573 +210.245.80.75 - - [17/May/2015:09:05:07 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 28482 +144.76.151.58 - - [17/May/2015:09:05:28 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 31161 +80.91.33.133 - - [17/May/2015:09:05:11 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 24151 +144.76.117.56 - - [17/May/2015:09:05:59 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 6185 +80.91.33.133 - - [17/May/2015:09:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 6276 +31.22.86.126 - - [17/May/2015:09:05:19 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 27127 +80.91.33.133 - - [17/May/2015:09:05:17 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 9549 +62.75.167.106 - - [17/May/2015:09:05:03 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21397 +87.233.156.242 - - [17/May/2015:09:05:17 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 10781 +152.90.220.18 - - [17/May/2015:09:05:11 +0000] "GET 
/downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 19773 +93.180.71.3 - - [17/May/2015:09:05:01 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 11889 +80.91.33.133 - - [17/May/2015:09:05:54 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 14111 +31.22.86.126 - - [17/May/2015:09:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 404 319 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 17787 +50.57.209.92 - - [17/May/2015:09:05:42 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18330 +5.83.131.103 - - [17/May/2015:09:05:49 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 8993 +46.4.88.134 - - [17/May/2015:09:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 17460 +80.91.33.133 - - [17/May/2015:09:05:06 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 32412 +80.91.33.133 - - [17/May/2015:09:05:19 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 12639 +62.75.198.180 - - [17/May/2015:09:05:43 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 32511 +80.91.33.133 - - [17/May/2015:09:05:22 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 29012 +80.91.33.133 - - [17/May/2015:09:05:23 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 9767 +5.83.131.103 - - [17/May/2015:09:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 12212 +5.83.131.103 - - [17/May/2015:09:05:22 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 2440 +5.83.131.103 - - [17/May/2015:09:05:27 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 8157 +195.154.77.170 - - [17/May/2015:09:05:23 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 16242 +202.143.95.26 - - [17/May/2015:09:05:08 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 22261 +93.64.134.186 - - [17/May/2015:09:05:19 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 15048 +85.214.47.178 - - [17/May/2015:09:05:39 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 27105 +83.161.14.106 - - [17/May/2015:09:05:15 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 32234 +80.70.214.71 - - [17/May/2015:09:05:20 +0000] "HEAD /downloads/product_1 HTTP/1.1" 200 0 "-" "Wget/1.13.4 (linux-gnu)" 8479 +87.233.156.242 - - [17/May/2015:09:05:08 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 20831 +54.64.16.235 - - [17/May/2015:09:05:55 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 18289 +50.57.209.92 - - [17/May/2015:09:05:29 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9858 +91.239.186.133 - - [17/May/2015:09:05:00 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 20442 +91.121.161.213 - - [17/May/2015:09:05:09 +0000] "GET /downloads/product_2 
HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9004 +200.6.73.40 - - [17/May/2015:09:05:30 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 13221 +62.75.198.179 - - [17/May/2015:09:05:49 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 954 +93.190.71.150 - - [17/May/2015:09:05:13 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 26398 +80.91.33.133 - - [17/May/2015:09:05:33 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 22775 +80.91.33.133 - - [17/May/2015:09:05:32 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 13886 +80.91.33.133 - - [17/May/2015:09:05:49 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 19340 +144.76.160.62 - - [17/May/2015:09:05:11 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 17157 +80.91.33.133 - - [17/May/2015:09:05:59 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 9971 +217.168.17.5 - - [17/May/2015:09:05:12 +0000] "GET /downloads/product_1 HTTP/1.1" 404 334 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 26268 +80.91.33.133 - - [17/May/2015:09:05:47 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 5983 +80.91.33.133 - - [17/May/2015:09:05:09 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 15296 +144.76.117.56 - - [17/May/2015:09:05:52 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 13922 +144.76.151.58 - - [17/May/2015:09:05:42 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10692 +80.91.33.133 - - [17/May/2015:10:05:40 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 22550 +62.75.167.106 - - [17/May/2015:10:05:47 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 20757 +80.91.33.133 - - [17/May/2015:10:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 25956 +37.187.238.39 - - [17/May/2015:10:05:22 +0000] "GET /downloads/product_2 HTTP/1.1" 404 318 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 16674 +80.70.214.71 - - [17/May/2015:10:05:13 +0000] "GET /downloads/product_2 HTTP/1.1" 404 327 "-" "Wget/1.13.4 (linux-gnu)" 15327 +91.234.194.89 - - [17/May/2015:10:05:48 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21807 +80.91.33.133 - - [17/May/2015:10:05:10 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 20469 +188.138.60.101 - - [17/May/2015:10:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10122 +80.91.33.133 - - [17/May/2015:10:05:01 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 1971 +80.91.33.133 - - [17/May/2015:10:05:32 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 7263 +93.180.71.3 - - [17/May/2015:10:05:28 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 953 +46.4.88.134 - - [17/May/2015:10:05:54 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian 
APT-HTTP/1.3 (1.0.1ubuntu2)" 23703 +80.91.33.133 - - [17/May/2015:10:05:53 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 126 +62.210.138.59 - - [17/May/2015:10:05:22 +0000] "GET /downloads/product_2 HTTP/1.1" 404 318 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 19171 +31.22.86.126 - - [17/May/2015:10:05:38 +0000] "GET /downloads/product_1 HTTP/1.1" 404 335 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 31107 +80.91.33.133 - - [17/May/2015:10:05:16 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 8252 +54.86.157.236 - - [17/May/2015:10:05:24 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 25651 +195.154.233.202 - - [17/May/2015:10:05:39 +0000] "GET /downloads/product_2 HTTP/1.1" 404 318 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 3446 +54.86.157.236 - - [17/May/2015:10:05:43 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 20770 +80.91.33.133 - - [17/May/2015:10:05:14 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 27979 +94.23.21.169 - - [17/May/2015:10:05:09 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 28723 +54.86.157.236 - - [17/May/2015:10:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 13439 +195.154.77.170 - - [17/May/2015:10:05:17 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 22432 +54.86.157.236 - - [17/May/2015:10:05:36 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 1572 +85.214.47.178 - - [17/May/2015:10:05:57 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 27196 +5.83.131.103 - - [17/May/2015:10:05:55 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 9637 +5.83.131.103 - - [17/May/2015:10:05:03 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 18830 +5.83.131.103 - - [17/May/2015:10:05:05 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 844 +5.83.131.103 - - [17/May/2015:10:05:08 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 20882 +80.91.33.133 - - [17/May/2015:10:05:40 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 1325 +80.91.33.133 - - [17/May/2015:10:05:39 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 11125 +84.53.65.28 - - [17/May/2015:10:05:25 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10771 +80.91.33.133 - - [17/May/2015:10:05:33 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 24891 +54.86.157.236 - - [17/May/2015:10:05:28 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 23541 +217.168.17.5 - - [17/May/2015:10:05:02 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.10.3)" 22323 +91.121.161.213 - - [17/May/2015:10:05:18 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29114 +80.70.214.71 - - [17/May/2015:10:05:33 +0000] "GET 
/downloads/product_1 HTTP/1.1" 404 329 "-" "Wget/1.13.4 (linux-gnu)" 13629 +144.76.160.62 - - [17/May/2015:10:05:10 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 32440 +54.86.157.236 - - [17/May/2015:10:05:52 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 20402 +93.64.134.186 - - [17/May/2015:10:05:54 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5113 +93.190.71.150 - - [17/May/2015:10:05:41 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 31729 +87.233.156.242 - - [17/May/2015:10:05:02 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 28958 +80.91.33.133 - - [17/May/2015:10:05:22 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 15630 +91.239.186.133 - - [17/May/2015:10:05:50 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 7488 +62.75.198.179 - - [17/May/2015:10:05:28 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9316 +144.76.117.56 - - [17/May/2015:10:05:46 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 9965 +178.32.54.253 - - [17/May/2015:10:05:33 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2881 +37.187.238.39 - - [17/May/2015:10:05:05 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 17544 +83.161.14.106 - - [17/May/2015:10:05:47 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 11419 +54.86.157.236 - - [17/May/2015:10:05:48 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 16406 +91.194.188.90 - - [17/May/2015:10:05:51 +0000] "HEAD /downloads/product_2 HTTP/1.1" 200 0 "-" "Wget/1.13.4 (linux-gnu)" 28324 +83.161.14.106 - - [17/May/2015:10:05:13 +0000] "GET /downloads/product_2 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 1893 +80.91.33.133 - - [17/May/2015:10:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 14697 +93.180.71.3 - - [17/May/2015:10:05:34 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 16168 +62.210.138.59 - - [17/May/2015:10:05:40 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 663 +46.4.88.134 - - [17/May/2015:10:05:16 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 27962 +202.143.95.26 - - [17/May/2015:10:05:50 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 18539 +202.143.95.26 - - [17/May/2015:10:05:02 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 13495 +202.143.95.26 - - [17/May/2015:10:05:10 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 3192 +62.75.198.180 - - [17/May/2015:10:05:36 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4349 +144.76.137.134 - - [17/May/2015:10:05:03 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 1395 +80.91.33.133 - - [17/May/2015:10:05:23 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian 
APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 12898 +54.86.157.236 - - [17/May/2015:10:05:11 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 26930 +80.70.214.71 - - [17/May/2015:10:05:22 +0000] "GET /downloads/product_2 HTTP/1.1" 404 326 "-" "Wget/1.13.4 (linux-gnu)" 16662 +91.234.194.89 - - [17/May/2015:10:05:06 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9445 +188.138.60.101 - - [17/May/2015:10:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18804 +80.91.33.133 - - [17/May/2015:10:05:33 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 22429 +195.154.233.202 - - [17/May/2015:10:05:47 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 8456 +94.23.21.169 - - [17/May/2015:10:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 32187 +144.76.151.58 - - [17/May/2015:10:05:10 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29276 +80.91.33.133 - - [17/May/2015:10:05:42 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 9700 +62.75.167.106 - - [17/May/2015:10:05:31 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10078 +80.91.33.133 - - [17/May/2015:10:05:41 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 7600 +50.57.209.92 - - [17/May/2015:10:05:16 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8540 +202.143.95.26 - - [17/May/2015:10:05:43 +0000] "GET /downloads/product_2 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 24400 +200.6.73.40 - - [17/May/2015:10:05:38 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29363 +195.154.77.170 - - [17/May/2015:10:05:33 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 17025 +54.187.216.43 - - [17/May/2015:10:05:55 +0000] "GET /downloads/product_2 HTTP/1.1" 200 951 "-" "urlgrabber/3.9.1 yum/3.4.3" 27997 +80.91.33.133 - - [17/May/2015:10:05:04 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 1806 +80.91.33.133 - - [17/May/2015:10:05:09 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 28234 +54.86.157.236 - - [17/May/2015:10:05:06 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 19286 +202.143.95.26 - - [17/May/2015:10:05:05 +0000] "GET /downloads/product_2 HTTP/1.1" 404 325 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 19522 +202.143.95.26 - - [17/May/2015:10:05:40 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 23841 +54.86.157.236 - - [17/May/2015:10:05:02 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 31135 +80.91.33.133 - - [17/May/2015:10:05:50 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 21510 +80.91.33.133 - - [17/May/2015:10:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 26977 +80.91.33.133 - - [17/May/2015:10:05:55 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian 
APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 1078 +80.91.33.133 - - [17/May/2015:10:05:47 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 7473 +84.53.65.28 - - [17/May/2015:10:05:30 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 28347 +92.50.100.22 - - [17/May/2015:10:05:15 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8699 +85.214.47.178 - - [17/May/2015:10:05:30 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2078 +80.91.33.133 - - [17/May/2015:10:05:08 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 7013 +54.86.157.236 - - [17/May/2015:10:05:36 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 29440 +5.83.131.103 - - [17/May/2015:10:05:05 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 24206 +37.187.238.39 - - [17/May/2015:10:05:33 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 5674 +80.91.33.133 - - [17/May/2015:10:05:04 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 15781 +195.210.47.239 - - [17/May/2015:10:05:49 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 1462 +80.91.33.133 - - [17/May/2015:10:05:11 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 9446 +54.64.16.235 - - [17/May/2015:10:05:12 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 23687 +178.32.54.253 - - [17/May/2015:10:05:54 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 17314 +144.92.16.161 - - [17/May/2015:10:05:39 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 4021 +54.86.157.236 - - [17/May/2015:10:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 13168 +87.233.156.242 - - [17/May/2015:10:05:49 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 8142 +31.22.86.126 - - [17/May/2015:10:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 404 332 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 28923 +80.91.33.133 - - [17/May/2015:10:05:49 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 17021 +91.121.161.213 - - [17/May/2015:10:05:48 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 711 +80.91.33.133 - - [17/May/2015:10:05:06 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 15815 +50.57.209.92 - - [17/May/2015:10:05:19 +0000] "GET /downloads/product_1 HTTP/1.1" 404 333 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 12290 +91.239.186.133 - - [17/May/2015:10:05:15 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9172 +144.76.117.56 - - [17/May/2015:10:05:31 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 27106 +144.76.160.62 - - [17/May/2015:10:05:47 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 2607 +62.210.138.59 - - [17/May/2015:10:05:45 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" 
"Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 26922 +54.86.157.236 - - [17/May/2015:10:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 2045 +62.75.198.179 - - [17/May/2015:10:05:14 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 14090 +93.190.71.150 - - [17/May/2015:10:05:07 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2233 +144.76.117.56 - - [17/May/2015:10:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 14988 +94.23.21.169 - - [17/May/2015:10:05:23 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 11645 +91.194.188.90 - - [17/May/2015:10:05:05 +0000] "HEAD /downloads/product_2 HTTP/1.1" 200 0 "-" "Wget/1.13.4 (linux-gnu)" 28064 +93.64.134.186 - - [17/May/2015:10:05:51 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 16583 +54.86.157.236 - - [17/May/2015:10:05:48 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 23208 +80.70.214.71 - - [17/May/2015:10:05:23 +0000] "HEAD /downloads/product_2 HTTP/1.1" 200 0 "-" "Wget/1.13.4 (linux-gnu)" 1059 +93.180.71.3 - - [17/May/2015:10:05:22 +0000] "GET /downloads/product_1 HTTP/1.1" 404 333 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 16367 +195.154.233.202 - - [17/May/2015:10:05:43 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 26788 +193.192.58.163 - - [17/May/2015:11:05:31 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 6753 +144.76.137.134 - - [17/May/2015:11:05:00 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18307 +54.86.157.236 - - [17/May/2015:11:05:22 +0000] "GET /downloads/product_1 HTTP/1.1" 404 333 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 10520 +83.161.14.106 - - [17/May/2015:11:05:09 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 5640 +144.76.151.58 - - [17/May/2015:11:05:16 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9992 +144.92.16.161 - - [17/May/2015:11:05:06 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 3262 +195.154.77.170 - - [17/May/2015:11:05:20 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 17687 +62.75.198.180 - - [17/May/2015:11:05:05 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18911 +91.234.194.89 - - [17/May/2015:11:05:29 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 22038 +80.91.33.133 - - [17/May/2015:11:05:28 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 2238 +188.138.60.101 - - [17/May/2015:11:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10581 +62.75.167.106 - - [17/May/2015:11:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 14869 +46.4.88.134 - - [17/May/2015:11:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 6669 +80.91.33.133 - - [17/May/2015:11:05:35 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 12780 +80.91.33.133 - - 
[17/May/2015:11:05:05 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 24133 +84.53.65.28 - - [17/May/2015:11:05:25 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 14350 +152.90.220.17 - - [17/May/2015:11:05:08 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23513 +80.91.33.133 - - [17/May/2015:11:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 31695 +80.91.33.133 - - [17/May/2015:11:05:21 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 12243 +178.32.54.253 - - [17/May/2015:11:05:44 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2641 +54.72.39.202 - - [17/May/2015:11:05:19 +0000] "GET /downloads/product_2 HTTP/1.1" 200 951 "-" "urlgrabber/3.9.1 yum/3.4.3" 27639 +91.120.61.154 - - [17/May/2015:11:05:03 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21180 +37.187.238.39 - - [17/May/2015:11:05:25 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 30661 +85.214.47.178 - - [17/May/2015:11:05:12 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 20380 +80.91.33.133 - - [17/May/2015:11:05:47 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 11957 +5.83.131.103 - - [17/May/2015:11:05:10 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 19230 +200.6.73.40 - - [17/May/2015:11:05:19 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4087 +5.83.131.103 - - [17/May/2015:11:05:45 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 16383 +91.121.161.213 - - [17/May/2015:11:05:08 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 11487 +91.239.186.133 - - [17/May/2015:11:05:40 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 11774 +50.57.209.92 - - [17/May/2015:11:05:39 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 28472 +80.91.33.133 - - [17/May/2015:11:05:18 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 24011 +144.92.16.161 - - [17/May/2015:11:05:44 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 26633 +87.233.156.242 - - [17/May/2015:11:05:33 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 16170 +94.23.21.169 - - [17/May/2015:11:05:56 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 15992 +5.83.131.103 - - [17/May/2015:11:05:31 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 20999 +80.91.33.133 - - [17/May/2015:11:05:40 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 23097 +202.143.95.26 - - [17/May/2015:11:05:30 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 3282 +202.143.95.26 - - [17/May/2015:11:05:44 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 4869 +80.91.33.133 - - [17/May/2015:11:05:28 +0000] "GET 
/downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 9310 +80.91.33.133 - - [17/May/2015:11:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 23547 +80.91.33.133 - - [17/May/2015:11:05:11 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 5516 +80.91.33.133 - - [17/May/2015:11:05:13 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 26601 +62.210.138.59 - - [17/May/2015:11:05:23 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 26830 +144.76.160.62 - - [17/May/2015:11:05:06 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 15405 +93.190.71.150 - - [17/May/2015:11:05:29 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 16982 +80.91.33.133 - - [17/May/2015:11:05:00 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 6019 +202.143.95.26 - - [17/May/2015:11:05:29 +0000] "GET /downloads/product_2 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 3822 +193.192.58.163 - - [17/May/2015:11:05:54 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 13461 +195.154.233.202 - - [17/May/2015:11:05:46 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 32439 +80.70.214.71 - - [17/May/2015:11:05:59 +0000] "HEAD /downloads/product_2 HTTP/1.1" 200 0 "-" "Wget/1.13.4 (linux-gnu)" 31402 +62.75.198.179 - - [17/May/2015:11:05:17 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 452 +80.91.33.133 - - [17/May/2015:11:05:51 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 25508 +144.92.16.161 - - [17/May/2015:11:05:39 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 29252 +195.154.77.170 - - [17/May/2015:11:05:28 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 19649 +50.57.209.92 - - [17/May/2015:11:05:56 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 24457 +144.76.117.56 - - [17/May/2015:11:05:49 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 10519 +80.91.33.133 - - [17/May/2015:11:05:36 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 6815 +144.76.137.134 - - [17/May/2015:11:05:07 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 798 +188.138.60.101 - - [17/May/2015:11:05:00 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 19441 +54.172.198.124 - - [17/May/2015:11:05:43 +0000] "GET /downloads/product_2 HTTP/1.1" 200 2582 "-" "urlgrabber/3.9.1 yum/3.4.3" 17903 +37.187.238.39 - - [17/May/2015:11:05:27 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 3443 +178.32.54.253 - - [17/May/2015:11:05:03 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9634 +62.75.198.180 - - [17/May/2015:11:05:16 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5417 +62.75.167.106 - - [17/May/2015:11:05:26 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian 
APT-HTTP/1.3 (0.9.7.9)" 1055 +195.210.47.239 - - [17/May/2015:11:05:36 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 4218 +91.234.194.89 - - [17/May/2015:11:05:48 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23355 +31.22.86.126 - - [17/May/2015:11:05:19 +0000] "GET /downloads/product_1 HTTP/1.1" 404 334 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 29547 +91.194.188.90 - - [17/May/2015:11:05:42 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Wget/1.13.4 (linux-gnu)" 26988 +92.50.100.22 - - [17/May/2015:11:05:35 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 13600 +144.76.151.58 - - [17/May/2015:11:05:45 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 18988 +93.64.134.186 - - [17/May/2015:11:05:48 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2281 +85.214.47.178 - - [17/May/2015:11:05:19 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 16054 +94.23.21.169 - - [17/May/2015:11:05:11 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 21647 +80.91.33.133 - - [17/May/2015:11:05:31 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 31277 +80.91.33.133 - - [17/May/2015:11:05:20 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 19500 +91.121.161.213 - - [17/May/2015:11:05:03 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 29579 +83.161.14.106 - - [17/May/2015:11:05:52 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 1080 +54.64.16.235 - - [17/May/2015:11:05:43 +0000] "GET /downloads/product_2 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.20.1)" 15057 +84.53.65.28 - - [17/May/2015:11:05:31 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5805 +80.91.33.133 - - [17/May/2015:11:05:09 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 32764 +50.57.209.92 - - [17/May/2015:11:05:15 +0000] "GET /downloads/product_1 HTTP/1.1" 404 334 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 28248 +91.239.186.133 - - [17/May/2015:11:05:17 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 32046 +144.92.16.161 - - [17/May/2015:11:05:30 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 31342 +62.210.138.59 - - [17/May/2015:11:05:29 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 22861 +210.245.80.75 - - [17/May/2015:11:05:05 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 32649 +80.91.33.133 - - [17/May/2015:11:05:12 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 11268 +83.161.14.106 - - [17/May/2015:11:05:55 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 8233 +87.233.156.242 - - [17/May/2015:11:05:02 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 10052 +5.83.131.103 - - [17/May/2015:11:05:49 +0000] "GET /downloads/product_1 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 20084 +80.91.33.133 
- - [17/May/2015:11:05:05 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 9007 +91.120.61.154 - - [17/May/2015:11:05:48 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8410 +195.154.233.202 - - [17/May/2015:11:05:55 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 20582 +80.91.33.133 - - [17/May/2015:11:05:56 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 8327 +193.192.58.163 - - [17/May/2015:11:05:58 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4041 +93.190.71.150 - - [17/May/2015:11:05:11 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 26973 +144.76.160.62 - - [17/May/2015:11:05:20 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 24342 +50.57.209.92 - - [17/May/2015:11:05:56 +0000] "GET /downloads/product_1 HTTP/1.1" 404 331 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 27744 +62.75.198.179 - - [17/May/2015:11:05:19 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2455 +193.192.59.41 - - [17/May/2015:11:05:55 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 19596 +195.154.77.170 - - [17/May/2015:11:05:35 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23424 +80.91.33.133 - - [17/May/2015:11:05:17 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 4171 +200.6.73.40 - - [17/May/2015:11:05:26 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8274 +188.138.60.101 - - [17/May/2015:11:05:56 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2949 +80.91.33.133 - - [17/May/2015:11:05:53 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 5641 +80.91.33.133 - - [17/May/2015:11:05:42 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 28746 +80.91.33.133 - - [17/May/2015:11:05:17 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 18396 +80.91.33.133 - - [17/May/2015:11:05:32 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 17638 +80.91.33.133 - - [17/May/2015:11:05:23 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.17)" 7865 +144.76.137.134 - - [17/May/2015:11:05:57 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 4280 +80.70.214.71 - - [17/May/2015:11:05:16 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Wget/1.13.4 (linux-gnu)" 32436 +144.76.117.56 - - [17/May/2015:11:05:28 +0000] "GET /downloads/product_1 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 30048 +94.23.21.169 - - [17/May/2015:11:05:21 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 6186 +198.61.216.151 - - [17/May/2015:11:05:16 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 21567 +80.91.33.133 - - [17/May/2015:11:05:11 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 674 +91.194.188.90 - - [17/May/2015:11:05:32 +0000] "HEAD 
/downloads/product_2 HTTP/1.1" 200 0 "-" "Wget/1.13.4 (linux-gnu)" 5354 +62.75.198.180 - - [17/May/2015:11:05:39 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 5345 +80.91.33.133 - - [17/May/2015:11:05:52 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 2326 +31.22.86.126 - - [17/May/2015:12:05:15 +0000] "GET /downloads/product_1 HTTP/1.1" 404 331 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 3114 +84.53.65.28 - - [17/May/2015:12:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 337 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 9036 +144.92.16.161 - - [17/May/2015:12:05:32 +0000] "GET /downloads/product_1 HTTP/1.1" 404 324 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)" 9410 +50.57.209.92 - - [17/May/2015:12:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 2039 +5.83.131.103 - - [17/May/2015:12:05:26 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 14852 +5.83.131.103 - - [17/May/2015:12:05:27 +0000] "GET /downloads/product_1 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 71 +62.75.167.106 - - [17/May/2015:12:05:01 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 6439 +178.32.54.253 - - [17/May/2015:12:05:26 +0000] "GET /downloads/product_1 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8721 +91.121.161.213 - - [17/May/2015:12:05:00 +0000] "GET /downloads/product_2 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 1795 +91.234.194.89 - - [17/May/2015:12:05:11 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 8556 +37.187.238.39 - - [17/May/2015:12:05:29 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 17627 +91.239.186.133 - - [17/May/2015:12:05:38 +0000] "GET /downloads/product_2 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 10970 +87.233.156.242 - - [17/May/2015:12:05:34 +0000] "GET /downloads/product_2 HTTP/1.1" 404 333 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 409 +202.143.95.26 - - [17/May/2015:12:05:22 +0000] "GET /downloads/product_2 HTTP/1.1" 404 338 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 10283 +144.76.151.58 - - [17/May/2015:12:05:25 +0000] "GET /downloads/product_2 HTTP/1.1" 404 333 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 22461 +62.210.138.59 - - [17/May/2015:12:05:12 +0000] "GET /downloads/product_2 HTTP/1.1" 404 340 "-" "Debian APT-HTTP/1.3 (1.0.1ubuntu2)" 22736 +80.91.33.133 - - [17/May/2015:12:05:05 +0000] "GET /downloads/product_1 HTTP/1.1" 404 336 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 21014 +83.161.14.106 - - [17/May/2015:12:05:48 +0000] "GET /downloads/product_2 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 18047 +80.91.33.133 - - [17/May/2015:12:05:31 +0000] "GET /downloads/product_1 HTTP/1.1" 404 341 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 25206 +5.83.131.103 - - [17/May/2015:12:05:21 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 15330 +80.91.33.133 - - [17/May/2015:12:05:54 +0000] "GET /downloads/product_1 HTTP/1.1" 404 339 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.16)" 8763 +198.61.216.151 - - [17/May/2015:12:05:59 +0000] "GET /downloads/product_2 HTTP/1.1" 304 0 "-" "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.22)" 11132 +195.154.77.170 - - [17/May/2015:12:05:05 +0000] "GET /downloads/product_2 HTTP/1.1" 404 
338 "-" "Debian APT-HTTP/1.3 (0.9.7.9)" 23768 diff --git a/plugins/inputs/ethtool/ethtool_linux.go b/plugins/inputs/ethtool/ethtool_linux.go index 13dabd2f8a6b6..08e21db50dede 100644 --- a/plugins/inputs/ethtool/ethtool_linux.go +++ b/plugins/inputs/ethtool/ethtool_linux.go @@ -6,15 +6,16 @@ import ( "net" "sync" + "github.com/pkg/errors" + ethtoolLib "github.com/safchain/ethtool" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/filter" "github.com/influxdata/telegraf/plugins/inputs" - "github.com/pkg/errors" - "github.com/safchain/ethtool" ) type CommandEthtool struct { - ethtool *ethtool.Ethtool + ethtool *ethtoolLib.Ethtool } func (e *Ethtool) Gather(acc telegraf.Accumulator) error { @@ -98,7 +99,7 @@ func (c *CommandEthtool) Init() error { return nil } - e, err := ethtool.NewEthtool() + e, err := ethtoolLib.NewEthtool() if err == nil { c.ethtool = e } diff --git a/plugins/inputs/ethtool/ethtool_test.go b/plugins/inputs/ethtool/ethtool_test.go index ac5527733ce73..87bc136d2db11 100644 --- a/plugins/inputs/ethtool/ethtool_test.go +++ b/plugins/inputs/ethtool/ethtool_test.go @@ -6,9 +6,10 @@ import ( "net" "testing" - "github.com/influxdata/telegraf/testutil" "github.com/pkg/errors" "github.com/stretchr/testify/assert" + + "github.com/influxdata/telegraf/testutil" ) var command *Ethtool @@ -31,13 +32,12 @@ func (c *CommandEthtoolMock) Init() error { return nil } -func (c *CommandEthtoolMock) DriverName(intf string) (driverName string, err error) { +func (c *CommandEthtoolMock) DriverName(intf string) (string, error) { i := c.InterfaceMap[intf] if i != nil { - driverName = i.DriverName - return + return i.DriverName, nil } - return driverName, errors.New("interface not found") + return "", errors.New("interface not found") } func (c *CommandEthtoolMock) Interfaces() ([]net.Interface, error) { @@ -66,13 +66,12 @@ func (c *CommandEthtoolMock) Interfaces() ([]net.Interface, error) { return interfaceNames, nil } -func (c *CommandEthtoolMock) Stats(intf string) (stat map[string]uint64, err error) { +func (c *CommandEthtoolMock) Stats(intf string) (map[string]uint64, error) { i := c.InterfaceMap[intf] if i != nil { - stat = i.Stat - return + return i.Stat, nil } - return stat, errors.New("interface not found") + return nil, errors.New("interface not found") } func setup() { diff --git a/plugins/inputs/eventhub_consumer/eventhub_consumer.go b/plugins/inputs/eventhub_consumer/eventhub_consumer.go index 114a6335060ca..064502b0ed831 100644 --- a/plugins/inputs/eventhub_consumer/eventhub_consumer.go +++ b/plugins/inputs/eventhub_consumer/eventhub_consumer.go @@ -6,8 +6,9 @@ import ( "sync" "time" - eventhub "github.com/Azure/azure-event-hubs-go/v3" + eventhubClient "github.com/Azure/azure-event-hubs-go/v3" "github.com/Azure/azure-event-hubs-go/v3/persist" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/internal" "github.com/influxdata/telegraf/plugins/inputs" @@ -54,7 +55,7 @@ type EventHub struct { Log telegraf.Logger `toml:"-"` // Azure - hub *eventhub.Hub + hub *eventhubClient.Hub cancel context.CancelFunc wg sync.WaitGroup @@ -172,7 +173,7 @@ func (e *EventHub) Init() (err error) { } // Set hub options - hubOpts := []eventhub.HubOption{} + hubOpts := []eventhubClient.HubOption{} if e.PersistenceDir != "" { persister, err := persist.NewFilePersister(e.PersistenceDir) @@ -180,20 +181,20 @@ func (e *EventHub) Init() (err error) { return err } - hubOpts = append(hubOpts, eventhub.HubWithOffsetPersistence(persister)) + hubOpts = append(hubOpts, 
eventhubClient.HubWithOffsetPersistence(persister)) } if e.UserAgent != "" { - hubOpts = append(hubOpts, eventhub.HubWithUserAgent(e.UserAgent)) + hubOpts = append(hubOpts, eventhubClient.HubWithUserAgent(e.UserAgent)) } else { - hubOpts = append(hubOpts, eventhub.HubWithUserAgent(internal.ProductToken())) + hubOpts = append(hubOpts, eventhubClient.HubWithUserAgent(internal.ProductToken())) } // Create event hub connection if e.ConnectionString != "" { - e.hub, err = eventhub.NewHubFromConnectionString(e.ConnectionString, hubOpts...) + e.hub, err = eventhubClient.NewHubFromConnectionString(e.ConnectionString, hubOpts...) } else { - e.hub, err = eventhub.NewHubFromEnvironment(hubOpts...) + e.hub, err = eventhubClient.NewHubFromEnvironment(hubOpts...) } return err @@ -236,25 +237,25 @@ func (e *EventHub) Start(acc telegraf.Accumulator) error { return nil } -func (e *EventHub) configureReceiver() []eventhub.ReceiveOption { - receiveOpts := []eventhub.ReceiveOption{} +func (e *EventHub) configureReceiver() []eventhubClient.ReceiveOption { + receiveOpts := []eventhubClient.ReceiveOption{} if e.ConsumerGroup != "" { - receiveOpts = append(receiveOpts, eventhub.ReceiveWithConsumerGroup(e.ConsumerGroup)) + receiveOpts = append(receiveOpts, eventhubClient.ReceiveWithConsumerGroup(e.ConsumerGroup)) } if !e.FromTimestamp.IsZero() { - receiveOpts = append(receiveOpts, eventhub.ReceiveFromTimestamp(e.FromTimestamp)) + receiveOpts = append(receiveOpts, eventhubClient.ReceiveFromTimestamp(e.FromTimestamp)) } else if e.Latest { - receiveOpts = append(receiveOpts, eventhub.ReceiveWithLatestOffset()) + receiveOpts = append(receiveOpts, eventhubClient.ReceiveWithLatestOffset()) } if e.PrefetchCount != 0 { - receiveOpts = append(receiveOpts, eventhub.ReceiveWithPrefetchCount(e.PrefetchCount)) + receiveOpts = append(receiveOpts, eventhubClient.ReceiveWithPrefetchCount(e.PrefetchCount)) } if e.Epoch != 0 { - receiveOpts = append(receiveOpts, eventhub.ReceiveWithEpoch(e.Epoch)) + receiveOpts = append(receiveOpts, eventhubClient.ReceiveWithEpoch(e.Epoch)) } return receiveOpts @@ -263,7 +264,7 @@ func (e *EventHub) configureReceiver() []eventhub.ReceiveOption { // OnMessage handles an Event. When this function returns without error the // Event is immediately accepted and the offset is updated. If an error is // returned the Event is marked for redelivery. -func (e *EventHub) onMessage(ctx context.Context, event *eventhub.Event) error { +func (e *EventHub) onMessage(ctx context.Context, event *eventhubClient.Event) error { metrics, err := e.createMetrics(event) if err != nil { return err @@ -345,7 +346,7 @@ func deepCopyMetrics(in []telegraf.Metric) []telegraf.Metric { } // CreateMetrics returns the Metrics from the Event. 
-func (e *EventHub) createMetrics(event *eventhub.Event) ([]telegraf.Metric, error) { +func (e *EventHub) createMetrics(event *eventhubClient.Event) ([]telegraf.Metric, error) { metrics, err := e.parser.Parse(event.Data) if err != nil { return nil, err diff --git a/plugins/inputs/exec/exec.go b/plugins/inputs/exec/exec.go index afc6beb6a7a80..e8ba23db44522 100644 --- a/plugins/inputs/exec/exec.go +++ b/plugins/inputs/exec/exec.go @@ -4,20 +4,21 @@ import ( "bytes" "fmt" "io" - "os/exec" + osExec "os/exec" "path/filepath" "runtime" "strings" "sync" "time" + "github.com/kballard/go-shellquote" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/internal" "github.com/influxdata/telegraf/plugins/inputs" "github.com/influxdata/telegraf/plugins/parsers" "github.com/influxdata/telegraf/plugins/parsers/nagios" - "github.com/kballard/go-shellquote" ) const sampleConfig = ` @@ -76,7 +77,7 @@ func (c CommandRunner) Run( return nil, nil, fmt.Errorf("exec: unable to parse command, %s", err) } - cmd := exec.Command(splitCmd[0], splitCmd[1:]...) + cmd := osExec.Command(splitCmd[0], splitCmd[1:]...) var ( out bytes.Buffer diff --git a/plugins/inputs/execd/execd_test.go b/plugins/inputs/execd/execd_test.go index 72c84e1d12cc6..a8c8364394480 100644 --- a/plugins/inputs/execd/execd_test.go +++ b/plugins/inputs/execd/execd_test.go @@ -139,8 +139,8 @@ func (tm *TestMetricMaker) LogName() string { return tm.Name() } -func (tm *TestMetricMaker) MakeMetric(metric telegraf.Metric) telegraf.Metric { - return metric +func (tm *TestMetricMaker) MakeMetric(aMetric telegraf.Metric) telegraf.Metric { + return aMetric } func (tm *TestMetricMaker) Log() telegraf.Logger { diff --git a/plugins/inputs/knx_listener/README.md b/plugins/inputs/knx_listener/README.md index de015ddc2793b..7a06462ffbb3e 100644 --- a/plugins/inputs/knx_listener/README.md +++ b/plugins/inputs/knx_listener/README.md @@ -1,4 +1,4 @@ -# KNX input plugin +# KNX Input Plugin The KNX input plugin that listens for messages on the KNX home-automation bus. This plugin connects to the KNX bus via a KNX-IP interface. diff --git a/plugins/inputs/kube_inventory/README.md b/plugins/inputs/kube_inventory/README.md index c9d6fb0be467d..7803d4fc4e9eb 100644 --- a/plugins/inputs/kube_inventory/README.md +++ b/plugins/inputs/kube_inventory/README.md @@ -68,8 +68,11 @@ avoid cardinality issues: selector_exclude = ["*"] ## Optional TLS Config + ## Trusted root certificates for server # tls_ca = "/path/to/cafile" + ## Used for TLS client certificate authentication # tls_cert = "/path/to/certfile" + ## Used for TLS client certificate authentication # tls_key = "/path/to/keyfile" ## Use TLS but skip chain & host verification # insecure_skip_verify = false @@ -127,6 +130,26 @@ subjects: namespace: default ``` +## Quickstart in k3s + +When monitoring [k3s](https://k3s.io) server instances, one can re-use the already generated administration token. +This is less secure than using the more restrictive dedicated telegraf user, but more convenient to set up.
+ +```console +# an empty token will make telegraf use the client cert/key files instead +$ touch /run/telegraf-kubernetes-token +# replace `telegraf` with the user the telegraf process is running as +$ install -o telegraf -m400 /var/lib/rancher/k3s/server/tls/client-admin.crt /run/telegraf-kubernetes-cert +$ install -o telegraf -m400 /var/lib/rancher/k3s/server/tls/client-admin.key /run/telegraf-kubernetes-key +``` + +```toml +[[inputs.kube_inventory]] +bearer_token = "/run/telegraf-kubernetes-token" +tls_cert = "/run/telegraf-kubernetes-cert" +tls_key = "/run/telegraf-kubernetes-key" +``` + ### Metrics: - kubernetes_daemonset diff --git a/plugins/inputs/snmp/snmp.go b/plugins/inputs/snmp/snmp.go index ec881205c6f68..7f2df6b689eac 100644 --- a/plugins/inputs/snmp/snmp.go +++ b/plugins/inputs/snmp/snmp.go @@ -173,6 +173,12 @@ type Table struct { // Init() builds & initializes the nested fields. func (t *Table) Init() error { + //makes sure oid or name is set in config file + //otherwise snmp will produce metrics with an empty name + if t.Oid == "" && t.Name == "" { + return fmt.Errorf("SNMP table in config file is not named. One or both of the oid and name settings must be set") + } + if t.initialized { return nil } diff --git a/plugins/inputs/snmp/snmp_test.go b/plugins/inputs/snmp/snmp_test.go index ef849f07b138c..f447f13c54e67 100644 --- a/plugins/inputs/snmp/snmp_test.go +++ b/plugins/inputs/snmp/snmp_test.go @@ -199,11 +199,12 @@ func TestSnmpInit_noTranslate(t *testing.T) { {Oid: ".1.1.1.3"}, }, Tables: []Table{ - {Fields: []Field{ - {Oid: ".1.1.1.4", Name: "four", IsTag: true}, - {Oid: ".1.1.1.5", Name: "five"}, - {Oid: ".1.1.1.6"}, - }}, + {Name: "testing", + Fields: []Field{ + {Oid: ".1.1.1.4", Name: "four", IsTag: true}, + {Oid: ".1.1.1.5", Name: "five"}, + {Oid: ".1.1.1.6"}, + }}, }, } @@ -235,6 +236,21 @@ func TestSnmpInit_noTranslate(t *testing.T) { assert.Equal(t, false, s.Tables[0].Fields[2].IsTag) } +func TestSnmpInit_noName_noOid(t *testing.T) { + s := &Snmp{ + Tables: []Table{ + {Fields: []Field{ + {Oid: ".1.1.1.4", Name: "four", IsTag: true}, + {Oid: ".1.1.1.5", Name: "five"}, + {Oid: ".1.1.1.6"}, + }}, + }, + } + + err := s.init() + require.Error(t, err) +} + func TestGetSNMPConnection_v2(t *testing.T) { s := &Snmp{ Agents: []string{"1.2.3.4:567", "1.2.3.4", "udp://127.0.0.1"}, diff --git a/plugins/inputs/x509_cert/x509_cert.go b/plugins/inputs/x509_cert/x509_cert.go index 7c1b0657c7e80..4ac115931a26a 100644 --- a/plugins/inputs/x509_cert/x509_cert.go +++ b/plugins/inputs/x509_cert/x509_cert.go @@ -66,8 +66,7 @@ func (c *X509Cert) SampleConfig() string { func (c *X509Cert) sourcesToURLs() error { for _, source := range c.Sources { if strings.HasPrefix(source, "file://") || - strings.HasPrefix(source, "/") || - strings.Index(source, ":\\") != 1 { + strings.HasPrefix(source, "/") { source = filepath.ToSlash(strings.TrimPrefix(source, "file://")) g, err := globpath.Compile(source) if err != nil { @@ -82,7 +81,6 @@ func (c *X509Cert) sourcesToURLs() error { if err != nil { return fmt.Errorf("failed to parse cert location - %s", err.Error()) } - c.locations = append(c.locations, u) } } @@ -104,14 +102,15 @@ func (c *X509Cert) serverName(u *url.URL) (string, error) { } func (c *X509Cert) getCert(u *url.URL, timeout time.Duration) ([]*x509.Certificate, error) { + protocol := u.Scheme switch u.Scheme { case "https": - u.Scheme = "tcp" + protocol = "tcp" fallthrough case "udp", "udp4", "udp6": fallthrough case "tcp", "tcp4", "tcp6": - ipConn, err := net.DialTimeout(u.Scheme, u.Host,
timeout) + ipConn, err := net.DialTimeout(protocol, u.Host, timeout) if err != nil { return nil, err } @@ -127,6 +126,9 @@ func (c *X509Cert) getCert(u *url.URL, timeout time.Duration) ([]*x509.Certifica conn := tls.Client(ipConn, c.tlsCfg) defer conn.Close() + // reset SNI between requests + defer func() { c.tlsCfg.ServerName = "" }() + hsErr := conn.Handshake() if hsErr != nil { return nil, hsErr @@ -313,6 +315,14 @@ func (c *X509Cert) Init() error { tlsCfg = &tls.Config{} } + if tlsCfg.ServerName != "" && c.ServerName == "" { + // Save SNI from tlsCfg.ServerName to c.ServerName and reset tlsCfg.ServerName. + // We need to reset c.tlsCfg.ServerName for each certificate when there's + // no explicit SNI (c.tlsCfg.ServerName or c.ServerName) otherwise we'll always (re)use + // first uri HostName for all certs (see issue 8914) + c.ServerName = tlsCfg.ServerName + tlsCfg.ServerName = "" + } c.tlsCfg = tlsCfg return nil diff --git a/plugins/inputs/x509_cert/x509_cert_test.go b/plugins/inputs/x509_cert/x509_cert_test.go index 3253c9ac9c7ae..4f09b903b4c24 100644 --- a/plugins/inputs/x509_cert/x509_cert_test.go +++ b/plugins/inputs/x509_cert/x509_cert_test.go @@ -316,6 +316,16 @@ func TestGatherCertMustNotTimeout(t *testing.T) { assert.True(t, acc.HasMeasurement("x509_cert")) } +func TestSourcesToURLs(t *testing.T) { + m := &X509Cert{ + Sources: []string{"https://www.influxdata.com:443", "tcp://influxdata.com:443", "file:///dummy_test_path_file.pem", "/tmp/dummy_test_path_glob*.pem"}, + } + require.NoError(t, m.Init()) + + assert.Equal(t, len(m.globpaths), 2) + assert.Equal(t, len(m.locations), 2) +} + func TestServerName(t *testing.T) { tests := []struct { name string diff --git a/plugins/outputs/http/http.go b/plugins/outputs/http/http.go index 5da273f2d40a6..76d97aa9040bc 100644 --- a/plugins/outputs/http/http.go +++ b/plugins/outputs/http/http.go @@ -13,7 +13,6 @@ import ( "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/internal" httpconfig "github.com/influxdata/telegraf/plugins/common/http" - "github.com/influxdata/telegraf/plugins/common/tls" "github.com/influxdata/telegraf/plugins/outputs" "github.com/influxdata/telegraf/plugins/serializers" ) @@ -83,7 +82,6 @@ type HTTP struct { Password string `toml:"password"` Headers map[string]string `toml:"headers"` ContentEncoding string `toml:"content_encoding"` - tls.ClientConfig httpconfig.HTTPClientConfig client *http.Client diff --git a/plugins/parsers/json_v2/parser.go b/plugins/parsers/json_v2/parser.go index e586b35ebddc2..d013f6b35e24f 100644 --- a/plugins/parsers/json_v2/parser.go +++ b/plugins/parsers/json_v2/parser.go @@ -484,6 +484,8 @@ func (p *Parser) isIncluded(key string, val gjson.Result) bool { if len(p.currentSettings.IncludedKeys) == 0 { return true } + // automatically adds tags to included_keys so it does NOT have to be repeated in the config + p.currentSettings.IncludedKeys = append(p.currentSettings.IncludedKeys, p.currentSettings.Tags...) 
for _, i := range p.currentSettings.IncludedKeys { if i == key { return true diff --git a/plugins/parsers/json_v2/testdata/nested_and_nonnested_tags/telegraf.conf b/plugins/parsers/json_v2/testdata/nested_and_nonnested_tags/telegraf.conf index 45692dc5df0e2..e1748b463690b 100644 --- a/plugins/parsers/json_v2/testdata/nested_and_nonnested_tags/telegraf.conf +++ b/plugins/parsers/json_v2/testdata/nested_and_nonnested_tags/telegraf.conf @@ -9,10 +9,8 @@ data_format = "json_v2" disable_prepend_keys = true path = "@this" included_keys = [ - "hostname", "systemVoltage", "systemCurrent", "tables", - "tables_outputname", ] tags = ["hostname", "tables_outputname"] diff --git a/plugins/parsers/json_v2/testdata/object/telegraf.conf b/plugins/parsers/json_v2/testdata/object/telegraf.conf index 6ad244fd71418..50ed245a3cf00 100644 --- a/plugins/parsers/json_v2/testdata/object/telegraf.conf +++ b/plugins/parsers/json_v2/testdata/object/telegraf.conf @@ -6,7 +6,7 @@ [[inputs.file.json_v2.object]] path = "root.station" disable_prepend_keys = true - included_keys = ["name", "etd_destination", "etd_estimate_minutes"] + included_keys = ["etd_estimate_minutes"] tags = ["name", "etd_destination"] [inputs.file.json_v2.object.fields] etd_estimate_minutes = "int"
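The two testdata changes above exercise the `isIncluded` update in `plugins/parsers/json_v2/parser.go`: keys listed under `tags` are now appended to `included_keys` automatically, so they no longer have to be duplicated in the config. A minimal sketch of a configuration relying on that behaviour is shown below; the input file path is a placeholder and the example is illustrative only, not part of this changeset.

```toml
[[inputs.file]]
  ## Placeholder path; point this at your own JSON file.
  files = ["/path/to/station.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "root.station"
      disable_prepend_keys = true
      ## "name" and "etd_destination" appear only under 'tags'; the parser
      ## now adds tag keys to included_keys internally, so they are still kept.
      included_keys = ["etd_estimate_minutes"]
      tags = ["name", "etd_destination"]
      [inputs.file.json_v2.object.fields]
        etd_estimate_minutes = "int"
```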