Make stackdriver agent work for detecting Json log (#1169)
* change fluentd config

* rename

* change kubernetes_metadata version requirement
yanweiguo authored and google-prow-robot committed Jun 13, 2018
1 parent d70a78b commit a67cd50
Showing 9 changed files with 74 additions and 23 deletions.
18 changes: 18 additions & 0 deletions config/config-observability.yaml
@@ -31,6 +31,24 @@ data:

# The fluentd sidecar output config to specify logging destination.
logging.fluentd-sidecar-output-config: |
# Parse json log before sending to Elastic Search
<filter **>
@type parser
key_name log
<parse>
@type multi_format
<pattern>
format json
time_key fluentd-time # fluentd-time is reserved for structured logs
time_format %Y-%m-%dT%H:%M:%S.%NZ
</pattern>
<pattern>
format none
message_key log
</pattern>
</parse>
</filter>
# Send to Elastic Search
<match **>
@id elasticsearch
@type elasticsearch
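For reference, a minimal Go sketch (not part of this commit; values are illustrative) of what an application could write to stdout under the filter above: a JSON line carrying the reserved fluentd-time key is decoded by the json pattern, while anything else falls through to the none pattern and stays under the log key.

    package main

    import (
        "encoding/json"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Structured entry: matches the "format json" pattern; "fluentd-time"
        // (RFC3339Nano, i.e. %Y-%m-%dT%H:%M:%S.%NZ) becomes the event time.
        entry := map[string]string{
            "log":          "request handled",
            "fluentd-time": time.Now().UTC().Format(time.RFC3339Nano),
        }
        b, _ := json.Marshal(entry)
        fmt.Fprintln(os.Stdout, string(b))

        // Unstructured entry: not valid JSON, so it falls through to the
        // "format none" pattern and is forwarded unchanged under the "log" key.
        fmt.Fprintln(os.Stdout, "request handled (plain text)")
    }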
19 changes: 19 additions & 0 deletions config/monitoring/150-elasticsearch-dev/100-fluentd-configmap.yaml
@@ -49,6 +49,8 @@ data:
# Add Kubernetes metadata
<filter kubernetes.**>
@type kubernetes_metadata
merge_json_log false # Don't parse json log
preserve_json_log false
</filter>
300.forward.input.conf: |-
# Takes the messages sent over TCP, e.g. request logs from Istio
@@ -57,6 +59,23 @@ data:
port 24224
</source>
900.output.conf: |-
# Parse json log before sending to Elastic Search
<filter **>
@type parser
key_name log
<parse>
@type multi_format
<pattern>
format json
time_key fluentd-time # fluentd-time is reserved for structured logs
time_format %Y-%m-%dT%H:%M:%S.%NZ
</pattern>
<pattern>
format none
message_key log
</pattern>
</parse>
</filter>
# Send to Elastic Search
<match **>
@id elasticsearch
@@ -49,6 +49,8 @@ data:
# Add Kubernetes metadata
<filter kubernetes.**>
@type kubernetes_metadata
merge_json_log false # Don't parse json log
preserve_json_log false
</filter>
300.forward.input.conf: |-
# Takes the messages sent over TCP, e.g. request logs from Istio
@@ -57,6 +59,23 @@ data:
port 24224
</source>
900.output.conf: |-
# Parse json log before sending to Elastic Search
<filter **>
@type parser
key_name log
<parse>
@type multi_format
<pattern>
format json
time_key fluentd-time # fluentd-time is reserved for structured logs
time_format %Y-%m-%dT%H:%M:%S.%NZ
</pattern>
<pattern>
format none
message_key log
</pattern>
</parse>
</filter>
# Send to Elastic Search
<match **>
@id elasticsearch
2 changes: 2 additions & 0 deletions config/monitoring/150-stackdriver-dev/fluentd-configmap.yaml
@@ -50,6 +50,8 @@ data:
# Add Kubernetes metadata
<filter kubernetes.**>
@type kubernetes_metadata
merge_json_log false # Don't parse json log
preserve_json_log false
</filter>
300.forward.input.conf: |-
# Takes the messages sent over TCP, e.g. request logs from Istio
2 changes: 2 additions & 0 deletions config/monitoring/150-stackdriver-prod/fluentd-configmap.yaml
@@ -50,6 +50,8 @@ data:
# Add Kubernetes metadata
<filter kubernetes.**>
@type kubernetes_metadata
merge_json_log false # Don't parse json log
preserve_json_log false
</filter>
300.forward.input.conf: |-
# Takes the messages sent over TCP, e.g. request logs from Istio
2 changes: 1 addition & 1 deletion image/fluentd/README.md
@@ -9,7 +9,7 @@ define logging output.
Knative requires the following Fluentd plugins to process log records:

* [fluentd](https://github.com/fluent/fluentd) >= v0.14.0
* [fluent-plugin-kubernetes_metadata_filter](https://github.com/fabric8io/fluent-plugin-kubernetes_metadata_filter) >= 1.0.0
* [fluent-plugin-kubernetes_metadata_filter](https://github.com/fabric8io/fluent-plugin-kubernetes_metadata_filter) >= 1.0.0 AND < 2.1.0
* [fluent-plugin-detect-exceptions](https://github.com/GoogleCloudPlatform/fluent-plugin-detect-exceptions) >= 0.0.9
* [fluent-plugin-multi-format-parser](https://github.com/repeatedly/fluent-plugin-multi-format-parser) >= 1.0.0

14 changes: 2 additions & 12 deletions pkg/controller/revision/fluentd.go
@@ -29,18 +29,8 @@ const fluentdSidecarPreOutputConfig = `
path /var/log/revisions/**/*.*
pos_file /var/log/varlog.log.pos
tag raw.*
<parse>
@type multi_format
<pattern>
format json
time_key fluentd-time # fluentd-time is reserved for structured logs
time_format %Y-%m-%dT%H:%M:%S.%NZ
</pattern>
<pattern>
format none
message_key log
</pattern>
</parse>
format none
message_key log
read_from_head true
</source>
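With this change the sidecar's tail source forwards raw lines and leaves JSON parsing to the sidecar output config in config-observability.yaml. A hypothetical regression test for that split (the package name and the test itself are assumptions, not part of this commit) could pin it down:

    package revision

    import (
        "strings"
        "testing"
    )

    // Hypothetical check: the pre-output (source) config should no longer parse
    // JSON itself; it forwards the raw line and leaves parsing to the output config.
    func TestSidecarSourceForwardsRawLines(t *testing.T) {
        if strings.Contains(fluentdSidecarPreOutputConfig, "@type multi_format") {
            t.Error("JSON parsing moved to the sidecar output config; the source should not use multi_format")
        }
        if !strings.Contains(fluentdSidecarPreOutputConfig, "format none") {
            t.Error("the source should use 'format none' and forward raw lines")
        }
    }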
3 changes: 2 additions & 1 deletion sample/telemetrysample/README.md
@@ -10,7 +10,8 @@ that is installed by default as a showcase of installing dedicated Prometheus in
## Prerequisites

1. [Install Knative Serving](https://github.com/knative/install/blob/master/README.md)
1. Install [docker](https://www.docker.com/)
2. [Install Knative monitoring component](docs/telemetry.md)
3. Install [docker](https://www.docker.com/)


## Setup
18 changes: 9 additions & 9 deletions sample/telemetrysample/telemetrysample.go
@@ -230,7 +230,15 @@ func logHandler(client *http.Client) http.HandlerFunc {

data := map[string]string{
"log": "A log in json format to STDOUT",
"foo": "bar",
"time": timestamp.String(),
// Cluster operator can configure which field is used as time key and what
// the format is. For example, in config/monitoring/150-elasticsearch-dev/100-fluentd-configmap.yaml,
// fluentd-time is the reserved key to tell fluentd the logging time. It
// must be in the format of RFC3339Nano, i.e. %Y-%m-%dT%H:%M:%S.%NZ.
// Without this, fluentd uses the time when it collect the log as an
// event time.
"fluentd-time": timestamp.Format(time.RFC3339Nano),
}
jsonOutput, _ := json.Marshal(data)
fmt.Fprintln(os.Stdout, string(jsonOutput))
@@ -246,15 +254,7 @@ func logHandler(client *http.Client) http.HandlerFunc {
fmt.Fprintf(os.Stderr, "Failed to write to %s: %v", fileName, err)
}

data := map[string]string{
"log": "A log in json format to /var/log",
"time": timestamp.String(),
// fluentd-time is the reserved key to tell fluentd the logging time. It
// must be in the format of RFC3339Nano, i.e. %Y-%m-%dT%H:%M:%S.%NZ.
// Without this, fluentd uses current time when it collect the log as an
// event time.
"fluentd-time": timestamp.Format(time.RFC3339Nano),
}
data["log"] = "A log in json format to /var/log"
jsonOutput, _ := json.Marshal(data)
if _, err := f.WriteString(string(jsonOutput) + "\n"); err != nil {
fmt.Fprintf(os.Stderr, "Failed to write to %s: %v", fileName, err)
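One detail worth spelling out: fluentd's time_format %Y-%m-%dT%H:%M:%S.%NZ corresponds to Go's time.RFC3339Nano, which is why the sample formats fluentd-time that way. A standalone sketch (illustrative log line, not part of the sample) that round-trips such an entry:

    package main

    import (
        "encoding/json"
        "fmt"
        "time"
    )

    func main() {
        // A line shaped like the ones the sample handler emits.
        line := `{"log":"A log in json format to STDOUT","foo":"bar","fluentd-time":"2018-06-13T17:45:30.123456789Z"}`

        var entry map[string]string
        if err := json.Unmarshal([]byte(line), &entry); err != nil {
            panic(err)
        }

        // %Y-%m-%dT%H:%M:%S.%NZ in fluentd matches RFC3339Nano in Go; a value
        // in any other format would fail to parse here, and fluentd would fall
        // back to the time it collected the log.
        ts, err := time.Parse(time.RFC3339Nano, entry["fluentd-time"])
        if err != nil {
            panic(err)
        }
        fmt.Println("event time:", ts.Format(time.RFC3339Nano))
    }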
