From 1690b8db518a4ca06faf0955066619dceeb65ac4 Mon Sep 17 00:00:00 2001 From: Mayur Kale Date: Fri, 15 Feb 2019 13:52:04 -0800 Subject: [PATCH] Node.js: Update examples (#579) * Node.js: Update examples * shell->yaml, remove extra dot, HTTP 2->HTTP/2 --- .../custom-exporter/Node.js/Metrics.md | 29 +- .../custom-exporter/Node.js/Trace.md | 10 +- .../supported-exporters/Node.js/Instana.md | 7 +- .../supported-exporters/Node.js/Jaeger.md | 25 +- .../supported-exporters/Node.js/Prometheus.md | 53 ++- .../supported-exporters/Node.js/Zipkin.md | 20 +- .../Node.js/stackdriver-stats.md | 10 +- .../Node.js/stackdriver-trace.md | 6 +- content/quickstart/nodejs/metrics.md | 374 +++++++++++------- content/stats/measure.md | 25 +- content/stats/view.md | 15 +- content/tag/key.md | 5 +- content/tag/map.md | 10 +- content/tag/value.md | 5 +- 14 files changed, 377 insertions(+), 217 deletions(-) diff --git a/content/exporters/custom-exporter/Node.js/Metrics.md b/content/exporters/custom-exporter/Node.js/Metrics.md index a72eefa3..e4f8b4f5 100644 --- a/content/exporters/custom-exporter/Node.js/Metrics.md +++ b/content/exporters/custom-exporter/Node.js/Metrics.md @@ -73,25 +73,34 @@ export class MyConsoleStatsExporter implements StatsEventListener { And now to test it out as we would in a typically linked program, let's create a `expample.js` file: {{}} -var opencensus = require('@opencensus/core'); -var stats = new opencensus.Stats(); +const { globalStats, AggregationType, TagMap } = require('@opencensus/core'); // Let's create an instance of our just created exporter -var exporter = new MyConsoleStatsExporter(); +const exporter = new MyConsoleStatsExporter(); // And register it -stats.registerExporter(exporter); +globalStats.registerExporter(exporter); // Let's create a measure -var measure = stats.createMeasureInt64('my/measure', "1"); +const measure = globalStats.createMeasureInt64('my/measure', "1"); // our tags -var tags = {myTagKey: 'myTagValue'}; -// a view -var view = stats.createView('my/view', measure, 2, ['myTagKey'], 'my view'); +const myTagKey = { name: "myTagKey" }; +const tags = new TagMap(); +tags.set(myTagKey, { value: "myTagValue" }); + +// Create and Register the view +const view = globalStats.createView( + /* name */ 'my/view', + measure, + AggregationType.LAST_VALUE, + [myTagKey], + /* description */ 'my view' +); +globalStats.registerView(view); // and our measurement -var measurement = {measure, tags, value: 10}; +const measurement = {measure, value: 10}; // finaly, let's record it -stats.record(measurement); +globalStats.record([measurement], tags); {{}} Now, run it with `node example.js` and you should see logs for our view beeing created and our measurement beeing recorded. 
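For readers who reach this file without the full guide above, here is a minimal plain-JavaScript sketch of what the console exporter used by the example file could look like. It assumes the `StatsEventListener` shape referenced earlier in this guide (an `onRegisterView` callback and an `onRecord` callback); treat the method bodies and log format as illustrative, not as the library's required implementation.

```javascript
// A minimal console stats exporter sketch (assumed StatsEventListener shape).
class MyConsoleStatsExporter {
  // Invoked when a view is registered with the stats manager
  onRegisterView(view) {
    console.log(`View registered: ${view.name}, measure: ${view.measure.name}`);
  }

  // Invoked when a measurement is recorded against registered views
  onRecord(views, measurement, tags) {
    console.log(`Recorded: ${measurement.measure.name} = ${measurement.value}`);
  }
}

module.exports = { MyConsoleStatsExporter };
```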
diff --git a/content/exporters/custom-exporter/Node.js/Trace.md b/content/exporters/custom-exporter/Node.js/Trace.md index ea4babf9..7e2e5c51 100644 --- a/content/exporters/custom-exporter/Node.js/Trace.md +++ b/content/exporters/custom-exporter/Node.js/Trace.md @@ -114,16 +114,16 @@ export class MyConsoleTraceExporter implements types.Exporter { And now to test it out as we would in a typically linked program, let's create a `expample.js` file: ```javascript -var tracing = require('@opencensus/opencensus-nodejs'); +const tracing = require('@opencensus/opencensus-nodejs'); // Let's create an instance of our just created exporter -var exporter = new MyConsoleTraceExporter(); +const exporter = new MyConsoleTraceExporter(); // And start tracing with it tracing.registerExporter(exporter).start(); -// Now, lets create a simple HTTP 2 server -var http2 = require('http2') -var server2 = http2.createServer(); +// Now, lets create a simple HTTP/2 server +const http2 = require('http2') +const server2 = http2.createServer(); // On every call to http://localhost:8080 we will return a Hello World message server2.on('stream', (stream, requestHeaders) => { diff --git a/content/exporters/supported-exporters/Node.js/Instana.md b/content/exporters/supported-exporters/Node.js/Instana.md index 19c3eec8..44b4c779 100644 --- a/content/exporters/supported-exporters/Node.js/Instana.md +++ b/content/exporters/supported-exporters/Node.js/Instana.md @@ -33,10 +33,9 @@ To use Instana as your exporter, first ensure that you have an [Instana agent ru Now let's use the Instana exporter: ```js -var tracing = require('@opencensus/nodejs'); -var instana = require('@opencensus/exporter-instana'); - -var exporter = new instana.InstanaTraceExporter(); +const tracing = require('@opencensus/nodejs'); +const { InstanaTraceExporter }= require('@opencensus/exporter-instana'); +const exporter = new InstanaTraceExporter(); tracing.registerExporter(exporter).start(); ``` diff --git a/content/exporters/supported-exporters/Node.js/Jaeger.md b/content/exporters/supported-exporters/Node.js/Jaeger.md index a989b264..0ee1c228 100644 --- a/content/exporters/supported-exporters/Node.js/Jaeger.md +++ b/content/exporters/supported-exporters/Node.js/Jaeger.md @@ -10,6 +10,8 @@ logo: /img/partners/jaeger_logo.svg - [Introduction](#introduction) - [Installing the exporter](#installing-the-exporter) - [Creating the exporter](#creating-the-exporter) +- [Viewing your traces](#viewing-your-traces) +- [Project link](#project-link) ## Introduction Jaeger, inspired by Dapper and OpenZipkin, is a distributed tracing system released as open source by Uber Technologies. 
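Before wiring up the Jaeger exporter shown in the next hunk, you need a Jaeger backend to receive spans. One convenient option (our suggestion, not something this patch mandates) is the `jaegertracing/all-in-one` Docker image, which exposes the UI on port 16686 and the agent UDP port 6832 used in the exporter options below:

```shell
# Start a local Jaeger all-in-one instance (UI on 16686, agent UDP on 6832)
docker run -d --name jaeger \
  -p 16686:16686 \
  -p 6832:6832/udp \
  jaegertracing/all-in-one
```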
@@ -40,21 +42,26 @@ npm install @opencensus/exporter-jaeger Now let's use the Jaeger exporter: ```js -var core = require('@opencensus/core'); -var tracing = require('@opencensus/nodejs'); -var jaeger = require('@opencensus/exporter-jaeger'); +const { logger } = require('@opencensus/core'); +const { JaegerTraceExporter } = require('@opencensus/exporter-jaeger'); +const tracing = require('@opencensus/nodejs'); -var jaegerOptions = { +// Add service name and jaeger options +const jaegerOptions = { serviceName: 'opencensus-exporter-jaeger', host: 'localhost', port: 6832, - tags: [{key: 'opencensus-exporter-jeager', value: '0.0.1'}], + tags: [{key: 'opencensus-exporter-jeager', value: '0.0.9'}], bufferTimeout: 10, // time in milliseconds - logger: core.logger.logger('debug'), - maxPacketSize: 1000 + logger: logger.logger('debug') }; -var exporter = new jaeger.JaegerTraceExporter(jaegerOptions); - +const exporter = new JaegerTraceExporter(jaegerOptions); tracing.registerExporter(exporter).start(); ``` + +## Viewing your traces +Please visit the Jaeger UI endpoint [http://localhost:16686](http://localhost:16686) + +## Project link +You can find out more about the Jaeger project at [https://www.jaegertracing.io/](https://www.jaegertracing.io/) diff --git a/content/exporters/supported-exporters/Node.js/Prometheus.md b/content/exporters/supported-exporters/Node.js/Prometheus.md index 6ff98b30..5ca5b950 100644 --- a/content/exporters/supported-exporters/Node.js/Prometheus.md +++ b/content/exporters/supported-exporters/Node.js/Prometheus.md @@ -10,6 +10,9 @@ logo: /img/prometheus-logo.png - [Introduction](#introduction) - [Installing the exporter](#installing-the-exporter) - [Creating the exporter](#creating-the-exporter) +- [Running Prometheus](#running-prometheus) +- [Viewing your metrics](#viewing-your-metrics) +- [Project link](#project-link) ## Introduction Prometheus is a monitoring system that collects metrics, by scraping @@ -31,26 +34,56 @@ npm install @opencensus/exporter-prometheus ``` ## Creating the exporter +To create the exporter, we'll need to: + +* Import and use the Prometheus exporter package +* Define a namespace that will uniquely identify our metrics when viewed on Prometheus +* Expose a port on which we shall run a `/metrics` endpoint +* With the defined port, we'll need a Prometheus configuration file so that Prometheus can scrape from this endpoint + Now let's use the Prometheus exporter: ```js -const { Stats } = require('@opencensus/core'); +const { globalStats } = require('@opencensus/core'); const { PrometheusStatsExporter } = require('@opencensus/exporter-prometheus'); // Add your port and startServer to the Prometheus options const exporter = new PrometheusStatsExporter({ port: 9464, - startServer: false + startServer: true }); -// Our Stats manager -const stats = new Stats(); - // Pass the created exporter to Stats -stats.registerExporter(exporter); +globalStats.registerExporter(exporter); +``` -// Run the server -exporter.startServer(function callback() { - // Callback -}); +and then for our corresponding `prometheus.yaml` file: + +```yaml +global: + scrape_interval: 10s + + external_labels: + monitor: 'demo' + +scrape_configs: + - job_name: 'demo' + + scrape_interval: 10s + + static_configs: + - targets: ['localhost:9464'] +``` + +## Running Prometheus +Then run Prometheus with your configuration: +```shell +prometheus --config.file=prometheus.yaml ``` + +## Viewing your metrics +Please visit [http://localhost:9090](http://localhost:9090) + +## Project link +You can 
find out more about the Prometheus project at [https://prometheus.io/](https://prometheus.io/) + diff --git a/content/exporters/supported-exporters/Node.js/Zipkin.md b/content/exporters/supported-exporters/Node.js/Zipkin.md index dd25e4f5..da9dbd96 100644 --- a/content/exporters/supported-exporters/Node.js/Zipkin.md +++ b/content/exporters/supported-exporters/Node.js/Zipkin.md @@ -10,6 +10,8 @@ logo: /img/zipkin-logo.jpg - [Introduction](#introduction) - [Installing the exporter](#installing-the-exporter) - [Creating the exporter](#creating-the-exporter) +- [Viewing your traces](#viewing-your-traces) +- [Project link](#project-link) ## Introduction Zipkin is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in microservice architectures. @@ -18,8 +20,8 @@ It manages both the collection and lookup of this data. Zipkin’s design is bas OpenCensus Node.js has support for this exporter available, distributed through NPM package [@opencensus/exporter-zipkin](https://www.npmjs.com/package/@opencensus/exporter-zipkin) -{{% notice tip %}} -For assistance setting up Zipkin, [Click here](/codelabs/zipkin) for a guided codelab. +{{% notice note %}} +This guide makes use of Zipkin for visualizing your data. For assistance setting up Zipkin, [Click here](/codelabs/zipkin) for a guided codelab. {{% /notice %}} ## Installing the exporter @@ -34,16 +36,22 @@ npm install @opencensus/exporter-zipkin Now let's use the Zipkin exporter: ```js -var tracing = require('@opencensus/nodejs'); -var zipkin = require('@opencensus/exporter-zipkin'); +const tracing = require('@opencensus/nodejs'); +const { ZipkinTraceExporter } = require('@opencensus/exporter-zipkin'); // Add your zipkin url (ex http://localhost:9411/api/v2/spans) // and application name to the Zipkin options -var options = { +const zipkinOptions = { url: 'your-zipkin-url', serviceName: 'your-application-name' }; -var exporter = new zipkin.ZipkinTraceExporter(options); +const exporter = new ZipkinTraceExporter(zipkinOptions); tracing.registerExporter(exporter).start(); ``` + +## Viewing your traces +Please visit the Zipkin UI endpoint [http://localhost:9411](http://localhost:9411) + +## Project link +You can find out more about the Zipkin project at [https://zipkin.io/](https://zipkin.io/) diff --git a/content/exporters/supported-exporters/Node.js/stackdriver-stats.md b/content/exporters/supported-exporters/Node.js/stackdriver-stats.md index d4aa59bd..528884aa 100644 --- a/content/exporters/supported-exporters/Node.js/stackdriver-stats.md +++ b/content/exporters/supported-exporters/Node.js/stackdriver-stats.md @@ -49,15 +49,13 @@ Stackdriver's minimum stats reporting period must be >= 60 seconds. 
Find out why {{% /notice %}} {{}} -var opencensus = require('@opencensus/core'); -var stackdriver = require('@opencensus/exporter-stackdriver'); +const { globalStats } = require('@opencensus/core'); +const { StackdriverStatsExporter } = require('@opencensus/exporter-stackdriver'); // Add your project id to the Stackdriver options -var exporter = new stackdriver.StackdriverStatsExporter({projectId: "your-project-id"}); +const exporter = new StackdriverStatsExporter({projectId: "your-project-id"}); -var stats = new opencensus.Stats(); - -stats.registerExporter(exporter); +globalStats.registerExporter(exporter); {{}} ## Viewing your metrics diff --git a/content/exporters/supported-exporters/Node.js/stackdriver-trace.md b/content/exporters/supported-exporters/Node.js/stackdriver-trace.md index 59475c6b..6b09784e 100644 --- a/content/exporters/supported-exporters/Node.js/stackdriver-trace.md +++ b/content/exporters/supported-exporters/Node.js/stackdriver-trace.md @@ -48,11 +48,11 @@ export GOOGLE_APPLICATION_CREDENTIALS=path/to/your/credential.json To create the exporter, in code: {{}} -var tracing = require('@opencensus/nodejs'); -var stackdriver = require('@opencensus/exporter-stackdriver'); +const tracing = require('@opencensus/nodejs'); +const { StackdriverTraceExporter } = require('@opencensus/exporter-stackdriver'); // Add your project id to the Stackdriver options -var exporter = new stackdriver.StackdriverTraceExporter({projectId: "your-project-id"}); +const exporter = new StackdriverTraceExporter({projectId: "your-project-id"}); tracing.registerExporter(exporter).start(); {{}} diff --git a/content/quickstart/nodejs/metrics.md b/content/quickstart/nodejs/metrics.md index 6abff90f..ccc6da12 100644 --- a/content/quickstart/nodejs/metrics.md +++ b/content/quickstart/nodejs/metrics.md @@ -16,8 +16,9 @@ aliases: [/quickstart/node.js/metrics] - [Record and Aggregate Data](#record-and-aggregate-data) - [Create Views and Tags](#create-views-and-tags) - [Recording Metrics](#recording-metrics) -- [Exporting to Stackdriver](#exporting-to-stackdriver) +- [Exporting to Prometheus](#exporting-to-prometheus) - [Viewing your metrics](#viewing-your-metrics) +- [References](#references) In this quickstart, we’ll glean insights from code segments and learn how to: @@ -27,11 +28,17 @@ In this quickstart, we’ll glean insights from code segments and learn how to: ## Requirements - [Node.js](https://nodejs.org/) 6 or above and `npm` (already comes with Node.js) -- Google Cloud Platform account and project -- Google Stackdriver Monitoring enabled on your project {{% notice tip %}} -For assistance setting up Stackdriver, [Click here](/codelabs/stackdriver) for a guided codelab. +For assistance setting up Node.js, [Click here](https://nodejs.org/) for instructions. +{{% /notice %}} + +- Prometheus as our choice of metrics backend: we are picking it beause it is free, open source and easy to setup + +{{% notice tip %}} +For assistance setting up Prometheus, [Click here](/codelabs/prometheus) for a guided codelab. 
+ +You can swap out any other exporter from the [list of Node.js exporters](/guides/exporters/supported-exporters/node.js/) {{% /notice %}} ## Installation @@ -43,11 +50,11 @@ mkdir repl-app cd repl-app ``` -Then, let's install the OpenCensus and Stackdriver packages with: +Then, let's install the OpenCensus and Prometheus packages with: ```bash npm install @opencensus/core -npm install @opencensus/exporter-stackdriver +npm install @opencensus/exporter-prometheus ``` ## Brief Overview @@ -56,7 +63,7 @@ By the end of this tutorial, we will do these four things to obtain metrics usin 1. Create quantifiable `metrics` (numerical) that we will **record** 2. Create [tags](/core-concepts/tags) that we will associate with our metrics 3. Organize our metrics, similar to writing a report, in to a `View` -4. Export our views to a backend (Stackdriver in this case) +4. Export our views to a backend (Prometheus in this case) ## Getting Started @@ -131,11 +138,11 @@ To enable metrics, we'll import a few items from OpenCensus Core package. {{}} {{}} -const { Stats, MeasureUnit, AggregationType } = require('@opencensus/core'); +const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core'); {{}} {{}} -const { Stats, MeasureUnit, AggregationType } = require('@opencensus/core'); +const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core'); const fs = require('fs'); const readline = require('readline'); @@ -169,30 +176,24 @@ First, we will create the variables needed to later record our metrics. {{}} {{}} -// Our Stats manager -const stats = new Stats(); - // The latency in milliseconds -const mLatencyMs = stats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); +const mLatencyMs = globalStats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); // Counts/groups the lengths of lines read in. -const mLineLengths = stats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); +const mLineLengths = globalStats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); {{}} {{}} -const { Stats, MeasureUnit, AggregationType } = require('@opencensus/core'); +const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core'); const fs = require('fs'); const readline = require('readline'); -// Create the Stats manager -const stats = new Stats(); - // The latency in milliseconds -const mLatencyMs = stats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); +const mLatencyMs = globalStats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); // Counts/groups the lengths of lines read in. -const mLineLengths = stats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); +const mLineLengths = globalStats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); // Creates a stream to read our file const stream = fs.createReadStream("./test.txt"); @@ -224,11 +225,12 @@ We now determine how our metrics will be organized by creating `Views`. 
We will {{}} {{}} -const methodTagKey = "method"; -const statusTagKey = "status"; -const errorTagKey = "error"; +const methodTagKey = { name: "method" }; +const statusTagKey = { name: "status" }; +const errorTagKey = { name: "error" }; -const latencyView = stats.createView( +// Create and Register the view. +const latencyView = globalStats.createView( "demo/latency", mLatencyMs, AggregationType.DISTRIBUTION, @@ -238,16 +240,20 @@ const latencyView = stats.createView( // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s] [0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000] ); -1 -const lineCountView = stats.createView( +globalStats.registerView(latencyView); + +// Create and Register the view. +const lineCountView = globalStats.createView( "demo/lines_in", mLineLengths, AggregationType.COUNT, [methodTagKey], "The number of lines from standard input" -) +); +globalStats.registerView(lineCountView); -const lineLengthView = stats.createView( +// Create and Register the view. +const lineLengthView = globalStats.createView( "demo/line_lengths", mLineLengths, AggregationType.DISTRIBUTION, @@ -256,23 +262,21 @@ const lineLengthView = stats.createView( // Bucket Boudaries: // [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000] [0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000] -) +); +globalStats.registerView(lineLengthView); {{}} {{}} -const { Stats, MeasureUnit, AggregationType } = require('@opencensus/core'); +const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core'); const fs = require('fs'); const readline = require('readline'); -// Create the Stats manager -const stats = new Stats(); - // The latency in milliseconds -const mLatencyMs = stats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); +const mLatencyMs = globalStats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); // Counts/groups the lengths of lines read in. -const mLineLengths = stats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); +const mLineLengths = globalStats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); // Creates a stream to read our file const stream = fs.createReadStream("./test.txt"); @@ -280,11 +284,12 @@ const stream = fs.createReadStream("./test.txt"); // Creates an interface to read and process our file line by line const lineReader = readline.createInterface({ input: stream }); -const methodTagKey = "method"; -const statusTagKey = "status"; -const errorTagKey = "error"; +const methodTagKey = { name: "method" }; +const statusTagKey = { name: "status" }; +const errorTagKey = { name: "error" }; -const latencyView = stats.createView( +// Create and Register the view. +const latencyView = globalStats.createView( "demo/latency", mLatencyMs, AggregationType.DISTRIBUTION, @@ -294,25 +299,30 @@ const latencyView = stats.createView( // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s] [0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000] ); +globalStats.registerView(latencyView); -const lineCountView = stats.createView( +// Create and Register the view. 
+const lineCountView = globalStats.createView( "demo/lines_in", mLineLengths, AggregationType.COUNT, - [methodTagKey], + [methodTagKey, statusTagKey], "The number of lines from standard input" -) +); +globalStats.registerView(lineCountView); -const lineLengthView = stats.createView( +// Create and Register the view. +const lineLengthView = globalStats.createView( "demo/line_lengths", mLineLengths, AggregationType.DISTRIBUTION, - [methodTagKey], + [methodTagKey, statusTagKey], "Groups the lengths of keys in buckets", // Bucket Boudaries: // [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000] [0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000] -) +); +globalStats.registerView(lineLengthView); // REPL is the read, evaluate, print and loop lineReader.on("line", function (line) { // Read @@ -337,52 +347,54 @@ Again, this is arbitrary and purely up the user. For example, if we wanted to tr ### Recording Metrics -Now we will record the desired metrics. To do so, we will use `stats.record()` and pass in our measurements. +Now we will record the desired metrics. To do so, we will use `globalStats.record()` and pass in our list of measurements. {{}} {{}} lineReader.on("line", function (line) { // Registers the Tags for our measurements - const tags = {method: "repl", status: "OK"}; + const tags = new TagMap(); + tags.set(methodTagKey, { value: "REPL" }); + tags.set(statusTagKey, { value: "OK" }); try { // ... - stats.record({ + globalStats.record([{ measure: mLineLengths, - tags, value: processedLine.length - }); + }, { + measure: mLatencyMs, + value: (new Date()) - startTime.getTime() + }], tags); } catch (err) { - tags.status = "ERROR"; - tags.error = err.message; + const errTags = new TagMap(); + errTags.set(methodTagKey, { value: "REPL" }); + errTags.set(statusTagKey, { value: "ERROR" }); + errTags.set(errorTagKey, { value: err.message }); + + globalStats.record([{ + measure: mLatencyMs, + value: (new Date()) - startTime.getTime() + }], errTags); } - stats.record({ - measure: mLatencyMs, - tags, - value: (new Date()) - startTime.getTime() - }); - // Restarts the start time for the REPL startTime = new Date(); }); {{}} {{}} -const { Stats, MeasureUnit, AggregationType } = require('@opencensus/core'); +const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core'); const fs = require('fs'); const readline = require('readline'); -// Create the Stats manager -const stats = new Stats(); - // The latency in milliseconds -const mLatencyMs = stats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); +const mLatencyMs = globalStats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); // Counts/groups the lengths of lines read in. 
-const mLineLengths = stats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); +const mLineLengths = globalStats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); // Creates a stream to read our file const stream = fs.createReadStream("./test.txt"); @@ -390,37 +402,45 @@ const stream = fs.createReadStream("./test.txt"); // Creates an interface to read and process our file line by line const lineReader = readline.createInterface({ input: stream }); -const tagKey = "method"; +const methodTagKey = { name: "method" }; +const statusTagKey = { name: "status" }; +const errorTagKey = { name: "error" }; -const latencyView = stats.createView( +// Create and Register the view. +const latencyView = globalStats.createView( "demo/latency", mLatencyMs, AggregationType.DISTRIBUTION, - [tagKey], + [methodTagKey, statusTagKey, errorTagKey], "The distribution of the latencies", // Bucket Boundaries: // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s] [0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000] ); +globalStats.registerView(latencyView); -const lineCountView = stats.createView( +// Create and Register the view. +const lineCountView = globalStats.createView( "demo/lines_in", mLineLengths, AggregationType.COUNT, - [tagKey], + [methodTagKey, statusTagKey], "The number of lines from standard input" -) +); +globalStats.registerView(lineCountView); -const lineLengthView = stats.createView( +// Create and Register the view. +const lineLengthView = globalStats.createView( "demo/line_lengths", mLineLengths, AggregationType.DISTRIBUTION, - [tagKey], + [methodTagKey, statusTagKey], "Groups the lengths of keys in buckets", // Bucket Boudaries: // [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000] [0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000] -) +); +globalStats.registerView(lineLengthView); // The begining of our REPL loop let startTime = new Date(); @@ -429,28 +449,33 @@ let endTime; // REPL is the read, evaluate, print and loop lineReader.on("line", function (line) { // Read // Registers the Tags for our measurements - const tags = {method: "repl", status: "OK"}; + const tags = new TagMap(); + tags.set(methodTagKey, { value: "REPL" }); + tags.set(statusTagKey, { value: "OK" }); try { const processedLine = processLine(line); // Evaluate console.log(processedLine); // Print - stats.record({ + globalStats.record([{ measure: mLineLengths, - tags, value: processedLine.length - }); + }, { + measure: mLatencyMs, + value: (new Date()) - startTime.getTime() + }], tags); } catch (err) { - tags.status = "ERROR"; - tags.error = err.message; + const errTags = new TagMap(); + errTags.set(methodTagKey, { value: "REPL" }); + errTags.set(statusTagKey, { value: "ERROR" }); + errTags.set(errorTagKey, { value: err.message }); + + globalStats.record([{ + measure: mLatencyMs, + value: (new Date()) - startTime.getTime() + }], errTags); } - stats.record({ - measure: mLatencyMs, - tags, - value: (new Date()) - startTime.getTime() - }); - // Restarts the start time for the REPL startTime = new Date(); }); @@ -466,49 +491,51 @@ function processLine(line) { {{}} {{}} -## Exporting to Stackdriver +## Exporting to Prometheus -We will be adding the Stackdriver package: `@opencensus/exporter-stackdriver`, create the Stackdriver exporter and pass it to the stats manager: +We will be adding the Prometheus package: 
`@opencensus/exporter-prometheus`, create the Prometheus exporter and pass it to the global stats manager: {{}} {{}} -const { Stats, MeasureUnit, AggregationType } = require('@opencensus/core'); -const { StackdriverStatsExporter } = require('@opencensus/exporter-stackdriver'); +const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core'); +const { PrometheusStatsExporter } = require("@opencensus/exporter-prometheus"); const fs = require('fs'); const readline = require('readline'); -// Create the Stats manager -const stats = new Stats(); - -// Add your project id to the Stackdriver options -const exporter = new StackdriverStatsExporter({projectId: "your-project-id"}); +// Enable OpenCensus exporters to export metrics to Prometheus Monitoring. +const exporter = new PrometheusStatsExporter({ + // Metrics will be exported on https://localhost:{port}/metrics + port: 9464, + startServer: true +}); -// Pass the created exporter to Stats -stats.registerExporter(exporter); +// Pass the created exporter to global Stats +globalStats.registerExporter(exporter); {{}} {{}} -const { Stats, MeasureUnit, AggregationType } = require('@opencensus/core'); -const { StackdriverStatsExporter } = require('@opencensus/exporter-stackdriver'); +const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core'); +const { PrometheusStatsExporter } = require("@opencensus/exporter-prometheus"); const fs = require('fs'); const readline = require('readline'); -// Create the Stats manager -const stats = new Stats(); - -// Add your project id to the Stackdriver options -const exporter = new StackdriverStatsExporter({projectId: "your-project-id"}); +// Enable OpenCensus exporters to export metrics to Prometheus Monitoring. +const exporter = new PrometheusStatsExporter({ + // Metrics will be exported on https://localhost:{port}/metrics + port: 9464, + startServer: true +}); -// Pass the created exporter to Stats -stats.registerExporter(exporter); +// Pass the created exporter to global Stats +globalStats.registerExporter(exporter); // The latency in milliseconds -const mLatencyMs = stats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); +const mLatencyMs = globalStats.createMeasureDouble("repl/latency", MeasureUnit.MS, "The latency in milliseconds per REPL loop"); // Counts/groups the lengths of lines read in. -const mLineLengths = stats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); +const mLineLengths = globalStats.createMeasureInt64("repl/line_lengths", MeasureUnit.BYTE, "The distribution of line lengths"); // Creates a stream to read our file const stream = fs.createReadStream("./test.txt"); @@ -516,40 +543,45 @@ const stream = fs.createReadStream("./test.txt"); // Creates an interface to read and process our file line by line const lineReader = readline.createInterface({ input: stream }); -const tagKey = "method"; +const methodTagKey = { name: "method" }; +const statusTagKey = { name: "status" }; +const errorTagKey = { name: "error" }; -// Register the view. -const latencyView = stats.createView( +// Create and Register the view. 
+const latencyView = globalStats.createView( "demo/latency", mLatencyMs, AggregationType.DISTRIBUTION, - [tagKey], + [methodTagKey, statusTagKey, errorTagKey], "The distribution of the latencies", // Bucket Boundaries: // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s] [0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000] ); +globalStats.registerView(latencyView); -// Register the view. -const lineCountView = stats.createView( +// Create and Register the view. +const lineCountView = globalStats.createView( "demo/lines_in", mLineLengths, AggregationType.COUNT, - [tagKey], + [methodTagKey, statusTagKey], "The number of lines from standard input" -) +); +globalStats.registerView(lineCountView); -// Register the view. -const lineLengthView = stats.createView( +// Create and Register the view. +const lineLengthView = globalStats.createView( "demo/line_lengths", mLineLengths, AggregationType.DISTRIBUTION, - [tagKey], + [methodTagKey, statusTagKey], "Groups the lengths of keys in buckets", // Bucket Boudaries: // [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000] [0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000] -) +); +globalStats.registerView(lineLengthView); // The begining of our REPL loop let startTime = new Date(); @@ -557,27 +589,33 @@ let endTime; // REPL is the read, evaluate, print and loop lineReader.on("line", function (line) { // Read // Registers the Tags for our measurements - const tags = {method: "repl", status: "OK"}; + const tags = new TagMap(); + tags.set(methodTagKey, { value: "REPL" }); + tags.set(statusTagKey, { value: "OK" }); try { const processedLine = processLine(line); // Evaluate console.log(processedLine); // Print - stats.record({ + globalStats.record([{ measure: mLineLengths, - tags, value: processedLine.length - }); + }, { + measure: mLatencyMs, + value: (new Date()) - startTime.getTime() + }], tags); } catch (err) { - tags.status = "ERROR"; - tags.error = err.message; + const errTags = new TagMap(); + errTags.set(methodTagKey, { value: "REPL" }); + errTags.set(statusTagKey, { value: "ERROR" }); + errTags.set(errorTagKey, { value: err.message }); + + globalStats.record([{ + measure: mLatencyMs, + value: (new Date()) - startTime.getTime() + }], errTags); } - stats.record({ - measure: mLatencyMs, - tags, - value: (new Date()) - startTime.getTime() - }); // Restarts the start time for the REPL startTime = new Date(); }); @@ -594,16 +632,84 @@ function processLine(line) { {{}} {{}} +### Prometheus configuration file + +To allow Prometheus to scrape from our application, we have to point it towards the tutorial application, whose +metrics endpoint is served on "localhost:9464". + +To do this, we first need to create a YAML file with the configuration, e.g. `promconfig.yaml`, +whose contents are: +```yaml +scrape_configs: + - job_name: 'ocnodejsmetricstutorial' + + scrape_interval: 10s + + static_configs: + - targets: ['localhost:9464'] +``` + +### Running Prometheus + +With that file saved as `promconfig.yaml`, we should now be able to run Prometheus like this: + +```shell +prometheus --config.file=promconfig.yaml +``` + ## Viewing your metrics -Once registed, the Stackdriver exporter will be notified on every view registered and measurement recorded. It will translate and send the collected data on its own. Now, simply go to the [monitoring console](https://app.google.stackdriver.com/) and check the collected data. 
-![](/images/metrics-node-stackdriver.png) -Each bar in the heatmap represents one run of the program, and the colored components of each bar represent part of the latency distribution. +With the above you should now be able to navigate to the Prometheus UI at http://localhost:9464/metrics + +``` +# HELP demo_lines_in The number of lines from standard input +# TYPE demo_lines_in counter +demo_lines_in{method="REPL",status="OK"} 6 + +# HELP demo_line_lengths Groups the lengths of keys in buckets +# TYPE demo_line_lengths histogram +demo_line_lengths_bucket{le="5",method="REPL",status="OK"} 0 +demo_line_lengths_bucket{le="10",method="REPL",status="OK"} 0 +demo_line_lengths_bucket{le="15",method="REPL",status="OK"} 0 +demo_line_lengths_bucket{le="20",method="REPL",status="OK"} 0 +demo_line_lengths_bucket{le="40",method="REPL",status="OK"} 0 +demo_line_lengths_bucket{le="60",method="REPL",status="OK"} 1 +demo_line_lengths_bucket{le="80",method="REPL",status="OK"} 6 +demo_line_lengths_bucket{le="100",method="REPL",status="OK"} 6 +demo_line_lengths_bucket{le="200",method="REPL",status="OK"} 6 +demo_line_lengths_bucket{le="400",method="REPL",status="OK"} 6 +demo_line_lengths_bucket{le="600",method="REPL",status="OK"} 6 +demo_line_lengths_bucket{le="800",method="REPL",status="OK"} 6 +demo_line_lengths_bucket{le="1000",method="REPL",status="OK"} 6 +demo_line_lengths_bucket{le="+Inf",method="REPL",status="OK"} 6 +demo_line_lengths_sum{method="REPL",status="OK"} 440 +demo_line_lengths_count{method="REPL",status="OK"} 6 + +# HELP demo_latency The distribution of the latencies +# TYPE demo_latency histogram +demo_latency_bucket{le="25",method="REPL",status="OK"} 6 +demo_latency_bucket{le="50",method="REPL",status="OK"} 6 +demo_latency_bucket{le="75",method="REPL",status="OK"} 6 +demo_latency_bucket{le="100",method="REPL",status="OK"} 6 +demo_latency_bucket{le="200",method="REPL",status="OK"} 6 +demo_latency_bucket{le="400",method="REPL",status="OK"} 6 +demo_latency_bucket{le="600",method="REPL",status="OK"} 6 +demo_latency_bucket{le="800",method="REPL",status="OK"} 6 +demo_latency_bucket{le="1000",method="REPL",status="OK"} 6 +demo_latency_bucket{le="2000",method="REPL",status="OK"} 6 +demo_latency_bucket{le="4000",method="REPL",status="OK"} 6 +demo_latency_bucket{le="6000",method="REPL",status="OK"} 6 +demo_latency_bucket{le="+Inf",method="REPL",status="OK"} 6 +demo_latency_sum{method="REPL",status="OK"} 5 +demo_latency_count{method="REPL",status="OK"} 6 +``` ## References Resource|URL ---|--- -NPM: @opencensus/exporter-stackdriver|https://www.npmjs.com/package/@opencensus/exporter-stackdriver +Prometheus project|https://prometheus.io/ +Setting up Prometheus|[Prometheus Codelab](/codelabs/prometheus) +NPM: @opencensus/exporter-prometheus|https://www.npmjs.com/package/@opencensus/exporter-prometheus NPM: @opencensus/nodejs|https://www.npmjs.com/package/@opencensus/nodejs Github: OpenCensus for Node.js|https://github.com/census-instrumentation/opencensus-node/tree/master/packages/opencensus-nodejs diff --git a/content/stats/measure.md b/content/stats/measure.md index 4cb54c16..a3bd5f3c 100644 --- a/content/stats/measure.md +++ b/content/stats/measure.md @@ -143,22 +143,15 @@ int main(int argc, char** argv) { {{}} {{}} -import { Stats, MeasureUnit } from "@opencensus/core"; - -// Our Stats manager -const stats = new Stats(); - -const mLatencyMs = stats.createMeasureDouble("latency", MeasureUnit.MS, "The latency in milliseconds"); -const mBytesIn = stats.createMeasureInt64("size", MeasureUnit.BYTE, "The 
number of bytes received"); - -stats.record({ - measure: mLatencyMs, - value: 17 -}); -stats.record({ - measure: mBytesIn, - value: 7000 -}); +import { globalStats, MeasureUnit } from "@opencensus/core"; + +const mLatencyMs = globalStats.createMeasureDouble("latency", MeasureUnit.MS, "The latency in milliseconds"); +const mBytesIn = globalStats.createMeasureInt64("size", MeasureUnit.BYTE, "The number of bytes received"); + +const measurement1 = {measure: mLatencyMs, value: 17}; +const measurement2 = {measure: mBytesIn, value: 7000}; + +globalStats.record([measurement1, measurement2]); {{}} {{}} diff --git a/content/stats/view.md b/content/stats/view.md index 11ac7735..6ec4dcdd 100644 --- a/content/stats/view.md +++ b/content/stats/view.md @@ -180,13 +180,11 @@ void enableViews() { {{}} {{}} -const { Stats, AggregationType } = require('@opencensus/core'); +const { globalStats, AggregationType, TagMap } = require('@opencensus/core'); -// Our Stats manager -const stats = new Stats(); -const tagKey = "method"; +const tagKey = { name: "method" }; -const latencyView = stats.createView( +const latencyView = globalStats.createView( "myapp/latency", mLatencyMs, AggregationType.DISTRIBUTION, @@ -197,7 +195,7 @@ const latencyView = stats.createView( [0, 25, 100, 200, 400, 800, 1000] ); -const lineCountView = stats.createView( +const lineCountView = globalStats.createView( "demo/lines_in", mLatencyMs, AggregationType.COUNT, @@ -205,8 +203,9 @@ const lineCountView = stats.createView( "The number of lines from standard input" ); -stats.registerView(latencyView); -stats.registerView(lineCountView); +globalStats.registerView(latencyView); +globalStats.registerView(lineCountView); + {{}} {{}} diff --git a/content/tag/key.md b/content/tag/key.md index 715a4398..5cdc46c8 100644 --- a/content/tag/key.md +++ b/content/tag/key.md @@ -46,8 +46,8 @@ opencensus::tags::TagKey MethodKey() { } {{}} -{{}} -const keyMethod = "method"; +{{}} +const methodKey = { name: "method" }; {{}} {{}} @@ -60,3 +60,4 @@ Go TagKey API|[TagKey](https://godoc.org/go.opencensus.io/tag#Key) Java TagKey API|[TagKey JavaDoc](https://static.javadoc.io/io.opencensus/opencensus-api/0.16.1/io/opencensus/tags/TagKey.html) Python TagKey reference|[Github implementation](https://github.com/census-instrumentation/opencensus-python/blob/fc42d70f0c9f423b22d0d6a55cc1ffb0e3e478c8/opencensus/tags/tag_key.py#L15-L34) C++ Tags reference|[Tags source](https://github.com/census-instrumentation/opencensus-cpp/tree/master/opencensus/tags) +Node.js TagKey reference|[Github implementation](https://github.com/census-instrumentation/opencensus-node/blob/master/packages/opencensus-core/src/tags/types.ts#L17-L21) diff --git a/content/tag/map.md b/content/tag/map.md index f5a58512..b7828fe1 100644 --- a/content/tag/map.md +++ b/content/tag/map.md @@ -75,8 +75,14 @@ void Put() { } {{}} -{{}} -const keyMethod = "method"; +{{}} +const { TagMap } = require('@opencensus/core'); +const methodTagKey = { name: "method" }; +const statusTagKey = { name: "status" }; + +const tags = new TagMap(); +tags.set(methodTagKey, { value: "REPL" }); +tags.set(statusTagKey, { value: "OK" }); {{}} {{}} diff --git a/content/tag/value.md b/content/tag/value.md index 6464c35e..174bd3c5 100644 --- a/content/tag/value.md +++ b/content/tag/value.md @@ -42,8 +42,8 @@ ABSL_CONST_INIT const absl::string_view kGetMethod = "memcache.Client.Get"; {{}} -{{}} -const methodValue = "memcache.Client.Get"; +{{}} +const methodValue = { value: "memcache.Client.Get" }; {{}} {{}} @@ -56,3 +56,4 @@ Go TagValue 
API|[TagMutator](https://godoc.org/go.opencensus.io/tag#Mutator) Java TagValue API|[TagValue JavaDoc](https://static.javadoc.io/io.opencensus/opencensus-api/0.16.1/io/opencensus/tags/TagValue.html) Python TagValue reference|[Github implementation](https://github.com/census-instrumentation/opencensus-python/blob/fc42d70f0c9f423b22d0d6a55cc1ffb0e3e478c8/opencensus/tags/tag_value.py#L15-L34) C++ Tags reference|[Tags source](https://github.com/census-instrumentation/opencensus-cpp/tree/master/opencensus/tags) +Node.js TagValue reference|[Github implementation](https://github.com/census-instrumentation/opencensus-node/blob/master/packages/opencensus-core/src/tags/types.ts#L23-L27)
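As a closing illustration of how the tag key, tag value, and `TagMap` snippets above fit together with stats recording, here is a short end-to-end sketch. The measure name, unit, and recorded value are invented for the example; the API calls mirror the ones used throughout this patch.

```javascript
const { globalStats, MeasureUnit, TagMap } = require('@opencensus/core');

// A tag key and a tag value, as in the tag examples above
const methodKey = { name: "method" };
const methodValue = { value: "memcache.Client.Get" };

// Collect the pair into a TagMap
const tags = new TagMap();
tags.set(methodKey, methodValue);

// An example measure to record against (name, unit, and value are illustrative)
const mLatencyMs = globalStats.createMeasureDouble(
  "latency", MeasureUnit.MS, "The latency in milliseconds");

// Record a measurement together with the tags
globalStats.record([{ measure: mLatencyMs, value: 17 }], tags);
```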