diff --git a/content/quickstart/python/metrics.md b/content/quickstart/python/metrics.md index 0695f234..b392a9c4 100644 --- a/content/quickstart/python/metrics.md +++ b/content/quickstart/python/metrics.md @@ -98,16 +98,16 @@ import opencensus.tags import tag_value as tag_value_module # Create the measures # The latency in milliseconds -m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms") +m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms") # Counts the number of lines read in from standard input -m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1") +m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1") # Encounters the number of non EOF(end-of-file) errors. -m_errors = measure_module.Int("repl/errors", "The number of errors encountered", "1") +m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1") # Counts/groups the lengths of lines read in. -m_line_lengths = measure_module.Int("repl/line_lengths", "The distribution of line lengths", "By") +m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By") {{}} {{}} @@ -125,16 +125,16 @@ from opencensus.tags import tag_value as tag_value_module # Create the measures # The latency in milliseconds -m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms") +m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms") # Counts the number of lines read in from standard input -m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1") +m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1") # Encounters the number of non EOF(end-of-file) errors. 
-m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1") +m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1") # Counts/groups the lengths of lines read in. -m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By") +m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By") # The stats recorder stats_recorder = stats.Stats().stats_recorder @@ -199,16 +199,16 @@ from opencensus.tags import tag_value as tag_value_module # Create the measures # The latency in milliseconds -m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms") +m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms") # Counts the number of lines read in from standard input -m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1") +m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1") # Encounters the number of non EOF(end-of-file) errors. -m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1") +m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1") # Counts/groups the lengths of lines read in. 
-m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By") +m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By") # The stats recorder stats_recorder = stats.Stats().stats_recorder @@ -216,24 +216,24 @@ stats_recorder = stats.Stats().stats_recorder # Create the tag key key_method = tag_key_module.TagKey("method") -latency_view = view_module.View("demo/latency", "The distribution of the latencies", +latency_view = view_module.View("demo_latency", "The distribution of the latencies", [key_method], m_latency_ms, # Latency in buckets: # [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s] aggregation_module.DistributionAggregation([0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000])) -line_count_view = view_module.View("demo/lines_in", "The number of lines from standard input", +line_count_view = view_module.View("demo_lines_in", "The number of lines from standard input", [], m_lines_in, aggregation_module.CountAggregation()) -error_count_view = view_module.View("demo/errors", "The number of errors encountered", +error_count_view = view_module.View("demo_errors", "The number of errors encountered", [key_method], m_errors, aggregation_module.CountAggregation()) -line_length_view = view_module.View("demo/line_lengths", "Groups the lengths of keys in buckets", +line_length_view = view_module.View("demo_line_lengths", "Groups the lengths of keys in buckets", [], m_line_lengths, # Lengths: [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000] @@ -319,16 +319,16 @@ from opencensus.tags import tag_value as tag_value_module # Create the measures # The latency in milliseconds -m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms") +m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per 
REPL loop", "ms") # Counts the number of lines read in from standard input -m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1") +m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1") # Encounters the number of non EOF(end-of-file) errors. -m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1") +m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1") # Counts/groups the lengths of lines read in. -m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By") +m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By") # The stats recorder stats_recorder = stats.Stats().stats_recorder @@ -336,24 +336,24 @@ stats_recorder = stats.Stats().stats_recorder # Create the tag key key_method = tag_key_module.TagKey("method") -latency_view = view_module.View("demo/latency", "The distribution of the latencies", +latency_view = view_module.View("demo_latency", "The distribution of the latencies", [key_method], m_latency_ms, # Latency in buckets: # [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s] aggregation_module.DistributionAggregation([0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000])) -line_count_view = view_module.View("demo/lines_in", "The number of lines from standard input", +line_count_view = view_module.View("demo_lines_in", "The number of lines from standard input", [], m_lines_in, aggregation_module.CountAggregation()) -error_count_view = view_module.View("demo/errors", "The number of errors encountered", +error_count_view = view_module.View("demo_errors", "The number of errors encountered", [key_method], m_errors, aggregation_module.CountAggregation()) -line_length_view = view_module.View("demo/line_lengths", "Groups the lengths of keys in buckets", 
+line_length_view = view_module.View("demo_line_lengths", "Groups the lengths of keys in buckets", [], m_line_lengths, # Lengths: [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000] @@ -456,16 +456,16 @@ from opencensus.tags import tag_value as tag_value_module # Create the measures # The latency in milliseconds -m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms") +m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms") # Counts the number of lines read in from standard input -m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1") +m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1") # Encounters the number of non EOF(end-of-file) errors. -m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1") +m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1") # Counts/groups the lengths of lines read in. 
-m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By") +m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By") # The stats recorder stats_recorder = stats.Stats().stats_recorder @@ -473,24 +473,24 @@ stats_recorder = stats.Stats().stats_recorder # Create the tag key key_method = tag_key_module.TagKey("method") -latency_view = view_module.View("demo/latency", "The distribution of the latencies", +latency_view = view_module.View("demo_latency", "The distribution of the latencies", [key_method], m_latency_ms, # Latency in buckets: # [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s] aggregation_module.DistributionAggregation([0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000])) -line_count_view = view_module.View("demo/lines_in", "The number of lines from standard input", +line_count_view = view_module.View("demo_lines_in", "The number of lines from standard input", [], m_lines_in, aggregation_module.CountAggregation()) -error_count_view = view_module.View("demo/errors", "The number of errors encountered", +error_count_view = view_module.View("demo_errors", "The number of errors encountered", [key_method], m_errors, aggregation_module.CountAggregation()) -line_length_view = view_module.View("demo/line_lengths", "Groups the lengths of keys in buckets", +line_length_view = view_module.View("demo_line_lengths", "Groups the lengths of keys in buckets", [], m_line_lengths, # Lengths: [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000] @@ -583,12 +583,21 @@ prometheus --config.file=promconfig.yaml and then return to the terminal that's running the Python metrics quickstart and generate some work by typing inside it. 
## Viewing your metrics -With the above you should now be able to navigate to the Prometheus endpoint at http://localhost:8000 - +With the above you should now be able to navigate to the Prometheus UI at http://localhost:9090 which should show: +* Available metrics ![](/images/metrics-python-prometheus-all-metrics.png) +* Lines-in counts +![](/images/metrics-python-prometheus-lines_in.png) + +* Latency distributions +![](/images/metrics-python-prometheus-latency-distribution.png) + +* Line lengths distributions +![](/images/metrics-python-prometheus-line_lengths-distribution.png) + ## References Resource|URL diff --git a/static/images/metrics-python-prometheus-all-metrics.png b/static/images/metrics-python-prometheus-all-metrics.png index 98e0dc72..95205673 100644 Binary files a/static/images/metrics-python-prometheus-all-metrics.png and b/static/images/metrics-python-prometheus-all-metrics.png differ diff --git a/static/images/metrics-python-prometheus-latency-distribution.png b/static/images/metrics-python-prometheus-latency-distribution.png new file mode 100644 index 00000000..5f1e6d8c Binary files /dev/null and b/static/images/metrics-python-prometheus-latency-distribution.png differ diff --git a/static/images/metrics-python-prometheus-line_lengths-distribution.png b/static/images/metrics-python-prometheus-line_lengths-distribution.png new file mode 100644 index 00000000..cf4989cb Binary files /dev/null and b/static/images/metrics-python-prometheus-line_lengths-distribution.png differ diff --git a/static/images/metrics-python-prometheus-lines_in.png b/static/images/metrics-python-prometheus-lines_in.png new file mode 100644 index 00000000..44ddb1ba Binary files /dev/null and b/static/images/metrics-python-prometheus-lines_in.png differ