
Commit

Update Python metrics screenshots
PikkaPikkachu committed Jan 24, 2019
1 parent dbf6819 commit 4ffd4a9
Showing 5 changed files with 43 additions and 34 deletions.
77 changes: 43 additions & 34 deletions content/quickstart/python/metrics.md
@@ -98,16 +98,16 @@ from opencensus.tags import tag_value as tag_value_module

# Create the measures
# The latency in milliseconds
m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms")
m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms")

# Counts the number of lines read in from standard input
m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1")
m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1")

# Counts the number of non-EOF (end-of-file) errors.
m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1")
m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1")

# Counts/groups the lengths of lines read in.
m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By")
m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By")
{{</highlight>}}

{{<highlight python>}}
@@ -125,16 +125,16 @@ from opencensus.tags import tag_value as tag_value_module

# Create the measures
# The latency in milliseconds
m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms")
m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms")

# Counts the number of lines read in from standard input
m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1")
m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1")

# Counts the number of non-EOF (end-of-file) errors.
m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1")
m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1")

# Counts/groups the lengths of lines read in.
m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By")
m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By")

# The stats recorder
stats_recorder = stats.Stats().stats_recorder
@@ -199,41 +199,41 @@ from opencensus.tags import tag_value as tag_value_module

# Create the measures
# The latency in milliseconds
m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms")
m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms")

# Counts the number of lines read in from standard input
m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1")
m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1")

# Counts the number of non-EOF (end-of-file) errors.
m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1")
m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1")

# Counts/groups the lengths of lines read in.
m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By")
m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By")

# The stats recorder
stats_recorder = stats.Stats().stats_recorder

# Create the tag key
key_method = tag_key_module.TagKey("method")

latency_view = view_module.View("demo/latency", "The distribution of the latencies",
latency_view = view_module.View("demo_latency", "The distribution of the latencies",
[key_method],
m_latency_ms,
# Latency in buckets:
# [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
aggregation_module.DistributionAggregation([0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]))

line_count_view = view_module.View("demo/lines_in", "The number of lines from standard input",
line_count_view = view_module.View("demo_lines_in", "The number of lines from standard input",
[],
m_lines_in,
aggregation_module.CountAggregation())

error_count_view = view_module.View("demo/errors", "The number of errors encountered",
error_count_view = view_module.View("demo_errors", "The number of errors encountered",
[key_method],
m_errors,
aggregation_module.CountAggregation())

line_length_view = view_module.View("demo/line_lengths", "Groups the lengths of keys in buckets",
line_length_view = view_module.View("demo_line_lengths", "Groups the lengths of keys in buckets",
[],
m_line_lengths,
# Lengths: [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80B, >=100B, >=200B, >=400B, >=600B, >=800B, >=1000B]
@@ -319,41 +319,41 @@ from opencensus.tags import tag_value as tag_value_module

# Create the measures
# The latency in milliseconds
m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms")
m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms")

# Counts the number of lines read in from standard input
m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1")
m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1")

# Counts the number of non-EOF (end-of-file) errors.
m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1")
m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1")

# Counts/groups the lengths of lines read in.
m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By")
m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By")

# The stats recorder
stats_recorder = stats.Stats().stats_recorder

# Create the tag key
key_method = tag_key_module.TagKey("method")

latency_view = view_module.View("demo/latency", "The distribution of the latencies",
latency_view = view_module.View("demo_latency", "The distribution of the latencies",
[key_method],
m_latency_ms,
# Latency in buckets:
# [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
aggregation_module.DistributionAggregation([0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]))

line_count_view = view_module.View("demo/lines_in", "The number of lines from standard input",
line_count_view = view_module.View("demo_lines_in", "The number of lines from standard input",
[],
m_lines_in,
aggregation_module.CountAggregation())

error_count_view = view_module.View("demo/errors", "The number of errors encountered",
error_count_view = view_module.View("demo_errors", "The number of errors encountered",
[key_method],
m_errors,
aggregation_module.CountAggregation())

line_length_view = view_module.View("demo/line_lengths", "Groups the lengths of keys in buckets",
line_length_view = view_module.View("demo_line_lengths", "Groups the lengths of keys in buckets",
[],
m_line_lengths,
# Lengths: [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80B, >=100B, >=200B, >=400B, >=600B, >=800B, >=1000B]
@@ -456,41 +456,41 @@ from opencensus.tags import tag_value as tag_value_module

# Create the measures
# The latency in milliseconds
m_latency_ms = measure_module.MeasureFloat("repl/latency", "The latency in milliseconds per REPL loop", "ms")
m_latency_ms = measure_module.MeasureFloat("repl_latency", "The latency in milliseconds per REPL loop", "ms")

# Counts the number of lines read in from standard input
m_lines_in = measure_module.MeasureInt("repl/lines_in", "The number of lines read in", "1")
m_lines_in = measure_module.MeasureInt("repl_lines_in", "The number of lines read in", "1")

# Counts the number of non-EOF (end-of-file) errors.
m_errors = measure_module.MeasureInt("repl/errors", "The number of errors encountered", "1")
m_errors = measure_module.MeasureInt("repl_errors", "The number of errors encountered", "1")

# Counts/groups the lengths of lines read in.
m_line_lengths = measure_module.MeasureInt("repl/line_lengths", "The distribution of line lengths", "By")
m_line_lengths = measure_module.MeasureInt("repl_line_lengths", "The distribution of line lengths", "By")

# The stats recorder
stats_recorder = stats.Stats().stats_recorder

# Create the tag key
key_method = tag_key_module.TagKey("method")

latency_view = view_module.View("demo/latency", "The distribution of the latencies",
latency_view = view_module.View("demo_latency", "The distribution of the latencies",
[key_method],
m_latency_ms,
# Latency in buckets:
# [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
aggregation_module.DistributionAggregation([0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]))

line_count_view = view_module.View("demo/lines_in", "The number of lines from standard input",
line_count_view = view_module.View("demo_lines_in", "The number of lines from standard input",
[],
m_lines_in,
aggregation_module.CountAggregation())

error_count_view = view_module.View("demo/errors", "The number of errors encountered",
error_count_view = view_module.View("demo_errors", "The number of errors encountered",
[key_method],
m_errors,
aggregation_module.CountAggregation())

line_length_view = view_module.View("demo/line_lengths", "Groups the lengths of keys in buckets",
line_length_view = view_module.View("demo_line_lengths", "Groups the lengths of keys in buckets",
[],
m_line_lengths,
# Lengths: [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80B, >=100B, >=200B, >=400B, >=600B, >=800B, >=1000B]
@@ -583,12 +583,21 @@ prometheus --config.file=promconfig.yaml
and then return to the terminal that's running the Python metrics quickstart and generate some work by typing inside it.
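
Each line typed into that terminal drives the record path built from the measures above. The following is a minimal sketch of what one iteration might do, assuming the `m_latency_ms`, `m_lines_in`, `m_line_lengths`, `stats_recorder`, and `key_method` objects from the earlier snippets; the function name is illustrative, not the exact quickstart code.

{{<highlight python>}}
import sys
import time

from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module

# Assumes m_latency_ms, m_lines_in, m_line_lengths, stats_recorder and
# key_method are already defined as in the snippets above.
def record_one_line():
    start = time.time()
    line = sys.stdin.readline()

    # Tag the measurements so views keyed on "method" can group them.
    tmap = tag_map_module.TagMap()
    tmap.insert(key_method, tag_value_module.TagValue("repl"))

    mmap = stats_recorder.new_measurement_map()
    mmap.measure_int_put(m_lines_in, 1)
    mmap.measure_int_put(m_line_lengths, len(line))
    mmap.measure_float_put(m_latency_ms, (time.time() - start) * 1000.0)
    mmap.record(tmap)
{{</highlight>}}

Each `mmap.record(tmap)` call feeds the registered views, which the Prometheus exporter then exposes for scraping.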

## Viewing your metrics
With the above you should now be able to navigate to the Prometheus endpoint at http://localhost:8000

With the above you should now be able to navigate to the Prometheus UI at http://localhost:9090
which should show:

* Available metrics
![](/images/metrics-python-prometheus-all-metrics.png)

* Lines-in counts
![](/images/metrics-python-prometheus-lines_in.png)

* Latency distributions
![](/images/metrics-python-prometheus-latency-distribution.png)

* Line lengths distributions
![](/images/metrics-python-prometheus-line_lengths-distribution.png)
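
The scrape endpoint on port 8000 exists because the quickstart registers a Prometheus stats exporter with the view manager before recording anything. Here is a minimal sketch, assuming the four views defined earlier; the exporter import path and the `oc_python` namespace are assumptions for the opencensus release of this era (newer releases move the exporter into the opencensus-ext-prometheus package).

{{<highlight python>}}
from opencensus.stats import stats as stats_module
# Assumed import path for this opencensus release; newer versions ship the
# exporter in the separate opencensus-ext-prometheus package.
from opencensus.stats.exporters import prometheus_exporter as prometheus

stats = stats_module.Stats()
view_manager = stats.view_manager

# Expose the aggregated views at http://localhost:8000 for Prometheus to scrape.
exporter = prometheus.new_stats_exporter(
    prometheus.Options(namespace="oc_python", port=8000))
view_manager.register_exporter(exporter)

# Each view must be registered before its data is aggregated and exported.
for view in (latency_view, line_count_view, error_count_view, line_length_view):
    view_manager.register_view(view)
{{</highlight>}}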

## References

Resource|URL
Binary file modified static/images/metrics-python-prometheus-all-metrics.png
