[Obs][kbn-data-forge] Adding example config
simianhacker committed Feb 12, 2024

1 parent 77fe14e commit 657586d
Showing 32 changed files with 1,133 additions and 0 deletions.
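These example configs are intended for the kbn-data-forge tool in this package. A minimal sketch of how one of them might be run against a local stack, assuming the CLI script at x-pack/scripts/data_forge.js accepts a --config flag pointing at one of these YAML files (the flag name is an assumption, not confirmed by this commit):

node x-pack/scripts/data_forge.js --config x-pack/packages/kbn-data-forge/example_config/fake_stack.yaml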
@@ -0,0 +1,26 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
eventsPerCycle: 5000
dataset: "fake_logs"

schedule:
# Start with normal logs
- template: "good"
start: "now-45m"
end: "now+1m"
randomness: 0.1
eventsPerCycle: 5000
# Sudden change to a new number of logs
- template: "good"
start: "now+1m"
end: "now+10m"
randomness: 0.1
eventsPerCycle: 1000


@@ -0,0 +1,35 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
eventsPerCycle: 1000
dataset: "fake_logs"

schedule:
# Start with normal logs
- template: "good"
start: "now-10m"
end: "now+1m"
randomness: 0.1
eventsPerCycle: 1000
# Progressively change to a new number of logs
- template: "good"
start: "now+1m"
end: "now+5m"
randomness: 0.1
eventsPerCycle:
start: 1000
end: 5000
method: "linear"
# Stay at the new number of logs
- template: "good"
start: "now+5m"
end: "now+10m"
randomness: 0.1
eventsPerCycle: 5000


@@ -0,0 +1,38 @@
---
# Define the connection to Elasticsearch
elasticsearch:
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_logs"
eventsPerCycle: 2000
interval: 6000

# Define the schedule
schedule:
- template: "bad"
start: "now-5m"
end: "now+1m"
eventsPerCycle:
start: 5000
end: 3000
method: "sine"
- template: "good"
start: "now+1m"
end: "now+2m"
eventsPerCycle: 3000
- template: "bad"
start: "now+2m"
end: "now+10m"
eventsPerCycle:
start: 5000
end: 3000
method: "sine"
@@ -0,0 +1,40 @@
---
# Define the connection to Elasticsearch
elasticsearch:
installKibanaUser: false

# Define the connection to Kibana
kibana:
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_logs"
eventsPerCycle: 5000

# Define the schedule
schedule:
# Normal logs
- template: "good"
eventsPerCycle: 5000
start: "now-5m"
end: "now+5m"
randomness: 0.1
# Spike in logs
- template: "bad"
eventsPerCycle: 10000
start: "now+1m"
end: "now+2m"
randomness: 0.1
# Drop in logs: normal logs stop and only a few logs are sent
- template: "bad"
eventsPerCycle: 500
start: "now+5m"
end: "now+7m"
randomness: 0.1
# Return to normal logs
- template: "good"
eventsPerCycle: 5000
start: "now+7m"
end: "now+17m"
randomness: 0.1
@@ -0,0 +1,23 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
eventsPerCycle: 1000
dataset: "fake_logs"

schedule:
- template: "good"
start: "now-2h"
end: "now-1h"
- template: "bad"
start: "now-1h"
end: "now-15m"
eventsPerCycle: 5000
- template: "good"
start: "now-15m"
end: false

31 changes: 31 additions & 0 deletions x-pack/packages/kbn-data-forge/example_config/fake_logs_sine.yaml
@@ -0,0 +1,31 @@
---
# Define the connection to Elasticsearch
elasticsearch:
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_logs"
eventsPerCycle: 2000
interval: 6000
scenario: "sine_logs"

# Define the schedule
schedule:
- template: "bad"
start: "now-5m"
end: "now+10m"
eventsPerCycle:
start: 2000
end: 3000
method: "sine"
options:
period: 60
randomness: 0.1
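For the "sine" method above, a plausible reading (an assumption, not confirmed by this commit) is that eventsPerCycle oscillates between the start and end values with the configured period, roughly:

eventsPerCycle(t) ≈ start + (end - start) * (sin(2 * pi * t / period) + 1) / 2

with randomness: 0.1 presumably adding jitter on the order of 10% on top; the unit of period is not stated here.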
39 changes: 39 additions & 0 deletions x-pack/packages/kbn-data-forge/example_config/fake_stack.yaml
@@ -0,0 +1,39 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_stack"
eventsPerCycle: 200
reduceWeekendTrafficBy: 0.5

schedule:
# Start with good events
- template: "good"
start: "now-14d"
end: "now-6d-5h-3m"
eventsPerCycle: 200
randomness: 0.2
- template: "connectionTimeout"
start: "now-6d-5h-3m"
end: "now-6d-4h-44m"
eventsPerCycle: 200
randomness: 0.2
- template: "good"
start: "now-6d-4h-44m"
end: "now-1d"
eventsPerCycle: 200
randomness: 0.2
- template: "bad"
start: "now-1d"
end: "now-1d+45m"
eventsPerCycle: 200
randomness: 0.2
- template: "good"
start: "now-1d+45m"
end: false
eventsPerCycle: 200
randomness: 0.2
41 changes: 41 additions & 0 deletions x-pack/packages/kbn-data-forge/example_config/full_example.yaml
@@ -0,0 +1,41 @@
---
# Define the connection to Elasticsearch
elasticsearch:
host: "http://localhost:9200"
username: "elastic"
password: "changeme"
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: true

# Define the indexing options
indexing:
dataset: "fake_stack"
interval: 6000
eventsPerCycle: 1
payloadSize: 10000
concurrency: 5

# Define the schedule
schedule:
- template: "good"
start: "now-1h"
end: "now-15m"
- template: "bad"
start: "now-15m"
end: "now"
# This schedule will run indefinitely
- template: "good"
start: "now"
end: false
# This will add a 2-minute delay to the indexing every 5 minutes.
# Once the 2 minutes are up, the queue will backfill the events it collected
# during the delay. This only makes sense if `end` is `false`.
delayInMinutes: 2
delayEveryMinutes: 5

20 changes: 20 additions & 0 deletions x-pack/packages/kbn-data-forge/example_config/future_example.yaml
@@ -0,0 +1,20 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_stack"

schedule:
- template: "good"
start: "now"
end: "now+1h"
- template: "bad"
start: "now+1h"
end: "now+90m"
- template: "good"
start: "now+90m"
end: "now+2h"
@@ -0,0 +1,20 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_stack"

schedule:
- template: "good"
start: "now-2h"
end: "now-1h"
- template: "bad"
start: "now-1h"
end: "now-15m"
- template: "good"
start: "now-15m"
end: false
39 changes: 39 additions & 0 deletions x-pack/packages/kbn-data-forge/example_config/log_drop.yml
@@ -0,0 +1,39 @@
---
# Define the connection to Elasticsearch
elasticsearch:
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

indexing:
eventsPerCycle: 100
dataset: "fake_logs"
scenario: "log_drop"

schedule:
# Start with good events at a rate of 50
- template: "good"
start: "now-30m"
end: "now-5m"
eventsPerCycle: 50
randomness: 0.1
# Ramp the good events up from 50 to 100
- template: "good"
start: "now-5m"
end: "now+5m"
eventsPerCycle:
start: 50
end: 100
method: "linear"
randomness: 0.1
# Drop to very few good events
- template: "good"
start: "now+5m"
end: false
eventsPerCycle: 10
randomness: 0.1
@@ -0,0 +1,29 @@
---
# Define the connection to Elasticsearch
elasticsearch:
host: "http://localhost:9200"
username: "elastic"
password: "changeme"
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_logs"
eventsPerCycle: 2000
interval: 6000

# Define the schedule
schedule:
# This step sends 2000 logs every 30 seconds
- template: "good"
eventsPerCycle: 2000
start: "now-5m"
end: "now+5m"

@@ -0,0 +1,35 @@
---
# Define the connection to Elasticsearch
elasticsearch:
host: "http://localhost:9200"
username: "elastic"
password: "changeme"
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_logs"
eventsPerCycle: 2000
interval: 6000

# Define the schedule
schedule:
# This step sends 2000 logs every 30 seconds
- template: "good"
eventsPerCycle: 2000
start: "now-2m"
end: "now+1m"
# This step sends 6000 logs every 30 seconds
- template: "bad"
start: "now+1m"
end: "now+10m"
eventsPerCycle: 6000
randomness: 0.1

@@ -0,0 +1,35 @@
---
# Define the connection to Elasticsearch
elasticsearch:
host: "http://localhost:9200"
username: "elastic"
password: "changeme"
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_logs"
eventsPerCycle: 2000
interval: 6000
concurrency: 4

# Define the schedule
schedule:
# This step sends 2000 logs every 30 seconds for 4 hosts
- template: "good"
eventsPerCycle: 2000
start: "now-2m"
end: "now+1m"
# This step sends 8000 logs every 30 seconds for 4 hosts
- template: "bad"
start: "now+1m"
end: "now+5m"
eventsPerCycle: 8000
randomness: 0.1
@@ -0,0 +1,33 @@
---
# Define the connection to Elasticsearch
elasticsearch:
host: "http://localhost:9200"
username: "elastic"
password: "changeme"
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_stack"
interval: 6000
eventsPerCycle: 1
payloadSize: 10000
concurrency: 5

# Define the schedule
schedule:
# This step will send "good" events
- template: "good"
start: "now-2m"
end: "now+1m"
# This step will send "bad" events, with errors
- template: "bad"
start: "now+1m"
end: "now+10m"
@@ -0,0 +1,37 @@
---
# Define the connection to Elasticsearch
elasticsearch:
host: "http://localhost:9200"
username: "elastic"
password: "changeme"
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_stack"
interval: 6000
eventsPerCycle: 1
payloadSize: 10000
concurrency: 5

# Define the schedule
schedule:
# This step will send "good" events
- template: "good"
start: "now-2m"
end: "now+1m"
# This step will send "bad" events, with errors
- template: "bad"
start: "now+1m"
end: "now+5m"
# Recover to good events
- template: "good"
start: "now+5m"
end: "now+10m"
@@ -0,0 +1,40 @@
---
# Define the connection to Elasticsearch
elasticsearch:
installKibanaUser: false

# Define the connection to Kibana
kibana:
host: "http://localhost:5601"
username: "elastic"
password: "changeme"
installAssets: false

# Define the indexing options
indexing:
dataset: "fake_logs"
eventsPerCycle: 2000
interval: 6000

# Define the schedule
schedule:
# This step sends 2000 logs every 30 seconds
- template: "good"
eventsPerCycle: 2000
start: "now-2m"
end: "now+1m"
- template: "good"
start: "now+1m"
end: "now+5m"
randomness: 0.1
eventsPerCycle:
start: 2000
end: 6000
method: "linear"
# This step sends 6000 logs every 30 seconds
- template: "bad"
start: "now+5m"
end: "now+10m"
eventsPerCycle: 6000
randomness: 0.1

31 changes: 31 additions & 0 deletions x-pack/packages/kbn-data-forge/example_config/metric_example.yaml
@@ -0,0 +1,31 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_hosts"
eventsPerCycle: 1
interval: 10000

schedule:
# Start with good events
- template: "good"
start: "now-2h"
end: false
eventsPerCycle: 1
metrics:
- name: "system.cpu.user.pct"
method: "sine"
start: 1
end: 4
period: 2500
randomness: 0.1
- name: "system.cpu.system.pct"
method: "exp"
start: 1
end: 4
randomness: 0.1

@@ -0,0 +1,59 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
dataset: "fake_stack"
eventsPerCycle: 100

schedule:
# Ramp up
- template: "good"
start: "now"
end: "now+60m"
eventsPerCycle:
start: 100
end: 1
method: 'linear'
randomness: 0.1
- template: "bad"
start: "now"
end: "now+60m"
eventsPerCycle:
start: 1
end: 100
method: 'linear'
randomness: 0.1
# Level off
- template: "bad"
start: "now+60m"
end: "now+90m"
eventsPerCycle: 100
randomness: 0.1
# Ramp down
- template: "good"
start: "now+90m"
end: "now+150m"
eventsPerCycle:
start: 1
end: 100
method: 'linear'
randomness: 0.1
- template: "bad"
start: "now+90m"
end: "now+150m"
eventsPerCycle:
start: 100
end: 1
method: 'linear'
randomness: 0.1
# Back to normal
- template: "good"
start: "now+150m"
end: "now+210m"
eventsPerCycle: 100
randomness: 0.1

@@ -0,0 +1,26 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_hosts"
scenario: "scenario0"
eventsPerCycle: 1
interval: 10000

schedule:
# Start events at a rate of 100
- template: "good"
start: "now-30m"
end: "now+2m"
eventsPerCycle: 100
randomness: 0.1
# Step change events to a rate of 10
- template: "good"
start: "now+2m"
end: "now+10m"
eventsPerCycle: 10

@@ -0,0 +1,35 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_hosts"
scenario: "scenario1"
eventsPerCycle: 1
interval: 10000

schedule:
# Start events at a rate of 50
- template: "good"
start: "now-30m"
end: "now+2m"
eventsPerCycle: 50
randomness: 0.1
# Ramp events from a rate of 50 to 200
- template: "good"
start: "now+2m"
end: "now+3m"
eventsPerCycle:
start: 50
end: 200
method: "linear"
randomness: 0.1
- template: "good"
start: "now+3m"
end: "now+15m"
eventsPerCycle: 200
randomness: 0.1

@@ -0,0 +1,43 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_hosts"
scenario: "scenario2"
eventsPerCycle: 50
interval: 10000

schedule:
# Start with good events
- template: "good"
start: "now-30m"
end: "now+2m"
randomness: 0.1
- template: "good"
start: "now+2m"
end: "now+5m"
metrics:
- name: "system.cpu.user.pct"
method: "exp"
start: 10
end: 2.5
randomness: 0.1
- name: "system.memory.actual.used.pct"
method: "exp"
start: 2.5
end: 0.5
randomness: 0.1
- name: "system.filesystem.used.pct"
method: "linear"
start: 2.5
end: 0.5
randomness: 0.1
- template: "good"
start: "now+5m"
end: "now+15m"
randomness: 0.1

@@ -0,0 +1,59 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_hosts"
scenario: "scenario3"
eventsPerCycle: 50
interval: 10000

schedule:
# Start with good events
- template: "good"
start: "now-30m"
end: "now+2m"
randomness: 0.1
# Create a spike in a metric
- template: "good"
start: "now+1m"
end: "now+2m"
randomness: 0.1
metrics:
- name: "system.load.1"
method: "linear"
start: 2.5
end: 30
randomness: 0.1
- template: "good"
start: "now+2m"
end: "now+3m"
randomness: 0.1
metrics:
- name: "system.cpu.user.pct"
method: "exp"
start: 2.5
end: 10
randomness: 0.1
- template: "good"
start: "now+3m"
end: "now+5m"
metrics:
- name: "system.memory.actual.used.pct"
method: "exp"
start: 2.5
end: 0.5
randomness: 0.1
- name: "system.filesystem.used.pct"
method: "linear"
start: 2.5
end: 0.5
randomness: 0.1
- template: "good"
start: "now+5m"
end: "now+15m"
randomness: 0.1

@@ -0,0 +1,27 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
dataset: "fake_hosts"
scenario: "custom_threshold_log_count"
eventsPerCycle: 50
concurrency: 5

# Conditions: count logs < 100 in the last minute
schedule:
# Start events at a rate of 100
- template: "good"
start: "now-30m"
end: "now-1m"
eventsPerCycle: 100
randomness: 0.1
# Step change events to a rate of 10
- template: "good"
start: "now-1m"
end: "now+20m"
eventsPerCycle: 10

@@ -0,0 +1,27 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
dataset: "fake_hosts"
scenario: "custom_threshold_log_count_groupby"
eventsPerCycle: 50
concurrency: 5

# Conditions: count logs < 40 in the last minute, grouped by event.dataset
schedule:
# Start events at a rate of 100
- template: "good"
start: "now-30m"
end: "now-1m"
eventsPerCycle: 100
randomness: 0.1
# Step change events to a rate of 10
- template: "good"
start: "now-1m"
end: "now+20m"
eventsPerCycle: 10

@@ -0,0 +1,23 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
dataset: "fake_hosts"
scenario: "custom_threshold_log_count_nodata"
eventsPerCycle: 50
concurrency: 5

# Conditions: count logs < 5 in the last minute (will trigger no data after 1 min)
schedule:
# Start events at a rate of 10
- template: "good"
start: "now-30m"
end: "now-1m"
eventsPerCycle: 10
randomness: 0.1
# Stop data

@@ -0,0 +1,44 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
dataset: "fake_hosts"
scenario: "custom_threshold_metric_avg"
eventsPerCycle: 50

# Conditions: avg. system.cpu.user.pct > 80 in the last minute
schedule:
# Start with avg. system.cpu.user.pct = 2.5
- template: "good"
start: "now-30m"
end: "now-3m"
randomness: 0.1
# Transition to avg. system.cpu.user.pct = 90
- template: "good"
start: "now-3m"
end: "now-2m"
metrics:
- name: "system.cpu.user.pct"
method: "linear"
start: 2.5
end: 90
randomness: 0.1
- template: "good"
start: "now-2m"
end: "now+13m"
metrics:
- name: "system.cpu.user.pct"
method: "linear"
start: 90
end: 90
randomness: 0.05
# Go back to avg. system.cpu.user.pct = 2.5
- template: "good"
start: "now+13m"
end: "now+25m"
randomness: 0.1

@@ -0,0 +1,45 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
dataset: "fake_hosts"
scenario: "custom_threshold_metric_avg_groupby"
eventsPerCycle: 5
interval: 5000

# Conditions: avg. system.cpu.user.pct > 80 in the last 5 minutes, grouped by host.name
schedule:
# Start with avg. system.cpu.user.pct = 2.5
- template: "good"
start: "now-30m"
end: "now-6m"
randomness: 0.1
# Transition to avg. system.cpu.user.pct = 90
- template: "good"
start: "now-6m"
end: "now-5m"
metrics:
- name: "system.cpu.user.pct"
method: "linear"
start: 2.5
end: 90
randomness: 0.1
- template: "good"
start: "now-5m"
end: "now+23m"
metrics:
- name: "system.cpu.user.pct"
method: "linear"
start: 90
end: 90
randomness: 0.05
# Go back to avg. system.cpu.user.pct = 2.5
- template: "good"
start: "now+23m"
end: "now+45m"
randomness: 0.1

@@ -0,0 +1,20 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
dataset: "fake_hosts"
scenario: "custom_threshold_metric_avg_nodata"
eventsPerCycle: 50

# Conditions: avg. system.cpu.user.pct < 1 in the last minute (will trigger no data after 2 min)
schedule:
# Start with avg. system.cpu.user.pct = 2.5
- template: "good"
start: "now-30m"
end: "now-1m"
randomness: 0.1
# Stop data
@@ -0,0 +1,22 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: false

indexing:
dataset: "fake_stack"
interval: 3600000

schedule:
# Start with good events
- template: "good"
start: "now-7d"
end: "now-45m"
- template: "bad"
start: "now-2h"
end: "now+25m"
randomness: 0.2
interval: 60000

@@ -0,0 +1,45 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
eventsPerCycle: 1000
dataset: "fake_logs"

schedule:
- template: "good"
start: "now-90m"
end: "now-75m"
eventsPerCycle: 1000
randomness: 0.1
- template: "bad"
start: "now-75m"
end: "now-60m"
randomness: 0.1
eventsPerCycle:
start: 1000
end: 5000
method: "exp"
- template: "bad"
start: "now-60m"
end: "now-45m"
eventsPerCycle: 5000
randomness: 0.1
- template: "good"
start: "now-45m"
end: "now-30m"
eventsPerCycle:
start: 5000
end: 1000
method: "exp"
randomness: 0.1
- template: "good"
start: "now-30m"
end: false
eventsPerCycle: 1000
randomness: 0.1


@@ -0,0 +1,66 @@
---
elasticsearch:
installKibanaUser: false

kibana:
installAssets: true

indexing:
eventsPerCycle: 100
dataset: "fake_stack"

schedule:
# Start with good events
- template: "good"
start: "now-90m"
end: "now-45m"
eventsPerCycle: 100
randomness: 0.1
# Transition from good to bad by setting up two schedules with the same
# time frames but different templates, ramping one from 100 to 1 and the
# other from 1 to 100
- template: "good"
start: "now-45m"
end: "now-35m"
eventsPerCycle:
start: 100
end: 1
method: "linear"
randomness: 0.1
- template: "bad"
start: "now-45m"
end: "now-35m"
eventsPerCycle:
start: 1
end: 100
method: "linear"
randomness: 0.1
# Bad for 10 minutes
- template: "bad"
start: "now-35m"
end: "now-25m"
eventsPerCycle: 100
randomness: 0.1
# Transition back from bad to good
- template: "good"
start: "now-25m"
end: "now-15m"
eventsPerCycle:
start: 1
end: 100
method: "linear"
randomness: 0.1
- template: "bad"
start: "now-25m"
end: "now-15m"
eventsPerCycle:
start: 100
end: 1
method: "linear"
randomness: 0.1
# Continue with good events
- template: "good"
start: "now-15m"
end: false
eventsPerCycle: 100
randomness: 0.1
