Grafana as Code
Transfer our dashboard into a code representation via grafanalib.
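The dashboard definition itself is Python code using grafanalib; the Flux queries below are kept in separate files and loaded into the panels. A minimal sketch of that wiring (assuming grafanalib is installed; the panel title and data-source name are illustrative, not taken from this commit):

    from pathlib import Path

    from grafanalib.core import Dashboard, Graph, Row
    from grafanalib.influxdb import InfluxDBTarget

    QUERY_DIR = Path("deploy/grafana-dashboard/queries")

    def flux_target(file_name):
        # Load the Flux source from its file so the query text is versioned
        # alongside the dashboard code instead of inside exported JSON.
        return InfluxDBTarget(query=(QUERY_DIR / file_name).read_text())

    dashboard = Dashboard(
        title="Poseidon",
        rows=[
            Row(panels=[
                Graph(
                    title="Used Runners",
                    dataSource="influxdb",  # illustrative data-source name
                    targets=[flux_target("used-runner.flux")],
                ),
            ]),
        ],
    ).auto_panel_ids()

Such a file would be rendered to dashboard JSON with grafanalib's generate-dashboard CLI (e.g. generate-dashboard -o poseidon.json poseidon.dashboard.py). The ${environment_ids:json} and ${stages:json} placeholders in the queries are Grafana template variables, interpolated at query time rather than by grafanalib.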
@@ -0,0 +1,19 @@
import "date"

// The date truncation is needed because Poseidon sends all influx events at the same time when starting up. Without it, not the last but a random value would be displayed.
// Since the highest value is the correct one in this startup process, we choose the highest value of the last events.

data = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> group(columns: ["stage"], mode: "by")
    |> map(fn: (r) => ({ r with _time: date.truncate(t: r._time, unit: 1m) }))

deploy_times = data
    |> last()
    |> keep(columns: ["stage", "_time"])

join(tables: {key1: data, key2: deploy_times}, on: ["stage", "_time"], method: "inner")
    |> max()
    |> keep(columns: ["stage", "_value"])
    |> rename(columns: {_value: ""})

@@ -0,0 +1,8 @@
from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
    |> filter(fn: (r) => r["_field"] == "count")
    |> group(columns: ["stage"], mode: "by")
    |> last()
    |> keep(columns: ["_value", "stage"])
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/environment-ids.flux (new file)
@@ -0,0 +1,6 @@
from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_field"] == "duration")
    |> keep(columns: ["environment_id"])
    |> distinct(column: "environment_id")
    |> keep(columns: ["_value"])

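This query populates a dashboard variable: the distinct environment IDs become the selectable values, and Grafana interpolates the selection into the ${environment_ids:json} filters used throughout the queries below. With the :json format, a selection of the IDs 1 and 14 would expand to set: ["1", "14"]. The stages.flux query further down fills the ${stages:json} variable in the same way.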
@@ -0,0 +1,27 @@
import "strings"

result = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_field"] == "duration")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> filter(fn: (r) => r["_measurement"] == "poseidon_/execute" or r["_measurement"] == "poseidon_/files" or r["_measurement"] == "poseidon_/websocket")
    |> filter(fn: (r) => exists r.environment_id)
    |> keep(columns: ["_time", "_value", "environment_id", "stage"])
    |> aggregateWindow(every: v.windowPeriod, fn: mean)
    |> map(fn: (r) => ({r with _value: r._value * 3.0})) // Each execution has three requests

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

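The envMapping join at the end of this query (and of most queries below) turns the numeric environment ID into a readable series label: the latest creation event per environment supplies its Docker image, the openhpi/co_execenv_ prefix is stripped, and the first letter of the stage plus the environment ID are appended. For example, environment 14 running openhpi/co_execenv_python:3.8 on the production stage would be labelled python:3.8(p14).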
deploy/grafana-dashboard/queries/execution-duration.flux (new file)
@@ -0,0 +1,28 @@
import "strings"

result = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_field"] == "duration")
    |> filter(fn: (r) => r["_measurement"] == "poseidon_/execute" or r["_measurement"] == "poseidon_/files" or r["_measurement"] == "poseidon_/websocket")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> filter(fn: (r) => exists r.environment_id)
    |> keep(columns: ["_value", "runner_id", "environment_id", "stage"])
    |> group(columns: ["environment_id", "stage"])
    |> mean()
    |> map(fn: (r) => ({r with _value: r._value * 3.0})) // Each execution has three requests

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

@@ -0,0 +1,27 @@
import "strings"
import "date"

result = from(bucket: "poseidon/autogen")
    |> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
    |> filter(fn: (r) => r["_measurement"] == "poseidon_aws_executions" or r["_measurement"] == "poseidon_nomad_executions")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["environment_id", "stage"], mode: "by")
    |> aggregateWindow(every: 1m, fn: count, createEmpty: true)
    |> aggregateWindow(every: duration(v: int(v: v.windowPeriod) * 5), fn: mean, createEmpty: true)

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/executions-per-minute.flux (new file)
@@ -0,0 +1,28 @@
import "date"
import "strings"

data = from(bucket: "poseidon/autogen")
    |> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
    |> filter(fn: (r) => r["_measurement"] == "poseidon_aws_executions" or r["_measurement"] == "poseidon_nomad_executions")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["environment_id", "stage"], mode: "by")
    |> aggregateWindow(every: 1m, fn: count, createEmpty: true)
    |> keep(columns: ["_value", "environment_id", "stage"])
    |> mean()

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: data, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

@@ -0,0 +1,37 @@
import "strings"

data = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))

runner_deletions = data
    |> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
    |> filter(fn: (r) => r["event_type"] == "deletion")
    |> keep(columns: ["_time", "id", "stage"])
    |> rename(columns: {id: "runner_id"})

executions = data
    |> filter(fn: (r) => r["_measurement"] == "poseidon_nomad_executions" or r["_measurement"] == "poseidon_aws_executions")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> keep(columns: ["_value", "environment_id", "runner_id"])
    |> count()

result = join(tables: {key1: executions, key2: runner_deletions}, on: ["runner_id"], method: "inner")
    |> keep(columns: ["_value", "_time", "environment_id", "stage"])
    |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/executions-per-runner.flux (new file)
@@ -0,0 +1,37 @@
import "strings"

data = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))

runner_deletions = data
    |> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
    |> filter(fn: (r) => r["event_type"] == "deletion")
    |> keep(columns: ["id", "stage"])
    |> rename(columns: {id: "runner_id"})

executions = data
    |> filter(fn: (r) => r["_measurement"] == "poseidon_nomad_executions" or r["_measurement"] == "poseidon_aws_executions")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> keep(columns: ["_value", "environment_id", "runner_id"])
    |> count()

result = join(tables: {key1: executions, key2: runner_deletions}, on: ["runner_id"], method: "inner")
    |> keep(columns: ["_value", "environment_id", "stage"])
    |> mean()

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/idle-runner.flux (new file)
@@ -0,0 +1,25 @@
import "strings"

myWindowPeriod = if int(v: v.windowPeriod) >= int(v: 30s) then duration(v: int(v: v.windowPeriod) * 5) else v.windowPeriod
result = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_nomad_idle_runners" and r["_field"] == "count")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> keep(columns: ["_value", "_time", "environment_id", "stage"])
    |> aggregateWindow(every: myWindowPeriod, fn: min, createEmpty: false)

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

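The myWindowPeriod indirection widens the aggregation window beyond Grafana's suggested v.windowPeriod (here by a factor of 5 once the suggestion reaches 30s) so that long time ranges produce fewer points; request-body-size.flux and runner-per-minute.flux below apply the same idea with factors of 5/10 and 15/30.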
deploy/grafana-dashboard/queries/number-of-executions.flux (new file)
@@ -0,0 +1,26 @@
import "strings"

result = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_aws_executions" or r["_measurement"] == "poseidon_nomad_executions")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["environment_id", "stage"], mode: "by")
    |> count()
    |> keep(columns: ["_value", "environment_id", "stage"])

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["isDeletion"] == "false")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/prewarming-pool-size.flux (new file)
@@ -0,0 +1,25 @@
import "strings"

result = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_poolsize")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> group(columns: ["environment_id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["_value", "environment_id", "stage"])

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/request-body-size.flux (new file)
@@ -0,0 +1,25 @@
import "strings"

myWindowPeriod = if int(v: v.windowPeriod) > int(v: 1m) then duration(v: int(v: v.windowPeriod) * 10) else duration(v: int(v: v.windowPeriod) * 5)
result = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_field"] == "request_size")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> keep(columns: ["_time", "_value", "environment_id", "stage"])
    |> aggregateWindow(every: myWindowPeriod, fn: mean, createEmpty: false)

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/request-latency.flux (new file)
@@ -0,0 +1,7 @@
from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_field"] == "duration")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> keep(columns: ["_time", "_value"])
    |> aggregateWindow(every: v.windowPeriod, fn: mean)

deploy/grafana-dashboard/queries/requests-per-minute.flux (new file)
@@ -0,0 +1,17 @@
import "date"

data = from(bucket: "poseidon/autogen")
    |> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
    |> filter(fn: (r) => r._field == "duration")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> keep(columns: ["_time", "_value", "status"])

all = data |> set(key: "status", value: "all")

result = union(tables: [data, all])
    |> aggregateWindow(every: 1m, fn: count, createEmpty: true)

if int(v: v.windowPeriod) > int(v: 1m)
    then result |> aggregateWindow(every: duration(v: int(v: v.windowPeriod) * 2), fn: mean, createEmpty: true)
    else result |> aggregateWindow(every: duration(v: int(v: v.windowPeriod) * 5), fn: mean, createEmpty: false)

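The aggregation here happens in two steps: counting in fixed 1m windows yields a true requests-per-minute rate, and the second aggregateWindow merely averages those per-minute counts over a display window. The union with the "all" series adds a total line across all status codes alongside the per-status ones.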
deploy/grafana-dashboard/queries/runner-per-minute.flux (new file)
@@ -0,0 +1,29 @@
import "strings"
import "date"

myWindowPeriod = if int(v: v.windowPeriod) > int(v: 2m) then duration(v: int(v: v.windowPeriod) * 30) else duration(v: int(v: v.windowPeriod) * 15)
result = from(bucket: "poseidon/autogen")
    |> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
    |> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["environment_id", "stage"], mode: "by")
    |> aggregateWindow(every: 1m, fn: count, createEmpty: true)
    |> keep(columns: ["_value", "_time", "environment_id", "stage"])
    |> aggregateWindow(every: myWindowPeriod, fn: mean, createEmpty: true)

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

@@ -0,0 +1,25 @@
import "strings"

result = from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_nomad_idle_runners")
    |> filter(fn: (r) => r["_field"] == "startup_duration")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> keep(columns: ["_value", "_time", "environment_id", "stage"])
    |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)

envMapping = from(bucket: "poseidon/autogen")
    |> range(start: -1y)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
    |> filter(fn: (r) => r["event_type"] == "creation")
    |> group(columns: ["id", "stage"], mode: "by")
    |> last()
    |> keep(columns: ["id", "image", "stage"])
    |> rename(columns: {id: "environment_id"})
    |> map(fn: (r) => ({ r with image: strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_") + "(" + strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + ")" }))

join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
    |> keep(columns: ["_value", "image", "_time"])
    |> group(columns: ["image"], mode: "by")
    |> rename(columns: {_value: ""})

deploy/grafana-dashboard/queries/service-time.flux (new file)
@@ -0,0 +1,7 @@
from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_field"] == "duration")
    |> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> keep(columns: ["_time", "_value", "_measurement"])
    |> aggregateWindow(every: duration(v: int(v: v.windowPeriod) * 10), fn: (tables=<-, column) => tables |> quantile(q: 0.999))

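The anonymous function wrapping quantile is needed because aggregateWindow expects an aggregate that takes a column parameter and cannot pass extra arguments such as q; the wrapper fixes q: 0.999 so each window reports its 99.9th percentile service time.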
deploy/grafana-dashboard/queries/stages.flux (new file)
@@ -0,0 +1,6 @@
from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_field"] == "duration")
    |> keep(columns: ["stage"])
    |> distinct(column: "stage")
    |> keep(columns: ["_value"])

deploy/grafana-dashboard/queries/used-runner.flux (new file)
@@ -0,0 +1,8 @@
from(bucket: "poseidon/autogen")
    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
    |> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
    |> filter(fn: (r) => r["_field"] == "count")
    |> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
    |> group(columns: ["stage"], mode: "by")
    |> keep(columns: ["_value", "_time", "stage"])
    |> aggregateWindow(every: duration(v: int(v: v.windowPeriod) * 5), fn: mean, createEmpty: false)