Separate Poseidon Dashboard Generation

Maximilian Paß
2023-01-12 18:07:25 +00:00
parent 2db66f4b97
commit 0c9ffea369
29 changed files with 61 additions and 59 deletions

View File

@@ -1,13 +1,15 @@
-[
-  "p10/java:8-antlr",
-  "p28/r:4",
-  "p29/python:3.8",
-  "p31/java:17",
-  "p33/openhpi/docker_exec_phusion",
-  "p11/java:8-antlr",
-  "p14/python:3.4",
-  "p18/node:0.12",
-  "p22/python:3.4-rpi-web",
-  "p25/ruby:2.5",
-  "p30/python:3.7-ml"
+{
+  "executionEnvironments": [
+    {"image": "openhpi/co_execenv_java:8-antlr", "id": 10},
+    {"image": "openhpi/co_execenv_r:4", "id": 28},
+    {"image": "openhpi/co_execenv_python:3.8", "id": 29},
+    {"image": "openhpi/co_execenv_java:17", "id": 31},
+    {"image": "openhpi/docker_exec_phusion", "id": 33},
+    {"image": "openhpi/co_execenv_java:8-antlr", "id": 11},
+    {"image": "openhpi/co_execenv_python:3.4", "id": 14},
+    {"image": "openhpi/co_execenv_node:0.12", "id": 18},
+    {"image": "openhpi/co_execenv_python:3.4-rpi-web", "id": 22},
+    {"image": "openhpi/co_execenv_ruby:2.5", "id": 25},
+    {"image": "openhpi/co_execenv_python:3.7-ml", "id": 30}
 ]
+}
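The new format replaces the terse "p<id>/<image>" strings with explicit objects, so consumers no longer have to parse the id out of a prefix. A minimal sketch of how a generator script might load this file (the file name and helper are hypothetical; the commit does not show the consuming code):

import json

def load_environment_mapping(path="environments.json"):
    """Map each environment id to its Docker image, per the new config format."""
    with open(path) as file:
        config = json.load(file)
    return {env["id"]: env["image"] for env in config["executionEnvironments"]}

# load_environment_mapping() -> {10: "openhpi/co_execenv_java:8-antlr", 28: "openhpi/co_execenv_r:4", ...}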

View File

@@ -7,7 +7,7 @@ from utils.utils import read_query
prewarming_pool_size = BarGauge(
title="Prewarming Pool Size",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("prewarming-pool-size", "environment-mapping"))],
gridPos=GridPos(h=10, w=11, x=0, y=1),
allValues=True,
@@ -19,7 +19,7 @@ prewarming_pool_size = BarGauge(
idle_runner = TimeSeries(
title="Idle Runner",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("idle-runner", "environment-mapping"))],
gridPos=GridPos(h=10, w=13, x=11, y=1),
lineInterpolation="stepAfter",
@@ -29,7 +29,7 @@ idle_runner = TimeSeries(
runner_startup_duration = TimeSeries(
title="Runner startup duration",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("runner-startup-duration", "environment-mapping"))],
gridPos=GridPos(h=10, w=12, x=0, y=11),
unit="ns",
@@ -40,7 +40,7 @@ runner_startup_duration = TimeSeries(
used_runner = TimeSeries(
title="Used Runner",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("used-runner"))],
gridPos=GridPos(h=10, w=12, x=12, y=11),
maxDataPoints=None,
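Each panel above obtains its Flux source through `read_query` from `utils.utils`, called with one or more query names (for example `read_query("idle-runner", "environment-mapping")`). The helper itself is not part of this diff; a plausible sketch, assuming it simply concatenates the named .flux files from a query directory:

from pathlib import Path

QUERY_DIR = Path(__file__).parent / "queries"  # assumed location of the .flux files

def read_query(*names: str) -> str:
    """Concatenate the named Flux query files into a single query string."""
    return "\n".join((QUERY_DIR / f"{name}.flux").read_text() for name in names)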

View File

@@ -7,7 +7,7 @@ from utils.utils import read_query
requests_per_minute = TimeSeries(
title="Requests per minute",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("requests-per-minute"))],
gridPos=GridPos(h=9, w=8, x=0, y=22),
scaleDistributionType="log",
@@ -17,7 +17,7 @@ requests_per_minute = TimeSeries(
request_latency = Heatmap(
title="Request Latency",
-dataSource="Poseidon",
+dataSource="Flux",
dataFormat="timeseries",
targets=[InfluxDBTarget(query=read_query("request-latency"))],
gridPos=GridPos(h=9, w=8, x=8, y=22),
@@ -32,7 +32,7 @@ request_latency = Heatmap(
service_time = TimeSeries(
title="Service time (99.9%)",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("service-time"))],
gridPos=GridPos(h=9, w=8, x=16, y=22),
scaleDistributionType="log",
@@ -44,7 +44,7 @@ service_time = TimeSeries(
current_environment_count = Stat(
title="Current environment count",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("current-environment-count"))],
gridPos=GridPos(h=6, w=8, x=0, y=31),
alignment="center",
@@ -52,7 +52,7 @@ current_environment_count = Stat(
currently_used_runners = Stat(
title="Currently used runners",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("currently-used-runners"))],
gridPos=GridPos(h=6, w=8, x=8, y=31),
alignment="center",
@@ -60,7 +60,7 @@ currently_used_runners = Stat(
number_of_executions = BarGauge(
title="Number of Executions",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("number-of-executions", "environment-mapping"))],
gridPos=GridPos(h=6, w=8, x=16, y=31),
allValues=True,
@@ -72,7 +72,7 @@ number_of_executions = BarGauge(
execution_duration = BarGauge(
title="Execution duration",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("execution-duration", "environment-mapping"))],
gridPos=GridPos(h=11, w=8, x=0, y=37),
allValues=True,
@@ -85,7 +85,7 @@ execution_duration = BarGauge(
executions_per_runner = BarGauge(
title="Executions per runner",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("executions-per-runner", "environment-mapping"))],
gridPos=GridPos(h=11, w=8, x=8, y=37),
allValues=True,
@@ -97,7 +97,7 @@ executions_per_minute = BarGauge(
executions_per_minute = BarGauge(
title="Executions per minute",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("executions-per-minute", "environment-mapping"))],
gridPos=GridPos(h=11, w=8, x=16, y=37),
allValues=True,
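All of these panels now reference a Grafana data source named "Flux" rather than "Poseidon". For orientation, a grafanalib module would typically collect such panels into a dashboard roughly like this (illustrative assembly only; the actual dashboard module is not shown in this diff):

from grafanalib.core import Dashboard, Templating

dashboard = Dashboard(
    title="Poseidon",  # hypothetical title
    panels=[requests_per_minute, request_latency, service_time, current_environment_count],
    templating=Templating(list=[stage_variable, environment_variable]),
).auto_panel_ids()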

View File

@@ -15,7 +15,7 @@ execution_duration_extra_json = {
deep_update_dict(execution_duration_extra_json, color_mapping_environments)
execution_duration = Histogram(
title="Execution duration",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("execution-duration-hist", "environment-mapping"))],
gridPos=GridPos(h=8, w=24, x=0, y=49),
bucketSize=100000000,
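`deep_update_dict`, used above to merge the environment color mapping into the panel's extra JSON, is defined elsewhere in the repository. A plausible recursive-merge sketch (hypothetical; the real implementation is not part of this commit):

def deep_update_dict(base: dict, update: dict) -> None:
    """Recursively merge `update` into `base` in place, descending into nested dicts."""
    for key, value in update.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            deep_update_dict(base[key], value)
        else:
            base[key] = value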
@@ -28,7 +28,7 @@ execution_duration = Histogram(
executions_per_runner = Histogram(
title="Executions per runner",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("executions-per-runner-hist", "environment-mapping"))],
gridPos=GridPos(h=10, w=11, x=0, y=57),
bucketSize=1,
@@ -41,7 +41,7 @@ executions_per_runner = Histogram(
executions_per_minute = TimeSeries(
title="Executions per minute",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("executions-per-minute-time", "environment-mapping"))],
gridPos=GridPos(h=10, w=13, x=11, y=57),
maxDataPoints=None,
@@ -51,7 +51,7 @@ executions_per_minute = TimeSeries(
file_upload = TimeSeries(
title="File Upload",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("file-upload", "environment-mapping"))],
gridPos=GridPos(h=10, w=11, x=0, y=67),
scaleDistributionType="log",
@@ -63,7 +63,7 @@ file_upload = TimeSeries(
runner_per_minute = TimeSeries(
title="Runner per minute",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("runner-per-minute", "environment-mapping"))],
gridPos=GridPos(h=10, w=13, x=11, y=67),
maxDataPoints=None,
@@ -73,7 +73,7 @@ runner_per_minute = TimeSeries(
file_download = TimeSeries(
title="File Download",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("file-download", "environment-mapping"))],
gridPos=GridPos(h=10, w=11, x=0, y=77),
scaleDistributionType="log",
@@ -85,7 +85,7 @@ file_download = TimeSeries(
file_download_ratio = BarGauge(
title="File Download Ratio",
-dataSource="Poseidon",
+dataSource="Flux",
targets=[InfluxDBTarget(query=read_query("file-download-ratio", "environment-mapping"))],
gridPos=GridPos(h=10, w=13, x=11, y=77),
max=1,

View File

@@ -3,7 +3,7 @@ import "date"
// The date truncation is needed because Poseidon sends all of its influx events at the same instant on startup, so "last" would otherwise pick a random value instead of the latest one.
// Since the highest value is the correct one during this startup burst, we take the highest value of the most recent events.
-data = from(bucket: "poseidon/autogen")
+data = from(bucket: "poseidon")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> group(columns: ["stage"], mode:"by")

View File

@@ -1,4 +1,4 @@
-from(bucket: "poseidon/autogen")
+from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
|> filter(fn: (r) => r["_field"] == "count")

View File

@@ -1,4 +1,4 @@
-from(bucket: "poseidon/autogen")
+from(bucket: "poseidon")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))

View File

@@ -1,4 +1,4 @@
-envMapping = from(bucket: "poseidon/autogen")
+envMapping = from(bucket: "poseidon")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")

View File

@@ -1,6 +1,6 @@
import "strings"
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_field"] == "duration")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,6 +1,6 @@
import "strings"
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_field"] == "duration")
|> filter(fn: (r) => r["_measurement"] == "poseidon_/execute" or r["_measurement"] == "poseidon_/files" or r["_measurement"] == "poseidon_/websocket")

View File

@@ -1,7 +1,7 @@
import "strings"
import "date"
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
|> filter(fn: (r) => r["_measurement"] == "poseidon_aws_executions" or r["_measurement"] == "poseidon_nomad_executions")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,7 +1,7 @@
import "date"
import "strings"
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
|> filter(fn: (r) => r["_measurement"] == "poseidon_aws_executions" or r["_measurement"] == "poseidon_nomad_executions")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,6 +1,6 @@
import "strings"
-data = from(bucket: "poseidon/autogen")
+data = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))

View File

@@ -1,6 +1,6 @@
import "strings"
-data = from(bucket: "poseidon/autogen")
+data = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))

View File

@@ -1,7 +1,7 @@
import "strings"
myWindowPeriod = if int(v: v.windowPeriod) > int(v: 1m) then duration(v: int(v: v.windowPeriod) * 10) else duration(v: int(v: v.windowPeriod) * 5)
-data = from(bucket: "poseidon/autogen")
+data = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "poseidon_file_download")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
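The `myWindowPeriod` expression above coarsens the aggregation window when the dashboard covers a long time range: Flux's `int()` converts a duration to nanoseconds, so once Grafana's computed `v.windowPeriod` exceeds one minute the window is widened tenfold, otherwise fivefold. A rough Python analogue of the arithmetic (illustrative only):

ONE_MINUTE_NS = 60 * 10**9  # Flux's int() yields duration values in nanoseconds

def my_window_period(window_period_ns: int) -> int:
    """Widen the aggregation window 10x for long time ranges, 5x otherwise."""
    return window_period_ns * (10 if window_period_ns > ONE_MINUTE_NS else 5)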

View File

@@ -1,7 +1,7 @@
import "strings"
myWindowPeriod = if int(v: v.windowPeriod) > int(v: 1m) then duration(v: int(v: v.windowPeriod) * 100) else duration(v: int(v: v.windowPeriod) * 5)
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "poseidon_file_download")
|> filter(fn: (r) => r["_field"] == "actual_length")

View File

@@ -1,7 +1,7 @@
import "strings"
myWindowPeriod = if int(v: v.windowPeriod) > int(v: 1m) then duration(v: int(v: v.windowPeriod) * 10) else duration(v: int(v: v.windowPeriod) * 5)
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_field"] == "request_size")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,7 +1,7 @@
import "strings"
myWindowPeriod = if int(v: v.windowPeriod) >= int(v: 30s) then duration(v: int(v: v.windowPeriod) * 5) else v.windowPeriod
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "poseidon_nomad_idle_runners" and r["_field"] == "count")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,6 +1,6 @@
import "strings"
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "poseidon_aws_executions" or r["_measurement"] == "poseidon_nomad_executions")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,6 +1,6 @@
import "strings"
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_poolsize")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,4 +1,4 @@
-from(bucket: "poseidon/autogen")
+from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_field"] == "duration")
|> filter(fn: (r) => (not exists r.environment_id) or contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,6 +1,6 @@
import "date"
-data = from(bucket: "poseidon/autogen")
+data = from(bucket: "poseidon")
|> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
|> filter(fn: (r) => r._field == "duration")
|> filter(fn: (r) => (not exists r.environment_id) or contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -2,7 +2,7 @@ import "strings"
import "date"
myWindowPeriod = if int(v: v.windowPeriod) > int(v: 2m) then duration(v: int(v: v.windowPeriod) * 30) else duration(v: int(v: v.windowPeriod) * 15)
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
|> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,6 +1,6 @@
import "strings"
-result = from(bucket: "poseidon/autogen")
+result = from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "poseidon_nomad_idle_runners")
|> filter(fn: (r) => r["_field"] == "startup_duration")

View File

@@ -1,4 +1,4 @@
-from(bucket: "poseidon/autogen")
+from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_field"] == "duration")
|> filter(fn: (r) => (not exists r.environment_id) or contains(value: r["environment_id"], set: ${environment_ids:json}))

View File

@@ -1,4 +1,4 @@
-from(bucket: "poseidon/autogen")
+from(bucket: "poseidon")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> keep(columns: ["stage"])

View File

@@ -1,4 +1,4 @@
-from(bucket: "poseidon/autogen")
+from(bucket: "poseidon")
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "poseidon_used_runners")
|> filter(fn: (r) => r["_field"] == "count")

View File

@@ -3,7 +3,7 @@ from grafanalib.core import Template
from utils.utils import read_query
stage_variable = Template(
-dataSource="Poseidon",
+dataSource="Flux",
label="Stage",
name="stages",
query=read_query("stages"),
@@ -14,7 +14,7 @@ stage_variable = Template(
)
environment_variable = Template(
-dataSource="Poseidon",
+dataSource="Flux",
label="Environment IDs",
name="environment_ids",
query=read_query("environment-ids"),
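These two variables render into the `${stages:json}` and `${environment_ids:json}` placeholders seen in the Flux queries above: Grafana's `:json` format serializes the selected values as a JSON array before the query is sent. A small illustration of the expansion (the substitution itself happens inside Grafana, not in this code base):

import json

selected_ids = ["10", "28", "33"]  # hypothetical values picked in the dashboard UI
template = 'contains(value: r["environment_id"], set: ${environment_ids:json})'
rendered = template.replace("${environment_ids:json}", json.dumps(selected_ids))
# rendered == 'contains(value: r["environment_id"], set: ["10", "28", "33"])'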