Refactor environment mapping

so that it is not duplicated 12 times.
This commit is contained in:
Maximilian Paß
2022-11-09 22:28:54 +00:00
parent 77d3f3388e
commit 37ee649675
18 changed files with 34 additions and 197 deletions

View File

@ -7,7 +7,7 @@ from utils.utils import read_query
prewarming_pool_size = BarGauge(
title="Prewarming Pool Size",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("prewarming-pool-size"))],
targets=[InfluxDBTarget(query=read_query("prewarming-pool-size", "environment-mapping"))],
gridPos=GridPos(h=10, w=11, x=0, y=1),
allValues=True,
orientation=ORIENTATION_VERTICAL,
@ -18,7 +18,7 @@ prewarming_pool_size = BarGauge(
idle_runner = TimeSeries(
title="Idle Runner",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("idle-runner"))],
targets=[InfluxDBTarget(query=read_query("idle-runner", "environment-mapping"))],
gridPos=GridPos(h=10, w=13, x=11, y=1),
lineInterpolation="stepAfter",
maxDataPoints=None,
@ -27,7 +27,7 @@ idle_runner = TimeSeries(
runner_startup_duration = TimeSeries(
title="Runner startup duration",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("runner-startup-duration"))],
targets=[InfluxDBTarget(query=read_query("runner-startup-duration", "environment-mapping"))],
gridPos=GridPos(h=10, w=12, x=0, y=11),
unit="ns",
maxDataPoints=None,

View File

@ -61,7 +61,7 @@ currently_used_runners = Stat(
number_of_executions = BarGauge(
title="Number of Executions",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("number-of-executions"))],
targets=[InfluxDBTarget(query=read_query("number-of-executions", "environment-mapping"))],
gridPos=GridPos(h=6, w=8, x=16, y=31),
allValues=True,
orientation=ORIENTATION_VERTICAL,
@ -72,7 +72,7 @@ number_of_executions = BarGauge(
execution_duration = BarGauge(
title="Execution duration",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("execution-duration"))],
targets=[InfluxDBTarget(query=read_query("execution-duration", "environment-mapping"))],
gridPos=GridPos(h=11, w=8, x=0, y=37),
allValues=True,
displayMode=GAUGE_DISPLAY_MODE_GRADIENT,
@ -84,7 +84,7 @@ execution_duration = BarGauge(
executions_per_runner = BarGauge(
title="Executions per runner",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("executions-per-runner"))],
targets=[InfluxDBTarget(query=read_query("executions-per-runner", "environment-mapping"))],
gridPos=GridPos(h=11, w=8, x=8, y=37),
allValues=True,
displayMode=GAUGE_DISPLAY_MODE_GRADIENT,
@ -95,7 +95,7 @@ executions_per_runner = BarGauge(
executions_per_minute = BarGauge(
title="Executions per minute",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("executions-per-minute"))],
targets=[InfluxDBTarget(query=read_query("executions-per-minute", "environment-mapping"))],
gridPos=GridPos(h=11, w=8, x=16, y=37),
allValues=True,
displayMode=GAUGE_DISPLAY_MODE_GRADIENT,

View File

@ -13,7 +13,7 @@ execution_duration_extra_json = {
execution_duration = Histogram(
title="Execution duration",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("execution-duration-hist"))],
targets=[InfluxDBTarget(query=read_query("execution-duration-hist", "environment-mapping"))],
gridPos=GridPos(h=8, w=24, x=0, y=49),
bucketSize=100000000,
colorMode="palette-classic",
@ -26,7 +26,7 @@ execution_duration = Histogram(
executions_per_runner = Histogram(
title="Executions per runner",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("executions-per-runner-hist"))],
targets=[InfluxDBTarget(query=read_query("executions-per-runner-hist", "environment-mapping"))],
gridPos=GridPos(h=10, w=11, x=0, y=57),
bucketSize=1,
colorMode="palette-classic",
@ -38,7 +38,7 @@ executions_per_runner = Histogram(
executions_per_minute = TimeSeries(
title="Executions per minute",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("executions-per-minute-time"))],
targets=[InfluxDBTarget(query=read_query("executions-per-minute-time", "environment-mapping"))],
gridPos=GridPos(h=10, w=13, x=11, y=57),
maxDataPoints=None,
lineInterpolation="smooth",
@ -47,7 +47,7 @@ executions_per_minute = TimeSeries(
request_body_size = TimeSeries(
title="Request Body Size",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("request-body-size"))],
targets=[InfluxDBTarget(query=read_query("request-body-size", "environment-mapping"))],
gridPos=GridPos(h=10, w=11, x=0, y=67),
scaleDistributionType="log",
unit="bytes",
@ -58,7 +58,7 @@ request_body_size = TimeSeries(
runner_per_minute = TimeSeries(
title="Runner per minute",
dataSource="Poseidon",
targets=[InfluxDBTarget(query=read_query("runner-per-minute"))],
targets=[InfluxDBTarget(query=read_query("runner-per-minute", "environment-mapping"))],
gridPos=GridPos(h=10, w=13, x=11, y=67),
maxDataPoints=None,
lineInterpolation="smooth",

View File

@ -0,0 +1,14 @@
// Shared mapping from numeric environment IDs to human-readable Docker image
// names, so dashboard panels can label series by image instead of ID.
// This file is appended to other query files by read_query(), which simply
// concatenates .flux files; the `result` table joined below is expected to be
// defined by the query this snippet is appended to.
// NOTE(review): uses strings.substring/strings.trimPrefix but contains no
// `import "strings"` itself — relies on the preceding query file importing it.
// Confirm every caller's query includes that import.
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
// Only creation events carry the environment's image metadata.
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
// Keep just the most recent creation event per (id, stage).
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
// Build a compact label: first letter of stage + environment id + "/" + image
// name with the registry prefix "openhpi/co_execenv_" stripped.
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
// Blank out the value column name so Grafana shows only the image label.
|> rename(columns: {_value: ""})

View File

@ -10,18 +10,3 @@ result = from(bucket: "poseidon/autogen")
|> keep(columns: ["_time", "_value", "environment_id", "stage"])
|> aggregateWindow(every: v.windowPeriod, fn: mean)
|> map(fn: (r) => ({r with _value: r._value * 3.0})) // Each execution has three requests
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -11,18 +11,3 @@ result = from(bucket: "poseidon/autogen")
|> group(columns: ["environment_id", "stage"])
|> mean()
|> map(fn: (r) => ({r with _value: r._value * 3.0})) // Each execution has three requests
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -10,18 +10,3 @@ result = from(bucket: "poseidon/autogen")
|> group(columns: ["environment_id", "stage"], mode:"by")
|> aggregateWindow(every: 1m, fn: count, createEmpty: true)
|> aggregateWindow(every: duration(v: int(v: v.windowPeriod) * 5), fn: mean, createEmpty: true)
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -1,7 +1,7 @@
import "date"
import "strings"
data = from(bucket: "poseidon/autogen")
result = from(bucket: "poseidon/autogen")
|> range(start: date.truncate(t: v.timeRangeStart, unit: 1m), stop: date.truncate(t: v.timeRangeStop, unit: 1m))
|> filter(fn: (r) => r["_measurement"] == "poseidon_aws_executions" or r["_measurement"] == "poseidon_nomad_executions")
|> filter(fn: (r) => contains(value: r["environment_id"], set: ${environment_ids:json}))
@ -11,18 +11,3 @@ data = from(bucket: "poseidon/autogen")
|> aggregateWindow(every: 1m, fn: count, createEmpty: true)
|> keep(columns: ["_value", "environment_id", "stage"])
|> mean()
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: data, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -20,18 +20,3 @@ executions = data
result = join(tables: {key1: executions, key2: runner_deletions}, on: ["runner_id"], method: "inner")
|> keep(columns: ["_value", "_time", "environment_id", "stage"])
|> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -20,18 +20,3 @@ executions = data
result = join(tables: {key1: executions, key2: runner_deletions}, on: ["runner_id"], method: "inner")
|> keep(columns: ["_value", "environment_id", "stage"])
|> mean()
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -8,18 +8,3 @@ result = from(bucket: "poseidon/autogen")
|> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
|> keep(columns: ["_value", "_time", "environment_id", "stage"])
|> aggregateWindow(every: myWindowPeriod, fn: min, createEmpty: false)
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -9,18 +9,3 @@ result = from(bucket: "poseidon/autogen")
|> group(columns: ["environment_id", "stage"], mode:"by")
|> count()
|> keep(columns: ["_value", "environment_id", "stage"])
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["isDeletion"] == "false")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -8,18 +8,3 @@ result = from(bucket: "poseidon/autogen")
|> group(columns: ["environment_id", "stage"], mode:"by")
|> last()
|> keep(columns: ["_value", "environment_id", "stage"])
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -8,18 +8,3 @@ result = from(bucket: "poseidon/autogen")
|> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
|> keep(columns: ["_time", "_value", "environment_id", "stage"])
|> aggregateWindow(every: myWindowPeriod, fn: mean, createEmpty: false)
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -12,18 +12,3 @@ result = from(bucket: "poseidon/autogen")
|> aggregateWindow(every: 1m, fn: count, createEmpty: true)
|> keep(columns: ["_value", "_time", "environment_id", "stage"])
|> aggregateWindow(every: myWindowPeriod, fn: mean, createEmpty: true)
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -8,18 +8,3 @@ result = from(bucket: "poseidon/autogen")
|> filter(fn: (r) => (not exists r.stage) or contains(value: r["stage"], set: ${stages:json}))
|> keep(columns: ["_value", "_time", "environment_id", "stage"])
|> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)
envMapping = from(bucket: "poseidon/autogen")
|> range(start: -1y)
|> filter(fn: (r) => r["_measurement"] == "poseidon_environments")
|> filter(fn: (r) => r["event_type"] == "creation")
|> group(columns: ["id", "stage"], mode:"by")
|> last()
|> keep(columns: ["id", "image", "stage"])
|> rename(columns: {id: "environment_id"})
|> map(fn: (r) => ({ r with image: strings.substring(v: r.stage, start: 0, end: 1) + r.environment_id + "/" + strings.trimPrefix(v: r.image, prefix: "openhpi/co_execenv_")}))
join(tables: {key1: result, key2: envMapping}, on: ["environment_id", "stage"], method: "inner")
|> keep(columns: ["_value", "image", "_time"])
|> group(columns: ["image"], mode: "by")
|> rename(columns: {_value: ""})

View File

@ -1,3 +1,6 @@
def read_query(name):
with open("queries/" + name + ".flux", "r") as file:
return file.read()
def read_query(*names):
    """Read one or more Flux query files and concatenate their contents.

    Each name is resolved to ``queries/<name>.flux``. Files are concatenated
    in the given order; a newline separator is inserted between files when
    the preceding content does not already end with one, so two query
    snippets can never fuse onto a single (invalid) Flux line.

    :param names: base names of the query files, without directory or extension
    :return: the combined query text
    :raises FileNotFoundError: if any named query file does not exist
    """
    result = ""
    for name in names:
        # Guard against query files missing a trailing newline: without this,
        # the next file's first statement would merge into the previous line.
        if result and not result.endswith("\n"):
            result += "\n"
        with open("queries/" + name + ".flux", "r") as file:
            result += file.read()
    return result