#!/usr/bin/env python3
"""Generate OpenSearch Dashboards saved objects and render them into a ConfigMap.

Usage:
    scripts/dashboards_render_logs.py --build   # rebuild NDJSON + ConfigMap
    scripts/dashboards_render_logs.py           # re-render ConfigMap from NDJSON
"""

from __future__ import annotations

import argparse
import json
import textwrap
from dataclasses import dataclass
from pathlib import Path

ROOT = Path(__file__).resolve().parents[1]
DASHBOARD_DIR = ROOT / "services" / "logging" / "dashboards"
NDJSON_PATH = DASHBOARD_DIR / "logs.ndjson"
CONFIG_PATH = ROOT / "services" / "logging" / "opensearch-dashboards-objects.yaml"

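# Template for the rendered ConfigMap. The NDJSON payload is substituted into the
# `objects.ndjson: |` block scalar by render_configmap(), which indents each line so
# it nests under the key. Illustrative shape of the output (not verbatim):
#   data:
#     objects.ndjson: |
#       {"type":"index-pattern","id":"kube-logs",...}
#       {"type":"visualization","id":"logs-overview-volume",...}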
CONFIG_TEMPLATE = textwrap.dedent(
    """# {relative_path}
# Generated by scripts/dashboards_render_logs.py --build
apiVersion: v1
kind: ConfigMap
metadata:
  name: opensearch-dashboards-objects
  namespace: logging
data:
  objects.ndjson: |
{payload}
"""
)

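# Layout constants for the generated dashboards: panels are placed on the standard
# 48-column dashboard grid, with heights expressed in grid rows. DASHBOARD_VERSION is
# the version string stamped onto each panel entry; OpenSearch Dashboards uses the
# saved-object format it inherited from Kibana 7.10, so "7.10.0" appears to be the
# intended target.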
DASHBOARD_VERSION = "7.10.0"

GRID_COLUMNS = 48
GRID_HALF = GRID_COLUMNS // 2
H_CHART = 10
H_ERRORS = 8
H_TABLE = 12
H_SEARCH = 14

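# Wildcard fragments OR-ed together by error_query() across the `log` and `message` fields.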
ERROR_TERMS = ("*error*", "*exception*", "*fail*")


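# One entry per application dashboard. `slug` becomes the saved-object id prefix,
# `query` is the KQL filter scoping the app's logs, `index_id` picks the index pattern,
# and `kind` selects the kube layout (pods/containers tables) or the journald layout
# (systemd units table).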
@dataclass(frozen=True)
class AppSpec:
    slug: str
    title: str
    query: str
    index_id: str = "kube-logs"
    kind: str = "kube"


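# Builds the KQL error filter, optionally scoped to a base query. Illustrative result:
#   error_query('kubernetes.namespace_name: "gitea"')
#   -> '(kubernetes.namespace_name: "gitea") and ((log : "*error*" or message : "*error*") or ...)'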
def error_query(base: str | None = None) -> str:
    parts = [f'(log : "{term}" or message : "{term}")' for term in ERROR_TERMS]
    expr = " or ".join(parts)
    if base:
        return f"({base}) and ({expr})"
    return f"({expr})"


def json_line(obj: dict) -> str:
    return json.dumps(obj, separators=(",", ":"))


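# Common searchSourceJSON body. The index pattern is not inlined; it is attached via
# `indexRefName`, which the builders below resolve by adding a matching entry to the
# saved object's `references` list.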
def search_source(query: str) -> dict:
    return {
        "query": {"language": "kuery", "query": query},
        "filter": [],
        "indexRefName": "kibanaSavedObjectMeta.searchSourceJSON.index",
    }


def index_pattern(object_id: str, title: str, time_field: str = "@timestamp") -> dict:
    return {
        "type": "index-pattern",
        "id": object_id,
        "attributes": {"title": title, "timeFieldName": time_field},
    }


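# Date-histogram visualization: document count over @timestamp for the given query.
# visState and searchSourceJSON are stored as JSON-encoded strings inside the saved
# object, which is the shape the saved-objects import expects.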
def histogram_vis(object_id: str, title: str, query: str, index_id: str) -> dict:
    vis_state = {
        "title": title,
        "type": "histogram",
        "aggs": [
            {"id": "1", "enabled": True, "type": "count", "schema": "metric"},
            {
                "id": "2",
                "enabled": True,
                "type": "date_histogram",
                "schema": "segment",
                "params": {"field": "@timestamp", "interval": "auto", "min_doc_count": 1},
            },
        ],
        "params": {"addTooltip": True, "addLegend": False, "scale": "linear", "interpolate": "linear"},
    }
    return {
        "type": "visualization",
        "id": object_id,
        "attributes": {
            "title": title,
            "visState": json.dumps(vis_state, separators=(",", ":")),
            "uiStateJSON": "{}",
            "description": "",
            "version": 1,
            "kibanaSavedObjectMeta": {
                "searchSourceJSON": json.dumps(search_source(query), separators=(",", ":"))
            },
        },
        "references": [
            {
                "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
                "type": "index-pattern",
                "id": index_id,
            }
        ],
    }


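# Terms-table visualization: top 10 values of `field` by document count.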
def table_vis(object_id: str, title: str, field: str, query: str, index_id: str) -> dict:
    vis_state = {
        "title": title,
        "type": "table",
        "aggs": [
            {"id": "1", "enabled": True, "type": "count", "schema": "metric"},
            {
                "id": "2",
                "enabled": True,
                "type": "terms",
                "schema": "bucket",
                "params": {"field": field, "size": 10, "order": "desc", "orderBy": "1"},
            },
        ],
        "params": {
            "perPage": 10,
            "showPartialRows": False,
            "showMetricsAtAllLevels": False,
            "sort": {"columnIndex": 1, "direction": "desc"},
        },
    }
    return {
        "type": "visualization",
        "id": object_id,
        "attributes": {
            "title": title,
            "visState": json.dumps(vis_state, separators=(",", ":")),
            "uiStateJSON": "{}",
            "description": "",
            "version": 1,
            "kibanaSavedObjectMeta": {
                "searchSourceJSON": json.dumps(search_source(query), separators=(",", ":"))
            },
        },
        "references": [
            {
                "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
                "type": "index-pattern",
                "id": index_id,
            }
        ],
    }


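# Saved search (Discover-style table) with explicit columns, sorted newest-first.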
def search_object(object_id: str, title: str, columns: list[str], query: str, index_id: str) -> dict:
    return {
        "type": "search",
        "id": object_id,
        "attributes": {
            "title": title,
            "description": "",
            "columns": columns,
            "sort": [["@timestamp", "desc"]],
            "kibanaSavedObjectMeta": {
                "searchSourceJSON": json.dumps(search_source(query), separators=(",", ":"))
            },
        },
        "references": [
            {
                "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
                "type": "index-pattern",
                "id": index_id,
            }
        ],
    }


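# Panel geometry helpers for panelsJSON: x/w are measured in grid columns (48 wide),
# y/h in grid rows, and `i`/`panelIndex` ties a grid cell to its panel entry.
# For example, grid(0, 0, GRID_COLUMNS, H_CHART, 1) is a full-width chart at the top.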
def grid(x: int, y: int, w: int, h: int, i: int) -> dict:
    return {"x": x, "y": y, "w": w, "h": h, "i": str(i)}


def panel(panel_id: str, panel_type: str, grid_data: dict, index: int) -> dict:
    return {
        "panelIndex": str(index),
        "gridData": grid_data,
        "id": panel_id,
        "type": panel_type,
        "version": DASHBOARD_VERSION,
        "embeddableConfig": {},
    }


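# Dashboard saved object wrapping a list of panel entries; panelsJSON and optionsJSON
# are serialized to JSON strings, matching the other builders.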
def dashboard_object(object_id: str, title: str, panels: list[dict]) -> dict:
    return {
        "type": "dashboard",
        "id": object_id,
        "attributes": {
            "title": title,
            "description": "",
            "hits": 0,
            "panelsJSON": json.dumps(panels, separators=(",", ":")),
            "optionsJSON": json.dumps({"useMargins": True, "hidePanelTitles": False}, separators=(",", ":")),
            "version": 1,
            "timeRestore": False,
            "kibanaSavedObjectMeta": {
                "searchSourceJSON": json.dumps({"query": {"language": "kuery", "query": ""}, "filter": []})
            },
        },
    }


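# Per-app bundle: volume and error histograms, top-N tables, recent/error saved searches,
# and a dashboard stitching them together. journald-backed apps get a systemd-units table
# and a five-panel layout; kube-backed apps get pods/containers tables and six panels.
# Saved-object ids are derived from the slug, so each app's objects stay stable across rebuilds.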
def app_dashboard_objects(app: AppSpec) -> list[dict]:
    prefix = f"logs-{app.slug}"
    objects = []

    if app.kind == "journald":
        columns = ["@timestamp", "_HOSTNAME", "_SYSTEMD_UNIT", "MESSAGE"]
        objects.append(histogram_vis(f"{prefix}-volume", f"{app.title} logs", app.query, app.index_id))
        objects.append(histogram_vis(f"{prefix}-errors", f"{app.title} errors", error_query(app.query), app.index_id))
        objects.append(table_vis(f"{prefix}-top-units", "Top units", "_SYSTEMD_UNIT.keyword", app.query, app.index_id))
        objects.append(search_object(f"{prefix}-recent", "Recent logs", columns, app.query, app.index_id))
        objects.append(
            search_object(
                f"{prefix}-recent-errors",
                "Recent errors",
                columns,
                error_query(app.query),
                app.index_id,
            )
        )
        panels = [
            panel(f"{prefix}-volume", "visualization", grid(0, 0, GRID_COLUMNS, H_CHART, 1), 1),
            panel(f"{prefix}-errors", "visualization", grid(0, H_CHART, GRID_COLUMNS, H_ERRORS, 2), 2),
            panel(
                f"{prefix}-top-units",
                "visualization",
                grid(0, H_CHART + H_ERRORS, GRID_HALF, H_TABLE, 3),
                3,
            ),
            panel(
                f"{prefix}-recent",
                "search",
                grid(GRID_HALF, H_CHART + H_ERRORS, GRID_HALF, H_TABLE, 4),
                4,
            ),
            panel(
                f"{prefix}-recent-errors",
                "search",
                grid(0, H_CHART + H_ERRORS + H_TABLE, GRID_COLUMNS, H_SEARCH, 5),
                5,
            ),
        ]
        objects.append(dashboard_object(prefix, f"{app.title} Logs", panels))
        return objects

    columns = ["@timestamp", "kubernetes.pod_name", "kubernetes.container_name", "log", "message"]
    objects.append(histogram_vis(f"{prefix}-volume", f"{app.title} logs", app.query, app.index_id))
    objects.append(histogram_vis(f"{prefix}-errors", f"{app.title} errors", error_query(app.query), app.index_id))
    objects.append(table_vis(f"{prefix}-top-pods", "Top pods", "kubernetes.pod_name.keyword", app.query, app.index_id))
    objects.append(
        table_vis(f"{prefix}-top-containers", "Top containers", "kubernetes.container_name.keyword", app.query, app.index_id)
    )
    objects.append(search_object(f"{prefix}-recent", "Recent logs", columns, app.query, app.index_id))
    objects.append(
        search_object(
            f"{prefix}-recent-errors",
            "Recent errors",
            columns,
            error_query(app.query),
            app.index_id,
        )
    )
    panels = [
        panel(f"{prefix}-volume", "visualization", grid(0, 0, GRID_COLUMNS, H_CHART, 1), 1),
        panel(f"{prefix}-errors", "visualization", grid(0, H_CHART, GRID_COLUMNS, H_ERRORS, 2), 2),
        panel(
            f"{prefix}-top-pods",
            "visualization",
            grid(0, H_CHART + H_ERRORS, GRID_HALF, H_TABLE, 3),
            3,
        ),
        panel(
            f"{prefix}-top-containers",
            "visualization",
            grid(GRID_HALF, H_CHART + H_ERRORS, GRID_HALF, H_TABLE, 4),
            4,
        ),
        panel(
            f"{prefix}-recent",
            "search",
            grid(0, H_CHART + H_ERRORS + H_TABLE, GRID_HALF, H_SEARCH, 5),
            5,
        ),
        panel(
            f"{prefix}-recent-errors",
            "search",
            grid(GRID_HALF, H_CHART + H_ERRORS + H_TABLE, GRID_HALF, H_SEARCH, 6),
            6,
        ),
    ]
    objects.append(dashboard_object(prefix, f"{app.title} Logs", panels))
    return objects


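# Cluster-wide overview: volume/error histograms plus top namespaces, pods, and nodes
# over the kube-logs index pattern, with a recent-errors saved search at the bottom.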
def overview_objects() -> list[dict]:
    objects = []
    objects.append(histogram_vis("logs-overview-volume", "Logs per minute", "*", "kube-logs"))
    objects.append(histogram_vis("logs-overview-errors", "Errors per minute", error_query(), "kube-logs"))
    objects.append(
        table_vis(
            "logs-overview-top-ns",
            "Top namespaces",
            "kubernetes.namespace_name.keyword",
            "*",
            "kube-logs",
        )
    )
    objects.append(
        table_vis(
            "logs-overview-top-error-ns",
            "Top error namespaces",
            "kubernetes.namespace_name.keyword",
            error_query(),
            "kube-logs",
        )
    )
    objects.append(table_vis("logs-overview-top-pods", "Top pods", "kubernetes.pod_name.keyword", "*", "kube-logs"))
    objects.append(
        table_vis(
            "logs-overview-top-nodes",
            "Top nodes",
            "kubernetes.node_name.keyword",
            "*",
            "kube-logs",
        )
    )
    objects.append(
        search_object(
            "logs-overview-recent-errors",
            "Recent errors",
            ["@timestamp", "kubernetes.namespace_name", "kubernetes.pod_name", "log", "message"],
            error_query(),
            "kube-logs",
        )
    )
    panels = [
        panel("logs-overview-volume", "visualization", grid(0, 0, GRID_COLUMNS, H_CHART, 1), 1),
        panel("logs-overview-errors", "visualization", grid(0, H_CHART, GRID_COLUMNS, H_ERRORS, 2), 2),
        panel(
            "logs-overview-top-ns",
            "visualization",
            grid(0, H_CHART + H_ERRORS, GRID_HALF, H_TABLE, 3),
            3,
        ),
        panel(
            "logs-overview-top-error-ns",
            "visualization",
            grid(GRID_HALF, H_CHART + H_ERRORS, GRID_HALF, H_TABLE, 4),
            4,
        ),
        panel(
            "logs-overview-top-pods",
            "visualization",
            grid(0, H_CHART + H_ERRORS + H_TABLE, GRID_HALF, H_TABLE, 5),
            5,
        ),
        panel(
            "logs-overview-top-nodes",
            "visualization",
            grid(GRID_HALF, H_CHART + H_ERRORS + H_TABLE, GRID_HALF, H_TABLE, 6),
            6,
        ),
        panel(
            "logs-overview-recent-errors",
            "search",
            grid(0, H_CHART + H_ERRORS + (H_TABLE * 2), GRID_COLUMNS, H_SEARCH, 7),
            7,
        ),
    ]
    objects.append(dashboard_object("logs-overview", "Atlas Logs Overview", panels))
    return objects


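# Full saved-object set: the two index patterns, the overview dashboard, and one
# dashboard bundle per AppSpec. Adding a new application dashboard means appending an
# AppSpec here and re-running with --build.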
def build_objects() -> list[dict]:
    objects = [
        index_pattern("kube-logs", "kube-*"),
        index_pattern("journald-logs", "journald-*"),
    ]

    objects.extend(overview_objects())

    apps = [
        AppSpec("bstein-dev-home", "bstein-dev-home", 'kubernetes.namespace_name: "bstein-dev-home"'),
        AppSpec(
            "pegasus",
            "pegasus",
            'kubernetes.namespace_name: "jellyfin" and kubernetes.labels.app: "pegasus"',
        ),
        AppSpec(
            "jellyfin",
            "jellyfin",
            'kubernetes.namespace_name: "jellyfin" and kubernetes.labels.app: "jellyfin"',
        ),
        AppSpec("vaultwarden", "vaultwarden", 'kubernetes.namespace_name: "vaultwarden"'),
        AppSpec("mailu", "mailu", 'kubernetes.namespace_name: "mailu-mailserver"'),
        AppSpec("nextcloud", "nextcloud", 'kubernetes.namespace_name: "nextcloud"'),
        AppSpec("gitea", "gitea", 'kubernetes.namespace_name: "gitea"'),
        AppSpec("jenkins", "jenkins", 'kubernetes.namespace_name: "jenkins"'),
        AppSpec("harbor", "harbor", 'kubernetes.namespace_name: "harbor"'),
        AppSpec("vault", "vault", 'kubernetes.namespace_name: "vault"'),
        AppSpec("keycloak", "keycloak", 'kubernetes.namespace_name: "sso"'),
        AppSpec("flux-system", "flux-system", 'kubernetes.namespace_name: "flux-system"'),
        AppSpec("comms", "comms", 'kubernetes.namespace_name: "comms"'),
        AppSpec(
            "element-web",
            "element-web",
            'kubernetes.namespace_name: "comms" and kubernetes.container_name: "element-web"',
        ),
        AppSpec(
            "element-call",
            "element-call",
            'kubernetes.namespace_name: "comms" and kubernetes.labels.app: "element-call"',
        ),
        AppSpec(
            "matrix-synapse",
            "matrix-synapse",
            'kubernetes.namespace_name: "comms" and kubernetes.container_name: "synapse"',
        ),
        AppSpec(
            "livekit",
            "livekit",
            'kubernetes.namespace_name: "comms" and kubernetes.labels.app: "livekit"',
        ),
        AppSpec(
            "coturn",
            "coturn",
            'kubernetes.namespace_name: "comms" and kubernetes.labels.app: "coturn"',
        ),
        AppSpec("lesavka", "lesavka", '_HOSTNAME: "titan-jh"', index_id="journald-logs", kind="journald"),
    ]

    for app in apps:
        objects.extend(app_dashboard_objects(app))

    return objects


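# Serialization and rendering: the NDJSON file holds one compact JSON object per line
# (the saved-objects import format); render_configmap() re-indents those lines so they
# nest under the `objects.ndjson: |` block scalar in the ConfigMap template.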
def write_ndjson(objects: list[dict], path: Path) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    payload = "\n".join(json_line(obj) for obj in objects)
    path.write_text(payload + "\n")


def render_configmap(ndjson_path: Path, output_path: Path) -> None:
    payload_lines = ndjson_path.read_text().splitlines()
    payload = "\n".join("    " + line for line in payload_lines)
    relative_path = output_path.relative_to(ROOT)
    output_path.write_text(CONFIG_TEMPLATE.format(relative_path=relative_path, payload=payload))


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("--build", action="store_true", help="Regenerate saved object NDJSON and ConfigMap")
    args = parser.parse_args()

    if args.build:
        objects = build_objects()
        write_ndjson(objects, NDJSON_PATH)

    if not NDJSON_PATH.exists():
        raise SystemExit(f"Missing NDJSON file: {NDJSON_PATH}. Run with --build first.")

    render_configmap(NDJSON_PATH, CONFIG_PATH)


if __name__ == "__main__":
    main()