feat: add structured logging and schedule glue jobs

This commit is contained in:
Brad Stein 2026-01-20 03:01:22 -03:00
parent f0f5164942
commit ef912df950
11 changed files with 484 additions and 2 deletions

View File

@ -13,18 +13,24 @@ from .db.database import Database
from .db.storage import Storage
from .manager.provisioning import ProvisioningManager
from .scheduler.cron import CronScheduler
from .services.comms import comms
from .services.firefly import firefly
from .services.keycloak_admin import keycloak_admin
from .services.mailu import mailu
from .services.nextcloud import nextcloud
from .services.vaultwarden_sync import run_vaultwarden_sync
from .services.vault import vault
from .services.wger import wger
from .settings import settings
from .utils.errors import safe_error_detail
from .utils.http import extract_bearer_token
from .utils.logging import LogConfig, configure_logging, get_logger
from .utils.passwords import random_password
configure_logging(LogConfig(level=settings.log_level))
logger = get_logger(__name__)
db = Database(settings.portal_database_url)
storage = Storage(db)
provisioning = ProvisioningManager(db, storage)
@ -73,7 +79,53 @@ def _startup() -> None:
)
scheduler.add_task("schedule.vaultwarden_sync", settings.vaultwarden_sync_cron, run_vaultwarden_sync)
scheduler.add_task("schedule.wger_admin", settings.wger_admin_cron, lambda: wger.ensure_admin(wait=False))
scheduler.add_task(
"schedule.vault_k8s_auth",
settings.vault_k8s_auth_cron,
lambda: vault.sync_k8s_auth(wait=True),
)
scheduler.add_task(
"schedule.vault_oidc",
settings.vault_oidc_cron,
lambda: vault.sync_oidc(wait=True),
)
scheduler.add_task(
"schedule.comms_guest_name",
settings.comms_guest_name_cron,
lambda: comms.run_guest_name_randomizer(wait=True),
)
scheduler.add_task(
"schedule.comms_pin_invite",
settings.comms_pin_invite_cron,
lambda: comms.run_pin_invite(wait=True),
)
scheduler.add_task(
"schedule.comms_reset_room",
settings.comms_reset_room_cron,
lambda: comms.run_reset_room(wait=True),
)
scheduler.add_task(
"schedule.comms_seed_room",
settings.comms_seed_room_cron,
lambda: comms.run_seed_room(wait=True),
)
scheduler.start()
logger.info(
"ariadne started",
extra={
"event": "startup",
"mailu_cron": settings.mailu_sync_cron,
"nextcloud_cron": settings.nextcloud_sync_cron,
"vaultwarden_cron": settings.vaultwarden_sync_cron,
"wger_admin_cron": settings.wger_admin_cron,
"vault_k8s_auth_cron": settings.vault_k8s_auth_cron,
"vault_oidc_cron": settings.vault_oidc_cron,
"comms_guest_name_cron": settings.comms_guest_name_cron,
"comms_pin_invite_cron": settings.comms_pin_invite_cron,
"comms_reset_room_cron": settings.comms_reset_room_cron,
"comms_seed_room_cron": settings.comms_seed_room_cron,
},
)
@app.on_event("shutdown")
@ -81,6 +133,7 @@ def _shutdown() -> None:
scheduler.stop()
provisioning.stop()
db.close()
logger.info("ariadne stopped", extra={"event": "shutdown"})
@app.get("/health")
@ -97,6 +150,10 @@ def metrics() -> Response:
@app.get("/api/admin/access/requests")
def list_access_requests(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
_require_admin(ctx)
logger.info(
"list access requests",
extra={"event": "access_requests_list", "actor": ctx.username or ""},
)
try:
rows = storage.list_pending_requests()
except Exception:
@ -157,6 +214,10 @@ async def approve_access_request(
raise HTTPException(status_code=502, detail="failed to approve request")
if not row:
logger.info(
"access request approval ignored",
extra={"event": "access_request_approve", "actor": decided_by, "username": username, "status": "skipped"},
)
return JSONResponse({"ok": True, "request_code": ""})
request_code = row.get("request_code") or ""
@ -166,6 +227,15 @@ async def approve_access_request(
args=(request_code,),
daemon=True,
).start()
logger.info(
"access request approved",
extra={
"event": "access_request_approve",
"actor": decided_by,
"username": username,
"request_code": request_code,
},
)
return JSONResponse({"ok": True, "request_code": request_code})
@ -201,7 +271,20 @@ async def deny_access_request(
raise HTTPException(status_code=502, detail="failed to deny request")
if not row:
logger.info(
"access request denial ignored",
extra={"event": "access_request_deny", "actor": decided_by, "username": username, "status": "skipped"},
)
return JSONResponse({"ok": True, "request_code": ""})
logger.info(
"access request denied",
extra={
"event": "access_request_deny",
"actor": decided_by,
"username": username,
"request_code": row.get("request_code") or "",
},
)
return JSONResponse({"ok": True, "request_code": row.get("request_code")})
@ -215,6 +298,10 @@ def rotate_mailu_password(ctx: AuthContext = Depends(_require_auth)) -> JSONResp
if not username:
raise HTTPException(status_code=400, detail="missing username")
logger.info(
"mailu password rotate requested",
extra={"event": "mailu_rotate", "username": username},
)
password = random_password()
try:
keycloak_admin.set_user_attribute(username, "mailu_app_password", password)
@ -237,6 +324,16 @@ def rotate_mailu_password(ctx: AuthContext = Depends(_require_auth)) -> JSONResp
except Exception as exc:
nextcloud_sync = {"status": "error", "detail": safe_error_detail(exc, "failed to sync nextcloud")}
logger.info(
"mailu password rotate completed",
extra={
"event": "mailu_rotate",
"username": username,
"sync_enabled": sync_enabled,
"sync_ok": sync_ok,
"nextcloud_status": nextcloud_sync.get("status") if isinstance(nextcloud_sync, dict) else "",
},
)
return JSONResponse(
{
"password": password,
@ -271,6 +368,7 @@ def reset_wger_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRespon
except Exception:
pass
logger.info("wger password reset requested", extra={"event": "wger_reset", "username": username})
password = random_password()
try:
result = wger.sync_user(username, mailu_email, password, wait=True)
@ -290,6 +388,7 @@ def reset_wger_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRespon
except Exception:
raise HTTPException(status_code=502, detail="failed to store wger password")
logger.info("wger password reset completed", extra={"event": "wger_reset", "username": username})
return JSONResponse({"status": "ok", "password": password})
@ -316,6 +415,7 @@ def reset_firefly_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRes
except Exception:
pass
logger.info("firefly password reset requested", extra={"event": "firefly_reset", "username": username})
password = random_password(24)
try:
result = firefly.sync_user(mailu_email, password, wait=True)
@ -335,6 +435,7 @@ def reset_firefly_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRes
except Exception:
raise HTTPException(status_code=502, detail="failed to store firefly password")
logger.info("firefly password reset completed", extra={"event": "firefly_reset", "username": username})
return JSONResponse({"status": "ok", "password": password})
@ -354,8 +455,24 @@ async def nextcloud_mail_sync(request: Request, ctx: AuthContext = Depends(_requ
payload = {}
wait = bool(payload.get("wait", True)) if isinstance(payload, dict) else True
logger.info(
"nextcloud mail sync requested",
extra={"event": "nextcloud_sync", "username": username, "wait": wait},
)
try:
result = nextcloud.sync_mail(username, wait=wait)
logger.info(
"nextcloud mail sync completed",
extra={
"event": "nextcloud_sync",
"username": username,
"status": result.get("status") if isinstance(result, dict) else "",
},
)
return JSONResponse(result)
except Exception as exc:
logger.info(
"nextcloud mail sync failed",
extra={"event": "nextcloud_sync", "username": username, "error": safe_error_detail(exc, "failed")},
)
raise HTTPException(status_code=502, detail=safe_error_detail(exc, "failed to sync nextcloud mail"))

View File

@ -5,12 +5,14 @@ import time
from typing import Any
from .client import get_json, post_json
from ..utils.logging import get_logger
class JobSpawner:
def __init__(self, namespace: str, cronjob_name: str) -> None:
self._namespace = namespace
self._cronjob_name = cronjob_name
self._logger = get_logger(__name__)
@staticmethod
def _safe_name_fragment(value: str, max_len: int = 24) -> str:
@ -85,6 +87,15 @@ class JobSpawner:
)
if not isinstance(job_name, str) or not job_name:
raise RuntimeError("job name missing")
self._logger.info(
"job triggered",
extra={
"event": "job_trigger",
"namespace": self._namespace,
"cronjob": self._cronjob_name,
"job": job_name,
},
)
return {"job": job_name, "status": "queued"}
def wait_for_completion(self, job_name: str, timeout_sec: float) -> dict[str, Any]:
@ -105,6 +116,15 @@ class JobSpawner:
if cond.get("type") == "Failed" and cond.get("status") == "True":
return {"job": job_name, "status": "error"}
time.sleep(2)
self._logger.info(
"job wait timeout",
extra={
"event": "job_timeout",
"namespace": self._namespace,
"cronjob": self._cronjob_name,
"job": job_name,
},
)
return {"job": job_name, "status": "running"}
def trigger_and_wait(

View File

@ -19,6 +19,7 @@ from ..services.vaultwarden import vaultwarden
from ..services.wger import wger
from ..settings import settings
from ..utils.errors import safe_error_detail
from ..utils.logging import get_logger
from ..utils.passwords import random_password
@ -30,6 +31,8 @@ WGER_PASSWORD_UPDATED_ATTR = "wger_password_updated_at"
FIREFLY_PASSWORD_ATTR = "firefly_password"
FIREFLY_PASSWORD_UPDATED_ATTR = "firefly_password_updated_at"
logger = get_logger(__name__)
@dataclass(frozen=True)
class ProvisionOutcome:
@ -108,6 +111,10 @@ class ProvisioningManager:
return ProvisionOutcome(ok=False, status="accounts_building")
required_tasks = list(REQUIRED_TASKS)
logger.info(
"provisioning started",
extra={"event": "provision_start", "request_code": request_code},
)
with self._db.connection() as conn:
lock_id = _advisory_lock_id(request_code)
@ -329,6 +336,10 @@ class ProvisioningManager:
self._record_task(request_code, "mailu_sync", "error", detail, start)
if not mailbox_ready:
logger.info(
"mailbox not ready after sync",
extra={"event": "mailu_mailbox_wait", "request_code": request_code, "status": "retry"},
)
return ProvisionOutcome(ok=False, status="accounts_building")
# Task: trigger Nextcloud mail sync
@ -449,8 +460,21 @@ class ProvisioningManager:
(request_code,),
)
self._send_welcome_email(request_code, username, contact_email)
logger.info(
"provisioning complete",
extra={
"event": "provision_complete",
"request_code": request_code,
"username": username,
"status": "awaiting_onboarding",
},
)
return ProvisionOutcome(ok=True, status="awaiting_onboarding")
logger.info(
"provisioning pending",
extra={"event": "provision_pending", "request_code": request_code, "status": "accounts_building"},
)
return ProvisionOutcome(ok=False, status="accounts_building")
finally:
conn.execute("SELECT pg_advisory_unlock(%s)", (lock_id,))
@ -503,6 +527,17 @@ class ProvisioningManager:
finished = datetime.now(timezone.utc)
duration_sec = (finished - started).total_seconds()
record_task_run(task, status, duration_sec)
logger.info(
"task run",
extra={
"event": "task_run",
"request_code": request_code,
"task": task,
"status": status,
"duration_sec": round(duration_sec, 3),
"detail": detail or "",
},
)
try:
self._storage.record_task_run(
request_code,

View File

@ -10,6 +10,7 @@ from croniter import croniter
from ..db.storage import Storage
from ..metrics.metrics import record_schedule_state, record_task_run
from ..utils.logging import get_logger
@dataclass(frozen=True)
@ -29,6 +30,7 @@ class CronScheduler:
self._lock = threading.Lock()
self._stop_event = threading.Event()
self._thread: threading.Thread | None = None
self._logger = get_logger(__name__)
def add_task(self, name: str, cron_expr: str, runner: Callable[[], None]) -> None:
task = CronTask(name=name, cron_expr=cron_expr, runner=runner)
@ -84,6 +86,10 @@ class CronScheduler:
started = datetime.now(timezone.utc)
status = "ok"
detail = None
self._logger.info(
"schedule task started",
extra={"event": "schedule_start", "task": task.name},
)
try:
task.runner()
except Exception as exc:
@ -92,6 +98,16 @@ class CronScheduler:
finished = datetime.now(timezone.utc)
duration_sec = (finished - started).total_seconds()
record_task_run(task.name, status, duration_sec)
self._logger.info(
"schedule task finished",
extra={
"event": "schedule_finish",
"task": task.name,
"status": status,
"duration_sec": round(duration_sec, 3),
"detail": detail or "",
},
)
record_schedule_state(
task.name,
started.timestamp(),

42
ariadne/services/comms.py Normal file
View File

@ -0,0 +1,42 @@
from __future__ import annotations
from typing import Any
from ..k8s.jobs import JobSpawner
from ..settings import settings
class CommsService:
    """Facade over the comms-namespace CronJobs, triggered on demand via JobSpawner."""

    def __init__(self) -> None:
        # One spawner per cronjob; namespace/cronjob names come from settings.
        ns = settings.comms_namespace
        self._guest_name_spawner = JobSpawner(ns, settings.comms_guest_name_cronjob)
        self._pin_invite_spawner = JobSpawner(ns, settings.comms_pin_invite_cronjob)
        self._reset_room_spawner = JobSpawner(ns, settings.comms_reset_room_cronjob)
        self._seed_room_spawner = JobSpawner(ns, settings.comms_seed_room_cronjob)

    def _trigger(self, spawner: JobSpawner, label_suffix: str, wait: bool) -> dict[str, Any]:
        # Fire-and-forget unless the caller asked to block on completion.
        if not wait:
            return spawner.trigger(label_suffix, None)
        return spawner.trigger_and_wait(label_suffix, None, settings.comms_job_wait_timeout_sec)

    def run_guest_name_randomizer(self, wait: bool = True) -> dict[str, Any]:
        """Trigger the guest-name randomizer job; raises RuntimeError when unconfigured."""
        if not (settings.comms_namespace and settings.comms_guest_name_cronjob):
            raise RuntimeError("comms guest name job not configured")
        return self._trigger(self._guest_name_spawner, "guest-name", wait)

    def run_pin_invite(self, wait: bool = True) -> dict[str, Any]:
        """Trigger the pin-invite job; raises RuntimeError when unconfigured."""
        if not (settings.comms_namespace and settings.comms_pin_invite_cronjob):
            raise RuntimeError("comms pin invite job not configured")
        return self._trigger(self._pin_invite_spawner, "pin-invite", wait)

    def run_reset_room(self, wait: bool = True) -> dict[str, Any]:
        """Trigger the room-reset job; raises RuntimeError when unconfigured."""
        if not (settings.comms_namespace and settings.comms_reset_room_cronjob):
            raise RuntimeError("comms reset room job not configured")
        return self._trigger(self._reset_room_spawner, "reset-room", wait)

    def run_seed_room(self, wait: bool = True) -> dict[str, Any]:
        """Trigger the room-seed job; raises RuntimeError when unconfigured."""
        if not (settings.comms_namespace and settings.comms_seed_room_cronjob):
            raise RuntimeError("comms seed room job not configured")
        return self._trigger(self._seed_room_spawner, "seed-room", wait)


comms = CommsService()

37
ariadne/services/vault.py Normal file
View File

@ -0,0 +1,37 @@
from __future__ import annotations
from typing import Any
from ..k8s.jobs import JobSpawner
from ..settings import settings
class VaultService:
    """Triggers the Vault configuration CronJobs (k8s auth and OIDC) on demand."""

    def __init__(self) -> None:
        # One spawner per cronjob; namespace/cronjob names come from settings.
        self._k8s_auth_spawner = JobSpawner(settings.vault_namespace, settings.vault_k8s_auth_cronjob)
        self._oidc_spawner = JobSpawner(settings.vault_namespace, settings.vault_oidc_cronjob)

    def _trigger(self, spawner: JobSpawner, label_suffix: str, wait: bool) -> dict[str, Any]:
        # Shared trigger path (mirrors CommsService._trigger): block on job
        # completion only when the caller asked to wait.
        if wait:
            return spawner.trigger_and_wait(label_suffix, None, settings.vault_job_wait_timeout_sec)
        return spawner.trigger(label_suffix, None)

    def sync_k8s_auth(self, wait: bool = True) -> dict[str, Any]:
        """Run the Vault k8s-auth config job; raises RuntimeError when unconfigured."""
        if not settings.vault_namespace or not settings.vault_k8s_auth_cronjob:
            raise RuntimeError("vault k8s auth sync not configured")
        return self._trigger(self._k8s_auth_spawner, "k8s-auth", wait)

    def sync_oidc(self, wait: bool = True) -> dict[str, Any]:
        """Run the Vault OIDC config job; raises RuntimeError when unconfigured."""
        if not settings.vault_namespace or not settings.vault_oidc_cronjob:
            raise RuntimeError("vault oidc sync not configured")
        return self._trigger(self._oidc_spawner, "oidc", wait)


vault = VaultService()

View File

@ -6,6 +6,7 @@ import time
from typing import Any
from ..settings import settings
from ..utils.logging import get_logger
from .keycloak_admin import keycloak_admin
from .mailu import mailu
from .vaultwarden import vaultwarden
@ -15,6 +16,8 @@ VAULTWARDEN_EMAIL_ATTR = "vaultwarden_email"
VAULTWARDEN_STATUS_ATTR = "vaultwarden_status"
VAULTWARDEN_SYNCED_AT_ATTR = "vaultwarden_synced_at"
logger = get_logger(__name__)
@dataclass(frozen=True)
class VaultwardenSyncSummary:
@ -102,7 +105,12 @@ def run_vaultwarden_sync() -> VaultwardenSyncSummary:
consecutive_failures = 0
if not keycloak_admin.ready():
return VaultwardenSyncSummary(0, 0, 0, 1, detail="keycloak admin not configured")
summary = VaultwardenSyncSummary(0, 0, 0, 1, detail="keycloak admin not configured")
logger.info(
"vaultwarden sync skipped",
extra={"event": "vaultwarden_sync", "status": "error", "detail": summary.detail},
)
return summary
users = keycloak_admin.iter_users(page_size=200, brief=False)
for user in users:
@ -194,4 +202,16 @@ def run_vaultwarden_sync() -> VaultwardenSyncSummary:
if consecutive_failures >= settings.vaultwarden_failure_bailout:
break
return VaultwardenSyncSummary(processed, created, skipped, failures)
summary = VaultwardenSyncSummary(processed, created, skipped, failures)
logger.info(
"vaultwarden sync finished",
extra={
"event": "vaultwarden_sync",
"status": "ok" if failures == 0 else "error",
"processed": processed,
"created_or_present": created,
"skipped": skipped,
"failures": failures,
},
)
return summary

View File

@ -36,6 +36,7 @@ class Settings:
bind_port: int
portal_database_url: str
portal_public_base_url: str
log_level: str
keycloak_url: str
keycloak_realm: str
@ -78,6 +79,18 @@ class Settings:
firefly_user_sync_cronjob: str
firefly_user_sync_wait_timeout_sec: float
vault_namespace: str
vault_k8s_auth_cronjob: str
vault_oidc_cronjob: str
vault_job_wait_timeout_sec: float
comms_namespace: str
comms_guest_name_cronjob: str
comms_pin_invite_cronjob: str
comms_reset_room_cronjob: str
comms_seed_room_cronjob: str
comms_job_wait_timeout_sec: float
vaultwarden_namespace: str
vaultwarden_pod_label: str
vaultwarden_pod_port: int
@ -108,6 +121,12 @@ class Settings:
nextcloud_sync_cron: str
vaultwarden_sync_cron: str
wger_admin_cron: str
vault_k8s_auth_cron: str
vault_oidc_cron: str
comms_guest_name_cron: str
comms_pin_invite_cron: str
comms_reset_room_cron: str
comms_seed_room_cron: str
metrics_path: str
@ -135,6 +154,7 @@ class Settings:
bind_port=_env_int("ARIADNE_BIND_PORT", 8080),
portal_database_url=_env("PORTAL_DATABASE_URL", ""),
portal_public_base_url=_env("PORTAL_PUBLIC_BASE_URL", "https://bstein.dev").rstrip("/"),
log_level=_env("ARIADNE_LOG_LEVEL", "INFO"),
keycloak_url=keycloak_url,
keycloak_realm=keycloak_realm,
keycloak_client_id=keycloak_client_id,
@ -172,6 +192,16 @@ class Settings:
firefly_namespace=_env("FIREFLY_NAMESPACE", "finance"),
firefly_user_sync_cronjob=_env("FIREFLY_USER_SYNC_CRONJOB", "firefly-user-sync"),
firefly_user_sync_wait_timeout_sec=_env_float("FIREFLY_USER_SYNC_WAIT_TIMEOUT_SEC", 90.0),
vault_namespace=_env("VAULT_NAMESPACE", "vault"),
vault_k8s_auth_cronjob=_env("VAULT_K8S_AUTH_CRONJOB", "vault-k8s-auth-config"),
vault_oidc_cronjob=_env("VAULT_OIDC_CRONJOB", "vault-oidc-config"),
vault_job_wait_timeout_sec=_env_float("VAULT_JOB_WAIT_TIMEOUT_SEC", 120.0),
comms_namespace=_env("COMMS_NAMESPACE", "comms"),
comms_guest_name_cronjob=_env("COMMS_GUEST_NAME_CRONJOB", "guest-name-randomizer"),
comms_pin_invite_cronjob=_env("COMMS_PIN_INVITE_CRONJOB", "pin-othrys-invite"),
comms_reset_room_cronjob=_env("COMMS_RESET_ROOM_CRONJOB", "othrys-room-reset"),
comms_seed_room_cronjob=_env("COMMS_SEED_ROOM_CRONJOB", "seed-othrys-room"),
comms_job_wait_timeout_sec=_env_float("COMMS_JOB_WAIT_TIMEOUT_SEC", 60.0),
vaultwarden_namespace=_env("VAULTWARDEN_NAMESPACE", "vaultwarden"),
vaultwarden_pod_label=_env("VAULTWARDEN_POD_LABEL", "app=vaultwarden"),
vaultwarden_pod_port=_env_int("VAULTWARDEN_POD_PORT", 80),
@ -202,6 +232,12 @@ class Settings:
nextcloud_sync_cron=_env("ARIADNE_SCHEDULE_NEXTCLOUD_SYNC", "0 5 * * *"),
vaultwarden_sync_cron=_env("ARIADNE_SCHEDULE_VAULTWARDEN_SYNC", "*/15 * * * *"),
wger_admin_cron=_env("ARIADNE_SCHEDULE_WGER_ADMIN", "15 3 * * *"),
vault_k8s_auth_cron=_env("ARIADNE_SCHEDULE_VAULT_K8S_AUTH", "*/15 * * * *"),
vault_oidc_cron=_env("ARIADNE_SCHEDULE_VAULT_OIDC", "*/15 * * * *"),
comms_guest_name_cron=_env("ARIADNE_SCHEDULE_COMMS_GUEST_NAME", "*/1 * * * *"),
comms_pin_invite_cron=_env("ARIADNE_SCHEDULE_COMMS_PIN_INVITE", "*/30 * * * *"),
comms_reset_room_cron=_env("ARIADNE_SCHEDULE_COMMS_RESET_ROOM", "0 0 1 1 *"),
comms_seed_room_cron=_env("ARIADNE_SCHEDULE_COMMS_SEED_ROOM", "*/10 * * * *"),
metrics_path=_env("METRICS_PATH", "/metrics"),
)

88
ariadne/utils/logging.py Normal file
View File

@ -0,0 +1,88 @@
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timezone
import json
import logging
from typing import Any
# LogRecord attributes that belong to the stdlib record itself. Anything else
# found on a record is treated as a caller-supplied "extra" and copied into
# the JSON payload verbatim.
_STANDARD_ATTRS = {
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "module",
    "msecs",
    "message",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "stack_info",
    "taskName",  # added to LogRecord in Python 3.12; must not leak as an extra
    "thread",
    "threadName",
}


@dataclass(frozen=True)
class LogConfig:
    """Runtime logging configuration; ``level`` is a standard level name (e.g. "INFO")."""

    level: str = "INFO"


class JsonFormatter(logging.Formatter):
    """Render each log record as one JSON object per line.

    Standard record metadata becomes ``timestamp``/``level``/``logger``/
    ``message``; any non-standard attribute attached via ``extra=`` is copied
    through as a top-level key.
    """

    def format(self, record: logging.LogRecord) -> str:
        payload: dict[str, Any] = {
            "timestamp": datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat(),
            "level": record.levelname.lower(),
            "logger": record.name,
            "message": record.getMessage(),
        }
        if record.exc_info:
            payload["exc_info"] = self.formatException(record.exc_info)
        if record.stack_info:
            payload["stack_info"] = record.stack_info
        for key, value in record.__dict__.items():
            # Skip stdlib bookkeeping attrs and anything that would clobber a
            # base payload key; everything else is a caller-supplied extra.
            if key in _STANDARD_ATTRS or key in payload:
                continue
            payload[key] = value
        # default=str keeps the formatter from raising on a non-JSON-serializable
        # extra -- a logging call must never crash the caller.
        return json.dumps(payload, ensure_ascii=True, default=str)
# One-shot guard so repeated calls (tests, multiple entrypoints) cannot stack
# duplicate handlers on the root logger.
_LOGGING_CONFIGURED = False


def configure_logging(config: LogConfig | None = None) -> None:
    """Install a JSON stream handler on the root logger (idempotent)."""
    global _LOGGING_CONFIGURED
    if _LOGGING_CONFIGURED:
        return
    effective = config if config is not None else LogConfig()
    resolved = logging.getLevelName(effective.level.upper())
    if isinstance(resolved, str):
        # getLevelName returns a "Level X" string for unknown names; fall back.
        resolved = logging.INFO
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(JsonFormatter())
    root_logger = logging.getLogger()
    root_logger.setLevel(resolved)
    root_logger.handlers.clear()
    root_logger.addHandler(stream_handler)
    _LOGGING_CONFIGURED = True
def get_logger(name: str) -> logging.Logger:
    """Return the stdlib logger for *name*; output format comes from configure_logging()."""
    return logging.getLogger(name)

27
tests/test_logging.py Normal file
View File

@ -0,0 +1,27 @@
from __future__ import annotations
import json
import logging
from ariadne.utils.logging import JsonFormatter
def test_json_formatter_includes_extra_fields() -> None:
    """Extras attached as record attributes must survive into the JSON payload."""
    rec = logging.LogRecord(
        name="ariadne.test",
        level=logging.INFO,
        pathname=__file__,
        lineno=10,
        msg="hello",
        args=(),
        exc_info=None,
    )
    rec.event = "unit_test"
    rec.request_code = "REQ123"
    rendered = JsonFormatter().format(rec)
    data = json.loads(rendered)
    assert data["message"] == "hello"
    assert data["event"] == "unit_test"
    assert data["request_code"] == "REQ123"

View File

@ -4,9 +4,11 @@ import types
import pytest
from ariadne.services.comms import CommsService
from ariadne.services.firefly import FireflyService
from ariadne.services.mailu import MailuService
from ariadne.services.nextcloud import NextcloudService
from ariadne.services.vault import VaultService
from ariadne.services.wger import WgerService
from ariadne.services.vaultwarden import VaultwardenService
@ -129,6 +131,48 @@ def test_firefly_sync_user_env(monkeypatch) -> None:
assert env_map["FIREFLY_USER_EMAIL"] == "alice@bstein.dev"
def test_vault_sync_jobs(monkeypatch) -> None:
    """sync_k8s_auth(wait=True) routes through trigger_and_wait with the configured timeout."""
    fake_settings = types.SimpleNamespace(
        vault_namespace="vault",
        vault_k8s_auth_cronjob="vault-k8s-auth-config",
        vault_oidc_cronjob="vault-oidc-config",
        vault_job_wait_timeout_sec=120.0,
    )
    monkeypatch.setattr("ariadne.services.vault.settings", fake_settings)
    monkeypatch.setattr("ariadne.services.vault.JobSpawner", lambda ns, cj: DummySpawner(ns, cj))
    service = VaultService()
    outcome = service.sync_k8s_auth(wait=True)
    assert outcome["status"] == "ok"
    label, _, timeout, _ = service._k8s_auth_spawner.calls[0]
    assert label == "k8s-auth"
    assert timeout == 120.0
def test_comms_jobs(monkeypatch) -> None:
    """run_guest_name_randomizer(wait=True) routes through trigger_and_wait with the configured timeout."""
    fake_settings = types.SimpleNamespace(
        comms_namespace="comms",
        comms_guest_name_cronjob="guest-name-randomizer",
        comms_pin_invite_cronjob="pin-othrys-invite",
        comms_reset_room_cronjob="othrys-room-reset",
        comms_seed_room_cronjob="seed-othrys-room",
        comms_job_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.comms.settings", fake_settings)
    monkeypatch.setattr("ariadne.services.comms.JobSpawner", lambda ns, cj: DummySpawner(ns, cj))
    service = CommsService()
    outcome = service.run_guest_name_randomizer(wait=True)
    assert outcome["status"] == "ok"
    label, _, timeout, _ = service._guest_name_spawner.calls[0]
    assert label == "guest-name"
    assert timeout == 60.0
def test_mailu_sync_includes_force(monkeypatch) -> None:
dummy_settings = types.SimpleNamespace(
mailu_sync_url="http://mailu",