feat: harden ariadne scheduling and audit

This commit is contained in:
Brad Stein 2026-01-20 18:11:02 -03:00
parent 681e9aa358
commit 1c6e16e8c9
49 changed files with 8918 additions and 275 deletions

27
Jenkinsfile vendored
View File

@ -72,6 +72,12 @@ spec:
IMAGE = "${REGISTRY}/ariadne"
VERSION_TAG = 'dev'
SEMVER = 'dev'
COVERAGE_MIN = '99'
COVERAGE_JSON = 'build/coverage.json'
JUNIT_XML = 'build/junit.xml'
METRICS_PREFIX = 'ariadne_ci'
VM_IMPORT_URL = 'http://victoria-metrics-single-server.monitoring.svc.cluster.local:8428/api/v1/import/prometheus'
REPO_NAME = 'ariadne'
}
options {
disableConcurrentBuilds()
@ -92,7 +98,24 @@ spec:
sh '''
set -euo pipefail
python -m pip install --no-cache-dir -r requirements.txt -r requirements-dev.txt
pytest -q
mkdir -p build
python -m slipcover \
--json \
--out "${COVERAGE_JSON}" \
--source ariadne \
--fail-under "${COVERAGE_MIN}" \
-m pytest -q --junitxml "${JUNIT_XML}"
'''
}
}
}
stage('Publish test metrics') {
steps {
container('tester') {
sh '''
set -euo pipefail
python scripts/publish_test_metrics.py
'''
}
}
@ -122,7 +145,7 @@ spec:
else
SEMVER="0.1.0-${BUILD_NUMBER}"
fi
if ! echo "$SEMVER" | grep -Eq '^v?[0-9]+\.[0-9]+\.[0-9]+([-.][0-9A-Za-z]+)?$'; then
if ! echo "$SEMVER" | grep -Eq '^v?[0-9]+[.][0-9]+[.][0-9]+([-.][0-9A-Za-z]+)?$'; then
SEMVER="0.1.0-${BUILD_NUMBER}"
fi
echo "SEMVER=${SEMVER}" > build.env

View File

@ -1,6 +1,7 @@
from __future__ import annotations
from datetime import datetime, timezone
import json
import threading
from typing import Any
@ -12,10 +13,12 @@ from .auth.keycloak import AuthContext, authenticator
from .db.database import Database
from .db.storage import Storage
from .manager.provisioning import ProvisioningManager
from .metrics.metrics import record_task_run
from .scheduler.cron import CronScheduler
from .services.comms import comms
from .services.firefly import firefly
from .services.keycloak_admin import keycloak_admin
from .services.keycloak_profile import run_profile_sync
from .services.mailu import mailu
from .services.nextcloud import nextcloud
from .services.vaultwarden_sync import run_vaultwarden_sync
@ -24,7 +27,7 @@ from .services.wger import wger
from .settings import settings
from .utils.errors import safe_error_detail
from .utils.http import extract_bearer_token
from .utils.logging import LogConfig, configure_logging, get_logger
from .utils.logging import LogConfig, configure_logging, get_logger, task_context
from .utils.passwords import random_password
@ -37,6 +40,22 @@ provisioning = ProvisioningManager(db, storage)
scheduler = CronScheduler(storage, settings.schedule_tick_sec)
def _record_event(event_type: str, detail: dict[str, Any] | str | None) -> None:
    """Write an audit event, swallowing any storage failure.

    Best-effort by design: the audit trail must never break the request
    path, so errors from the storage layer are deliberately ignored.
    """
    try:
        storage.record_event(event_type, detail)
    except Exception:
        pass
def _parse_event_detail(detail: str | None) -> Any:
if not isinstance(detail, str) or not detail:
return ""
try:
return json.loads(detail)
except Exception:
return detail
app = FastAPI(title=settings.app_name)
@ -78,6 +97,11 @@ def _startup() -> None:
lambda: nextcloud.sync_mail(wait=False),
)
scheduler.add_task("schedule.vaultwarden_sync", settings.vaultwarden_sync_cron, run_vaultwarden_sync)
scheduler.add_task(
"schedule.keycloak_profile",
settings.keycloak_profile_cron,
run_profile_sync,
)
scheduler.add_task("schedule.wger_admin", settings.wger_admin_cron, lambda: wger.ensure_admin(wait=False))
scheduler.add_task(
"schedule.vault_k8s_auth",
@ -124,6 +148,7 @@ def _startup() -> None:
"comms_pin_invite_cron": settings.comms_pin_invite_cron,
"comms_reset_room_cron": settings.comms_reset_room_cron,
"comms_seed_room_cron": settings.comms_seed_room_cron,
"keycloak_profile_cron": settings.keycloak_profile_cron,
},
)
@ -175,6 +200,76 @@ def list_access_requests(ctx: AuthContext = Depends(_require_auth)) -> JSONRespo
return JSONResponse({"requests": output})
@app.get("/api/admin/access/flags")
def list_access_flags(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
_require_admin(ctx)
flags = settings.allowed_flag_groups
if keycloak_admin.ready():
try:
flags = keycloak_admin.list_group_names(exclude={"admin"})
except Exception:
flags = settings.allowed_flag_groups
return JSONResponse({"flags": flags})
@app.get("/api/admin/audit/events")
def list_audit_events(
limit: int = 200,
event_type: str | None = None,
ctx: AuthContext = Depends(_require_auth),
) -> JSONResponse:
_require_admin(ctx)
try:
rows = storage.list_events(limit=limit, event_type=event_type)
except Exception:
raise HTTPException(status_code=502, detail="failed to load audit events")
output: list[dict[str, Any]] = []
for row in rows:
created_at = row.get("created_at")
output.append(
{
"id": row.get("id"),
"event_type": row.get("event_type"),
"detail": _parse_event_detail(row.get("detail")),
"created_at": created_at.isoformat() if isinstance(created_at, datetime) else "",
}
)
return JSONResponse({"events": output})
@app.get("/api/admin/audit/task-runs")
def list_audit_task_runs(
limit: int = 200,
request_code: str | None = None,
task: str | None = None,
ctx: AuthContext = Depends(_require_auth),
) -> JSONResponse:
_require_admin(ctx)
try:
rows = storage.list_task_runs(limit=limit, request_code=request_code, task=task)
except Exception:
raise HTTPException(status_code=502, detail="failed to load task runs")
output: list[dict[str, Any]] = []
for row in rows:
started_at = row.get("started_at")
finished_at = row.get("finished_at")
output.append(
{
"id": row.get("id"),
"request_code": row.get("request_code") or "",
"task": row.get("task") or "",
"status": row.get("status") or "",
"detail": _parse_event_detail(row.get("detail")),
"started_at": started_at.isoformat() if isinstance(started_at, datetime) else "",
"finished_at": finished_at.isoformat() if isinstance(finished_at, datetime) else "",
"duration_ms": row.get("duration_ms"),
}
)
return JSONResponse({"task_runs": output})
@app.post("/api/admin/access/requests/{username}/approve")
async def approve_access_request(
username: str,
@ -182,61 +277,87 @@ async def approve_access_request(
ctx: AuthContext = Depends(_require_auth),
) -> JSONResponse:
_require_admin(ctx)
try:
payload = await request.json()
except Exception:
payload = {}
with task_context("admin.access.approve"):
try:
payload = await request.json()
except Exception:
payload = {}
flags_raw = payload.get("flags") if isinstance(payload, dict) else None
flags = [f for f in flags_raw if isinstance(f, str)] if isinstance(flags_raw, list) else []
flags = [f for f in flags if f in settings.allowed_flag_groups]
note = payload.get("note") if isinstance(payload, dict) else None
note = str(note).strip() if isinstance(note, str) else None
flags_raw = payload.get("flags") if isinstance(payload, dict) else None
flags = [f for f in flags_raw if isinstance(f, str)] if isinstance(flags_raw, list) else []
allowed_flags = settings.allowed_flag_groups
if keycloak_admin.ready():
try:
allowed_flags = keycloak_admin.list_group_names(exclude={"admin"})
except Exception:
allowed_flags = settings.allowed_flag_groups
flags = [f for f in flags if f in allowed_flags]
note = payload.get("note") if isinstance(payload, dict) else None
note = str(note).strip() if isinstance(note, str) else None
decided_by = ctx.username or ""
try:
row = db.fetchone(
"""
UPDATE access_requests
SET status = 'approved',
decided_at = NOW(),
decided_by = %s,
approval_flags = %s,
approval_note = %s
WHERE username = %s
AND status = 'pending'
AND email_verified_at IS NOT NULL
RETURNING request_code
""",
(decided_by or None, flags or None, note, username),
)
except Exception:
raise HTTPException(status_code=502, detail="failed to approve request")
decided_by = ctx.username or ""
try:
row = db.fetchone(
"""
UPDATE access_requests
SET status = 'approved',
decided_at = NOW(),
decided_by = %s,
approval_flags = %s,
approval_note = %s
WHERE username = %s
AND status = 'pending'
AND email_verified_at IS NOT NULL
RETURNING request_code
""",
(decided_by or None, flags or None, note, username),
)
except Exception:
raise HTTPException(status_code=502, detail="failed to approve request")
if not row:
if not row:
logger.info(
"access request approval ignored",
extra={"event": "access_request_approve", "actor": decided_by, "username": username, "status": "skipped"},
)
_record_event(
"access_request_approve",
{
"actor": decided_by,
"username": username,
"status": "skipped",
},
)
return JSONResponse({"ok": True, "request_code": ""})
request_code = row.get("request_code") or ""
if request_code:
threading.Thread(
target=provisioning.provision_access_request,
args=(request_code,),
daemon=True,
).start()
logger.info(
"access request approval ignored",
extra={"event": "access_request_approve", "actor": decided_by, "username": username, "status": "skipped"},
"access request approved",
extra={
"event": "access_request_approve",
"actor": decided_by,
"username": username,
"request_code": request_code,
},
)
return JSONResponse({"ok": True, "request_code": ""})
request_code = row.get("request_code") or ""
if request_code:
threading.Thread(
target=provisioning.provision_access_request,
args=(request_code,),
daemon=True,
).start()
logger.info(
"access request approved",
extra={
"event": "access_request_approve",
"actor": decided_by,
"username": username,
"request_code": request_code,
},
)
return JSONResponse({"ok": True, "request_code": request_code})
_record_event(
"access_request_approve",
{
"actor": decided_by,
"username": username,
"request_code": request_code,
"status": "ok",
"flags": flags,
"note": note or "",
},
)
return JSONResponse({"ok": True, "request_code": request_code})
@app.post("/api/admin/access/requests/{username}/deny")
@ -246,46 +367,65 @@ async def deny_access_request(
ctx: AuthContext = Depends(_require_auth),
) -> JSONResponse:
_require_admin(ctx)
try:
payload = await request.json()
except Exception:
payload = {}
note = payload.get("note") if isinstance(payload, dict) else None
note = str(note).strip() if isinstance(note, str) else None
decided_by = ctx.username or ""
with task_context("admin.access.deny"):
try:
payload = await request.json()
except Exception:
payload = {}
note = payload.get("note") if isinstance(payload, dict) else None
note = str(note).strip() if isinstance(note, str) else None
decided_by = ctx.username or ""
try:
row = db.fetchone(
"""
UPDATE access_requests
SET status = 'denied',
decided_at = NOW(),
decided_by = %s,
denial_note = %s
WHERE username = %s AND status = 'pending'
RETURNING request_code
""",
(decided_by or None, note, username),
)
except Exception:
raise HTTPException(status_code=502, detail="failed to deny request")
try:
row = db.fetchone(
"""
UPDATE access_requests
SET status = 'denied',
decided_at = NOW(),
decided_by = %s,
denial_note = %s
WHERE username = %s AND status = 'pending'
RETURNING request_code
""",
(decided_by or None, note, username),
)
except Exception:
raise HTTPException(status_code=502, detail="failed to deny request")
if not row:
if not row:
logger.info(
"access request denial ignored",
extra={"event": "access_request_deny", "actor": decided_by, "username": username, "status": "skipped"},
)
_record_event(
"access_request_deny",
{
"actor": decided_by,
"username": username,
"status": "skipped",
},
)
return JSONResponse({"ok": True, "request_code": ""})
logger.info(
"access request denial ignored",
extra={"event": "access_request_deny", "actor": decided_by, "username": username, "status": "skipped"},
"access request denied",
extra={
"event": "access_request_deny",
"actor": decided_by,
"username": username,
"request_code": row.get("request_code") or "",
},
)
return JSONResponse({"ok": True, "request_code": ""})
logger.info(
"access request denied",
extra={
"event": "access_request_deny",
"actor": decided_by,
"username": username,
"request_code": row.get("request_code") or "",
},
)
return JSONResponse({"ok": True, "request_code": row.get("request_code")})
_record_event(
"access_request_deny",
{
"actor": decided_by,
"username": username,
"request_code": row.get("request_code") or "",
"status": "ok",
"note": note or "",
},
)
return JSONResponse({"ok": True, "request_code": row.get("request_code")})
@app.post("/api/account/mailu/rotate")
@ -297,52 +437,89 @@ def rotate_mailu_password(ctx: AuthContext = Depends(_require_auth)) -> JSONResp
username = ctx.username or ""
if not username:
raise HTTPException(status_code=400, detail="missing username")
with task_context("account.mailu_rotate"):
started = datetime.now(timezone.utc)
status = "ok"
error_detail = ""
sync_enabled = bool(settings.mailu_sync_url)
sync_ok = False
sync_error = ""
nextcloud_sync: dict[str, Any] = {"status": "skipped"}
logger.info(
"mailu password rotate requested",
extra={"event": "mailu_rotate", "username": username},
)
password = random_password()
try:
keycloak_admin.set_user_attribute(username, "mailu_app_password", password)
except Exception:
raise HTTPException(status_code=502, detail="failed to update mail password")
sync_enabled = bool(settings.mailu_sync_url)
sync_ok = False
sync_error = ""
if sync_enabled:
logger.info(
"mailu password rotate requested",
extra={"event": "mailu_rotate", "username": username},
)
try:
mailu.sync("ariadne_mailu_rotate")
sync_ok = True
password = random_password()
keycloak_admin.set_user_attribute(username, "mailu_app_password", password)
if sync_enabled:
try:
mailu.sync("ariadne_mailu_rotate")
sync_ok = True
except Exception as exc:
sync_error = safe_error_detail(exc, "sync request failed")
try:
nextcloud_sync = nextcloud.sync_mail(username, wait=True)
except Exception as exc:
nextcloud_sync = {"status": "error", "detail": safe_error_detail(exc, "failed to sync nextcloud")}
logger.info(
"mailu password rotate completed",
extra={
"event": "mailu_rotate",
"username": username,
"sync_enabled": sync_enabled,
"sync_ok": sync_ok,
"nextcloud_status": nextcloud_sync.get("status") if isinstance(nextcloud_sync, dict) else "",
},
)
return JSONResponse(
{
"password": password,
"sync_enabled": sync_enabled,
"sync_ok": sync_ok,
"sync_error": sync_error,
"nextcloud_sync": nextcloud_sync,
}
)
except HTTPException as exc:
status = "error"
error_detail = str(exc.detail)
raise
except Exception as exc:
sync_error = safe_error_detail(exc, "sync request failed")
nextcloud_sync: dict[str, Any] = {"status": "skipped"}
try:
nextcloud_sync = nextcloud.sync_mail(username, wait=True)
except Exception as exc:
nextcloud_sync = {"status": "error", "detail": safe_error_detail(exc, "failed to sync nextcloud")}
logger.info(
"mailu password rotate completed",
extra={
"event": "mailu_rotate",
"username": username,
"sync_enabled": sync_enabled,
"sync_ok": sync_ok,
"nextcloud_status": nextcloud_sync.get("status") if isinstance(nextcloud_sync, dict) else "",
},
)
return JSONResponse(
{
"password": password,
"sync_enabled": sync_enabled,
"sync_ok": sync_ok,
"sync_error": sync_error,
"nextcloud_sync": nextcloud_sync,
}
)
status = "error"
error_detail = safe_error_detail(exc, "mailu rotate failed")
raise HTTPException(status_code=502, detail=error_detail)
finally:
finished = datetime.now(timezone.utc)
duration_sec = (finished - started).total_seconds()
record_task_run("mailu_rotate", status, duration_sec)
try:
storage.record_task_run(
None,
"mailu_rotate",
status,
error_detail or None,
started,
finished,
int(duration_sec * 1000),
)
except Exception:
pass
_record_event(
"mailu_rotate",
{
"username": username,
"status": status,
"sync_enabled": sync_enabled,
"sync_ok": sync_ok,
"nextcloud_status": nextcloud_sync.get("status") if isinstance(nextcloud_sync, dict) else "",
"error": error_detail,
},
)
@app.post("/api/account/wger/reset")
@ -355,41 +532,72 @@ def reset_wger_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRespon
if not username:
raise HTTPException(status_code=400, detail="missing username")
mailu_email = f"{username}@{settings.mailu_domain}"
try:
user = keycloak_admin.find_user(username) or {}
attrs = user.get("attributes") if isinstance(user, dict) else None
if isinstance(attrs, dict):
raw_mailu = attrs.get("mailu_email")
if isinstance(raw_mailu, list) and raw_mailu:
mailu_email = str(raw_mailu[0])
elif isinstance(raw_mailu, str) and raw_mailu:
mailu_email = raw_mailu
except Exception:
pass
with task_context("account.wger_reset"):
mailu_email = f"{username}@{settings.mailu_domain}"
try:
user = keycloak_admin.find_user(username) or {}
attrs = user.get("attributes") if isinstance(user, dict) else None
if isinstance(attrs, dict):
raw_mailu = attrs.get("mailu_email")
if isinstance(raw_mailu, list) and raw_mailu:
mailu_email = str(raw_mailu[0])
elif isinstance(raw_mailu, str) and raw_mailu:
mailu_email = raw_mailu
except Exception:
pass
logger.info("wger password reset requested", extra={"event": "wger_reset", "username": username})
password = random_password()
try:
result = wger.sync_user(username, mailu_email, password, wait=True)
status_val = result.get("status") if isinstance(result, dict) else "error"
if status_val != "ok":
raise RuntimeError(f"wger sync {status_val}")
except Exception as exc:
raise HTTPException(status_code=502, detail=safe_error_detail(exc, "wger sync failed"))
started = datetime.now(timezone.utc)
status = "ok"
error_detail = ""
logger.info("wger password reset requested", extra={"event": "wger_reset", "username": username})
try:
password = random_password()
result = wger.sync_user(username, mailu_email, password, wait=True)
status_val = result.get("status") if isinstance(result, dict) else "error"
if status_val != "ok":
raise RuntimeError(f"wger sync {status_val}")
try:
keycloak_admin.set_user_attribute(username, "wger_password", password)
keycloak_admin.set_user_attribute(
username,
"wger_password_updated_at",
datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
)
except Exception:
raise HTTPException(status_code=502, detail="failed to store wger password")
keycloak_admin.set_user_attribute(username, "wger_password", password)
keycloak_admin.set_user_attribute(
username,
"wger_password_updated_at",
datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
)
logger.info("wger password reset completed", extra={"event": "wger_reset", "username": username})
return JSONResponse({"status": "ok", "password": password})
logger.info("wger password reset completed", extra={"event": "wger_reset", "username": username})
return JSONResponse({"status": "ok", "password": password})
except HTTPException as exc:
status = "error"
error_detail = str(exc.detail)
raise
except Exception as exc:
status = "error"
error_detail = safe_error_detail(exc, "wger sync failed")
raise HTTPException(status_code=502, detail=error_detail)
finally:
finished = datetime.now(timezone.utc)
duration_sec = (finished - started).total_seconds()
record_task_run("wger_reset", status, duration_sec)
try:
storage.record_task_run(
None,
"wger_reset",
status,
error_detail or None,
started,
finished,
int(duration_sec * 1000),
)
except Exception:
pass
_record_event(
"wger_reset",
{
"username": username,
"status": status,
"error": error_detail,
},
)
@app.post("/api/account/firefly/reset")
@ -402,41 +610,72 @@ def reset_firefly_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRes
if not username:
raise HTTPException(status_code=400, detail="missing username")
mailu_email = f"{username}@{settings.mailu_domain}"
try:
user = keycloak_admin.find_user(username) or {}
attrs = user.get("attributes") if isinstance(user, dict) else None
if isinstance(attrs, dict):
raw_mailu = attrs.get("mailu_email")
if isinstance(raw_mailu, list) and raw_mailu:
mailu_email = str(raw_mailu[0])
elif isinstance(raw_mailu, str) and raw_mailu:
mailu_email = raw_mailu
except Exception:
pass
with task_context("account.firefly_reset"):
mailu_email = f"{username}@{settings.mailu_domain}"
try:
user = keycloak_admin.find_user(username) or {}
attrs = user.get("attributes") if isinstance(user, dict) else None
if isinstance(attrs, dict):
raw_mailu = attrs.get("mailu_email")
if isinstance(raw_mailu, list) and raw_mailu:
mailu_email = str(raw_mailu[0])
elif isinstance(raw_mailu, str) and raw_mailu:
mailu_email = raw_mailu
except Exception:
pass
logger.info("firefly password reset requested", extra={"event": "firefly_reset", "username": username})
password = random_password(24)
try:
result = firefly.sync_user(mailu_email, password, wait=True)
status_val = result.get("status") if isinstance(result, dict) else "error"
if status_val != "ok":
raise RuntimeError(f"firefly sync {status_val}")
except Exception as exc:
raise HTTPException(status_code=502, detail=safe_error_detail(exc, "firefly sync failed"))
started = datetime.now(timezone.utc)
status = "ok"
error_detail = ""
logger.info("firefly password reset requested", extra={"event": "firefly_reset", "username": username})
try:
password = random_password(24)
result = firefly.sync_user(mailu_email, password, wait=True)
status_val = result.get("status") if isinstance(result, dict) else "error"
if status_val != "ok":
raise RuntimeError(f"firefly sync {status_val}")
try:
keycloak_admin.set_user_attribute(username, "firefly_password", password)
keycloak_admin.set_user_attribute(
username,
"firefly_password_updated_at",
datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
)
except Exception:
raise HTTPException(status_code=502, detail="failed to store firefly password")
keycloak_admin.set_user_attribute(username, "firefly_password", password)
keycloak_admin.set_user_attribute(
username,
"firefly_password_updated_at",
datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
)
logger.info("firefly password reset completed", extra={"event": "firefly_reset", "username": username})
return JSONResponse({"status": "ok", "password": password})
logger.info("firefly password reset completed", extra={"event": "firefly_reset", "username": username})
return JSONResponse({"status": "ok", "password": password})
except HTTPException as exc:
status = "error"
error_detail = str(exc.detail)
raise
except Exception as exc:
status = "error"
error_detail = safe_error_detail(exc, "firefly sync failed")
raise HTTPException(status_code=502, detail=error_detail)
finally:
finished = datetime.now(timezone.utc)
duration_sec = (finished - started).total_seconds()
record_task_run("firefly_reset", status, duration_sec)
try:
storage.record_task_run(
None,
"firefly_reset",
status,
error_detail or None,
started,
finished,
int(duration_sec * 1000),
)
except Exception:
pass
_record_event(
"firefly_reset",
{
"username": username,
"status": status,
"error": error_detail,
},
)
@app.post("/api/account/nextcloud/mail/sync")
@ -449,30 +688,65 @@ async def nextcloud_mail_sync(request: Request, ctx: AuthContext = Depends(_requ
if not username:
raise HTTPException(status_code=400, detail="missing username")
try:
payload = await request.json()
except Exception:
payload = {}
wait = bool(payload.get("wait", True)) if isinstance(payload, dict) else True
with task_context("account.nextcloud_sync"):
try:
payload = await request.json()
except Exception:
payload = {}
wait = bool(payload.get("wait", True)) if isinstance(payload, dict) else True
logger.info(
"nextcloud mail sync requested",
extra={"event": "nextcloud_sync", "username": username, "wait": wait},
)
try:
result = nextcloud.sync_mail(username, wait=wait)
started = datetime.now(timezone.utc)
status = "ok"
error_detail = ""
logger.info(
"nextcloud mail sync completed",
extra={
"event": "nextcloud_sync",
"username": username,
"status": result.get("status") if isinstance(result, dict) else "",
},
"nextcloud mail sync requested",
extra={"event": "nextcloud_sync", "username": username, "wait": wait},
)
return JSONResponse(result)
except Exception as exc:
logger.info(
"nextcloud mail sync failed",
extra={"event": "nextcloud_sync", "username": username, "error": safe_error_detail(exc, "failed")},
)
raise HTTPException(status_code=502, detail=safe_error_detail(exc, "failed to sync nextcloud mail"))
try:
result = nextcloud.sync_mail(username, wait=wait)
logger.info(
"nextcloud mail sync completed",
extra={
"event": "nextcloud_sync",
"username": username,
"status": result.get("status") if isinstance(result, dict) else "",
},
)
return JSONResponse(result)
except HTTPException as exc:
status = "error"
error_detail = str(exc.detail)
raise
except Exception as exc:
status = "error"
error_detail = safe_error_detail(exc, "failed to sync nextcloud mail")
logger.info(
"nextcloud mail sync failed",
extra={"event": "nextcloud_sync", "username": username, "error": error_detail},
)
raise HTTPException(status_code=502, detail=error_detail)
finally:
finished = datetime.now(timezone.utc)
duration_sec = (finished - started).total_seconds()
record_task_run("nextcloud_sync", status, duration_sec)
try:
storage.record_task_run(
None,
"nextcloud_sync",
status,
error_detail or None,
started,
finished,
int(duration_sec * 1000),
)
except Exception:
pass
_record_event(
"nextcloud_sync",
{
"username": username,
"status": status,
"wait": wait,
"error": error_detail,
},
)

View File

@ -38,6 +38,10 @@ ARIADNE_TABLES_SQL = [
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
)
""",
"""
CREATE INDEX IF NOT EXISTS ariadne_events_type_idx
ON ariadne_events (event_type, created_at)
""",
]
ARIADNE_ACCESS_REQUEST_ALTER = [

View File

@ -2,6 +2,7 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
import json
from typing import Any, Iterable
from .database import Database
@ -246,10 +247,86 @@ class Storage:
),
)
def record_event(self, event_type: str, detail: str | None) -> None:
def record_event(self, event_type: str, detail: dict[str, Any] | str | None) -> None:
payload = detail
if isinstance(detail, dict):
payload = json.dumps(detail, ensure_ascii=True)
self._db.execute(
"INSERT INTO ariadne_events (event_type, detail) VALUES (%s, %s)",
(event_type, detail),
(event_type, payload),
)
def list_events(self, limit: int = 200, event_type: str | None = None) -> list[dict[str, Any]]:
limit = max(1, min(int(limit or 200), 500))
if event_type:
return self._db.fetchall(
"""
SELECT id, event_type, detail, created_at
FROM ariadne_events
WHERE event_type = %s
ORDER BY created_at DESC
LIMIT %s
""",
(event_type, limit),
)
return self._db.fetchall(
"""
SELECT id, event_type, detail, created_at
FROM ariadne_events
ORDER BY created_at DESC
LIMIT %s
""",
(limit,),
)
def list_task_runs(
self,
limit: int = 200,
request_code: str | None = None,
task: str | None = None,
) -> list[dict[str, Any]]:
limit = max(1, min(int(limit or 200), 500))
if request_code and task:
return self._db.fetchall(
"""
SELECT id, request_code, task, status, detail, started_at, finished_at, duration_ms
FROM ariadne_task_runs
WHERE request_code = %s AND task = %s
ORDER BY started_at DESC
LIMIT %s
""",
(request_code, task, limit),
)
if request_code:
return self._db.fetchall(
"""
SELECT id, request_code, task, status, detail, started_at, finished_at, duration_ms
FROM ariadne_task_runs
WHERE request_code = %s
ORDER BY started_at DESC
LIMIT %s
""",
(request_code, limit),
)
if task:
return self._db.fetchall(
"""
SELECT id, request_code, task, status, detail, started_at, finished_at, duration_ms
FROM ariadne_task_runs
WHERE task = %s
ORDER BY started_at DESC
LIMIT %s
""",
(task, limit),
)
return self._db.fetchall(
"""
SELECT id, request_code, task, status, detail, started_at, finished_at, duration_ms
FROM ariadne_task_runs
ORDER BY started_at DESC
LIMIT %s
""",
(limit,),
)
@staticmethod

View File

@ -0,0 +1 @@
"""Embedded job manifests for Ariadne-managed tasks."""

View File

@ -0,0 +1,471 @@
# services/comms/guest-name-job.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: guest-name-randomizer
namespace: comms
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "*/1 * * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 1
jobTemplate:
spec:
backoffLimit: 0
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "comms"
vault.hashicorp.com/agent-inject-secret-turn-secret: "kv/data/atlas/comms/turn-shared-secret"
vault.hashicorp.com/agent-inject-template-turn-secret: |
{{- with secret "kv/data/atlas/comms/turn-shared-secret" -}}{{ .Data.data.TURN_STATIC_AUTH_SECRET }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-livekit-primary: "kv/data/atlas/comms/livekit-api"
vault.hashicorp.com/agent-inject-template-livekit-primary: |
{{- with secret "kv/data/atlas/comms/livekit-api" -}}{{ .Data.data.primary }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-bot-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-bot-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "bot-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-seeder-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-seeder-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "seeder-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-matrix: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-matrix: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.matrix }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-homepage: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-homepage: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.homepage }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-admin-secret: "kv/data/atlas/comms/mas-admin-client-runtime"
vault.hashicorp.com/agent-inject-template-mas-admin-secret: |
{{- with secret "kv/data/atlas/comms/mas-admin-client-runtime" -}}{{ .Data.data.client_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-synapse-db-pass: "kv/data/atlas/comms/synapse-db"
vault.hashicorp.com/agent-inject-template-synapse-db-pass: |
{{- with secret "kv/data/atlas/comms/synapse-db" -}}{{ .Data.data.POSTGRES_PASSWORD }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-db-pass: "kv/data/atlas/comms/mas-db"
vault.hashicorp.com/agent-inject-template-mas-db-pass: |
{{- with secret "kv/data/atlas/comms/mas-db" -}}{{ .Data.data.password }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-matrix-shared: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-matrix-shared: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.matrix_shared_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-kc-secret: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-kc-secret: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.keycloak_client_secret }}{{- end -}}
spec:
restartPolicy: Never
serviceAccountName: comms-vault
nodeSelector:
hardware: rpi5
volumes:
- name: vault-scripts
configMap:
name: comms-vault-env
defaultMode: 0555
containers:
- name: rename
image: registry.bstein.dev/bstein/comms-guest-tools:0.1.0
volumeMounts:
- name: vault-scripts
mountPath: /vault/scripts
readOnly: true
env:
- name: SYNAPSE_BASE
value: http://othrys-synapse-matrix-synapse:8008
- name: MAS_ADMIN_CLIENT_ID
value: 01KDXMVQBQ5JNY6SEJPZW6Z8BM
- name: MAS_ADMIN_CLIENT_SECRET_FILE
value: /vault/secrets/mas-admin-secret
- name: MAS_ADMIN_API_BASE
value: http://matrix-authentication-service:8081/api/admin/v1
- name: MAS_TOKEN_URL
value: http://matrix-authentication-service:8080/oauth2/token
- name: SEEDER_USER
value: othrys-seeder
- name: PGHOST
value: postgres-service.postgres.svc.cluster.local
- name: PGPORT
value: "5432"
- name: PGDATABASE
value: synapse
- name: PGUSER
value: synapse
command:
- /bin/sh
- -c
- |
set -euo pipefail
. /vault/scripts/comms_vault_env.sh
python - <<'PY'
import base64
import os
import random
import requests
import time
import urllib.parse
import psycopg2
ADJ = [
"brisk","calm","eager","gentle","merry","nifty","rapid","sunny","witty","zesty",
"amber","bold","bright","crisp","daring","frosty","glad","jolly","lively","mellow",
"quiet","ripe","serene","spry","tidy","vivid","warm","wild","clever","kind",
]
NOUN = [
"otter","falcon","comet","ember","grove","harbor","meadow","raven","river","summit",
"breeze","cedar","cinder","cove","delta","forest","glade","lark","marsh","peak",
"pine","quartz","reef","ridge","sable","sage","shore","thunder","vale","zephyr",
]
BASE = os.environ["SYNAPSE_BASE"]
MAS_ADMIN_CLIENT_ID = os.environ["MAS_ADMIN_CLIENT_ID"]
MAS_ADMIN_CLIENT_SECRET_FILE = os.environ["MAS_ADMIN_CLIENT_SECRET_FILE"]
MAS_ADMIN_API_BASE = os.environ["MAS_ADMIN_API_BASE"].rstrip("/")
MAS_TOKEN_URL = os.environ["MAS_TOKEN_URL"]
SEEDER_USER = os.environ["SEEDER_USER"]
ROOM_ALIAS = "#othrys:live.bstein.dev"
SERVER_NAME = "live.bstein.dev"
STALE_GUEST_MS = 14 * 24 * 60 * 60 * 1000
def mas_admin_token():
    """Obtain a MAS admin access token via client-credentials.

    Reads the client secret from MAS_ADMIN_CLIENT_SECRET_FILE and retries the
    token request up to five times with exponential backoff; re-raises the
    final error when all attempts fail.
    """
    with open(MAS_ADMIN_CLIENT_SECRET_FILE, "r", encoding="utf-8") as fh:
        client_secret = fh.read().strip()
    credentials = base64.b64encode(f"{MAS_ADMIN_CLIENT_ID}:{client_secret}".encode()).decode()
    failure = None
    for backoff_exp in range(5):
        try:
            resp = requests.post(
                MAS_TOKEN_URL,
                headers={"Authorization": f"Basic {credentials}"},
                data={"grant_type": "client_credentials", "scope": "urn:mas:admin"},
                timeout=30,
            )
            resp.raise_for_status()
            return resp.json()["access_token"]
        except Exception as exc:  # noqa: BLE001
            failure = exc
            time.sleep(2 ** backoff_exp)
    raise failure
def mas_user_id(token, username):
r = requests.get(
f"{MAS_ADMIN_API_BASE}/users/by-username/{urllib.parse.quote(username)}",
headers={"Authorization": f"Bearer {token}"},
timeout=30,
)
r.raise_for_status()
return r.json()["data"]["id"]
def mas_personal_session(token, user_id):
r = requests.post(
f"{MAS_ADMIN_API_BASE}/personal-sessions",
headers={"Authorization": f"Bearer {token}"},
json={
"actor_user_id": user_id,
"human_name": "guest-name-randomizer",
"scope": "urn:matrix:client:api:*",
"expires_in": 300,
},
timeout=30,
)
r.raise_for_status()
data = r.json().get("data", {}).get("attributes", {}) or {}
return data["access_token"], r.json()["data"]["id"]
def mas_revoke_session(token, session_id):
requests.post(
f"{MAS_ADMIN_API_BASE}/personal-sessions/{urllib.parse.quote(session_id)}/revoke",
headers={"Authorization": f"Bearer {token}"},
json={},
timeout=30,
)
def resolve_alias(token, alias):
    """Resolve a room alias to its room id via the client directory API.

    Raises requests.HTTPError if the alias cannot be resolved.
    """
    headers = {"Authorization": f"Bearer {token}"}
    enc = urllib.parse.quote(alias)
    # timeout added for consistency with every other HTTP call in this script;
    # without it a stalled Synapse connection hangs the CronJob indefinitely.
    r = requests.get(f"{BASE}/_matrix/client/v3/directory/room/{enc}", headers=headers, timeout=30)
    r.raise_for_status()
    return r.json()["room_id"]
def room_members(token, room_id):
    """Collect member ids and displaynames from a room's m.room.member state.

    Returns:
        tuple[set, set]: (user ids seen as state keys, non-empty displaynames);
        the displaynames feed the duplicate-avoidance set used by the renamer.
    """
    headers = {"Authorization": f"Bearer {token}"}
    # timeout added for consistency with the rest of the script's HTTP calls.
    r = requests.get(
        f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/members",
        headers=headers,
        timeout=30,
    )
    r.raise_for_status()
    members = set()
    existing_names = set()
    for ev in r.json().get("chunk", []):
        user_id = ev.get("state_key")
        if user_id:
            members.add(user_id)
        disp = (ev.get("content") or {}).get("displayname")
        if disp:
            existing_names.add(disp)
    return members, existing_names
def mas_list_users(token):
headers = {"Authorization": f"Bearer {token}"}
users = []
cursor = None
while True:
url = f"{MAS_ADMIN_API_BASE}/users?page[size]=100"
if cursor:
url += f"&page[after]={urllib.parse.quote(cursor)}"
r = requests.get(url, headers=headers, timeout=30)
r.raise_for_status()
data = r.json().get("data", [])
if not data:
break
users.extend(data)
cursor = data[-1].get("meta", {}).get("page", {}).get("cursor")
if not cursor:
break
return users
def synapse_list_users(token):
headers = {"Authorization": f"Bearer {token}"}
users = []
from_token = None
while True:
url = f"{BASE}/_synapse/admin/v2/users?local=true&deactivated=false&limit=100"
if from_token:
url += f"&from={urllib.parse.quote(from_token)}"
r = requests.get(url, headers=headers, timeout=30)
r.raise_for_status()
payload = r.json()
users.extend(payload.get("users", []))
from_token = payload.get("next_token")
if not from_token:
break
return users
def should_prune_guest(entry, now_ms):
    """Return True when a Synapse user entry is a guest idle past STALE_GUEST_MS."""
    if not entry.get("is_guest"):
        return False
    raw_last_seen = entry.get("last_seen_ts")
    if raw_last_seen is None:
        return False
    try:
        last_seen_ms = int(raw_last_seen)
    except (TypeError, ValueError):
        # Malformed timestamps are treated as "keep": never prune on bad data.
        return False
    return now_ms - last_seen_ms > STALE_GUEST_MS
def prune_guest(token, user_id):
    """Best-effort erase of a stale guest via the Synapse admin API.

    Returns True when the account is gone (deleted, accepted, or already
    absent), False otherwise. Failures are printed, never raised, so one bad
    guest cannot abort the whole sweep.
    """
    try:
        resp = requests.delete(
            f"{BASE}/_synapse/admin/v2/users/{urllib.parse.quote(user_id)}",
            headers={"Authorization": f"Bearer {token}"},
            params={"erase": "true"},
            timeout=30,
        )
    except Exception as exc:  # noqa: BLE001
        print(f"guest prune failed for {user_id}: {exc}")
        return False
    # 404 counts as success: the user is already gone.
    if resp.status_code in (200, 202, 204, 404):
        return True
    print(f"guest prune failed for {user_id}: {resp.status_code} {resp.text}")
    return False
def user_id_for_username(username):
    """Map a MAS localpart to a full Matrix user id on this homeserver."""
    return "@" + username + ":live.bstein.dev"
def get_displayname(token, user_id):
    """Fetch a user's profile displayname (None when unset)."""
    headers = {"Authorization": f"Bearer {token}"}
    # timeout added for consistency with the other HTTP calls in this script.
    r = requests.get(
        f"{BASE}/_matrix/client/v3/profile/{urllib.parse.quote(user_id)}",
        headers=headers,
        timeout=30,
    )
    r.raise_for_status()
    return r.json().get("displayname")
def get_displayname_admin(token, user_id):
headers = {"Authorization": f"Bearer {token}"}
r = requests.get(
f"{BASE}/_synapse/admin/v2/users/{urllib.parse.quote(user_id)}",
headers=headers,
timeout=30,
)
if r.status_code == 404:
return None
r.raise_for_status()
return r.json().get("displayname")
def set_displayname(token, room_id, user_id, name, in_room):
    """Set a user's global displayname and, if joined, refresh their member event.

    The profile PUT is the authoritative write and must succeed; the room-state
    PUT only refreshes the name shown in the room and is deliberately
    best-effort (no raise_for_status).
    """
    headers = {"Authorization": f"Bearer {token}"}
    payload = {"displayname": name}
    # timeout added for consistency with the rest of the script's HTTP calls.
    r = requests.put(
        f"{BASE}/_matrix/client/v3/profile/{urllib.parse.quote(user_id)}/displayname",
        headers=headers,
        json=payload,
        timeout=30,
    )
    r.raise_for_status()
    if not in_room:
        return
    state_url = f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/state/m.room.member/{urllib.parse.quote(user_id)}"
    content = {"membership": "join", "displayname": name}
    requests.put(state_url, headers=headers, json=content, timeout=30)
def set_displayname_admin(token, user_id, name):
headers = {"Authorization": f"Bearer {token}"}
payload = {"displayname": name}
r = requests.put(
f"{BASE}/_synapse/admin/v2/users/{urllib.parse.quote(user_id)}",
headers=headers,
json=payload,
timeout=30,
)
if r.status_code in (200, 201, 204):
return True
return False
def needs_rename_username(username):
    """True for auto-generated localparts: all digits, or a 'guest-' prefix."""
    if username.isdigit():
        return True
    return username.startswith("guest-")
def needs_rename_display(display):
    """True when a displayname is missing or still looks auto-generated."""
    if not display:
        return True
    return display.isdigit() or display.startswith("guest-")
# Rename users whose Matrix id is purely numeric directly in the Synapse
# database: give each a random adjective-noun displayname, inserting a
# profiles row when one is missing. Mutates existing_names in place so the
# caller keeps a global view of names already taken.
def db_rename_numeric(existing_names):
profile_rows = []
profile_index = {}
users = []
conn = psycopg2.connect(
host=os.environ["PGHOST"],
port=int(os.environ["PGPORT"]),
dbname=os.environ["PGDATABASE"],
user=os.environ["PGUSER"],
password=os.environ["PGPASSWORD"],
)
try:
# `with conn` commits the transaction on success / rolls back on error.
with conn:
with conn.cursor() as cur:
# NOTE(review): SERVER_NAME's dots are unescaped in this regex, so '.'
# matches any character — harmless for a single homeserver, but confirm
# if multiple similarly-named domains ever share this database.
cur.execute(
"SELECT user_id, full_user_id, displayname FROM profiles WHERE full_user_id ~ %s",
(f"^@\\d+:{SERVER_NAME}$",),
)
profile_rows = cur.fetchall()
profile_index = {row[1]: row for row in profile_rows}
for user_id, full_user_id, display in profile_rows:
if display and not needs_rename_display(display):
continue
# Try up to 30 random adjective-noun combos to find an unused name;
# give up on this user if all candidates collide.
new = None
for _ in range(30):
candidate = f"{random.choice(ADJ)}-{random.choice(NOUN)}"
if candidate not in existing_names:
new = candidate
existing_names.add(candidate)
break
if not new:
continue
cur.execute(
"UPDATE profiles SET displayname = %s WHERE full_user_id = %s",
(new, full_user_id),
)
# Second pass: numeric accounts in `users` that have no profiles row yet.
cur.execute(
"SELECT name FROM users WHERE name ~ %s",
(f"^@\\d+:{SERVER_NAME}$",),
)
users = [row[0] for row in cur.fetchall()]
if not users:
return
cur.execute(
"SELECT user_id, full_user_id FROM profiles WHERE full_user_id = ANY(%s)",
(users,),
)
for existing_full in cur.fetchall():
profile_index.setdefault(existing_full[1], existing_full)
for full_user_id in users:
if full_user_id in profile_index:
continue
localpart = full_user_id.split(":", 1)[0].lstrip("@")
new = None
for _ in range(30):
candidate = f"{random.choice(ADJ)}-{random.choice(NOUN)}"
if candidate not in existing_names:
new = candidate
existing_names.add(candidate)
break
if not new:
continue
# Upsert so a concurrent profile creation cannot make the insert fail.
cur.execute(
"INSERT INTO profiles (user_id, displayname, full_user_id) VALUES (%s, %s, %s) "
"ON CONFLICT (full_user_id) DO UPDATE SET displayname = EXCLUDED.displayname",
(localpart, new, full_user_id),
)
finally:
conn.close()
admin_token = mas_admin_token()
seeder_id = mas_user_id(admin_token, SEEDER_USER)
seeder_token, seeder_session = mas_personal_session(admin_token, seeder_id)
try:
room_id = resolve_alias(seeder_token, ROOM_ALIAS)
members, existing = room_members(seeder_token, room_id)
users = mas_list_users(admin_token)
mas_usernames = set()
for user in users:
attrs = user.get("attributes") or {}
username = attrs.get("username") or ""
if username:
mas_usernames.add(username)
legacy_guest = attrs.get("legacy_guest")
if not username:
continue
if not (legacy_guest or needs_rename_username(username)):
continue
user_id = user_id_for_username(username)
access_token, session_id = mas_personal_session(admin_token, user["id"])
try:
display = get_displayname(access_token, user_id)
if display and not needs_rename_display(display):
continue
new = None
for _ in range(30):
candidate = f"{random.choice(ADJ)}-{random.choice(NOUN)}"
if candidate not in existing:
new = candidate
existing.add(candidate)
break
if not new:
continue
set_displayname(access_token, room_id, user_id, new, user_id in members)
finally:
mas_revoke_session(admin_token, session_id)
try:
entries = synapse_list_users(seeder_token)
except Exception as exc: # noqa: BLE001
print(f"synapse admin list skipped: {exc}")
entries = []
now_ms = int(time.time() * 1000)
for entry in entries:
user_id = entry.get("name") or ""
if not user_id.startswith("@"):
continue
localpart = user_id.split(":", 1)[0].lstrip("@")
if localpart in mas_usernames:
continue
is_guest = entry.get("is_guest")
if is_guest and should_prune_guest(entry, now_ms):
if prune_guest(seeder_token, user_id):
continue
if not (is_guest or needs_rename_username(localpart)):
continue
display = get_displayname_admin(seeder_token, user_id)
if display and not needs_rename_display(display):
continue
new = None
for _ in range(30):
candidate = f"{random.choice(ADJ)}-{random.choice(NOUN)}"
if candidate not in existing:
new = candidate
existing.add(candidate)
break
if not new:
continue
if not set_displayname_admin(seeder_token, user_id, new):
continue
db_rename_numeric(existing)
finally:
mas_revoke_session(admin_token, seeder_session)
PY

View File

@ -0,0 +1,169 @@
# services/comms/pin-othrys-job.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: pin-othrys-invite
namespace: comms
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "*/30 * * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 1
jobTemplate:
spec:
backoffLimit: 0
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "comms"
vault.hashicorp.com/agent-inject-secret-turn-secret: "kv/data/atlas/comms/turn-shared-secret"
vault.hashicorp.com/agent-inject-template-turn-secret: |
{{- with secret "kv/data/atlas/comms/turn-shared-secret" -}}{{ .Data.data.TURN_STATIC_AUTH_SECRET }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-livekit-primary: "kv/data/atlas/comms/livekit-api"
vault.hashicorp.com/agent-inject-template-livekit-primary: |
{{- with secret "kv/data/atlas/comms/livekit-api" -}}{{ .Data.data.primary }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-bot-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-bot-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "bot-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-seeder-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-seeder-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "seeder-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-matrix: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-matrix: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.matrix }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-homepage: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-homepage: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.homepage }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-admin-secret: "kv/data/atlas/comms/mas-admin-client-runtime"
vault.hashicorp.com/agent-inject-template-mas-admin-secret: |
{{- with secret "kv/data/atlas/comms/mas-admin-client-runtime" -}}{{ .Data.data.client_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-synapse-db-pass: "kv/data/atlas/comms/synapse-db"
vault.hashicorp.com/agent-inject-template-synapse-db-pass: |
{{- with secret "kv/data/atlas/comms/synapse-db" -}}{{ .Data.data.POSTGRES_PASSWORD }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-db-pass: "kv/data/atlas/comms/mas-db"
vault.hashicorp.com/agent-inject-template-mas-db-pass: |
{{- with secret "kv/data/atlas/comms/mas-db" -}}{{ .Data.data.password }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-matrix-shared: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-matrix-shared: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.matrix_shared_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-kc-secret: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-kc-secret: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.keycloak_client_secret }}{{- end -}}
spec:
restartPolicy: Never
serviceAccountName: comms-vault
containers:
- name: pin
image: python:3.11-slim
env:
- name: SYNAPSE_BASE
value: http://othrys-synapse-matrix-synapse:8008
- name: AUTH_BASE
value: http://matrix-authentication-service:8080
- name: SEEDER_USER
value: othrys-seeder
command:
- /bin/sh
- -c
- |
set -euo pipefail
. /vault/scripts/comms_vault_env.sh
pip install --no-cache-dir requests >/dev/null
python - <<'PY'
import os, requests, urllib.parse
BASE = os.environ["SYNAPSE_BASE"]
AUTH_BASE = os.environ.get("AUTH_BASE", BASE)
ROOM_ALIAS = "#othrys:live.bstein.dev"
MESSAGE = (
"Invite guests: share https://live.bstein.dev/#/room/#othrys:live.bstein.dev?action=join "
"and choose 'Continue' -> 'Join as guest'."
)
def auth(token): return {"Authorization": f"Bearer {token}"}
def canon_user(user):
    """Canonicalize a username into a full @local:domain Matrix id."""
    name = (user or "").strip()
    # Already fully qualified: pass through unchanged.
    if name.startswith("@") and ":" in name:
        return name
    name = name.lstrip("@")
    if ":" in name:
        return "@" + name
    return "@" + name + ":live.bstein.dev"
def login(user, password):
    """Password-login against the auth service; return the access token.

    Raises requests.HTTPError on a non-2xx response.
    """
    # timeout added so a stalled auth service cannot hang the CronJob forever.
    r = requests.post(
        f"{AUTH_BASE}/_matrix/client/v3/login",
        json={
            "type": "m.login.password",
            "identifier": {"type": "m.id.user", "user": canon_user(user)},
            "password": password,
        },
        timeout=30,
    )
    r.raise_for_status()
    return r.json()["access_token"]
def resolve(alias, token):
enc = urllib.parse.quote(alias)
r = requests.get(f"{BASE}/_matrix/client/v3/directory/room/{enc}", headers=auth(token))
r.raise_for_status()
return r.json()["room_id"]
def get_pinned(room_id, token):
    """Return the room's pinned event ids ([] when no pinned-events state exists)."""
    r = requests.get(
        f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/state/m.room.pinned_events",
        headers=auth(token),
        timeout=30,  # added: avoid hanging the job on a stalled connection
    )
    if r.status_code == 404:
        # No pinned-events state event yet — treat as "nothing pinned".
        return []
    r.raise_for_status()
    return r.json().get("pinned", [])
def get_event(room_id, event_id, token):
r = requests.get(
f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/event/{urllib.parse.quote(event_id)}",
headers=auth(token),
)
if r.status_code == 404:
return None
r.raise_for_status()
return r.json()
def send(room_id, token, body):
    """Send a plain-text m.room.message to room_id; return the new event id."""
    r = requests.post(
        f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/send/m.room.message",
        headers=auth(token),
        json={"msgtype": "m.text", "body": body},
        timeout=30,  # added for consistency; prevents indefinite hangs
    )
    r.raise_for_status()
    return r.json()["event_id"]
def pin(room_id, token, event_id):
r = requests.put(
f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/state/m.room.pinned_events",
headers=auth(token),
json={"pinned": [event_id]},
)
r.raise_for_status()
token = login(os.environ["SEEDER_USER"], os.environ["SEEDER_PASS"])
room_id = resolve(ROOM_ALIAS, token)
for event_id in get_pinned(room_id, token):
ev = get_event(room_id, event_id, token)
if ev and ev.get("content", {}).get("body") == MESSAGE:
raise SystemExit(0)
eid = send(room_id, token, MESSAGE)
pin(room_id, token, eid)
PY
volumeMounts:
- name: vault-scripts
mountPath: /vault/scripts
readOnly: true
volumes:
- name: vault-scripts
configMap:
name: comms-vault-env
defaultMode: 0555

View File

@ -0,0 +1,312 @@
# services/comms/reset-othrys-room-job.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: othrys-room-reset
namespace: comms
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "0 0 1 1 *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 1
jobTemplate:
spec:
backoffLimit: 0
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "comms"
vault.hashicorp.com/agent-inject-secret-turn-secret: "kv/data/atlas/comms/turn-shared-secret"
vault.hashicorp.com/agent-inject-template-turn-secret: |
{{- with secret "kv/data/atlas/comms/turn-shared-secret" -}}{{ .Data.data.TURN_STATIC_AUTH_SECRET }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-livekit-primary: "kv/data/atlas/comms/livekit-api"
vault.hashicorp.com/agent-inject-template-livekit-primary: |
{{- with secret "kv/data/atlas/comms/livekit-api" -}}{{ .Data.data.primary }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-bot-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-bot-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "bot-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-seeder-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-seeder-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "seeder-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-matrix: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-matrix: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.matrix }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-homepage: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-homepage: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.homepage }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-admin-secret: "kv/data/atlas/comms/mas-admin-client-runtime"
vault.hashicorp.com/agent-inject-template-mas-admin-secret: |
{{- with secret "kv/data/atlas/comms/mas-admin-client-runtime" -}}{{ .Data.data.client_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-synapse-db-pass: "kv/data/atlas/comms/synapse-db"
vault.hashicorp.com/agent-inject-template-synapse-db-pass: |
{{- with secret "kv/data/atlas/comms/synapse-db" -}}{{ .Data.data.POSTGRES_PASSWORD }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-db-pass: "kv/data/atlas/comms/mas-db"
vault.hashicorp.com/agent-inject-template-mas-db-pass: |
{{- with secret "kv/data/atlas/comms/mas-db" -}}{{ .Data.data.password }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-matrix-shared: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-matrix-shared: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.matrix_shared_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-kc-secret: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-kc-secret: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.keycloak_client_secret }}{{- end -}}
spec:
restartPolicy: Never
serviceAccountName: comms-vault
containers:
- name: reset
image: python:3.11-slim
env:
- name: SYNAPSE_BASE
value: http://othrys-synapse-matrix-synapse:8008
- name: AUTH_BASE
value: http://matrix-authentication-service:8080
- name: SERVER_NAME
value: live.bstein.dev
- name: ROOM_ALIAS
value: "#othrys:live.bstein.dev"
- name: ROOM_NAME
value: Othrys
- name: PIN_MESSAGE
value: "Invite guests: share https://live.bstein.dev/#/room/#othrys:live.bstein.dev?action=join and choose 'Continue' -> 'Join as guest'."
- name: SEEDER_USER
value: othrys-seeder
- name: BOT_USER
value: atlasbot
command:
- /bin/sh
- -c
- |
set -euo pipefail
. /vault/scripts/comms_vault_env.sh
pip install --no-cache-dir requests >/dev/null
python - <<'PY'
import os
import time
import urllib.parse
import requests
BASE = os.environ["SYNAPSE_BASE"]
AUTH_BASE = os.environ.get("AUTH_BASE", BASE)
SERVER_NAME = os.environ.get("SERVER_NAME", "live.bstein.dev")
ROOM_ALIAS = os.environ.get("ROOM_ALIAS", "#othrys:live.bstein.dev")
ROOM_NAME = os.environ.get("ROOM_NAME", "Othrys")
PIN_MESSAGE = os.environ["PIN_MESSAGE"]
SEEDER_USER = os.environ["SEEDER_USER"]
SEEDER_PASS = os.environ["SEEDER_PASS"]
BOT_USER = os.environ["BOT_USER"]
POWER_LEVELS = {
"ban": 50,
"events": {
"m.room.avatar": 50,
"m.room.canonical_alias": 50,
"m.room.encryption": 100,
"m.room.history_visibility": 100,
"m.room.name": 50,
"m.room.power_levels": 100,
"m.room.server_acl": 100,
"m.room.tombstone": 100,
},
"events_default": 0,
"historical": 100,
"invite": 50,
"kick": 50,
"m.call.invite": 50,
"redact": 50,
"state_default": 50,
"users": {f"@{SEEDER_USER}:{SERVER_NAME}": 100},
"users_default": 0,
}
def auth(token):
return {"Authorization": f"Bearer {token}"}
def canon_user(user):
    """Canonicalize a username into a full @local:SERVER_NAME Matrix id."""
    name = (user or "").strip()
    # Already fully qualified: pass through unchanged.
    if name.startswith("@") and ":" in name:
        return name
    name = name.lstrip("@")
    if ":" in name:
        return "@" + name
    return f"@{name}:{SERVER_NAME}"
def login(user, password):
r = requests.post(
f"{AUTH_BASE}/_matrix/client/v3/login",
json={
"type": "m.login.password",
"identifier": {"type": "m.id.user", "user": canon_user(user)},
"password": password,
},
)
if r.status_code != 200:
raise SystemExit(f"login failed: {r.status_code} {r.text}")
return r.json()["access_token"]
def resolve_alias(token, alias):
enc = urllib.parse.quote(alias)
r = requests.get(f"{BASE}/_matrix/client/v3/directory/room/{enc}", headers=auth(token))
if r.status_code == 404:
return None
r.raise_for_status()
return r.json()["room_id"]
def create_room(token):
r = requests.post(
f"{BASE}/_matrix/client/v3/createRoom",
headers=auth(token),
json={
"preset": "public_chat",
"name": ROOM_NAME,
"room_version": "11",
},
)
r.raise_for_status()
return r.json()["room_id"]
def put_state(token, room_id, ev_type, content):
r = requests.put(
f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/state/{ev_type}",
headers=auth(token),
json=content,
)
r.raise_for_status()
def set_directory_visibility(token, room_id, visibility):
r = requests.put(
f"{BASE}/_matrix/client/v3/directory/list/room/{urllib.parse.quote(room_id)}",
headers=auth(token),
json={"visibility": visibility},
)
r.raise_for_status()
def delete_alias(token, alias):
enc = urllib.parse.quote(alias)
r = requests.delete(f"{BASE}/_matrix/client/v3/directory/room/{enc}", headers=auth(token))
if r.status_code in (200, 202, 404):
return
r.raise_for_status()
def put_alias(token, alias, room_id):
enc = urllib.parse.quote(alias)
r = requests.put(
f"{BASE}/_matrix/client/v3/directory/room/{enc}",
headers=auth(token),
json={"room_id": room_id},
)
r.raise_for_status()
def list_joined_members(token, room_id):
r = requests.get(
f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/members?membership=join",
headers=auth(token),
)
r.raise_for_status()
members = []
for ev in r.json().get("chunk", []):
if ev.get("type") != "m.room.member":
continue
uid = ev.get("state_key")
if not isinstance(uid, str) or not uid.startswith("@"):
continue
members.append(uid)
return members
def invite_user(token, room_id, user_id):
r = requests.post(
f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/invite",
headers=auth(token),
json={"user_id": user_id},
)
if r.status_code in (200, 202):
return
r.raise_for_status()
def send_message(token, room_id, body):
r = requests.post(
f"{BASE}/_matrix/client/v3/rooms/{urllib.parse.quote(room_id)}/send/m.room.message",
headers=auth(token),
json={"msgtype": "m.text", "body": body},
)
r.raise_for_status()
return r.json()["event_id"]
def login_with_retry():
    """Log in as the seeder, retrying up to five times with linear backoff.

    Re-raises the last login error if every attempt fails.
    """
    err = None
    for attempt in range(1, 6):
        try:
            return login(SEEDER_USER, SEEDER_PASS)
        except Exception as exc:  # noqa: BLE001
            err = exc
            time.sleep(attempt * 2)
    raise err
token = login_with_retry()
old_room_id = resolve_alias(token, ROOM_ALIAS)
if not old_room_id:
raise SystemExit(f"alias {ROOM_ALIAS} not found; refusing to proceed")
new_room_id = create_room(token)
# Configure the new room.
put_state(token, new_room_id, "m.room.join_rules", {"join_rule": "public"})
put_state(token, new_room_id, "m.room.guest_access", {"guest_access": "can_join"})
put_state(token, new_room_id, "m.room.history_visibility", {"history_visibility": "shared"})
put_state(token, new_room_id, "m.room.power_levels", POWER_LEVELS)
# Move the alias.
delete_alias(token, ROOM_ALIAS)
put_alias(token, ROOM_ALIAS, new_room_id)
put_state(token, new_room_id, "m.room.canonical_alias", {"alias": ROOM_ALIAS})
set_directory_visibility(token, new_room_id, "public")
# Invite the bot and all joined members of the old room.
bot_user_id = f"@{BOT_USER}:{SERVER_NAME}"
invite_user(token, new_room_id, bot_user_id)
for uid in list_joined_members(token, old_room_id):
if uid == f"@{SEEDER_USER}:{SERVER_NAME}":
continue
localpart = uid.split(":", 1)[0].lstrip("@")
if localpart.isdigit():
continue
invite_user(token, new_room_id, uid)
# Pin the guest invite message in the new room.
event_id = send_message(token, new_room_id, PIN_MESSAGE)
put_state(token, new_room_id, "m.room.pinned_events", {"pinned": [event_id]})
# De-list and tombstone the old room.
set_directory_visibility(token, old_room_id, "private")
put_state(token, old_room_id, "m.room.join_rules", {"join_rule": "invite"})
put_state(token, old_room_id, "m.room.guest_access", {"guest_access": "forbidden"})
put_state(
token,
old_room_id,
"m.room.tombstone",
{"body": "Othrys has been reset. Please join the new room.", "replacement_room": new_room_id},
)
send_message(
token,
old_room_id,
"Othrys was reset. Join the new room at https://live.bstein.dev/#/room/#othrys:live.bstein.dev?action=join",
)
print(f"old_room_id={old_room_id}")
print(f"new_room_id={new_room_id}")
PY
volumeMounts:
- name: vault-scripts
mountPath: /vault/scripts
readOnly: true
volumes:
- name: vault-scripts
configMap:
name: comms-vault-env
defaultMode: 0555

View File

@ -0,0 +1,185 @@
# services/comms/seed-othrys-room.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: seed-othrys-room
namespace: comms
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "*/10 * * * *"
suspend: true
concurrencyPolicy: Forbid
jobTemplate:
spec:
backoffLimit: 0
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "comms"
vault.hashicorp.com/agent-inject-secret-turn-secret: "kv/data/atlas/comms/turn-shared-secret"
vault.hashicorp.com/agent-inject-template-turn-secret: |
{{- with secret "kv/data/atlas/comms/turn-shared-secret" -}}{{ .Data.data.TURN_STATIC_AUTH_SECRET }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-livekit-primary: "kv/data/atlas/comms/livekit-api"
vault.hashicorp.com/agent-inject-template-livekit-primary: |
{{- with secret "kv/data/atlas/comms/livekit-api" -}}{{ .Data.data.primary }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-bot-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-bot-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "bot-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-seeder-pass: "kv/data/atlas/comms/atlasbot-credentials-runtime"
vault.hashicorp.com/agent-inject-template-seeder-pass: |
{{- with secret "kv/data/atlas/comms/atlasbot-credentials-runtime" -}}{{ index .Data.data "seeder-password" }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-matrix: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-matrix: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.matrix }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-chat-homepage: "kv/data/atlas/shared/chat-ai-keys-runtime"
vault.hashicorp.com/agent-inject-template-chat-homepage: |
{{- with secret "kv/data/atlas/shared/chat-ai-keys-runtime" -}}{{ .Data.data.homepage }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-admin-secret: "kv/data/atlas/comms/mas-admin-client-runtime"
vault.hashicorp.com/agent-inject-template-mas-admin-secret: |
{{- with secret "kv/data/atlas/comms/mas-admin-client-runtime" -}}{{ .Data.data.client_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-synapse-db-pass: "kv/data/atlas/comms/synapse-db"
vault.hashicorp.com/agent-inject-template-synapse-db-pass: |
{{- with secret "kv/data/atlas/comms/synapse-db" -}}{{ .Data.data.POSTGRES_PASSWORD }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-db-pass: "kv/data/atlas/comms/mas-db"
vault.hashicorp.com/agent-inject-template-mas-db-pass: |
{{- with secret "kv/data/atlas/comms/mas-db" -}}{{ .Data.data.password }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-matrix-shared: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-matrix-shared: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.matrix_shared_secret }}{{- end -}}
vault.hashicorp.com/agent-inject-secret-mas-kc-secret: "kv/data/atlas/comms/mas-secrets-runtime"
vault.hashicorp.com/agent-inject-template-mas-kc-secret: |
{{- with secret "kv/data/atlas/comms/mas-secrets-runtime" -}}{{ .Data.data.keycloak_client_secret }}{{- end -}}
spec:
restartPolicy: Never
serviceAccountName: comms-vault
containers:
- name: seed
image: python:3.11-slim
env:
- name: SYNAPSE_BASE
value: http://othrys-synapse-matrix-synapse:8008
- name: AUTH_BASE
value: http://matrix-authentication-service:8080
- name: SEEDER_USER
value: othrys-seeder
- name: BOT_USER
value: atlasbot
command:
- /bin/sh
- -c
- |
set -euo pipefail
. /vault/scripts/comms_vault_env.sh
pip install --no-cache-dir requests pyyaml >/dev/null
python - <<'PY'
import os, requests, urllib.parse
BASE = os.environ["SYNAPSE_BASE"]
AUTH_BASE = os.environ.get("AUTH_BASE", BASE)
def canon_user(user):
u = (user or "").strip()
if u.startswith("@") and ":" in u:
return u
u = u.lstrip("@")
if ":" in u:
return f"@{u}"
return f"@{u}:live.bstein.dev"
def login(user, password):
r = requests.post(f"{AUTH_BASE}/_matrix/client/v3/login", json={
"type": "m.login.password",
"identifier": {"type": "m.id.user", "user": canon_user(user)},
"password": password,
})
if r.status_code != 200:
raise SystemExit(f"login failed: {r.status_code} {r.text}")
return r.json()["access_token"]
def ensure_user(token, localpart, password, admin):
headers = {"Authorization": f"Bearer {token}"}
user_id = f"@{localpart}:live.bstein.dev"
url = f"{BASE}/_synapse/admin/v2/users/{urllib.parse.quote(user_id)}"
res = requests.get(url, headers=headers)
if res.status_code == 200:
return
payload = {"password": password, "admin": admin, "deactivated": False}
create = requests.put(url, headers=headers, json=payload)
if create.status_code not in (200, 201):
raise SystemExit(f"create user {user_id} failed: {create.status_code} {create.text}")
def ensure_room(token):
    """Ensure the public #othrys room exists and return its room id.

    Creates the room when the alias does not resolve, then (re)applies the
    desired state events and publishes the room in the public directory.
    The state/directory PUTs are deliberately unchecked (best-effort).
    """
    headers = {"Authorization": f"Bearer {token}"}
    alias = "#othrys:live.bstein.dev"
    alias_enc = "%23othrys%3Alive.bstein.dev"  # URL-encoded form of `alias`
    exists = requests.get(f"{BASE}/_matrix/client/v3/directory/room/{alias_enc}", headers=headers)
    if exists.status_code == 200:
        room_id = exists.json()["room_id"]
    else:
        create = requests.post(f"{BASE}/_matrix/client/v3/createRoom", headers=headers, json={
            "preset": "public_chat",
            "name": "Othrys",
            "room_alias_name": "othrys",
            "initial_state": [],
            "power_level_content_override": {"events_default": 0, "users_default": 0, "state_default": 50},
        })
        # NOTE(review): 409 is accepted as "alias already taken"; Synapse may
        # report an in-use alias as 400 M_ROOM_IN_USE instead -- confirm.
        if create.status_code not in (200, 409):
            raise SystemExit(f"create room failed: {create.status_code} {create.text}")
        exists = requests.get(f"{BASE}/_matrix/client/v3/directory/room/{alias_enc}", headers=headers)
        # assumes the alias now resolves; a failed lookup here raises KeyError
        room_id = exists.json()["room_id"]
    state_events = [
        ("m.room.join_rules", {"join_rule": "public"}),
        ("m.room.guest_access", {"guest_access": "can_join"}),
        ("m.room.history_visibility", {"history_visibility": "shared"}),
        ("m.room.canonical_alias", {"alias": alias}),
    ]
    for ev_type, content in state_events:
        requests.put(f"{BASE}/_matrix/client/v3/rooms/{room_id}/state/{ev_type}", headers=headers, json=content)
    requests.put(f"{BASE}/_matrix/client/v3/directory/list/room/{room_id}", headers=headers, json={"visibility": "public"})
    return room_id
def join_user(token, room_id, user_id):
    """Force-join `user_id` into `room_id` via the Synapse admin API (best-effort, response unchecked)."""
    join_url = f"{BASE}/_synapse/admin/v1/join/{urllib.parse.quote(room_id)}"
    auth = {"Authorization": f"Bearer {token}"}
    requests.post(join_url, headers=auth, json={"user_id": user_id})
def join_all_locals(token, room_id):
    """Page through all active local Synapse users and join each to the room."""
    auth = {"Authorization": f"Bearer {token}"}
    listing_url = f"{BASE}/_synapse/admin/v2/users?local=true&deactivated=false&limit=100"
    collected = []
    page_token = None
    while True:
        page_url = f"{listing_url}&from={page_token}" if page_token else listing_url
        payload = requests.get(page_url, headers=auth).json()
        for entry in payload.get("users", []):
            collected.append(entry["name"])
        page_token = payload.get("next_token")
        if not page_token:
            break
    for uid in collected:
        join_user(token, room_id, uid)
# Bootstrap sequence: log in as the seeder, make sure the seeder and bot
# accounts exist, ensure the shared room, then join the bot plus every
# active local user into it.
token = login(os.environ["SEEDER_USER"], os.environ["SEEDER_PASS"])
ensure_user(token, os.environ["SEEDER_USER"], os.environ["SEEDER_PASS"], admin=True)
ensure_user(token, os.environ["BOT_USER"], os.environ["BOT_PASS"], admin=False)
room_id = ensure_room(token)
join_user(token, room_id, f"@{os.environ['BOT_USER']}:live.bstein.dev")
join_all_locals(token, room_id)
PY
volumeMounts:
- name: synapse-config
mountPath: /config
readOnly: true
- name: vault-scripts
mountPath: /vault/scripts
readOnly: true
volumes:
- name: synapse-config
secret:
secretName: othrys-synapse-matrix-synapse
- name: vault-scripts
configMap:
name: comms-vault-env
defaultMode: 0555

View File

@ -0,0 +1,199 @@
# services/finance/firefly-user-sync-cronjob.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: firefly-user-sync
namespace: finance
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "0 6 * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 3
jobTemplate:
spec:
backoffLimit: 0
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "finance"
vault.hashicorp.com/agent-inject-secret-firefly-env.sh: "kv/data/atlas/finance/firefly-db"
vault.hashicorp.com/agent-inject-template-firefly-env.sh: |
{{ with secret "kv/data/atlas/finance/firefly-db" }}
export DB_CONNECTION="pgsql"
export DB_HOST="{{ .Data.data.DB_HOST }}"
export DB_PORT="{{ .Data.data.DB_PORT }}"
export DB_DATABASE="{{ .Data.data.DB_DATABASE }}"
export DB_USERNAME="{{ .Data.data.DB_USERNAME }}"
export DB_PASSWORD="$(cat /vault/secrets/firefly-db-password)"
{{ end }}
{{ with secret "kv/data/atlas/finance/firefly-secrets" }}
export APP_KEY="$(cat /vault/secrets/firefly-app-key)"
{{ end }}
vault.hashicorp.com/agent-inject-secret-firefly-db-password: "kv/data/atlas/finance/firefly-db"
vault.hashicorp.com/agent-inject-template-firefly-db-password: |
{{- with secret "kv/data/atlas/finance/firefly-db" -}}
{{ .Data.data.DB_PASSWORD }}
{{- end -}}
vault.hashicorp.com/agent-inject-secret-firefly-app-key: "kv/data/atlas/finance/firefly-secrets"
vault.hashicorp.com/agent-inject-template-firefly-app-key: |
{{- with secret "kv/data/atlas/finance/firefly-secrets" -}}
{{ .Data.data.APP_KEY }}
{{- end -}}
spec:
serviceAccountName: finance-vault
restartPolicy: Never
affinity:
nodeAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- weight: 100
preference:
matchExpressions:
- key: hardware
operator: In
values: ["rpi5"]
- weight: 70
preference:
matchExpressions:
- key: hardware
operator: In
values: ["rpi4"]
nodeSelector:
kubernetes.io/arch: arm64
node-role.kubernetes.io/worker: "true"
containers:
- name: sync
image: fireflyiii/core:version-6.4.15
command: ["/bin/sh", "-c"]
args:
- |
set -eu
. /vault/secrets/firefly-env.sh
cat <<'PHP' > /tmp/firefly_user_sync.php
#!/usr/bin/env php
<?php
declare(strict_types=1);
use FireflyIII\Console\Commands\Correction\CreatesGroupMemberships;
use FireflyIII\Models\Role;
use FireflyIII\Repositories\User\UserRepositoryInterface;
use FireflyIII\Support\Facades\FireflyConfig;
use FireflyIII\User;
use Illuminate\Contracts\Console\Kernel as ConsoleKernel;
/** Emit one informational line to STDOUT. */
function log_line(string $message): void
{
    fwrite(STDOUT, sprintf('%s%s', $message, PHP_EOL));
}
/** Emit one diagnostic line to STDERR. */
function error_line(string $message): void
{
    fwrite(STDERR, sprintf('%s%s', $message, PHP_EOL));
}
/**
 * Locate the Firefly III application root.
 *
 * FIREFLY_APP_DIR takes priority, then the usual container paths; the
 * first existing directory containing vendor/autoload.php wins. Returns
 * an empty string when no candidate qualifies.
 */
function find_app_root(): string
{
    $paths = [];
    $override = getenv('FIREFLY_APP_DIR') ?: '';
    if ($override !== '') {
        $paths[] = $override;
    }
    array_push($paths, '/var/www/html', '/var/www/firefly-iii', '/app');
    foreach ($paths as $path) {
        if (is_dir($path) && file_exists($path . '/vendor/autoload.php')) {
            return $path;
        }
    }
    return '';
}
// --- Entry point -----------------------------------------------------------
// Idempotently ensures one Firefly III user: boots the Laravel app, forces
// single-user mode, creates the user if absent (granting the owner role to
// the installation's very first account), unblocks it, resets its password,
// and ensures it has a group membership.
$email = trim((string) getenv('FIREFLY_USER_EMAIL'));
$password = (string) getenv('FIREFLY_USER_PASSWORD');
if ($email === '' || $password === '') {
    error_line('missing FIREFLY_USER_EMAIL or FIREFLY_USER_PASSWORD');
    exit(1);
}
$root = find_app_root();
if ($root === '') {
    error_line('firefly app root not found');
    exit(1);
}
$autoload = $root . '/vendor/autoload.php';
$app_bootstrap = $root . '/bootstrap/app.php';
if (!file_exists($autoload) || !file_exists($app_bootstrap)) {
    error_line('firefly bootstrap files missing');
    exit(1);
}
require $autoload;
$app = require $app_bootstrap;
// Boot the console kernel so config, facades and the DB connection are ready.
$kernel = $app->make(ConsoleKernel::class);
$kernel->bootstrap();
try {
    FireflyConfig::set('single_user_mode', true);
} catch (Throwable $exc) {
    // Best-effort: failing to set this flag should not abort the user sync.
    error_line('failed to enforce single_user_mode: '.$exc->getMessage());
}
$repository = $app->make(UserRepositoryInterface::class);
$existing_user = User::where('email', $email)->first();
// Evaluate "first user" before any insert so the owner role is granted
// exactly when this account is the installation's first.
$first_user = User::count() == 0;
if (!$existing_user) {
    $existing_user = User::create(
        [
            'email' => $email,
            'password' => bcrypt($password),
            'blocked' => false,
            'blocked_code' => null,
        ]
    );
    if ($first_user) {
        $role = Role::where('name', 'owner')->first();
        if ($role) {
            $existing_user->roles()->attach($role);
        }
    }
    log_line(sprintf('created firefly user %s', $email));
} else {
    log_line(sprintf('updating firefly user %s', $email));
}
$existing_user->blocked = false;
$existing_user->blocked_code = null;
$existing_user->save();
// Reset the password through the repository rather than mass-assignment.
$repository->changePassword($existing_user, $password);
CreatesGroupMemberships::createGroupMembership($existing_user);
log_line('firefly user sync complete');
PHP
exec php /tmp/firefly_user_sync.php
env:
- name: APP_ENV
value: production
- name: APP_DEBUG
value: "false"
- name: TZ
value: Etc/UTC

View File

@ -0,0 +1,233 @@
# services/health/wger-admin-ensure-cronjob.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: wger-admin-ensure
namespace: health
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "15 3 * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 3
jobTemplate:
spec:
backoffLimit: 1
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "health"
vault.hashicorp.com/agent-inject-secret-wger-env: "kv/data/atlas/health/wger-db"
vault.hashicorp.com/agent-inject-template-wger-env: |
{{ with secret "kv/data/atlas/health/wger-db" }}
export DJANGO_DB_HOST="{{ .Data.data.DJANGO_DB_HOST }}"
export DJANGO_DB_PORT="{{ .Data.data.DJANGO_DB_PORT }}"
export DJANGO_DB_DATABASE="{{ .Data.data.DJANGO_DB_DATABASE }}"
export DJANGO_DB_USER="{{ .Data.data.DJANGO_DB_USER }}"
export DJANGO_DB_PASSWORD="$(cat /vault/secrets/wger-db-password)"
{{ end }}
{{ with secret "kv/data/atlas/health/wger-secrets" }}
export SECRET_KEY="$(cat /vault/secrets/wger-secret-key)"
export SIGNING_KEY="$(cat /vault/secrets/wger-signing-key)"
{{ end }}
{{ with secret "kv/data/atlas/health/wger-admin" }}
export WGER_ADMIN_USERNAME="$(cat /vault/secrets/wger-admin-username)"
export WGER_ADMIN_PASSWORD="$(cat /vault/secrets/wger-admin-password)"
{{ end }}
vault.hashicorp.com/agent-inject-secret-wger-db-password: "kv/data/atlas/health/wger-db"
vault.hashicorp.com/agent-inject-template-wger-db-password: |
{{- with secret "kv/data/atlas/health/wger-db" -}}
{{ .Data.data.DJANGO_DB_PASSWORD }}
{{- end -}}
vault.hashicorp.com/agent-inject-secret-wger-secret-key: "kv/data/atlas/health/wger-secrets"
vault.hashicorp.com/agent-inject-template-wger-secret-key: |
{{- with secret "kv/data/atlas/health/wger-secrets" -}}
{{ .Data.data.SECRET_KEY }}
{{- end -}}
vault.hashicorp.com/agent-inject-secret-wger-signing-key: "kv/data/atlas/health/wger-secrets"
vault.hashicorp.com/agent-inject-template-wger-signing-key: |
{{- with secret "kv/data/atlas/health/wger-secrets" -}}
{{ .Data.data.SIGNING_KEY }}
{{- end -}}
vault.hashicorp.com/agent-inject-secret-wger-admin-username: "kv/data/atlas/health/wger-admin"
vault.hashicorp.com/agent-inject-template-wger-admin-username: |
{{- with secret "kv/data/atlas/health/wger-admin" -}}
{{ .Data.data.username }}
{{- end -}}
vault.hashicorp.com/agent-inject-secret-wger-admin-password: "kv/data/atlas/health/wger-admin"
vault.hashicorp.com/agent-inject-template-wger-admin-password: |
{{- with secret "kv/data/atlas/health/wger-admin" -}}
{{ .Data.data.password }}
{{- end -}}
spec:
serviceAccountName: health-vault-sync
restartPolicy: Never
affinity:
nodeAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- weight: 100
preference:
matchExpressions:
- key: hardware
operator: In
values: ["rpi5"]
- weight: 70
preference:
matchExpressions:
- key: hardware
operator: In
values: ["rpi4"]
nodeSelector:
kubernetes.io/arch: arm64
node-role.kubernetes.io/worker: "true"
containers:
- name: ensure
image: wger/server@sha256:710588b78af4e0aa0b4d8a8061e4563e16eae80eeaccfe7f9e0d9cbdd7f0cbc5
imagePullPolicy: IfNotPresent
command: ["/bin/sh", "-c"]
args:
- |
set -eu
. /vault/secrets/wger-env
cat <<'PY' > /tmp/wger_user_sync.py
#!/usr/bin/env python3
from __future__ import annotations
import os
import sys
import django
def _env(name: str, default: str = "") -> str:
    """Read an environment variable, whitespace-stripped; non-strings become ''."""
    raw = os.getenv(name, default)
    if not isinstance(raw, str):
        return ""
    return raw.strip()
def _setup_django() -> None:
    """Point Django at wger's settings module (unless overridden) and boot it."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.main")
    django.setup()
def _set_default_gym(user) -> None:
    """Attach the configured default gym to `user`'s profile when unset.

    Every failure path is swallowed on purpose: the gym app may be absent,
    the GymConfig table may be empty/unreadable, or the profile may already
    have a gym -- in all of those cases this is a silent no-op.
    """
    try:
        from wger.gym.models import GymConfig
    except Exception:
        return
    try:
        config = GymConfig.objects.first()
    except Exception:
        return
    if not config or not getattr(config, "default_gym", None):
        return
    profile = getattr(user, "userprofile", None)
    if not profile or getattr(profile, "gym", None):
        return
    profile.gym = config.default_gym
    profile.save()
def _ensure_profile(user) -> None:
    """Force the user's profile into a verified, non-temporary state.

    Users without a profile are ignored. hasattr checks keep this working on
    wger versions lacking either field; the profile is saved unconditionally.
    """
    profile = getattr(user, "userprofile", None)
    if not profile:
        return
    for attr, desired in (("email_verified", True), ("is_temporary", False)):
        if hasattr(profile, attr) and bool(getattr(profile, attr)) != desired:
            setattr(profile, attr, desired)
    profile.save()
def _ensure_admin(username: str, password: str, email: str) -> None:
    """Create-or-update the admin account and reset its password.

    Raises RuntimeError when either credential is blank. Note: only
    is_staff is granted here (not is_superuser), and is_active is set on
    creation only -- an existing deactivated account stays inactive.
    """
    from django.contrib.auth.models import User
    if not username or not password:
        raise RuntimeError("admin username/password missing")
    user, created = User.objects.get_or_create(username=username)
    if created:
        user.is_active = True
    if not user.is_staff:
        user.is_staff = True
    if email:
        user.email = email
    user.set_password(password)
    user.save()
    _ensure_profile(user)
    _set_default_gym(user)
    print(f"ensured admin user {username}")
def _ensure_user(username: str, password: str, email: str) -> None:
    """Create-or-update a regular account and reset its password.

    Raises RuntimeError when either credential is blank. is_active is set
    on creation only; the email is updated only when it actually differs.
    """
    from django.contrib.auth.models import User
    if not username or not password:
        raise RuntimeError("username/password missing")
    user, created = User.objects.get_or_create(username=username)
    if created:
        user.is_active = True
    if email and user.email != email:
        user.email = email
    user.set_password(password)
    user.save()
    _ensure_profile(user)
    _set_default_gym(user)
    action = "created" if created else "updated"
    print(f"{action} user {username}")
def main() -> int:
    """Drive the sync from environment variables; return a process exit code.

    Admin payload: WGER_ADMIN_USERNAME / WGER_ADMIN_PASSWORD (+ optional
    WGER_ADMIN_EMAIL). User payload: WGER_USERNAME (or legacy ONLY_USERNAME)
    plus WGER_PASSWORD. Django is booted only when at least one complete
    payload is present; otherwise this is a successful no-op.
    """
    admin_user = _env("WGER_ADMIN_USERNAME")
    admin_password = _env("WGER_ADMIN_PASSWORD")
    admin_email = _env("WGER_ADMIN_EMAIL")
    username = _env("WGER_USERNAME") or _env("ONLY_USERNAME")
    password = _env("WGER_PASSWORD")
    email = _env("WGER_EMAIL")
    if not any([admin_user and admin_password, username and password]):
        print("no admin or user payload provided; exiting")
        return 0
    _setup_django()
    if admin_user and admin_password:
        _ensure_admin(admin_user, admin_password, admin_email)
    if username and password:
        _ensure_user(username, password, email)
    return 0
if __name__ == "__main__":
sys.exit(main())
PY
exec python3 /tmp/wger_user_sync.py
env:
- name: SITE_URL
value: https://health.bstein.dev
- name: TIME_ZONE
value: Etc/UTC
- name: TZ
value: Etc/UTC
- name: DJANGO_DEBUG
value: "False"
- name: DJANGO_DB_ENGINE
value: django.db.backends.postgresql
- name: DJANGO_CACHE_BACKEND
value: django.core.cache.backends.locmem.LocMemCache
- name: DJANGO_CACHE_LOCATION
value: wger-cache

View File

@ -0,0 +1,219 @@
# services/health/wger-user-sync-cronjob.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: wger-user-sync
namespace: health
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "0 5 * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 3
jobTemplate:
spec:
backoffLimit: 0
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "health"
vault.hashicorp.com/agent-inject-secret-wger-env: "kv/data/atlas/health/wger-db"
vault.hashicorp.com/agent-inject-template-wger-env: |
{{ with secret "kv/data/atlas/health/wger-db" }}
export DJANGO_DB_HOST="{{ .Data.data.DJANGO_DB_HOST }}"
export DJANGO_DB_PORT="{{ .Data.data.DJANGO_DB_PORT }}"
export DJANGO_DB_DATABASE="{{ .Data.data.DJANGO_DB_DATABASE }}"
export DJANGO_DB_USER="{{ .Data.data.DJANGO_DB_USER }}"
export DJANGO_DB_PASSWORD="$(cat /vault/secrets/wger-db-password)"
{{ end }}
{{ with secret "kv/data/atlas/health/wger-secrets" }}
export SECRET_KEY="$(cat /vault/secrets/wger-secret-key)"
export SIGNING_KEY="$(cat /vault/secrets/wger-signing-key)"
{{ end }}
vault.hashicorp.com/agent-inject-secret-wger-db-password: "kv/data/atlas/health/wger-db"
vault.hashicorp.com/agent-inject-template-wger-db-password: |
{{- with secret "kv/data/atlas/health/wger-db" -}}
{{ .Data.data.DJANGO_DB_PASSWORD }}
{{- end -}}
vault.hashicorp.com/agent-inject-secret-wger-secret-key: "kv/data/atlas/health/wger-secrets"
vault.hashicorp.com/agent-inject-template-wger-secret-key: |
{{- with secret "kv/data/atlas/health/wger-secrets" -}}
{{ .Data.data.SECRET_KEY }}
{{- end -}}
vault.hashicorp.com/agent-inject-secret-wger-signing-key: "kv/data/atlas/health/wger-secrets"
vault.hashicorp.com/agent-inject-template-wger-signing-key: |
{{- with secret "kv/data/atlas/health/wger-secrets" -}}
{{ .Data.data.SIGNING_KEY }}
{{- end -}}
spec:
serviceAccountName: health-vault-sync
restartPolicy: Never
affinity:
nodeAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- weight: 100
preference:
matchExpressions:
- key: hardware
operator: In
values: ["rpi5"]
- weight: 70
preference:
matchExpressions:
- key: hardware
operator: In
values: ["rpi4"]
nodeSelector:
kubernetes.io/arch: arm64
node-role.kubernetes.io/worker: "true"
containers:
- name: sync
image: wger/server@sha256:710588b78af4e0aa0b4d8a8061e4563e16eae80eeaccfe7f9e0d9cbdd7f0cbc5
imagePullPolicy: IfNotPresent
command: ["/bin/sh", "-c"]
args:
- |
set -eu
. /vault/secrets/wger-env
cat <<'PY' > /tmp/wger_user_sync.py
#!/usr/bin/env python3
from __future__ import annotations
import os
import sys
import django
def _env(name: str, default: str = "") -> str:
    """Read an environment variable, whitespace-stripped; non-strings become ''."""
    raw = os.getenv(name, default)
    if not isinstance(raw, str):
        return ""
    return raw.strip()
def _setup_django() -> None:
    """Point Django at wger's settings module (unless overridden) and boot it."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.main")
    django.setup()
def _set_default_gym(user) -> None:
    """Attach the configured default gym to `user`'s profile when unset.

    Every failure path is swallowed on purpose: the gym app may be absent,
    the GymConfig table may be empty/unreadable, or the profile may already
    have a gym -- in all of those cases this is a silent no-op.
    """
    try:
        from wger.gym.models import GymConfig
    except Exception:
        return
    try:
        config = GymConfig.objects.first()
    except Exception:
        return
    if not config or not getattr(config, "default_gym", None):
        return
    profile = getattr(user, "userprofile", None)
    if not profile or getattr(profile, "gym", None):
        return
    profile.gym = config.default_gym
    profile.save()
def _ensure_profile(user) -> None:
    """Force the user's profile into a verified, non-temporary state.

    Users without a profile are ignored. hasattr checks keep this working on
    wger versions lacking either field; the profile is saved unconditionally.
    """
    profile = getattr(user, "userprofile", None)
    if not profile:
        return
    for attr, desired in (("email_verified", True), ("is_temporary", False)):
        if hasattr(profile, attr) and bool(getattr(profile, attr)) != desired:
            setattr(profile, attr, desired)
    profile.save()
def _ensure_admin(username: str, password: str, email: str) -> None:
    """Create-or-update the admin account and reset its password.

    Raises RuntimeError when either credential is blank. Note: only
    is_staff is granted here (not is_superuser), and is_active is set on
    creation only -- an existing deactivated account stays inactive.
    """
    from django.contrib.auth.models import User
    if not username or not password:
        raise RuntimeError("admin username/password missing")
    user, created = User.objects.get_or_create(username=username)
    if created:
        user.is_active = True
    if not user.is_staff:
        user.is_staff = True
    if email:
        user.email = email
    user.set_password(password)
    user.save()
    _ensure_profile(user)
    _set_default_gym(user)
    print(f"ensured admin user {username}")
def _ensure_user(username: str, password: str, email: str) -> None:
    """Create-or-update a regular account and reset its password.

    Raises RuntimeError when either credential is blank. is_active is set
    on creation only; the email is updated only when it actually differs.
    """
    from django.contrib.auth.models import User
    if not username or not password:
        raise RuntimeError("username/password missing")
    user, created = User.objects.get_or_create(username=username)
    if created:
        user.is_active = True
    if email and user.email != email:
        user.email = email
    user.set_password(password)
    user.save()
    _ensure_profile(user)
    _set_default_gym(user)
    action = "created" if created else "updated"
    print(f"{action} user {username}")
def main() -> int:
    """Drive the sync from environment variables; return a process exit code.

    Admin payload: WGER_ADMIN_USERNAME / WGER_ADMIN_PASSWORD (+ optional
    WGER_ADMIN_EMAIL). User payload: WGER_USERNAME (or legacy ONLY_USERNAME)
    plus WGER_PASSWORD. Django is booted only when at least one complete
    payload is present; otherwise this is a successful no-op.
    """
    admin_user = _env("WGER_ADMIN_USERNAME")
    admin_password = _env("WGER_ADMIN_PASSWORD")
    admin_email = _env("WGER_ADMIN_EMAIL")
    username = _env("WGER_USERNAME") or _env("ONLY_USERNAME")
    password = _env("WGER_PASSWORD")
    email = _env("WGER_EMAIL")
    if not any([admin_user and admin_password, username and password]):
        print("no admin or user payload provided; exiting")
        return 0
    _setup_django()
    if admin_user and admin_password:
        _ensure_admin(admin_user, admin_password, admin_email)
    if username and password:
        _ensure_user(username, password, email)
    return 0
if __name__ == "__main__":
sys.exit(main())
PY
exec python3 /tmp/wger_user_sync.py
env:
- name: SITE_URL
value: https://health.bstein.dev
- name: TIME_ZONE
value: Etc/UTC
- name: TZ
value: Etc/UTC
- name: DJANGO_DEBUG
value: "False"
- name: DJANGO_DB_ENGINE
value: django.db.backends.postgresql
- name: DJANGO_CACHE_BACKEND
value: django.core.cache.backends.locmem.LocMemCache
- name: DJANGO_CACHE_LOCATION
value: wger-cache

View File

@ -0,0 +1,335 @@
# services/nextcloud-mail-sync/cronjob.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: nextcloud-mail-sync
namespace: nextcloud
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "0 5 * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 3
failedJobsHistoryLimit: 1
jobTemplate:
spec:
template:
metadata:
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "nextcloud"
vault.hashicorp.com/agent-inject-secret-nextcloud-env.sh: "kv/data/atlas/nextcloud/nextcloud-db"
vault.hashicorp.com/agent-inject-template-nextcloud-env.sh: |
{{ with secret "kv/data/atlas/nextcloud/nextcloud-db" }}
export POSTGRES_DB="{{ .Data.data.database }}"
export POSTGRES_USER="{{ index .Data.data "db-username" }}"
export POSTGRES_PASSWORD="{{ index .Data.data "db-password" }}"
{{ end }}
{{ with secret "kv/data/atlas/nextcloud/nextcloud-admin" }}
export NEXTCLOUD_ADMIN_USER="{{ index .Data.data "admin-user" }}"
export NEXTCLOUD_ADMIN_PASSWORD="{{ index .Data.data "admin-password" }}"
{{ end }}
export ADMIN_USER="${NEXTCLOUD_ADMIN_USER}"
export ADMIN_PASS="${NEXTCLOUD_ADMIN_PASSWORD}"
{{ with secret "kv/data/atlas/nextcloud/nextcloud-oidc" }}
export OIDC_CLIENT_ID="{{ index .Data.data "client-id" }}"
export OIDC_CLIENT_SECRET="{{ index .Data.data "client-secret" }}"
{{ end }}
{{ with secret "kv/data/atlas/shared/postmark-relay" }}
export SMTP_NAME="{{ index .Data.data "apikey" }}"
export SMTP_PASSWORD="{{ index .Data.data "apikey" }}"
{{ end }}
{{ with secret "kv/data/atlas/shared/keycloak-admin" }}
export KC_ADMIN_USER="{{ .Data.data.username }}"
export KC_ADMIN_PASS="{{ .Data.data.password }}"
{{ end }}
spec:
nodeSelector:
kubernetes.io/arch: arm64
node-role.kubernetes.io/worker: "true"
restartPolicy: OnFailure
securityContext:
runAsUser: 0
runAsGroup: 0
serviceAccountName: nextcloud-vault
containers:
- name: mail-sync
image: nextcloud:29-apache
imagePullPolicy: IfNotPresent
command:
- /bin/sh
- -c
env:
- name: KC_BASE
value: http://keycloak.sso.svc.cluster.local
- name: KC_REALM
value: atlas
- name: MAILU_DOMAIN
value: bstein.dev
- name: POSTGRES_HOST
value: postgres-service.postgres.svc.cluster.local
resources:
requests:
cpu: 100m
memory: 256Mi
limits:
cpu: 500m
memory: 512Mi
volumeMounts:
- name: nextcloud-web
mountPath: /var/www/html
- name: nextcloud-config-pvc
mountPath: /var/www/html/config
- name: nextcloud-custom-apps
mountPath: /var/www/html/custom_apps
- name: nextcloud-user-data
mountPath: /var/www/html/data
args:
- |
set -eu
. /vault/secrets/nextcloud-env.sh
cat <<'SCRIPT' > /tmp/nextcloud-mail-sync.sh
#!/bin/bash
set -euo pipefail
KC_BASE="${KC_BASE:?}"
KC_REALM="${KC_REALM:?}"
KC_ADMIN_USER="${KC_ADMIN_USER:?}"
KC_ADMIN_PASS="${KC_ADMIN_PASS:?}"
MAILU_DOMAIN="${MAILU_DOMAIN:?}"
ONLY_USERNAME="${ONLY_USERNAME:-}"
POSTGRES_HOST="${POSTGRES_HOST:-}"
POSTGRES_DB="${POSTGRES_DB:-}"
POSTGRES_USER="${POSTGRES_USER:-}"
POSTGRES_PASSWORD="${POSTGRES_PASSWORD:-}"
if ! command -v jq >/dev/null 2>&1; then
apt-get update && apt-get install -y jq curl >/dev/null
fi
# Install the postgres client on demand; no-op when psql is already present.
ensure_psql() {
  command -v psql >/dev/null 2>&1 && return 0
  apt-get update && apt-get install -y postgresql-client >/dev/null
}
# Flip editor_mode to 'richtext' for the given oc_mail_accounts ids via
# direct SQL (the Mail app exposes no occ command for this setting).
# No-op when no ids were passed or the postgres env is incomplete. The ids
# come from occ output (numeric account ids), not user input, so the IN()
# interpolation below is not an injection vector.
set_editor_mode_richtext() {
  local ids=("$@")
  if [[ ${#ids[@]} -eq 0 ]]; then
    return 0
  fi
  if [[ -z "${POSTGRES_HOST}" || -z "${POSTGRES_DB}" || -z "${POSTGRES_USER}" || -z "${POSTGRES_PASSWORD}" ]]; then
    echo "WARN: missing postgres env; cannot update mail editor_mode" >&2
    return 0
  fi
  ensure_psql
  local ids_csv
  # Join the ids with commas for the SQL IN() list.
  ids_csv=$(IFS=,; echo "${ids[*]}")
  PGPASSWORD="${POSTGRES_PASSWORD}" psql \
    -h "${POSTGRES_HOST}" \
    -U "${POSTGRES_USER}" \
    -d "${POSTGRES_DB}" \
    -v ON_ERROR_STOP=1 \
    -c "UPDATE oc_mail_accounts SET editor_mode='richtext' WHERE id IN (${ids_csv}) AND editor_mode <> 'richtext';" \
    >/dev/null
}
# Print "<account_id><TAB><email>" for each of the user's mail accounts,
# parsed from `occ mail:account:export`. Returns 1 when the export fails so
# the caller can skip this user instead of acting on partial data.
list_mail_accounts() {
  local user_id="${1}"
  local export_out
  # Nextcloud Mail does not provide a list command; export is safe (does not print passwords).
  if ! export_out=$(/usr/sbin/runuser -u www-data -- php occ mail:account:export "${user_id}"); then
    echo "WARN: unable to export mail accounts for ${user_id}; skipping sync for safety" >&2
    return 1
  fi
  # "Account <id>:" header lines carry the id; "- E-mail: <addr>" lines pair
  # it with the address. Output is de-duplicated and sorted.
  awk -v OFS='\t' '
  BEGIN { IGNORECASE=1; id="" }
  $1 == "Account" { id=$2; sub(":", "", id); next }
  $1 == "-" && tolower($2) ~ /^e-?mail:$/ { if (id) print id, $3 }
  ' <<<"${export_out}" | sort -u
}
# Obtain a Keycloak admin token via the admin-cli password grant.
token=$(
  curl -fsS \
    --data-urlencode "grant_type=password" \
    --data-urlencode "client_id=admin-cli" \
    --data-urlencode "username=${KC_ADMIN_USER}" \
    --data-urlencode "password=${KC_ADMIN_PASS}" \
    "${KC_BASE}/realms/master/protocol/openid-connect/token" | jq -r '.access_token // empty'
)
if [[ -z "${token}" || "${token}" == "null" ]]; then
  echo "Failed to obtain admin token"
  exit 1
fi
# occ must be run from the Nextcloud web root.
cd /var/www/html
# Fetch the realm's users, or exactly one user when ONLY_USERNAME is set.
# NOTE(review): max=2000 caps the listing; realms beyond that need paging.
kc_users_url="${KC_BASE}/admin/realms/${KC_REALM}/users?max=2000&briefRepresentation=false"
if [[ -n "${ONLY_USERNAME}" ]]; then
  username_q=$(jq -nr --arg v "${ONLY_USERNAME}" '$v|@uri')
  kc_users_url="${KC_BASE}/admin/realms/${KC_REALM}/users?username=${username_q}&exact=true&max=1&briefRepresentation=false"
fi
users=$(curl -fsS -H "Authorization: Bearer ${token}" "${kc_users_url}")
if ! jq -e 'type == "array"' >/dev/null 2>&1 <<<"${users}"; then
  echo "ERROR: Keycloak user list is not an array; aborting sync" >&2
  exit 1
fi
# Write sync results (primary email, managed-account count, timestamp) back
# into the Keycloak user's attributes. Fetches the full representation first
# so unrelated attributes are preserved; returns 1 on fetch failure so the
# caller can continue with the next user.
kc_set_user_mail_meta() {
  local user_id="${1}"
  local primary_email="${2}"
  local mailu_account_count="${3}"
  local synced_at="${4}"
  # Fetch the full user representation so we don't accidentally clobber attributes.
  local user_json updated_json
  if ! user_json=$(curl -fsS -H "Authorization: Bearer ${token}" \
    "${KC_BASE}/admin/realms/${KC_REALM}/users/${user_id}"); then
    echo "WARN: unable to fetch Keycloak user ${user_id} for metadata writeback" >&2
    return 1
  fi
  # NOTE(review): del(.access) strips the admin-API "access" metadata before
  # the PUT -- presumably Keycloak rejects it otherwise; confirm.
  updated_json=$(
    jq -c \
      --arg primary_email "${primary_email}" \
      --arg mailu_account_count "${mailu_account_count}" \
      --arg synced_at "${synced_at}" \
      '
      .attributes = (.attributes // {}) |
      .attributes.nextcloud_mail_primary_email = [$primary_email] |
      .attributes.nextcloud_mail_account_count = [$mailu_account_count] |
      .attributes.nextcloud_mail_synced_at = [$synced_at] |
      del(.access)
      ' <<<"${user_json}"
  )
  curl -fsS -X PUT \
    -H "Authorization: Bearer ${token}" \
    -H "Content-Type: application/json" \
    -d "${updated_json}" \
    "${KC_BASE}/admin/realms/${KC_REALM}/users/${user_id}" >/dev/null
}
while read -r user; do
user_id=$(jq -r '.id' <<<"${user}")
username=$(jq -r '.username' <<<"${user}")
keycloak_email=$(echo "${user}" | jq -r '.email // empty')
mailu_email=$(echo "${user}" | jq -r '(.attributes.mailu_email[0] // .attributes.mailu_email // empty)')
app_pw=$(echo "${user}" | jq -r '(.attributes.mailu_app_password[0] // .attributes.mailu_app_password // empty)')
if [[ -z "${mailu_email}" ]]; then
if [[ -n "${keycloak_email}" && "${keycloak_email,,}" == *"@${MAILU_DOMAIN,,}" ]]; then
mailu_email="${keycloak_email}"
else
mailu_email="${username}@${MAILU_DOMAIN}"
fi
fi
[[ -z "${mailu_email}" || -z "${app_pw}" ]] && continue
if ! accounts=$(list_mail_accounts "${username}"); then
continue
fi
# Manage only internal Mailu-domain accounts; leave any external accounts untouched.
mailu_accounts=$(awk -v d="${MAILU_DOMAIN,,}" 'tolower($2) ~ ("@" d "$") {print}' <<<"${accounts}" || true)
desired_email="${mailu_email}"
primary_id=""
primary_email=""
if [[ -n "${mailu_accounts}" ]]; then
while IFS=$'\t' read -r account_id account_email; do
if [[ -z "${primary_id}" ]]; then
primary_id="${account_id}"
primary_email="${account_email}"
fi
if [[ "${account_email,,}" == "${desired_email,,}" ]]; then
primary_id="${account_id}"
primary_email="${account_email}"
break
fi
done <<<"${mailu_accounts}"
echo "Updating ${username} mail account ${primary_id} (${primary_email})"
/usr/sbin/runuser -u www-data -- php occ mail:account:update -q "${primary_id}" \
--name "${username}" \
--email "${desired_email}" \
--imap-host mail.bstein.dev \
--imap-port 993 \
--imap-ssl-mode ssl \
--imap-user "${desired_email}" \
--imap-password "${app_pw}" \
--smtp-host mail.bstein.dev \
--smtp-port 587 \
--smtp-ssl-mode tls \
--smtp-user "${desired_email}" \
--smtp-password "${app_pw}" \
--auth-method password >/dev/null 2>&1 || true
# Remove any extra Mailu-domain accounts for this user to prevent duplicates.
while IFS=$'\t' read -r account_id account_email; do
if [[ "${account_id}" == "${primary_id}" ]]; then
continue
fi
echo "Deleting extra mail account ${account_id} (${account_email})"
/usr/sbin/runuser -u www-data -- php occ mail:account:delete -q "${account_id}" >/dev/null 2>&1 || true
done <<<"${mailu_accounts}"
else
echo "Creating mail account for ${username} (${desired_email})"
/usr/sbin/runuser -u www-data -- php occ mail:account:create -q \
"${username}" "${username}" "${desired_email}" \
--imap-host mail.bstein.dev \
--imap-port 993 \
--imap-ssl-mode ssl \
--imap-user "${desired_email}" \
--imap-password "${app_pw}" \
--smtp-host mail.bstein.dev \
--smtp-port 587 \
--smtp-ssl-mode tls \
--smtp-user "${desired_email}" \
--smtp-password "${app_pw}" \
--auth-method password >/dev/null 2>&1 || true
primary_id=$(list_mail_accounts "${username}" | awk -v d="${desired_email,,}" 'tolower($2) == d {print $1; exit}')
primary_email="${desired_email}"
fi
if [[ -n "${primary_id}" ]]; then
set_editor_mode_richtext "${primary_id}"
fi
# Count the managed Mailu-domain accounts. grep -c . ignores the single
# empty line an empty here-string produces, so zero accounts count as 0
# (wc -l <<<"" reports 1, which also defeated the -z fallback below).
mailu_account_count=$(grep -c . <<<"${mailu_accounts}" || true)
synced_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
kc_set_user_mail_meta "${user_id}" "${primary_email}" "${mailu_account_count}" "${synced_at}" || true
done < <(jq -c '.[]' <<<"${users}")
SCRIPT
exec /bin/bash /tmp/nextcloud-mail-sync.sh
volumes:
- name: nextcloud-config-pvc
persistentVolumeClaim:
claimName: nextcloud-config-v2
- name: nextcloud-custom-apps
persistentVolumeClaim:
claimName: nextcloud-custom-apps-v2
- name: nextcloud-user-data
persistentVolumeClaim:
claimName: nextcloud-user-data-v2
- name: nextcloud-web
persistentVolumeClaim:
claimName: nextcloud-web-v2

View File

@ -0,0 +1,308 @@
# services/vault/k8s-auth-config-cronjob.yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: vault-k8s-auth-config
namespace: vault
labels:
atlas.bstein.dev/glue: "true"
spec:
schedule: "*/15 * * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 3
jobTemplate:
spec:
backoffLimit: 1
template:
spec:
serviceAccountName: vault-admin
restartPolicy: Never
nodeSelector:
kubernetes.io/arch: arm64
node-role.kubernetes.io/worker: "true"
containers:
- name: configure-k8s-auth
image: hashicorp/vault:1.17.6
imagePullPolicy: IfNotPresent
command: ["/bin/sh", "-c"]
args:
- |
set -eu
cat <<'SH' > /tmp/vault_k8s_auth_configure.sh
#!/usr/bin/env sh
set -eu
# Prefix every log line with the job tag for easy grepping in pod logs.
log() { echo "[vault-k8s-auth] $*"; }
# Run a vault CLI command with up to 6 attempts and linearly growing backoff.
# On success the winning attempt's combined stdout+stderr is printed (no
# trailing newline) for use in command substitution; failed attempts'
# output is discarded.
vault_cmd() {
  for attempt in 1 2 3 4 5 6; do
    # Temporarily drop -e so a failed attempt doesn't abort the retry loop.
    set +e
    output="$(vault "$@" 2>&1)"
    status=$?
    set -e
    if [ "${status}" -eq 0 ]; then
      printf '%s' "${output}"
      return 0
    fi
    log "vault command failed; retrying (${attempt}/6)"
    # NOTE(review): this also sleeps once more after the final attempt
    # before giving up.
    sleep $((attempt * 2))
  done
  log "vault command failed; giving up"
  return 1
}
# Ensure VAULT_TOKEN is set: when it was not provided, log in via the
# kubernetes auth method using this pod's service-account JWT and the role
# named by VAULT_K8S_ROLE (default "vault"). Exits 1 on login failure.
ensure_token() {
  if [ -n "${VAULT_TOKEN:-}" ]; then
    return
  fi
  role="${VAULT_K8S_ROLE:-vault}"
  jwt="$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)"
  if ! VAULT_TOKEN="$(vault_cmd write -field=token auth/kubernetes/login role="${role}" jwt="${jwt}")"; then
    log "kubernetes auth login failed; set VAULT_TOKEN or fix role ${role}"
    exit 1
  fi
  export VAULT_TOKEN
}
if ! status_json="$(vault_cmd status -format=json)"; then
log "vault status failed; check VAULT_ADDR and VAULT_TOKEN"
exit 1
fi
if ! printf '%s' "${status_json}" | grep -q '"initialized":[[:space:]]*true'; then
log "vault not initialized; skipping"
exit 0
fi
if printf '%s' "${status_json}" | grep -q '"sealed":[[:space:]]*true'; then
log "vault sealed; skipping"
exit 0
fi
ensure_token
k8s_host="https://${KUBERNETES_SERVICE_HOST}:443"
k8s_ca="$(cat /var/run/secrets/kubernetes.io/serviceaccount/ca.crt)"
k8s_token="$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)"
role_ttl="${VAULT_K8S_ROLE_TTL:-1h}"
token_reviewer_jwt="${VAULT_K8S_TOKEN_REVIEWER_JWT:-}"
if [ -z "${token_reviewer_jwt}" ] && [ -n "${VAULT_K8S_TOKEN_REVIEWER_JWT_FILE:-}" ] && [ -r "${VAULT_K8S_TOKEN_REVIEWER_JWT_FILE}" ]; then
token_reviewer_jwt="$(cat "${VAULT_K8S_TOKEN_REVIEWER_JWT_FILE}")"
fi
if [ -z "${token_reviewer_jwt}" ]; then
token_reviewer_jwt="${k8s_token}"
fi
if ! vault_cmd auth list -format=json | grep -q '"kubernetes/"'; then
log "enabling kubernetes auth"
vault_cmd auth enable kubernetes
fi
log "configuring kubernetes auth"
vault_cmd write auth/kubernetes/config \
token_reviewer_jwt="${token_reviewer_jwt}" \
kubernetes_host="${k8s_host}" \
kubernetes_ca_cert="${k8s_ca}"
write_raw_policy() {
name="$1"
body="$2"
log "writing policy ${name}"
printf '%s\n' "${body}" | vault_cmd policy write "${name}" -
}
write_policy_and_role() {
role="$1"
namespace="$2"
service_accounts="$3"
read_paths="$4"
write_paths="$5"
policy_body=""
for path in ${read_paths}; do
policy_body="${policy_body}
path \"kv/data/atlas/${path}\" {
capabilities = [\"read\"]
}
path \"kv/metadata/atlas/${path}\" {
capabilities = [\"list\"]
}
"
done
for path in ${write_paths}; do
policy_body="${policy_body}
path \"kv/data/atlas/${path}\" {
capabilities = [\"create\", \"update\", \"read\"]
}
path \"kv/metadata/atlas/${path}\" {
capabilities = [\"list\"]
}
"
done
log "writing policy ${role}"
printf '%s\n' "${policy_body}" | vault_cmd policy write "${role}" -
log "writing role ${role}"
vault_cmd write "auth/kubernetes/role/${role}" \
bound_service_account_names="${service_accounts}" \
bound_service_account_namespaces="${namespace}" \
policies="${role}" \
ttl="${role_ttl}"
}
vault_admin_policy='
path "sys/auth" {
capabilities = ["read"]
}
path "sys/auth/*" {
capabilities = ["create", "update", "delete", "sudo", "read"]
}
path "auth/kubernetes/*" {
capabilities = ["create", "update", "read"]
}
path "auth/oidc/*" {
capabilities = ["create", "update", "read"]
}
path "sys/policies/acl" {
capabilities = ["list"]
}
path "sys/policies/acl/*" {
capabilities = ["create", "update", "read"]
}
path "sys/internal/ui/mounts" {
capabilities = ["read"]
}
path "sys/mounts" {
capabilities = ["read"]
}
path "sys/mounts/auth/*" {
capabilities = ["read", "update", "sudo"]
}
path "kv/data/atlas/vault/*" {
capabilities = ["read"]
}
path "kv/metadata/atlas/vault/*" {
capabilities = ["list"]
}
path "kv/data/*" {
capabilities = ["create", "update", "read", "delete", "patch"]
}
path "kv/metadata" {
capabilities = ["list"]
}
path "kv/metadata/*" {
capabilities = ["read", "list", "delete"]
}
path "kv/data/atlas/shared/*" {
capabilities = ["create", "update", "read", "patch"]
}
path "kv/metadata/atlas/shared/*" {
capabilities = ["list"]
}
'
write_raw_policy "vault-admin" "${vault_admin_policy}"
dev_kv_policy='
path "kv/metadata" {
capabilities = ["list"]
}
path "kv/metadata/atlas" {
capabilities = ["list"]
}
path "kv/metadata/atlas/shared" {
capabilities = ["list"]
}
path "kv/metadata/atlas/shared/*" {
capabilities = ["list"]
}
path "kv/data/atlas/shared/*" {
capabilities = ["read"]
}
'
write_raw_policy "dev-kv" "${dev_kv_policy}"
log "writing role vault-admin"
vault_cmd write "auth/kubernetes/role/vault-admin" \
bound_service_account_names="vault-admin" \
bound_service_account_namespaces="vault" \
policies="vault-admin" \
ttl="${role_ttl}"
write_policy_and_role "outline" "outline" "outline-vault" \
"outline/* shared/postmark-relay" ""
write_policy_and_role "planka" "planka" "planka-vault" \
"planka/* shared/postmark-relay" ""
write_policy_and_role "bstein-dev-home" "bstein-dev-home" "bstein-dev-home,bstein-dev-home-vault-sync" \
"portal/* shared/chat-ai-keys-runtime shared/portal-e2e-client shared/postmark-relay mailu/mailu-initial-account-secret shared/harbor-pull" ""
write_policy_and_role "gitea" "gitea" "gitea-vault" \
"gitea/*" ""
write_policy_and_role "vaultwarden" "vaultwarden" "vaultwarden-vault" \
"vaultwarden/* mailu/mailu-initial-account-secret" ""
write_policy_and_role "sso" "sso" "sso-vault,sso-vault-sync,mas-secrets-ensure" \
"sso/* portal/bstein-dev-home-keycloak-admin shared/keycloak-admin shared/portal-e2e-client shared/postmark-relay shared/harbor-pull" ""
write_policy_and_role "mailu-mailserver" "mailu-mailserver" "mailu-vault-sync" \
"mailu/* shared/postmark-relay shared/harbor-pull" ""
write_policy_and_role "harbor" "harbor" "harbor-vault-sync" \
"harbor/* shared/harbor-pull" ""
write_policy_and_role "nextcloud" "nextcloud" "nextcloud-vault" \
"nextcloud/* shared/keycloak-admin shared/postmark-relay" ""
write_policy_and_role "comms" "comms" "comms-vault,atlasbot" \
"comms/* shared/chat-ai-keys-runtime shared/harbor-pull" ""
write_policy_and_role "jenkins" "jenkins" "jenkins" \
"jenkins/*" ""
write_policy_and_role "monitoring" "monitoring" "monitoring-vault-sync" \
"monitoring/* shared/postmark-relay shared/harbor-pull" ""
write_policy_and_role "logging" "logging" "logging-vault-sync" \
"logging/* shared/harbor-pull" ""
write_policy_and_role "pegasus" "jellyfin" "pegasus-vault-sync" \
"pegasus/* shared/harbor-pull" ""
write_policy_and_role "crypto" "crypto" "crypto-vault-sync" \
"crypto/* shared/harbor-pull" ""
write_policy_and_role "health" "health" "health-vault-sync" \
"health/*" ""
write_policy_and_role "maintenance" "maintenance" "ariadne,maintenance-vault-sync" \
"portal/atlas-portal-db portal/bstein-dev-home-keycloak-admin mailu/mailu-db-secret mailu/mailu-initial-account-secret shared/harbor-pull" ""
write_policy_and_role "finance" "finance" "finance-vault" \
"finance/* shared/postmark-relay" ""
write_policy_and_role "finance-secrets" "finance" "finance-secrets-ensure" \
"" \
"finance/*"
write_policy_and_role "longhorn" "longhorn-system" "longhorn-vault,longhorn-vault-sync" \
"longhorn/* shared/harbor-pull" ""
write_policy_and_role "postgres" "postgres" "postgres-vault" \
"postgres/postgres-db" ""
write_policy_and_role "vault" "vault" "vault" \
"vault/*" ""
write_policy_and_role "sso-secrets" "sso" "mas-secrets-ensure" \
"shared/keycloak-admin" \
"harbor/harbor-oidc vault/vault-oidc-config comms/synapse-oidc logging/oauth2-proxy-logs-oidc finance/actual-oidc"
write_policy_and_role "crypto-secrets" "crypto" "crypto-secrets-ensure" \
"" \
"crypto/wallet-monero-temp-rpc-auth"
write_policy_and_role "comms-secrets" "comms" \
"comms-secrets-ensure,mas-db-ensure,mas-admin-client-secret-writer,othrys-synapse-signingkey-job" \
"" \
"comms/turn-shared-secret comms/livekit-api comms/synapse-redis comms/synapse-macaroon comms/atlasbot-credentials-runtime comms/synapse-db comms/mas-db comms/mas-admin-client-runtime comms/mas-secrets-runtime comms/othrys-synapse-signingkey"
SH
exec /bin/sh /tmp/vault_k8s_auth_configure.sh
env:
- name: VAULT_ADDR
value: http://10.43.57.249:8200
- name: VAULT_K8S_ROLE
value: vault-admin
- name: VAULT_K8S_TOKEN_REVIEWER_JWT_FILE
value: /var/run/secrets/vault-token-reviewer/token
- name: VAULT_K8S_ROLE_TTL
value: 1h
volumeMounts:
- name: token-reviewer
mountPath: /var/run/secrets/vault-token-reviewer
readOnly: true
volumes:
- name: token-reviewer
secret:
secretName: vault-admin-token-reviewer

View File

@ -0,0 +1,236 @@
# services/vault/oidc-config-cronjob.yaml
# Configures Vault's OIDC auth method against the SSO provider: enables
# auth/oidc, writes the provider config, and creates admin/dev/user roles
# with group-bound claims. Credentials arrive via Vault agent injection.
apiVersion: batch/v1
kind: CronJob
metadata:
name: vault-oidc-config
namespace: vault
labels:
atlas.bstein.dev/glue: "true"
spec:
# Suspended: never runs on schedule; ariadne spawns Jobs from this template.
schedule: "*/15 * * * *"
suspend: true
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 3
jobTemplate:
spec:
backoffLimit: 1
template:
metadata:
# Vault agent renders kv/data/atlas/vault/vault-oidc-config into a shell
# env file at /vault/secrets/vault-oidc-env.sh, sourced by the container.
annotations:
vault.hashicorp.com/agent-inject: "true"
vault.hashicorp.com/agent-pre-populate-only: "true"
vault.hashicorp.com/role: "vault-admin"
vault.hashicorp.com/agent-inject-secret-vault-oidc-env.sh: "kv/data/atlas/vault/vault-oidc-config"
vault.hashicorp.com/agent-inject-template-vault-oidc-env.sh: |
{{ with secret "kv/data/atlas/vault/vault-oidc-config" }}
export VAULT_OIDC_DISCOVERY_URL="{{ .Data.data.discovery_url }}"
export VAULT_OIDC_CLIENT_ID="{{ .Data.data.client_id }}"
export VAULT_OIDC_CLIENT_SECRET="{{ .Data.data.client_secret }}"
export VAULT_OIDC_DEFAULT_ROLE="{{ .Data.data.default_role }}"
export VAULT_OIDC_SCOPES="{{ .Data.data.scopes }}"
export VAULT_OIDC_USER_CLAIM="{{ .Data.data.user_claim }}"
export VAULT_OIDC_GROUPS_CLAIM="{{ .Data.data.groups_claim }}"
export VAULT_OIDC_TOKEN_POLICIES="{{ .Data.data.token_policies }}"
export VAULT_OIDC_ADMIN_GROUP="{{ .Data.data.admin_group }}"
export VAULT_OIDC_ADMIN_POLICIES="{{ .Data.data.admin_policies }}"
export VAULT_OIDC_DEV_GROUP="{{ .Data.data.dev_group }}"
export VAULT_OIDC_DEV_POLICIES="{{ .Data.data.dev_policies }}"
export VAULT_OIDC_USER_GROUP="{{ .Data.data.user_group }}"
export VAULT_OIDC_USER_POLICIES="{{ .Data.data.user_policies }}"
export VAULT_OIDC_REDIRECT_URIS="{{ .Data.data.redirect_uris }}"
export VAULT_OIDC_BOUND_AUDIENCES="{{ .Data.data.bound_audiences }}"
export VAULT_OIDC_BOUND_CLAIMS="{{ .Data.data.bound_claims }}"
export VAULT_OIDC_BOUND_CLAIMS_TYPE="{{ .Data.data.bound_claims_type }}"
{{ end }}
spec:
serviceAccountName: vault-admin
restartPolicy: Never
nodeSelector:
kubernetes.io/arch: arm64
node-role.kubernetes.io/worker: "true"
containers:
- name: configure-oidc
image: hashicorp/vault:1.17.6
imagePullPolicy: IfNotPresent
command: ["/bin/sh", "-c"]
args:
- |
set -eu
if [ -f /vault/secrets/vault-oidc-env.sh ]; then
. /vault/secrets/vault-oidc-env.sh
fi
cat <<'SH' > /tmp/vault_oidc_configure.sh
#!/usr/bin/env sh
set -eu
log() { echo "[vault-oidc] $*"; }
# Retry wrapper around the vault CLI: up to 6 attempts with linear backoff.
vault_cmd() {
for attempt in 1 2 3 4 5 6; do
set +e
output="$(vault "$@" 2>&1)"
status=$?
set -e
if [ "${status}" -eq 0 ]; then
printf '%s' "${output}"
return 0
fi
log "vault command failed; retrying (${attempt}/6)"
sleep $((attempt * 2))
done
log "vault command failed; giving up"
return 1
}
# Log in with the pod's service-account JWT unless VAULT_TOKEN is already set.
ensure_token() {
if [ -n "${VAULT_TOKEN:-}" ]; then
return
fi
role="${VAULT_K8S_ROLE:-vault}"
jwt="$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)"
if ! VAULT_TOKEN="$(vault_cmd write -field=token auth/kubernetes/login role="${role}" jwt="${jwt}")"; then
log "kubernetes auth login failed; set VAULT_TOKEN or fix role ${role}"
exit 1
fi
export VAULT_TOKEN
}
# Exit 0 (not a Job failure) when Vault is uninitialized or sealed.
if ! status_json="$(vault_cmd status -format=json)"; then
log "vault status failed; check VAULT_ADDR and VAULT_TOKEN"
exit 1
fi
if ! printf '%s' "${status_json}" | grep -q '"initialized":[[:space:]]*true'; then
log "vault not initialized; skipping"
exit 0
fi
if printf '%s' "${status_json}" | grep -q '"sealed":[[:space:]]*true'; then
log "vault sealed; skipping"
exit 0
fi
ensure_token
# Provider credentials are mandatory; everything else has defaults below.
: "${VAULT_OIDC_DISCOVERY_URL:?set VAULT_OIDC_DISCOVERY_URL}"
: "${VAULT_OIDC_CLIENT_ID:?set VAULT_OIDC_CLIENT_ID}"
: "${VAULT_OIDC_CLIENT_SECRET:?set VAULT_OIDC_CLIENT_SECRET}"
default_role="${VAULT_OIDC_DEFAULT_ROLE:-admin}"
scopes="${VAULT_OIDC_SCOPES:-openid profile email groups}"
user_claim="${VAULT_OIDC_USER_CLAIM:-preferred_username}"
groups_claim="${VAULT_OIDC_GROUPS_CLAIM:-groups}"
redirect_uris="${VAULT_OIDC_REDIRECT_URIS:-https://secret.bstein.dev/ui/vault/auth/oidc/oidc/callback}"
bound_audiences="${VAULT_OIDC_BOUND_AUDIENCES:-${VAULT_OIDC_CLIENT_ID}}"
bound_claims_type="${VAULT_OIDC_BOUND_CLAIMS_TYPE:-string}"
# Normalize the injected value; "<novalue>" is consul-template's marker for
# a missing kv field, so fall back to "string" in that case too.
bound_claims_type="$(printf '%s' "${bound_claims_type}" | tr -d '[:space:]')"
if [ -z "${bound_claims_type}" ] || [ "${bound_claims_type}" = "<novalue>" ]; then
bound_claims_type="string"
fi
admin_group="${VAULT_OIDC_ADMIN_GROUP:-admin}"
admin_policies="${VAULT_OIDC_ADMIN_POLICIES:-default,vault-admin}"
dev_group="${VAULT_OIDC_DEV_GROUP:-dev}"
dev_policies="${VAULT_OIDC_DEV_POLICIES:-default,dev-kv}"
user_group="${VAULT_OIDC_USER_GROUP:-${dev_group}}"
user_policies="${VAULT_OIDC_USER_POLICIES:-${VAULT_OIDC_TOKEN_POLICIES:-${dev_policies}}}"
if ! vault_cmd auth list -format=json | grep -q '"oidc/"'; then
log "enabling oidc auth method"
vault_cmd auth enable oidc
fi
log "configuring oidc auth"
vault_cmd write auth/oidc/config \
oidc_discovery_url="${VAULT_OIDC_DISCOVERY_URL}" \
oidc_client_id="${VAULT_OIDC_CLIENT_ID}" \
oidc_client_secret="${VAULT_OIDC_CLIENT_SECRET}" \
default_role="${default_role}"
vault_cmd auth tune -listing-visibility=unauth oidc >/dev/null
# Turn a comma-separated group list into {"<claim>": ["g1","g2",...]} JSON.
build_bound_claims() {
claim="$1"
groups="$2"
json="{\"${claim}\":["
first=1
old_ifs=$IFS
IFS=,
for item in $groups; do
item="$(printf '%s' "$item" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
if [ -z "${item}" ]; then
continue
fi
if [ "${first}" -eq 0 ]; then
json="${json},"
fi
json="${json}\"${item}\""
first=0
done
IFS=$old_ifs
json="${json}]}"
printf '%s' "${json}"
}
# Turn a comma-separated list into a JSON array of trimmed strings.
build_json_array() {
items="$1"
json="["
first=1
old_ifs=$IFS
IFS=,
for item in $items; do
item="$(printf '%s' "$item" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
if [ -z "${item}" ]; then
continue
fi
if [ "${first}" -eq 0 ]; then
json="${json},"
fi
json="${json}\"${item}\""
first=0
done
IFS=$old_ifs
json="${json}]"
printf '%s' "${json}"
}
# Write one auth/oidc role bound to the given groups; silently skips when
# either the group list or policy list is empty.
configure_role() {
role_name="$1"
role_groups="$2"
role_policies="$3"
if [ -z "${role_name}" ] || [ -z "${role_groups}" ] || [ -z "${role_policies}" ]; then
log "skipping role ${role_name} (missing groups or policies)"
return
fi
claims="$(build_bound_claims "${groups_claim}" "${role_groups}")"
scopes_csv="$(printf '%s' "${scopes}" | tr ' ' ',' | tr -s ',' | sed 's/^,//;s/,$//')"
redirect_json="$(build_json_array "${redirect_uris}")"
payload_file="$(mktemp)"
cat > "${payload_file}" <<EOF
{
"user_claim": "${user_claim}",
"oidc_scopes": "${scopes_csv}",
"token_policies": "${role_policies}",
"bound_audiences": "${bound_audiences}",
"bound_claims": ${claims},
"bound_claims_type": "${bound_claims_type}",
"groups_claim": "${groups_claim}",
"allowed_redirect_uris": ${redirect_json}
}
EOF
log "configuring oidc role ${role_name}"
vault_cmd write "auth/oidc/role/${role_name}" @"${payload_file}"
rm -f "${payload_file}"
}
configure_role "admin" "${admin_group}" "${admin_policies}"
configure_role "dev" "${dev_group}" "${dev_policies}"
configure_role "user" "${user_group}" "${user_policies}"
SH
exec /bin/sh /tmp/vault_oidc_configure.sh
env:
# NOTE(review): VAULT_ADDR is a hard-coded ClusterIP — consider the Service
# DNS name so the address survives a Service re-create; confirm before changing.
- name: VAULT_ADDR
value: http://10.43.57.249:8200
- name: VAULT_K8S_ROLE
value: vault-admin

View File

@ -9,9 +9,10 @@ from ..utils.logging import get_logger
class JobSpawner:
def __init__(self, namespace: str, cronjob_name: str) -> None:
def __init__(self, namespace: str, cronjob_name: str, manifest: dict[str, Any] | None = None) -> None:
self._namespace = namespace
self._cronjob_name = cronjob_name
self._manifest = manifest
self._logger = get_logger(__name__)
@staticmethod
@ -77,7 +78,9 @@ class JobSpawner:
env_overrides: list[dict[str, str]] | None = None,
job_ttl_seconds: int | None = None,
) -> dict[str, Any]:
cronjob = get_json(f"/apis/batch/v1/namespaces/{self._namespace}/cronjobs/{self._cronjob_name}")
cronjob = self._manifest or get_json(
f"/apis/batch/v1/namespaces/{self._namespace}/cronjobs/{self._cronjob_name}"
)
job_payload = self._job_from_cronjob(cronjob, label_suffix, env_overrides, job_ttl_seconds)
created = post_json(f"/apis/batch/v1/namespaces/{self._namespace}/jobs", job_payload)
job_name = (
@ -104,16 +107,56 @@ class JobSpawner:
job = get_json(f"/apis/batch/v1/namespaces/{self._namespace}/jobs/{job_name}")
status = job.get("status") if isinstance(job.get("status"), dict) else {}
if int(status.get("succeeded") or 0) > 0:
self._logger.info(
"job completed",
extra={
"event": "job_complete",
"namespace": self._namespace,
"cronjob": self._cronjob_name,
"job": job_name,
"status": "ok",
},
)
return {"job": job_name, "status": "ok"}
if int(status.get("failed") or 0) > 0:
self._logger.info(
"job completed",
extra={
"event": "job_complete",
"namespace": self._namespace,
"cronjob": self._cronjob_name,
"job": job_name,
"status": "error",
},
)
return {"job": job_name, "status": "error"}
conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
for cond in conditions:
if not isinstance(cond, dict):
continue
if cond.get("type") == "Complete" and cond.get("status") == "True":
self._logger.info(
"job completed",
extra={
"event": "job_complete",
"namespace": self._namespace,
"cronjob": self._cronjob_name,
"job": job_name,
"status": "ok",
},
)
return {"job": job_name, "status": "ok"}
if cond.get("type") == "Failed" and cond.get("status") == "True":
self._logger.info(
"job completed",
extra={
"event": "job_complete",
"namespace": self._namespace,
"cronjob": self._cronjob_name,
"job": job_name,
"status": "error",
},
)
return {"job": job_name, "status": "error"}
time.sleep(2)
self._logger.info(

20
ariadne/k8s/manifests.py Normal file
View File

@ -0,0 +1,20 @@
from __future__ import annotations

import copy
from functools import lru_cache
from importlib import resources
from typing import Any

import yaml
@lru_cache(maxsize=64)
def _load_cronjob_manifest_cached(path: str) -> dict[str, Any]:
    """Parse and validate the packaged CronJob manifest at *path* (cached).

    Internal: callers must not mutate the returned dict — it is shared by
    the cache. Use load_cronjob_manifest() instead.
    """
    resource = resources.files("ariadne.job_manifests").joinpath(path)
    if not resource.is_file():
        raise FileNotFoundError(f"manifest not found: {path}")
    payload = yaml.safe_load(resource.read_text(encoding="utf-8"))
    if not isinstance(payload, dict):
        raise ValueError("manifest payload is not a mapping")
    if payload.get("kind") != "CronJob":
        raise ValueError("manifest is not a CronJob")
    return payload


def load_cronjob_manifest(path: str) -> dict[str, Any]:
    """Load a CronJob manifest bundled under the ariadne.job_manifests package.

    Parameters:
        path: relative resource path, e.g. "vault/oidc-config-cronjob.yaml".

    Returns:
        The parsed manifest as a dict. A deep copy is returned on every call:
        the underlying parse is lru_cache'd and shared across all JobSpawner
        instances, so handing out the cached dict directly would let one
        caller's mutation (e.g. building a Job payload from the template)
        corrupt every later load.

    Raises:
        FileNotFoundError: the resource does not exist.
        ValueError: the YAML is not a mapping or is not a CronJob.
    """
    return copy.deepcopy(_load_cronjob_manifest_cached(path))

View File

@ -115,6 +115,13 @@ class ProvisioningManager:
"provisioning started",
extra={"event": "provision_start", "request_code": request_code},
)
try:
self._storage.record_event(
"provision_start",
{"request_code": request_code},
)
except Exception:
pass
with self._db.connection() as conn:
lock_id = _advisory_lock_id(request_code)
@ -219,6 +226,16 @@ class ProvisioningManager:
if isinstance(actions, list) and "CONFIGURE_TOTP" in actions:
new_actions = [a for a in actions if a != "CONFIGURE_TOTP"]
keycloak_admin.update_user_safe(user_id, {"requiredActions": new_actions})
email_value = full.get("email")
if (
(not isinstance(email_value, str) or not email_value.strip())
and isinstance(email_verified_at, datetime)
and contact_email.strip()
):
keycloak_admin.update_user_safe(
user_id,
{"email": contact_email.strip(), "emailVerified": True},
)
if isinstance(attrs, dict):
existing = _extract_attr(attrs, MAILU_EMAIL_ATTR)
if existing:
@ -469,13 +486,35 @@ class ProvisioningManager:
"status": "awaiting_onboarding",
},
)
try:
self._storage.record_event(
"provision_complete",
{
"request_code": request_code,
"username": username,
"status": "awaiting_onboarding",
},
)
except Exception:
pass
return ProvisionOutcome(ok=True, status="awaiting_onboarding")
pending_status = "accounts_building" if status == "accounts_building" else status
logger.info(
"provisioning pending",
extra={"event": "provision_pending", "request_code": request_code, "status": "accounts_building"},
extra={"event": "provision_pending", "request_code": request_code, "status": pending_status},
)
return ProvisionOutcome(ok=False, status="accounts_building")
try:
self._storage.record_event(
"provision_pending",
{
"request_code": request_code,
"status": pending_status,
},
)
except Exception:
pass
return ProvisionOutcome(ok=False, status=pending_status)
finally:
conn.execute("SELECT pg_advisory_unlock(%s)", (lock_id,))
@ -538,6 +577,19 @@ class ProvisioningManager:
"detail": detail or "",
},
)
try:
self._storage.record_event(
"provision_task",
{
"request_code": request_code,
"task": task,
"status": status,
"duration_sec": round(duration_sec, 3),
"detail": detail or "",
},
)
except Exception:
pass
try:
self._storage.record_task_run(
request_code,

View File

@ -1,16 +1,17 @@
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import asdict, dataclass, is_dataclass
from datetime import datetime, timezone
import json
import threading
import time
from typing import Callable
from typing import Any, Callable
from croniter import croniter
from ..db.storage import Storage
from ..metrics.metrics import record_schedule_state, record_task_run
from ..utils.logging import get_logger
from ..utils.logging import get_logger, task_context
@dataclass(frozen=True)
@ -86,17 +87,22 @@ class CronScheduler:
started = datetime.now(timezone.utc)
status = "ok"
detail = None
result_detail = ""
result_payload: Any | None = None
self._logger.info(
"schedule task started",
extra={"event": "schedule_start", "task": task.name},
)
try:
task.runner()
with task_context(task.name):
result = task.runner()
result_detail, result_payload = self._format_result(result)
except Exception as exc:
status = "error"
detail = str(exc).strip() or "task failed"
finished = datetime.now(timezone.utc)
duration_sec = (finished - started).total_seconds()
detail_value = detail or result_detail or ""
record_task_run(task.name, status, duration_sec)
self._logger.info(
"schedule task finished",
@ -105,13 +111,32 @@ class CronScheduler:
"task": task.name,
"status": status,
"duration_sec": round(duration_sec, 3),
"detail": detail or "",
"detail": detail_value,
"result": result_payload if result_payload is not None else "",
},
)
try:
event_detail: dict[str, Any] = {
"task": task.name,
"status": status,
"duration_sec": round(duration_sec, 3),
"detail": detail_value,
"next_run_at": (
self._next_run.get(task.name).isoformat() if self._next_run.get(task.name) else ""
),
}
if result_payload not in (None, ""):
event_detail["result"] = result_payload
self._storage.record_event(
"schedule_task",
event_detail,
)
except Exception:
pass
record_schedule_state(
task.name,
started.timestamp(),
started.timestamp() if status == "ok" else None,
finished.timestamp() if status == "ok" else None,
self._next_run.get(task.name).timestamp() if self._next_run.get(task.name) else None,
status == "ok",
)
@ -120,7 +145,7 @@ class CronScheduler:
None,
task.name,
status,
detail,
detail_value or None,
started,
finished,
int(duration_sec * 1000),
@ -139,3 +164,13 @@ class CronScheduler:
pass
with self._lock:
self._running.discard(task.name)
@staticmethod
def _format_result(result: Any) -> tuple[str, Any | None]:
if result is None:
return "", None
if is_dataclass(result):
result = asdict(result)
if isinstance(result, dict):
return json.dumps(result, ensure_ascii=True), result
return str(result), result

View File

@ -3,15 +3,32 @@ from __future__ import annotations
from typing import Any
from ..k8s.jobs import JobSpawner
from ..k8s.manifests import load_cronjob_manifest
from ..settings import settings
class CommsService:
def __init__(self) -> None:
self._guest_name_spawner = JobSpawner(settings.comms_namespace, settings.comms_guest_name_cronjob)
self._pin_invite_spawner = JobSpawner(settings.comms_namespace, settings.comms_pin_invite_cronjob)
self._reset_room_spawner = JobSpawner(settings.comms_namespace, settings.comms_reset_room_cronjob)
self._seed_room_spawner = JobSpawner(settings.comms_namespace, settings.comms_seed_room_cronjob)
self._guest_name_spawner = JobSpawner(
settings.comms_namespace,
settings.comms_guest_name_cronjob,
load_cronjob_manifest("comms/guest-name-job.yaml"),
)
self._pin_invite_spawner = JobSpawner(
settings.comms_namespace,
settings.comms_pin_invite_cronjob,
load_cronjob_manifest("comms/pin-othrys-job.yaml"),
)
self._reset_room_spawner = JobSpawner(
settings.comms_namespace,
settings.comms_reset_room_cronjob,
load_cronjob_manifest("comms/reset-othrys-room-job.yaml"),
)
self._seed_room_spawner = JobSpawner(
settings.comms_namespace,
settings.comms_seed_room_cronjob,
load_cronjob_manifest("comms/seed-othrys-room.yaml"),
)
def _trigger(self, spawner: JobSpawner, label_suffix: str, wait: bool) -> dict[str, Any]:
if wait:

View File

@ -3,12 +3,17 @@ from __future__ import annotations
from typing import Any
from ..k8s.jobs import JobSpawner
from ..k8s.manifests import load_cronjob_manifest
from ..settings import settings
class FireflyService:
def __init__(self) -> None:
self._spawner = JobSpawner(settings.firefly_namespace, settings.firefly_user_sync_cronjob)
self._spawner = JobSpawner(
settings.firefly_namespace,
settings.firefly_user_sync_cronjob,
load_cronjob_manifest("finance/firefly-user-sync-cronjob.yaml"),
)
def sync_user(self, email: str, password: str, wait: bool = True) -> dict[str, Any]:
email = (email or "").strip()

View File

@ -235,5 +235,28 @@ class KeycloakAdminClient:
return users
first += page_size
def list_groups(self, max_groups: int = 200) -> list[dict[str, Any]]:
    """Fetch up to *max_groups* realm groups as raw Keycloak group dicts.

    Non-dict entries in the response are dropped; a non-list payload
    yields an empty result. Raises httpx.HTTPStatusError on HTTP errors.
    """
    groups_url = f"{settings.keycloak_admin_url}/admin/realms/{settings.keycloak_realm}/groups"
    with httpx.Client(timeout=10.0) as client:
        response = client.get(
            groups_url,
            params={"max": str(max_groups)},
            headers=self._headers(),
        )
        response.raise_for_status()
        body = response.json()
    if isinstance(body, list):
        return [entry for entry in body if isinstance(entry, dict)]
    return []
def list_group_names(self, exclude: set[str] | None = None) -> list[str]:
exclude = {name.strip() for name in (exclude or set()) if name.strip()}
names: list[str] = []
for group in self.list_groups():
name = group.get("name")
if isinstance(name, str) and name.strip():
normalized = name.strip()
if normalized in exclude:
continue
names.append(normalized)
return sorted(set(names))
keycloak_admin = KeycloakAdminClient()

View File

@ -0,0 +1,93 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Any
from ..utils.logging import get_logger
from .keycloak_admin import keycloak_admin
PROFILE_ACTIONS = {"UPDATE_PROFILE", "UPDATE_EMAIL", "VERIFY_EMAIL"}
logger = get_logger(__name__)
@dataclass(frozen=True)
class ProfileSyncSummary:
    """Aggregate result of one Keycloak profile-sync pass (run_profile_sync)."""

    # Users whose requiredActions we attempted to update in Keycloak.
    processed: int
    # Updates that succeeded.
    updated: int
    # Users left untouched (no username/disabled, service accounts, nothing
    # to trim, incomplete profile, or trimming would change nothing).
    skipped: int
    # Failed updates plus users missing an id; nonzero marks the run "error".
    failures: int
    # Optional human-readable reason (e.g. "keycloak admin not configured").
    detail: str = ""
def _profile_complete(user: dict[str, Any]) -> bool:
    """True when the user has a verified email plus non-blank email,
    first name, and last name (non-string fields count as blank)."""

    def _clean(key: str) -> str:
        value = user.get(key)
        return value.strip() if isinstance(value, str) else ""

    if not user.get("emailVerified"):
        return False
    return all(_clean(field) for field in ("email", "firstName", "lastName"))
def run_profile_sync() -> ProfileSyncSummary:
    """Drop satisfied profile actions from Keycloak users.

    For every enabled, non-service user whose profile is already complete
    (see _profile_complete), remove UPDATE_PROFILE / UPDATE_EMAIL /
    VERIFY_EMAIL from requiredActions so they are not re-prompted at login.

    Returns a ProfileSyncSummary; never raises — a misconfigured admin
    client or a failed update is reported through the counters instead.
    """
    if not keycloak_admin.ready():
        summary = ProfileSyncSummary(0, 0, 0, 1, detail="keycloak admin not configured")
        logger.info(
            "keycloak profile sync skipped",
            extra={"event": "keycloak_profile_sync", "status": "error", "detail": summary.detail},
        )
        return summary

    processed = updated = skipped = failures = 0
    for user in keycloak_admin.iter_users(page_size=200, brief=False):
        raw_name = user.get("username")
        username = raw_name if isinstance(raw_name, str) else ""
        # Untouchable accounts: nameless, explicitly disabled, or service accounts.
        if not username or user.get("enabled") is False:
            skipped += 1
            continue
        if user.get("serviceAccountClientId") or username.startswith("service-account-"):
            skipped += 1
            continue
        raw_actions = user.get("requiredActions")
        required = raw_actions if isinstance(raw_actions, list) else []
        if not required:
            skipped += 1
            continue
        # Only trim actions once the profile actually satisfies them.
        if not _profile_complete(user):
            skipped += 1
            continue
        trimmed = [action for action in required if action not in PROFILE_ACTIONS]
        if trimmed == required:
            skipped += 1
            continue
        raw_id = user.get("id")
        user_id = raw_id if isinstance(raw_id, str) else ""
        if not user_id:
            failures += 1
            continue
        processed += 1
        try:
            keycloak_admin.update_user_safe(user_id, {"requiredActions": trimmed})
            updated += 1
        except Exception:
            failures += 1

    summary = ProfileSyncSummary(processed, updated, skipped, failures)
    logger.info(
        "keycloak profile sync finished",
        extra={
            "event": "keycloak_profile_sync",
            "status": "ok" if failures == 0 else "error",
            "processed": processed,
            "updated": updated,
            "skipped": skipped,
            "failures": failures,
        },
    )
    return summary

View File

@ -3,12 +3,17 @@ from __future__ import annotations
from typing import Any
from ..k8s.jobs import JobSpawner
from ..k8s.manifests import load_cronjob_manifest
from ..settings import settings
class NextcloudService:
def __init__(self) -> None:
self._spawner = JobSpawner(settings.nextcloud_namespace, settings.nextcloud_mail_sync_cronjob)
self._spawner = JobSpawner(
settings.nextcloud_namespace,
settings.nextcloud_mail_sync_cronjob,
load_cronjob_manifest("nextcloud-mail-sync/cronjob.yaml"),
)
def sync_mail(self, username: str | None = None, wait: bool = True) -> dict[str, Any]:
if not settings.nextcloud_namespace or not settings.nextcloud_mail_sync_cronjob:

View File

@ -3,13 +3,22 @@ from __future__ import annotations
from typing import Any
from ..k8s.jobs import JobSpawner
from ..k8s.manifests import load_cronjob_manifest
from ..settings import settings
class VaultService:
def __init__(self) -> None:
self._k8s_auth_spawner = JobSpawner(settings.vault_namespace, settings.vault_k8s_auth_cronjob)
self._oidc_spawner = JobSpawner(settings.vault_namespace, settings.vault_oidc_cronjob)
self._k8s_auth_spawner = JobSpawner(
settings.vault_namespace,
settings.vault_k8s_auth_cronjob,
load_cronjob_manifest("vault/k8s-auth-config-cronjob.yaml"),
)
self._oidc_spawner = JobSpawner(
settings.vault_namespace,
settings.vault_oidc_cronjob,
load_cronjob_manifest("vault/oidc-config-cronjob.yaml"),
)
def sync_k8s_auth(self, wait: bool = True) -> dict[str, Any]:
if not settings.vault_namespace or not settings.vault_k8s_auth_cronjob:

View File

@ -3,13 +3,22 @@ from __future__ import annotations
from typing import Any
from ..k8s.jobs import JobSpawner
from ..k8s.manifests import load_cronjob_manifest
from ..settings import settings
class WgerService:
def __init__(self) -> None:
self._user_spawner = JobSpawner(settings.wger_namespace, settings.wger_user_sync_cronjob)
self._admin_spawner = JobSpawner(settings.wger_namespace, settings.wger_admin_cronjob)
self._user_spawner = JobSpawner(
settings.wger_namespace,
settings.wger_user_sync_cronjob,
load_cronjob_manifest("health/wger-user-sync-cronjob.yaml"),
)
self._admin_spawner = JobSpawner(
settings.wger_namespace,
settings.wger_admin_cronjob,
load_cronjob_manifest("health/wger-admin-ensure-cronjob.yaml"),
)
def sync_user(self, username: str, email: str, password: str, wait: bool = True) -> dict[str, Any]:
username = (username or "").strip()

View File

@ -127,6 +127,7 @@ class Settings:
comms_pin_invite_cron: str
comms_reset_room_cron: str
comms_seed_room_cron: str
keycloak_profile_cron: str
metrics_path: str
@ -238,6 +239,7 @@ class Settings:
comms_pin_invite_cron=_env("ARIADNE_SCHEDULE_COMMS_PIN_INVITE", "*/30 * * * *"),
comms_reset_room_cron=_env("ARIADNE_SCHEDULE_COMMS_RESET_ROOM", "0 0 1 1 *"),
comms_seed_room_cron=_env("ARIADNE_SCHEDULE_COMMS_SEED_ROOM", "*/10 * * * *"),
keycloak_profile_cron=_env("ARIADNE_SCHEDULE_KEYCLOAK_PROFILE", "0 */6 * * *"),
metrics_path=_env("METRICS_PATH", "/metrics"),
)

View File

@ -1,5 +1,7 @@
from __future__ import annotations
from contextlib import contextmanager
import contextvars
from dataclasses import dataclass
from datetime import datetime, timezone
import json
@ -55,12 +57,25 @@ class JsonFormatter(logging.Formatter):
for key, value in record.__dict__.items():
if key in _STANDARD_ATTRS or key in payload:
continue
if key == "taskName" and value is None:
continue
payload[key] = value
return json.dumps(payload, ensure_ascii=True)
_LOGGING_CONFIGURED = False
_TASK_NAME: contextvars.ContextVar[str | None] = contextvars.ContextVar("ariadne_task_name", default=None)
class _ContextFilter(logging.Filter):
    """Logging filter that stamps records with the ambient task name.

    When a record has no truthy ``taskName`` attribute and a task name is
    set in the current context (via task_context), it is copied onto the
    record. The filter never suppresses records (always returns True).
    """

    def filter(self, record: logging.LogRecord) -> bool:
        if not getattr(record, "taskName", None):
            ambient = _TASK_NAME.get()
            if ambient:
                record.taskName = ambient
        return True
def configure_logging(config: LogConfig | None = None) -> None:
@ -75,6 +90,7 @@ def configure_logging(config: LogConfig | None = None) -> None:
handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter())
handler.addFilter(_ContextFilter())
root = logging.getLogger()
root.setLevel(level)
@ -86,3 +102,12 @@ def configure_logging(config: LogConfig | None = None) -> None:
def get_logger(name: str) -> logging.Logger:
return logging.getLogger(name)
@contextmanager
def task_context(name: str | None) -> Any:
    """Bind *name* as the current task name for the duration of the block."""
    reset_token = _TASK_NAME.set(name)
    try:
        yield
    finally:
        # Restore whichever task name (possibly None) was active before entry.
        _TASK_NAME.reset(reset_token)

View File

@ -1,2 +1,3 @@
pytest==8.3.5
pytest-mock==3.14.0
slipcover==1.0.17

View File

@ -6,3 +6,4 @@ psycopg[binary]==3.2.6
psycopg-pool==3.2.6
croniter==2.0.7
prometheus-client==0.21.1
PyYAML==6.0.2

View File

@ -0,0 +1,115 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import os
import sys
import urllib.request
import xml.etree.ElementTree as ET
def _escape_label(value: str) -> str:
return value.replace("\\", "\\\\").replace("\n", "\\n").replace('"', '\\"')
def _label_str(labels: dict[str, str]) -> str:
    """Render *labels* as a Prometheus ``{k="v",...}`` block, skipping empty values."""
    rendered = []
    for key, val in labels.items():
        if not val:
            continue
        rendered.append(f'{key}="{_escape_label(val)}"')
    if not rendered:
        return ""
    return "{" + ",".join(rendered) + "}"
def _load_coverage(path: str) -> float:
with open(path, "r", encoding="utf-8") as handle:
payload = json.load(handle)
summary = payload.get("summary") or {}
percent = summary.get("percent_covered")
if isinstance(percent, (int, float)):
return float(percent)
raise RuntimeError("coverage summary missing percent_covered")
def _load_junit(path: str) -> dict[str, int]:
tree = ET.parse(path)
root = tree.getroot()
def _as_int(node, name: str) -> int:
raw = node.attrib.get(name) or "0"
try:
return int(float(raw))
except ValueError:
return 0
suites = []
if root.tag == "testsuite":
suites = [root]
elif root.tag == "testsuites":
suites = list(root.findall("testsuite"))
totals = {"tests": 0, "failures": 0, "errors": 0, "skipped": 0}
for suite in suites:
totals["tests"] += _as_int(suite, "tests")
totals["failures"] += _as_int(suite, "failures")
totals["errors"] += _as_int(suite, "errors")
totals["skipped"] += _as_int(suite, "skipped")
return totals
def _post_metrics(url: str, payload: str) -> None:
    """POST a Prometheus text-format *payload* to *url*.

    Raises:
        RuntimeError: if the response reports an HTTP status >= 400.
        urllib.error.URLError: on connection failure or timeout.
    """
    req = urllib.request.Request(
        url,
        data=payload.encode("utf-8"),
        method="POST",
        headers={"Content-Type": "text/plain"},
    )
    # NOTE(review): urlopen already raises HTTPError for most >=400 responses,
    # so this check is a defensive guard for handlers that do not.
    with urllib.request.urlopen(req, timeout=10) as resp:
        if resp.status >= 400:
            raise RuntimeError(f"metrics push failed status={resp.status}")
def main() -> int:
    """Collect coverage + JUnit results and push them to VictoriaMetrics.

    Returns 0 on success, and also 0 (after a notice) when VM_IMPORT_URL is
    unset so the CI stage can skip gracefully. Raises RuntimeError when a
    required report file is missing.
    """
    vm_url = os.getenv("VM_IMPORT_URL", "").strip()
    if not vm_url:
        print("VM_IMPORT_URL not set; skipping metrics push")
        return 0

    coverage_path = os.getenv("COVERAGE_JSON", "build/coverage.json")
    junit_path = os.getenv("JUNIT_XML", "build/junit.xml")
    if not os.path.exists(coverage_path):
        raise RuntimeError(f"missing coverage file {coverage_path}")
    if not os.path.exists(junit_path):
        raise RuntimeError(f"missing junit file {junit_path}")

    coverage = _load_coverage(coverage_path)
    totals = _load_junit(junit_path)
    # Derive the pass count; clamp at zero in case of inconsistent reports.
    passed = max(totals["tests"] - totals["failures"] - totals["errors"] - totals["skipped"], 0)

    labels = {
        "job": os.getenv("CI_JOB_NAME", "ariadne"),
        "branch": os.getenv("BRANCH_NAME", ""),
        "build_number": os.getenv("BUILD_NUMBER", ""),
        "commit": os.getenv("GIT_COMMIT", ""),
        "repo": os.getenv("REPO_NAME", "ariadne"),
    }
    prefix = os.getenv("METRICS_PREFIX", "ariadne_ci")

    lines = [f"{prefix}_coverage_percent{_label_str(labels)} {coverage:.3f}"]
    # Emission order matters only for readability; counts are keyed by result.
    result_counts = {
        "passed": passed,
        "failed": totals["failures"],
        "error": totals["errors"],
        "skipped": totals["skipped"],
    }
    for result, count in result_counts.items():
        lines.append(f"{prefix}_tests_total{_label_str({**labels, 'result': result})} {count}")
    lines.append(f"{prefix}_build_info{_label_str(labels)} 1")

    _post_metrics(vm_url, "\n".join(lines) + "\n")
    print("metrics push complete")
    return 0
if __name__ == "__main__":
    try:
        sys.exit(main())
    except Exception as exc:
        # Fail the CI step visibly, but with a one-line message rather than a
        # full traceback.
        print(f"metrics push failed: {exc}")
        sys.exit(1)

909
tests/test_app.py Normal file
View File

@ -0,0 +1,909 @@
from __future__ import annotations
import dataclasses
from datetime import datetime, timezone
import os
from fastapi import HTTPException
from fastapi.testclient import TestClient
os.environ.setdefault("PORTAL_DATABASE_URL", "postgresql://user:pass@localhost/db")
from ariadne.auth.keycloak import AuthContext
import ariadne.app as app_module
def _client(monkeypatch, ctx: AuthContext) -> TestClient:
    """Build a TestClient whose authenticator resolves to *ctx* and whose
    lifecycle/storage side effects are stubbed out."""
    monkeypatch.setattr(app_module.authenticator, "authenticate", lambda token: ctx)
    noop = lambda *args, **kwargs: None
    for target, attr in (
        (app_module.db, "ensure_schema"),
        (app_module.provisioning, "start"),
        (app_module.scheduler, "start"),
        (app_module.provisioning, "stop"),
        (app_module.scheduler, "stop"),
        (app_module.db, "close"),
        (app_module.storage, "record_event"),
        (app_module.storage, "record_task_run"),
    ):
        monkeypatch.setattr(target, attr, noop)
    return TestClient(app_module.app)
def test_health_ok(monkeypatch) -> None:
    """The health probe answers 200 without authentication."""
    anon = AuthContext(username="", email="", groups=[], claims={})
    response = _client(monkeypatch, anon).get("/health")
    assert response.status_code == 200
    assert response.json() == {"ok": True}
def test_startup_and_shutdown(monkeypatch) -> None:
    """Startup and shutdown hooks complete with every collaborator stubbed."""
    stub = lambda *args, **kwargs: None
    for target, attr in (
        (app_module.db, "ensure_schema"),
        (app_module.provisioning, "start"),
        (app_module.scheduler, "add_task"),
        (app_module.scheduler, "start"),
        (app_module.scheduler, "stop"),
        (app_module.provisioning, "stop"),
        (app_module.db, "close"),
    ):
        monkeypatch.setattr(target, attr, stub)
    app_module._startup()
    app_module._shutdown()
def test_record_event_handles_exception(monkeypatch) -> None:
    """_record_event swallows storage failures instead of propagating them."""
    def boom(*_args, **_kwargs):
        raise RuntimeError("fail")
    monkeypatch.setattr(app_module.storage, "record_event", boom)
    app_module._record_event("event", {"ok": True})
def test_parse_event_detail_variants() -> None:
    """Non-JSON text passes through unchanged; None collapses to empty string."""
    assert app_module._parse_event_detail("not-json") == "not-json"
    assert app_module._parse_event_detail(None) == ""
def test_missing_auth_header(monkeypatch) -> None:
    """Admin endpoints reject requests without an Authorization header."""
    anon = AuthContext(username="", email="", groups=[], claims={})
    response = _client(monkeypatch, anon).get("/api/admin/access/requests")
    assert response.status_code == 401
def test_invalid_token(monkeypatch) -> None:
    """A token the authenticator rejects yields 401."""
    anon = AuthContext(username="", email="", groups=[], claims={})
    client = _client(monkeypatch, anon)
    def reject(_token):
        raise ValueError("bad")
    monkeypatch.setattr(app_module.authenticator, "authenticate", reject)
    response = client.get(
        "/api/admin/access/requests",
        headers={"Authorization": "Bearer token"},
    )
    assert response.status_code == 401
def test_forbidden_admin(monkeypatch) -> None:
    """An authenticated user outside the admin group gets 403."""
    non_admin = AuthContext(username="alice", email="", groups=["dev"], claims={})
    response = _client(monkeypatch, non_admin).get(
        "/api/admin/access/requests",
        headers={"Authorization": "Bearer token"},
    )
    assert response.status_code == 403
def test_account_access_denied(monkeypatch) -> None:
    """A user with no groups is denied account self-service endpoints."""
    groupless = AuthContext(username="alice", email="", groups=[], claims={})
    response = _client(monkeypatch, groupless).post(
        "/api/account/firefly/reset",
        headers={"Authorization": "Bearer token"},
    )
    assert response.status_code == 403
def test_metrics_endpoint(monkeypatch) -> None:
    """The metrics endpoint is reachable without authentication."""
    anon = AuthContext(username="", email="", groups=[], claims={})
    response = _client(monkeypatch, anon).get("/metrics")
    assert response.status_code == 200
def test_list_access_requests(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
now = datetime.now(timezone.utc)
monkeypatch.setattr(
app_module.storage,
"list_pending_requests",
lambda: [
{
"request_code": "REQ1",
"username": "alice",
"contact_email": "alice@example.com",
"note": "hello",
"status": "pending",
"created_at": now,
}
],
)
resp = client.get(
"/api/admin/access/requests",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["requests"][0]["username"] == "alice"
def test_list_access_requests_error(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.storage, "list_pending_requests", lambda: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.get(
"/api/admin/access/requests",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 502
def test_list_audit_events(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
now = datetime.now(timezone.utc)
monkeypatch.setattr(
app_module.storage,
"list_events",
lambda **kwargs: [
{
"id": 1,
"event_type": "mailu_rotate",
"detail": '{"status":"ok"}',
"created_at": now,
}
],
)
resp = client.get(
"/api/admin/audit/events",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["events"][0]["detail"]["status"] == "ok"
def test_list_audit_events_error(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.storage, "list_events", lambda **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.get(
"/api/admin/audit/events",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 502
def test_list_audit_task_runs(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
now = datetime.now(timezone.utc)
monkeypatch.setattr(
app_module.storage,
"list_task_runs",
lambda **kwargs: [
{
"id": 1,
"request_code": "REQ1",
"task": "mailu_sync",
"status": "ok",
"detail": "done",
"started_at": now,
"finished_at": now,
"duration_ms": 120,
}
],
)
resp = client.get(
"/api/admin/audit/task-runs",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["task_runs"][0]["task"] == "mailu_sync"
def test_list_audit_task_runs_error(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.storage, "list_task_runs", lambda **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.get(
"/api/admin/audit/task-runs",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 502
def test_access_flags_from_keycloak(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "list_group_names", lambda **kwargs: ["demo", "test"])
resp = client.get(
"/api/admin/access/flags",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
assert resp.json()["flags"] == ["demo", "test"]
def test_access_flags_fallback(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: False)
monkeypatch.setattr(
app_module,
"settings",
dataclasses.replace(app_module.settings, allowed_flag_groups=["demo"]),
)
resp = client.get(
"/api/admin/access/flags",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
assert resp.json()["flags"] == ["demo"]
def test_access_request_approve(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
captured = {}
def fake_fetchone(_query, params):
captured["flags"] = params[1]
return {"request_code": "REQ1"}
monkeypatch.setattr(app_module.db, "fetchone", fake_fetchone)
monkeypatch.setattr(app_module.provisioning, "provision_access_request", lambda code: None)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "list_group_names", lambda **kwargs: ["demo"])
resp = client.post(
"/api/admin/access/requests/alice/approve",
headers={"Authorization": "Bearer token"},
json={"flags": ["demo", "test", "admin"], "note": "ok"},
)
assert resp.status_code == 200
assert resp.json()["request_code"] == "REQ1"
assert captured["flags"] == ["demo"]
def test_access_request_approve_bad_json(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.db, "fetchone", lambda *args, **kwargs: {"request_code": "REQ1"})
resp = client.post(
"/api/admin/access/requests/alice/approve",
headers={"Authorization": "Bearer token", "Content-Type": "application/json"},
data="{bad}",
)
assert resp.status_code == 200
def test_access_request_approve_db_error(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.db, "fetchone", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.post(
"/api/admin/access/requests/alice/approve",
headers={"Authorization": "Bearer token"},
json={},
)
assert resp.status_code == 502
def test_access_request_approve_skipped(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.db, "fetchone", lambda *args, **kwargs: None)
resp = client.post(
"/api/admin/access/requests/alice/approve",
headers={"Authorization": "Bearer token"},
json={"flags": ["demo"]},
)
assert resp.status_code == 200
assert resp.json()["request_code"] == ""
def test_access_request_deny(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.db, "fetchone", lambda *args, **kwargs: {"request_code": "REQ2"})
resp = client.post(
"/api/admin/access/requests/alice/deny",
headers={"Authorization": "Bearer token"},
json={"note": "no"},
)
assert resp.status_code == 200
assert resp.json()["request_code"] == "REQ2"
def test_access_request_deny_db_error(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.db, "fetchone", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.post(
"/api/admin/access/requests/alice/deny",
headers={"Authorization": "Bearer token"},
json={},
)
assert resp.status_code == 502
def test_access_request_deny_skipped(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.db, "fetchone", lambda *args, **kwargs: None)
resp = client.post(
"/api/admin/access/requests/alice/deny",
headers={"Authorization": "Bearer token"},
json={"note": "no"},
)
assert resp.status_code == 200
assert resp.json()["request_code"] == ""
def test_rotate_mailu_password(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.mailu, "sync", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.nextcloud, "sync_mail", lambda *args, **kwargs: {"status": "ok"})
resp = client.post(
"/api/account/mailu/rotate",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["sync_ok"] is True
assert payload["password"]
def test_rotate_mailu_password_missing_config(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: False)
resp = client.post(
"/api/account/mailu/rotate",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 503
def test_reset_wger_password(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.wger, "sync_user", lambda *args, **kwargs: {"status": "ok"})
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["status"] == "ok"
def test_reset_firefly_password(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.firefly, "sync_user", lambda *args, **kwargs: {"status": "ok"})
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["status"] == "ok"
def test_nextcloud_mail_sync(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.nextcloud, "sync_mail", lambda *args, **kwargs: {"status": "ok"})
resp = client.post(
"/api/account/nextcloud/mail/sync",
headers={"Authorization": "Bearer token"},
json={"wait": True},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["status"] == "ok"
def test_nextcloud_mail_sync_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.nextcloud, "sync_mail", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.post(
"/api/account/nextcloud/mail/sync",
headers={"Authorization": "Bearer token"},
json={"wait": True},
)
assert resp.status_code == 502
def test_require_admin_allows_group() -> None:
ctx = AuthContext(username="alice", email="", groups=["admin"], claims={})
app_module._require_admin(ctx)
def test_require_account_access_allows_when_disabled(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=[], claims={})
dummy_settings = type("S", (), {"account_allowed_groups": []})()
monkeypatch.setattr(app_module, "settings", dummy_settings)
app_module._require_account_access(ctx)
def test_access_request_deny_bad_json(monkeypatch) -> None:
ctx = AuthContext(username="bstein", email="", groups=["admin"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.db, "fetchone", lambda *args, **kwargs: {"request_code": "REQ2"})
resp = client.post(
"/api/admin/access/requests/alice/deny",
headers={"Authorization": "Bearer token", "Content-Type": "application/json"},
data="{bad}",
)
assert resp.status_code == 200
def test_rotate_mailu_password_missing_username(monkeypatch) -> None:
ctx = AuthContext(username="", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
resp = client.post(
"/api/account/mailu/rotate",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 400
def test_rotate_mailu_password_sync_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.mailu, "sync", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
monkeypatch.setattr(app_module.nextcloud, "sync_mail", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.post(
"/api/account/mailu/rotate",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
payload = resp.json()
assert payload["sync_ok"] is False
assert payload["nextcloud_sync"]["status"] == "error"
def test_rotate_mailu_password_handles_storage_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.storage, "record_task_run", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.post(
"/api/account/mailu/rotate",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
def test_rotate_mailu_password_failure(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")))
resp = client.post(
"/api/account/mailu/rotate",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 502
def test_rotate_mailu_password_http_exception(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(
app_module.keycloak_admin,
"set_user_attribute",
lambda *args, **kwargs: (_ for _ in ()).throw(HTTPException(status_code=409, detail="conflict")),
)
resp = client.post(
"/api/account/mailu/rotate",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 409
def test_wger_reset_missing_username(monkeypatch) -> None:
ctx = AuthContext(username="", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 400
def test_wger_reset_unconfigured(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: False)
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 503
def test_wger_reset_uses_mailu_string(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
captured = {}
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(
app_module.keycloak_admin,
"find_user",
lambda username: {"attributes": {"mailu_email": "alias@bstein.dev"}},
)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
def fake_sync_user(username, email, password, wait=True):
captured["email"] = email
return {"status": "ok"}
monkeypatch.setattr(app_module.wger, "sync_user", fake_sync_user)
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
assert captured["email"] == "alias@bstein.dev"
def test_wger_reset_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
monkeypatch.setattr(app_module.wger, "sync_user", lambda *args, **kwargs: {"status": "error"})
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 502
def test_wger_reset_http_exception(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
def raise_http(*_args, **_kwargs):
raise HTTPException(status_code=409, detail="conflict")
monkeypatch.setattr(app_module.wger, "sync_user", raise_http)
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 409
def test_wger_reset_handles_storage_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.wger, "sync_user", lambda *args, **kwargs: {"status": "ok"})
monkeypatch.setattr(
app_module.storage,
"record_task_run",
lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")),
)
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
def test_wger_reset_handles_find_user_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(
app_module.keycloak_admin,
"find_user",
lambda *_args, **_kwargs: (_ for _ in ()).throw(RuntimeError("fail")),
)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.wger, "sync_user", lambda *args, **kwargs: {"status": "ok"})
resp = client.post(
"/api/account/wger/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
def test_firefly_reset_missing_username(monkeypatch) -> None:
ctx = AuthContext(username="", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 400
def test_firefly_reset_unconfigured(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: False)
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 503
def test_firefly_reset_uses_mailu_string(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
captured = {}
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(
app_module.keycloak_admin,
"find_user",
lambda username: {"attributes": {"mailu_email": "alias@bstein.dev"}},
)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
def fake_sync_user(email, password, wait=True):
captured["email"] = email
return {"status": "ok"}
monkeypatch.setattr(app_module.firefly, "sync_user", fake_sync_user)
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
assert captured["email"] == "alias@bstein.dev"
def test_firefly_reset_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
monkeypatch.setattr(app_module.firefly, "sync_user", lambda *args, **kwargs: {"status": "error"})
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 502
def test_firefly_reset_http_exception(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
def raise_http(*_args, **_kwargs):
raise HTTPException(status_code=409, detail="conflict")
monkeypatch.setattr(app_module.firefly, "sync_user", raise_http)
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 409
def test_firefly_reset_handles_storage_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.keycloak_admin, "find_user", lambda username: {"attributes": {"mailu_email": ["alice@bstein.dev"]}})
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.firefly, "sync_user", lambda *args, **kwargs: {"status": "ok"})
monkeypatch.setattr(
app_module.storage,
"record_task_run",
lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")),
)
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
def test_firefly_reset_handles_find_user_error(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(
app_module.keycloak_admin,
"find_user",
lambda *_args, **_kwargs: (_ for _ in ()).throw(RuntimeError("fail")),
)
monkeypatch.setattr(app_module.keycloak_admin, "set_user_attribute", lambda *args, **kwargs: None)
monkeypatch.setattr(app_module.firefly, "sync_user", lambda *args, **kwargs: {"status": "ok"})
resp = client.post(
"/api/account/firefly/reset",
headers={"Authorization": "Bearer token"},
)
assert resp.status_code == 200
def test_nextcloud_mail_sync_bad_json(monkeypatch) -> None:
ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
client = _client(monkeypatch, ctx)
monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
monkeypatch.setattr(app_module.nextcloud, "sync_mail", lambda *args, **kwargs: {"status": "ok"})
resp = client.post(
"/api/account/nextcloud/mail/sync",
headers={"Authorization": "Bearer token", "Content-Type": "application/json"},
data="{bad}",
)
assert resp.status_code == 200
def test_nextcloud_mail_sync_unconfigured(monkeypatch) -> None:
    """When the Keycloak admin client is not ready, the endpoint replies 503."""
    auth_ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
    api = _client(monkeypatch, auth_ctx)
    monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: False)
    response = api.post(
        "/api/account/nextcloud/mail/sync",
        headers={"Authorization": "Bearer token"},
        json={"wait": True},
    )
    assert response.status_code == 503
def test_nextcloud_mail_sync_missing_username(monkeypatch) -> None:
    """An auth context without a username yields a 400 response."""
    ctx = AuthContext(username="", email="", groups=["dev"], claims={})
    client = _client(monkeypatch, ctx)
    monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
    resp = client.post(
        "/api/account/nextcloud/mail/sync",
        headers={"Authorization": "Bearer token"},
        json={"wait": True},
    )
    assert resp.status_code == 400
def test_nextcloud_mail_sync_http_exception(monkeypatch) -> None:
    """HTTPExceptions raised by the sync service propagate their status code."""
    ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
    client = _client(monkeypatch, ctx)
    monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
    monkeypatch.setattr(
        app_module.nextcloud,
        "sync_mail",
        lambda *args, **kwargs: (_ for _ in ()).throw(HTTPException(status_code=409, detail="conflict")),
    )
    resp = client.post(
        "/api/account/nextcloud/mail/sync",
        headers={"Authorization": "Bearer token"},
        json={"wait": True},
    )
    assert resp.status_code == 409
def test_nextcloud_mail_sync_handles_storage_error(monkeypatch) -> None:
    """The mail sync returns 200 even when the audit-storage write raises."""
    ctx = AuthContext(username="alice", email="", groups=["dev"], claims={})
    client = _client(monkeypatch, ctx)
    monkeypatch.setattr(app_module.keycloak_admin, "ready", lambda: True)
    monkeypatch.setattr(app_module.nextcloud, "sync_mail", lambda *args, **kwargs: {"status": "ok"})
    # record_task_run failing must not propagate to the API response.
    monkeypatch.setattr(
        app_module.storage,
        "record_task_run",
        lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("fail")),
    )
    resp = client.post(
        "/api/account/nextcloud/mail/sync",
        headers={"Authorization": "Bearer token"},
        json={"wait": True},
    )
    assert resp.status_code == 200

View File

@ -47,6 +47,12 @@ def test_keycloak_verify_rejects_wrong_audience(monkeypatch) -> None:
kc.verify(token)
def test_keycloak_verify_missing_token() -> None:
    """verify rejects an empty token with ValueError."""
    oidc = KeycloakOIDC("https://jwks", "https://issuer", "portal")
    with pytest.raises(ValueError):
        oidc.verify("")
def test_keycloak_verify_missing_kid(monkeypatch) -> None:
kc = KeycloakOIDC("https://jwks", "https://issuer", "portal")
monkeypatch.setattr(jwt, "get_unverified_header", lambda token: {})
@ -55,12 +61,100 @@ def test_keycloak_verify_missing_kid(monkeypatch) -> None:
kc.verify("header.payload.sig")
def test_keycloak_verify_refreshes_jwks(monkeypatch) -> None:
    """When the kid misses the cached JWKS, verify retries with force=True."""
    token = _make_token()
    kc = KeycloakOIDC("https://jwks", "https://issuer", "portal")
    calls = {"force": []}
    def fake_get_jwks(force=False):
        # First (non-forced) call misses the kid; the forced refresh supplies it.
        calls["force"].append(force)
        if not force:
            return {"keys": [{"kid": "other"}]}
        return {"keys": [{"kid": "test"}]}
    monkeypatch.setattr(kc, "_get_jwks", fake_get_jwks)
    monkeypatch.setattr(jwt.algorithms.RSAAlgorithm, "from_jwk", lambda key: "dummy")
    monkeypatch.setattr(
        jwt,
        "decode",
        lambda *args, **kwargs: {"azp": "other", "aud": "portal", "preferred_username": "alice"},
    )
    claims = kc.verify(token)
    assert calls["force"] == [False, True]
    assert claims["preferred_username"] == "alice"
def test_keycloak_verify_kid_not_found(monkeypatch) -> None:
    """verify raises ValueError when no JWKS key matches the token's kid."""
    token = _make_token()
    kc = KeycloakOIDC("https://jwks", "https://issuer", "portal")
    monkeypatch.setattr(kc, "_get_jwks", lambda force=False: {"keys": []})
    with pytest.raises(ValueError):
        kc.verify(token)
def test_authenticator_normalizes_groups(monkeypatch) -> None:
    """Groups are normalized: leading slashes stripped, non-string entries dropped.

    Fix: the original test patched ``auth._oidc.verify`` twice in a row with
    different lambdas; the first setattr was dead code, immediately overwritten
    by the second. Only the effective patch is kept.
    """
    token = _make_token()
    auth = Authenticator()
    monkeypatch.setattr(
        auth._oidc,
        "verify",
        lambda token: {"preferred_username": "bob", "groups": ["/admin", 123, "dev"]},
    )
    ctx = auth.authenticate(token)
    assert ctx.username == "bob"
    # "/admin" is stripped to "admin"; the non-string 123 is discarded.
    assert ctx.groups == ["admin", "dev"]
def test_authenticator_normalizes_groups_non_list(monkeypatch) -> None:
    """A non-list groups claim normalizes to an empty list."""
    token = _make_token()
    auth = Authenticator()
    monkeypatch.setattr(auth._oidc, "verify", lambda token: {"preferred_username": "bob", "groups": "admin"})
    ctx = auth.authenticate(token)
    assert ctx.groups == []
def test_keycloak_get_jwks_invalid_payload(monkeypatch) -> None:
    """_get_jwks raises ValueError when the endpoint returns a non-dict body."""
    kc = KeycloakOIDC("https://jwks", "https://issuer", "portal")
    class DummyClient:
        # Minimal context-manager stand-in for httpx.Client.
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc, tb):
            return False
        def get(self, url):
            # Returns a list instead of the expected JWKS mapping.
            return type("Resp", (), {"raise_for_status": lambda self: None, "json": lambda self: []})()
    monkeypatch.setattr("ariadne.auth.keycloak.httpx.Client", lambda *args, **kwargs: DummyClient())
    with pytest.raises(ValueError):
        kc._get_jwks(force=True)
def test_keycloak_get_jwks_cached(monkeypatch) -> None:
    """A second non-forced fetch is served from the cache (one HTTP call total)."""
    kc = KeycloakOIDC("https://jwks", "https://issuer", "portal")
    calls = {"count": 0}
    class DummyClient:
        # Counts how many times the JWKS endpoint is actually hit.
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc, tb):
            return False
        def get(self, url):
            calls["count"] += 1
            return type("Resp", (), {"raise_for_status": lambda self: None, "json": lambda self: {"keys": []}})()
    monkeypatch.setattr("ariadne.auth.keycloak.httpx.Client", lambda *args, **kwargs: DummyClient())
    kc._get_jwks(force=True)
    kc._get_jwks(force=False)
    assert calls["count"] == 1

View File

@ -2,6 +2,8 @@ from __future__ import annotations
from contextlib import contextmanager
import pytest
import ariadne.db.database as db_module
from ariadne.db.database import Database
@ -45,3 +47,83 @@ def test_ensure_schema_runs(monkeypatch) -> None:
db = Database("postgresql://user:pass@localhost/db")
db.ensure_schema()
assert db._pool.conn.executed
def test_fetch_and_execute(monkeypatch) -> None:
    """execute/fetchone/fetchall/close all route through the dummy pool."""
    monkeypatch.setattr(db_module, "ConnectionPool", DummyPool)
    db = Database("postgresql://user:pass@localhost/db")
    db.execute("SELECT 1")
    db.fetchone("SELECT 1")
    db.fetchall("SELECT 1")
    db.close()
    assert db._pool.conn.executed
def test_database_requires_dsn() -> None:
    """Constructing Database with an empty DSN raises RuntimeError."""
    empty_dsn = ""
    with pytest.raises(RuntimeError):
        Database(empty_dsn)
def test_ensure_schema_handles_lock(monkeypatch) -> None:
    """ensure_schema tolerates LockNotAvailable raised by CREATE TABLE."""
    class LockConn(DummyConn):
        def execute(self, query, params=None):
            if "CREATE TABLE" in query:
                raise db_module.psycopg.errors.LockNotAvailable()
            return super().execute(query, params)
    class LockPool(DummyPool):
        def __init__(self, conninfo=None, max_size=None):
            self.conn = LockConn()
    monkeypatch.setattr(db_module, "ConnectionPool", LockPool)
    db = Database("postgresql://user:pass@localhost/db")
    # Must not raise despite the lock error.
    db.ensure_schema()
def test_ensure_schema_ignores_timeout_errors(monkeypatch) -> None:
    """Failures while setting lock/statement timeouts are non-fatal."""
    class TimeoutConn(DummyConn):
        def execute(self, query, params=None):
            if query.startswith("SET lock_timeout") or query.startswith("SET statement_timeout"):
                raise RuntimeError("boom")
            return super().execute(query, params)
    class TimeoutPool(DummyPool):
        def __init__(self, conninfo=None, max_size=None):
            self.conn = TimeoutConn()
    monkeypatch.setattr(db_module, "ConnectionPool", TimeoutPool)
    db = Database("postgresql://user:pass@localhost/db")
    db.ensure_schema()
def test_ensure_schema_handles_lock_on_alter(monkeypatch) -> None:
    """QueryCanceled raised by ALTER TABLE statements is tolerated."""
    class LockConn(DummyConn):
        def execute(self, query, params=None):
            if query.startswith("ALTER TABLE"):
                raise db_module.psycopg.errors.QueryCanceled()
            return super().execute(query, params)
    class LockPool(DummyPool):
        def __init__(self, conninfo=None, max_size=None):
            self.conn = LockConn()
    monkeypatch.setattr(db_module, "ConnectionPool", LockPool)
    db = Database("postgresql://user:pass@localhost/db")
    db.ensure_schema()
def test_fetchone_and_fetchall_return_dicts(monkeypatch) -> None:
    """fetchone yields a single row dict; fetchall yields a list of row dicts."""
    class RowConn(DummyConn):
        def execute(self, query, params=None):
            # The query text selects which canned result shape is returned.
            if "fetchone" in query:
                return DummyResult(row={"id": 1})
            return DummyResult(row=None, rows=[{"id": 1}, {"id": 2}])
    class RowPool(DummyPool):
        def __init__(self, conninfo=None, max_size=None):
            self.conn = RowConn()
    monkeypatch.setattr(db_module, "ConnectionPool", RowPool)
    db = Database("postgresql://user:pass@localhost/db")
    assert db.fetchone("fetchone") == {"id": 1}
    assert db.fetchall("fetchall") == [{"id": 1}, {"id": 2}]

View File

@ -1,7 +1,10 @@
from __future__ import annotations
import base64
import types
import pytest
import ariadne.k8s.client as k8s_client
@ -19,10 +22,11 @@ class DummyResponse:
class DummyClient:
def __init__(self, *args, **kwargs):
self.calls = []
self.payload = {"ok": True}
def request(self, method, url, json=None):
self.calls.append((method, url, json))
return DummyResponse({"ok": True})
return DummyResponse(self.payload)
def __enter__(self):
return self
@ -35,7 +39,106 @@ def test_get_json_builds_url(monkeypatch) -> None:
dummy_settings = types.SimpleNamespace(k8s_api_timeout_sec=5.0)
monkeypatch.setattr(k8s_client, "settings", dummy_settings)
monkeypatch.setattr(k8s_client, "_read_service_account", lambda: ("token", "/tmp/ca"))
monkeypatch.setattr(k8s_client.httpx, "Client", DummyClient)
client = DummyClient()
monkeypatch.setattr(k8s_client.httpx, "Client", lambda *args, **kwargs: client)
result = k8s_client.get_json("/api/test")
assert result == {"ok": True}
assert client.calls[0][1].endswith("/api/test")
def test_post_json_rejects_non_dict(monkeypatch) -> None:
    """post_json raises RuntimeError when the API returns a non-dict body."""
    dummy_settings = types.SimpleNamespace(k8s_api_timeout_sec=5.0)
    monkeypatch.setattr(k8s_client, "settings", dummy_settings)
    monkeypatch.setattr(k8s_client, "_read_service_account", lambda: ("token", "/tmp/ca"))
    client = DummyClient()
    client.payload = ["bad"]
    monkeypatch.setattr(k8s_client.httpx, "Client", lambda *args, **kwargs: client)
    with pytest.raises(RuntimeError):
        k8s_client.post_json("/api/test", {"payload": "ok"})
def test_post_json_success(monkeypatch) -> None:
    """post_json returns the decoded dict payload on success."""
    dummy_settings = types.SimpleNamespace(k8s_api_timeout_sec=5.0)
    monkeypatch.setattr(k8s_client, "settings", dummy_settings)
    monkeypatch.setattr(k8s_client, "_read_service_account", lambda: ("token", "/tmp/ca"))
    client = DummyClient()
    monkeypatch.setattr(k8s_client.httpx, "Client", lambda *args, **kwargs: client)
    result = k8s_client.post_json("/api/test", {"payload": "ok"})
    assert result == {"ok": True}
def test_get_json_rejects_non_dict(monkeypatch) -> None:
    """get_json raises RuntimeError when the API returns a non-dict body."""
    dummy_settings = types.SimpleNamespace(k8s_api_timeout_sec=5.0)
    monkeypatch.setattr(k8s_client, "settings", dummy_settings)
    monkeypatch.setattr(k8s_client, "_read_service_account", lambda: ("token", "/tmp/ca"))
    client = DummyClient()
    client.payload = ["bad"]
    monkeypatch.setattr(k8s_client.httpx, "Client", lambda *args, **kwargs: client)
    with pytest.raises(RuntimeError):
        k8s_client.get_json("/api/test")
def test_read_service_account(monkeypatch, tmp_path) -> None:
    """A populated service-account dir yields the token text and the CA path."""
    account_dir = tmp_path / "sa"
    account_dir.mkdir()
    for filename, content in (("token", "token123"), ("ca.crt", "ca")):
        (account_dir / filename).write_text(content)
    monkeypatch.setattr(k8s_client, "_SA_PATH", account_dir)
    token, ca_path = k8s_client._read_service_account()
    assert token == "token123"
    assert ca_path.endswith("ca.crt")
def test_read_service_account_missing_files(monkeypatch, tmp_path) -> None:
    """An empty service-account directory raises RuntimeError."""
    sa_dir = tmp_path / "sa"
    sa_dir.mkdir()
    monkeypatch.setattr(k8s_client, "_SA_PATH", sa_dir)
    with pytest.raises(RuntimeError):
        k8s_client._read_service_account()
def test_read_service_account_empty_token(monkeypatch, tmp_path) -> None:
    """A present-but-empty token file raises RuntimeError."""
    sa_dir = tmp_path / "sa"
    sa_dir.mkdir()
    (sa_dir / "token").write_text("")
    (sa_dir / "ca.crt").write_text("ca")
    monkeypatch.setattr(k8s_client, "_SA_PATH", sa_dir)
    with pytest.raises(RuntimeError):
        k8s_client._read_service_account()
def test_get_secret_value_decodes(monkeypatch) -> None:
    """get_secret_value base64-decodes the secret's data entry."""
    encoded = base64.b64encode(b"secret").decode()
    payload = {"data": {"key": encoded}}
    monkeypatch.setattr(k8s_client, "get_json", lambda *_a, **_kw: payload)
    assert k8s_client.get_secret_value("ns", "name", "key") == "secret"
def test_get_secret_value_missing(monkeypatch) -> None:
    """A secret without the requested key raises RuntimeError."""
    monkeypatch.setattr(k8s_client, "get_json", lambda *args, **kwargs: {"data": {}})
    with pytest.raises(RuntimeError):
        k8s_client.get_secret_value("ns", "name", "key")
def test_get_secret_value_decode_error(monkeypatch) -> None:
    """Base64 decode failures are surfaced as RuntimeError."""
    monkeypatch.setattr(k8s_client, "get_json", lambda *args, **kwargs: {"data": {"key": "bad"}})
    monkeypatch.setattr(k8s_client.base64, "b64decode", lambda *_args, **_kwargs: (_ for _ in ()).throw(ValueError("bad")))
    with pytest.raises(RuntimeError):
        k8s_client.get_secret_value("ns", "name", "key")
def test_get_secret_value_empty_decoded(monkeypatch) -> None:
    """A decoded value that is only whitespace raises RuntimeError."""
    raw = base64.b64encode(b" ").decode()
    monkeypatch.setattr(k8s_client, "get_json", lambda *args, **kwargs: {"data": {"key": raw}})
    with pytest.raises(RuntimeError):
        k8s_client.get_secret_value("ns", "name", "key")

View File

@ -1,5 +1,7 @@
from __future__ import annotations
import pytest
from ariadne.k8s.jobs import JobSpawner
@ -38,5 +40,203 @@ def test_job_from_cronjob_applies_env_and_ttl() -> None:
assert env_map["BAR"] == "3"
def test_job_from_cronjob_env_not_list() -> None:
    """A malformed (non-list) env on the cronjob is replaced by the overrides."""
    cronjob = {
        "spec": {
            "jobTemplate": {
                "spec": {
                    "template": {
                        "spec": {
                            "containers": [
                                {"name": "sync", "env": "bad"}
                            ]
                        }
                    }
                }
            }
        }
    }
    spawner = JobSpawner("ns", "cron")
    job = spawner._job_from_cronjob(
        cronjob,
        "label",
        env_overrides=[{"name": "FOO", "value": "1"}],
    )
    env = job["spec"]["template"]["spec"]["containers"][0]["env"]
    assert env == [{"name": "FOO", "value": "1"}]
def test_safe_name_fragment() -> None:
    """_safe_name_fragment sanitizes names and falls back to 'job'."""
    cases = {"User@Name": "user-name", "$$$": "job"}
    for raw, expected in cases.items():
        assert JobSpawner._safe_name_fragment(raw) == expected
def test_trigger_creates_job(monkeypatch) -> None:
    """trigger reads the cronjob template, posts a Job, and reports queued."""
    cronjob = {
        "metadata": {"name": "cron"},
        "spec": {
            "jobTemplate": {
                "spec": {
                    "template": {
                        "spec": {
                            "containers": [{"name": "sync", "env": []}]
                        }
                    }
                }
            }
        },
    }
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda *args, **kwargs: cronjob)
    monkeypatch.setattr("ariadne.k8s.jobs.post_json", lambda *args, **kwargs: {"metadata": {"name": "job"}})
    spawner = JobSpawner("ns", "cron")
    result = spawner.trigger("label", None, job_ttl_seconds=30)
    assert result["status"] == "queued"
def test_trigger_uses_manifest(monkeypatch) -> None:
    """With an inline manifest, trigger never queries the API for the cronjob."""
    cronjob = {
        "metadata": {"name": "cron"},
        "spec": {
            "jobTemplate": {
                "spec": {
                    "template": {
                        "spec": {
                            "containers": [{"name": "sync", "env": []}]
                        }
                    }
                }
            }
        },
    }
    def explode(*_args, **_kwargs):
        # Fails the test if trigger falls back to an API lookup.
        raise AssertionError("get_json should not be called")
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", explode)
    monkeypatch.setattr("ariadne.k8s.jobs.post_json", lambda *args, **kwargs: {"metadata": {"name": "job"}})
    spawner = JobSpawner("ns", "cron", manifest=cronjob)
    result = spawner.trigger("label", None, job_ttl_seconds=30)
    assert result["status"] == "queued"
def test_trigger_missing_job_name(monkeypatch) -> None:
    """If the API response lacks a name, trigger reports the posted job name."""
    cronjob = {"spec": {"jobTemplate": {"spec": {"template": {"spec": {"containers": [{"name": "sync"}]}}}}}}
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda *args, **kwargs: cronjob)
    posted = {}
    def fake_post(_path, payload):
        # Capture the job manifest that trigger posts.
        posted["payload"] = payload
        return {}
    monkeypatch.setattr("ariadne.k8s.jobs.post_json", fake_post)
    spawner = JobSpawner("ns", "cron")
    result = spawner.trigger("label", None)
    assert result["job"] == posted["payload"]["metadata"]["name"]
def test_trigger_missing_job_name_raises(monkeypatch) -> None:
    """A generated job manifest without a name makes trigger raise."""
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda *args, **kwargs: {})
    monkeypatch.setattr("ariadne.k8s.jobs.post_json", lambda *args, **kwargs: {})
    spawner = JobSpawner("ns", "cron")
    monkeypatch.setattr(spawner, "_job_from_cronjob", lambda *args, **kwargs: {"metadata": {}})
    with pytest.raises(RuntimeError):
        spawner.trigger("label", None)
def test_wait_for_completion_success(monkeypatch) -> None:
    """A job status with succeeded=1 resolves to ok."""
    responses = [
        {"status": {"succeeded": 1}},
    ]
    def fake_get_json(path):
        return responses.pop(0)
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", fake_get_json)
    spawner = JobSpawner("ns", "cron")
    result = spawner.wait_for_completion("job", timeout_sec=0.1)
    assert result["status"] == "ok"
def test_wait_for_completion_skips_bad_condition(monkeypatch) -> None:
    """Non-dict entries in conditions are skipped before matching Complete."""
    responses = [
        {"status": {"conditions": ["bad", {"type": "Complete", "status": "True"}]}},
    ]
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda path: responses.pop(0))
    spawner = JobSpawner("ns", "cron")
    result = spawner.wait_for_completion("job", timeout_sec=0.1)
    assert result["status"] == "ok"
def test_wait_for_completion_error(monkeypatch) -> None:
    """A job status with failed=1 resolves to error."""
    responses = [
        {"status": {"failed": 1}},
    ]
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda path: responses.pop(0))
    spawner = JobSpawner("ns", "cron")
    result = spawner.wait_for_completion("job", timeout_sec=0.1)
    assert result["status"] == "error"
def test_wait_for_completion_timeout(monkeypatch) -> None:
    """A job that never reports success or failure comes back as running."""
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda _path: {"status": {}})
    spawner = JobSpawner("ns", "cron")
    outcome = spawner.wait_for_completion("job", timeout_sec=0.01)
    assert outcome["status"] == "running"
def test_wait_for_completion_conditions(monkeypatch) -> None:
    """A Complete=True condition resolves to ok."""
    responses = [
        {"status": {"conditions": [{"type": "Complete", "status": "True"}]}},
    ]
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda path: responses.pop(0))
    spawner = JobSpawner("ns", "cron")
    result = spawner.wait_for_completion("job", timeout_sec=0.1)
    assert result["status"] == "ok"
def test_wait_for_completion_failed_condition(monkeypatch) -> None:
    """A Failed=True condition resolves to error."""
    responses = [
        {"status": {"conditions": [{"type": "Failed", "status": "True"}]}},
    ]
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda path: responses.pop(0))
    spawner = JobSpawner("ns", "cron")
    result = spawner.wait_for_completion("job", timeout_sec=0.1)
    assert result["status"] == "error"
def test_trigger_and_wait(monkeypatch) -> None:
    """trigger_and_wait chains trigger with wait_for_completion."""
    cronjob = {"spec": {"jobTemplate": {"spec": {"template": {"spec": {"containers": [{"name": "sync"}]}}}}}}
    monkeypatch.setattr("ariadne.k8s.jobs.get_json", lambda *args, **kwargs: cronjob)
    monkeypatch.setattr(
        "ariadne.k8s.jobs.post_json",
        lambda *args, **kwargs: {"metadata": {"name": "job"}},
    )
    monkeypatch.setattr(
        "ariadne.k8s.jobs.JobSpawner.wait_for_completion",
        lambda self, job, timeout_sec: {"job": job, "status": "ok"},
    )
    spawner = JobSpawner("ns", "cron")
    result = spawner.trigger_and_wait("label", None, timeout_sec=1.0)
    assert result["status"] == "ok"
def test_trigger_and_wait_missing_job_name(monkeypatch) -> None:
    """trigger_and_wait raises when trigger returns an empty job name."""
    spawner = JobSpawner("ns", "cron")
    monkeypatch.setattr(spawner, "trigger", lambda *args, **kwargs: {"job": ""})
    with pytest.raises(RuntimeError):
        spawner.trigger_and_wait("label", None, timeout_sec=0.1)

View File

@ -0,0 +1,16 @@
from __future__ import annotations
import pytest
from ariadne.k8s.manifests import load_cronjob_manifest
def test_load_cronjob_manifest_ok() -> None:
    """The bundled guest-name cronjob manifest loads with expected metadata."""
    manifest = load_cronjob_manifest("comms/guest-name-job.yaml")
    assert manifest["kind"] == "CronJob"
    assert manifest["metadata"]["name"] == "guest-name-randomizer"
def test_load_cronjob_manifest_missing() -> None:
    """Unknown manifest paths surface as FileNotFoundError."""
    bogus_path = "missing.yaml"
    with pytest.raises(FileNotFoundError):
        load_cronjob_manifest(bogus_path)

View File

@ -1,10 +1,59 @@
from __future__ import annotations
from typing import Any
import types
import httpx
import pytest
from ariadne.services.keycloak_admin import KeycloakAdminClient
class DummyResponse:
    """Minimal stand-in for an httpx response used by the admin-client tests."""

    def __init__(self, payload=None, status_code=200, headers=None):
        self._payload = payload
        self.status_code = status_code
        self.headers = headers or {}

    def json(self):
        """Return the canned JSON payload."""
        return self._payload

    def raise_for_status(self):
        """Mimic httpx by raising HTTPStatusError for 4xx/5xx status codes."""
        if self.status_code < 400:
            return
        request = httpx.Request("GET", "https://example.com")
        response = httpx.Response(self.status_code, request=request)
        raise httpx.HTTPStatusError("error", request=request, response=response)
class DummyClient:
    """Scripted HTTP-client replacement that replays a queue of responses.

    Every get/post/put call is recorded in ``calls`` and answered with the
    next queued response; running out of responses raises RuntimeError.
    """

    def __init__(self, responses):
        self._responses = list(responses)
        self.calls = []

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        return False

    def _next(self):
        # Pop the next canned response, failing loudly when exhausted.
        if self._responses:
            return self._responses.pop(0)
        raise RuntimeError("missing response")

    def get(self, url, params=None, headers=None):
        self.calls.append(("get", url, params))
        return self._next()

    def post(self, url, data=None, json=None, headers=None):
        self.calls.append(("post", url, data, json))
        return self._next()

    def put(self, url, headers=None, json=None):
        self.calls.append(("put", url, json))
        return self._next()
def test_set_user_attribute_preserves_profile(monkeypatch) -> None:
client = KeycloakAdminClient()
captured: dict[str, Any] = {}
@ -77,3 +126,554 @@ def test_update_user_safe_merges_payload(monkeypatch) -> None:
assert payload.get("username") == "alice"
assert payload.get("attributes") == {"existing": ["value"], "new": ["item"]}
assert payload.get("requiredActions") == ["UPDATE_PASSWORD"]
def test_get_token_fetches_once(monkeypatch) -> None:
    """_get_token caches the token; a second call makes no HTTP request."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    dummy = DummyClient([DummyResponse({"access_token": "token", "expires_in": 120})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client._get_token() == "token"
    # Any further client construction would raise, proving the cache is used.
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("should not call")))
    assert client._get_token() == "token"
def test_find_user_by_email_case_insensitive(monkeypatch) -> None:
    """Email matching ignores case when selecting the user."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse([{"email": "Alice@Example.com", "id": "1"}])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    user = client.find_user_by_email("alice@example.com")
    assert user["id"] == "1"
def test_find_user_invalid_payload(monkeypatch) -> None:
    """A non-dict entry in the user-search result yields None."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse(["bad"])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.find_user("alice") is None
def test_find_user_by_email_empty() -> None:
    """An empty email short-circuits to None without any HTTP call."""
    admin = KeycloakAdminClient()
    assert admin.find_user_by_email("") is None
def test_find_user_by_email_invalid_payload(monkeypatch) -> None:
    """A non-list search payload makes find_user_by_email return None."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse({"bad": "payload"})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.find_user_by_email("alice@example.com") is None
def test_list_group_names_filters(monkeypatch) -> None:
    """list_group_names drops names listed in the exclude set."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse([{"name": "demo"}, {"name": "admin"}, {"name": "test"}])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.list_group_names(exclude={"admin"}) == ["demo", "test"]
def test_find_user_by_email_skips_non_dict(monkeypatch) -> None:
    """Non-dict entries in the user list are skipped, yielding None."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse(["bad"])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.find_user_by_email("alice@example.com") is None
def test_get_user_invalid_payload(monkeypatch) -> None:
    """get_user raises RuntimeError on a non-dict response body."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse("bad")])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    with pytest.raises(RuntimeError):
        client.get_user("user-1")
def test_update_user_calls_put(monkeypatch) -> None:
    """update_user issues an HTTP call against the admin API."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse({})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    client.update_user("user-1", {"enabled": True})
    assert dummy.calls
def test_update_user_safe_handles_bad_attrs(monkeypatch) -> None:
    """A corrupt (non-dict) attributes value on the stored user is replaced."""
    client = KeycloakAdminClient()
    captured: dict[str, Any] = {}
    def fake_get_user(user_id: str) -> dict[str, Any]:
        # Existing user record carries a malformed attributes value.
        return {"id": user_id, "username": "alice", "attributes": "bad"}
    def fake_update_user(user_id: str, payload: dict[str, Any]) -> None:
        captured["payload"] = payload
    monkeypatch.setattr(client, "get_user", fake_get_user)
    monkeypatch.setattr(client, "update_user", fake_update_user)
    client.update_user_safe("user-1", {"attributes": {"new": ["item"]}})
    assert captured["payload"]["attributes"] == {"new": ["item"]}
def test_set_user_attribute_user_id_missing(monkeypatch) -> None:
    """A found user record without an id makes set_user_attribute raise."""
    admin = KeycloakAdminClient()
    monkeypatch.setattr(admin, "find_user", lambda _username: {"id": ""})
    with pytest.raises(RuntimeError):
        admin.set_user_attribute("alice", "attr", "val")
def test_set_user_attribute_handles_bad_attrs(monkeypatch) -> None:
    """set_user_attribute tolerates a non-dict attributes value on the user."""
    client = KeycloakAdminClient()
    def fake_find_user(username: str) -> dict[str, Any]:
        return {"id": "user-1"}
    def fake_get_user(user_id: str) -> dict[str, Any]:
        # Existing record carries a malformed attributes value.
        return {"id": user_id, "username": "alice", "attributes": "bad"}
    monkeypatch.setattr(client, "find_user", fake_find_user)
    monkeypatch.setattr(client, "get_user", fake_get_user)
    monkeypatch.setattr(client, "update_user", lambda *_args, **_kwargs: None)
    client.set_user_attribute("alice", "attr", "val")
def test_get_group_id_skips_non_dict(monkeypatch) -> None:
    """Non-dict entries in the group list are skipped, yielding None."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse(["bad"])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.get_group_id("demo") is None
def test_get_group_id_cached(monkeypatch) -> None:
    """A second get_group_id lookup is served from the cache (no HTTP call)."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse([{"name": "demo", "id": "gid"}])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.get_group_id("demo") == "gid"
    # Any further client construction would raise, proving the cache is used.
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("no call")))
    assert client.get_group_id("demo") == "gid"
def test_get_group_id_invalid_payload(monkeypatch) -> None:
    """A non-list groups payload makes get_group_id return None."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse({"bad": "payload"})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.get_group_id("demo") is None
def test_iter_users_paginates(monkeypatch) -> None:
    """iter_users keeps fetching pages until a short page is returned."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient(
        [
            DummyResponse([{"id": "1"}, {"id": "2"}]),
            DummyResponse([{"id": "3"}]),
        ]
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    users = client.iter_users(page_size=2, brief=True)
    assert [u["id"] for u in users] == ["1", "2", "3"]
def test_iter_users_empty(monkeypatch) -> None:
    """An empty first page yields an empty user list."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse([])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.iter_users(page_size=2) == []
def test_create_user_parses_location(monkeypatch) -> None:
    """create_user extracts the new user id from the Location header."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse({}, headers={"Location": "http://kc/admin/realms/atlas/users/abc"})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    assert client.create_user({"username": "alice"}) == "abc"
def test_create_user_missing_location(monkeypatch) -> None:
    """create_user raises RuntimeError when no Location header is present."""
    dummy_settings = types.SimpleNamespace(
        keycloak_admin_url="http://kc",
        keycloak_admin_realm="atlas",
        keycloak_admin_client_id="client",
        keycloak_admin_client_secret="secret",
        keycloak_realm="atlas",
    )
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", dummy_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so no auth round-trip happens.
    client._token = "token"
    client._expires_at = 9999999999
    dummy = DummyClient([DummyResponse({}, headers={})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *args, **kwargs: dummy)
    with pytest.raises(RuntimeError):
        client.create_user({"username": "alice"})
def test_get_token_missing_access_token(monkeypatch) -> None:
    """_get_token raises when the token response lacks access_token."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Response carries expiry metadata but no access_token field.
    http_stub = DummyClient([DummyResponse({"expires_in": 120})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    with pytest.raises(RuntimeError):
        client._get_token()
def test_reset_password_raises_on_error(monkeypatch) -> None:
    """A 4xx reply to the password reset propagates as HTTPStatusError."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so the client skips the token endpoint.
    client._token = "token"
    client._expires_at = 9999999999
    http_stub = DummyClient([DummyResponse({}, status_code=400)])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    with pytest.raises(httpx.HTTPStatusError):
        client.reset_password("user", "pw", temporary=True)
def test_get_token_requires_config(monkeypatch) -> None:
    """Blank client credentials make _get_token fail fast."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "",
        "keycloak_admin_client_secret": "",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    with pytest.raises(RuntimeError):
        KeycloakAdminClient()._get_token()
def test_headers_includes_bearer(monkeypatch) -> None:
    """headers() embeds the freshly acquired token as a Bearer credential."""
    client = KeycloakAdminClient()
    monkeypatch.setattr(client, "_get_token", lambda: "token")
    assert client.headers()["Authorization"] == "Bearer token"
def test_find_user_returns_none(monkeypatch) -> None:
    """find_user returns None when the search yields no matches."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so the client skips the token endpoint.
    client._token = "token"
    client._expires_at = 9999999999
    http_stub = DummyClient([DummyResponse([])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    assert client.find_user("alice") is None
def test_get_user_invalid_payload(monkeypatch) -> None:
    """get_user rejects a list payload where a user dict was expected."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so the client skips the token endpoint.
    client._token = "token"
    client._expires_at = 9999999999
    http_stub = DummyClient([DummyResponse(["bad"])])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    with pytest.raises(RuntimeError):
        client.get_user("id")
def test_get_user_success(monkeypatch) -> None:
    """get_user returns the parsed user representation on success."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so the client skips the token endpoint.
    client._token = "token"
    client._expires_at = 9999999999
    http_stub = DummyClient([DummyResponse({"id": "1"})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    assert client.get_user("id")["id"] == "1"
def test_set_user_attribute_user_missing(monkeypatch) -> None:
    """set_user_attribute raises when the username cannot be resolved."""
    client = KeycloakAdminClient()
    monkeypatch.setattr(client, "find_user", lambda username: None)
    with pytest.raises(RuntimeError):
        client.set_user_attribute("alice", "attr", "value")
def test_set_user_attribute_user_id_missing(monkeypatch) -> None:
    """set_user_attribute raises when the resolved user carries no id."""
    client = KeycloakAdminClient()
    monkeypatch.setattr(client, "find_user", lambda username: {})
    with pytest.raises(RuntimeError):
        client.set_user_attribute("alice", "attr", "value")
def test_add_user_to_group(monkeypatch) -> None:
    """add_user_to_group issues a PUT against the admin API."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so the client skips the token endpoint.
    client._token = "token"
    client._expires_at = 9999999999
    http_stub = DummyClient([DummyResponse({})])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    client.add_user_to_group("user", "group")
    first_call = http_stub.calls[0]
    assert first_call[0] == "put"
def test_get_user_raises_on_non_dict_payload(monkeypatch) -> None:
    """get_user rejects a bare string payload."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so the client skips the token endpoint.
    client._token = "token"
    client._expires_at = 9999999999
    http_stub = DummyClient([DummyResponse("bad")])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    with pytest.raises(RuntimeError):
        client.get_user("user-1")
def test_update_user_safe_coerces_bad_attrs(monkeypatch) -> None:
    """update_user_safe tolerates a non-dict attributes value from the sanitizer."""
    client = KeycloakAdminClient()
    monkeypatch.setattr(client, "get_user", lambda *_a, **_kw: {"id": "user-1"})
    # Sanitizer deliberately returns a malformed attributes payload.
    monkeypatch.setattr(client, "_safe_update_payload", lambda *_a, **_kw: {"attributes": "bad"})
    monkeypatch.setattr(client, "update_user", lambda *_a, **_kw: None)
    client.update_user_safe("user-1", {"attributes": {"new": ["item"]}})
def test_set_user_attribute_coerces_bad_attrs(monkeypatch) -> None:
    """set_user_attribute tolerates a non-dict attributes value from the sanitizer."""
    client = KeycloakAdminClient()
    monkeypatch.setattr(client, "find_user", lambda username: {"id": "user-1"})
    monkeypatch.setattr(client, "get_user", lambda *_a, **_kw: {"id": "user-1"})
    # Sanitizer deliberately returns a malformed attributes payload.
    monkeypatch.setattr(client, "_safe_update_payload", lambda *_a, **_kw: {"attributes": "bad"})
    monkeypatch.setattr(client, "update_user", lambda *_a, **_kw: None)
    client.set_user_attribute("alice", "attr", "value")
def test_set_user_attribute_user_id_missing_raises(monkeypatch) -> None:
    """set_user_attribute raises when the resolved user id is empty."""
    client = KeycloakAdminClient()
    monkeypatch.setattr(client, "find_user", lambda username: {"id": ""})
    with pytest.raises(RuntimeError):
        client.set_user_attribute("alice", "attr", "value")
def test_get_user_rejects_non_dict_payload(monkeypatch) -> None:
    """get_user fails on a numeric payload and names the problem in the error."""
    fake_settings = types.SimpleNamespace(**{
        "keycloak_admin_url": "http://kc",
        "keycloak_admin_realm": "atlas",
        "keycloak_admin_client_id": "client",
        "keycloak_admin_client_secret": "secret",
        "keycloak_realm": "atlas",
    })
    monkeypatch.setattr("ariadne.services.keycloak_admin.settings", fake_settings)
    client = KeycloakAdminClient()
    # Pre-seed a valid token so the client skips the token endpoint.
    client._token = "token"
    client._expires_at = 9999999999
    http_stub = DummyClient([DummyResponse(123)])
    monkeypatch.setattr("ariadne.services.keycloak_admin.httpx.Client", lambda *_a, **_kw: http_stub)
    with pytest.raises(RuntimeError) as exc:
        client.get_user("user-1")
    assert "unexpected user payload" in str(exc.value)

View File

@ -0,0 +1,59 @@
from __future__ import annotations
from ariadne.services import keycloak_profile
def test_profile_sync_removes_required_actions(monkeypatch) -> None:
    """A fully populated profile gets its pending requiredActions cleared.

    The fake admin client yields one complete user; run_profile_sync should
    push one update whose requiredActions list is emptied.
    """
    calls = []

    # Fix: payload values include lists and bools, so dict[str, str] was wrong.
    def fake_update(user_id: str, payload: dict[str, object]) -> None:
        calls.append((user_id, payload))

    user = {
        "id": "user-1",
        "username": "alice",
        "enabled": True,
        "email": "alice@example.com",
        "emailVerified": True,
        "firstName": "Alice",
        "lastName": "Atlas",
        "requiredActions": ["UPDATE_PROFILE", "VERIFY_EMAIL"],
    }
    monkeypatch.setattr(keycloak_profile.keycloak_admin, "ready", lambda: True)
    monkeypatch.setattr(keycloak_profile.keycloak_admin, "iter_users", lambda page_size=200, brief=False: [user])
    monkeypatch.setattr(keycloak_profile.keycloak_admin, "update_user_safe", fake_update)
    summary = keycloak_profile.run_profile_sync()
    assert summary.updated == 1
    assert calls
    assert calls[0][0] == "user-1"
    # The sync must blank out the pending actions for a complete profile.
    assert calls[0][1]["requiredActions"] == []
def test_profile_sync_skips_incomplete(monkeypatch) -> None:
    """A profile with blank name fields is left untouched by the sync.

    Because firstName/lastName are empty, run_profile_sync should neither
    count an update nor call update_user_safe.
    """
    calls = []

    # Fix: payload values include lists and bools, so dict[str, str] was wrong.
    def fake_update(user_id: str, payload: dict[str, object]) -> None:
        calls.append((user_id, payload))

    user = {
        "id": "user-2",
        "username": "bob",
        "enabled": True,
        "email": "bob@example.com",
        "emailVerified": True,
        "firstName": "",
        "lastName": "",
        "requiredActions": ["UPDATE_PROFILE"],
    }
    monkeypatch.setattr(keycloak_profile.keycloak_admin, "ready", lambda: True)
    monkeypatch.setattr(keycloak_profile.keycloak_admin, "iter_users", lambda page_size=200, brief=False: [user])
    monkeypatch.setattr(keycloak_profile.keycloak_admin, "update_user_safe", fake_update)
    summary = keycloak_profile.run_profile_sync()
    assert summary.updated == 0
    assert not calls

View File

@ -2,8 +2,10 @@ from __future__ import annotations
import json
import logging
import sys
from ariadne.utils.logging import JsonFormatter
import ariadne.utils.logging as logging_module
from ariadne.utils.logging import JsonFormatter, LogConfig, configure_logging, task_context
def test_json_formatter_includes_extra_fields() -> None:
@ -25,3 +27,70 @@ def test_json_formatter_includes_extra_fields() -> None:
assert payload["message"] == "hello"
assert payload["event"] == "unit_test"
assert payload["request_code"] == "REQ123"
def test_configure_logging_idempotent() -> None:
    """A second configure_logging call must not install duplicate handlers."""
    configure_logging(LogConfig(level="INFO"))
    root = logging.getLogger()
    snapshot = list(root.handlers)
    configure_logging(LogConfig(level="DEBUG"))
    assert root.handlers == snapshot
def test_json_formatter_includes_exception() -> None:
    """Formatted error records carry the rendered traceback under exc_info."""
    try:
        raise RuntimeError("boom")
    except RuntimeError:
        captured = sys.exc_info()
    record = logging.LogRecord(
        name="ariadne.test",
        level=logging.ERROR,
        pathname=__file__,
        lineno=20,
        msg="failed",
        args=(),
        exc_info=captured,
    )
    payload = json.loads(JsonFormatter().format(record))
    assert "exc_info" in payload
def test_json_formatter_includes_stack_info() -> None:
    """Stack info supplied at log time is surfaced in the JSON payload."""
    record = logging.LogRecord(
        name="ariadne.test",
        level=logging.INFO,
        pathname=__file__,
        lineno=30,
        msg="hello",
        args=(),
        exc_info=None,
        sinfo="stack",
    )
    payload = json.loads(JsonFormatter().format(record))
    assert payload["stack_info"] == "stack"
def test_configure_logging_invalid_level(monkeypatch) -> None:
    """An unknown level name falls back to INFO instead of raising."""
    # Reset the module guard so configure_logging actually re-runs.
    monkeypatch.setattr(logging_module, "_LOGGING_CONFIGURED", False)
    configure_logging(LogConfig(level="nope"))
    assert logging.getLogger().level == logging.INFO
def test_task_context_injects_task_name() -> None:
    """_ContextFilter stamps the active task name onto log records."""
    record = logging.LogRecord(
        name="ariadne.test",
        level=logging.INFO,
        pathname=__file__,
        lineno=50,
        msg="hello",
        args=(),
        exc_info=None,
    )
    # Outside a task context the record has no meaningful taskName.
    assert getattr(record, "taskName", None) in {None, ""}
    with task_context("schedule.demo"):
        logging_module._ContextFilter().filter(record)
    assert record.taskName == "schedule.demo"

View File

@ -7,6 +7,30 @@ import pytest
from ariadne.services.mailer import Mailer, MailerError
class DummySMTP:
    """Minimal stand-in for smtplib.SMTP / SMTP_SSL used by the mailer tests.

    Records the SMTP verbs invoked in ``calls`` and keeps the last message
    handed to ``send_message`` so tests can inspect the conversation.
    Supports the context-manager protocol like the real client.
    """

    def __init__(self, host=None, port=None, timeout=None):
        self.calls = []  # ordered log of SMTP verbs invoked
        self.message = None  # last message passed to send_message

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Returning False means exceptions from the with-block propagate.
        return False

    def ehlo(self):
        self.calls.append("ehlo")

    def starttls(self):
        self.calls.append("starttls")

    def login(self, username, password):
        self.calls.append(("login", username, password))

    def send_message(self, message):
        self.message = message
def test_mailer_requires_host(monkeypatch) -> None:
dummy = types.SimpleNamespace(
smtp_host="",
@ -49,3 +73,64 @@ def test_send_welcome_calls_send(monkeypatch) -> None:
monkeypatch.setattr(svc, "send", _send)
svc.send_welcome("user@bstein.dev", "CODE", "https://bstein.dev/onboarding?code=CODE", username="user")
assert called["subject"] == "Welcome to Titan Lab"
def test_mailer_send_uses_starttls(monkeypatch) -> None:
    """With STARTTLS enabled the plain SMTP transport is used and send succeeds."""
    fake_settings = types.SimpleNamespace(
        smtp_host="smtp",
        smtp_port=25,
        smtp_username="user",
        smtp_password="pass",
        smtp_from="test@bstein.dev",
        smtp_starttls=True,
        smtp_use_tls=False,
        smtp_timeout_sec=5.0,
    )
    monkeypatch.setattr("ariadne.services.mailer.settings", fake_settings)
    monkeypatch.setattr("ariadne.services.mailer.smtplib.SMTP", DummySMTP)
    outcome = Mailer().send("subject", ["a@bstein.dev"], "body", html_body="<p>hi</p>")
    assert outcome.ok is True
def test_mailer_send_uses_tls(monkeypatch) -> None:
    """With implicit TLS enabled the SMTP_SSL transport is used and send succeeds."""
    fake_settings = types.SimpleNamespace(
        smtp_host="smtp",
        smtp_port=465,
        smtp_username="user",
        smtp_password="pass",
        smtp_from="test@bstein.dev",
        smtp_starttls=False,
        smtp_use_tls=True,
        smtp_timeout_sec=5.0,
    )
    monkeypatch.setattr("ariadne.services.mailer.settings", fake_settings)
    monkeypatch.setattr("ariadne.services.mailer.smtplib.SMTP_SSL", DummySMTP)
    outcome = Mailer().send("subject", ["a@bstein.dev"], "body")
    assert outcome.ok is True
def test_mailer_send_raises_error(monkeypatch) -> None:
    """A transport failure during send_message surfaces as MailerError."""
    fake_settings = types.SimpleNamespace(
        smtp_host="smtp",
        smtp_port=25,
        smtp_username="user",
        smtp_password="pass",
        smtp_from="test@bstein.dev",
        smtp_starttls=False,
        smtp_use_tls=False,
        smtp_timeout_sec=5.0,
    )
    monkeypatch.setattr("ariadne.services.mailer.settings", fake_settings)

    class ExplodingSMTP(DummySMTP):
        """DummySMTP variant whose delivery always fails."""

        def send_message(self, message):
            raise RuntimeError("boom")

    monkeypatch.setattr("ariadne.services.mailer.smtplib.SMTP", ExplodingSMTP)
    with pytest.raises(MailerError):
        Mailer().send("subject", ["a@bstein.dev"], "body")

View File

@ -1,17 +1,7 @@
from __future__ import annotations
from prometheus_client import generate_latest
from ariadne.metrics.metrics import record_task_run, record_schedule_state
from ariadne.metrics.metrics import set_access_request_counts
def test_metrics_include_task_run() -> None:
    """Recording a task run exposes the ariadne_task_runs_total counter."""
    record_task_run("unit", "ok", 0.2)
    assert b"ariadne_task_runs_total" in generate_latest()
def test_metrics_include_schedule() -> None:
    """Recording schedule state exposes the ariadne_schedule_last_status gauge."""
    record_schedule_state("sched", 1, 1, 2, True)
    assert b"ariadne_schedule_last_status" in generate_latest()
def test_set_access_request_counts() -> None:
    """set_access_request_counts accepts a status->count map without error.

    Fix: the original test made no assertion at all, so it could never fail.
    At minimum, assert the registry still renders after the gauge update.
    """
    set_access_request_counts({"pending": 2, "approved": 1})
    assert generate_latest()

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,8 @@
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timezone
import time
from ariadne.scheduler.cron import CronScheduler, CronTask
@ -9,6 +11,7 @@ class DummyStorage:
def __init__(self) -> None:
self.task_runs = []
self.schedule_states = []
self.events = []
def record_task_run(self, *args, **kwargs):
self.task_runs.append((args, kwargs))
@ -16,6 +19,9 @@ class DummyStorage:
def update_schedule_state(self, *args, **kwargs):
self.schedule_states.append((args, kwargs))
def record_event(self, *args, **kwargs):
self.events.append((args, kwargs))
def test_execute_task_records_failure() -> None:
storage = DummyStorage()
@ -45,3 +51,132 @@ def test_execute_task_records_success() -> None:
assert storage.task_runs
assert storage.schedule_states
def test_scheduler_start_stop() -> None:
    """start() spawns the loop thread and stop() shuts it down cleanly."""
    scheduler = CronScheduler(DummyStorage(), tick_sec=0.01)
    scheduler.add_task("noop", "* * * * *", lambda: None)
    scheduler.start()
    # Give the loop one tick before asking it to stop.
    time.sleep(0.02)
    scheduler.stop()
    assert scheduler._thread is not None
def test_scheduler_start_skips_when_running() -> None:
    """start() is a no-op while the worker thread is still alive."""
    scheduler = CronScheduler(DummyStorage(), tick_sec=0.01)

    class AliveThread:
        """Thread stand-in that always reports itself as running."""

        def __init__(self) -> None:
            self.started = False

        def is_alive(self) -> bool:
            return True

        def start(self) -> None:
            self.started = True

        def join(self, timeout=None) -> None:
            return None

    scheduler._thread = AliveThread()
    scheduler.start()
    # The already-alive thread must not be started again.
    assert scheduler._thread.started is False
def test_compute_next_handles_naive_timestamp() -> None:
    """_compute_next normalizes a naive base time into an aware one."""
    scheduler = CronScheduler(DummyStorage(), tick_sec=0.1)
    upcoming = scheduler._compute_next("* * * * *", datetime(2024, 1, 1, 12, 0, 0))
    assert upcoming.tzinfo is not None
def test_run_loop_skips_running_task(monkeypatch) -> None:
    """A task already marked as running is not re-dispatched by the loop."""
    scheduler = CronScheduler(DummyStorage(), tick_sec=0.01)
    scheduler._tasks["test"] = CronTask(name="test", cron_expr="* * * * *", runner=lambda: None)
    scheduler._next_run["test"] = datetime.now(timezone.utc)
    scheduler._running.add("test")
    # Patch sleep so the loop exits after its first tick.
    monkeypatch.setattr(
        "ariadne.scheduler.cron.time.sleep",
        lambda *_a, **_kw: scheduler._stop_event.set(),
    )
    scheduler._run_loop()
def test_run_loop_spawns_thread(monkeypatch) -> None:
    """A due task causes the loop to spawn a worker thread for it."""
    scheduler = CronScheduler(DummyStorage(), tick_sec=0.01)
    scheduler._tasks["test"] = CronTask(name="test", cron_expr="* * * * *", runner=lambda: None)
    scheduler._next_run["test"] = datetime.now(timezone.utc)
    spawned = {"flag": False}

    class RecordingThread:
        """threading.Thread stand-in that only records the start() call."""

        def __init__(self, target=None, args=(), name=None, daemon=None):
            self.target = target
            self.args = args
            self.name = name
            self.daemon = daemon

        def start(self) -> None:
            spawned["flag"] = True

    monkeypatch.setattr("ariadne.scheduler.cron.threading.Thread", RecordingThread)
    # Patch sleep so the loop exits after its first tick.
    monkeypatch.setattr(
        "ariadne.scheduler.cron.time.sleep",
        lambda *_a, **_kw: scheduler._stop_event.set(),
    )
    scheduler._run_loop()
    assert spawned["flag"] is True
def test_execute_task_records_result_payload(monkeypatch=None) -> None:
    """A dict returned by the runner is persisted in the audit event payload."""
    storage = DummyStorage()
    scheduler = CronScheduler(storage, tick_sec=0.1)
    task = CronTask(
        name="result-task",
        cron_expr="*/5 * * * *",
        runner=lambda: {"status": "ok", "count": 2},
    )
    scheduler._next_run["result-task"] = datetime.now(timezone.utc)
    scheduler._execute_task(task)
    assert storage.events
    event_args, _ = storage.events[0]
    assert event_args[0] == "schedule_task"
    assert event_args[1]["result"]["status"] == "ok"
def test_execute_task_handles_storage_errors() -> None:
    """Audit/storage failures must not escape _execute_task."""

    class FailingStorage(DummyStorage):
        """Storage stand-in whose every persistence hook raises."""

        def record_event(self, *args, **kwargs):
            raise RuntimeError("fail")

        def record_task_run(self, *args, **kwargs):
            raise RuntimeError("fail")

        def update_schedule_state(self, *args, **kwargs):
            raise RuntimeError("fail")

    scheduler = CronScheduler(FailingStorage(), tick_sec=0.1)

    @dataclass(frozen=True)
    class Summary:
        ok: bool

    task = CronTask(name="ok-task", cron_expr="*/5 * * * *", runner=lambda: Summary(ok=True))
    scheduler._next_run["ok-task"] = datetime.now(timezone.utc)
    # Must complete without raising despite the broken storage.
    scheduler._execute_task(task)
def test_format_result_string() -> None:
    """String results pass through _format_result unchanged."""
    detail, payload = CronScheduler._format_result("ok")
    assert (detail, payload) == ("ok", "ok")

View File

@ -1,5 +1,6 @@
from __future__ import annotations
import time
import types
import pytest
@ -14,7 +15,7 @@ from ariadne.services.vaultwarden import VaultwardenService
class DummySpawner:
def __init__(self, namespace, cronjob):
def __init__(self, namespace, cronjob, manifest=None):
self.namespace = namespace
self.cronjob = cronjob
self.calls = []
@ -58,10 +59,14 @@ class DummyResponse:
class DummyVaultwardenClient:
def __init__(self):
self.calls = []
self.responses = {}
def post(self, path, json=None, data=None):
self.calls.append((path, json, data))
return DummyResponse(200, "")
resp = self.responses.get(path)
if resp is None:
resp = DummyResponse(200, "")
return resp
def close(self):
return None
@ -75,7 +80,10 @@ def test_nextcloud_sync_mail_builds_env(monkeypatch) -> None:
nextcloud_mail_sync_job_ttl_sec=3600,
)
monkeypatch.setattr("ariadne.services.nextcloud.settings", dummy)
monkeypatch.setattr("ariadne.services.nextcloud.JobSpawner", lambda ns, cj: DummySpawner(ns, cj))
monkeypatch.setattr(
"ariadne.services.nextcloud.JobSpawner",
lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
)
svc = NextcloudService()
result = svc.sync_mail("alice", wait=True)
@ -97,7 +105,10 @@ def test_wger_sync_user_env(monkeypatch) -> None:
wger_user_sync_wait_timeout_sec=60.0,
)
monkeypatch.setattr("ariadne.services.wger.settings", dummy)
monkeypatch.setattr("ariadne.services.wger.JobSpawner", lambda ns, cj: DummySpawner(ns, cj))
monkeypatch.setattr(
"ariadne.services.wger.JobSpawner",
lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
)
svc = WgerService()
result = svc.sync_user("alice", "alice@bstein.dev", "pw", wait=True)
@ -111,6 +122,24 @@ def test_wger_sync_user_env(monkeypatch) -> None:
assert env_map["WGER_EMAIL"] == "alice@bstein.dev"
def test_wger_sync_user_queued(monkeypatch) -> None:
    """Without wait, the wger user sync only queues the job."""
    fake_settings = types.SimpleNamespace(
        wger_namespace="health",
        wger_user_sync_cronjob="wger-user-sync",
        wger_admin_cronjob="wger-admin-ensure",
        wger_user_sync_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.wger.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.wger.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    outcome = WgerService().sync_user("alice", "alice@bstein.dev", "pw", wait=False)
    assert outcome["status"] == "queued"
def test_firefly_sync_user_env(monkeypatch) -> None:
dummy = types.SimpleNamespace(
firefly_namespace="finance",
@ -118,7 +147,10 @@ def test_firefly_sync_user_env(monkeypatch) -> None:
firefly_user_sync_wait_timeout_sec=60.0,
)
monkeypatch.setattr("ariadne.services.firefly.settings", dummy)
monkeypatch.setattr("ariadne.services.firefly.JobSpawner", lambda ns, cj: DummySpawner(ns, cj))
monkeypatch.setattr(
"ariadne.services.firefly.JobSpawner",
lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
)
svc = FireflyService()
result = svc.sync_user("alice@bstein.dev", "pw", wait=True)
@ -131,6 +163,54 @@ def test_firefly_sync_user_env(monkeypatch) -> None:
assert env_map["FIREFLY_USER_EMAIL"] == "alice@bstein.dev"
def test_firefly_sync_user_queued(monkeypatch) -> None:
    """Without wait, the Firefly user sync only queues the job."""
    fake_settings = types.SimpleNamespace(
        firefly_namespace="finance",
        firefly_user_sync_cronjob="firefly-user-sync",
        firefly_user_sync_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.firefly.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.firefly.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    outcome = FireflyService().sync_user("alice@bstein.dev", "pw", wait=False)
    assert outcome["status"] == "queued"
def test_firefly_sync_missing_inputs(monkeypatch) -> None:
    """Empty email or password is rejected before any job is spawned."""
    fake_settings = types.SimpleNamespace(
        firefly_namespace="finance",
        firefly_user_sync_cronjob="firefly-user-sync",
        firefly_user_sync_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.firefly.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.firefly.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    svc = FireflyService()
    # Both a blank email and a blank password must be rejected.
    for email, password in (("", "pw"), ("alice@bstein.dev", "")):
        with pytest.raises(RuntimeError):
            svc.sync_user(email, password, wait=True)
def test_firefly_sync_missing_config(monkeypatch) -> None:
    """Blank namespace/cronjob configuration aborts the Firefly sync."""
    fake_settings = types.SimpleNamespace(
        firefly_namespace="",
        firefly_user_sync_cronjob="",
        firefly_user_sync_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.firefly.settings", fake_settings)
    with pytest.raises(RuntimeError):
        FireflyService().sync_user("alice@bstein.dev", "pw", wait=True)
def test_vault_sync_jobs(monkeypatch) -> None:
dummy = types.SimpleNamespace(
vault_namespace="vault",
@ -139,7 +219,10 @@ def test_vault_sync_jobs(monkeypatch) -> None:
vault_job_wait_timeout_sec=120.0,
)
monkeypatch.setattr("ariadne.services.vault.settings", dummy)
monkeypatch.setattr("ariadne.services.vault.JobSpawner", lambda ns, cj: DummySpawner(ns, cj))
monkeypatch.setattr(
"ariadne.services.vault.JobSpawner",
lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
)
svc = VaultService()
result = svc.sync_k8s_auth(wait=True)
@ -151,6 +234,60 @@ def test_vault_sync_jobs(monkeypatch) -> None:
assert timeout == 120.0
def test_vault_sync_k8s_auth_queue(monkeypatch) -> None:
    """Without wait, the Vault k8s-auth sync only queues the job."""
    fake_settings = types.SimpleNamespace(
        vault_namespace="vault",
        vault_k8s_auth_cronjob="vault-k8s-auth-config",
        vault_oidc_cronjob="vault-oidc-config",
        vault_job_wait_timeout_sec=120.0,
    )
    monkeypatch.setattr("ariadne.services.vault.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.vault.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    outcome = VaultService().sync_k8s_auth(wait=False)
    assert outcome["status"] == "queued"
def test_vault_sync_oidc_queue(monkeypatch) -> None:
    """Without wait, the Vault OIDC sync only queues the job."""
    fake_settings = types.SimpleNamespace(
        vault_namespace="vault",
        vault_k8s_auth_cronjob="vault-k8s-auth-config",
        vault_oidc_cronjob="vault-oidc-config",
        vault_job_wait_timeout_sec=120.0,
    )
    monkeypatch.setattr("ariadne.services.vault.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.vault.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    outcome = VaultService().sync_oidc(wait=False)
    assert outcome["status"] == "queued"
def test_vault_sync_oidc_wait(monkeypatch) -> None:
    """With wait, the Vault OIDC sync completes and reports ok."""
    fake_settings = types.SimpleNamespace(
        vault_namespace="vault",
        vault_k8s_auth_cronjob="vault-k8s-auth-config",
        vault_oidc_cronjob="vault-oidc-config",
        vault_job_wait_timeout_sec=120.0,
    )
    monkeypatch.setattr("ariadne.services.vault.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.vault.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    outcome = VaultService().sync_oidc(wait=True)
    assert outcome["status"] == "ok"
def test_comms_jobs(monkeypatch) -> None:
dummy = types.SimpleNamespace(
comms_namespace="comms",
@ -161,7 +298,10 @@ def test_comms_jobs(monkeypatch) -> None:
comms_job_wait_timeout_sec=60.0,
)
monkeypatch.setattr("ariadne.services.comms.settings", dummy)
monkeypatch.setattr("ariadne.services.comms.JobSpawner", lambda ns, cj: DummySpawner(ns, cj))
monkeypatch.setattr(
"ariadne.services.comms.JobSpawner",
lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
)
svc = CommsService()
result = svc.run_guest_name_randomizer(wait=True)
@ -173,6 +313,46 @@ def test_comms_jobs(monkeypatch) -> None:
assert timeout == 60.0
def test_comms_pin_invite(monkeypatch) -> None:
    """Without wait, the pin-invite comms job only queues."""
    fake_settings = types.SimpleNamespace(
        comms_namespace="comms",
        comms_guest_name_cronjob="guest-name-randomizer",
        comms_pin_invite_cronjob="pin-othrys-invite",
        comms_reset_room_cronjob="othrys-room-reset",
        comms_seed_room_cronjob="seed-othrys-room",
        comms_job_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.comms.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.comms.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    outcome = CommsService().run_pin_invite(wait=False)
    assert outcome["status"] == "queued"
def test_comms_reset_and_seed(monkeypatch) -> None:
    """Room reset queues without wait; seeding with wait completes ok."""
    fake_settings = types.SimpleNamespace(
        comms_namespace="comms",
        comms_guest_name_cronjob="guest-name-randomizer",
        comms_pin_invite_cronjob="pin-othrys-invite",
        comms_reset_room_cronjob="othrys-room-reset",
        comms_seed_room_cronjob="seed-othrys-room",
        comms_job_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.comms.settings", fake_settings)
    monkeypatch.setattr(
        "ariadne.services.comms.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    svc = CommsService()
    assert svc.run_reset_room(wait=False)["status"] == "queued"
    assert svc.run_seed_room(wait=True)["status"] == "ok"
def test_mailu_sync_includes_force(monkeypatch) -> None:
dummy_settings = types.SimpleNamespace(
mailu_sync_url="http://mailu",
@ -196,6 +376,43 @@ def test_mailu_sync_includes_force(monkeypatch) -> None:
assert client.payload["force"] is True
def test_mailu_sync_skips_without_url(monkeypatch) -> None:
    """An empty sync URL disables the Mailu sync entirely."""
    fake_settings = types.SimpleNamespace(
        mailu_sync_url="",
        mailu_sync_wait_timeout_sec=10.0,
        mailu_db_host="localhost",
        mailu_db_port=5432,
        mailu_db_name="mailu",
        mailu_db_user="mailu",
        mailu_db_password="secret",
        mailu_domain="bstein.dev",
    )
    monkeypatch.setattr("ariadne.services.mailu.settings", fake_settings)
    assert MailuService().sync("provision") is None
def test_mailu_sync_raises_on_error(monkeypatch) -> None:
    """A 5xx response from the sync endpoint becomes a RuntimeError."""
    fake_settings = types.SimpleNamespace(
        mailu_sync_url="http://mailu",
        mailu_sync_wait_timeout_sec=10.0,
        mailu_db_host="localhost",
        mailu_db_port=5432,
        mailu_db_name="mailu",
        mailu_db_user="mailu",
        mailu_db_password="secret",
    )
    http_stub = DummyClient()
    http_stub.status_code = 500
    monkeypatch.setattr("ariadne.services.mailu.settings", fake_settings)
    monkeypatch.setattr("ariadne.services.mailu.httpx.Client", lambda *_a, **_kw: http_stub)
    with pytest.raises(RuntimeError):
        MailuService().sync("provision")
def test_vaultwarden_invite_uses_admin_session(monkeypatch) -> None:
dummy_settings = types.SimpleNamespace(
vaultwarden_namespace="vaultwarden",
@ -225,6 +442,295 @@ def test_vaultwarden_invite_uses_admin_session(monkeypatch) -> None:
assert any(call[0] == "/admin/invite" for call in client.calls)
def test_vaultwarden_invite_handles_rate_limit(monkeypatch) -> None:
    """A 429 from /admin/invite is reported as rate_limited."""
    fake_settings = types.SimpleNamespace(
        vaultwarden_namespace="vaultwarden",
        vaultwarden_admin_secret_name="vaultwarden-admin",
        vaultwarden_admin_secret_key="ADMIN_TOKEN",
        vaultwarden_admin_rate_limit_backoff_sec=600,
        vaultwarden_admin_session_ttl_sec=900,
        vaultwarden_service_host="vaultwarden-service.vaultwarden.svc.cluster.local",
        vaultwarden_pod_label="app=vaultwarden",
        vaultwarden_pod_port=80,
    )
    http_stub = DummyVaultwardenClient()
    http_stub.responses["/admin/invite"] = DummyResponse(429, "rate limited")
    monkeypatch.setattr("ariadne.services.vaultwarden.settings", fake_settings)
    monkeypatch.setattr("ariadne.services.vaultwarden.get_secret_value", lambda *_a, **_kw: "token")
    monkeypatch.setattr("ariadne.services.vaultwarden.httpx.Client", lambda *_a, **_kw: http_stub)
    monkeypatch.setattr(
        "ariadne.services.vaultwarden.VaultwardenService._find_pod_ip",
        staticmethod(lambda *_a, **_kw: "127.0.0.1"),
    )
    outcome = VaultwardenService().invite_user("alice@bstein.dev")
    assert outcome.status == "rate_limited"
def test_vaultwarden_invite_existing_user(monkeypatch) -> None:
    """A 409 'already exists' reply maps to the already_present status."""
    fake_settings = types.SimpleNamespace(
        vaultwarden_namespace="vaultwarden",
        vaultwarden_admin_secret_name="vaultwarden-admin",
        vaultwarden_admin_secret_key="ADMIN_TOKEN",
        vaultwarden_admin_rate_limit_backoff_sec=600,
        vaultwarden_admin_session_ttl_sec=900,
        vaultwarden_service_host="vaultwarden-service.vaultwarden.svc.cluster.local",
        vaultwarden_pod_label="app=vaultwarden",
        vaultwarden_pod_port=80,
    )
    http_stub = DummyVaultwardenClient()
    http_stub.responses["/admin/invite"] = DummyResponse(409, "user already exists")
    monkeypatch.setattr("ariadne.services.vaultwarden.settings", fake_settings)
    monkeypatch.setattr("ariadne.services.vaultwarden.get_secret_value", lambda *_a, **_kw: "token")
    monkeypatch.setattr("ariadne.services.vaultwarden.httpx.Client", lambda *_a, **_kw: http_stub)
    monkeypatch.setattr(
        "ariadne.services.vaultwarden.VaultwardenService._find_pod_ip",
        staticmethod(lambda *_a, **_kw: "127.0.0.1"),
    )
    outcome = VaultwardenService().invite_user("alice@bstein.dev")
    assert outcome.status == "already_present"
def test_vaultwarden_invite_rejects_invalid_email() -> None:
    """Addresses without a valid shape are rejected up front."""
    outcome = VaultwardenService().invite_user("bad-email")
    assert outcome.status == "invalid_email"
def test_vaultwarden_invite_rate_limited_short_circuit() -> None:
    """An active backoff window short-circuits the invite without any I/O."""
    svc = VaultwardenService()
    # Pretend a rate-limit backoff is still in effect for another minute.
    svc._rate_limited_until = time.time() + 60
    assert svc.invite_user("alice@bstein.dev").status == "rate_limited"
def test_vaultwarden_invite_handles_admin_exception(monkeypatch) -> None:
    """A rate-limit error from the admin session maps to rate_limited."""
    fake_settings = types.SimpleNamespace(
        vaultwarden_namespace="vaultwarden",
        vaultwarden_admin_secret_name="vaultwarden-admin",
        vaultwarden_admin_secret_key="ADMIN_TOKEN",
        vaultwarden_admin_rate_limit_backoff_sec=600,
        vaultwarden_admin_session_ttl_sec=900,
        vaultwarden_service_host="vaultwarden-service.vaultwarden.svc.cluster.local",
        vaultwarden_pod_label="app=vaultwarden",
        vaultwarden_pod_port=80,
    )
    monkeypatch.setattr("ariadne.services.vaultwarden.settings", fake_settings)

    def _boom(*_a, **_kw):
        raise RuntimeError("boom")

    monkeypatch.setattr(
        "ariadne.services.vaultwarden.VaultwardenService._find_pod_ip",
        staticmethod(_boom),
    )
    svc = VaultwardenService()

    def _rate_limited(*_a, **_kw):
        raise RuntimeError("rate limited")

    monkeypatch.setattr(svc, "_admin_session", _rate_limited)
    assert svc.invite_user("alice@bstein.dev").status == "rate_limited"
def test_vaultwarden_invite_handles_bad_body(monkeypatch) -> None:
dummy_settings = types.SimpleNamespace(
vaultwarden_namespace="vaultwarden",
vaultwarden_admin_secret_name="vaultwarden-admin",
vaultwarden_admin_secret_key="ADMIN_TOKEN",
vaultwarden_admin_rate_limit_backoff_sec=600,
vaultwarden_admin_session_ttl_sec=900,
vaultwarden_service_host="vaultwarden-service.vaultwarden.svc.cluster.local",
vaultwarden_pod_label="app=vaultwarden",
vaultwarden_pod_port=80,
)
class BadTextResponse:
def __init__(self, status_code=500):
self.status_code = status_code
def raise_for_status(self):
return None
@property
def text(self):
raise RuntimeError("boom")
class BadTextClient(DummyVaultwardenClient):
def post(self, path, json=None, data=None):
self.calls.append((path, json, data))
return BadTextResponse(500)
monkeypatch.setattr("ariadne.services.vaultwarden.settings", dummy_settings)
monkeypatch.setattr("ariadne.services.vaultwarden.get_secret_value", lambda *args, **kwargs: "token")
monkeypatch.setattr("ariadne.services.vaultwarden.httpx.Client", lambda *args, **kwargs: BadTextClient())
monkeypatch.setattr(
"ariadne.services.vaultwarden.VaultwardenService._find_pod_ip",
staticmethod(lambda *args, **kwargs: "127.0.0.1"),
)
svc = VaultwardenService()
result = svc.invite_user("alice@bstein.dev")
assert result.status == "error"
def test_vaultwarden_invite_handles_fallback_skip(monkeypatch) -> None:
dummy_settings = types.SimpleNamespace(
vaultwarden_namespace="vaultwarden",
vaultwarden_admin_secret_name="vaultwarden-admin",
vaultwarden_admin_secret_key="ADMIN_TOKEN",
vaultwarden_admin_rate_limit_backoff_sec=600,
vaultwarden_admin_session_ttl_sec=900,
vaultwarden_service_host="vaultwarden-service.vaultwarden.svc.cluster.local",
vaultwarden_pod_label="app=vaultwarden",
vaultwarden_pod_port=80,
)
monkeypatch.setattr("ariadne.services.vaultwarden.settings", dummy_settings)
monkeypatch.setattr(
"ariadne.services.vaultwarden.VaultwardenService._find_pod_ip",
staticmethod(lambda *args, **kwargs: (_ for _ in ()).throw(RuntimeError("boom"))),
)
svc = VaultwardenService()
monkeypatch.setattr(svc, "_admin_session", lambda *_args, **_kwargs: (_ for _ in ()).throw(RuntimeError("nope")))
result = svc.invite_user("alice@bstein.dev")
assert result.status == "error"
def test_vaultwarden_find_pod_ip(monkeypatch) -> None:
    """A Running pod with an explicit Ready condition and an IP is chosen."""
    listing = {
        "items": [
            {
                "status": {
                    "phase": "Running",
                    "podIP": "10.0.0.1",
                    "conditions": [{"type": "Ready", "status": "True"}],
                }
            }
        ]
    }
    monkeypatch.setattr("ariadne.services.vaultwarden.get_json", lambda *args, **kwargs: listing)
    assert VaultwardenService._find_pod_ip("ns", "app=vaultwarden") == "10.0.0.1"


def test_vaultwarden_find_pod_ip_skips_missing_ip(monkeypatch) -> None:
    """A pod without an IP is passed over in favor of the next candidate."""
    listing = {
        "items": [
            {"status": {"phase": "Running", "podIP": ""}},
            {"status": {"phase": "Running", "podIP": "10.0.0.2", "conditions": []}},
        ]
    }
    monkeypatch.setattr("ariadne.services.vaultwarden.get_json", lambda *args, **kwargs: listing)
    assert VaultwardenService._find_pod_ip("ns", "app=vaultwarden") == "10.0.0.2"


def test_vaultwarden_find_pod_ip_conditions_default_ready(monkeypatch) -> None:
    """Malformed condition entries do not disqualify an otherwise running pod."""
    listing = {
        "items": [
            {"status": {"phase": "Running", "podIP": "10.0.0.3", "conditions": ["bad"]}},
        ]
    }
    monkeypatch.setattr("ariadne.services.vaultwarden.get_json", lambda *args, **kwargs: listing)
    assert VaultwardenService._find_pod_ip("ns", "app=vaultwarden") == "10.0.0.3"


def test_vaultwarden_find_pod_ip_no_pods(monkeypatch) -> None:
    """An empty pod listing raises instead of returning a bogus address."""
    monkeypatch.setattr("ariadne.services.vaultwarden.get_json", lambda *args, **kwargs: {"items": []})
    with pytest.raises(RuntimeError):
        VaultwardenService._find_pod_ip("ns", "app=vaultwarden")


def test_vaultwarden_find_pod_ip_missing_ip(monkeypatch) -> None:
    """A listing with no usable pod (pending, no IP) raises."""
    listing = {
        "items": [
            {"status": {"phase": "Pending", "conditions": ["bad"]}},
        ]
    }
    monkeypatch.setattr("ariadne.services.vaultwarden.get_json", lambda *args, **kwargs: listing)
    with pytest.raises(RuntimeError):
        VaultwardenService._find_pod_ip("ns", "app=vaultwarden")
def test_vaultwarden_admin_session_rate_limit(monkeypatch) -> None:
    """A 429 from /admin makes session acquisition raise."""
    cfg = types.SimpleNamespace(
        vaultwarden_namespace="vaultwarden",
        vaultwarden_admin_secret_name="vaultwarden-admin",
        vaultwarden_admin_secret_key="ADMIN_TOKEN",
        vaultwarden_admin_rate_limit_backoff_sec=1,
        vaultwarden_admin_session_ttl_sec=900,
        vaultwarden_service_host="vaultwarden-service.vaultwarden.svc.cluster.local",
        vaultwarden_pod_label="app=vaultwarden",
        vaultwarden_pod_port=80,
    )
    throttled_client = DummyVaultwardenClient()
    throttled_client.responses["/admin"] = DummyResponse(429, "")
    monkeypatch.setattr("ariadne.services.vaultwarden.settings", cfg)
    monkeypatch.setattr("ariadne.services.vaultwarden.get_secret_value", lambda *args, **kwargs: "token")
    monkeypatch.setattr("ariadne.services.vaultwarden.httpx.Client", lambda *args, **kwargs: throttled_client)
    service = VaultwardenService()
    with pytest.raises(RuntimeError):
        service._admin_session("http://vaultwarden")


def test_vaultwarden_admin_session_reuses_client() -> None:
    """A live cached session for the same base URL is returned as-is."""
    service = VaultwardenService()
    cached = DummyVaultwardenClient()
    service._admin_client = cached
    service._admin_session_expires_at = time.time() + 60
    service._admin_session_base_url = "http://vaultwarden"
    assert service._admin_session("http://vaultwarden") is cached


def test_vaultwarden_admin_session_rate_limited_until() -> None:
    """An open rate-limit window makes session acquisition fail fast."""
    service = VaultwardenService()
    service._rate_limited_until = time.time() + 60
    with pytest.raises(RuntimeError):
        service._admin_session("http://vaultwarden")


def test_vaultwarden_admin_session_closes_existing(monkeypatch) -> None:
    """An expired client is closed (close errors tolerated) and replaced."""
    cfg = types.SimpleNamespace(
        vaultwarden_namespace="vaultwarden",
        vaultwarden_admin_secret_name="vaultwarden-admin",
        vaultwarden_admin_secret_key="ADMIN_TOKEN",
        vaultwarden_admin_rate_limit_backoff_sec=600,
        vaultwarden_admin_session_ttl_sec=900,
        vaultwarden_service_host="vaultwarden-service.vaultwarden.svc.cluster.local",
        vaultwarden_pod_label="app=vaultwarden",
        vaultwarden_pod_port=80,
    )

    class UncloseableClient:
        def close(self):
            raise RuntimeError("boom")

    monkeypatch.setattr("ariadne.services.vaultwarden.settings", cfg)
    monkeypatch.setattr("ariadne.services.vaultwarden.get_secret_value", lambda *args, **kwargs: "token")
    monkeypatch.setattr("ariadne.services.vaultwarden.httpx.Client", lambda *args, **kwargs: DummyVaultwardenClient())
    service = VaultwardenService()
    service._admin_client = UncloseableClient()
    service._admin_session_expires_at = time.time() - 10
    service._admin_session_base_url = "http://old"
    assert service._admin_session("http://vaultwarden") is not None
def test_nextcloud_missing_config(monkeypatch) -> None:
dummy = types.SimpleNamespace(
nextcloud_namespace="",
@ -236,3 +742,275 @@ def test_nextcloud_missing_config(monkeypatch) -> None:
svc = NextcloudService()
with pytest.raises(RuntimeError):
svc.sync_mail("alice")
def _empty_vault_settings() -> types.SimpleNamespace:
    """Vault settings with everything blanked out, so syncs must refuse to run."""
    return types.SimpleNamespace(
        vault_namespace="",
        vault_k8s_auth_cronjob="",
        vault_oidc_cronjob="",
        vault_job_wait_timeout_sec=120.0,
    )


def test_vault_sync_missing_config(monkeypatch) -> None:
    """sync_k8s_auth refuses to run without namespace/cronjob configuration."""
    monkeypatch.setattr("ariadne.services.vault.settings", _empty_vault_settings())
    service = VaultService()
    with pytest.raises(RuntimeError):
        service.sync_k8s_auth(wait=True)


def test_vault_sync_oidc_missing_config(monkeypatch) -> None:
    """sync_oidc refuses to run without namespace/cronjob configuration."""
    monkeypatch.setattr("ariadne.services.vault.settings", _empty_vault_settings())
    service = VaultService()
    with pytest.raises(RuntimeError):
        service.sync_oidc(wait=True)


def test_wger_sync_missing_inputs(monkeypatch) -> None:
    """Blank username or password is rejected before any job is spawned."""
    cfg = types.SimpleNamespace(
        wger_namespace="health",
        wger_user_sync_cronjob="wger-user-sync",
        wger_admin_cronjob="wger-admin-ensure",
        wger_user_sync_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.wger.settings", cfg)
    monkeypatch.setattr(
        "ariadne.services.wger.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    service = WgerService()
    with pytest.raises(RuntimeError):
        service.sync_user("", "email", "pw", wait=True)
    with pytest.raises(RuntimeError):
        service.sync_user("alice", "email", "", wait=True)


def test_wger_sync_missing_config(monkeypatch) -> None:
    """An empty namespace/cronjob configuration blocks user sync entirely."""
    cfg = types.SimpleNamespace(
        wger_namespace="",
        wger_user_sync_cronjob="",
        wger_admin_cronjob="wger-admin-ensure",
        wger_user_sync_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.wger.settings", cfg)
    service = WgerService()
    with pytest.raises(RuntimeError):
        service.sync_user("alice", "email", "pw", wait=True)
def _wger_admin_settings() -> types.SimpleNamespace:
    """Fully-populated wger settings used by the ensure_admin tests."""
    return types.SimpleNamespace(
        wger_namespace="health",
        wger_user_sync_cronjob="wger-user-sync",
        wger_admin_cronjob="wger-admin-ensure",
        wger_user_sync_wait_timeout_sec=60.0,
    )


def test_wger_ensure_admin(monkeypatch) -> None:
    """With wait=True the admin-ensure job completes and reports ok."""
    monkeypatch.setattr("ariadne.services.wger.settings", _wger_admin_settings())
    monkeypatch.setattr(
        "ariadne.services.wger.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    service = WgerService()
    assert service.ensure_admin(wait=True)["status"] == "ok"


def test_wger_ensure_admin_queue(monkeypatch) -> None:
    """With wait=False the admin-ensure job is merely queued."""
    monkeypatch.setattr("ariadne.services.wger.settings", _wger_admin_settings())
    monkeypatch.setattr(
        "ariadne.services.wger.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    service = WgerService()
    assert service.ensure_admin(wait=False)["status"] == "queued"


def test_wger_ensure_admin_missing_config(monkeypatch) -> None:
    """Blank namespace/cronjob configuration makes ensure_admin raise."""
    cfg = types.SimpleNamespace(
        wger_namespace="",
        wger_user_sync_cronjob="wger-user-sync",
        wger_admin_cronjob="",
        wger_user_sync_wait_timeout_sec=60.0,
    )
    monkeypatch.setattr("ariadne.services.wger.settings", cfg)
    service = WgerService()
    with pytest.raises(RuntimeError):
        service.ensure_admin(wait=True)
def _comms_unconfigured() -> types.SimpleNamespace:
    """Comms settings with a blank namespace so every job runner must refuse."""
    return types.SimpleNamespace(
        comms_namespace="",
        comms_guest_name_cronjob="guest-name-randomizer",
        comms_pin_invite_cronjob="pin-othrys-invite",
        comms_reset_room_cronjob="othrys-room-reset",
        comms_seed_room_cronjob="seed-othrys-room",
        comms_job_wait_timeout_sec=60.0,
    )


def test_comms_missing_config(monkeypatch) -> None:
    """The guest-name randomizer refuses to run without a namespace."""
    monkeypatch.setattr("ariadne.services.comms.settings", _comms_unconfigured())
    service = CommsService()
    with pytest.raises(RuntimeError):
        service.run_guest_name_randomizer(wait=True)


def test_comms_missing_config_variants(monkeypatch) -> None:
    """All remaining comms job runners also refuse without a namespace."""
    monkeypatch.setattr("ariadne.services.comms.settings", _comms_unconfigured())
    service = CommsService()
    for runner in (service.run_pin_invite, service.run_reset_room, service.run_seed_room):
        with pytest.raises(RuntimeError):
            runner(wait=True)
def _mailu_db_settings() -> types.SimpleNamespace:
    """Mailu settings pointing at a local test database."""
    return types.SimpleNamespace(
        mailu_sync_url="",
        mailu_sync_wait_timeout_sec=10.0,
        mailu_db_host="localhost",
        mailu_db_port=5432,
        mailu_db_name="mailu",
        mailu_db_user="mailu",
        mailu_db_password="secret",
        mailu_domain="bstein.dev",
    )


def test_mailu_mailbox_exists_handles_error(monkeypatch) -> None:
    """A database connection failure is swallowed and reported as 'no mailbox'."""
    monkeypatch.setattr("ariadne.services.mailu.settings", _mailu_db_settings())

    def _connect_boom(*_args, **_kwargs):
        raise RuntimeError("boom")

    monkeypatch.setattr("ariadne.services.mailu.psycopg.connect", _connect_boom)
    service = MailuService()
    assert service.mailbox_exists("alice@bstein.dev") is False


def test_mailu_mailbox_exists_success(monkeypatch) -> None:
    """A row returned from the mailbox query means the mailbox exists."""
    monkeypatch.setattr("ariadne.services.mailu.settings", _mailu_db_settings())

    class FakeCursor:
        def execute(self, *_args, **_kwargs):
            return None

        def fetchone(self):
            return {"id": 1}

        def __enter__(self):
            return self

        def __exit__(self, *_args):
            return False

    class FakeConnection:
        def cursor(self):
            return FakeCursor()

        def __enter__(self):
            return self

        def __exit__(self, *_args):
            return False

    monkeypatch.setattr("ariadne.services.mailu.psycopg.connect", lambda *args, **kwargs: FakeConnection())
    service = MailuService()
    assert service.mailbox_exists("alice@bstein.dev") is True


def test_mailu_wait_for_mailbox(monkeypatch) -> None:
    """wait_for_mailbox returns True as soon as mailbox_exists does."""
    monkeypatch.setattr("ariadne.services.mailu.settings", _mailu_db_settings())
    monkeypatch.setattr(MailuService, "mailbox_exists", lambda self, email: True)
    service = MailuService()
    assert service.wait_for_mailbox("alice@bstein.dev", timeout_sec=1.0) is True


def test_mailu_mailbox_exists_empty_email(monkeypatch) -> None:
    """An empty address never matches a mailbox."""
    monkeypatch.setattr("ariadne.services.mailu.settings", _mailu_db_settings())
    service = MailuService()
    assert service.mailbox_exists("") is False
def _nextcloud_sync_settings() -> types.SimpleNamespace:
    """Nextcloud mail-sync settings shared by the tests below."""
    return types.SimpleNamespace(
        nextcloud_namespace="nextcloud",
        nextcloud_mail_sync_cronjob="nextcloud-mail-sync",
        nextcloud_mail_sync_wait_timeout_sec=90.0,
        nextcloud_mail_sync_job_ttl_sec=3600,
    )


def test_nextcloud_sync_missing_username(monkeypatch) -> None:
    """A whitespace-only username is rejected before spawning a job."""
    monkeypatch.setattr("ariadne.services.nextcloud.settings", _nextcloud_sync_settings())
    monkeypatch.setattr(
        "ariadne.services.nextcloud.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    service = NextcloudService()
    with pytest.raises(RuntimeError):
        service.sync_mail(" ", wait=True)


def test_nextcloud_sync_queue(monkeypatch) -> None:
    """Without wait, the sync job is queued rather than awaited."""
    monkeypatch.setattr("ariadne.services.nextcloud.settings", _nextcloud_sync_settings())
    monkeypatch.setattr(
        "ariadne.services.nextcloud.JobSpawner",
        lambda ns, cj, manifest=None: DummySpawner(ns, cj, manifest),
    )
    service = NextcloudService()
    assert service.sync_mail("alice", wait=False)["status"] == "queued"

13
tests/test_settings.py Normal file
View File

@ -0,0 +1,13 @@
from __future__ import annotations
from ariadne import settings as settings_module
def test_env_int_invalid(monkeypatch) -> None:
    """A non-numeric env value falls back to the supplied integer default."""
    monkeypatch.setenv("ARIADNE_INT_TEST", "bad")
    assert settings_module._env_int("ARIADNE_INT_TEST", 5) == 5


def test_env_float_invalid(monkeypatch) -> None:
    """A non-numeric env value falls back to the supplied float default."""
    monkeypatch.setenv("ARIADNE_FLOAT_TEST", "bad")
    assert settings_module._env_float("ARIADNE_FLOAT_TEST", 1.5) == 1.5

View File

@ -6,16 +6,19 @@ from ariadne.db.storage import Storage
class DummyDB:
def __init__(self) -> None:
def __init__(self, row=None) -> None:
self.rows = []
self.executed = []
self.row = row
def fetchall(self, query, params=None):
return self.rows
def fetchone(self, query, params=None):
return None
return self.row
def execute(self, query, params=None):
self.executed.append((query, params))
return None
@ -48,3 +51,243 @@ def test_row_to_request_flags() -> None:
req = Storage._row_to_request(row)
assert req.request_code == "abc"
assert req.approval_flags == ["demo", "1", "test"]
def test_record_event_serializes_dict() -> None:
    """Dict payloads are JSON-serialized before being written to the events table."""
    db = DummyDB()
    Storage(db).record_event("mailu_rotate", {"status": "ok"})
    assert db.executed
    params = db.executed[-1][1]
    assert params[0] == "mailu_rotate"
    assert '"status"' in params[1]


def test_list_events_filters() -> None:
    """Filtering by event type returns the matching row."""
    db = DummyDB()
    db.rows = [{"id": 1, "event_type": "foo", "detail": "bar", "created_at": datetime.now()}]
    events = Storage(db).list_events(limit=1, event_type="foo")
    assert events[0]["event_type"] == "foo"


def test_list_events_without_filter() -> None:
    """Omitting the event-type filter still returns rows."""
    db = DummyDB()
    db.rows = [{"id": 1, "event_type": "foo", "detail": "bar", "created_at": datetime.now()}]
    events = Storage(db).list_events(limit=1)
    assert events[0]["event_type"] == "foo"


def test_list_task_runs_filters() -> None:
    """Filtering by request code returns the matching task-run row."""
    db = DummyDB()
    db.rows = [
        {
            "id": 1,
            "request_code": "REQ1",
            "task": "mailu_sync",
            "status": "ok",
            "detail": "done",
            "started_at": datetime.now(),
            "finished_at": datetime.now(),
            "duration_ms": 10,
        }
    ]
    runs = Storage(db).list_task_runs(limit=1, request_code="REQ1")
    assert runs[0]["task"] == "mailu_sync"


def test_list_task_runs_filters_task_and_request() -> None:
    """Combining the request-code and task filters works."""
    db = DummyDB()
    db.rows = [{"id": 1, "request_code": "REQ1", "task": "mailu_sync", "status": "ok"}]
    runs = Storage(db).list_task_runs(limit=1, request_code="REQ1", task="mailu_sync")
    assert runs[0]["task"] == "mailu_sync"


def test_list_task_runs_filters_task_only() -> None:
    """The task filter can be used on its own."""
    db = DummyDB()
    db.rows = [{"id": 1, "request_code": "REQ1", "task": "mailu_sync", "status": "ok"}]
    runs = Storage(db).list_task_runs(limit=1, task="mailu_sync")
    assert runs[0]["task"] == "mailu_sync"


def test_list_task_runs_default() -> None:
    """With no filters, all rows up to the limit come back."""
    db = DummyDB()
    db.rows = [{"id": 1, "request_code": "REQ1", "task": "mailu_sync", "status": "ok"}]
    runs = Storage(db).list_task_runs(limit=1)
    assert runs[0]["task"] == "mailu_sync"
def _access_request_row(**overrides):
    """Base access-request row; individual tests override fields as needed."""
    row = {
        "request_code": "REQ1",
        "username": "alice",
        "contact_email": "alice@example.com",
        "status": "pending",
        "email_verified_at": None,
        "initial_password": None,
        "initial_password_revealed_at": None,
        "provision_attempted_at": None,
        "approval_flags": [],
        "approval_note": None,
        "denial_note": None,
    }
    row.update(overrides)
    return row


def test_fetch_access_request(monkeypatch) -> None:
    """A matching row is hydrated into an access-request object."""
    storage = Storage(DummyDB(row=_access_request_row()))
    request = storage.fetch_access_request("REQ1")
    assert request
    assert request.username == "alice"


def test_fetch_access_request_missing() -> None:
    """No row means None, not an error."""
    storage = Storage(DummyDB(row=None))
    assert storage.fetch_access_request("REQ1") is None


def test_find_access_request_by_username() -> None:
    """Lookup by username hydrates the same request object."""
    storage = Storage(DummyDB(row=_access_request_row()))
    request = storage.find_access_request_by_username("alice")
    assert request
    assert request.username == "alice"


def test_update_status_executes() -> None:
    """update_status issues a write."""
    db = DummyDB()
    Storage(db).update_status("REQ1", "approved")
    assert db.executed


def test_update_approval_executes() -> None:
    """update_approval issues a write."""
    db = DummyDB()
    Storage(db).update_approval("REQ1", "approved", "admin", ["demo"], "ok")
    assert db.executed
def test_list_provision_candidates() -> None:
    """Rows in approved state come back as provisioning candidates."""
    db = DummyDB()
    db.rows = [
        {
            "request_code": "REQ1",
            "username": "alice",
            "contact_email": "alice@example.com",
            "status": "approved",
            "email_verified_at": None,
            "initial_password": None,
            "initial_password_revealed_at": None,
            "provision_attempted_at": None,
            "approval_flags": [],
            "approval_note": None,
            "denial_note": None,
        }
    ]
    assert Storage(db).list_provision_candidates()[0].username == "alice"


def test_mark_provision_attempted_executes() -> None:
    """mark_provision_attempted issues a write."""
    db = DummyDB()
    Storage(db).mark_provision_attempted("REQ1")
    assert db.executed


def test_set_initial_password_executes() -> None:
    """set_initial_password issues a write."""
    db = DummyDB()
    Storage(db).set_initial_password("REQ1", "pw")
    assert db.executed


def test_mark_welcome_sent_executes() -> None:
    """mark_welcome_sent issues a write."""
    db = DummyDB()
    Storage(db).mark_welcome_sent("REQ1")
    assert db.executed


def test_ensure_task_rows_empty() -> None:
    """An empty task list must not touch the database."""
    db = DummyDB()
    Storage(db).ensure_task_rows("REQ1", [])
    assert not db.executed


def test_ensure_task_rows_executes() -> None:
    """A non-empty task list issues writes."""
    db = DummyDB()
    Storage(db).ensure_task_rows("REQ1", ["task"])
    assert db.executed


def test_update_task_executes() -> None:
    """update_task issues a write."""
    db = DummyDB()
    Storage(db).update_task("REQ1", "task", "ok", None)
    assert db.executed


def test_find_access_request_by_username_missing() -> None:
    """No row for the username means None."""
    assert Storage(DummyDB(row=None)).find_access_request_by_username("alice") is None


def test_list_pending_requests() -> None:
    """Pending requests are returned as raw rows."""
    db = DummyDB()
    db.rows = [{"request_code": "REQ1"}]
    assert Storage(db).list_pending_requests()[0]["request_code"] == "REQ1"


def test_record_task_run_executes() -> None:
    """record_task_run issues a write."""
    db = DummyDB()
    Storage(db).record_task_run("REQ1", "task", "ok", None, datetime.now(), datetime.now(), 5)
    assert db.executed


def test_update_schedule_state_executes() -> None:
    """update_schedule_state issues a write."""
    db = DummyDB()
    Storage(db).update_schedule_state("task", "* * * * *", None, None, "ok", None, None, None)
    assert db.executed

View File

@ -2,15 +2,17 @@ from __future__ import annotations
import re
import httpx
from ariadne.services.mailu import MailuService
from ariadne.utils.errors import safe_error_detail
from ariadne.utils.http import extract_bearer_token
from ariadne.utils.passwords import random_password
class DummyRequest:
    """Minimal request stand-in exposing only the headers mapping the tests need."""

    def __init__(self, headers):
        self.headers = headers
from ariadne.utils.errors import safe_error_detail
from ariadne.utils.passwords import random_password
def test_random_password_length() -> None:
@ -24,6 +26,11 @@ def test_mailu_resolve_email_attribute() -> None:
assert MailuService.resolve_mailu_email("alice", attrs) == "custom@bstein.dev"
def test_mailu_resolve_email_string() -> None:
    """A plain string attribute value is honored directly."""
    attributes = {"mailu_email": "custom@bstein.dev"}
    assert MailuService.resolve_mailu_email("alice", attributes) == "custom@bstein.dev"


def test_mailu_resolve_email_default() -> None:
    """Without an attribute, username@bstein.dev is the fallback."""
    assert MailuService.resolve_mailu_email("alice", {}) == "alice@bstein.dev"
@ -32,6 +39,51 @@ def test_safe_error_detail_runtime() -> None:
assert safe_error_detail(RuntimeError("boom"), "fallback") == "boom"
def _http_status_error(status_code, **response_kwargs):
    """Build an httpx.HTTPStatusError carrying a canned response body."""
    request = httpx.Request("GET", "https://example.com")
    response = httpx.Response(status_code, request=request, **response_kwargs)
    return httpx.HTTPStatusError("bad", request=request, response=response)


def test_safe_error_detail_http_status_json() -> None:
    """A JSON error body surfaces both the status and the error message."""
    detail = safe_error_detail(_http_status_error(400, json={"errorMessage": "bad things"}), "fallback")
    assert "http 400" in detail
    assert "bad things" in detail


def test_safe_error_detail_http_status_text() -> None:
    """A plain-text error body surfaces both the status and the text."""
    detail = safe_error_detail(_http_status_error(500, text="bad error"), "fallback")
    assert "http 500" in detail
    assert "bad error" in detail


def test_safe_error_detail_http_status_string() -> None:
    """A JSON string body (not an object) is still included."""
    detail = safe_error_detail(_http_status_error(400, json="bad string"), "fallback")
    assert "http 400" in detail
    assert "bad string" in detail


def test_safe_error_detail_timeout() -> None:
    """Timeouts report their own message rather than the fallback."""
    assert safe_error_detail(httpx.TimeoutException("timeout"), "fallback") == "timeout"


def test_extract_bearer_token() -> None:
    """A well-formed Bearer header yields the token."""
    assert extract_bearer_token(DummyRequest({"Authorization": "Bearer token123"})) == "token123"


def test_extract_bearer_token_invalid() -> None:
    """Non-Bearer schemes yield None."""
    assert extract_bearer_token(DummyRequest({"Authorization": "Basic abc"})) is None


def test_extract_bearer_token_missing_parts() -> None:
    """A Bearer header with no token yields None."""
    assert extract_bearer_token(DummyRequest({"Authorization": "Bearer"})) is None

View File

@ -1,6 +1,8 @@
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timezone
import types
from ariadne.services.vaultwarden import VaultwardenInvite
from ariadne.services import vaultwarden_sync
@ -62,3 +64,323 @@ def test_vaultwarden_sync_invites(monkeypatch) -> None:
summary = vaultwarden_sync.run_vaultwarden_sync()
assert summary.created_or_present == 1
assert dummy.set_calls
def test_vaultwarden_sync_respects_retry_cooldown(monkeypatch) -> None:
    """A recently rate-limited user is skipped while the retry cooldown is active."""
    monkeypatch.setattr(
        vaultwarden_sync,
        "settings",
        types.SimpleNamespace(
            mailu_domain="bstein.dev",
            vaultwarden_retry_cooldown_sec=9999,
            vaultwarden_failure_bailout=2,
        ),
    )
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    def _alice_record():
        # Fresh dict per call so the listing and the attrs lookup don't share state.
        return {
            "id": "1",
            "username": "alice",
            "attributes": {
                "mailu_email": ["alice@bstein.dev"],
                "vaultwarden_status": ["rate_limited"],
                "vaultwarden_synced_at": [stamp],
            },
        }

    listed = _alice_record()
    listed["enabled"] = True
    admin = DummyAdmin(users=[listed], attrs={"1": _alice_record()})
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    assert vaultwarden_sync.run_vaultwarden_sync().skipped == 1


def test_vaultwarden_sync_bails_after_failures(monkeypatch) -> None:
    """After hitting the failure bailout threshold, remaining users are not attempted."""
    monkeypatch.setattr(
        vaultwarden_sync,
        "settings",
        types.SimpleNamespace(
            mailu_domain="bstein.dev",
            vaultwarden_retry_cooldown_sec=0,
            vaultwarden_failure_bailout=1,
        ),
    )
    admin = DummyAdmin(
        users=[
            {"id": "1", "username": "alice", "enabled": True, "attributes": {"mailu_email": ["alice@bstein.dev"]}},
            {"id": "2", "username": "bob", "enabled": True, "attributes": {"mailu_email": ["bob@bstein.dev"]}},
        ],
        attrs={
            "1": {"id": "1", "username": "alice", "attributes": {"mailu_email": ["alice@bstein.dev"]}},
            "2": {"id": "2", "username": "bob", "attributes": {"mailu_email": ["bob@bstein.dev"]}},
        },
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    monkeypatch.setattr(
        vaultwarden_sync.vaultwarden,
        "invite_user",
        lambda email: VaultwardenInvite(False, "error", "error"),
    )
    assert vaultwarden_sync.run_vaultwarden_sync().failures == 1
def test_vaultwarden_sync_uses_keycloak_email(monkeypatch) -> None:
    """Without a mailu attribute, the Keycloak account email on the same domain is used."""
    monkeypatch.setattr(
        vaultwarden_sync,
        "settings",
        types.SimpleNamespace(
            mailu_domain="bstein.dev",
            vaultwarden_retry_cooldown_sec=0,
            vaultwarden_failure_bailout=2,
        ),
    )
    admin = DummyAdmin(
        users=[
            {
                "id": "1",
                "username": "alice",
                "enabled": True,
                "email": "alice@bstein.dev",
                "attributes": {},
            }
        ],
        attrs={
            "1": {
                "id": "1",
                "username": "alice",
                "email": "alice@bstein.dev",
                "attributes": {},
            }
        },
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    monkeypatch.setattr(vaultwarden_sync.vaultwarden, "invite_user", lambda email: VaultwardenInvite(True, "invited"))
    assert vaultwarden_sync.run_vaultwarden_sync().created_or_present == 1


def test_extract_attr_variants() -> None:
    """Non-dict input, blank list entries, and plain strings are all handled."""
    assert vaultwarden_sync._extract_attr("bad", "key") == ""
    assert vaultwarden_sync._extract_attr({"key": ["", " "]}, "key") == ""
    assert vaultwarden_sync._extract_attr({"key": "value"}, "key") == "value"


def test_parse_synced_at_invalid() -> None:
    """Blank and unparseable timestamps yield None."""
    for raw in ("", "not-a-date"):
        assert vaultwarden_sync._parse_synced_at(raw) is None


def test_parse_synced_at_valid() -> None:
    """Both Z-suffixed and numeric-offset timestamps parse."""
    for raw in ("2025-01-01T00:00:00Z", "2025-01-01T00:00:00+0000"):
        assert vaultwarden_sync._parse_synced_at(raw)
def _bstein_domain_settings() -> types.SimpleNamespace:
    """Settings carrying only the mail domain used by email-resolution tests."""
    return types.SimpleNamespace(mailu_domain="bstein.dev")


def test_vaultwarden_email_for_user_prefers_attributes(monkeypatch) -> None:
    """An explicit vaultwarden_email attribute wins."""
    monkeypatch.setattr(vaultwarden_sync, "settings", _bstein_domain_settings())
    user = {"username": "alice", "attributes": {"vaultwarden_email": ["alice@bstein.dev"]}}
    assert vaultwarden_sync._vaultwarden_email_for_user(user) == "alice@bstein.dev"


def test_vaultwarden_email_for_user_uses_mailu_attr(monkeypatch) -> None:
    """A mailu_email attribute is the next fallback."""
    monkeypatch.setattr(vaultwarden_sync, "settings", _bstein_domain_settings())
    user = {"username": "alice", "attributes": {"mailu_email": ["alias@bstein.dev"]}}
    assert vaultwarden_sync._vaultwarden_email_for_user(user) == "alias@bstein.dev"


def test_vaultwarden_email_for_user_missing_username(monkeypatch) -> None:
    """A blank username resolves to no email at all."""
    monkeypatch.setattr(vaultwarden_sync, "settings", _bstein_domain_settings())
    assert vaultwarden_sync._vaultwarden_email_for_user({"username": " "}) == ""


def test_vaultwarden_email_for_user_rejects_external_email(monkeypatch) -> None:
    """An account email outside the mail domain is not used."""
    monkeypatch.setattr(vaultwarden_sync, "settings", _bstein_domain_settings())
    user = {"username": "alice", "email": "alice@example.com", "attributes": {}}
    assert vaultwarden_sync._vaultwarden_email_for_user(user) == ""


def test_set_user_attribute_if_missing_skips_existing(monkeypatch) -> None:
    """A value already present on the user is not rewritten."""
    admin = DummyAdmin(users=[])
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    vaultwarden_sync._set_user_attribute_if_missing(
        "alice",
        {"attributes": {"vaultwarden_email": ["alice@bstein.dev"]}},
        "vaultwarden_email",
        "alice@bstein.dev",
    )
    assert admin.set_calls is None


def test_set_user_attribute_if_missing_empty_value(monkeypatch) -> None:
    """An empty candidate value is never written."""
    admin = DummyAdmin(users=[])
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    vaultwarden_sync._set_user_attribute_if_missing("alice", {"attributes": {}}, "vaultwarden_email", "")
    assert admin.set_calls is None


def test_set_user_attribute_ignores_empty(monkeypatch) -> None:
    """The unconditional setter also refuses empty values."""
    admin = DummyAdmin(users=[])
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    vaultwarden_sync._set_user_attribute("alice", "vaultwarden_email", "")
    assert admin.set_calls is None
def test_vaultwarden_sync_sets_synced_at_for_invited(monkeypatch) -> None:
    """Users already marked invited are skipped, with the sync timestamp backfilled."""
    monkeypatch.setattr(
        vaultwarden_sync,
        "settings",
        types.SimpleNamespace(
            mailu_domain="bstein.dev",
            vaultwarden_retry_cooldown_sec=0,
            vaultwarden_failure_bailout=2,
        ),
    )
    admin = DummyAdmin(
        users=[
            {
                "id": "1",
                "username": "alice",
                "enabled": True,
                "attributes": {"mailu_email": ["alice@bstein.dev"], "vaultwarden_status": ["invited"]},
            }
        ],
        attrs={
            "1": {
                "id": "1",
                "username": "alice",
                "attributes": {"mailu_email": ["alice@bstein.dev"], "vaultwarden_status": ["invited"]},
            }
        },
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    summary = vaultwarden_sync.run_vaultwarden_sync()
    assert summary.skipped == 1
    assert admin.set_calls


def test_vaultwarden_sync_skips_disabled_and_service_accounts(monkeypatch) -> None:
    """Blank usernames, disabled users, and service accounts are all skipped."""
    monkeypatch.setattr(
        vaultwarden_sync,
        "settings",
        types.SimpleNamespace(
            mailu_domain="bstein.dev",
            vaultwarden_retry_cooldown_sec=0,
            vaultwarden_failure_bailout=2,
        ),
    )
    admin = DummyAdmin(
        users=[
            {"id": "1", "username": "", "enabled": True, "attributes": {}},
            {"id": "2", "username": "bob", "enabled": False, "attributes": {}},
            {"id": "3", "username": "service-account-test", "enabled": True, "attributes": {}},
        ],
        attrs={},
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    assert vaultwarden_sync.run_vaultwarden_sync().skipped == 3
def test_vaultwarden_sync_get_user_failure(monkeypatch) -> None:
dummy_settings = types.SimpleNamespace(
mailu_domain="bstein.dev",
vaultwarden_retry_cooldown_sec=0,
vaultwarden_failure_bailout=2,
)
monkeypatch.setattr(vaultwarden_sync, "settings", dummy_settings)
class Admin(DummyAdmin):
def get_user(self, user_id: str):
raise RuntimeError("fail")
dummy = Admin(
users=[{"id": "1", "username": "alice", "enabled": True, "attributes": {"mailu_email": ["alice@bstein.dev"]}}],
)
monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", dummy)
monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
monkeypatch.setattr(vaultwarden_sync.vaultwarden, "invite_user", lambda email: VaultwardenInvite(True, "invited"))
summary = vaultwarden_sync.run_vaultwarden_sync()
assert summary.created_or_present == 1
def test_vaultwarden_sync_invited_attribute_failure(monkeypatch) -> None:
    """An already-invited user is still skipped when set_user_attribute raises."""
    fake_settings = types.SimpleNamespace(
        mailu_domain="bstein.dev",
        vaultwarden_retry_cooldown_sec=0,
        vaultwarden_failure_bailout=2,
    )
    monkeypatch.setattr(vaultwarden_sync, "settings", fake_settings)

    class BrokenAttrAdmin(DummyAdmin):
        # Attribute writes always fail for this admin.
        def set_user_attribute(self, username: str, key: str, value: str) -> None:
            raise RuntimeError("fail")

    invited = {
        "id": "1",
        "username": "alice",
        "enabled": True,
        "attributes": {"mailu_email": ["alice@bstein.dev"], "vaultwarden_status": ["invited"]},
    }
    admin = BrokenAttrAdmin(
        users=[invited],
        attrs={"1": {"id": "1", "username": "alice", "attributes": {"mailu_email": ["alice@bstein.dev"], "vaultwarden_status": ["invited"]}}},
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    result = vaultwarden_sync.run_vaultwarden_sync()
    assert result.skipped == 1
def test_vaultwarden_sync_set_attribute_failure_on_success(monkeypatch) -> None:
    """A successful invite is counted even if recording the status attribute fails."""
    fake_settings = types.SimpleNamespace(
        mailu_domain="bstein.dev",
        vaultwarden_retry_cooldown_sec=0,
        vaultwarden_failure_bailout=2,
    )
    monkeypatch.setattr(vaultwarden_sync, "settings", fake_settings)

    class BrokenAttrAdmin(DummyAdmin):
        # Attribute writes always fail for this admin.
        def set_user_attribute(self, username: str, key: str, value: str) -> None:
            raise RuntimeError("fail")

    alice = {"id": "1", "username": "alice", "enabled": True, "attributes": {"mailu_email": ["alice@bstein.dev"]}}
    admin = BrokenAttrAdmin(
        users=[alice],
        attrs={"1": {"id": "1", "username": "alice", "attributes": {"mailu_email": ["alice@bstein.dev"]}}},
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    monkeypatch.setattr(vaultwarden_sync.vaultwarden, "invite_user", lambda email: VaultwardenInvite(True, "invited"))
    result = vaultwarden_sync.run_vaultwarden_sync()
    assert result.created_or_present == 1
def test_vaultwarden_sync_set_attribute_failure_on_error(monkeypatch) -> None:
    """A failed invite is counted as a failure even if recording the status attribute also fails."""
    fake_settings = types.SimpleNamespace(
        mailu_domain="bstein.dev",
        vaultwarden_retry_cooldown_sec=0,
        vaultwarden_failure_bailout=2,
    )
    monkeypatch.setattr(vaultwarden_sync, "settings", fake_settings)

    class BrokenAttrAdmin(DummyAdmin):
        # Attribute writes always fail for this admin.
        def set_user_attribute(self, username: str, key: str, value: str) -> None:
            raise RuntimeError("fail")

    alice = {"id": "1", "username": "alice", "enabled": True, "attributes": {"mailu_email": ["alice@bstein.dev"]}}
    admin = BrokenAttrAdmin(
        users=[alice],
        attrs={"1": {"id": "1", "username": "alice", "attributes": {"mailu_email": ["alice@bstein.dev"]}}},
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    # Invite itself reports failure.
    monkeypatch.setattr(vaultwarden_sync.vaultwarden, "invite_user", lambda email: VaultwardenInvite(False, "error", "error"))
    result = vaultwarden_sync.run_vaultwarden_sync()
    assert result.failures == 1
def test_vaultwarden_sync_skips_missing_email(monkeypatch) -> None:
    """A user without a mailu_email attribute is skipped, even with a top-level email field."""
    fake_settings = types.SimpleNamespace(
        mailu_domain="bstein.dev",
        vaultwarden_retry_cooldown_sec=0,
        vaultwarden_failure_bailout=2,
    )
    monkeypatch.setattr(vaultwarden_sync, "settings", fake_settings)
    # Top-level "email" is present but "attributes" carries no mailu_email entry.
    alice = {"id": "1", "username": "alice", "enabled": True, "email": "alice@example.com", "attributes": {}}
    admin = DummyAdmin(
        users=[alice],
        attrs={"1": {"id": "1", "username": "alice", "email": "alice@example.com", "attributes": {}}},
    )
    monkeypatch.setattr(vaultwarden_sync, "keycloak_admin", admin)
    monkeypatch.setattr(vaultwarden_sync.mailu, "mailbox_exists", lambda email: True)
    result = vaultwarden_sync.run_vaultwarden_sync()
    assert result.skipped == 1