quality(ariadne): close public docstring hygiene
This commit is contained in:
parent
b9951da1ae
commit
67db7b8438
1
Jenkinsfile
vendored
1
Jenkinsfile
vendored
@ -86,6 +86,7 @@ spec:
|
||||
}
|
||||
options {
|
||||
disableConcurrentBuilds()
|
||||
buildDiscarder(logRotator(daysToKeepStr: '30', numToKeepStr: '200', artifactDaysToKeepStr: '30', artifactNumToKeepStr: '120'))
|
||||
}
|
||||
triggers {
|
||||
pollSCM('H/2 * * * *')
|
||||
|
||||
@ -422,17 +422,23 @@ def _shutdown() -> None:
|
||||
|
||||
@app.get("/health")
def health() -> dict[str, Any]:
    """Liveness probe endpoint: always reports the service as up."""
    return {"ok": True}
|
||||
|
||||
|
||||
@app.get(settings.metrics_path)
def metrics() -> Response:
    """Serve the Prometheus exposition payload for Ariadne runtime tasks."""
    body = generate_latest()
    return Response(body, media_type=CONTENT_TYPE_LATEST)
|
||||
|
||||
|
||||
@app.get("/api/admin/access/requests")
|
||||
def list_access_requests(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Return pending access requests for authenticated administrators."""
|
||||
|
||||
_require_admin(ctx)
|
||||
logger.info(
|
||||
"list access requests",
|
||||
@ -463,6 +469,8 @@ def list_access_requests(ctx: AuthContext = Depends(_require_auth)) -> JSONRespo
|
||||
|
||||
@app.get("/api/admin/access/flags")
|
||||
def list_access_flags(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Return Keycloak groups that can be applied as access-request flags."""
|
||||
|
||||
_require_admin(ctx)
|
||||
flags = settings.allowed_flag_groups
|
||||
if keycloak_admin.ready():
|
||||
@ -479,6 +487,8 @@ def list_audit_events(
|
||||
event_type: str | None = None,
|
||||
ctx: AuthContext = Depends(_require_auth),
|
||||
) -> JSONResponse:
|
||||
"""Return recent audit events with optional type filtering."""
|
||||
|
||||
_require_admin(ctx)
|
||||
try:
|
||||
rows = storage.list_events(limit=limit, event_type=event_type)
|
||||
@ -506,6 +516,8 @@ def list_audit_task_runs(
|
||||
task: str | None = None,
|
||||
ctx: AuthContext = Depends(_require_auth),
|
||||
) -> JSONResponse:
|
||||
"""Return recorded background task runs for admin audit views."""
|
||||
|
||||
_require_admin(ctx)
|
||||
try:
|
||||
rows = storage.list_task_runs(limit=limit, request_code=request_code, task=task)
|
||||
@ -533,6 +545,8 @@ def list_audit_task_runs(
|
||||
|
||||
@app.get("/api/admin/cluster/state")
|
||||
def get_cluster_state(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Return the latest cluster-state snapshot to authenticated administrators."""
|
||||
|
||||
_require_admin(ctx)
|
||||
snapshot = storage.latest_cluster_state()
|
||||
if not snapshot:
|
||||
@ -542,6 +556,8 @@ def get_cluster_state(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse
|
||||
|
||||
@app.get("/api/internal/cluster/state")
|
||||
def get_cluster_state_internal() -> JSONResponse:
|
||||
"""Return the latest cluster-state snapshot for trusted internal callers."""
|
||||
|
||||
snapshot = storage.latest_cluster_state()
|
||||
if not snapshot:
|
||||
raise HTTPException(status_code=404, detail="cluster state unavailable")
|
||||
@ -554,6 +570,8 @@ async def approve_access_request(
|
||||
request: Request,
|
||||
ctx: AuthContext = Depends(_require_auth),
|
||||
) -> JSONResponse:
|
||||
"""Approve a verified access request and start account provisioning."""
|
||||
|
||||
_require_admin(ctx)
|
||||
with task_context("admin.access.approve"):
|
||||
payload = await _read_json_payload(request)
|
||||
@ -632,6 +650,8 @@ async def deny_access_request(
|
||||
request: Request,
|
||||
ctx: AuthContext = Depends(_require_auth),
|
||||
) -> JSONResponse:
|
||||
"""Deny a pending access request and record the administrator decision."""
|
||||
|
||||
_require_admin(ctx)
|
||||
with task_context("admin.access.deny"):
|
||||
payload = await _read_json_payload(request)
|
||||
@ -692,6 +712,8 @@ async def deny_access_request(
|
||||
|
||||
@app.post("/api/access/requests/{request_code}/retry")
|
||||
def retry_access_request(request_code: str) -> JSONResponse:
|
||||
"""Reset failed provisioning tasks so an approved request can retry."""
|
||||
|
||||
code = (request_code or "").strip()
|
||||
if not code:
|
||||
raise HTTPException(status_code=400, detail="request_code is required")
|
||||
@ -748,6 +770,8 @@ def retry_access_request(request_code: str) -> JSONResponse:
|
||||
|
||||
@app.post("/api/account/mailu/rotate")
|
||||
def rotate_mailu_password(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Rotate the caller's Mailu app password and trigger dependent syncs."""
|
||||
|
||||
_require_account_access(ctx)
|
||||
if not keycloak_admin.ready():
|
||||
raise HTTPException(status_code=503, detail="server not configured")
|
||||
@ -844,6 +868,8 @@ def rotate_mailu_password(ctx: AuthContext = Depends(_require_auth)) -> JSONResp
|
||||
|
||||
@app.post("/api/account/wger/reset")
|
||||
def reset_wger_password(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Reset the caller's Wger password and synchronize the service account."""
|
||||
|
||||
_require_account_access(ctx)
|
||||
if not keycloak_admin.ready():
|
||||
raise HTTPException(status_code=503, detail="server not configured")
|
||||
@ -871,6 +897,8 @@ def reset_wger_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRespon
|
||||
|
||||
@app.post("/api/account/firefly/reset")
|
||||
def reset_firefly_password(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Reset the caller's Firefly password and synchronize the service account."""
|
||||
|
||||
_require_account_access(ctx)
|
||||
if not keycloak_admin.ready():
|
||||
raise HTTPException(status_code=503, detail="server not configured")
|
||||
@ -898,6 +926,8 @@ def reset_firefly_password(ctx: AuthContext = Depends(_require_auth)) -> JSONRes
|
||||
|
||||
@app.post("/api/account/firefly/rotation/check")
|
||||
def firefly_rotation_check(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Check whether the caller's Firefly password rotation is healthy."""
|
||||
|
||||
_require_account_access(ctx)
|
||||
if not keycloak_admin.ready():
|
||||
raise HTTPException(status_code=503, detail="server not configured")
|
||||
@ -915,6 +945,8 @@ def firefly_rotation_check(ctx: AuthContext = Depends(_require_auth)) -> JSONRes
|
||||
|
||||
@app.post("/api/account/wger/rotation/check")
|
||||
def wger_rotation_check(ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Check whether the caller's Wger password rotation is healthy."""
|
||||
|
||||
_require_account_access(ctx)
|
||||
if not keycloak_admin.ready():
|
||||
raise HTTPException(status_code=503, detail="server not configured")
|
||||
@ -932,6 +964,8 @@ def wger_rotation_check(ctx: AuthContext = Depends(_require_auth)) -> JSONRespon
|
||||
|
||||
@app.post("/api/account/nextcloud/mail/sync")
|
||||
async def nextcloud_mail_sync(request: Request, ctx: AuthContext = Depends(_require_auth)) -> JSONResponse:
|
||||
"""Synchronize the caller's Mailu address into Nextcloud mail settings."""
|
||||
|
||||
_require_account_access(ctx)
|
||||
if not keycloak_admin.ready():
|
||||
raise HTTPException(status_code=503, detail="server not configured")
|
||||
@ -1008,5 +1042,7 @@ async def nextcloud_mail_sync(request: Request, ctx: AuthContext = Depends(_requ
|
||||
|
||||
@app.post("/events")
def mailu_event_listener(payload: dict[str, Any] | None = Body(default=None)) -> Response:
    """Receive a Mailu webhook event and relay it to the event runner."""
    code, body = mailu_events.handle_event(payload)
    return JSONResponse(body, status_code=code)
|
||||
|
||||
@ -19,6 +19,8 @@ class AuthContext:
|
||||
|
||||
|
||||
class KeycloakOIDC:
|
||||
"""Validate Keycloak-issued OIDC tokens and return trusted claims."""
|
||||
|
||||
def __init__(self, jwks_url: str, issuer: str, client_id: str) -> None:
|
||||
self._jwks_url = jwks_url
|
||||
self._issuer = issuer
|
||||
@ -97,6 +99,8 @@ class KeycloakOIDC:
|
||||
|
||||
|
||||
class Authenticator:
|
||||
"""Translate bearer tokens into Ariadne authorization context."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._oidc = KeycloakOIDC(settings.keycloak_jwks_url, settings.keycloak_issuer, settings.keycloak_client_id)
|
||||
|
||||
|
||||
@ -25,6 +25,8 @@ class DatabaseConfig:
|
||||
|
||||
|
||||
class Database:
|
||||
"""Small Postgres wrapper with migration and query helpers."""
|
||||
|
||||
def __init__(self, dsn: str, config: DatabaseConfig | None = None) -> None:
|
||||
if not dsn:
|
||||
raise RuntimeError("database URL is required")
|
||||
|
||||
@ -62,6 +62,8 @@ class ScheduleState:
|
||||
|
||||
|
||||
class Storage:
|
||||
"""Persist Ariadne access requests, task state, and audit data."""
|
||||
|
||||
def __init__(self, db: Database, portal_db: Database | None = None) -> None:
|
||||
self._db = db
|
||||
self._portal_db = portal_db or db
|
||||
|
||||
@ -35,6 +35,8 @@ def _k8s_request(method: str, path: str, payload: dict[str, Any] | None = None)
|
||||
|
||||
|
||||
def get_json(path: str) -> dict[str, Any]:
|
||||
"""Fetch a Kubernetes API path and return its JSON object payload."""
|
||||
|
||||
payload = _k8s_request("GET", path)
|
||||
if not isinstance(payload, dict):
|
||||
raise RuntimeError("unexpected kubernetes response")
|
||||
@ -42,6 +44,8 @@ def get_json(path: str) -> dict[str, Any]:
|
||||
|
||||
|
||||
def post_json(path: str, payload: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Post a JSON payload to the Kubernetes API and return the response."""
|
||||
|
||||
data = _k8s_request("POST", path, payload)
|
||||
if not isinstance(data, dict):
|
||||
raise RuntimeError("unexpected kubernetes response")
|
||||
@ -49,6 +53,8 @@ def post_json(path: str, payload: dict[str, Any]) -> dict[str, Any]:
|
||||
|
||||
|
||||
def delete_json(path: str) -> dict[str, Any]:
|
||||
"""Delete a Kubernetes API resource and return the response payload."""
|
||||
|
||||
data = _k8s_request("DELETE", path)
|
||||
if not isinstance(data, dict):
|
||||
raise RuntimeError("unexpected kubernetes response")
|
||||
@ -56,6 +62,8 @@ def delete_json(path: str) -> dict[str, Any]:
|
||||
|
||||
|
||||
def get_secret_value(namespace: str, name: str, key: str) -> str:
|
||||
"""Read and decode one string value from a Kubernetes Secret."""
|
||||
|
||||
data = get_json(f"/api/v1/namespaces/{namespace}/secrets/{name}")
|
||||
blob = data.get("data") if isinstance(data.get("data"), dict) else {}
|
||||
raw = blob.get(key)
|
||||
|
||||
@ -65,6 +65,8 @@ def _build_command(command: list[str] | str, env: dict[str, str] | None) -> list
|
||||
|
||||
|
||||
class PodExecutor:
|
||||
"""Run shell commands inside the freshest ready pod matching a selector."""
|
||||
|
||||
def __init__(self, namespace: str, label_selector: str, container: str | None = None) -> None:
|
||||
self._namespace = namespace
|
||||
self._label_selector = label_selector
|
||||
|
||||
@ -47,6 +47,8 @@ def _is_ready(pod: dict[str, Any]) -> bool:
|
||||
|
||||
|
||||
def list_pods(namespace: str, label_selector: str) -> list[dict[str, Any]]:
|
||||
"""List Kubernetes pods for a namespace and label selector."""
|
||||
|
||||
namespace = (namespace or "").strip()
|
||||
if not namespace:
|
||||
raise PodSelectionError("pod namespace missing")
|
||||
@ -58,6 +60,8 @@ def list_pods(namespace: str, label_selector: str) -> list[dict[str, Any]]:
|
||||
|
||||
|
||||
def select_pod(namespace: str, label_selector: str) -> PodRef:
|
||||
"""Select the newest ready pod matching a namespace and label selector."""
|
||||
|
||||
pods = list_pods(namespace, label_selector)
|
||||
candidates: list[tuple[float, PodRef]] = []
|
||||
for pod in pods:
|
||||
|
||||
@ -94,6 +94,8 @@ def _extract_attr(attrs: Any, key: str) -> str:
|
||||
|
||||
|
||||
class ProvisioningManager:
|
||||
"""Coordinate approved access requests across identity and app services."""
|
||||
|
||||
def __init__(self, db: Database, storage: Storage) -> None:
|
||||
self._db = db
|
||||
self._storage = storage
|
||||
|
||||
@ -72,6 +72,8 @@ CLUSTER_STATE_KUSTOMIZATIONS_NOT_READY = Gauge(
|
||||
|
||||
|
||||
def record_task_run(task: str, status: str, duration_sec: float | None) -> None:
|
||||
"""Increment task counters and duration histograms for one run."""
|
||||
|
||||
TASK_RUNS_TOTAL.labels(task=task, status=status).inc()
|
||||
if duration_sec is not None:
|
||||
TASK_DURATION_SECONDS.labels(task=task, status=status).observe(duration_sec)
|
||||
@ -84,6 +86,8 @@ def record_schedule_state(
|
||||
next_run_ts: float | None,
|
||||
ok: bool | None,
|
||||
) -> None:
|
||||
"""Publish the latest scheduler timestamps and status for a task."""
|
||||
|
||||
if last_run_ts:
|
||||
SCHEDULE_LAST_RUN_TS.labels(task=task).set(last_run_ts)
|
||||
if last_success_ts:
|
||||
@ -97,6 +101,8 @@ def record_schedule_state(
|
||||
|
||||
|
||||
def set_access_request_counts(counts: dict[str, int]) -> None:
|
||||
"""Set access-request gauges grouped by lifecycle status."""
|
||||
|
||||
for status, count in counts.items():
|
||||
ACCESS_REQUESTS.labels(status=status).set(count)
|
||||
|
||||
@ -108,6 +114,8 @@ def set_cluster_state_metrics(
|
||||
pods_running: float | None,
|
||||
kustomizations_not_ready: int | None,
|
||||
) -> None:
|
||||
"""Set cluster-state gauges from the most recent collector snapshot."""
|
||||
|
||||
CLUSTER_STATE_LAST_TS.set(collected_at.timestamp())
|
||||
if nodes_total is not None:
|
||||
CLUSTER_STATE_NODES_TOTAL.set(nodes_total)
|
||||
|
||||
@ -24,6 +24,8 @@ def _build_db(dsn: str, application_name: str) -> Database:
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""Run configured Ariadne and portal database migrations."""
|
||||
|
||||
if not settings.ariadne_run_migrations:
|
||||
return
|
||||
|
||||
|
||||
@ -22,6 +22,8 @@ class CronTask:
|
||||
|
||||
|
||||
class CronScheduler:
|
||||
"""Run named cron tasks while recording schedule state and outcomes."""
|
||||
|
||||
def __init__(self, storage: Storage, tick_sec: float = 5.0) -> None:
|
||||
self._storage = storage
|
||||
self._tick_sec = tick_sec
|
||||
|
||||
@ -3521,6 +3521,8 @@ def _build_attention_ranked(
|
||||
|
||||
|
||||
def collect_cluster_state() -> tuple[dict[str, Any], ClusterStateSummary]:
|
||||
"""Collect Kubernetes, Flux, Longhorn, and metric context into one snapshot."""
|
||||
|
||||
errors: list[str] = []
|
||||
collected_at = datetime.now(timezone.utc)
|
||||
|
||||
@ -3693,6 +3695,8 @@ def collect_cluster_state() -> tuple[dict[str, Any], ClusterStateSummary]:
|
||||
|
||||
|
||||
def run_cluster_state(storage: Storage) -> ClusterStateSummary:
|
||||
"""Collect cluster state, persist it, and prune old stored snapshots."""
|
||||
|
||||
snapshot, summary = collect_cluster_state()
|
||||
try:
|
||||
storage.record_cluster_state(snapshot)
|
||||
|
||||
@ -88,6 +88,8 @@ def _needs_rename_display(display: str | None) -> bool:
|
||||
|
||||
|
||||
class CommsService:
|
||||
"""Maintain Matrix/MAS guest naming and pruning hygiene."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
client_factory: type[httpx.Client] = httpx.Client,
|
||||
|
||||
@ -498,6 +498,8 @@ def _rotation_check_input(username: str) -> tuple[FireflySyncInput | UserSyncOut
|
||||
|
||||
|
||||
class FireflyService:
|
||||
"""Synchronize Keycloak users and password rotations into Firefly."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._executor = PodExecutor(
|
||||
settings.firefly_namespace,
|
||||
|
||||
@ -107,6 +107,8 @@ sleep infinity
|
||||
|
||||
|
||||
class ImageSweeperService:
|
||||
"""Create Kubernetes cleanup jobs that prune stale node images."""
|
||||
|
||||
def _job_payload(self, job_name: str) -> dict[str, Any]:
|
||||
job: dict[str, Any] = {
|
||||
"apiVersion": "batch/v1",
|
||||
|
||||
@ -9,6 +9,8 @@ from ..settings import settings
|
||||
|
||||
|
||||
class KeycloakAdminClient:
|
||||
"""Call the Keycloak admin API for user, group, and attribute updates."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._token: str = ""
|
||||
self._expires_at: float = 0.0
|
||||
|
||||
@ -29,6 +29,8 @@ def _profile_complete(user: dict[str, Any]) -> bool:
|
||||
|
||||
|
||||
def run_profile_sync() -> ProfileSyncSummary:
|
||||
"""Clear completed Keycloak profile actions once required fields exist."""
|
||||
|
||||
if not keycloak_admin.ready():
|
||||
summary = ProfileSyncSummary(0, 0, 0, 1, detail="keycloak admin not configured")
|
||||
logger.info(
|
||||
|
||||
@ -19,6 +19,8 @@ class SentEmail:
|
||||
|
||||
|
||||
class Mailer:
|
||||
"""Send onboarding and notification email through configured SMTP."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._host = settings.smtp_host
|
||||
self._port = settings.smtp_port
|
||||
|
||||
@ -115,6 +115,8 @@ def _password_too_long(password: str) -> bool:
|
||||
|
||||
|
||||
class MailuService:
|
||||
"""Synchronize Keycloak user mail settings into Mailu storage."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._db_config = {
|
||||
"host": settings.mailu_db_host,
|
||||
|
||||
@ -54,6 +54,8 @@ def _event_context(payload: dict[str, Any] | None) -> dict[str, Any]:
|
||||
|
||||
|
||||
class MailuEventRunner:
|
||||
"""Debounce Keycloak events into Mailu synchronization runs."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
min_interval_sec: float,
|
||||
|
||||
@ -39,6 +39,8 @@ def _normalize_payload(payload: Any) -> dict[str, Any]:
|
||||
|
||||
|
||||
class MetisService:
|
||||
"""Trigger Metis sentinel watch runs and normalize their response."""
|
||||
|
||||
def ready(self) -> bool:
|
||||
return bool(_watch_url())
|
||||
|
||||
|
||||
@ -106,6 +106,8 @@ class MailSyncCounters:
|
||||
|
||||
|
||||
class NextcloudService:
|
||||
"""Synchronize user mail configuration inside the Nextcloud pod."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._executor = PodExecutor(
|
||||
settings.nextcloud_namespace,
|
||||
|
||||
@ -24,6 +24,8 @@ HTTP_NOT_FOUND = 404
|
||||
|
||||
|
||||
def parse_size(value: str) -> int:
|
||||
"""Convert OpenSearch CAT index size text into bytes."""
|
||||
|
||||
if not value:
|
||||
return 0
|
||||
text = value.strip().lower()
|
||||
@ -65,6 +67,8 @@ def _delete_index(client: httpx.Client, index: str) -> None:
|
||||
|
||||
|
||||
def prune_indices() -> OpensearchPruneSummary:
|
||||
"""Delete old OpenSearch indices until usage is under the configured limit."""
|
||||
|
||||
patterns = [p.strip() for p in settings.opensearch_index_patterns.split(",") if p.strip()]
|
||||
if not patterns:
|
||||
return OpensearchPruneSummary(0, 0, 0, detail="no patterns configured")
|
||||
|
||||
@ -28,6 +28,8 @@ def _delete_pod(namespace: str, name: str) -> None:
|
||||
|
||||
|
||||
def clean_finished_pods() -> PodCleanerSummary:
|
||||
"""Delete succeeded and failed pods across namespaces."""
|
||||
|
||||
deleted = 0
|
||||
skipped = 0
|
||||
failures = 0
|
||||
|
||||
@ -303,6 +303,8 @@ path "kv/data/atlas/shared/*" {
|
||||
|
||||
|
||||
class VaultClient:
|
||||
"""Minimal HTTP client for Vault API requests."""
|
||||
|
||||
def __init__(self, base_url: str, token: str | None = None) -> None:
|
||||
self._base_url = base_url.rstrip("/")
|
||||
self._token = token
|
||||
@ -321,6 +323,8 @@ class VaultClient:
|
||||
|
||||
|
||||
class VaultService:
|
||||
"""Ensure Vault is initialized, unsealed, and configured for Atlas access."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._token: str | None = None
|
||||
|
||||
|
||||
@ -33,6 +33,8 @@ class VaultwardenLookup:
|
||||
|
||||
|
||||
class VaultwardenService:
|
||||
"""Invite eligible users to Vaultwarden through the admin interface."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._admin_lock = threading.Lock()
|
||||
self._admin_client: httpx.Client | None = None
|
||||
|
||||
@ -297,6 +297,8 @@ def _sync_user(
|
||||
|
||||
|
||||
def run_vaultwarden_sync() -> VaultwardenSyncSummary:
|
||||
"""Process pending Vaultwarden invite failures until the queue is healthy."""
|
||||
|
||||
consecutive_failures = 0
|
||||
counters = VaultwardenSyncCounters()
|
||||
|
||||
|
||||
@ -446,6 +446,8 @@ def _rotation_check_input(username: str) -> tuple[WgerSyncInput | UserSyncOutcom
|
||||
|
||||
|
||||
class WgerService:
|
||||
"""Synchronize Keycloak users and password rotations into Wger."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._executor = PodExecutor(
|
||||
settings.wger_namespace,
|
||||
|
||||
@ -39,6 +39,8 @@ def _http_error_detail(exc: httpx.HTTPStatusError) -> str:
|
||||
|
||||
|
||||
def safe_error_detail(exc: Exception, fallback: str) -> str:
|
||||
"""Return a user-safe error message without leaking noisy exception internals."""
|
||||
|
||||
runtime_detail = _runtime_error_detail(exc)
|
||||
if runtime_detail:
|
||||
return runtime_detail
|
||||
|
||||
@ -7,6 +7,8 @@ _BEARER_PARTS = 2
|
||||
|
||||
|
||||
def extract_bearer_token(request: Request) -> str | None:
|
||||
"""Extract a Bearer token from a FastAPI request if one is present."""
|
||||
|
||||
header = request.headers.get("Authorization", "")
|
||||
if not header:
|
||||
return None
|
||||
|
||||
@ -42,6 +42,8 @@ class LogConfig:
|
||||
|
||||
|
||||
class JsonFormatter(logging.Formatter):
|
||||
"""Format log records as structured JSON with Ariadne task context."""
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
payload: dict[str, Any] = {
|
||||
"timestamp": datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat(),
|
||||
@ -87,6 +89,8 @@ class _ContextFilter(logging.Filter):
|
||||
|
||||
|
||||
def configure_logging(config: LogConfig | None = None) -> None:
|
||||
"""Configure process-wide JSON logging once for Ariadne services."""
|
||||
|
||||
global _LOGGING_CONFIGURED
|
||||
if _LOGGING_CONFIGURED:
|
||||
return
|
||||
@ -109,11 +113,15 @@ def configure_logging(config: LogConfig | None = None) -> None:
|
||||
|
||||
|
||||
def get_logger(name: str) -> logging.Logger:
    """Fetch the logger registered under *name*; shared Ariadne config applies."""
    return logging.getLogger(name)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def task_context(name: str | None) -> Any:
|
||||
"""Attach a task name to log records emitted inside the context."""
|
||||
|
||||
token = _TASK_NAME.set(name)
|
||||
try:
|
||||
yield
|
||||
|
||||
@ -5,5 +5,7 @@ import string
|
||||
|
||||
|
||||
def random_password(length: int = 32) -> str:
    """Build a cryptographically random password of ASCII letters and digits."""
    chars = string.ascii_letters + string.digits
    picks = [secrets.choice(chars) for _ in range(length)]
    return "".join(picks)
|
||||
|
||||
75
scripts/check_docstrings.py
Normal file
75
scripts/check_docstrings.py
Normal file
@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Require docstrings on public production APIs."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import ast
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _needs_docstring(node: ast.AST, *, parent_class: str | None = None) -> bool:
|
||||
"""Return whether `node` should carry an API contract docstring."""
|
||||
|
||||
if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
|
||||
name = node.name
|
||||
if name.startswith("_") and name != "__init__":
|
||||
return False
|
||||
return not (parent_class and name.startswith("_"))
|
||||
if isinstance(node, ast.ClassDef):
|
||||
if node.name.startswith("_"):
|
||||
return False
|
||||
if any(
|
||||
(isinstance(dec, ast.Name) and dec.id == "dataclass")
|
||||
or (isinstance(dec, ast.Call) and isinstance(dec.func, ast.Name) and dec.func.id == "dataclass")
|
||||
for dec in node.decorator_list
|
||||
):
|
||||
return False
|
||||
if any(
|
||||
isinstance(base, ast.Name) and base.id in {"Exception", "RuntimeError", "BaseException"}
|
||||
for base in node.bases
|
||||
):
|
||||
return False
|
||||
return not any(isinstance(base, ast.Name) and base.id == "BaseModel" for base in node.bases)
|
||||
return False
|
||||
|
||||
|
||||
def _iter_nodes(tree: ast.AST) -> list[tuple[ast.AST, str | None]]:
|
||||
"""Yield top-level surface area nodes for contract checking."""
|
||||
|
||||
return [(node, None) for node in getattr(tree, "body", [])]
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
    """Scan the production package and fail on missing docstrings.

    Args:
        argv: Optional argument list; defaults to ``sys.argv[1:]`` so the
            script CLI is unchanged, while tests can drive it directly.

    Returns:
        0 when every required docstring is present, 1 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--root", default="ariadne")
    args = parser.parse_args(argv)

    root = Path(args.root)
    violations: list[str] = []
    for path in sorted(root.rglob("*.py")):
        # Skip bytecode caches and vendored virtual environments.
        if "__pycache__" in path.parts or ".venv" in path.parts:
            continue
        # filename= makes a SyntaxError report name the offending file.
        tree = ast.parse(path.read_text(encoding="utf-8"), filename=str(path))
        for node, parent_class in _iter_nodes(tree):
            if not _needs_docstring(node, parent_class=parent_class):
                continue
            if ast.get_docstring(node):
                continue
            if isinstance(node, ast.ClassDef):
                violations.append(f"{path}: class {node.name} is missing a docstring")
            elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                owner = f"{parent_class}." if parent_class else ""
                violations.append(f"{path}: {owner}{node.name} is missing a docstring")

    if violations:
        for item in violations:
            print(item)
        return 1
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: process exit status mirrors main()'s return code.
    raise SystemExit(main())
|
||||
@ -65,6 +65,37 @@ def _load_junit(path: str) -> dict[str, int]:
|
||||
return totals
|
||||
|
||||
|
||||
def _load_junit_cases(path: str) -> list[tuple[str, str]]:
|
||||
tree = ET.parse(path)
|
||||
root = tree.getroot()
|
||||
|
||||
suites: list[ET.Element]
|
||||
if root.tag == "testsuite":
|
||||
suites = [root]
|
||||
elif root.tag == "testsuites":
|
||||
suites = list(root.findall("testsuite"))
|
||||
else:
|
||||
suites = []
|
||||
|
||||
cases: list[tuple[str, str]] = []
|
||||
for suite in suites:
|
||||
for case in suite.findall("testcase"):
|
||||
name = (case.attrib.get("name") or "").strip()
|
||||
classname = (case.attrib.get("classname") or "").strip()
|
||||
if not name:
|
||||
continue
|
||||
test_id = f"{classname}::{name}" if classname else name
|
||||
status = "passed"
|
||||
if case.find("failure") is not None:
|
||||
status = "failed"
|
||||
elif case.find("error") is not None:
|
||||
status = "error"
|
||||
elif case.find("skipped") is not None:
|
||||
status = "skipped"
|
||||
cases.append((test_id, status))
|
||||
return cases
|
||||
|
||||
|
||||
def _read_http(url: str) -> str:
|
||||
try:
|
||||
with urllib.request.urlopen(url, timeout=10) as resp:
|
||||
@ -122,6 +153,18 @@ def _count_source_files_over_limit(repo_root: Path, max_lines: int = 500) -> int
|
||||
return count
|
||||
|
||||
|
||||
def _load_gate_rc(path: Path) -> int | None:
|
||||
if not path.exists():
|
||||
return None
|
||||
raw = path.read_text(encoding="utf-8").strip()
|
||||
if not raw:
|
||||
return None
|
||||
try:
|
||||
return int(raw)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def _load_json(path: Path) -> dict | None:
|
||||
if not path.exists():
|
||||
return None
|
||||
@ -177,10 +220,13 @@ def main() -> int:
|
||||
coverage = 0.0
|
||||
if os.path.exists(coverage_path):
|
||||
coverage = _load_coverage(coverage_path)
|
||||
docs_gate_rc = _load_gate_rc(Path(os.getenv("QUALITY_GATE_DOCS_RC_PATH", str(build_dir / "docs-naming.rc"))))
|
||||
source_lines_over_500 = _count_source_files_over_limit(repo_root, max_lines=500)
|
||||
totals = {"tests": 0, "failures": 0, "errors": 0, "skipped": 0}
|
||||
test_cases: list[tuple[str, str]] = []
|
||||
if os.path.exists(junit_path):
|
||||
totals = _load_junit(junit_path)
|
||||
test_cases = _load_junit_cases(junit_path)
|
||||
passed = max(totals["tests"] - totals["failures"] - totals["errors"] - totals["skipped"], 0)
|
||||
|
||||
outcome = "ok"
|
||||
@ -190,7 +236,7 @@ def main() -> int:
|
||||
"tests": "ok" if outcome == "ok" else "failed",
|
||||
"coverage": "ok" if coverage >= COVERAGE_GATE_TARGET_PERCENT else "failed",
|
||||
"loc": "ok" if source_lines_over_500 == 0 else "failed",
|
||||
"docs_naming": "not_applicable",
|
||||
"docs_naming": "ok" if docs_gate_rc == 0 else "failed",
|
||||
"gate_glue": "ok",
|
||||
"sonarqube": _sonarqube_check_status(build_dir),
|
||||
"supply_chain": _supply_chain_check_status(build_dir),
|
||||
@ -234,9 +280,14 @@ def main() -> int:
|
||||
"# TYPE platform_quality_gate_source_lines_over_500_total gauge",
|
||||
f'platform_quality_gate_source_lines_over_500_total{{suite="{suite}"}} {source_lines_over_500}',
|
||||
"# TYPE ariadne_quality_gate_checks_total gauge",
|
||||
"# TYPE platform_quality_gate_test_case_result gauge",
|
||||
"# TYPE ariadne_quality_gate_build_info gauge",
|
||||
f"ariadne_quality_gate_build_info{_label_str(labels)} 1",
|
||||
]
|
||||
payload_lines.extend(
|
||||
f'platform_quality_gate_test_case_result{{suite="{suite}",test="{_escape_label(test_name)}",status="{_escape_label(test_status)}"}} 1'
|
||||
for test_name, test_status in test_cases
|
||||
)
|
||||
payload_lines.extend(
|
||||
f'ariadne_quality_gate_checks_total{{suite="{suite}",check="{check_name}",result="{check_status}"}} 1'
|
||||
for check_name, check_status in checks.items()
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user