Compare commits
No commits in common. "master" and "feature/profile-avatar" have entirely different histories.
master ... feature/profile-avatar
@@ -4,6 +4,7 @@ node_modules
 frontend/node_modules
 frontend/dist
 frontend/.vite
+media
 docs
 __pycache__
 .venv
.gitignore (4 changes, vendored)
@@ -12,7 +4,3 @@ frontend/dist/
 .coverage
 docs/*.md
 AGENTS.md
-
-# Local-only wallpapers (do not commit)
-media/atlas_bg.jpg
-media/titan-iac-bg.png
@@ -1,4 +1,4 @@
-FROM registry.bstein.dev/bstein/python:3.12-slim-arm64
+FROM python:3.12-slim

 ENV PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1
@@ -1,16 +1,15 @@
 # Build stage
-FROM registry.bstein.dev/bstein/node:20-alpine-arm64 AS build
+FROM node:20-alpine AS build
 WORKDIR /app

 COPY frontend/package*.json ./
 RUN npm ci --ignore-scripts

 COPY frontend/ ./
-COPY media/ ./public/media/
 RUN npm run build

 # Runtime stage
-FROM registry.bstein.dev/bstein/nginx:1.27-alpine-arm64
+FROM nginx:1.27-alpine
 WORKDIR /usr/share/nginx/html

 # Minimal nginx config with SPA fallback.
Jenkinsfile (18 changes, vendored)
@@ -50,8 +50,7 @@ spec:
   - name: docker-config-writable
     emptyDir: {}
   - name: dind-storage
-    persistentVolumeClaim:
-      claimName: jenkins-dind-cache
+    emptyDir: {}
   - name: harbor-config
     secret:
       secretName: harbor-bstein-robot
@@ -87,16 +86,7 @@ spec:
   container('builder') {
     sh '''
       set -euo pipefail
-      for attempt in 1 2 3 4 5; do
-        if apk add --no-cache bash git jq; then
-          break
-        fi
-        if [ "$attempt" -eq 5 ]; then
-          echo "apk add failed after ${attempt} attempts" >&2
-          exit 1
-        fi
-        sleep $((attempt * 2))
-      done
+      apk add --no-cache bash git jq
       mkdir -p /root/.docker
       cp /docker-config/config.json /root/.docker/config.json
     '''
@@ -141,9 +131,7 @@ spec:
         fi
         sleep 2
       done
-      docker buildx create --name bstein-builder --driver docker-container \
-        --driver-opt image=registry.bstein.dev/bstein/buildkit:buildx-stable-1-arm64 \
-        --bootstrap --use || docker buildx use bstein-builder
+      docker buildx create --name bstein-builder --driver docker-container --bootstrap --use || docker buildx use bstein-builder
     '''
   }
 }
@@ -1,6 +1,6 @@
 # bstein-dev-home

-Atlas portal + lab status site with a Flask backend and Vue frontend.
+Portfolio + lab status site with a Flask backend and Vue frontend.

 - Jenkins pipeline builds arm64 Docker images (`bstein-dev-home-frontend`, `bstein-dev-home-backend`) and pushes to `registry.bstein.dev/bstein`.
 - Flux deploys to `bstein.dev` with Traefik routing `/api` to the backend and the rest to the frontend.
backend/app.py (158 changes)
@@ -1,7 +1,161 @@
 from __future__ import annotations

-from atlas_portal.app_factory import create_app
+import json
+import os
+import time
+from pathlib import Path
+from typing import Any
+from urllib.error import URLError
+from urllib.parse import urlencode
+from urllib.request import urlopen
+
+from flask import Flask, jsonify, send_from_directory
+from flask_cors import CORS


-app = create_app()
+app = Flask(__name__, static_folder="../frontend/dist", static_url_path="")
+CORS(app, resources={r"/api/*": {"origins": "*"}})

+MONERO_GET_INFO_URL = os.getenv("MONERO_GET_INFO_URL", "http://monerod.crypto.svc.cluster.local:18081/get_info")
+VM_BASE_URL = os.getenv(
+    "VM_BASE_URL",
+    "http://victoria-metrics-single-server.monitoring.svc.cluster.local:8428",
+).rstrip("/")
+VM_QUERY_TIMEOUT_SEC = float(os.getenv("VM_QUERY_TIMEOUT_SEC", "2"))
+HTTP_CHECK_TIMEOUT_SEC = float(os.getenv("HTTP_CHECK_TIMEOUT_SEC", "2"))
+LAB_STATUS_CACHE_SEC = float(os.getenv("LAB_STATUS_CACHE_SEC", "30"))
+GRAFANA_HEALTH_URL = os.getenv("GRAFANA_HEALTH_URL", "https://metrics.bstein.dev/api/health")
+OCEANUS_NODE_EXPORTER_URL = os.getenv("OCEANUS_NODE_EXPORTER_URL", "http://192.168.22.24:9100/metrics")
+
+_LAB_STATUS_CACHE: dict[str, Any] = {"ts": 0.0, "value": None}
+
+@app.route("/api/healthz")
+def healthz() -> Any:
+    return jsonify({"ok": True})
+
+
+@app.route("/api/monero/get_info")
+def monero_get_info() -> Any:
+    try:
+        with urlopen(MONERO_GET_INFO_URL, timeout=2) as resp:
+            payload = json.loads(resp.read().decode("utf-8"))
+        return jsonify(payload)
+    except (URLError, TimeoutError, ValueError) as exc:
+        return jsonify({"error": str(exc), "url": MONERO_GET_INFO_URL}), 503
+
+
+def _vm_query(expr: str) -> float | None:
+    url = f"{VM_BASE_URL}/api/v1/query?{urlencode({'query': expr})}"
+    with urlopen(url, timeout=VM_QUERY_TIMEOUT_SEC) as resp:
+        payload = json.loads(resp.read().decode("utf-8"))
+
+    if payload.get("status") != "success":
+        return None
+
+    result = (payload.get("data") or {}).get("result") or []
+    if not result:
+        return None
+
+    values: list[float] = []
+    for item in result:
+        try:
+            values.append(float(item["value"][1]))
+        except (KeyError, IndexError, TypeError, ValueError):
+            continue
+
+    if not values:
+        return None
+
+    return max(values)
+
+
+def _http_ok(url: str, expect_substring: str | None = None) -> bool:
+    try:
+        with urlopen(url, timeout=HTTP_CHECK_TIMEOUT_SEC) as resp:
+            if getattr(resp, "status", 200) != 200:
+                return False
+            if expect_substring:
+                chunk = resp.read(4096).decode("utf-8", errors="ignore")
+                return expect_substring in chunk
+            return True
+    except (URLError, TimeoutError, ValueError):
+        return False
+
+
+@app.route("/api/lab/status")
+def lab_status() -> Any:
+    now = time.time()
+    cached = _LAB_STATUS_CACHE.get("value")
+    if cached and (now - float(_LAB_STATUS_CACHE.get("ts", 0.0)) < LAB_STATUS_CACHE_SEC):
+        return jsonify(cached)
+
+    connected = False
+    atlas_up = False
+    atlas_known = False
+    atlas_source = "unknown"
+    oceanus_up = False
+    oceanus_known = False
+    oceanus_source = "unknown"
+
+    try:
+        atlas_value = _vm_query('max(up{job="kubernetes-apiservers"})')
+        oceanus_value = _vm_query('max(up{instance=~"(titan-23|192.168.22.24)(:9100)?"})')
+        connected = True
+        atlas_known = atlas_value is not None
+        atlas_up = bool(atlas_value and atlas_value > 0.5)
+        atlas_source = "victoria-metrics"
+        oceanus_known = oceanus_value is not None
+        oceanus_up = bool(oceanus_value and oceanus_value > 0.5)
+        oceanus_source = "victoria-metrics"
+    except (URLError, TimeoutError, ValueError):
+        atlas_value = None
+        oceanus_value = None
+
+    if not atlas_known:
+        if _http_ok(GRAFANA_HEALTH_URL):
+            connected = True
+            atlas_known = True
+            atlas_up = True
+            atlas_source = "grafana-health"
+
+    if not oceanus_up:
+        if _http_ok(OCEANUS_NODE_EXPORTER_URL, expect_substring="node_exporter_build_info"):
+            connected = True
+            oceanus_known = True
+            oceanus_up = True
+            oceanus_source = "node-exporter"
+
+    payload = {
+        "connected": connected,
+        "atlas": {"up": atlas_up, "known": atlas_known, "source": atlas_source},
+        "oceanus": {"up": oceanus_up, "known": oceanus_known, "source": oceanus_source},
+        "checked_at": int(now),
+    }
+
+    _LAB_STATUS_CACHE["ts"] = now
+    _LAB_STATUS_CACHE["value"] = payload
+    return jsonify(payload)
+
+
+@app.route("/", defaults={"path": ""})
+@app.route("/<path:path>")
+def serve_frontend(path: str) -> Any:
+    dist_path = Path(app.static_folder)
+    index_path = dist_path / "index.html"
+
+    if dist_path.exists() and index_path.exists():
+        target = dist_path / path
+        if path and target.exists():
+            return send_from_directory(app.static_folder, path)
+        return send_from_directory(app.static_folder, "index.html")
+
+    return jsonify(
+        {
+            "message": "Frontend not built yet. Run `npm install && npm run build` inside frontend/, then restart Flask.",
+            "available_endpoints": ["/api/healthz", "/api/monero/get_info"],
+        }
+    )
+
+
+if __name__ == "__main__":
+    app.run(host="0.0.0.0", port=5000, debug=True)
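For reference, a minimal usage sketch for the `/api/lab/status` endpoint added in the hunk above. The local URL and port are taken from the `app.run(...)` call in the diff, and the printed fields mirror the payload dict built in `lab_status()`; this assumes the backend is running locally via `python backend/app.py`.

# Sketch: poll the lab status endpoint and print the health summary.
import json
from urllib.request import urlopen

with urlopen("http://127.0.0.1:5000/api/lab/status", timeout=5) as resp:
    status = json.loads(resp.read().decode("utf-8"))

# The payload mirrors the dict built in lab_status():
# {"connected": ..., "atlas": {...}, "oceanus": {...}, "checked_at": ...}
for cluster in ("atlas", "oceanus"):
    info = status.get(cluster, {})
    print(f"{cluster}: up={info.get('up')} known={info.get('known')} source={info.get('source')}")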
@@ -1,6 +0,0 @@
-from __future__ import annotations
-
-from .app_factory import create_app
-
-__all__ = ["create_app"]
-
@@ -1,46 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any
-
-from flask import Flask, jsonify, send_from_directory
-from flask_cors import CORS
-from werkzeug.middleware.proxy_fix import ProxyFix
-
-from .routes import access_requests, account, admin_access, ai, auth_config, health, lab, monero
-
-
-def create_app() -> Flask:
-    app = Flask(__name__, static_folder="../frontend/dist", static_url_path="")
-    app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1)
-    CORS(app, resources={r"/api/*": {"origins": "*"}})
-
-    health.register(app)
-    auth_config.register(app)
-    account.register(app)
-    access_requests.register(app)
-    admin_access.register(app)
-    monero.register(app)
-    lab.register(app)
-    ai.register(app)
-
-    @app.route("/", defaults={"path": ""})
-    @app.route("/<path:path>")
-    def serve_frontend(path: str) -> Any:
-        dist_path = Path(app.static_folder)
-        index_path = dist_path / "index.html"
-
-        if dist_path.exists() and index_path.exists():
-            target = dist_path / path
-            if path and target.exists():
-                return send_from_directory(app.static_folder, path)
-            return send_from_directory(app.static_folder, "index.html")
-
-        return jsonify(
-            {
-                "message": "Frontend not built yet. Run `npm install && npm run build` inside frontend/, then restart Flask.",
-                "available_endpoints": ["/api/healthz", "/api/monero/get_info"],
-            }
-        )
-
-    return app
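The removed factory above wires each routes module through a `register(app)` call. Those routes modules are not shown in this comparison; the following is only a hypothetical sketch of what one such module could look like under that convention, with the blueprint name and route chosen for illustration (the `/api/healthz` path does appear in the new backend/app.py, but the module internals are assumed).

# Hypothetical sketch of the register(app) convention used by the removed
# atlas_portal.routes modules; names and the example route are illustrative only.
from flask import Blueprint, Flask, jsonify

bp = Blueprint("health", __name__, url_prefix="/api")


@bp.route("/healthz")
def healthz():
    return jsonify({"ok": True})


def register(app: Flask) -> None:
    # create_app() calls e.g. health.register(app); registering a blueprint
    # is one straightforward way to implement that hook.
    app.register_blueprint(bp)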
@@ -1,98 +0,0 @@
-from __future__ import annotations
-
-import logging
-import time
-from typing import Any
-
-import httpx
-from flask import jsonify, request
-
-from . import settings
-
-logger = logging.getLogger(__name__)
-
-
-class AriadneError(Exception):
-    def __init__(self, message: str, status_code: int = 502) -> None:
-        super().__init__(message)
-        self.status_code = status_code
-
-
-def enabled() -> bool:
-    return bool(settings.ARIADNE_URL)
-
-
-def _auth_headers() -> dict[str, str]:
-    header = request.headers.get("Authorization", "").strip()
-    return {"Authorization": header} if header else {}
-
-
-def _url(path: str) -> str:
-    base = settings.ARIADNE_URL.rstrip("/")
-    suffix = path.lstrip("/")
-    return f"{base}/{suffix}" if suffix else base
-
-
-def request_raw(
-    method: str,
-    path: str,
-    *,
-    payload: Any | None = None,
-    params: dict[str, Any] | None = None,
-) -> httpx.Response:
-    if not enabled():
-        raise AriadneError("ariadne not configured", 503)
-
-    url = _url(path)
-    attempts = max(1, settings.ARIADNE_RETRY_COUNT)
-    for attempt in range(1, attempts + 1):
-        try:
-            with httpx.Client(timeout=settings.ARIADNE_TIMEOUT_SEC) as client:
-                resp = client.request(
-                    method,
-                    url,
-                    headers=_auth_headers(),
-                    json=payload,
-                    params=params,
-                )
-            if resp.status_code >= 500:
-                logger.warning(
-                    "ariadne error response",
-                    extra={"method": method, "path": path, "status": resp.status_code},
-                )
-            return resp
-        except httpx.RequestError as exc:
-            logger.warning(
-                "ariadne request failed",
-                extra={
-                    "method": method,
-                    "path": path,
-                    "attempt": attempt,
-                    "timeout_sec": settings.ARIADNE_TIMEOUT_SEC,
-                    "error": str(exc),
-                },
-            )
-            if attempt >= attempts:
-                raise AriadneError("ariadne unavailable", 502) from exc
-            time.sleep(settings.ARIADNE_RETRY_BACKOFF_SEC * attempt)
-
-
-def proxy(
-    method: str,
-    path: str,
-    *,
-    payload: Any | None = None,
-    params: dict[str, Any] | None = None,
-) -> tuple[Any, int]:
-    try:
-        resp = request_raw(method, path, payload=payload, params=params)
-    except AriadneError as exc:
-        return jsonify({"error": str(exc)}), exc.status_code
-
-    try:
-        data = resp.json()
-    except ValueError:
-        detail = resp.text.strip()
-        data = {"error": detail or "upstream error"}
-
-    return jsonify(data), resp.status_code
@ -1,196 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from typing import Any, Iterator
|
|
||||||
|
|
||||||
import psycopg
|
|
||||||
from psycopg.rows import dict_row
|
|
||||||
from psycopg_pool import ConnectionPool
|
|
||||||
|
|
||||||
from . import settings
|
|
||||||
|
|
||||||
|
|
||||||
MIGRATION_LOCK_ID = 982731
|
|
||||||
_pool: ConnectionPool | None = None
|
|
||||||
|
|
||||||
|
|
||||||
def configured() -> bool:
|
|
||||||
return bool(settings.PORTAL_DATABASE_URL)
|
|
||||||
|
|
||||||
|
|
||||||
def _pool_kwargs() -> dict[str, Any]:
|
|
||||||
options = (
|
|
||||||
f"-c lock_timeout={settings.PORTAL_DB_LOCK_TIMEOUT_SEC}s "
|
|
||||||
f"-c statement_timeout={settings.PORTAL_DB_STATEMENT_TIMEOUT_SEC}s "
|
|
||||||
f"-c idle_in_transaction_session_timeout={settings.PORTAL_DB_IDLE_IN_TX_TIMEOUT_SEC}s"
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
"connect_timeout": settings.PORTAL_DB_CONNECT_TIMEOUT_SEC,
|
|
||||||
"application_name": "atlas_portal",
|
|
||||||
"options": options,
|
|
||||||
"row_factory": dict_row,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _get_pool() -> ConnectionPool:
|
|
||||||
global _pool
|
|
||||||
if _pool is None:
|
|
||||||
if not settings.PORTAL_DATABASE_URL:
|
|
||||||
raise RuntimeError("portal database not configured")
|
|
||||||
_pool = ConnectionPool(
|
|
||||||
conninfo=settings.PORTAL_DATABASE_URL,
|
|
||||||
min_size=settings.PORTAL_DB_POOL_MIN,
|
|
||||||
max_size=settings.PORTAL_DB_POOL_MAX,
|
|
||||||
kwargs=_pool_kwargs(),
|
|
||||||
)
|
|
||||||
return _pool
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def connect() -> Iterator[psycopg.Connection[Any]]:
|
|
||||||
if not settings.PORTAL_DATABASE_URL:
|
|
||||||
raise RuntimeError("portal database not configured")
|
|
||||||
with _get_pool().connection() as conn:
|
|
||||||
conn.row_factory = dict_row
|
|
||||||
yield conn
|
|
||||||
|
|
||||||
|
|
||||||
def _try_advisory_lock(conn: psycopg.Connection[Any], lock_id: int) -> bool:
|
|
||||||
row = conn.execute("SELECT pg_try_advisory_lock(%s)", (lock_id,)).fetchone()
|
|
||||||
if isinstance(row, dict):
|
|
||||||
return bool(row.get("pg_try_advisory_lock"))
|
|
||||||
return bool(row and row[0])
|
|
||||||
|
|
||||||
|
|
||||||
def _release_advisory_lock(conn: psycopg.Connection[Any], lock_id: int) -> None:
|
|
||||||
try:
|
|
||||||
conn.execute("SELECT pg_advisory_unlock(%s)", (lock_id,))
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def run_migrations() -> None:
|
|
||||||
if not settings.PORTAL_DATABASE_URL or not settings.PORTAL_RUN_MIGRATIONS:
|
|
||||||
return
|
|
||||||
with connect() as conn:
|
|
||||||
try:
|
|
||||||
conn.execute(f"SET lock_timeout = '{settings.PORTAL_DB_LOCK_TIMEOUT_SEC}s'")
|
|
||||||
conn.execute(f"SET statement_timeout = '{settings.PORTAL_DB_STATEMENT_TIMEOUT_SEC}s'")
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
if not _try_advisory_lock(conn, MIGRATION_LOCK_ID):
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS access_requests (
|
|
||||||
request_code TEXT PRIMARY KEY,
|
|
||||||
username TEXT NOT NULL,
|
|
||||||
first_name TEXT,
|
|
||||||
last_name TEXT,
|
|
||||||
contact_email TEXT,
|
|
||||||
note TEXT,
|
|
||||||
status TEXT NOT NULL,
|
|
||||||
email_verification_token_hash TEXT,
|
|
||||||
email_verification_sent_at TIMESTAMPTZ,
|
|
||||||
email_verified_at TIMESTAMPTZ,
|
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
|
||||||
decided_at TIMESTAMPTZ,
|
|
||||||
decided_by TEXT,
|
|
||||||
initial_password TEXT,
|
|
||||||
initial_password_revealed_at TIMESTAMPTZ,
|
|
||||||
provision_attempted_at TIMESTAMPTZ,
|
|
||||||
welcome_email_sent_at TIMESTAMPTZ,
|
|
||||||
approval_flags TEXT[],
|
|
||||||
approval_note TEXT,
|
|
||||||
denial_note TEXT
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
ALTER TABLE access_requests
|
|
||||||
ADD COLUMN IF NOT EXISTS initial_password TEXT,
|
|
||||||
ADD COLUMN IF NOT EXISTS initial_password_revealed_at TIMESTAMPTZ,
|
|
||||||
ADD COLUMN IF NOT EXISTS provision_attempted_at TIMESTAMPTZ,
|
|
||||||
ADD COLUMN IF NOT EXISTS email_verification_token_hash TEXT,
|
|
||||||
ADD COLUMN IF NOT EXISTS email_verification_sent_at TIMESTAMPTZ,
|
|
||||||
ADD COLUMN IF NOT EXISTS email_verified_at TIMESTAMPTZ,
|
|
||||||
ADD COLUMN IF NOT EXISTS welcome_email_sent_at TIMESTAMPTZ,
|
|
||||||
ADD COLUMN IF NOT EXISTS first_name TEXT,
|
|
||||||
ADD COLUMN IF NOT EXISTS last_name TEXT,
|
|
||||||
ADD COLUMN IF NOT EXISTS approval_flags TEXT[],
|
|
||||||
ADD COLUMN IF NOT EXISTS approval_note TEXT,
|
|
||||||
ADD COLUMN IF NOT EXISTS denial_note TEXT
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS access_request_tasks (
|
|
||||||
request_code TEXT NOT NULL REFERENCES access_requests(request_code) ON DELETE CASCADE,
|
|
||||||
task TEXT NOT NULL,
|
|
||||||
status TEXT NOT NULL,
|
|
||||||
detail TEXT,
|
|
||||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
|
||||||
PRIMARY KEY (request_code, task)
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS access_request_onboarding_steps (
|
|
||||||
request_code TEXT NOT NULL REFERENCES access_requests(request_code) ON DELETE CASCADE,
|
|
||||||
step TEXT NOT NULL,
|
|
||||||
completed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
|
||||||
PRIMARY KEY (request_code, step)
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE IF NOT EXISTS access_request_onboarding_artifacts (
|
|
||||||
request_code TEXT NOT NULL REFERENCES access_requests(request_code) ON DELETE CASCADE,
|
|
||||||
artifact TEXT NOT NULL,
|
|
||||||
value_hash TEXT NOT NULL,
|
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
|
||||||
PRIMARY KEY (request_code, artifact)
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE INDEX IF NOT EXISTS access_requests_status_created_at
|
|
||||||
ON access_requests (status, created_at)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE INDEX IF NOT EXISTS access_request_tasks_request_code
|
|
||||||
ON access_request_tasks (request_code)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE INDEX IF NOT EXISTS access_request_onboarding_steps_request_code
|
|
||||||
ON access_request_onboarding_steps (request_code)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE INDEX IF NOT EXISTS access_request_onboarding_artifacts_request_code
|
|
||||||
ON access_request_onboarding_artifacts (request_code)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
CREATE UNIQUE INDEX IF NOT EXISTS access_requests_username_pending
|
|
||||||
ON access_requests (username)
|
|
||||||
WHERE status = 'pending'
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
finally:
|
|
||||||
_release_advisory_lock(conn, MIGRATION_LOCK_ID)
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_schema() -> None:
|
|
||||||
run_migrations()
|
|
||||||
@ -1,136 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import re
|
|
||||||
import time
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from . import settings
|
|
||||||
from .k8s import get_json, post_json
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_name_fragment(value: str, max_len: int = 24) -> str:
|
|
||||||
cleaned = re.sub(r"[^a-z0-9-]+", "-", (value or "").lower()).strip("-")
|
|
||||||
if not cleaned:
|
|
||||||
cleaned = "user"
|
|
||||||
return cleaned[:max_len].rstrip("-") or "user"
|
|
||||||
|
|
||||||
|
|
||||||
def _job_from_cronjob(
|
|
||||||
cronjob: dict[str, Any],
|
|
||||||
username: str,
|
|
||||||
email: str,
|
|
||||||
password: str,
|
|
||||||
) -> dict[str, Any]:
|
|
||||||
spec = cronjob.get("spec") if isinstance(cronjob.get("spec"), dict) else {}
|
|
||||||
jt = spec.get("jobTemplate") if isinstance(spec.get("jobTemplate"), dict) else {}
|
|
||||||
job_spec = jt.get("spec") if isinstance(jt.get("spec"), dict) else {}
|
|
||||||
|
|
||||||
now = int(time.time())
|
|
||||||
safe_user = _safe_name_fragment(username)
|
|
||||||
job_name = f"firefly-user-sync-{safe_user}-{now}"
|
|
||||||
|
|
||||||
job: dict[str, Any] = {
|
|
||||||
"apiVersion": "batch/v1",
|
|
||||||
"kind": "Job",
|
|
||||||
"metadata": {
|
|
||||||
"name": job_name,
|
|
||||||
"namespace": settings.FIREFLY_NAMESPACE,
|
|
||||||
"labels": {
|
|
||||||
"app": "firefly-user-sync",
|
|
||||||
"atlas.bstein.dev/trigger": "portal",
|
|
||||||
"atlas.bstein.dev/username": safe_user,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"spec": job_spec,
|
|
||||||
}
|
|
||||||
|
|
||||||
tpl = job.get("spec", {}).get("template", {})
|
|
||||||
pod_spec = tpl.get("spec") if isinstance(tpl.get("spec"), dict) else {}
|
|
||||||
containers = pod_spec.get("containers") if isinstance(pod_spec.get("containers"), list) else []
|
|
||||||
if containers and isinstance(containers[0], dict):
|
|
||||||
env = containers[0].get("env")
|
|
||||||
if not isinstance(env, list):
|
|
||||||
env = []
|
|
||||||
env = [
|
|
||||||
e
|
|
||||||
for e in env
|
|
||||||
if not (
|
|
||||||
isinstance(e, dict)
|
|
||||||
and e.get("name") in {"FIREFLY_USER_EMAIL", "FIREFLY_USER_PASSWORD"}
|
|
||||||
)
|
|
||||||
]
|
|
||||||
env.append({"name": "FIREFLY_USER_EMAIL", "value": email})
|
|
||||||
env.append({"name": "FIREFLY_USER_PASSWORD", "value": password})
|
|
||||||
containers[0]["env"] = env
|
|
||||||
pod_spec["containers"] = containers
|
|
||||||
tpl["spec"] = pod_spec
|
|
||||||
job["spec"]["template"] = tpl
|
|
||||||
|
|
||||||
return job
|
|
||||||
|
|
||||||
|
|
||||||
def _job_succeeded(job: dict[str, Any]) -> bool:
|
|
||||||
status = job.get("status") if isinstance(job.get("status"), dict) else {}
|
|
||||||
if int(status.get("succeeded") or 0) > 0:
|
|
||||||
return True
|
|
||||||
conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
|
|
||||||
for cond in conditions:
|
|
||||||
if not isinstance(cond, dict):
|
|
||||||
continue
|
|
||||||
if cond.get("type") == "Complete" and cond.get("status") == "True":
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _job_failed(job: dict[str, Any]) -> bool:
|
|
||||||
status = job.get("status") if isinstance(job.get("status"), dict) else {}
|
|
||||||
if int(status.get("failed") or 0) > 0:
|
|
||||||
return True
|
|
||||||
conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
|
|
||||||
for cond in conditions:
|
|
||||||
if not isinstance(cond, dict):
|
|
||||||
continue
|
|
||||||
if cond.get("type") == "Failed" and cond.get("status") == "True":
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def trigger(username: str, email: str, password: str, wait: bool = True) -> dict[str, Any]:
|
|
||||||
username = (username or "").strip()
|
|
||||||
if not username:
|
|
||||||
raise RuntimeError("missing username")
|
|
||||||
if not password:
|
|
||||||
raise RuntimeError("missing password")
|
|
||||||
|
|
||||||
namespace = settings.FIREFLY_NAMESPACE
|
|
||||||
cronjob_name = settings.FIREFLY_USER_SYNC_CRONJOB
|
|
||||||
if not namespace or not cronjob_name:
|
|
||||||
raise RuntimeError("firefly sync not configured")
|
|
||||||
|
|
||||||
cronjob = get_json(f"/apis/batch/v1/namespaces/{namespace}/cronjobs/{cronjob_name}")
|
|
||||||
job_payload = _job_from_cronjob(cronjob, username, email, password)
|
|
||||||
created = post_json(f"/apis/batch/v1/namespaces/{namespace}/jobs", job_payload)
|
|
||||||
|
|
||||||
job_name = (
|
|
||||||
created.get("metadata", {}).get("name")
|
|
||||||
if isinstance(created.get("metadata"), dict)
|
|
||||||
else job_payload.get("metadata", {}).get("name")
|
|
||||||
)
|
|
||||||
if not isinstance(job_name, str) or not job_name:
|
|
||||||
raise RuntimeError("job name missing")
|
|
||||||
|
|
||||||
if not wait:
|
|
||||||
return {"job": job_name, "status": "queued"}
|
|
||||||
|
|
||||||
deadline = time.time() + float(settings.FIREFLY_USER_SYNC_WAIT_TIMEOUT_SEC)
|
|
||||||
last_state = "running"
|
|
||||||
while time.time() < deadline:
|
|
||||||
job = get_json(f"/apis/batch/v1/namespaces/{namespace}/jobs/{job_name}")
|
|
||||||
if _job_succeeded(job):
|
|
||||||
return {"job": job_name, "status": "ok"}
|
|
||||||
if _job_failed(job):
|
|
||||||
return {"job": job_name, "status": "error"}
|
|
||||||
time.sleep(2)
|
|
||||||
last_state = "running"
|
|
||||||
|
|
||||||
return {"job": job_name, "status": last_state}
|
|
||||||
@@ -1,56 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any
-
-import httpx
-
-from . import settings
-
-
-_K8S_BASE_URL = "https://kubernetes.default.svc"
-_SA_PATH = Path("/var/run/secrets/kubernetes.io/serviceaccount")
-
-
-def _read_service_account() -> tuple[str, str]:
-    token_path = _SA_PATH / "token"
-    ca_path = _SA_PATH / "ca.crt"
-    if not token_path.exists() or not ca_path.exists():
-        raise RuntimeError("kubernetes service account token missing")
-    token = token_path.read_text().strip()
-    if not token:
-        raise RuntimeError("kubernetes service account token empty")
-    return token, str(ca_path)
-
-
-def get_json(path: str) -> dict[str, Any]:
-    token, ca_path = _read_service_account()
-    url = f"{_K8S_BASE_URL}{path}"
-    with httpx.Client(
-        verify=ca_path,
-        timeout=settings.K8S_API_TIMEOUT_SEC,
-        headers={"Authorization": f"Bearer {token}"},
-    ) as client:
-        resp = client.get(url)
-        resp.raise_for_status()
-        data = resp.json()
-    if not isinstance(data, dict):
-        raise RuntimeError("unexpected kubernetes response")
-    return data
-
-
-def post_json(path: str, payload: dict[str, Any]) -> dict[str, Any]:
-    token, ca_path = _read_service_account()
-    url = f"{_K8S_BASE_URL}{path}"
-    with httpx.Client(
-        verify=ca_path,
-        timeout=settings.K8S_API_TIMEOUT_SEC,
-        headers={"Authorization": f"Bearer {token}"},
-    ) as client:
-        resp = client.post(url, json=payload)
-        resp.raise_for_status()
-        data = resp.json()
-    if not isinstance(data, dict):
-        raise RuntimeError("unexpected kubernetes response")
-    return data
-
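A short usage sketch for the removed Kubernetes helper above. It only works inside a pod with a mounted service-account token; the module path, namespace, and manifest name below are assumptions for illustration, not values confirmed by this diff.

# Sketch: list Jobs in a namespace and (optionally) create one from a prebuilt
# manifest, using the removed get_json/post_json helpers. Runs only in-cluster,
# where /var/run/secrets/kubernetes.io/serviceaccount is mounted.
from atlas_portal.k8s import get_json, post_json  # module path assumed from the relative imports in this diff

jobs = get_json("/apis/batch/v1/namespaces/example-ns/jobs")
for item in jobs.get("items", []):
    print(item.get("metadata", {}).get("name"))

# job_manifest would be a dict shaped like the Job payloads built elsewhere in this diff:
# created = post_json("/apis/batch/v1/namespaces/example-ns/jobs", job_manifest)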
@ -1,423 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import time
|
|
||||||
from functools import wraps
|
|
||||||
from typing import Any
|
|
||||||
from urllib.parse import quote
|
|
||||||
|
|
||||||
from flask import g, jsonify, request
|
|
||||||
import httpx
|
|
||||||
import jwt
|
|
||||||
from jwt import PyJWKClient
|
|
||||||
|
|
||||||
from . import settings
|
|
||||||
|
|
||||||
|
|
||||||
class KeycloakOIDC:
|
|
||||||
def __init__(self) -> None:
|
|
||||||
self._jwk_client: PyJWKClient | None = None
|
|
||||||
|
|
||||||
def _client(self) -> PyJWKClient:
|
|
||||||
if self._jwk_client is None:
|
|
||||||
self._jwk_client = PyJWKClient(settings.KEYCLOAK_JWKS_URL)
|
|
||||||
return self._jwk_client
|
|
||||||
|
|
||||||
def verify(self, token: str) -> dict[str, Any]:
|
|
||||||
if not settings.KEYCLOAK_ENABLED:
|
|
||||||
raise ValueError("keycloak not enabled")
|
|
||||||
|
|
||||||
signing_key = self._client().get_signing_key_from_jwt(token).key
|
|
||||||
claims = jwt.decode(
|
|
||||||
token,
|
|
||||||
signing_key,
|
|
||||||
algorithms=["RS256"],
|
|
||||||
options={"verify_aud": False},
|
|
||||||
issuer=settings.KEYCLOAK_ISSUER,
|
|
||||||
)
|
|
||||||
|
|
||||||
azp = claims.get("azp")
|
|
||||||
aud = claims.get("aud")
|
|
||||||
aud_list: list[str] = []
|
|
||||||
if isinstance(aud, str):
|
|
||||||
aud_list = [aud]
|
|
||||||
elif isinstance(aud, list):
|
|
||||||
aud_list = [a for a in aud if isinstance(a, str)]
|
|
||||||
|
|
||||||
if azp != settings.KEYCLOAK_CLIENT_ID and settings.KEYCLOAK_CLIENT_ID not in aud_list:
|
|
||||||
raise ValueError("token not issued for this client")
|
|
||||||
|
|
||||||
return claims
|
|
||||||
|
|
||||||
|
|
||||||
class KeycloakAdminClient:
|
|
||||||
def __init__(self) -> None:
|
|
||||||
self._token: str = ""
|
|
||||||
self._expires_at: float = 0.0
|
|
||||||
self._group_id_cache: dict[str, str] = {}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _safe_update_payload(full: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
payload: dict[str, Any] = {}
|
|
||||||
username = full.get("username")
|
|
||||||
if isinstance(username, str):
|
|
||||||
payload["username"] = username
|
|
||||||
enabled = full.get("enabled")
|
|
||||||
if isinstance(enabled, bool):
|
|
||||||
payload["enabled"] = enabled
|
|
||||||
email = full.get("email")
|
|
||||||
if isinstance(email, str):
|
|
||||||
payload["email"] = email
|
|
||||||
email_verified = full.get("emailVerified")
|
|
||||||
if isinstance(email_verified, bool):
|
|
||||||
payload["emailVerified"] = email_verified
|
|
||||||
first_name = full.get("firstName")
|
|
||||||
if isinstance(first_name, str):
|
|
||||||
payload["firstName"] = first_name
|
|
||||||
last_name = full.get("lastName")
|
|
||||||
if isinstance(last_name, str):
|
|
||||||
payload["lastName"] = last_name
|
|
||||||
|
|
||||||
actions = full.get("requiredActions")
|
|
||||||
if isinstance(actions, list):
|
|
||||||
payload["requiredActions"] = [a for a in actions if isinstance(a, str)]
|
|
||||||
|
|
||||||
attrs = full.get("attributes")
|
|
||||||
payload["attributes"] = attrs if isinstance(attrs, dict) else {}
|
|
||||||
return payload
|
|
||||||
|
|
||||||
def ready(self) -> bool:
|
|
||||||
return bool(settings.KEYCLOAK_ADMIN_CLIENT_ID and settings.KEYCLOAK_ADMIN_CLIENT_SECRET)
|
|
||||||
|
|
||||||
def _get_token(self) -> str:
|
|
||||||
if not self.ready():
|
|
||||||
raise RuntimeError("keycloak admin client not configured")
|
|
||||||
|
|
||||||
now = time.time()
|
|
||||||
if self._token and now < self._expires_at - 30:
|
|
||||||
return self._token
|
|
||||||
|
|
||||||
token_url = (
|
|
||||||
f"{settings.KEYCLOAK_ADMIN_URL}/realms/{settings.KEYCLOAK_ADMIN_REALM}/protocol/openid-connect/token"
|
|
||||||
)
|
|
||||||
data = {
|
|
||||||
"grant_type": "client_credentials",
|
|
||||||
"client_id": settings.KEYCLOAK_ADMIN_CLIENT_ID,
|
|
||||||
"client_secret": settings.KEYCLOAK_ADMIN_CLIENT_SECRET,
|
|
||||||
}
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.post(token_url, data=data)
|
|
||||||
resp.raise_for_status()
|
|
||||||
payload = resp.json()
|
|
||||||
token = payload.get("access_token") or ""
|
|
||||||
if not token:
|
|
||||||
raise RuntimeError("no access_token in response")
|
|
||||||
expires_in = int(payload.get("expires_in") or 60)
|
|
||||||
self._token = token
|
|
||||||
self._expires_at = now + expires_in
|
|
||||||
return token
|
|
||||||
|
|
||||||
def _headers(self) -> dict[str, str]:
|
|
||||||
return {"Authorization": f"Bearer {self._get_token()}"}
|
|
||||||
|
|
||||||
def headers(self) -> dict[str, str]:
|
|
||||||
return self._headers()
|
|
||||||
|
|
||||||
def find_user(self, username: str) -> dict[str, Any] | None:
|
|
||||||
url = f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}/users"
|
|
||||||
# Keycloak 26.x in our environment intermittently 400s on filtered user queries unless `max` is set.
|
|
||||||
# Use `max=1` and exact username match to keep admin calls reliable for portal provisioning.
|
|
||||||
params = {"username": username, "exact": "true", "max": "1"}
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.get(url, params=params, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
users = resp.json()
|
|
||||||
if not isinstance(users, list) or not users:
|
|
||||||
return None
|
|
||||||
user = users[0]
|
|
||||||
return user if isinstance(user, dict) else None
|
|
||||||
|
|
||||||
def find_user_by_email(self, email: str) -> dict[str, Any] | None:
|
|
||||||
email = (email or "").strip()
|
|
||||||
if not email:
|
|
||||||
return None
|
|
||||||
|
|
||||||
url = f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}/users"
|
|
||||||
# Match the portal's username query behavior: set a low `max` and post-filter for exact matches.
|
|
||||||
params = {"email": email, "exact": "true", "max": "2"}
|
|
||||||
email_norm = email.lower()
|
|
||||||
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.get(url, params=params, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
users = resp.json()
|
|
||||||
if not isinstance(users, list) or not users:
|
|
||||||
return None
|
|
||||||
for user in users:
|
|
||||||
if not isinstance(user, dict):
|
|
||||||
continue
|
|
||||||
candidate = user.get("email")
|
|
||||||
if isinstance(candidate, str) and candidate.strip().lower() == email_norm:
|
|
||||||
return user
|
|
||||||
return None
|
|
||||||
|
|
||||||
def get_user(self, user_id: str) -> dict[str, Any]:
|
|
||||||
url = f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}/users/{quote(user_id, safe='')}"
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.get(url, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
data = resp.json()
|
|
||||||
if not isinstance(data, dict):
|
|
||||||
raise RuntimeError("unexpected user payload")
|
|
||||||
return data
|
|
||||||
|
|
||||||
def update_user(self, user_id: str, payload: dict[str, Any]) -> None:
|
|
||||||
url = f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}/users/{quote(user_id, safe='')}"
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.put(url, headers={**self._headers(), "Content-Type": "application/json"}, json=payload)
|
|
||||||
resp.raise_for_status()
|
|
||||||
|
|
||||||
def update_user_safe(self, user_id: str, payload: dict[str, Any]) -> None:
|
|
||||||
full = self.get_user(user_id)
|
|
||||||
merged = self._safe_update_payload(full)
|
|
||||||
for key, value in payload.items():
|
|
||||||
if key == "attributes":
|
|
||||||
attrs = merged.get("attributes")
|
|
||||||
if not isinstance(attrs, dict):
|
|
||||||
attrs = {}
|
|
||||||
if isinstance(value, dict):
|
|
||||||
attrs.update(value)
|
|
||||||
merged["attributes"] = attrs
|
|
||||||
continue
|
|
||||||
merged[key] = value
|
|
||||||
self.update_user(user_id, merged)
|
|
||||||
|
|
||||||
def create_user(self, payload: dict[str, Any]) -> str:
|
|
||||||
url = f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}/users"
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.post(url, headers={**self._headers(), "Content-Type": "application/json"}, json=payload)
|
|
||||||
resp.raise_for_status()
|
|
||||||
location = resp.headers.get("Location") or ""
|
|
||||||
if location:
|
|
||||||
return location.rstrip("/").split("/")[-1]
|
|
||||||
raise RuntimeError("failed to determine created user id")
|
|
||||||
|
|
||||||
def reset_password(self, user_id: str, password: str, temporary: bool = True) -> None:
|
|
||||||
url = (
|
|
||||||
f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}"
|
|
||||||
f"/users/{quote(user_id, safe='')}/reset-password"
|
|
||||||
)
|
|
||||||
payload = {"type": "password", "value": password, "temporary": bool(temporary)}
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.put(url, headers={**self._headers(), "Content-Type": "application/json"}, json=payload)
|
|
||||||
resp.raise_for_status()
|
|
||||||
|
|
||||||
def set_user_attribute(self, username: str, key: str, value: str) -> None:
|
|
||||||
user = self.find_user(username)
|
|
||||||
if not user:
|
|
||||||
raise RuntimeError("user not found")
|
|
||||||
user_id = user.get("id") or ""
|
|
||||||
if not user_id:
|
|
||||||
raise RuntimeError("user id missing")
|
|
||||||
|
|
||||||
full = self.get_user(user_id)
|
|
||||||
payload = self._safe_update_payload(full)
|
|
||||||
attrs = payload.get("attributes")
|
|
||||||
if not isinstance(attrs, dict):
|
|
||||||
attrs = {}
|
|
||||||
attrs[key] = [value]
|
|
||||||
payload["attributes"] = attrs
|
|
||||||
# Keep profile fields intact so required actions don't re-trigger unexpectedly.
|
|
||||||
self.update_user(user_id, payload)
|
|
||||||
|
|
||||||
def get_group_id(self, group_name: str) -> str | None:
|
|
||||||
cached = self._group_id_cache.get(group_name)
|
|
||||||
if cached:
|
|
||||||
return cached
|
|
||||||
|
|
||||||
url = f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}/groups"
|
|
||||||
params = {"search": group_name}
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.get(url, params=params, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
items = resp.json()
|
|
||||||
if not isinstance(items, list):
|
|
||||||
return None
|
|
||||||
for item in items:
|
|
||||||
if not isinstance(item, dict):
|
|
||||||
continue
|
|
||||||
if item.get("name") == group_name and item.get("id"):
|
|
||||||
gid = str(item["id"])
|
|
||||||
self._group_id_cache[group_name] = gid
|
|
||||||
return gid
|
|
||||||
return None
|
|
||||||
|
|
||||||
def list_group_names(self) -> list[str]:
|
|
||||||
url = f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}/groups"
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.get(url, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
items = resp.json()
|
|
||||||
if not isinstance(items, list):
|
|
||||||
return []
|
|
||||||
|
|
||||||
names: set[str] = set()
|
|
||||||
|
|
||||||
def walk(groups: list[Any]) -> None:
|
|
||||||
for group in groups:
|
|
||||||
if not isinstance(group, dict):
|
|
||||||
continue
|
|
||||||
name = group.get("name")
|
|
||||||
if isinstance(name, str) and name:
|
|
||||||
names.add(name)
|
|
||||||
sub = group.get("subGroups")
|
|
||||||
if isinstance(sub, list) and sub:
|
|
||||||
walk(sub)
|
|
||||||
|
|
||||||
walk(items)
|
|
||||||
return sorted(names)
|
|
||||||
|
|
||||||
def list_user_groups(self, user_id: str) -> list[str]:
|
|
||||||
url = (
|
|
||||||
f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}"
|
|
||||||
f"/users/{quote(user_id, safe='')}/groups"
|
|
||||||
)
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.get(url, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
items = resp.json()
|
|
||||||
if not isinstance(items, list):
|
|
||||||
return []
|
|
||||||
names: list[str] = []
|
|
||||||
for item in items:
|
|
||||||
if not isinstance(item, dict):
|
|
||||||
continue
|
|
||||||
name = item.get("name")
|
|
||||||
if isinstance(name, str) and name:
|
|
||||||
names.append(name.lstrip("/"))
|
|
||||||
return names
|
|
||||||
|
|
||||||
def add_user_to_group(self, user_id: str, group_id: str) -> None:
|
|
||||||
url = (
|
|
||||||
f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}"
|
|
||||||
f"/users/{quote(user_id, safe='')}/groups/{quote(group_id, safe='')}"
|
|
||||||
)
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.put(url, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
|
|
||||||
def execute_actions_email(self, user_id: str, actions: list[str], redirect_uri: str) -> None:
|
|
||||||
url = (
|
|
||||||
f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}"
|
|
||||||
f"/users/{quote(user_id, safe='')}/execute-actions-email"
|
|
||||||
)
|
|
||||||
params = {"client_id": settings.KEYCLOAK_CLIENT_ID, "redirect_uri": redirect_uri}
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.put(
|
|
||||||
url,
|
|
||||||
params=params,
|
|
||||||
headers={**self._headers(), "Content-Type": "application/json"},
|
|
||||||
json=actions,
|
|
||||||
)
|
|
||||||
resp.raise_for_status()
|
|
||||||
|
|
||||||
def get_user_credentials(self, user_id: str) -> list[dict[str, Any]]:
|
|
||||||
url = (
|
|
||||||
f"{settings.KEYCLOAK_ADMIN_URL}/admin/realms/{settings.KEYCLOAK_REALM}"
|
|
||||||
f"/users/{quote(user_id, safe='')}/credentials"
|
|
||||||
)
|
|
||||||
with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.get(url, headers=self._headers())
|
|
||||||
resp.raise_for_status()
|
|
||||||
data = resp.json()
|
|
||||||
if not isinstance(data, list):
|
|
||||||
return []
|
|
||||||
return [item for item in data if isinstance(item, dict)]
|
|
||||||
|
|
||||||
|
|
||||||
_OIDC: KeycloakOIDC | None = None
|
|
||||||
_ADMIN: KeycloakAdminClient | None = None
|
|
||||||
|
|
||||||
|
|
||||||
def oidc_client() -> KeycloakOIDC:
|
|
||||||
global _OIDC
|
|
||||||
if _OIDC is None:
|
|
||||||
_OIDC = KeycloakOIDC()
|
|
||||||
return _OIDC
|
|
||||||
|
|
||||||
|
|
||||||
def admin_client() -> KeycloakAdminClient:
|
|
||||||
global _ADMIN
|
|
||||||
if _ADMIN is None:
|
|
||||||
_ADMIN = KeycloakAdminClient()
|
|
||||||
return _ADMIN
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_groups(groups: Any) -> list[str]:
|
|
||||||
if not isinstance(groups, list):
|
|
||||||
return []
|
|
||||||
cleaned: list[str] = []
|
|
||||||
for gname in groups:
|
|
||||||
if not isinstance(gname, str):
|
|
||||||
continue
|
|
||||||
cleaned.append(gname.lstrip("/"))
|
|
||||||
return [gname for gname in cleaned if gname]
|
|
||||||
|
|
||||||
|
|
||||||
def _extract_bearer_token() -> str | None:
|
|
||||||
header = request.headers.get("Authorization", "")
|
|
||||||
if not header:
|
|
||||||
return None
|
|
||||||
parts = header.split(None, 1)
|
|
||||||
if len(parts) != 2:
|
|
||||||
return None
|
|
||||||
scheme, token = parts[0].lower(), parts[1].strip()
|
|
||||||
if scheme != "bearer" or not token:
|
|
||||||
return None
|
|
||||||
return token
|
|
||||||
|
|
||||||
|
|
||||||
def require_auth(fn):
|
|
||||||
@wraps(fn)
|
|
||||||
def wrapper(*args, **kwargs):
|
|
||||||
token = _extract_bearer_token()
|
|
||||||
if not token:
|
|
||||||
return jsonify({"error": "missing bearer token"}), 401
|
|
||||||
try:
|
|
||||||
claims = oidc_client().verify(token)
|
|
||||||
except Exception:
|
|
||||||
return jsonify({"error": "invalid token"}), 401
|
|
||||||
|
|
||||||
g.keycloak_claims = claims
|
|
||||||
g.keycloak_username = claims.get("preferred_username") or ""
|
|
||||||
g.keycloak_email = claims.get("email") or ""
|
|
||||||
g.keycloak_groups = _normalize_groups(claims.get("groups"))
|
|
||||||
return fn(*args, **kwargs)
|
|
||||||
|
|
||||||
return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
def require_portal_admin() -> tuple[bool, Any]:
|
|
||||||
if not settings.KEYCLOAK_ENABLED:
|
|
||||||
return False, (jsonify({"error": "keycloak not enabled"}), 503)
|
|
||||||
|
|
||||||
username = getattr(g, "keycloak_username", "") or ""
|
|
||||||
groups = set(getattr(g, "keycloak_groups", []) or [])
|
|
||||||
|
|
||||||
if username and username in settings.PORTAL_ADMIN_USERS:
|
|
||||||
return True, None
|
|
||||||
if settings.PORTAL_ADMIN_GROUPS and groups.intersection(settings.PORTAL_ADMIN_GROUPS):
|
|
||||||
return True, None
|
|
||||||
return False, (jsonify({"error": "forbidden"}), 403)
|
|
||||||
|
|
||||||
|
|
||||||
def require_account_access() -> tuple[bool, Any]:
|
|
||||||
if not settings.KEYCLOAK_ENABLED:
|
|
||||||
return False, (jsonify({"error": "keycloak not enabled"}), 503)
|
|
||||||
if not settings.ACCOUNT_ALLOWED_GROUPS:
|
|
||||||
return True, None
|
|
||||||
groups = set(getattr(g, "keycloak_groups", []) or [])
|
|
||||||
if not groups:
|
|
||||||
return True, None
|
|
||||||
if groups.intersection(settings.ACCOUNT_ALLOWED_GROUPS):
|
|
||||||
return True, None
|
|
||||||
return False, (jsonify({"error": "forbidden"}), 403)
|
|
||||||
@@ -1,53 +0,0 @@
-from __future__ import annotations
-
-import smtplib
-from email.message import EmailMessage
-
-from . import settings
-
-
-class MailerError(RuntimeError):
-    pass
-
-
-def send_text_email(*, to_addr: str, subject: str, body: str) -> None:
-    if not to_addr:
-        raise MailerError("missing recipient")
-    if not settings.SMTP_HOST:
-        raise MailerError("smtp not configured")
-
-    message = EmailMessage()
-    message["From"] = settings.SMTP_FROM
-    message["To"] = to_addr
-    message["Subject"] = subject
-    message.set_content(body)
-
-    smtp_cls = smtplib.SMTP_SSL if settings.SMTP_USE_TLS else smtplib.SMTP
-    try:
-        with smtp_cls(settings.SMTP_HOST, settings.SMTP_PORT, timeout=settings.SMTP_TIMEOUT_SEC) as client:
-            if settings.SMTP_STARTTLS and not settings.SMTP_USE_TLS:
-                client.starttls()
-            if settings.SMTP_USERNAME:
-                client.login(settings.SMTP_USERNAME, settings.SMTP_PASSWORD)
-            client.send_message(message)
-    except Exception as exc:
-        raise MailerError("failed to send email") from exc
-
-
-def access_request_verification_body(*, request_code: str, verify_url: str) -> str:
-    return "\n".join(
-        [
-            "Atlas — confirm your email",
-            "",
-            "Someone requested an Atlas account using this email address.",
-            "",
-            f"Request code: {request_code}",
-            "",
-            "To confirm this request, open:",
-            verify_url,
-            "",
-            "If you did not request access, you can ignore this email.",
-            "",
-        ]
-    )
-
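A small usage sketch for the removed mailer helpers above. The SMTP settings come from the package's `settings` module (not shown in this comparison), the module path is assumed, and the recipient, request code, and URL are placeholders.

# Sketch: send the access-request verification email using the removed helpers.
# Requires settings.SMTP_HOST etc. to be configured; values below are placeholders.
from atlas_portal.mailer import MailerError, access_request_verification_body, send_text_email

body = access_request_verification_body(
    request_code="REQ-PLACEHOLDER",
    verify_url="https://example.invalid/verify?token=PLACEHOLDER",
)
try:
    send_text_email(to_addr="user@example.invalid", subject="Confirm your Atlas access request", body=body)
except MailerError as exc:
    print(f"email not sent: {exc}")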
@@ -1,11 +0,0 @@
-from __future__ import annotations
-
-from .db import run_migrations
-
-
-def main() -> None:
-    run_migrations()
-
-
-if __name__ == "__main__":
-    main()
@ -1,123 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import re
|
|
||||||
import time
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from . import settings
|
|
||||||
from .k8s import get_json, post_json
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_name_fragment(value: str, max_len: int = 24) -> str:
|
|
||||||
cleaned = re.sub(r"[^a-z0-9-]+", "-", (value or "").lower()).strip("-")
|
|
||||||
if not cleaned:
|
|
||||||
cleaned = "user"
|
|
||||||
return cleaned[:max_len].rstrip("-") or "user"
|
|
||||||
|
|
||||||
|
|
||||||
def _job_from_cronjob(cronjob: dict[str, Any], username: str) -> dict[str, Any]:
|
|
||||||
spec = cronjob.get("spec") if isinstance(cronjob.get("spec"), dict) else {}
|
|
||||||
jt = spec.get("jobTemplate") if isinstance(spec.get("jobTemplate"), dict) else {}
|
|
||||||
job_spec = jt.get("spec") if isinstance(jt.get("spec"), dict) else {}
|
|
||||||
|
|
||||||
now = int(time.time())
|
|
||||||
safe_user = _safe_name_fragment(username)
|
|
||||||
job_name = f"nextcloud-mail-sync-{safe_user}-{now}"
|
|
||||||
|
|
||||||
job: dict[str, Any] = {
|
|
||||||
"apiVersion": "batch/v1",
|
|
||||||
"kind": "Job",
|
|
||||||
"metadata": {
|
|
||||||
"name": job_name,
|
|
||||||
"namespace": settings.NEXTCLOUD_NAMESPACE,
|
|
||||||
"labels": {
|
|
||||||
"app": "nextcloud-mail-sync",
|
|
||||||
"atlas.bstein.dev/trigger": "portal",
|
|
||||||
"atlas.bstein.dev/username": safe_user,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"spec": job_spec,
|
|
||||||
}
|
|
||||||
|
|
||||||
if isinstance(settings.NEXTCLOUD_MAIL_SYNC_JOB_TTL_SEC, int) and settings.NEXTCLOUD_MAIL_SYNC_JOB_TTL_SEC > 0:
|
|
||||||
job.setdefault("spec", {})
|
|
||||||
job["spec"]["ttlSecondsAfterFinished"] = int(settings.NEXTCLOUD_MAIL_SYNC_JOB_TTL_SEC)
|
|
||||||
|
|
||||||
tpl = job.get("spec", {}).get("template", {})
|
|
||||||
pod_spec = tpl.get("spec") if isinstance(tpl.get("spec"), dict) else {}
|
|
||||||
containers = pod_spec.get("containers") if isinstance(pod_spec.get("containers"), list) else []
|
|
||||||
if containers and isinstance(containers[0], dict):
|
|
||||||
env = containers[0].get("env")
|
|
||||||
if not isinstance(env, list):
|
|
||||||
env = []
|
|
||||||
env = [e for e in env if not (isinstance(e, dict) and e.get("name") == "ONLY_USERNAME")]
|
|
||||||
env.append({"name": "ONLY_USERNAME", "value": username})
|
|
||||||
containers[0]["env"] = env
|
|
||||||
pod_spec["containers"] = containers
|
|
||||||
tpl["spec"] = pod_spec
|
|
||||||
job["spec"]["template"] = tpl
|
|
||||||
|
|
||||||
return job
|
|
||||||
|
|
||||||
|
|
||||||
def _job_succeeded(job: dict[str, Any]) -> bool:
|
|
||||||
status = job.get("status") if isinstance(job.get("status"), dict) else {}
|
|
||||||
if int(status.get("succeeded") or 0) > 0:
|
|
||||||
return True
|
|
||||||
conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
|
|
||||||
for cond in conditions:
|
|
||||||
if not isinstance(cond, dict):
|
|
||||||
continue
|
|
||||||
if cond.get("type") == "Complete" and cond.get("status") == "True":
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _job_failed(job: dict[str, Any]) -> bool:
|
|
||||||
status = job.get("status") if isinstance(job.get("status"), dict) else {}
|
|
||||||
if int(status.get("failed") or 0) > 0:
|
|
||||||
return True
|
|
||||||
conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
|
|
||||||
for cond in conditions:
|
|
||||||
if not isinstance(cond, dict):
|
|
||||||
continue
|
|
||||||
if cond.get("type") == "Failed" and cond.get("status") == "True":
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def trigger(username: str, wait: bool = True) -> dict[str, Any]:
|
|
||||||
username = (username or "").strip()
|
|
||||||
if not username:
|
|
||||||
raise RuntimeError("missing username")
|
|
||||||
|
|
||||||
cronjob = get_json(
|
|
||||||
f"/apis/batch/v1/namespaces/{settings.NEXTCLOUD_NAMESPACE}/cronjobs/{settings.NEXTCLOUD_MAIL_SYNC_CRONJOB}"
|
|
||||||
)
|
|
||||||
job_payload = _job_from_cronjob(cronjob, username)
|
|
||||||
created = post_json(f"/apis/batch/v1/namespaces/{settings.NEXTCLOUD_NAMESPACE}/jobs", job_payload)
|
|
||||||
|
|
||||||
job_name = (
|
|
||||||
created.get("metadata", {}).get("name")
|
|
||||||
if isinstance(created.get("metadata"), dict)
|
|
||||||
else job_payload.get("metadata", {}).get("name")
|
|
||||||
)
|
|
||||||
if not isinstance(job_name, str) or not job_name:
|
|
||||||
raise RuntimeError("job name missing")
|
|
||||||
|
|
||||||
if not wait:
|
|
||||||
return {"job": job_name, "status": "queued"}
|
|
||||||
|
|
||||||
deadline = time.time() + float(settings.NEXTCLOUD_MAIL_SYNC_WAIT_TIMEOUT_SEC)
|
|
||||||
last_state = "running"
|
|
||||||
while time.time() < deadline:
|
|
||||||
job = get_json(f"/apis/batch/v1/namespaces/{settings.NEXTCLOUD_NAMESPACE}/jobs/{job_name}")
|
|
||||||
if _job_succeeded(job):
|
|
||||||
return {"job": job_name, "status": "ok"}
|
|
||||||
if _job_failed(job):
|
|
||||||
return {"job": job_name, "status": "error"}
|
|
||||||
time.sleep(2)
|
|
||||||
last_state = "running"
|
|
||||||
|
|
||||||
return {"job": job_name, "status": last_state}
|
|
||||||
|
|
||||||
@ -1,536 +0,0 @@
from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timezone
import hashlib
import time

import httpx

from . import settings
from .db import connect
from .keycloak import admin_client
from .nextcloud_mail_sync import trigger as trigger_nextcloud_mail_sync
from .utils import random_password
from .vaultwarden import invite_user
from .firefly_user_sync import trigger as trigger_firefly_user_sync
from .wger_user_sync import trigger as trigger_wger_user_sync


MAILU_EMAIL_ATTR = "mailu_email"
MAILU_APP_PASSWORD_ATTR = "mailu_app_password"
MAILU_ENABLED_ATTR = "mailu_enabled"
WGER_PASSWORD_ATTR = "wger_password"
WGER_PASSWORD_UPDATED_ATTR = "wger_password_updated_at"
FIREFLY_PASSWORD_ATTR = "firefly_password"
FIREFLY_PASSWORD_UPDATED_ATTR = "firefly_password_updated_at"
REQUIRED_PROVISION_TASKS: tuple[str, ...] = (
    "keycloak_user",
    "keycloak_password",
    "keycloak_groups",
    "mailu_app_password",
    "mailu_sync",
    "nextcloud_mail_sync",
    "wger_account",
    "firefly_account",
    "vaultwarden_invite",
)


@dataclass(frozen=True)
class ProvisionResult:
    ok: bool
    status: str


def _advisory_lock_id(request_code: str) -> int:
    digest = hashlib.sha256(request_code.encode("utf-8")).digest()
    return int.from_bytes(digest[:8], "big", signed=True)
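# Sketch (not in the original file): the lock key is a deterministic signed
# 64-bit value derived from the request code, so concurrent workers hashing the
# same code contend for the same pg_advisory_lock slot:
#
#   >>> _advisory_lock_id("req-123") == _advisory_lock_id("req-123")
#   True
#   >>> -(2**63) <= _advisory_lock_id("req-123") < 2**63
#   True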
def _upsert_task(conn, request_code: str, task: str, status: str, detail: str | None = None) -> None:
    conn.execute(
        """
        INSERT INTO access_request_tasks (request_code, task, status, detail, updated_at)
        VALUES (%s, %s, %s, %s, NOW())
        ON CONFLICT (request_code, task)
        DO UPDATE SET status = EXCLUDED.status, detail = EXCLUDED.detail, updated_at = NOW()
        """,
        (request_code, task, status, detail),
    )


def _ensure_task_rows(conn, request_code: str, tasks: list[str]) -> None:
    if not tasks:
        return
    conn.execute(
        """
        INSERT INTO access_request_tasks (request_code, task, status, detail, updated_at)
        SELECT %s, task, 'pending', NULL, NOW()
        FROM UNNEST(%s::text[]) AS task
        ON CONFLICT (request_code, task) DO NOTHING
        """,
        (request_code, tasks),
    )
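# Assumed schema (not shown in this diff): the UPSERTs above imply a task table
# keyed by (request_code, task), roughly:
#
#   CREATE TABLE IF NOT EXISTS access_request_tasks (
#       request_code text NOT NULL,
#       task         text NOT NULL,
#       status       text NOT NULL,
#       detail       text,
#       updated_at   timestamptz NOT NULL DEFAULT NOW(),
#       PRIMARY KEY (request_code, task)
#   );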
def _safe_error_detail(exc: Exception, fallback: str) -> str:
    if isinstance(exc, RuntimeError):
        msg = str(exc).strip()
        if msg:
            return msg
    if isinstance(exc, httpx.HTTPStatusError):
        detail = f"http {exc.response.status_code}"
        try:
            payload = exc.response.json()
            msg: str | None = None
            if isinstance(payload, dict):
                raw = payload.get("errorMessage") or payload.get("error") or payload.get("message")
                if isinstance(raw, str) and raw.strip():
                    msg = raw.strip()
            elif isinstance(payload, str) and payload.strip():
                msg = payload.strip()
            if msg:
                msg = " ".join(msg.split())
                detail = f"{detail}: {msg[:200]}"
        except Exception:
            text = (exc.response.text or "").strip()
            if text:
                text = " ".join(text.split())
                detail = f"{detail}: {text[:200]}"
        return detail
    if isinstance(exc, httpx.TimeoutException):
        return "timeout"
    return fallback


def _task_statuses(conn, request_code: str) -> dict[str, str]:
    rows = conn.execute(
        "SELECT task, status FROM access_request_tasks WHERE request_code = %s",
        (request_code,),
    ).fetchall()
    output: dict[str, str] = {}
    for row in rows:
        task = row.get("task") if isinstance(row, dict) else None
        status = row.get("status") if isinstance(row, dict) else None
        if isinstance(task, str) and isinstance(status, str):
            output[task] = status
    return output


def _all_tasks_ok(conn, request_code: str, tasks: list[str]) -> bool:
    statuses = _task_statuses(conn, request_code)
    for task in tasks:
        if statuses.get(task) != "ok":
            return False
    return True


def provision_tasks_complete(conn, request_code: str) -> bool:
    return _all_tasks_ok(conn, request_code, list(REQUIRED_PROVISION_TASKS))


def provision_access_request(request_code: str) -> ProvisionResult:
    if not request_code:
        return ProvisionResult(ok=False, status="unknown")
    if not admin_client().ready():
        return ProvisionResult(ok=False, status="accounts_building")

    required_tasks = list(REQUIRED_PROVISION_TASKS)

    with connect() as conn:
        lock_id = _advisory_lock_id(request_code)
        lock_row = conn.execute(
            "SELECT pg_try_advisory_lock(%s) AS locked",
            (lock_id,),
        ).fetchone()
        if not lock_row or not lock_row.get("locked"):
            return ProvisionResult(ok=False, status="accounts_building")

        try:
            row = conn.execute(
                """
                SELECT username,
                       contact_email,
                       email_verified_at,
                       status,
                       initial_password,
                       initial_password_revealed_at,
                       provision_attempted_at
                FROM access_requests
                WHERE request_code = %s
                """,
                (request_code,),
            ).fetchone()
            if not row:
                return ProvisionResult(ok=False, status="unknown")

            username = str(row.get("username") or "")
            contact_email = str(row.get("contact_email") or "")
            email_verified_at = row.get("email_verified_at")
            status = str(row.get("status") or "")
            initial_password = row.get("initial_password")
            revealed_at = row.get("initial_password_revealed_at")
            attempted_at = row.get("provision_attempted_at")

            if status == "approved":
                conn.execute(
                    "UPDATE access_requests SET status = 'accounts_building' WHERE request_code = %s AND status = 'approved'",
                    (request_code,),
                )
                status = "accounts_building"

            if status not in {"accounts_building", "awaiting_onboarding", "ready"}:
                return ProvisionResult(ok=False, status=status or "unknown")

            _ensure_task_rows(conn, request_code, required_tasks)

            if status == "accounts_building":
                now = datetime.now(timezone.utc)
                if isinstance(attempted_at, datetime):
                    if attempted_at.tzinfo is None:
                        attempted_at = attempted_at.replace(tzinfo=timezone.utc)
                    age_sec = (now - attempted_at).total_seconds()
                    if age_sec < settings.ACCESS_REQUEST_PROVISION_RETRY_COOLDOWN_SEC:
                        return ProvisionResult(ok=False, status="accounts_building")
                conn.execute(
                    "UPDATE access_requests SET provision_attempted_at = NOW() WHERE request_code = %s",
                    (request_code,),
                )

            user_id = ""
            mailu_email = f"{username}@{settings.MAILU_DOMAIN}"

            # Task: ensure Keycloak user exists
            try:
                user = admin_client().find_user(username)
                if not user:
                    email = contact_email.strip()
                    if not email:
                        raise RuntimeError("missing verified email address")
                    existing_email_user = admin_client().find_user_by_email(email)
                    if existing_email_user and (existing_email_user.get("username") or "") != username:
                        raise RuntimeError("email is already associated with an existing Atlas account")
                    # The portal already verified the external contact email before approval,
                    # so mark it as verified in Keycloak.
                    #
                    # Do not force password rotation on first login: the onboarding flow
                    # intentionally guides users through Vaultwarden first, then triggers a
                    # Keycloak password change step later.
                    #
                    # Do not force MFA enrollment during initial login: users can opt into MFA
                    # later.
                    required_actions: list[str] = []
                    payload = {
                        "username": username,
                        "enabled": True,
                        "email": email,
                        "emailVerified": True,
                        "requiredActions": required_actions,
                        "attributes": {
                            MAILU_EMAIL_ATTR: [mailu_email],
                            MAILU_ENABLED_ATTR: ["true"],
                        },
                    }
                    created_id = admin_client().create_user(payload)
                    user = admin_client().get_user(created_id)
                user_id = str((user or {}).get("id") or "")
                if not user_id:
                    raise RuntimeError("user id missing")

                try:
                    full = admin_client().get_user(user_id)
                    attrs = full.get("attributes") or {}
                    actions = full.get("requiredActions")
                    if isinstance(actions, list) and "CONFIGURE_TOTP" in actions:
                        # Backfill earlier accounts created when we forced MFA enrollment.
                        new_actions = [a for a in actions if a != "CONFIGURE_TOTP"]
                        admin_client().update_user_safe(user_id, {"requiredActions": new_actions})
                    mailu_from_attr: str | None = None
                    if isinstance(attrs, dict):
                        raw_mailu = attrs.get(MAILU_EMAIL_ATTR)
                        if isinstance(raw_mailu, list):
                            for item in raw_mailu:
                                if isinstance(item, str) and item.strip():
                                    mailu_from_attr = item.strip()
                                    break
                        elif isinstance(raw_mailu, str) and raw_mailu.strip():
                            mailu_from_attr = raw_mailu.strip()

                    if mailu_from_attr:
                        mailu_email = mailu_from_attr
                    else:
                        mailu_email = f"{username}@{settings.MAILU_DOMAIN}"
                        admin_client().set_user_attribute(username, MAILU_EMAIL_ATTR, mailu_email)
                    try:
                        raw_enabled = attrs.get(MAILU_ENABLED_ATTR) if isinstance(attrs, dict) else None
                        enabled_value = ""
                        if isinstance(raw_enabled, list) and raw_enabled:
                            enabled_value = str(raw_enabled[0]).strip()
                        elif isinstance(raw_enabled, str):
                            enabled_value = raw_enabled.strip()
                        if enabled_value.lower() not in {"1", "true", "yes", "y", "on"}:
                            admin_client().set_user_attribute(username, MAILU_ENABLED_ATTR, "true")
                    except Exception:
                        pass
                except Exception:
                    mailu_email = f"{username}@{settings.MAILU_DOMAIN}"

                _upsert_task(conn, request_code, "keycloak_user", "ok", None)
            except Exception as exc:
                _upsert_task(conn, request_code, "keycloak_user", "error", _safe_error_detail(exc, "failed to ensure user"))

            if not user_id:
                return ProvisionResult(ok=False, status="accounts_building")

            # Task: set initial password and store it for "show once" onboarding.
            try:
                if not user_id:
                    raise RuntimeError("missing user id")

                should_reset = status == "accounts_building" and revealed_at is None
                password_value: str | None = None

                if should_reset:
                    if isinstance(initial_password, str) and initial_password:
                        password_value = initial_password
                    elif initial_password is None:
                        password_value = random_password(20)
                        conn.execute(
                            """
                            UPDATE access_requests
                            SET initial_password = %s
                            WHERE request_code = %s AND initial_password IS NULL
                            """,
                            (password_value, request_code),
                        )
                        initial_password = password_value

                    if password_value:
                        admin_client().reset_password(user_id, password_value, temporary=False)

                if isinstance(initial_password, str) and initial_password:
                    _upsert_task(conn, request_code, "keycloak_password", "ok", None)
                elif revealed_at is not None:
                    _upsert_task(conn, request_code, "keycloak_password", "ok", "initial password already revealed")
                else:
                    raise RuntimeError("initial password missing")
            except Exception as exc:
                _upsert_task(conn, request_code, "keycloak_password", "error", _safe_error_detail(exc, "failed to set password"))

            # Task: group membership (default dev)
            try:
                if not user_id:
                    raise RuntimeError("missing user id")
                groups = settings.DEFAULT_USER_GROUPS or ["dev"]
                for group_name in groups:
                    gid = admin_client().get_group_id(group_name)
                    if not gid:
                        raise RuntimeError("group missing")
                    admin_client().add_user_to_group(user_id, gid)
                _upsert_task(conn, request_code, "keycloak_groups", "ok", None)
            except Exception as exc:
                _upsert_task(conn, request_code, "keycloak_groups", "error", _safe_error_detail(exc, "failed to add groups"))

            # Task: ensure mailu_app_password attribute exists
            try:
                if not user_id:
                    raise RuntimeError("missing user id")
                full = admin_client().get_user(user_id)
                attrs = full.get("attributes") or {}
                existing = None
                if isinstance(attrs, dict):
                    raw = attrs.get(MAILU_APP_PASSWORD_ATTR)
                    if isinstance(raw, list) and raw and isinstance(raw[0], str):
                        existing = raw[0]
                    elif isinstance(raw, str) and raw:
                        existing = raw
                if not existing:
                    admin_client().set_user_attribute(username, MAILU_APP_PASSWORD_ATTR, random_password())
                _upsert_task(conn, request_code, "mailu_app_password", "ok", None)
            except Exception as exc:
                _upsert_task(conn, request_code, "mailu_app_password", "error", _safe_error_detail(exc, "failed to set mail password"))

            # Task: trigger Mailu sync if configured
            try:
                if not settings.MAILU_SYNC_URL:
                    _upsert_task(conn, request_code, "mailu_sync", "ok", "sync disabled")
                else:
                    with httpx.Client(timeout=30) as client:
                        resp = client.post(
                            settings.MAILU_SYNC_URL,
                            json={"ts": int(time.time()), "wait": True, "reason": "portal_access_approve"},
                        )
                    if resp.status_code != 200:
                        raise RuntimeError("mailu sync failed")
                    _upsert_task(conn, request_code, "mailu_sync", "ok", None)
            except Exception as exc:
                _upsert_task(conn, request_code, "mailu_sync", "error", _safe_error_detail(exc, "failed to sync mailu"))

            # Task: trigger Nextcloud mail sync if configured
            try:
                if not settings.NEXTCLOUD_NAMESPACE or not settings.NEXTCLOUD_MAIL_SYNC_CRONJOB:
                    _upsert_task(conn, request_code, "nextcloud_mail_sync", "ok", "sync disabled")
                else:
                    result = trigger_nextcloud_mail_sync(username, wait=True)
                    if isinstance(result, dict) and result.get("status") == "ok":
                        _upsert_task(conn, request_code, "nextcloud_mail_sync", "ok", None)
                    else:
                        status_val = result.get("status") if isinstance(result, dict) else "error"
                        _upsert_task(conn, request_code, "nextcloud_mail_sync", "error", str(status_val))
            except Exception as exc:
                _upsert_task(conn, request_code, "nextcloud_mail_sync", "error", _safe_error_detail(exc, "failed to sync nextcloud"))

            # Task: ensure wger account exists
            try:
                if not user_id:
                    raise RuntimeError("missing user id")

                full = admin_client().get_user(user_id)
                attrs = full.get("attributes") or {}
                wger_password = ""
                wger_password_updated_at = ""
                if isinstance(attrs, dict):
                    raw_pw = attrs.get(WGER_PASSWORD_ATTR)
                    if isinstance(raw_pw, list) and raw_pw and isinstance(raw_pw[0], str):
                        wger_password = raw_pw[0]
                    elif isinstance(raw_pw, str) and raw_pw:
                        wger_password = raw_pw
                    raw_updated = attrs.get(WGER_PASSWORD_UPDATED_ATTR)
                    if isinstance(raw_updated, list) and raw_updated and isinstance(raw_updated[0], str):
                        wger_password_updated_at = raw_updated[0]
                    elif isinstance(raw_updated, str) and raw_updated:
                        wger_password_updated_at = raw_updated

                if not wger_password:
                    wger_password = random_password(20)
                    admin_client().set_user_attribute(username, WGER_PASSWORD_ATTR, wger_password)

                wger_email = mailu_email or f"{username}@{settings.MAILU_DOMAIN}"

                if not wger_password_updated_at:
                    result = trigger_wger_user_sync(username, wger_email, wger_password, wait=True)
                    status_val = result.get("status") if isinstance(result, dict) else "error"
                    if status_val != "ok":
                        raise RuntimeError(f"wger sync {status_val}")

                    now_iso = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
                    admin_client().set_user_attribute(username, WGER_PASSWORD_UPDATED_ATTR, now_iso)

                _upsert_task(conn, request_code, "wger_account", "ok", None)
            except Exception as exc:
                _upsert_task(conn, request_code, "wger_account", "error", _safe_error_detail(exc, "failed to provision wger"))

            # Task: ensure firefly account exists
            try:
                if not user_id:
                    raise RuntimeError("missing user id")

                full = admin_client().get_user(user_id)
                attrs = full.get("attributes") or {}
                firefly_password = ""
                firefly_password_updated_at = ""
                if isinstance(attrs, dict):
                    raw_pw = attrs.get(FIREFLY_PASSWORD_ATTR)
                    if isinstance(raw_pw, list) and raw_pw and isinstance(raw_pw[0], str):
                        firefly_password = raw_pw[0]
                    elif isinstance(raw_pw, str) and raw_pw:
                        firefly_password = raw_pw
                    raw_updated = attrs.get(FIREFLY_PASSWORD_UPDATED_ATTR)
                    if isinstance(raw_updated, list) and raw_updated and isinstance(raw_updated[0], str):
                        firefly_password_updated_at = raw_updated[0]
                    elif isinstance(raw_updated, str) and raw_updated:
                        firefly_password_updated_at = raw_updated

                if not firefly_password:
                    firefly_password = random_password(24)
                    admin_client().set_user_attribute(username, FIREFLY_PASSWORD_ATTR, firefly_password)

                firefly_email = mailu_email or f"{username}@{settings.MAILU_DOMAIN}"

                if not firefly_password_updated_at:
                    result = trigger_firefly_user_sync(username, firefly_email, firefly_password, wait=True)
                    status_val = result.get("status") if isinstance(result, dict) else "error"
                    if status_val != "ok":
                        raise RuntimeError(f"firefly sync {status_val}")

                    now_iso = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
                    admin_client().set_user_attribute(username, FIREFLY_PASSWORD_UPDATED_ATTR, now_iso)

                _upsert_task(conn, request_code, "firefly_account", "ok", None)
            except Exception as exc:
                _upsert_task(
                    conn,
                    request_code,
                    "firefly_account",
                    "error",
                    _safe_error_detail(exc, "failed to provision firefly"),
                )

            # Task: ensure Vaultwarden account exists (invite flow)
            try:
                if not user_id:
                    raise RuntimeError("missing user id")
                vaultwarden_email = mailu_email or f"{username}@{settings.MAILU_DOMAIN}"
                try:
                    full = admin_client().get_user(user_id)
                    attrs = full.get("attributes") or {}
                    override = None
                    if isinstance(attrs, dict):
                        raw = attrs.get("vaultwarden_email")
                        if isinstance(raw, list):
                            for item in raw:
                                if isinstance(item, str) and item.strip():
                                    override = item.strip()
                                    break
                        elif isinstance(raw, str) and raw.strip():
                            override = raw.strip()
                    if override:
                        vaultwarden_email = override
                except Exception:
                    pass

                result = invite_user(vaultwarden_email)
                if not result.ok and result.status == "error":
                    fallback_email = contact_email.strip()
                    if fallback_email and fallback_email != vaultwarden_email:
                        fallback_result = invite_user(fallback_email)
                        if fallback_result.ok:
                            vaultwarden_email = fallback_email
                            result = fallback_result
                if result.ok:
                    _upsert_task(conn, request_code, "vaultwarden_invite", "ok", result.status)
                else:
                    _upsert_task(conn, request_code, "vaultwarden_invite", "error", result.detail or result.status)

                # Persist Vaultwarden association/status on the Keycloak user so the portal can display it quickly.
                try:
                    now_iso = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
                    admin_client().set_user_attribute(username, "vaultwarden_email", vaultwarden_email)
                    admin_client().set_user_attribute(username, "vaultwarden_status", result.status)
                    admin_client().set_user_attribute(username, "vaultwarden_synced_at", now_iso)
                except Exception:
                    pass
            except Exception as exc:
                _upsert_task(
                    conn,
                    request_code,
                    "vaultwarden_invite",
                    "error",
                    _safe_error_detail(exc, "failed to provision vaultwarden"),
                )

            if _all_tasks_ok(conn, request_code, required_tasks):
                conn.execute(
                    """
                    UPDATE access_requests
                    SET status = 'awaiting_onboarding'
                    WHERE request_code = %s AND status = 'accounts_building'
                    """,
                    (request_code,),
                )
                return ProvisionResult(ok=True, status="awaiting_onboarding")

            return ProvisionResult(ok=False, status="accounts_building")
        finally:
            conn.execute("SELECT pg_advisory_unlock(%s)", (lock_id,))
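# Sketch (not in the original file): callers treat ProvisionResult as a simple
# ok/status pair; the admin approval route later in this diff invokes it
# best-effort, e.g.:
#
#   result = provision_access_request(request_code)
#   if not result.ok and result.status == "accounts_building":
#       # some tasks failed or another worker holds the advisory lock; retry later
#       ...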
@ -1,21 +0,0 @@
from __future__ import annotations

import time

from . import settings

_RATE_BUCKETS: dict[str, dict[str, list[float]]] = {}


def rate_limit_allow(ip: str, *, key: str, limit: int, window_sec: int) -> bool:
    if limit <= 0:
        return True
    now = time.time()
    window_start = now - window_sec
    buckets_by_ip = _RATE_BUCKETS.setdefault(key, {})
    bucket = buckets_by_ip.setdefault(ip, [])
    bucket[:] = [t for t in bucket if t >= window_start]
    if len(bucket) >= limit:
        return False
    bucket.append(now)
    return True
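# Sketch (not in the original file): a sliding-window check with one in-memory
# bucket per (key, ip). For example, allowing 5 calls per hour per client IP:
#
#   if not rate_limit_allow(request.remote_addr or "", key="access_request", limit=5, window_sec=3600):
#       return jsonify({"error": "too many requests"}), 429
#
# The key name and limits above are illustrative, not values from this repo.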
@ -1,4 +0,0 @@
from __future__ import annotations

__all__ = []
@ -1,607 +0,0 @@
from __future__ import annotations

import socket
import time
from urllib.parse import quote
from typing import Any

import httpx
from flask import jsonify, g, request

from .. import settings
from .. import ariadne_client
from ..db import connect
from ..keycloak import admin_client, require_auth, require_account_access
from ..nextcloud_mail_sync import trigger as trigger_nextcloud_mail_sync
from ..utils import random_password
from ..firefly_user_sync import trigger as trigger_firefly_user_sync
from ..wger_user_sync import trigger as trigger_wger_user_sync


def _tcp_check(host: str, port: int, timeout_sec: float) -> bool:
    if not host or port <= 0:
        return False
    try:
        with socket.create_connection((host, port), timeout=timeout_sec):
            return True
    except OSError:
        return False
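# Sketch (not in the original file): _tcp_check is used below as a coarse
# reachability probe for the Jellyfin LDAP backend, e.g.:
#
#   reachable = _tcp_check(settings.JELLYFIN_LDAP_HOST, settings.JELLYFIN_LDAP_PORT, 1.0)
#
# The 1.0s timeout here is illustrative; the route passes
# settings.JELLYFIN_LDAP_CHECK_TIMEOUT_SEC.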
def register(app) -> None:
    @app.route("/api/account/overview", methods=["GET"])
    @require_auth
    def account_overview() -> Any:
        ok, resp = require_account_access()
        if not ok:
            return resp

        username = g.keycloak_username
        keycloak_email = g.keycloak_email or ""
        mailu_email = ""
        mailu_app_password = ""
        mailu_status = "ready"
        nextcloud_mail_status = "unknown"
        nextcloud_mail_primary_email = ""
        nextcloud_mail_account_count = ""
        nextcloud_mail_synced_at = ""
        wger_status = "ready"
        wger_password = ""
        wger_password_updated_at = ""
        firefly_status = "ready"
        firefly_password = ""
        firefly_password_updated_at = ""
        vaultwarden_email = ""
        vaultwarden_status = ""
        vaultwarden_synced_at = ""
        vaultwarden_master_set_at = ""
        jellyfin_status = "ready"
        jellyfin_sync_status = "unknown"
        jellyfin_sync_detail = ""
        jellyfin_user_is_ldap = False
        onboarding_url = ""

        if not admin_client().ready():
            mailu_status = "server not configured"
            wger_status = "server not configured"
            firefly_status = "server not configured"
            jellyfin_status = "server not configured"
            jellyfin_sync_status = "unknown"
            jellyfin_sync_detail = "keycloak admin not configured"
        elif username:
            try:
                user = admin_client().find_user(username) or {}
                if isinstance(user, dict):
                    jellyfin_user_is_ldap = bool(user.get("federationLink"))
                    if not keycloak_email:
                        keycloak_email = str(user.get("email") or "")

                attrs = user.get("attributes") if isinstance(user, dict) else None
                if isinstance(attrs, dict):
                    raw_mailu = attrs.get("mailu_email")
                    if isinstance(raw_mailu, list) and raw_mailu:
                        mailu_email = str(raw_mailu[0])
                    elif isinstance(raw_mailu, str) and raw_mailu:
                        mailu_email = raw_mailu
                    raw_pw = attrs.get("mailu_app_password")
                    if isinstance(raw_pw, list) and raw_pw:
                        mailu_app_password = str(raw_pw[0])
                    elif isinstance(raw_pw, str) and raw_pw:
                        mailu_app_password = raw_pw
                    raw_primary = attrs.get("nextcloud_mail_primary_email")
                    if isinstance(raw_primary, list) and raw_primary:
                        nextcloud_mail_primary_email = str(raw_primary[0])
                    elif isinstance(raw_primary, str) and raw_primary:
                        nextcloud_mail_primary_email = raw_primary
                    raw_count = attrs.get("nextcloud_mail_account_count")
                    if isinstance(raw_count, list) and raw_count:
                        nextcloud_mail_account_count = str(raw_count[0])
                    elif isinstance(raw_count, str) and raw_count:
                        nextcloud_mail_account_count = raw_count
                    raw_synced = attrs.get("nextcloud_mail_synced_at")
                    if isinstance(raw_synced, list) and raw_synced:
                        nextcloud_mail_synced_at = str(raw_synced[0])
                    elif isinstance(raw_synced, str) and raw_synced:
                        nextcloud_mail_synced_at = raw_synced
                    raw_wger_password = attrs.get("wger_password")
                    if isinstance(raw_wger_password, list) and raw_wger_password:
                        wger_password = str(raw_wger_password[0])
                    elif isinstance(raw_wger_password, str) and raw_wger_password:
                        wger_password = raw_wger_password
                    raw_wger_updated = attrs.get("wger_password_updated_at")
                    if isinstance(raw_wger_updated, list) and raw_wger_updated:
                        wger_password_updated_at = str(raw_wger_updated[0])
                    elif isinstance(raw_wger_updated, str) and raw_wger_updated:
                        wger_password_updated_at = raw_wger_updated
                    raw_firefly_password = attrs.get("firefly_password")
                    if isinstance(raw_firefly_password, list) and raw_firefly_password:
                        firefly_password = str(raw_firefly_password[0])
                    elif isinstance(raw_firefly_password, str) and raw_firefly_password:
                        firefly_password = raw_firefly_password
                    raw_firefly_updated = attrs.get("firefly_password_updated_at")
                    if isinstance(raw_firefly_updated, list) and raw_firefly_updated:
                        firefly_password_updated_at = str(raw_firefly_updated[0])
                    elif isinstance(raw_firefly_updated, str) and raw_firefly_updated:
                        firefly_password_updated_at = raw_firefly_updated
                    raw_vw_email = attrs.get("vaultwarden_email")
                    if isinstance(raw_vw_email, list) and raw_vw_email:
                        vaultwarden_email = str(raw_vw_email[0])
                    elif isinstance(raw_vw_email, str) and raw_vw_email:
                        vaultwarden_email = raw_vw_email
                    raw_vw_status = attrs.get("vaultwarden_status")
                    if isinstance(raw_vw_status, list) and raw_vw_status:
                        vaultwarden_status = str(raw_vw_status[0])
                    elif isinstance(raw_vw_status, str) and raw_vw_status:
                        vaultwarden_status = raw_vw_status
                    raw_vw_synced = attrs.get("vaultwarden_synced_at")
                    if isinstance(raw_vw_synced, list) and raw_vw_synced:
                        vaultwarden_synced_at = str(raw_vw_synced[0])
                    elif isinstance(raw_vw_synced, str) and raw_vw_synced:
                        vaultwarden_synced_at = raw_vw_synced
                    raw_vw_master = attrs.get("vaultwarden_master_password_set_at")
                    if isinstance(raw_vw_master, list) and raw_vw_master:
                        vaultwarden_master_set_at = str(raw_vw_master[0])
                    elif isinstance(raw_vw_master, str) and raw_vw_master:
                        vaultwarden_master_set_at = raw_vw_master

                user_id = user.get("id") if isinstance(user, dict) else None
                if user_id and (
                    not keycloak_email
                    or not mailu_email
                    or not mailu_app_password
                    or not wger_password
                    or not wger_password_updated_at
                    or not firefly_password
                    or not firefly_password_updated_at
                    or not vaultwarden_email
                    or not vaultwarden_status
                    or not vaultwarden_synced_at
                    or not vaultwarden_master_set_at
                ):
                    full = admin_client().get_user(str(user_id))
                    if not keycloak_email:
                        keycloak_email = str(full.get("email") or "")
                    attrs = full.get("attributes") or {}
                    if isinstance(attrs, dict):
                        if not mailu_email:
                            raw_mailu = attrs.get("mailu_email")
                            if isinstance(raw_mailu, list) and raw_mailu and isinstance(raw_mailu[0], str):
                                mailu_email = raw_mailu[0]
                            elif isinstance(raw_mailu, str) and raw_mailu:
                                mailu_email = raw_mailu

                        if not mailu_app_password:
                            raw_pw = attrs.get("mailu_app_password")
                            if isinstance(raw_pw, list) and raw_pw:
                                mailu_app_password = str(raw_pw[0])
                            elif isinstance(raw_pw, str) and raw_pw:
                                mailu_app_password = raw_pw
                        if not nextcloud_mail_primary_email:
                            raw_primary = attrs.get("nextcloud_mail_primary_email")
                            if isinstance(raw_primary, list) and raw_primary:
                                nextcloud_mail_primary_email = str(raw_primary[0])
                            elif isinstance(raw_primary, str) and raw_primary:
                                nextcloud_mail_primary_email = raw_primary
                        if not nextcloud_mail_account_count:
                            raw_count = attrs.get("nextcloud_mail_account_count")
                            if isinstance(raw_count, list) and raw_count:
                                nextcloud_mail_account_count = str(raw_count[0])
                            elif isinstance(raw_count, str) and raw_count:
                                nextcloud_mail_account_count = raw_count
                        if not nextcloud_mail_synced_at:
                            raw_synced = attrs.get("nextcloud_mail_synced_at")
                            if isinstance(raw_synced, list) and raw_synced:
                                nextcloud_mail_synced_at = str(raw_synced[0])
                            elif isinstance(raw_synced, str) and raw_synced:
                                nextcloud_mail_synced_at = raw_synced
                        if not wger_password:
                            raw_wger_password = attrs.get("wger_password")
                            if isinstance(raw_wger_password, list) and raw_wger_password:
                                wger_password = str(raw_wger_password[0])
                            elif isinstance(raw_wger_password, str) and raw_wger_password:
                                wger_password = raw_wger_password
                        if not wger_password_updated_at:
                            raw_wger_updated = attrs.get("wger_password_updated_at")
                            if isinstance(raw_wger_updated, list) and raw_wger_updated:
                                wger_password_updated_at = str(raw_wger_updated[0])
                            elif isinstance(raw_wger_updated, str) and raw_wger_updated:
                                wger_password_updated_at = raw_wger_updated
                        if not firefly_password:
                            raw_firefly_password = attrs.get("firefly_password")
                            if isinstance(raw_firefly_password, list) and raw_firefly_password:
                                firefly_password = str(raw_firefly_password[0])
                            elif isinstance(raw_firefly_password, str) and raw_firefly_password:
                                firefly_password = raw_firefly_password
                        if not firefly_password_updated_at:
                            raw_firefly_updated = attrs.get("firefly_password_updated_at")
                            if isinstance(raw_firefly_updated, list) and raw_firefly_updated:
                                firefly_password_updated_at = str(raw_firefly_updated[0])
                            elif isinstance(raw_firefly_updated, str) and raw_firefly_updated:
                                firefly_password_updated_at = raw_firefly_updated
                        if not vaultwarden_email:
                            raw_vw_email = attrs.get("vaultwarden_email")
                            if isinstance(raw_vw_email, list) and raw_vw_email:
                                vaultwarden_email = str(raw_vw_email[0])
                            elif isinstance(raw_vw_email, str) and raw_vw_email:
                                vaultwarden_email = raw_vw_email
                        if not vaultwarden_status:
                            raw_vw_status = attrs.get("vaultwarden_status")
                            if isinstance(raw_vw_status, list) and raw_vw_status:
                                vaultwarden_status = str(raw_vw_status[0])
                            elif isinstance(raw_vw_status, str) and raw_vw_status:
                                vaultwarden_status = raw_vw_status
                        if not vaultwarden_synced_at:
                            raw_vw_synced = attrs.get("vaultwarden_synced_at")
                            if isinstance(raw_vw_synced, list) and raw_vw_synced:
                                vaultwarden_synced_at = str(raw_vw_synced[0])
                            elif isinstance(raw_vw_synced, str) and raw_vw_synced:
                                vaultwarden_synced_at = raw_vw_synced
                        if not vaultwarden_master_set_at:
                            raw_vw_master = attrs.get("vaultwarden_master_password_set_at")
                            if isinstance(raw_vw_master, list) and raw_vw_master:
                                vaultwarden_master_set_at = str(raw_vw_master[0])
                            elif isinstance(raw_vw_master, str) and raw_vw_master:
                                vaultwarden_master_set_at = raw_vw_master

                if vaultwarden_master_set_at:
                    vaultwarden_status = "ready"
            except Exception:
                mailu_status = "unavailable"
                nextcloud_mail_status = "unavailable"
                wger_status = "unavailable"
                firefly_status = "unavailable"
                vaultwarden_status = "unavailable"
                jellyfin_status = "unavailable"
                jellyfin_sync_status = "unknown"
                jellyfin_sync_detail = "unavailable"

        if (
            username
            and not vaultwarden_master_set_at
            and vaultwarden_status in {"", "invited", "needs provisioning"}
            and settings.PORTAL_DATABASE_URL
        ):
            try:
                with connect() as conn:
                    row = conn.execute(
                        """
                        SELECT request_code
                        FROM access_requests
                        WHERE username = %s AND status IN ('awaiting_onboarding', 'ready')
                        ORDER BY created_at DESC
                        LIMIT 1
                        """,
                        (username,),
                    ).fetchone()
                    if not row:
                        row = conn.execute(
                            """
                            SELECT request_code
                            FROM access_requests
                            WHERE username = %s
                            ORDER BY created_at DESC
                            LIMIT 1
                            """,
                            (username,),
                        ).fetchone()
                    if row and isinstance(row, dict):
                        request_code = str(row.get("request_code") or "").strip()
                        if request_code:
                            step = conn.execute(
                                """
                                SELECT 1
                                FROM access_request_onboarding_steps
                                WHERE request_code = %s AND step = %s
                                LIMIT 1
                                """,
                                (request_code, "vaultwarden_master_password"),
                            ).fetchone()
                            if step:
                                vaultwarden_master_set_at = "confirmed"
                                vaultwarden_status = "ready"
            except Exception:
                pass

        mailu_username = mailu_email or (f"{username}@{settings.MAILU_DOMAIN}" if username else "")
        firefly_username = mailu_username
        vaultwarden_username = vaultwarden_email or mailu_username

        if not mailu_app_password and mailu_status == "ready":
            mailu_status = "needs app password"

        if not wger_password and wger_status == "ready":
            wger_status = "needs provisioning"

        if not firefly_password and firefly_status == "ready":
            firefly_status = "needs provisioning"

        if nextcloud_mail_status == "unknown":
            try:
                count_val = int(nextcloud_mail_account_count) if nextcloud_mail_account_count else 0
            except ValueError:
                count_val = 0
            if count_val > 0:
                nextcloud_mail_status = "ready"
            else:
                nextcloud_mail_status = "needs sync"

        if jellyfin_status == "ready":
            ldap_reachable = _tcp_check(
                settings.JELLYFIN_LDAP_HOST,
                settings.JELLYFIN_LDAP_PORT,
                settings.JELLYFIN_LDAP_CHECK_TIMEOUT_SEC,
            )
            if not ldap_reachable:
                jellyfin_sync_status = "degraded"
                jellyfin_sync_detail = "LDAP unreachable"
            elif not jellyfin_user_is_ldap:
                jellyfin_sync_status = "degraded"
                jellyfin_sync_detail = "Keycloak user is not LDAP-backed"
            else:
                jellyfin_sync_status = "ok"
                jellyfin_sync_detail = "LDAP-backed (Keycloak is source of truth)"

        if not vaultwarden_status:
            vaultwarden_status = "needs provisioning"

        if settings.PORTAL_DATABASE_URL and username:
            request_code = ""
            try:
                with connect() as conn:
                    row = conn.execute(
                        "SELECT request_code FROM access_requests WHERE username = %s ORDER BY created_at DESC LIMIT 1",
                        (username,),
                    ).fetchone()
                    if not row and keycloak_email:
                        row = conn.execute(
                            "SELECT request_code FROM access_requests WHERE contact_email = %s ORDER BY created_at DESC LIMIT 1",
                            (keycloak_email,),
                        ).fetchone()
                    if row and isinstance(row, dict):
                        request_code = str(row.get("request_code") or "").strip()
            except Exception:
                request_code = ""
            if request_code:
                onboarding_url = f"{settings.PORTAL_PUBLIC_BASE_URL}/onboarding?code={quote(request_code)}"

        return jsonify(
            {
                "user": {"username": username, "email": keycloak_email, "groups": g.keycloak_groups},
                "onboarding_url": onboarding_url,
                "mailu": {"status": mailu_status, "username": mailu_username, "app_password": mailu_app_password},
                "nextcloud_mail": {
                    "status": nextcloud_mail_status,
                    "primary_email": nextcloud_mail_primary_email,
                    "account_count": nextcloud_mail_account_count,
                    "synced_at": nextcloud_mail_synced_at,
                },
                "wger": {
                    "status": wger_status,
                    "username": username,
                    "password": wger_password,
                    "password_updated_at": wger_password_updated_at,
                },
                "firefly": {
                    "status": firefly_status,
                    "username": firefly_username,
                    "password": firefly_password,
                    "password_updated_at": firefly_password_updated_at,
                },
                "vaultwarden": {
                    "status": vaultwarden_status,
                    "username": vaultwarden_username,
                    "synced_at": vaultwarden_synced_at,
                },
                "jellyfin": {
                    "status": jellyfin_status,
                    "username": username,
                    "sync_status": jellyfin_sync_status,
                    "sync_detail": jellyfin_sync_detail,
                },
            }
        )

    @app.route("/api/account/mailu/rotate", methods=["POST"])
    @require_auth
    def account_mailu_rotate() -> Any:
        ok, resp = require_account_access()
        if not ok:
            return resp
        if ariadne_client.enabled():
            return ariadne_client.proxy("POST", "/api/account/mailu/rotate")
        if not admin_client().ready():
            return jsonify({"error": "server not configured"}), 503

        username = g.keycloak_username
        if not username:
            return jsonify({"error": "missing username"}), 400

        password = random_password()
        try:
            admin_client().set_user_attribute(username, "mailu_app_password", password)
        except Exception:
            return jsonify({"error": "failed to update mail password"}), 502

        sync_enabled = bool(settings.MAILU_SYNC_URL)
        sync_ok = False
        sync_error = ""
        if sync_enabled:
            try:
                with httpx.Client(timeout=30) as client:
                    resp = client.post(
                        settings.MAILU_SYNC_URL,
                        json={"ts": int(time.time()), "wait": True, "reason": "portal_mailu_rotate"},
                    )
                    sync_ok = resp.status_code == 200
                    if not sync_ok:
                        sync_error = f"sync status {resp.status_code}"
            except Exception:
                sync_error = "sync request failed"

        nextcloud_sync: dict[str, Any] = {"status": "skipped"}
        try:
            nextcloud_sync = trigger_nextcloud_mail_sync(username, wait=True)
        except Exception:
            nextcloud_sync = {"status": "error"}

        return jsonify(
            {
                "password": password,
                "sync_enabled": sync_enabled,
                "sync_ok": sync_ok,
                "sync_error": sync_error,
                "nextcloud_sync": nextcloud_sync,
            }
        )

    @app.route("/api/account/wger/reset", methods=["POST"])
    @require_auth
    def account_wger_reset() -> Any:
        ok, resp = require_account_access()
        if not ok:
            return resp
        if ariadne_client.enabled():
            return ariadne_client.proxy("POST", "/api/account/wger/reset")
        if not admin_client().ready():
            return jsonify({"error": "server not configured"}), 503

        username = g.keycloak_username
        if not username:
            return jsonify({"error": "missing username"}), 400

        keycloak_email = g.keycloak_email or ""
        mailu_email = ""
        try:
            user = admin_client().find_user(username) or {}
            attrs = user.get("attributes") if isinstance(user, dict) else None
            if isinstance(attrs, dict):
                raw_mailu = attrs.get("mailu_email")
                if isinstance(raw_mailu, list) and raw_mailu:
                    mailu_email = str(raw_mailu[0])
                elif isinstance(raw_mailu, str) and raw_mailu:
                    mailu_email = raw_mailu
        except Exception:
            pass

        email = mailu_email or f"{username}@{settings.MAILU_DOMAIN}"
        password = random_password()

        try:
            result = trigger_wger_user_sync(username, email, password, wait=True)
            status_val = result.get("status") if isinstance(result, dict) else "error"
            if status_val != "ok":
                raise RuntimeError(f"wger sync {status_val}")
        except Exception as exc:
            message = str(exc).strip() or "wger sync failed"
            return jsonify({"error": message}), 502

        try:
            admin_client().set_user_attribute(username, "wger_password", password)
            admin_client().set_user_attribute(
                username,
                "wger_password_updated_at",
                time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
            )
        except Exception:
            return jsonify({"error": "failed to store wger password"}), 502

        return jsonify({"status": "ok", "password": password})

    @app.route("/api/account/wger/rotation/check", methods=["POST"])
    @require_auth
    def account_wger_rotation_check() -> Any:
        ok, resp = require_account_access()
        if not ok:
            return resp
        if ariadne_client.enabled():
            return ariadne_client.proxy("POST", "/api/account/wger/rotation/check")
        return jsonify({"error": "server not configured"}), 503

    @app.route("/api/account/firefly/reset", methods=["POST"])
    @require_auth
    def account_firefly_reset() -> Any:
        ok, resp = require_account_access()
        if not ok:
            return resp
        if ariadne_client.enabled():
            return ariadne_client.proxy("POST", "/api/account/firefly/reset")
        if not admin_client().ready():
            return jsonify({"error": "server not configured"}), 503

        username = g.keycloak_username
        if not username:
            return jsonify({"error": "missing username"}), 400

        keycloak_email = g.keycloak_email or ""
        mailu_email = ""
        try:
            user = admin_client().find_user(username) or {}
            attrs = user.get("attributes") if isinstance(user, dict) else None
            if isinstance(attrs, dict):
                raw_mailu = attrs.get("mailu_email")
                if isinstance(raw_mailu, list) and raw_mailu:
                    mailu_email = str(raw_mailu[0])
                elif isinstance(raw_mailu, str) and raw_mailu:
                    mailu_email = raw_mailu
        except Exception:
            pass

        email = mailu_email or f"{username}@{settings.MAILU_DOMAIN}"
        password = random_password(24)

        try:
            result = trigger_firefly_user_sync(username, email, password, wait=True)
            status_val = result.get("status") if isinstance(result, dict) else "error"
            if status_val != "ok":
                raise RuntimeError(f"firefly sync {status_val}")
        except Exception as exc:
            message = str(exc).strip() or "firefly sync failed"
            return jsonify({"error": message}), 502

        try:
            admin_client().set_user_attribute(username, "firefly_password", password)
            admin_client().set_user_attribute(
                username,
                "firefly_password_updated_at",
                time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
            )
        except Exception:
            return jsonify({"error": "failed to store firefly password"}), 502

        return jsonify({"status": "ok", "password": password})

    @app.route("/api/account/firefly/rotation/check", methods=["POST"])
    @require_auth
    def account_firefly_rotation_check() -> Any:
        ok, resp = require_account_access()
        if not ok:
            return resp
        if ariadne_client.enabled():
            return ariadne_client.proxy("POST", "/api/account/firefly/rotation/check")
        return jsonify({"error": "server not configured"}), 503

    @app.route("/api/account/nextcloud/mail/sync", methods=["POST"])
    @require_auth
    def account_nextcloud_mail_sync() -> Any:
        ok, resp = require_account_access()
        if not ok:
            return resp
        if ariadne_client.enabled():
            payload = request.get_json(silent=True) or {}
            return ariadne_client.proxy("POST", "/api/account/nextcloud/mail/sync", payload=payload)
        if not admin_client().ready():
            return jsonify({"error": "server not configured"}), 503

        username = g.keycloak_username
        if not username:
            return jsonify({"error": "missing username"}), 400

        payload = request.get_json(silent=True) or {}
        wait = bool(payload.get("wait", True))

        try:
            result = trigger_nextcloud_mail_sync(username, wait=wait)
            return jsonify(result)
        except Exception as exc:
            message = str(exc).strip() or "failed to sync nextcloud mail"
            return jsonify({"error": message}), 502
@ -1,163 +0,0 @@
from __future__ import annotations

from typing import Any
from urllib.parse import quote

from flask import jsonify, g, request

from .. import ariadne_client, settings
from ..db import connect, configured
from ..keycloak import admin_client, require_auth, require_portal_admin
from ..provisioning import provision_access_request


def register(app) -> None:
    @app.route("/api/admin/access/requests", methods=["GET"])
    @require_auth
    def admin_list_requests() -> Any:
        ok, resp = require_portal_admin()
        if not ok:
            return resp
        if not configured():
            return jsonify({"error": "server not configured"}), 503
        if ariadne_client.enabled():
            return ariadne_client.proxy("GET", "/api/admin/access/requests")

        try:
            with connect() as conn:
                rows = conn.execute(
                    """
                    SELECT request_code, username, contact_email, first_name, last_name, note, status, created_at
                    FROM access_requests
                    WHERE status = 'pending'
                    ORDER BY created_at ASC
                    LIMIT 200
                    """
                ).fetchall()
        except Exception:
            return jsonify({"error": "failed to load requests"}), 502

        output: list[dict[str, Any]] = []
        for row in rows:
            output.append(
                {
                    "id": row["request_code"],
                    "username": row["username"],
                    "email": row.get("contact_email") or "",
                    "first_name": row.get("first_name") or "",
                    "last_name": row.get("last_name") or "",
                    "request_code": row["request_code"],
                    "created_at": (row.get("created_at").isoformat() if row.get("created_at") else ""),
                    "note": row.get("note") or "",
                }
            )
        return jsonify({"requests": output})

    @app.route("/api/admin/access/flags", methods=["GET"])
    @require_auth
    def admin_list_flags() -> Any:
        ok, resp = require_portal_admin()
        if not ok:
            return resp
        if ariadne_client.enabled():
            return ariadne_client.proxy("GET", "/api/admin/access/flags")
        if not admin_client().ready():
            return jsonify({"error": "keycloak admin unavailable"}), 503
        try:
            groups = admin_client().list_group_names()
        except Exception:
            return jsonify({"error": "failed to list flags"}), 502
        excluded = set(settings.PORTAL_ADMIN_GROUPS)
        flags = sorted([name for name in groups if name not in excluded])
        return jsonify({"flags": flags})

    @app.route("/api/admin/access/requests/<username>/approve", methods=["POST"])
    @require_auth
    def admin_approve_request(username: str) -> Any:
        ok, resp = require_portal_admin()
        if not ok:
            return resp
        if not configured():
            return jsonify({"error": "server not configured"}), 503
        payload = request.get_json(silent=True) or {}
        if ariadne_client.enabled():
            return ariadne_client.proxy(
                "POST",
                f"/api/admin/access/requests/{quote(username, safe='')}/approve",
                payload=payload,
            )

        decided_by = getattr(g, "keycloak_username", "") or ""
        flags_raw = payload.get("flags") if isinstance(payload, dict) else None
        flags = [f for f in flags_raw if isinstance(f, str)] if isinstance(flags_raw, list) else []
        note = payload.get("note") if isinstance(payload, dict) else None
        note = str(note).strip() if isinstance(note, str) else None
        try:
            with connect() as conn:
                row = conn.execute(
                    """
                    UPDATE access_requests
                    SET status = 'accounts_building',
                        decided_at = NOW(),
                        decided_by = %s,
                        approval_flags = %s,
                        approval_note = %s
                    WHERE username = %s AND status = 'pending'
                      AND email_verified_at IS NOT NULL
                    RETURNING request_code
                    """,
                    (decided_by or None, flags or None, note, username),
                ).fetchone()
        except Exception:
            return jsonify({"error": "failed to approve request"}), 502

        if not row:
            return jsonify({"ok": True, "request_code": ""})

        # Provision the account best-effort (Keycloak user + Mailu password + sync).
        try:
            provision_access_request(row["request_code"])
        except Exception:
            # Keep the request in accounts_building; status checks will surface it.
            pass
        return jsonify({"ok": True, "request_code": row["request_code"]})

    @app.route("/api/admin/access/requests/<username>/deny", methods=["POST"])
    @require_auth
    def admin_deny_request(username: str) -> Any:
        ok, resp = require_portal_admin()
        if not ok:
            return resp
        if not configured():
            return jsonify({"error": "server not configured"}), 503
        payload = request.get_json(silent=True) or {}
        if ariadne_client.enabled():
            return ariadne_client.proxy(
                "POST",
                f"/api/admin/access/requests/{quote(username, safe='')}/deny",
                payload=payload,
            )

        decided_by = getattr(g, "keycloak_username", "") or ""
        note = payload.get("note") if isinstance(payload, dict) else None
        note = str(note).strip() if isinstance(note, str) else None
        try:
            with connect() as conn:
                row = conn.execute(
                    """
                    UPDATE access_requests
                    SET status = 'denied',
                        decided_at = NOW(),
                        decided_by = %s,
                        denial_note = %s
                    WHERE username = %s AND status = 'pending'
                    RETURNING request_code
                    """,
                    (decided_by or None, note, username),
                ).fetchone()
        except Exception:
            return jsonify({"error": "failed to deny request"}), 502

        if not row:
            return jsonify({"ok": True, "request_code": ""})
        return jsonify({"ok": True, "request_code": row["request_code"]})
@ -1,198 +0,0 @@
from __future__ import annotations

import json
import threading
import time
from pathlib import Path
from typing import Any

from flask import jsonify, request
import httpx

from .. import settings


def register(app) -> None:
    @app.route("/api/chat", methods=["POST"])
    @app.route("/api/ai/chat", methods=["POST"])
    def ai_chat() -> Any:
        payload = request.get_json(silent=True) or {}
        user_message = (payload.get("message") or "").strip()
        history = payload.get("history") or []
        profile = (payload.get("profile") or payload.get("mode") or "atlas-quick").strip().lower()
        conversation_id = payload.get("conversation_id") if isinstance(payload.get("conversation_id"), str) else ""

        if not user_message:
            return jsonify({"error": "message required"}), 400

        started = time.time()
        if profile in {"stock", "stock-ai", "stock_ai"}:
            reply = _stock_answer(user_message, history)
            source = "stock"
        else:
            mode = "smart" if profile in {"atlas-smart", "smart"} else "quick"
            reply = _atlasbot_answer(user_message, mode, conversation_id)
            source = f"atlas-{mode}"
        if reply:
            elapsed_ms = int((time.time() - started) * 1000)
            return jsonify({"reply": reply, "latency_ms": elapsed_ms, "source": source})
        elapsed_ms = int((time.time() - started) * 1000)
        return jsonify(
            {
                "reply": "Atlasbot is busy. Please try again in a moment.",
                "latency_ms": elapsed_ms,
                "source": source,
            }
        )

    @app.route("/api/chat/info", methods=["GET"])
    @app.route("/api/ai/info", methods=["GET"])
    def ai_info() -> Any:
        profile = (request.args.get("profile") or "atlas-quick").strip().lower()
        meta = _discover_ai_meta(profile)
        return jsonify(meta)

    _start_keep_warm()


def _atlasbot_answer(message: str, mode: str, conversation_id: str) -> str:
    endpoint = settings.AI_ATLASBOT_ENDPOINT
    if not endpoint:
        return ""
    headers: dict[str, str] = {}
    if settings.AI_ATLASBOT_TOKEN:
        headers["X-Internal-Token"] = settings.AI_ATLASBOT_TOKEN
    try:
        payload = {"prompt": message, "mode": mode}
        if conversation_id:
            payload["conversation_id"] = conversation_id
        with httpx.Client(timeout=settings.AI_ATLASBOT_TIMEOUT_SEC) as client:
            resp = client.post(endpoint, json=payload, headers=headers)
        if resp.status_code != 200:
            return ""
        data = resp.json()
        answer = (data.get("answer") or "").strip()
        return answer
    except (httpx.RequestError, ValueError):
        return ""
def _stock_answer(message: str, history: list[dict[str, Any]]) -> str:
|
|
||||||
body = {
|
|
||||||
"model": settings.AI_CHAT_MODEL,
|
|
||||||
"messages": _build_messages(message, history),
|
|
||||||
"stream": False,
|
|
||||||
}
|
|
||||||
try:
|
|
||||||
with httpx.Client(timeout=settings.AI_CHAT_TIMEOUT_SEC) as client:
|
|
||||||
resp = client.post(f"{settings.AI_CHAT_API}/api/chat", json=body)
|
|
||||||
resp.raise_for_status()
|
|
||||||
data = resp.json()
|
|
||||||
except (httpx.RequestError, ValueError):
|
|
||||||
return ""
|
|
||||||
message_data = data.get("message") if isinstance(data, dict) else None
|
|
||||||
if isinstance(message_data, dict) and message_data.get("content"):
|
|
||||||
return str(message_data["content"]).strip()
|
|
||||||
if isinstance(data, dict) and data.get("response"):
|
|
||||||
return str(data["response"]).strip()
|
|
||||||
return ""
|
|
||||||
|
|
||||||
|
|
||||||
def _build_messages(message: str, history: list[dict[str, Any]]) -> list[dict[str, str]]:
|
|
||||||
messages = [{"role": "system", "content": settings.AI_CHAT_SYSTEM_PROMPT}]
|
|
||||||
for entry in history:
|
|
||||||
role = entry.get("role")
|
|
||||||
content = entry.get("content")
|
|
||||||
if role in {"user", "assistant"} and isinstance(content, str) and content.strip():
|
|
||||||
messages.append({"role": role, "content": content})
|
|
||||||
messages.append({"role": "user", "content": message})
|
|
||||||
return messages
|
|
||||||
|
|
||||||
|
|
||||||
def _discover_ai_meta(profile: str) -> dict[str, str]:
|
|
||||||
meta = {
|
|
||||||
"node": settings.AI_NODE_NAME,
|
|
||||||
"gpu": settings.AI_GPU_DESC,
|
|
||||||
"model": settings.AI_CHAT_MODEL,
|
|
||||||
"endpoint": settings.AI_PUBLIC_ENDPOINT or "/api/chat",
|
|
||||||
"profile": profile,
|
|
||||||
}
|
|
||||||
if profile in {"atlas-smart", "smart"}:
|
|
||||||
meta["model"] = settings.AI_ATLASBOT_MODEL_SMART or settings.AI_CHAT_MODEL
|
|
||||||
meta["endpoint"] = "/api/ai/chat"
|
|
||||||
elif profile in {"atlas-quick", "quick"}:
|
|
||||||
meta["model"] = settings.AI_ATLASBOT_MODEL_FAST or settings.AI_CHAT_MODEL
|
|
||||||
meta["endpoint"] = "/api/ai/chat"
|
|
||||||
elif profile in {"stock", "stock-ai", "stock_ai"}:
|
|
||||||
meta["model"] = settings.AI_CHAT_MODEL
|
|
||||||
meta["endpoint"] = "/api/ai/chat"
|
|
||||||
|
|
||||||
sa_path = Path("/var/run/secrets/kubernetes.io/serviceaccount")
|
|
||||||
token_path = sa_path / "token"
|
|
||||||
ca_path = sa_path / "ca.crt"
|
|
||||||
ns_path = sa_path / "namespace"
|
|
||||||
if not token_path.exists() or not ca_path.exists() or not ns_path.exists():
|
|
||||||
return meta
|
|
||||||
|
|
||||||
try:
|
|
||||||
token = token_path.read_text().strip()
|
|
||||||
namespace = settings.AI_K8S_NAMESPACE
|
|
||||||
base_url = "https://kubernetes.default.svc"
|
|
||||||
pod_url = f"{base_url}/api/v1/namespaces/{namespace}/pods?labelSelector={settings.AI_K8S_LABEL}"
|
|
||||||
|
|
||||||
with httpx.Client(
|
|
||||||
verify=str(ca_path),
|
|
||||||
timeout=settings.HTTP_CHECK_TIMEOUT_SEC,
|
|
||||||
headers={"Authorization": f"Bearer {token}"},
|
|
||||||
) as client:
|
|
||||||
resp = client.get(pod_url)
|
|
||||||
resp.raise_for_status()
|
|
||||||
data = resp.json()
|
|
||||||
items = data.get("items") or []
|
|
||||||
running = [p for p in items if p.get("status", {}).get("phase") == "Running"] or items
|
|
||||||
if running:
|
|
||||||
pod = running[0]
|
|
||||||
node_name = pod.get("spec", {}).get("nodeName") or meta["node"]
|
|
||||||
meta["node"] = node_name
|
|
||||||
|
|
||||||
annotations = pod.get("metadata", {}).get("annotations") or {}
|
|
||||||
gpu_hint = (
|
|
||||||
annotations.get(settings.AI_GPU_ANNOTATION)
|
|
||||||
or annotations.get("ai.gpu/description")
|
|
||||||
or annotations.get("gpu/description")
|
|
||||||
)
|
|
||||||
if gpu_hint:
|
|
||||||
meta["gpu"] = gpu_hint
|
|
||||||
|
|
||||||
model_hint = annotations.get(settings.AI_MODEL_ANNOTATION)
|
|
||||||
if not model_hint:
|
|
||||||
containers = pod.get("spec", {}).get("containers") or []
|
|
||||||
if containers:
|
|
||||||
image = containers[0].get("image") or ""
|
|
||||||
model_hint = image.split(":")[-1] if ":" in image else image
|
|
||||||
if model_hint:
|
|
||||||
meta["model"] = model_hint
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return meta
|
|
||||||
|
|
||||||
|
|
||||||
def _start_keep_warm() -> None:
|
|
||||||
if not settings.AI_WARM_ENABLED or settings.AI_WARM_INTERVAL_SEC <= 0:
|
|
||||||
return
|
|
||||||
|
|
||||||
def loop() -> None:
|
|
||||||
while True:
|
|
||||||
time.sleep(settings.AI_WARM_INTERVAL_SEC)
|
|
||||||
try:
|
|
||||||
body = {
|
|
||||||
"model": settings.AI_CHAT_MODEL,
|
|
||||||
"messages": [{"role": "user", "content": "ping"}],
|
|
||||||
"stream": False,
|
|
||||||
}
|
|
||||||
with httpx.Client(timeout=min(settings.AI_CHAT_TIMEOUT_SEC, 15)) as client:
|
|
||||||
client.post(f"{settings.AI_CHAT_API}/api/chat", json=body)
|
|
||||||
except Exception:
|
|
||||||
continue
|
|
||||||
|
|
||||||
threading.Thread(target=loop, daemon=True, name="ai-keep-warm").start()
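
A minimal sketch of exercising the chat route removed above, assuming the Flask app is served locally on port 8080 (hypothetical host and port); the request and response fields mirror the handler's payload keys and jsonify output:

    import httpx

    # Hypothetical local address; the real deployment sits behind the portal ingress.
    resp = httpx.post(
        "http://localhost:8080/api/ai/chat",
        json={"message": "How many nodes are in the Atlas cluster?", "profile": "atlas-quick"},
        timeout=30,
    )
    data = resp.json()
    # The handler returns reply, latency_ms, and source (e.g. "atlas-quick" or "stock").
    print(data.get("source"), data.get("latency_ms"), data.get("reply"))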
@ -1,46 +0,0 @@
from __future__ import annotations

from typing import Any
from urllib.parse import quote

from flask import jsonify, request

from .. import settings


def register(app) -> None:
    @app.route("/api/auth/config", methods=["GET"])
    def auth_config() -> Any:
        if not settings.KEYCLOAK_ENABLED:
            return jsonify({"enabled": False})

        issuer = settings.KEYCLOAK_ISSUER
        public_origin = request.host_url.rstrip("/")
        redirect_uri = quote(f"{public_origin}/", safe="")
        login_url = (
            f"{issuer}/protocol/openid-connect/auth"
            f"?client_id={quote(settings.KEYCLOAK_CLIENT_ID, safe='')}"
            f"&redirect_uri={redirect_uri}"
            f"&response_type=code"
            f"&scope=openid"
        )
        reset_url = (
            f"{issuer}/login-actions/reset-credentials"
            f"?client_id={quote(settings.KEYCLOAK_CLIENT_ID, safe='')}"
            f"&redirect_uri={redirect_uri}"
        )
        account_url = f"{issuer}/account"
        account_password_url = f"{account_url}/#/security/signingin"

        return jsonify(
            {
                "enabled": True,
                "url": settings.KEYCLOAK_URL,
                "realm": settings.KEYCLOAK_REALM,
                "client_id": settings.KEYCLOAK_CLIENT_ID,
                "login_url": login_url,
                "reset_url": reset_url,
                "account_url": account_url,
                "account_password_url": account_password_url,
            }
        )
@ -1,12 +0,0 @@
from __future__ import annotations

from typing import Any

from flask import jsonify


def register(app) -> None:
    @app.route("/api/healthz")
    def healthz() -> Any:
        return jsonify({"ok": True})
@ -1,125 +0,0 @@
from __future__ import annotations

import json
import time
from typing import Any
from urllib.error import URLError
from urllib.parse import urlencode
from urllib.request import urlopen

from flask import jsonify

from .. import settings

_LAB_STATUS_CACHE: dict[str, Any] = {"ts": 0.0, "value": None}


def _vm_query(expr: str) -> float | None:
    url = f"{settings.VM_BASE_URL}/api/v1/query?{urlencode({'query': expr})}"
    with urlopen(url, timeout=settings.VM_QUERY_TIMEOUT_SEC) as resp:
        payload = json.loads(resp.read().decode("utf-8"))

    if payload.get("status") != "success":
        return None

    result = (payload.get("data") or {}).get("result") or []
    if not result:
        return None

    values: list[float] = []
    for item in result:
        try:
            values.append(float(item["value"][1]))
        except (KeyError, IndexError, TypeError, ValueError):
            continue

    if not values:
        return None

    return max(values)


def _http_ok(url: str, expect_substring: str | None = None) -> bool:
    try:
        with urlopen(url, timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as resp:
            if getattr(resp, "status", 200) != 200:
                return False
            if expect_substring:
                chunk = resp.read(4096).decode("utf-8", errors="ignore")
                return expect_substring in chunk
            return True
    except (URLError, TimeoutError, ValueError):
        return False


def register(app) -> None:
    @app.route("/api/lab/status")
    def lab_status() -> Any:
        now = time.time()
        cached = _LAB_STATUS_CACHE.get("value")
        if cached and (now - float(_LAB_STATUS_CACHE.get("ts", 0.0)) < settings.LAB_STATUS_CACHE_SEC):
            return jsonify(cached)

        t_total = time.perf_counter()
        timings_ms: dict[str, int] = {}

        connected = False
        atlas_up = False
        atlas_known = False
        atlas_source = "unknown"

        oceanus_up = False
        oceanus_known = False
        oceanus_source = "unknown"

        # Atlas
        try:
            t_probe = time.perf_counter()
            atlas_grafana_ok = _http_ok(settings.GRAFANA_HEALTH_URL, expect_substring="ok")
            timings_ms["grafana"] = int((time.perf_counter() - t_probe) * 1000)
            if atlas_grafana_ok:
                connected = True
                atlas_up = True
                atlas_known = True
                atlas_source = "grafana"
        except Exception:
            pass

        if not atlas_known:
            try:
                t_probe = time.perf_counter()
                value = _vm_query("up")
                timings_ms["victoria_metrics"] = int((time.perf_counter() - t_probe) * 1000)
                if value is not None:
                    connected = True
                    atlas_known = True
                    atlas_up = value > 0
                    atlas_source = "victoria-metrics"
            except Exception:
                pass

        # Oceanus (node-exporter direct probe)
        try:
            t_probe = time.perf_counter()
            if _http_ok(settings.OCEANUS_NODE_EXPORTER_URL):
                timings_ms["oceanus_node_exporter"] = int((time.perf_counter() - t_probe) * 1000)
                connected = True
                oceanus_known = True
                oceanus_up = True
                oceanus_source = "node-exporter"
        except Exception:
            pass

        timings_ms["total"] = int((time.perf_counter() - t_total) * 1000)

        payload = {
            "connected": connected,
            "atlas": {"up": atlas_up, "known": atlas_known, "source": atlas_source},
            "oceanus": {"up": oceanus_up, "known": oceanus_known, "source": oceanus_source},
            "checked_at": int(now),
            "timings_ms": timings_ms,
        }

        _LAB_STATUS_CACHE["ts"] = now
        _LAB_STATUS_CACHE["value"] = payload
        return jsonify(payload)
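
For reference, `_vm_query` above is a thin wrapper over the Prometheus-compatible query API that VictoriaMetrics exposes. A standalone sketch of the same call, assuming the in-cluster base URL used as the default in settings:

    import json
    from urllib.parse import urlencode
    from urllib.request import urlopen

    # Default in-cluster address from the settings module; adjust for your environment.
    base = "http://victoria-metrics-single-server.monitoring.svc.cluster.local:8428"
    url = f"{base}/api/v1/query?{urlencode({'query': 'up'})}"
    with urlopen(url, timeout=2) as resp:
        payload = json.loads(resp.read().decode("utf-8"))
    # A successful response carries {"status": "success", "data": {"result": [...]}}.
    print(payload.get("status"), len((payload.get("data") or {}).get("result") or []))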
@ -1,22 +0,0 @@
from __future__ import annotations

import json
from typing import Any
from urllib.error import URLError
from urllib.request import urlopen

from flask import jsonify

from .. import settings


def register(app) -> None:
    @app.route("/api/monero/get_info")
    def monero_get_info() -> Any:
        try:
            with urlopen(settings.MONERO_GET_INFO_URL, timeout=2) as resp:
                payload = json.loads(resp.read().decode("utf-8"))
            return jsonify(payload)
        except (URLError, TimeoutError, ValueError) as exc:
            return jsonify({"error": str(exc), "url": settings.MONERO_GET_INFO_URL}), 503
@ -1,145 +0,0 @@
from __future__ import annotations

import os


def _env_bool(name: str, default: str = "false") -> bool:
    return os.getenv(name, default).lower() in ("1", "true", "yes")


MONERO_GET_INFO_URL = os.getenv("MONERO_GET_INFO_URL", "http://monerod.crypto.svc.cluster.local:18081/get_info")
VM_BASE_URL = os.getenv(
    "VM_BASE_URL",
    "http://victoria-metrics-single-server.monitoring.svc.cluster.local:8428",
).rstrip("/")
VM_QUERY_TIMEOUT_SEC = float(os.getenv("VM_QUERY_TIMEOUT_SEC", "2"))
HTTP_CHECK_TIMEOUT_SEC = float(os.getenv("HTTP_CHECK_TIMEOUT_SEC", "2"))
K8S_API_TIMEOUT_SEC = float(os.getenv("K8S_API_TIMEOUT_SEC", "5"))
LAB_STATUS_CACHE_SEC = float(os.getenv("LAB_STATUS_CACHE_SEC", "30"))
GRAFANA_HEALTH_URL = os.getenv("GRAFANA_HEALTH_URL", "http://grafana.monitoring.svc.cluster.local/api/health")
OCEANUS_NODE_EXPORTER_URL = os.getenv("OCEANUS_NODE_EXPORTER_URL", "http://192.168.22.24:9100/metrics")

AI_CHAT_API = os.getenv("AI_CHAT_API", "http://ollama.ai.svc.cluster.local:11434").rstrip("/")
AI_CHAT_MODEL = os.getenv("AI_CHAT_MODEL", "qwen2.5-coder:7b-instruct-q4_0")
AI_CHAT_SYSTEM_PROMPT = os.getenv(
    "AI_CHAT_SYSTEM_PROMPT",
    "You are the Titan Lab assistant for bstein.dev. Be concise and helpful.",
)
AI_CHAT_TIMEOUT_SEC = float(os.getenv("AI_CHAT_TIMEOUT_SEC", "20"))
AI_ATLASBOT_ENDPOINT = os.getenv("AI_ATLASBOT_ENDPOINT", "").strip()
AI_ATLASBOT_TOKEN = os.getenv("AI_ATLASBOT_TOKEN", "").strip()
AI_ATLASBOT_TIMEOUT_SEC = float(os.getenv("AI_ATLASBOT_TIMEOUT_SEC", "5"))
AI_ATLASBOT_MODEL_FAST = os.getenv("AI_ATLASBOT_MODEL_FAST", "").strip()
AI_ATLASBOT_MODEL_SMART = os.getenv("AI_ATLASBOT_MODEL_SMART", "").strip()
AI_NODE_NAME = os.getenv("AI_CHAT_NODE_NAME") or os.getenv("AI_NODE_NAME") or "ai-cluster"
AI_GPU_DESC = os.getenv("AI_CHAT_GPU_DESC") or "local GPU (dynamic)"
AI_PUBLIC_ENDPOINT = os.getenv("AI_PUBLIC_CHAT_ENDPOINT", "https://chat.ai.bstein.dev/api/chat")
AI_K8S_LABEL = os.getenv("AI_K8S_LABEL", "app=ollama")
AI_K8S_NAMESPACE = os.getenv("AI_K8S_NAMESPACE", "ai")
AI_MODEL_ANNOTATION = os.getenv("AI_MODEL_ANNOTATION", "ai.bstein.dev/model")
AI_GPU_ANNOTATION = os.getenv("AI_GPU_ANNOTATION", "ai.bstein.dev/gpu")
AI_WARM_INTERVAL_SEC = float(os.getenv("AI_WARM_INTERVAL_SEC", "300"))
AI_WARM_ENABLED = _env_bool("AI_WARM_ENABLED", "true")

KEYCLOAK_ENABLED = _env_bool("KEYCLOAK_ENABLED", "false")
KEYCLOAK_URL = os.getenv("KEYCLOAK_URL", "https://sso.bstein.dev").rstrip("/")
KEYCLOAK_REALM = os.getenv("KEYCLOAK_REALM", "atlas")
KEYCLOAK_CLIENT_ID = os.getenv("KEYCLOAK_CLIENT_ID", "bstein-dev-home")
KEYCLOAK_ISSUER = os.getenv("KEYCLOAK_ISSUER", f"{KEYCLOAK_URL}/realms/{KEYCLOAK_REALM}").rstrip("/")
KEYCLOAK_JWKS_URL = os.getenv("KEYCLOAK_JWKS_URL", f"{KEYCLOAK_ISSUER}/protocol/openid-connect/certs").rstrip("/")

KEYCLOAK_ADMIN_URL = os.getenv("KEYCLOAK_ADMIN_URL", KEYCLOAK_URL).rstrip("/")
KEYCLOAK_ADMIN_CLIENT_ID = os.getenv("KEYCLOAK_ADMIN_CLIENT_ID", "")
KEYCLOAK_ADMIN_CLIENT_SECRET = os.getenv("KEYCLOAK_ADMIN_CLIENT_SECRET", "")
KEYCLOAK_ADMIN_REALM = os.getenv("KEYCLOAK_ADMIN_REALM", KEYCLOAK_REALM)

ARIADNE_URL = os.getenv("ARIADNE_URL", "").strip()
ARIADNE_TIMEOUT_SEC = float(os.getenv("ARIADNE_TIMEOUT_SEC", "10"))
ARIADNE_RETRY_COUNT = int(os.getenv("ARIADNE_RETRY_COUNT", "2"))
ARIADNE_RETRY_BACKOFF_SEC = float(os.getenv("ARIADNE_RETRY_BACKOFF_SEC", "0.2"))

ACCOUNT_ALLOWED_GROUPS = [
    g.strip()
    for g in os.getenv("ACCOUNT_ALLOWED_GROUPS", "dev,admin").split(",")
    if g.strip()
]

PORTAL_DATABASE_URL = os.getenv("PORTAL_DATABASE_URL", "").strip()
PORTAL_DB_POOL_MIN = int(os.getenv("PORTAL_DB_POOL_MIN", "0"))
PORTAL_DB_POOL_MAX = int(os.getenv("PORTAL_DB_POOL_MAX", "5"))
PORTAL_DB_CONNECT_TIMEOUT_SEC = int(os.getenv("PORTAL_DB_CONNECT_TIMEOUT_SEC", "5"))
PORTAL_DB_LOCK_TIMEOUT_SEC = int(os.getenv("PORTAL_DB_LOCK_TIMEOUT_SEC", "5"))
PORTAL_DB_STATEMENT_TIMEOUT_SEC = int(os.getenv("PORTAL_DB_STATEMENT_TIMEOUT_SEC", "30"))
PORTAL_DB_IDLE_IN_TX_TIMEOUT_SEC = int(os.getenv("PORTAL_DB_IDLE_IN_TX_TIMEOUT_SEC", "10"))
PORTAL_RUN_MIGRATIONS = _env_bool("PORTAL_RUN_MIGRATIONS", "false")

PORTAL_ADMIN_USERS = [u.strip() for u in os.getenv("PORTAL_ADMIN_USERS", "bstein").split(",") if u.strip()]
PORTAL_ADMIN_GROUPS = [g.strip() for g in os.getenv("PORTAL_ADMIN_GROUPS", "admin").split(",") if g.strip()]

DEFAULT_USER_GROUPS = [g.strip() for g in os.getenv("DEFAULT_USER_GROUPS", "dev").split(",") if g.strip()]

ACCESS_REQUEST_ENABLED = _env_bool("ACCESS_REQUEST_ENABLED", "true")
ACCESS_REQUEST_RATE_LIMIT = int(os.getenv("ACCESS_REQUEST_RATE_LIMIT", "5"))
ACCESS_REQUEST_RATE_WINDOW_SEC = int(os.getenv("ACCESS_REQUEST_RATE_WINDOW_SEC", str(60 * 60)))
ACCESS_REQUEST_SUBMIT_RATE_LIMIT = int(
    os.getenv("ACCESS_REQUEST_SUBMIT_RATE_LIMIT", str(ACCESS_REQUEST_RATE_LIMIT))
)
ACCESS_REQUEST_SUBMIT_RATE_WINDOW_SEC = int(
    os.getenv("ACCESS_REQUEST_SUBMIT_RATE_WINDOW_SEC", str(ACCESS_REQUEST_RATE_WINDOW_SEC))
)
ACCESS_REQUEST_STATUS_RATE_LIMIT = int(os.getenv("ACCESS_REQUEST_STATUS_RATE_LIMIT", "60"))
ACCESS_REQUEST_STATUS_RATE_WINDOW_SEC = int(os.getenv("ACCESS_REQUEST_STATUS_RATE_WINDOW_SEC", "60"))
ACCESS_REQUEST_EMAIL_VERIFY_TTL_SEC = int(os.getenv("ACCESS_REQUEST_EMAIL_VERIFY_TTL_SEC", str(24 * 60 * 60)))
ACCESS_REQUEST_INTERNAL_EMAIL_ALLOWLIST = {
    address.strip().lower()
    for address in os.getenv("ACCESS_REQUEST_INTERNAL_EMAIL_ALLOWLIST", "").split(",")
    if address.strip()
}
ACCESS_REQUEST_PROVISION_RETRY_COOLDOWN_SEC = float(
    os.getenv("ACCESS_REQUEST_PROVISION_RETRY_COOLDOWN_SEC", "30")
)

PORTAL_PUBLIC_BASE_URL = os.getenv("PORTAL_PUBLIC_BASE_URL", "https://bstein.dev").rstrip("/")

MAILU_DOMAIN = os.getenv("MAILU_DOMAIN", "bstein.dev")
MAILU_SYNC_URL = os.getenv(
    "MAILU_SYNC_URL",
    "http://mailu-sync-listener.mailu-mailserver.svc.cluster.local:8080/events",
).rstrip("/")

NEXTCLOUD_NAMESPACE = os.getenv("NEXTCLOUD_NAMESPACE", "nextcloud").strip()
NEXTCLOUD_MAIL_SYNC_CRONJOB = os.getenv("NEXTCLOUD_MAIL_SYNC_CRONJOB", "nextcloud-mail-sync").strip()
NEXTCLOUD_MAIL_SYNC_WAIT_TIMEOUT_SEC = float(os.getenv("NEXTCLOUD_MAIL_SYNC_WAIT_TIMEOUT_SEC", "90"))
NEXTCLOUD_MAIL_SYNC_JOB_TTL_SEC = int(os.getenv("NEXTCLOUD_MAIL_SYNC_JOB_TTL_SEC", "3600"))

WGER_NAMESPACE = os.getenv("WGER_NAMESPACE", "health").strip()
WGER_USER_SYNC_CRONJOB = os.getenv("WGER_USER_SYNC_CRONJOB", "wger-user-sync").strip()
WGER_USER_SYNC_WAIT_TIMEOUT_SEC = float(os.getenv("WGER_USER_SYNC_WAIT_TIMEOUT_SEC", "60"))
FIREFLY_NAMESPACE = os.getenv("FIREFLY_NAMESPACE", "finance").strip()
FIREFLY_USER_SYNC_CRONJOB = os.getenv("FIREFLY_USER_SYNC_CRONJOB", "firefly-user-sync").strip()
FIREFLY_USER_SYNC_WAIT_TIMEOUT_SEC = float(os.getenv("FIREFLY_USER_SYNC_WAIT_TIMEOUT_SEC", "90"))

SMTP_HOST = os.getenv("SMTP_HOST", "mailu-front.mailu-mailserver.svc.cluster.local").strip()
SMTP_PORT = int(os.getenv("SMTP_PORT", "25"))
SMTP_USERNAME = os.getenv("SMTP_USERNAME", "").strip()
SMTP_PASSWORD = os.getenv("SMTP_PASSWORD", "").strip()
SMTP_STARTTLS = _env_bool("SMTP_STARTTLS", "false")
SMTP_USE_TLS = _env_bool("SMTP_USE_TLS", "false")
SMTP_FROM = os.getenv("SMTP_FROM", "").strip() or f"postmaster@{MAILU_DOMAIN}"
SMTP_TIMEOUT_SEC = float(os.getenv("SMTP_TIMEOUT_SEC", "10"))

JELLYFIN_SYNC_URL = os.getenv("JELLYFIN_SYNC_URL", "").rstrip("/")
JELLYFIN_LDAP_HOST = os.getenv("JELLYFIN_LDAP_HOST", "openldap.sso.svc.cluster.local").strip()
JELLYFIN_LDAP_PORT = int(os.getenv("JELLYFIN_LDAP_PORT", "389"))
JELLYFIN_LDAP_CHECK_TIMEOUT_SEC = float(os.getenv("JELLYFIN_LDAP_CHECK_TIMEOUT_SEC", "1"))

VAULTWARDEN_NAMESPACE = os.getenv("VAULTWARDEN_NAMESPACE", "vaultwarden").strip()
VAULTWARDEN_POD_LABEL = os.getenv("VAULTWARDEN_POD_LABEL", "app=vaultwarden").strip()
VAULTWARDEN_POD_PORT = int(os.getenv("VAULTWARDEN_POD_PORT", "80"))
VAULTWARDEN_SERVICE_HOST = os.getenv("VAULTWARDEN_SERVICE_HOST", "vaultwarden-service.vaultwarden.svc.cluster.local").strip()
VAULTWARDEN_ADMIN_SECRET_NAME = os.getenv("VAULTWARDEN_ADMIN_SECRET_NAME", "vaultwarden-admin").strip()
VAULTWARDEN_ADMIN_SECRET_KEY = os.getenv("VAULTWARDEN_ADMIN_SECRET_KEY", "ADMIN_TOKEN").strip()
VAULTWARDEN_ADMIN_SESSION_TTL_SEC = float(os.getenv("VAULTWARDEN_ADMIN_SESSION_TTL_SEC", "300"))
VAULTWARDEN_ADMIN_RATE_LIMIT_BACKOFF_SEC = float(
    os.getenv("VAULTWARDEN_ADMIN_RATE_LIMIT_BACKOFF_SEC", "600")
)
@ -1,25 +0,0 @@
from __future__ import annotations

import secrets
import string
import time

import httpx

from . import settings


def random_password(length: int = 32) -> str:
    alphabet = string.ascii_letters + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(length))


def best_effort_post(url: str) -> None:
    if not url:
        return
    try:
        with httpx.Client(timeout=settings.HTTP_CHECK_TIMEOUT_SEC) as client:
            client.post(url, json={"ts": int(time.time())})
    except Exception:
        return
@ -1,203 +0,0 @@
from __future__ import annotations

import base64
import threading
import time
from dataclasses import dataclass
from pathlib import Path
from typing import Any

import httpx

from . import settings


_K8S_BASE_URL = "https://kubernetes.default.svc"
_SA_PATH = Path("/var/run/secrets/kubernetes.io/serviceaccount")


def _read_service_account() -> tuple[str, str]:
    token_path = _SA_PATH / "token"
    ca_path = _SA_PATH / "ca.crt"
    if not token_path.exists() or not ca_path.exists():
        raise RuntimeError("kubernetes service account token missing")
    token = token_path.read_text().strip()
    if not token:
        raise RuntimeError("kubernetes service account token empty")
    return token, str(ca_path)


def _k8s_get_json(path: str) -> dict[str, Any]:
    token, ca_path = _read_service_account()
    url = f"{_K8S_BASE_URL}{path}"
    with httpx.Client(
        verify=ca_path,
        timeout=settings.HTTP_CHECK_TIMEOUT_SEC,
        headers={"Authorization": f"Bearer {token}"},
    ) as client:
        resp = client.get(url)
        resp.raise_for_status()
        data = resp.json()
    if not isinstance(data, dict):
        raise RuntimeError("unexpected kubernetes response")
    return data


def _k8s_find_pod_ip(namespace: str, label_selector: str) -> str:
    data = _k8s_get_json(f"/api/v1/namespaces/{namespace}/pods?labelSelector={label_selector}")
    items = data.get("items") or []
    if not isinstance(items, list) or not items:
        raise RuntimeError("no vaultwarden pods found")

    def _pod_ready(pod: dict[str, Any]) -> bool:
        status = pod.get("status") if isinstance(pod.get("status"), dict) else {}
        if status.get("phase") != "Running":
            return False
        ip = status.get("podIP")
        if not isinstance(ip, str) or not ip:
            return False
        conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
        for cond in conditions:
            if not isinstance(cond, dict):
                continue
            if cond.get("type") == "Ready":
                return cond.get("status") == "True"
        return True

    ready = [p for p in items if isinstance(p, dict) and _pod_ready(p)]
    candidates = ready or [p for p in items if isinstance(p, dict)]
    status = candidates[0].get("status") or {}
    ip = status.get("podIP") if isinstance(status, dict) else None
    if not isinstance(ip, str) or not ip:
        raise RuntimeError("vaultwarden pod has no IP")
    return ip


def _k8s_get_secret_value(namespace: str, name: str, key: str) -> str:
    data = _k8s_get_json(f"/api/v1/namespaces/{namespace}/secrets/{name}")
    blob = data.get("data") if isinstance(data.get("data"), dict) else {}
    raw = blob.get(key)
    if not isinstance(raw, str) or not raw:
        raise RuntimeError("secret key missing")
    try:
        decoded = base64.b64decode(raw).decode("utf-8").strip()
    except Exception as exc:
        raise RuntimeError("failed to decode secret") from exc
    if not decoded:
        raise RuntimeError("secret value empty")
    return decoded


@dataclass(frozen=True)
class VaultwardenInvite:
    ok: bool
    status: str
    detail: str = ""


_ADMIN_LOCK = threading.Lock()
_ADMIN_SESSION: httpx.Client | None = None
_ADMIN_SESSION_EXPIRES_AT: float = 0.0
_ADMIN_SESSION_BASE_URL: str = ""
_ADMIN_RATE_LIMITED_UNTIL: float = 0.0


def _admin_session(base_url: str) -> httpx.Client:
    global _ADMIN_SESSION, _ADMIN_SESSION_EXPIRES_AT, _ADMIN_SESSION_BASE_URL, _ADMIN_RATE_LIMITED_UNTIL
    now = time.time()
    with _ADMIN_LOCK:
        if _ADMIN_RATE_LIMITED_UNTIL and now < _ADMIN_RATE_LIMITED_UNTIL:
            raise RuntimeError("vaultwarden rate limited")
        if _ADMIN_SESSION and now < _ADMIN_SESSION_EXPIRES_AT and _ADMIN_SESSION_BASE_URL == base_url:
            return _ADMIN_SESSION

        if _ADMIN_SESSION:
            try:
                _ADMIN_SESSION.close()
            except Exception:
                pass
            _ADMIN_SESSION = None

        token = _k8s_get_secret_value(
            settings.VAULTWARDEN_NAMESPACE,
            settings.VAULTWARDEN_ADMIN_SECRET_NAME,
            settings.VAULTWARDEN_ADMIN_SECRET_KEY,
        )

        client = httpx.Client(
            base_url=base_url,
            timeout=settings.HTTP_CHECK_TIMEOUT_SEC,
            follow_redirects=True,
            headers={"User-Agent": "atlas-portal/1"},
        )

        # Authenticate to the admin UI to establish a session cookie.
        # Vaultwarden can rate-limit admin login attempts, so keep this session cached briefly.
        resp = client.post("/admin", data={"token": token})
        if resp.status_code == 429:
            _ADMIN_RATE_LIMITED_UNTIL = now + float(settings.VAULTWARDEN_ADMIN_RATE_LIMIT_BACKOFF_SEC)
            raise RuntimeError("vaultwarden rate limited")
        resp.raise_for_status()

        _ADMIN_SESSION = client
        _ADMIN_SESSION_BASE_URL = base_url
        _ADMIN_SESSION_EXPIRES_AT = now + float(settings.VAULTWARDEN_ADMIN_SESSION_TTL_SEC)
        return client


def invite_user(email: str) -> VaultwardenInvite:
    global _ADMIN_RATE_LIMITED_UNTIL
    email = (email or "").strip()
    if not email or "@" not in email:
        return VaultwardenInvite(ok=False, status="invalid_email", detail="email invalid")
    if _ADMIN_RATE_LIMITED_UNTIL and time.time() < _ADMIN_RATE_LIMITED_UNTIL:
        return VaultwardenInvite(ok=False, status="rate_limited", detail="vaultwarden rate limited")

    # Prefer the service name when it works; fall back to pod IP because the Service can be misconfigured.
    base_url = f"http://{settings.VAULTWARDEN_SERVICE_HOST}"
    fallback_url = ""
    try:
        pod_ip = _k8s_find_pod_ip(settings.VAULTWARDEN_NAMESPACE, settings.VAULTWARDEN_POD_LABEL)
        fallback_url = f"http://{pod_ip}:{settings.VAULTWARDEN_POD_PORT}"
    except Exception:
        fallback_url = ""

    last_error = ""
    for candidate in [base_url, fallback_url]:
        if not candidate:
            continue
        try:
            session = _admin_session(candidate)
            resp = session.post("/admin/invite", json={"email": email})
            if resp.status_code == 429:
                _ADMIN_RATE_LIMITED_UNTIL = time.time() + float(settings.VAULTWARDEN_ADMIN_RATE_LIMIT_BACKOFF_SEC)
                return VaultwardenInvite(ok=False, status="rate_limited", detail="vaultwarden rate limited")

            if resp.status_code in {200, 201, 204}:
                return VaultwardenInvite(ok=True, status="invited", detail="invite created")

            # Treat "already exists/invited" as success for idempotency.
            body = ""
            try:
                body = resp.text or ""
            except Exception:
                body = ""
            if resp.status_code in {400, 409} and any(
                marker in body.lower()
                for marker in (
                    "already invited",
                    "already exists",
                    "already registered",
                    "user already exists",
                )
            ):
                return VaultwardenInvite(ok=True, status="already_present", detail="user already present")

            last_error = f"status {resp.status_code}"
        except Exception as exc:
            last_error = str(exc)
            if "rate limited" in last_error.lower():
                return VaultwardenInvite(ok=False, status="rate_limited", detail="vaultwarden rate limited")
            continue

    return VaultwardenInvite(ok=False, status="error", detail=last_error or "failed to invite")
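
A minimal sketch of the two admin calls that `invite_user` chains together (login, then invite); the base URL and token below are assumptions, while both request paths and the payload shape appear verbatim in the code above:

    import httpx

    base_url = "http://vaultwarden-service.vaultwarden.svc.cluster.local"  # assumption: default in-cluster service name
    admin_token = "replace-me"  # in the real flow this is read from the vaultwarden-admin secret

    with httpx.Client(base_url=base_url, follow_redirects=True, timeout=5) as client:
        # First POST establishes the admin session cookie; second POST issues the invite.
        client.post("/admin", data={"token": admin_token}).raise_for_status()
        invite = client.post("/admin/invite", json={"email": "alice@example.com"})
        print(invite.status_code)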
@ -1,137 +0,0 @@
from __future__ import annotations

import re
import time
from typing import Any

from . import settings
from .k8s import get_json, post_json


def _safe_name_fragment(value: str, max_len: int = 24) -> str:
    cleaned = re.sub(r"[^a-z0-9-]+", "-", (value or "").lower()).strip("-")
    if not cleaned:
        cleaned = "user"
    return cleaned[:max_len].rstrip("-") or "user"


def _job_from_cronjob(
    cronjob: dict[str, Any],
    username: str,
    email: str,
    password: str,
) -> dict[str, Any]:
    spec = cronjob.get("spec") if isinstance(cronjob.get("spec"), dict) else {}
    jt = spec.get("jobTemplate") if isinstance(spec.get("jobTemplate"), dict) else {}
    job_spec = jt.get("spec") if isinstance(jt.get("spec"), dict) else {}

    now = int(time.time())
    safe_user = _safe_name_fragment(username)
    job_name = f"wger-user-sync-{safe_user}-{now}"

    job: dict[str, Any] = {
        "apiVersion": "batch/v1",
        "kind": "Job",
        "metadata": {
            "name": job_name,
            "namespace": settings.WGER_NAMESPACE,
            "labels": {
                "app": "wger-user-sync",
                "atlas.bstein.dev/trigger": "portal",
                "atlas.bstein.dev/username": safe_user,
            },
        },
        "spec": job_spec,
    }

    tpl = job.get("spec", {}).get("template", {})
    pod_spec = tpl.get("spec") if isinstance(tpl.get("spec"), dict) else {}
    containers = pod_spec.get("containers") if isinstance(pod_spec.get("containers"), list) else []
    if containers and isinstance(containers[0], dict):
        env = containers[0].get("env")
        if not isinstance(env, list):
            env = []
        env = [
            e
            for e in env
            if not (
                isinstance(e, dict)
                and e.get("name") in {"WGER_USERNAME", "WGER_EMAIL", "WGER_PASSWORD"}
            )
        ]
        env.append({"name": "WGER_USERNAME", "value": username})
        env.append({"name": "WGER_EMAIL", "value": email})
        env.append({"name": "WGER_PASSWORD", "value": password})
        containers[0]["env"] = env
        pod_spec["containers"] = containers
        tpl["spec"] = pod_spec
        job["spec"]["template"] = tpl

    return job


def _job_succeeded(job: dict[str, Any]) -> bool:
    status = job.get("status") if isinstance(job.get("status"), dict) else {}
    if int(status.get("succeeded") or 0) > 0:
        return True
    conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
    for cond in conditions:
        if not isinstance(cond, dict):
            continue
        if cond.get("type") == "Complete" and cond.get("status") == "True":
            return True
    return False


def _job_failed(job: dict[str, Any]) -> bool:
    status = job.get("status") if isinstance(job.get("status"), dict) else {}
    if int(status.get("failed") or 0) > 0:
        return True
    conditions = status.get("conditions") if isinstance(status.get("conditions"), list) else []
    for cond in conditions:
        if not isinstance(cond, dict):
            continue
        if cond.get("type") == "Failed" and cond.get("status") == "True":
            return True
    return False


def trigger(username: str, email: str, password: str, wait: bool = True) -> dict[str, Any]:
    username = (username or "").strip()
    if not username:
        raise RuntimeError("missing username")
    if not password:
        raise RuntimeError("missing password")

    namespace = settings.WGER_NAMESPACE
    cronjob_name = settings.WGER_USER_SYNC_CRONJOB
    if not namespace or not cronjob_name:
        raise RuntimeError("wger sync not configured")

    cronjob = get_json(f"/apis/batch/v1/namespaces/{namespace}/cronjobs/{cronjob_name}")
    job_payload = _job_from_cronjob(cronjob, username, email, password)
    created = post_json(f"/apis/batch/v1/namespaces/{namespace}/jobs", job_payload)

    job_name = (
        created.get("metadata", {}).get("name")
        if isinstance(created.get("metadata"), dict)
        else job_payload.get("metadata", {}).get("name")
    )
    if not isinstance(job_name, str) or not job_name:
        raise RuntimeError("job name missing")

    if not wait:
        return {"job": job_name, "status": "queued"}

    deadline = time.time() + float(settings.WGER_USER_SYNC_WAIT_TIMEOUT_SEC)
    last_state = "running"
    while time.time() < deadline:
        job = get_json(f"/apis/batch/v1/namespaces/{namespace}/jobs/{job_name}")
        if _job_succeeded(job):
            return {"job": job_name, "status": "ok"}
        if _job_failed(job):
            return {"job": job_name, "status": "error"}
        time.sleep(2)
        last_state = "running"

    return {"job": job_name, "status": last_state}
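
A quick illustration of the name sanitisation that feeds the Job name above; the inputs here are made-up examples, not values from the repo:

    >>> _safe_name_fragment("Alice.Atlas_99")
    'alice-atlas-99'
    >>> _safe_name_fragment("Umlaut User!", max_len=8)
    'umlaut-u'
    >>> _safe_name_fragment("___")
    'user'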
@ -1,7 +1,3 @@
flask==3.0.3
flask-cors==4.0.0
gunicorn==21.2.0
-httpx==0.27.2
-PyJWT[crypto]==2.10.1
-psycopg[binary]==3.2.6
-psycopg-pool==3.2.6
@ -1,326 +0,0 @@
from __future__ import annotations

import json
from contextlib import contextmanager
from unittest import TestCase, mock

from atlas_portal.app_factory import create_app
from atlas_portal.routes import access_requests as ar


class DummyResult:
    def __init__(self, row=None):
        self._row = row

    def fetchone(self):
        return self._row

    def fetchall(self):
        return []


class DummyConn:
    def __init__(self, rows_by_query=None):
        self._rows_by_query = rows_by_query or {}
        self.executed = []

    def execute(self, query, params=None):
        self.executed.append((query, params))
        for key, row in self._rows_by_query.items():
            if key in query:
                return DummyResult(row)
        return DummyResult()


class DummyAdmin:
    def ready(self):
        return False

    def find_user(self, username):
        return None

    def find_user_by_email(self, email):
        return None


@contextmanager
def dummy_connect(rows_by_query=None):
    yield DummyConn(rows_by_query=rows_by_query)


class AccessRequestTests(TestCase):
    @classmethod
    def setUpClass(cls):
        cls.app = create_app()
        cls.client = cls.app.test_client()

    @classmethod
    def tearDownClass(cls):
        return None

    def setUp(self):
        self.configured_patch = mock.patch.object(ar, "configured", lambda: True)
        self.rate_patch = mock.patch.object(ar, "rate_limit_allow", lambda *args, **kwargs: True)
        self.admin_patch = mock.patch.object(ar, "admin_client", lambda: DummyAdmin())
        self.configured_patch.start()
        self.rate_patch.start()
        self.admin_patch.start()

    def tearDown(self):
        self.configured_patch.stop()
        self.rate_patch.stop()
        self.admin_patch.stop()

    def test_request_access_requires_last_name(self):
        with mock.patch.object(ar, "connect", lambda: dummy_connect()):
            resp = self.client.post(
                "/api/access/request",
                data=json.dumps(
                    {
                        "username": "alice",
                        "email": "alice@example.com",
                        "first_name": "Alice",
                        "last_name": "",
                        "note": "",
                    }
                ),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 400)
            self.assertIn("last name is required", data.get("error", ""))

    def test_request_access_sends_verification_email(self):
        sent = {}

        def fake_send_email(*, request_code, email, token):
            sent["request_code"] = request_code
            sent["email"] = email

        with (
            mock.patch.object(ar, "_random_request_code", lambda username: f"{username}~CODE123"),
            mock.patch.object(ar, "_send_verification_email", fake_send_email),
            mock.patch.object(ar, "connect", lambda: dummy_connect()),
        ):
            resp = self.client.post(
                "/api/access/request",
                data=json.dumps(
                    {
                        "username": "alice",
                        "email": "alice@example.com",
                        "first_name": "Alice",
                        "last_name": "Atlas",
                        "note": "",
                    }
                ),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 200)
            self.assertEqual(data.get("request_code"), "alice~CODE123")
            self.assertEqual(data.get("status"), "pending_email_verification")
            self.assertEqual(sent.get("request_code"), "alice~CODE123")
            self.assertEqual(sent.get("email"), "alice@example.com")

    def test_request_access_email_failure_returns_request_code(self):
        def fake_send_email(*, request_code, email, token):
            raise ar.MailerError("failed")

        with (
            mock.patch.object(ar, "_random_request_code", lambda username: f"{username}~CODE123"),
            mock.patch.object(ar, "_send_verification_email", fake_send_email),
            mock.patch.object(ar, "connect", lambda: dummy_connect()),
        ):
            resp = self.client.post(
                "/api/access/request",
                data=json.dumps(
                    {
                        "username": "alice",
                        "email": "alice@example.com",
                        "first_name": "Alice",
                        "last_name": "Atlas",
                        "note": "",
                    }
                ),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 502)
            self.assertEqual(data.get("request_code"), "alice~CODE123")
            self.assertIn("failed to send verification email", data.get("error", ""))

    def test_request_access_resend_sends_email(self):
        sent = {}

        def fake_send_email(*, request_code, email, token):
            sent["request_code"] = request_code
            sent["email"] = email

        rows = {
            "SELECT status, contact_email": {
                "status": "pending_email_verification",
                "contact_email": "alice@example.com",
            }
        }

        with (
            mock.patch.object(ar, "_send_verification_email", fake_send_email),
            mock.patch.object(ar, "connect", lambda: dummy_connect(rows)),
        ):
            resp = self.client.post(
                "/api/access/request/resend",
                data=json.dumps({"request_code": "alice~CODE123"}),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 200)
            self.assertEqual(data.get("status"), "pending_email_verification")
            self.assertEqual(sent.get("request_code"), "alice~CODE123")
            self.assertEqual(sent.get("email"), "alice@example.com")

    def test_verify_request_updates_status(self):
        token = "tok-123"
        rows = {
            "SELECT status, email_verification_token_hash": {
                "status": "pending_email_verification",
                "email_verification_token_hash": ar._hash_verification_token(token),
                "email_verification_sent_at": ar.datetime.now(ar.timezone.utc),
            }
        }
        with dummy_connect(rows) as conn:
            status = ar._verify_request(conn, "alice~CODE123", token)
            self.assertEqual(status, "pending")

    def test_verify_link_redirects(self):
        token = "tok-123"
        rows = {
            "SELECT status, email_verification_token_hash": {
                "status": "pending_email_verification",
                "email_verification_token_hash": ar._hash_verification_token(token),
                "email_verification_sent_at": ar.datetime.now(ar.timezone.utc),
            }
        }
        with mock.patch.object(ar, "connect", lambda: dummy_connect(rows)):
            resp = self.client.get(f"/api/access/request/verify-link?code=alice~CODE123&token={token}")
            self.assertEqual(resp.status_code, 302)
            self.assertIn("verified=1", resp.headers.get("Location", ""))

    def test_status_includes_email_verified(self):
        rows = {
            "SELECT status": {
                "status": "pending",
                "username": "alice",
                "initial_password": None,
                "initial_password_revealed_at": None,
                "email_verified_at": ar.datetime.now(ar.timezone.utc),
            }
        }
        with mock.patch.object(ar, "connect", lambda: dummy_connect(rows)):
            resp = self.client.post(
                "/api/access/request/status",
                data=json.dumps({"request_code": "alice~CODE123"}),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 200)
            self.assertTrue(data.get("email_verified"))

    def test_status_hides_initial_password_without_reveal_flag(self):
        rows = {
            "SELECT status": {
                "status": "awaiting_onboarding",
                "username": "alice",
                "initial_password": "temp-pass",
                "initial_password_revealed_at": None,
                "email_verified_at": None,
            }
        }
        with (
            mock.patch.object(ar, "connect", lambda: dummy_connect(rows)),
            mock.patch.object(ar, "_advance_status", lambda *args, **kwargs: "awaiting_onboarding"),
        ):
            resp = self.client.post(
                "/api/access/request/status",
                data=json.dumps({"request_code": "alice~CODE123"}),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 200)
            self.assertIsNone(data.get("initial_password"))

    def test_status_reveals_initial_password_with_flag(self):
        rows = {
            "SELECT status": {
                "status": "awaiting_onboarding",
                "username": "alice",
                "initial_password": "temp-pass",
                "initial_password_revealed_at": None,
                "email_verified_at": None,
            }
        }
        with (
            mock.patch.object(ar, "connect", lambda: dummy_connect(rows)),
            mock.patch.object(ar, "_advance_status", lambda *args, **kwargs: "awaiting_onboarding"),
        ):
            resp = self.client.post(
                "/api/access/request/status",
                data=json.dumps({"request_code": "alice~CODE123", "reveal_initial_password": True}),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 200)
            self.assertEqual(data.get("initial_password"), "temp-pass")

    def test_onboarding_payload_includes_vaultwarden_grandfathered(self):
        rows = {
            "SELECT approval_flags": {
                "approval_flags": ["vaultwarden_grandfathered"],
                "contact_email": "alice@example.com",
            }
        }
        conn = DummyConn(rows_by_query=rows)
        with (
            mock.patch.object(ar, "_completed_onboarding_steps", lambda *args, **kwargs: set()),
            mock.patch.object(ar, "_password_rotation_requested", lambda *args, **kwargs: False),
        ):
            payload = ar._onboarding_payload(conn, "alice~CODE123", "alice")
            vault = payload.get("vaultwarden") or {}
            self.assertTrue(vault.get("grandfathered"))
            self.assertEqual(vault.get("recovery_email"), "alice@example.com")

    def test_retry_request_fallback_updates_tasks(self):
        rows = {"SELECT status": {"status": "accounts_building"}}
        conn = DummyConn(rows_by_query=rows)

        @contextmanager
        def connect_override():
            yield conn

        with (
            mock.patch.object(ar.ariadne_client, "enabled", lambda: False),
            mock.patch.object(ar, "connect", lambda: connect_override()),
            mock.patch.object(ar, "provision_access_request", lambda *_args, **_kwargs: None),
        ):
            resp = self.client.post(
                "/api/access/request/retry",
                data=json.dumps({"request_code": "alice~CODE123"}),
                content_type="application/json",
            )
            data = resp.get_json()
            self.assertEqual(resp.status_code, 200)
            self.assertTrue(data.get("ok"))
            self.assertTrue(any("provision_attempted_at" in query for query, _params in conn.executed))

    def test_retry_request_rejects_non_retryable(self):
        rows = {"SELECT status": {"status": "ready"}}

        with (
            mock.patch.object(ar.ariadne_client, "enabled", lambda: False),
            mock.patch.object(ar, "connect", lambda: dummy_connect(rows)),
        ):
            resp = self.client.post(
                "/api/access/request/retry",
                data=json.dumps({"request_code": "alice~CODE123"}),
                content_type="application/json",
            )
            self.assertEqual(resp.status_code, 409)
@ -3,10 +3,10 @@
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <title>bstein.dev | Titan Lab</title>
+    <title>bstein.dev | Titan Lab Portfolio</title>
    <meta
      name="description"
-      content="Live status for the Titan Lab clusters powering bstein.dev."
+      content="Portfolio and live status for the Titan Lab clusters powering bstein.dev."
    />
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
  </head>
324
frontend/package-lock.json
generated
@ -1,17 +1,15 @@
{
-  "name": "atlas-portal",
+  "name": "bstein-portfolio",
-  "version": "0.1.1",
+  "version": "0.1.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
-      "name": "atlas-portal",
+      "name": "bstein-portfolio",
-      "version": "0.1.1",
+      "version": "0.1.0",
      "dependencies": {
        "axios": "^1.6.7",
-        "keycloak-js": "^26.2.2",
        "mermaid": "^10.9.1",
-        "qrcode": "^1.5.4",
        "vue": "^3.4.21",
        "vue-router": "^4.3.2"
      },
@ -962,30 +960,6 @@
      "integrity": "sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==",
      "license": "MIT"
    },
-    "node_modules/ansi-regex": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
-      "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
-      "license": "MIT",
-      "engines": {
-        "node": ">=8"
-      }
-    },
-    "node_modules/ansi-styles": {
-      "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
-      "license": "MIT",
-      "dependencies": {
-        "color-convert": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=8"
-      },
-      "funding": {
-        "url": "https://github.com/chalk/ansi-styles?sponsor=1"
-      }
-    },
    "node_modules/asynckit": {
      "version": "0.4.0",
      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
@ -1016,15 +990,6 @@
        "node": ">= 0.4"
      }
    },
-    "node_modules/camelcase": {
-      "version": "5.3.1",
-      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
-      "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
-      "license": "MIT",
-      "engines": {
-        "node": ">=6"
-      }
-    },
    "node_modules/character-entities": {
      "version": "2.0.2",
      "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz",
@ -1035,35 +1000,6 @@
|
|||||||
"url": "https://github.com/sponsors/wooorm"
|
"url": "https://github.com/sponsors/wooorm"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/cliui": {
|
|
||||||
"version": "6.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
|
|
||||||
"integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
|
|
||||||
"license": "ISC",
|
|
||||||
"dependencies": {
|
|
||||||
"string-width": "^4.2.0",
|
|
||||||
"strip-ansi": "^6.0.0",
|
|
||||||
"wrap-ansi": "^6.2.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/color-convert": {
|
|
||||||
"version": "2.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
|
||||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"color-name": "~1.1.4"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=7.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/color-name": {
|
|
||||||
"version": "1.1.4",
|
|
||||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
|
||||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/combined-stream": {
|
"node_modules/combined-stream": {
|
||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
@ -1595,15 +1531,6 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/decamelize": {
|
|
||||||
"version": "1.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
|
|
||||||
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/decode-named-character-reference": {
|
"node_modules/decode-named-character-reference": {
|
||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz",
|
||||||
@ -1653,12 +1580,6 @@
|
|||||||
"node": ">=0.3.1"
|
"node": ">=0.3.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/dijkstrajs": {
|
|
||||||
"version": "1.0.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/dijkstrajs/-/dijkstrajs-1.0.3.tgz",
|
|
||||||
"integrity": "sha512-qiSlmBq9+BCdCA/L46dw8Uy93mloxsPSbwnm5yrKn2vMPiy8KyAskTF6zuV/j5BMsmOGZDPs7KjU+mjb670kfA==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/dompurify": {
|
"node_modules/dompurify": {
|
||||||
"version": "3.3.1",
|
"version": "3.3.1",
|
||||||
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz",
|
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz",
|
||||||
@ -1688,12 +1609,6 @@
|
|||||||
"integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==",
|
"integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==",
|
||||||
"license": "EPL-2.0"
|
"license": "EPL-2.0"
|
||||||
},
|
},
|
||||||
"node_modules/emoji-regex": {
|
|
||||||
"version": "8.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
|
||||||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/entities": {
|
"node_modules/entities": {
|
||||||
"version": "4.5.0",
|
"version": "4.5.0",
|
||||||
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
|
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
|
||||||
@ -1796,19 +1711,6 @@
|
|||||||
"integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==",
|
"integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/find-up": {
|
|
||||||
"version": "4.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
|
|
||||||
"integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"locate-path": "^5.0.0",
|
|
||||||
"path-exists": "^4.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/follow-redirects": {
|
"node_modules/follow-redirects": {
|
||||||
"version": "1.15.11",
|
"version": "1.15.11",
|
||||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
|
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
|
||||||
@ -1869,15 +1771,6 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/get-caller-file": {
|
|
||||||
"version": "2.0.5",
|
|
||||||
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
|
|
||||||
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
|
|
||||||
"license": "ISC",
|
|
||||||
"engines": {
|
|
||||||
"node": "6.* || 8.* || >= 10.*"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/get-intrinsic": {
|
"node_modules/get-intrinsic": {
|
||||||
"version": "1.3.0",
|
"version": "1.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
|
||||||
@ -1987,15 +1880,6 @@
|
|||||||
"node": ">=12"
|
"node": ">=12"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/is-fullwidth-code-point": {
|
|
||||||
"version": "3.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
|
|
||||||
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/katex": {
|
"node_modules/katex": {
|
||||||
"version": "0.16.27",
|
"version": "0.16.27",
|
||||||
"resolved": "https://registry.npmjs.org/katex/-/katex-0.16.27.tgz",
|
"resolved": "https://registry.npmjs.org/katex/-/katex-0.16.27.tgz",
|
||||||
@ -2021,15 +1905,6 @@
|
|||||||
"node": ">= 12"
|
"node": ">= 12"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/keycloak-js": {
|
|
||||||
"version": "26.2.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/keycloak-js/-/keycloak-js-26.2.2.tgz",
|
|
||||||
"integrity": "sha512-ug7pNZ1xNkd7PPkerOJCEU2VnUhS7CYStDOCFJgqCNQ64h53ppxaKrh4iXH0xM8hFu5b1W6e6lsyYWqBMvaQFg==",
|
|
||||||
"license": "Apache-2.0",
|
|
||||||
"workspaces": [
|
|
||||||
"test"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"node_modules/khroma": {
|
"node_modules/khroma": {
|
||||||
"version": "2.1.0",
|
"version": "2.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz",
|
||||||
@ -2050,18 +1925,6 @@
|
|||||||
"integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==",
|
"integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/locate-path": {
|
|
||||||
"version": "5.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
|
|
||||||
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"p-locate": "^4.1.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/lodash-es": {
|
"node_modules/lodash-es": {
|
||||||
"version": "4.17.21",
|
"version": "4.17.21",
|
||||||
"resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz",
|
"resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz",
|
||||||
@ -2653,66 +2516,12 @@
|
|||||||
"integrity": "sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==",
|
"integrity": "sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/p-limit": {
|
|
||||||
"version": "2.3.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
|
|
||||||
"integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"p-try": "^2.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/p-locate": {
|
|
||||||
"version": "4.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
|
|
||||||
"integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"p-limit": "^2.2.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/p-try": {
|
|
||||||
"version": "2.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
|
|
||||||
"integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/path-exists": {
|
|
||||||
"version": "4.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
|
|
||||||
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/picocolors": {
|
"node_modules/picocolors": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||||
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
"node_modules/pngjs": {
|
|
||||||
"version": "5.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/pngjs/-/pngjs-5.0.0.tgz",
|
|
||||||
"integrity": "sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10.13.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/postcss": {
|
"node_modules/postcss": {
|
||||||
"version": "8.5.6",
|
"version": "8.5.6",
|
||||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
|
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
|
||||||
@ -2747,38 +2556,6 @@
|
|||||||
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/qrcode": {
|
|
||||||
"version": "1.5.4",
|
|
||||||
"resolved": "https://registry.npmjs.org/qrcode/-/qrcode-1.5.4.tgz",
|
|
||||||
"integrity": "sha512-1ca71Zgiu6ORjHqFBDpnSMTR2ReToX4l1Au1VFLyVeBTFavzQnv5JxMFr3ukHVKpSrSA2MCk0lNJSykjUfz7Zg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"dijkstrajs": "^1.0.1",
|
|
||||||
"pngjs": "^5.0.0",
|
|
||||||
"yargs": "^15.3.1"
|
|
||||||
},
|
|
||||||
"bin": {
|
|
||||||
"qrcode": "bin/qrcode"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10.13.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/require-directory": {
|
|
||||||
"version": "2.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
|
||||||
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/require-main-filename": {
|
|
||||||
"version": "2.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
|
|
||||||
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
|
|
||||||
"license": "ISC"
|
|
||||||
},
|
|
||||||
"node_modules/robust-predicates": {
|
"node_modules/robust-predicates": {
|
||||||
"version": "3.0.2",
|
"version": "3.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz",
|
||||||
@ -2851,12 +2628,6 @@
|
|||||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/set-blocking": {
|
|
||||||
"version": "2.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
|
|
||||||
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
|
|
||||||
"license": "ISC"
|
|
||||||
},
|
|
||||||
"node_modules/source-map-js": {
|
"node_modules/source-map-js": {
|
||||||
"version": "1.2.1",
|
"version": "1.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
||||||
@ -2866,32 +2637,6 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/string-width": {
|
|
||||||
"version": "4.2.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
|
||||||
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"emoji-regex": "^8.0.0",
|
|
||||||
"is-fullwidth-code-point": "^3.0.0",
|
|
||||||
"strip-ansi": "^6.0.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/strip-ansi": {
|
|
||||||
"version": "6.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
|
||||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"ansi-regex": "^5.0.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/stylis": {
|
"node_modules/stylis": {
|
||||||
"version": "4.3.6",
|
"version": "4.3.6",
|
||||||
"resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz",
|
"resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz",
|
||||||
@ -3052,67 +2797,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.5.0.tgz",
|
"resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.5.0.tgz",
|
||||||
"integrity": "sha512-RiMReJrTAiA+mBjGONMnjVDP2u3p9R1vkcGz6gDIrOMT3oGuYwX2WRMYI9ipkphSuE5XKEhydbhNEJh4NY9mlw==",
|
"integrity": "sha512-RiMReJrTAiA+mBjGONMnjVDP2u3p9R1vkcGz6gDIrOMT3oGuYwX2WRMYI9ipkphSuE5XKEhydbhNEJh4NY9mlw==",
|
||||||
"license": "Apache-2.0"
|
"license": "Apache-2.0"
|
||||||
},
|
|
||||||
"node_modules/which-module": {
|
|
||||||
"version": "2.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz",
|
|
||||||
"integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==",
|
|
||||||
"license": "ISC"
|
|
||||||
},
|
|
||||||
"node_modules/wrap-ansi": {
|
|
||||||
"version": "6.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
|
|
||||||
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"ansi-styles": "^4.0.0",
|
|
||||||
"string-width": "^4.1.0",
|
|
||||||
"strip-ansi": "^6.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/y18n": {
|
|
||||||
"version": "4.0.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
|
|
||||||
"integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
|
|
||||||
"license": "ISC"
|
|
||||||
},
|
|
||||||
"node_modules/yargs": {
|
|
||||||
"version": "15.4.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
|
|
||||||
"integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"cliui": "^6.0.0",
|
|
||||||
"decamelize": "^1.2.0",
|
|
||||||
"find-up": "^4.1.0",
|
|
||||||
"get-caller-file": "^2.0.1",
|
|
||||||
"require-directory": "^2.1.1",
|
|
||||||
"require-main-filename": "^2.0.0",
|
|
||||||
"set-blocking": "^2.0.0",
|
|
||||||
"string-width": "^4.2.0",
|
|
||||||
"which-module": "^2.0.0",
|
|
||||||
"y18n": "^4.0.0",
|
|
||||||
"yargs-parser": "^18.1.2"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/yargs-parser": {
|
|
||||||
"version": "18.1.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
|
|
||||||
"integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
|
|
||||||
"license": "ISC",
|
|
||||||
"dependencies": {
|
|
||||||
"camelcase": "^5.0.0",
|
|
||||||
"decamelize": "^1.2.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1,19 +1,16 @@
 {
-"name": "atlas-portal",
-"version": "0.1.1",
+"name": "bstein-portfolio",
+"version": "0.1.0",
 "private": true,
 "type": "module",
 "scripts": {
 "dev": "vite",
-"prebuild": "node scripts/build_media_manifest.mjs",
 "build": "vite build",
 "preview": "vite preview"
 },
 "dependencies": {
 "axios": "^1.6.7",
-"keycloak-js": "^26.2.2",
 "mermaid": "^10.9.1",
-"qrcode": "^1.5.4",
 "vue": "^3.4.21",
 "vue-router": "^4.3.2"
 },
@@ -1,13 +0,0 @@
-<!doctype html>
-<html lang="en">
-<head>
-<meta charset="UTF-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0" />
-<title>SSO</title>
-</head>
-<body>
-<script>
-parent.postMessage(location.href, location.origin);
-</script>
-</body>
-</html>
@@ -1,63 +0,0 @@
-import { promises as fs } from "fs";
-import path from "path";
-
-const SOURCE = path.resolve("..", "media", "onboarding");
-const ROOT = path.resolve("public", "media", "onboarding");
-const MANIFEST = path.join(ROOT, "manifest.json");
-const EXTENSIONS = new Set([".png", ".jpg", ".jpeg", ".webp"]);
-
-async function walk(dir, base = "") {
-const entries = await fs.readdir(dir, { withFileTypes: true });
-const files = [];
-for (const entry of entries) {
-const full = path.join(dir, entry.name);
-const rel = path.join(base, entry.name);
-if (entry.isDirectory()) {
-files.push(...(await walk(full, rel)));
-} else if (EXTENSIONS.has(path.extname(entry.name).toLowerCase())) {
-files.push(rel.replace(/\\/g, "/"));
-}
-}
-return files;
-}
-
-async function ensureDir(dir) {
-await fs.mkdir(dir, { recursive: true });
-}
-
-async function exists(dir) {
-try {
-await fs.access(dir);
-return true;
-} catch {
-return false;
-}
-}
-
-async function main() {
-try {
-const sourceExists = await exists(SOURCE);
-const rootExists = await exists(ROOT);
-const source = sourceExists ? SOURCE : rootExists ? ROOT : null;
-await ensureDir(ROOT);
-const files = source ? await walk(source) : [];
-if (source && source !== ROOT) {
-for (const file of files) {
-const src = path.join(source, file);
-const dest = path.join(ROOT, file);
-await ensureDir(path.dirname(dest));
-await fs.copyFile(src, dest);
-}
-}
-const payload = {
-generated_at: new Date().toISOString(),
-files: files.sort(),
-};
-await fs.writeFile(MANIFEST, JSON.stringify(payload, null, 2));
-} catch (err) {
-console.error("Failed to build onboarding media manifest", err);
-process.exitCode = 1;
-}
-}
-
-await main();
@@ -14,7 +14,7 @@
 </template>
 
 <script setup>
-import { onMounted, onUnmounted, ref } from "vue";
+import { onMounted, ref } from "vue";
 import TopBar from "./components/TopBar.vue";
 import { fallbackHardware, fallbackServices, fallbackNetwork, fallbackMetrics } from "./data/sample.js";
 
@@ -25,51 +25,26 @@ const networkData = ref(fallbackNetwork());
 const metricsData = ref(fallbackMetrics());
 
 const statusLoading = ref(true);
-const statusFetching = ref(false);
 const statusError = ref("");
-let pollTimerId = null;
 
 async function refreshLabStatus() {
-if (statusFetching.value) return;
-statusFetching.value = true;
-const controller = new AbortController();
-const timeoutId = window.setTimeout(() => controller.abort(), 10000);
 try {
-const resp = await fetch("/api/lab/status", {
-headers: { Accept: "application/json" },
-signal: controller.signal,
-});
+const resp = await fetch("/api/lab/status", { headers: { Accept: "application/json" } });
 if (!resp.ok) throw new Error(`status ${resp.status}`);
 labStatus.value = await resp.json();
 statusError.value = "";
 } catch (err) {
 labStatus.value = null;
-if (err?.name === "AbortError") {
-statusError.value = "Live data timed out";
-} else {
-statusError.value = "Live data unavailable";
-}
+statusError.value = "Live data unavailable";
 } finally {
-window.clearTimeout(timeoutId);
 statusLoading.value = false;
-statusFetching.value = false;
-scheduleNextPoll();
 }
 }
 
 onMounted(() => {
 refreshLabStatus();
+window.setInterval(refreshLabStatus, 30000);
 });
 
-onUnmounted(() => {
-if (pollTimerId) window.clearTimeout(pollTimerId);
-});
-
-function scheduleNextPoll() {
-if (pollTimerId) window.clearTimeout(pollTimerId);
-const delayMs = labStatus.value ? 30000 : 8000;
-pollTimerId = window.setTimeout(refreshLabStatus, delayMs);
-}
-
 </script>
 
 <style scoped>
@@ -49,12 +49,6 @@ p {
 padding: 48px 22px 72px;
 }
 
-.page > section + section {
+section + section {
 margin-top: 32px;
 }
-
-@media (max-width: 720px) {
-.page {
-padding: 24px 16px 56px;
-}
-}
@@ -29,31 +29,6 @@
 font-size: 13px;
 }
 
-.pill-ok {
-border-color: rgba(120, 255, 160, 0.35);
-color: rgba(170, 255, 215, 0.92);
-}
-
-.pill-info {
-border-color: rgba(120, 180, 255, 0.42);
-color: rgba(185, 225, 255, 0.92);
-}
-
-.pill-warn {
-border-color: rgba(255, 220, 120, 0.35);
-color: rgba(255, 230, 170, 0.92);
-}
-
-.pill-wait {
-border-color: rgba(255, 170, 80, 0.42);
-color: rgba(255, 210, 170, 0.92);
-}
-
-.pill-bad {
-border-color: rgba(255, 96, 96, 0.45);
-color: rgba(255, 170, 170, 0.92);
-}
-
 .card {
 background: var(--bg-panel);
 border: 1px solid var(--border);
@@ -1,125 +0,0 @@
-import Keycloak from "keycloak-js";
-import { reactive } from "vue";
-
-export const auth = reactive({
-ready: false,
-enabled: false,
-authenticated: false,
-username: "",
-email: "",
-groups: [],
-loginUrl: "",
-resetUrl: "",
-accountUrl: "",
-accountPasswordUrl: "",
-token: "",
-});
-
-let keycloak = null;
-let initPromise = null;
-
-function normalizeGroups(groups) {
-if (!Array.isArray(groups)) return [];
-return groups
-.filter((g) => typeof g === "string")
-.map((g) => g.replace(/^\//, ""))
-.filter(Boolean);
-}
-
-function updateFromToken() {
-const parsed = keycloak?.tokenParsed || {};
-auth.authenticated = Boolean(keycloak?.authenticated);
-auth.token = keycloak?.token || "";
-auth.username = parsed.preferred_username || "";
-auth.email = parsed.email || "";
-auth.groups = normalizeGroups(parsed.groups);
-}
-
-export async function initAuth() {
-if (initPromise) return initPromise;
-
-initPromise = (async () => {
-try {
-const resp = await fetch("/api/auth/config", { headers: { Accept: "application/json" } });
-if (!resp.ok) throw new Error(`auth config ${resp.status}`);
-const cfg = await resp.json();
-auth.enabled = Boolean(cfg.enabled);
-auth.loginUrl = cfg.login_url || "";
-auth.resetUrl = cfg.reset_url || "";
-auth.accountUrl = cfg.account_url || "";
-auth.accountPasswordUrl = cfg.account_password_url || "";
-
-if (!auth.enabled) return;
-
-keycloak = new Keycloak({
-url: cfg.url,
-realm: cfg.realm,
-clientId: cfg.client_id,
-});
-
-const authenticated = await keycloak.init({
-onLoad: "check-sso",
-pkceMethod: "S256",
-silentCheckSsoRedirectUri: `${window.location.origin}/silent-check-sso.html`,
-checkLoginIframe: true,
-scope: "openid profile email",
-});
-
-auth.authenticated = authenticated;
-updateFromToken();
-
-keycloak.onAuthSuccess = () => updateFromToken();
-keycloak.onAuthLogout = () => updateFromToken();
-keycloak.onAuthRefreshSuccess = () => updateFromToken();
-keycloak.onTokenExpired = () => {
-keycloak
-.updateToken(30)
-.then(() => updateFromToken())
-.catch(() => updateFromToken());
-};
-
-window.setInterval(() => {
-if (!keycloak?.authenticated) return;
-keycloak.updateToken(60).then(updateFromToken).catch(() => {});
-}, 30_000);
-} catch {
-auth.enabled = false;
-} finally {
-auth.ready = true;
-}
-})();
-
-return initPromise;
-}
-
-export async function login(
-redirectPath = window.location.pathname + window.location.search + window.location.hash,
-loginHint = "",
-) {
-if (!keycloak) return;
-const redirectUri = new URL(redirectPath, window.location.origin).toString();
-const options = { redirectUri };
-if (typeof loginHint === "string" && loginHint.trim()) {
-options.loginHint = loginHint.trim();
-}
-await keycloak.login(options);
-}
-
-export async function logout() {
-if (!keycloak) return;
-await keycloak.logout({ redirectUri: window.location.origin });
-}
-
-export async function authFetch(url, options = {}) {
-const headers = new Headers(options.headers || {});
-if (keycloak?.authenticated) {
-try {
-await keycloak.updateToken(30);
-updateFromToken();
-} catch {
-// ignore refresh failures; the API will return 401 and the UI can prompt for login
-}
-}
-if (auth.token) headers.set("Authorization", `Bearer ${auth.token}`);
-return fetch(url, { ...options, headers });
-}
@@ -1,7 +1,7 @@
 <template>
 <header class="hero card glass">
 <div class="eyebrow">
-<span class="pill">Titan Lab</span>
+<span class="pill">Portfolio + Titan Lab</span>
 <span class="mono accent">atlas · oceanus · nextcloud-ready</span>
 </div>
 <h1>{{ title }}</h1>
@ -39,8 +39,6 @@ const svgContent = ref("");
|
|||||||
const renderKey = ref(props.cardId || `mermaid-${Math.random().toString(36).slice(2)}`);
|
const renderKey = ref(props.cardId || `mermaid-${Math.random().toString(36).slice(2)}`);
|
||||||
const isOpen = ref(false);
|
const isOpen = ref(false);
|
||||||
let initialized = false;
|
let initialized = false;
|
||||||
let scheduledHandle = null;
|
|
||||||
let scheduledKind = "";
|
|
||||||
|
|
||||||
const renderDiagram = async () => {
|
const renderDiagram = async () => {
|
||||||
if (!props.diagram) return;
|
if (!props.diagram) return;
|
||||||
@ -67,38 +65,10 @@ const renderDiagram = async () => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
function cancelScheduledRender() {
|
onMounted(renderDiagram);
|
||||||
if (!scheduledHandle) return;
|
|
||||||
if (scheduledKind === "idle" && window.cancelIdleCallback) {
|
|
||||||
window.cancelIdleCallback(scheduledHandle);
|
|
||||||
} else {
|
|
||||||
window.clearTimeout(scheduledHandle);
|
|
||||||
}
|
|
||||||
scheduledHandle = null;
|
|
||||||
scheduledKind = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
function scheduleRenderDiagram() {
|
|
||||||
cancelScheduledRender();
|
|
||||||
if (!props.diagram) return;
|
|
||||||
const runner = () => {
|
|
||||||
scheduledHandle = null;
|
|
||||||
scheduledKind = "";
|
|
||||||
renderDiagram();
|
|
||||||
};
|
|
||||||
if (window.requestIdleCallback) {
|
|
||||||
scheduledKind = "idle";
|
|
||||||
scheduledHandle = window.requestIdleCallback(runner, { timeout: 1500 });
|
|
||||||
} else {
|
|
||||||
scheduledKind = "timeout";
|
|
||||||
scheduledHandle = window.setTimeout(runner, 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
onMounted(scheduleRenderDiagram);
|
|
||||||
watch(
|
watch(
|
||||||
() => props.diagram,
|
() => props.diagram,
|
||||||
() => scheduleRenderDiagram()
|
() => renderDiagram()
|
||||||
);
|
);
|
||||||
|
|
||||||
const onKeyDown = (event) => {
|
const onKeyDown = (event) => {
|
||||||
@ -106,7 +76,6 @@ const onKeyDown = (event) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const open = () => {
|
const open = () => {
|
||||||
if (!svgContent.value) scheduleRenderDiagram();
|
|
||||||
isOpen.value = true;
|
isOpen.value = true;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -125,7 +94,6 @@ watch(isOpen, (value) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
onUnmounted(() => {
|
onUnmounted(() => {
|
||||||
cancelScheduledRender();
|
|
||||||
document.body.style.overflow = "";
|
document.body.style.overflow = "";
|
||||||
window.removeEventListener("keydown", onKeyDown);
|
window.removeEventListener("keydown", onKeyDown);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -12,32 +12,19 @@
 <nav class="links">
 <RouterLink to="/" class="nav-link">Home</RouterLink>
 <RouterLink to="/about" class="nav-link">About</RouterLink>
-<template v-if="auth.enabled">
-<template v-if="auth.authenticated">
-<RouterLink to="/apps" class="nav-link">Apps</RouterLink>
-<RouterLink to="/account" class="nav-link">Account</RouterLink>
-<button class="nav-link button" type="button" @click="doLogout">Logout</button>
-</template>
-<template v-else>
-<button class="nav-link button" type="button" @click="doLogin">Login</button>
-<RouterLink to="/request-access" class="nav-link">Request Access</RouterLink>
-<a v-if="auth.resetUrl" :href="auth.resetUrl" class="nav-link" target="_blank" rel="noreferrer">Reset Password</a>
-</template>
-</template>
+<a href="https://cloud.bstein.dev" class="nav-link strong" target="_blank" rel="noreferrer">Cloud</a>
+<a href="https://sso.bstein.dev" class="nav-link" target="_blank" rel="noreferrer">Login</a>
+<a href="https://sso.bstein.dev/realms/master/protocol/openid-connect/registrations" class="nav-link" target="_blank" rel="noreferrer">Sign Up</a>
+<a href="https://sso.bstein.dev/realms/master/login-actions/reset-credentials" class="nav-link" target="_blank" rel="noreferrer">Reset</a>
 </nav>
 </header>
 </template>
 
 <script setup>
 import { useRouter, RouterLink } from "vue-router";
-import { auth, login, logout } from "@/auth";
 
 const router = useRouter();
 const goAbout = () => router.push("/about");
 
-const doLogin = () => login();
-const doLogout = () => logout();
 </script>
 
 <style scoped>
@@ -64,8 +51,6 @@ const doLogout = () => logout();
 .avatar {
 width: 44px;
 height: 44px;
-aspect-ratio: 1 / 1;
-flex-shrink: 0;
 border-radius: 50%;
 border: 1px solid rgba(255, 255, 255, 0.14);
 overflow: hidden;
@@ -78,8 +63,6 @@ const doLogout = () => logout();
 width: 100%;
 height: 100%;
 object-fit: cover;
-display: block;
-border-radius: 50%;
 }
 
 .name {
@@ -108,11 +91,6 @@ const doLogout = () => logout();
 border: 1px solid transparent;
 }
 
-.button {
-background: transparent;
-cursor: pointer;
-}
-
 .nav-link.strong {
 border-color: rgba(255, 255, 255, 0.14);
 color: var(--accent-cyan);
@@ -43,221 +43,109 @@ export function fallbackServices() {
 return {
 services: [
 {
-name: "Keycloak",
-icon: "🔐",
-category: "identity",
-summary: "Unified accounts for Single Sign-On. OIDC & LDAP",
-link: "https://sso.bstein.dev",
-},
-{
-name: "Nextcloud",
-icon: "☁️",
+name: "Nextcloud Hub",
 category: "productivity",
-summary: "Storage, office, and mail apps for bstein.dev users.",
+summary: "Core user hub: storage, office, bstein.dev mail, & more apps.",
 link: "https://cloud.bstein.dev",
 },
 {
-name: "Outline",
-icon: "📝",
-category: "productivity",
-summary: "Team docs and knowledge base for Atlas users.",
-link: "https://notes.bstein.dev",
-},
-{
-name: "Planka",
-icon: "🧭",
-category: "productivity",
-summary: "Kanban planning boards for tasks and projects.",
-link: "https://tasks.bstein.dev",
+name: "Jellyfin",
+category: "media",
+summary: "Family Movies hosted on titan-22 for GPU acceleration.",
+link: "https://stream.bstein.dev",
 },
 {
 name: "VaultWarden",
-icon: "🔑",
 category: "security",
 summary: "Open Source & private password manager.",
 link: "https://vault.bstein.dev",
 },
 {
-name: "Vault",
-icon: "🗝️",
-category: "security",
-summary: "Secrets for secure environment injection.",
-link: "https://secret.bstein.dev",
-},
-{
-name: "Element",
-icon: "💬",
-category: "comms",
-summary: "Discord style communication system for users.",
-link: "https://live.bstein.dev",
-host: "live.bstein.dev",
-status: "live",
-},
-{
-name: "Matrix (Synapse)",
-icon: "🧩",
-category: "comms",
-summary: "Encrypted chat backend with bot integrations.",
-link: "https://live.bstein.dev",
-host: "matrix.live.bstein.dev",
-status: "live",
-},
-{
-name: "LiveKit",
-icon: "🎥",
-category: "comms",
-summary: "Video and Voice SFU calls integrated into Element.",
-link: "https://live.bstein.dev",
-host: "kit.live.bstein.dev",
-status: "live",
-},
-{
-name: "Coturn",
-icon: "📡",
-category: "comms",
-summary: "Integrated VoIP server for reliable calls.",
-link: "https://live.bstein.dev",
-host: "turn.live.bstein.dev",
-status: "live",
-},
-{
-name: "Mailu",
-icon: "✉️",
-category: "mail",
-summary: "Self-hosted mailserver with accepted outgoing mail.",
-link: "https://mail.bstein.dev",
-host: "mail.bstein.dev",
-},
-{
-name: "Jellyfin",
-icon: "🎬",
-category: "streaming",
-summary: "Family movies streaming server with GPU acceleration.",
-link: "https://stream.bstein.dev",
-},
-{
-name: "Pegasus",
-icon: "📤",
-category: "streaming",
-summary: "Upload pipeline feeding the Jellyfin library.",
-link: "https://pegasus.bstein.dev",
-},
-{
-name: "Wger",
-icon: "🏋️",
-category: "health",
-summary: "Workout + nutrition tracking with the wger mobile app.",
-link: "https://health.bstein.dev",
-},
-{
-name: "Actual Budget",
-icon: "💸",
-category: "finance",
-summary: "Local-first budgeting with Keycloak SSO.",
-link: "https://budget.bstein.dev",
-},
-{
-name: "Firefly III",
-icon: "💵",
-category: "finance",
-summary: "Personal finance manager with Abacus mobile sync.",
-link: "https://money.bstein.dev",
-},
-{
-name: "Grafana",
-icon: "📈",
-category: "observability",
-summary: "Atlas metrics and dashboards for the lab.",
-link: "https://metrics.bstein.dev",
-},
-{
-name: "OpenSearch",
-icon: "🔎",
-category: "observability",
-summary: "Centralized Atlas services logs. Powered by Fluent Bit.",
-link: "https://logs.bstein.dev",
+name: "Keycloak",
+category: "identity",
+summary: "Unified accounts for Single Sign-On.",
+link: "https://sso.bstein.dev",
 },
 {
 name: "Gitea",
-icon: "🍵",
 category: "dev",
 summary: "Source control for dev projects.",
 link: "https://scm.bstein.dev",
 },
 {
 name: "Jenkins",
-icon: "🏗️",
 category: "dev",
 summary: "Continuous integration build pipelines.",
 link: "https://ci.bstein.dev",
 },
 {
 name: "Harbor",
-icon: "⚓",
 category: "dev",
 summary: "Artifact Registry for build artifacts.",
 link: "https://registry.bstein.dev",
 },
 {
 name: "Flux",
-icon: "🧲",
 category: "dev",
 summary: "GitOps UI for flux continuous deployment drift prevention.",
 link: "https://cd.bstein.dev",
 },
+{
+name: "Vault",
+category: "dev",
+summary: "Secrets for secure environment injection.",
+link: "https://secret.bstein.dev",
+},
+{
+name: "Grafana",
+category: "observability",
+summary: "Health metrics for atlas and eventually oceanus.",
+link: "https://metrics.bstein.dev",
+},
+{
+name: "Pegasus",
+category: "media ingest",
+summary: "Uploading service to inject jellyfin media.",
+link: "https://pegasus.bstein.dev",
+},
 {
 name: "Monero",
-icon: "🪙",
 category: "crypto",
-summary: "Private monero node for monero wallets.",
+summary: "Private monero node for monero wallets",
 link: "/monero",
 host: "monerod.crypto.svc.cluster.local:18081",
 },
 {
-name: "Oceanus",
-icon: "🌊",
-category: "crypto",
-summary: "Dedicated SUI Validator - Planned.",
-link: "#",
-host: "oceanus",
-status: "planned",
+name: "Jitsi",
+category: "conferencing",
+summary: "Video Conferencing - Planned",
+link: "https://meet.bstein.dev",
+status: "degraded",
 },
 {
 name: "AI Chat",
-icon: "🤖",
 category: "ai",
-summary: "Customized LLM for the titan home lab.",
-link: "/ai/chat",
+summary: "LLM Chat - Planned",
+link: "/ai",
 host: "chat.ai.bstein.dev",
-status: "live",
+status: "planned",
 },
 {
-name: "AI Vision",
-icon: "👁️",
+name: "AI Image",
 category: "ai",
 summary: "Visualization tool - Planned",
-link: "/ai/roadmap",
-host: "vision.ai.bstein.dev",
+link: "/ai",
+host: "draw.ai.bstein.dev",
 status: "planned",
 },
 {
 name: "AI Speech",
-icon: "🎙️",
 category: "ai",
 summary: "Live Translation - Planned",
-link: "/ai/roadmap",
+link: "/ai",
 host: "talk.ai.bstein.dev",
 status: "planned",
 },
-{
-name: "AI Translation",
-icon: "🌐",
-category: "ai",
-summary: "Inline translation (roadmap) for comms.",
-link: "/ai/roadmap",
-host: "translate.ai.bstein.dev",
-status: "planned",
-},
 ],
 };
 }
@ -3,10 +3,7 @@ import App from "./App.vue";
|
|||||||
import router from "./router";
|
import router from "./router";
|
||||||
import "./assets/base.css";
|
import "./assets/base.css";
|
||||||
import "./assets/theme.css";
|
import "./assets/theme.css";
|
||||||
import { initAuth } from "./auth";
|
|
||||||
|
|
||||||
const app = createApp(App);
|
const app = createApp(App);
|
||||||
app.use(router);
|
app.use(router);
|
||||||
app.mount("#app");
|
app.mount("#app");
|
||||||
|
|
||||||
initAuth();
|
|
||||||
|
|||||||
@ -2,25 +2,14 @@ import { createRouter, createWebHistory } from "vue-router";
|
|||||||
import HomeView from "./views/HomeView.vue";
|
import HomeView from "./views/HomeView.vue";
|
||||||
import AboutView from "./views/AboutView.vue";
|
import AboutView from "./views/AboutView.vue";
|
||||||
import AiView from "./views/AiView.vue";
|
import AiView from "./views/AiView.vue";
|
||||||
import AiPlanView from "./views/AiPlanView.vue";
|
|
||||||
import MoneroView from "./views/MoneroView.vue";
|
import MoneroView from "./views/MoneroView.vue";
|
||||||
import AppsView from "./views/AppsView.vue";
|
|
||||||
import AccountView from "./views/AccountView.vue";
|
|
||||||
import RequestAccessView from "./views/RequestAccessView.vue";
|
|
||||||
import OnboardingView from "./views/OnboardingView.vue";
|
|
||||||
|
|
||||||
export default createRouter({
|
export default createRouter({
|
||||||
history: createWebHistory(),
|
history: createWebHistory(),
|
||||||
routes: [
|
routes: [
|
||||||
{ path: "/", name: "home", component: HomeView },
|
{ path: "/", name: "home", component: HomeView },
|
||||||
{ path: "/about", name: "about", component: AboutView },
|
{ path: "/about", name: "about", component: AboutView },
|
||||||
{ path: "/ai", redirect: "/ai/chat" },
|
{ path: "/ai", name: "ai", component: AiView },
|
||||||
{ path: "/ai/chat", name: "ai-chat", component: AiView },
|
|
||||||
{ path: "/ai/roadmap", name: "ai-roadmap", component: AiPlanView },
|
|
||||||
{ path: "/monero", name: "monero", component: MoneroView },
|
{ path: "/monero", name: "monero", component: MoneroView },
|
||||||
{ path: "/apps", name: "apps", component: AppsView },
|
|
||||||
{ path: "/account", name: "account", component: AccountView },
|
|
||||||
{ path: "/request-access", name: "request-access", component: RequestAccessView },
|
|
||||||
{ path: "/onboarding", name: "onboarding", component: OnboardingView },
|
|
||||||
],
|
],
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -68,25 +68,19 @@
 
 <section class="card">
 <div class="section-head">
-<h2>Titan Lab</h2>
+<h2>The Titan Story</h2>
 <span class="pill mono">atlas + oceanus</span>
 </div>
 <div class="copy">
 <p>
-The Titan Lab is my 26-node (and growing) homelab with a production mindset: security, monitoring, and repeatable changes. The core is
-<span class="mono">Atlas</span>, a GitOps-managed <span class="mono">k3s</span> cluster where services are reconciled by
+Titan Lab is my 25-node homelab with a production mindset: security, monitoring, and repeatable changes. The core is
+<span class="mono">Atlas</span>, a GitOps-managed <span class="mono">k3s</span> cluster where most services are reconciled by
 <span class="mono">Flux</span>.
 </p>
 <p>
-<span class="mono">Atlas</span> is my attempt to fully replace all online services that I need or use with self-hosted versions. I care
-deeply about security both for the lab and the cluster and personally. My first hosted services was <span class="mono">VaultWarden</span>
-but now the cluster hosts everything from email to ai. In my freetime, I'm always working on <span class="mono">Atlas</span> with small
-services or organization improvements to make it better and cleaner and cooler.
-</p>
-<p>
-<span class="mono">Oceanus</span> is an intentionally separated host for validator workloads while still feeding data back into the same
-observability stack. SUI is a crypto currency I follow and believe in and so <span class="mono">Oceanus</span> is in my lab and is dedicated
-hardware to extend their infrastructure and make the SUI project more resilient.
+<span class="mono">Oceanus</span> is intentionally separated for validator workloads while still feeding data back into the same
+observability stack. Storage is tiered with Longhorn (<span class="mono">astreae</span> for system data and
+<span class="mono">asteria</span> for user data), fronted by Traefik and backed by centralized identity via Keycloak.
 </p>
 </div>
 </section>
@@ -97,7 +91,7 @@
 const skills = [
 "Python",
 "Linux",
-"Kubernetes (k3s/k8s)",
+"Kubernetes (k3s)",
 "Containers (Docker/OCI)",
 "GitOps (Flux)",
 "CI/CD (Jenkins)",
@@ -133,15 +127,10 @@ const timeline = [
 company: "Titan Lab (personal platform)",
 dates: "Apr 2020 – Present",
 points: [
-"Operate a mixed arm64/amd64 environment with GitOps (Gitea -> Jenkins -> Harbor -> Flux).",
-"Centralized identity with Keycloak and front services via Traefik ingress.",
+"Operate a mixed arm64/amd64 environment with GitOps (Gitea → Jenkins → Harbor → Flux).",
+"Centralize identity with Keycloak and front services via Traefik ingress.",
 "Run tiered Longhorn storage: astreae (system) and asteria (user).",
-"Build observability with Grafana + VictoriaMetrics and OpenSearch dashboards around real service health and log tracking.",
-"Audio, video, and text communication on a Matrix-LiveKit-Coturn-Element stack with mobile compatibility and AI chat integration.",
-"Video streaming of home movies via Jellyfin and instant uploading/publishing with Pegasus.",
-"Host crypto projects with a XRM node and a SUI validator.",
-"Has a knowledge base about the cluster itself for AI bot awareness.",
-"Vault based secret management - no critical information in Kubernetes secrets.",
+"Build observability with Grafana + VictoriaMetrics and dashboards around real service health.",
 ],
 },
 {
@ -1,89 +0,0 @@
|
|||||||
<template>
|
|
||||||
<div class="page">
|
|
||||||
<section class="card hero glass">
|
|
||||||
<div>
|
|
||||||
<p class="eyebrow">Atlas AI</p>
|
|
||||||
<h1>Roadmap</h1>
|
|
||||||
<p class="lede">
|
|
||||||
Chat is live today. Image generation and speech / translation will roll out next. This page tracks what’s planned and
|
|
||||||
what hardware it will land on.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="card grid">
|
|
||||||
<div class="track">
|
|
||||||
<div class="pill mono">AI Image</div>
|
|
||||||
<h3>Visualization</h3>
|
|
||||||
<p class="text">
|
|
||||||
Goal: small, fast image generation for diagrams, thumbnails, and mockups. Targeting Jetson nodes once stable. Output
|
|
||||||
will be gated to members only.
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>Models: open-source SD/FLUX variants distilled for 16GB GPUs.</li>
|
|
||||||
<li>Pipeline: upload prompt → queued job → signed URL in Nextcloud.</li>
|
|
||||||
<li>Status: planned (no UI yet).</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div class="track">
|
|
||||||
<div class="pill mono">AI Speech</div>
|
|
||||||
<h3>Voice + Translation</h3>
|
|
||||||
<p class="text">
|
|
||||||
Goal: low-latency ASR + TTS for meetings and media. Results should stream back into Matrix/LiveKit rooms and Pegasus.
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>Models: whisper-style ASR, lightweight TTS with multilingual support.</li>
|
|
||||||
<li>Targets: titan-20/21 Jetsons for acceleration; fall back to CPU-only if needed.</li>
|
|
||||||
<li>Status: planned (no UI yet).</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="card">
|
|
||||||
<h2>What’s live now?</h2>
|
|
||||||
<p class="text">
|
|
||||||
Atlas AI chat is running on local GPU hardware at <code>chat.ai.bstein.dev</code>. The chat page streams responses and
|
|
||||||
reports latency per turn. As larger models come online on the Jetsons, the chat endpoint will be upgraded in-place.
|
|
||||||
</p>
|
|
||||||
<div class="pill mono">Next step: migrate chat to Jetsons when available</div>
|
|
||||||
</section>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.page {
|
|
||||||
max-width: 1100px;
|
|
||||||
margin: 0 auto;
|
|
||||||
padding: 32px 22px 72px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.grid {
|
|
||||||
display: grid;
|
|
||||||
gap: 16px;
|
|
||||||
grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
|
|
||||||
}
|
|
||||||
|
|
||||||
.track {
|
|
||||||
border: 1px solid var(--card-border);
|
|
||||||
border-radius: 12px;
|
|
||||||
padding: 16px;
|
|
||||||
background: rgba(255, 255, 255, 0.02);
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.text {
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
ul {
|
|
||||||
margin: 0;
|
|
||||||
padding-left: 18px;
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.pill {
|
|
||||||
display: inline-block;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
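The removed roadmap page above notes that the chat UI streams responses and reports latency per turn. As a rough illustration of that client pattern, here is a minimal sketch in plain JavaScript; the /api/chat endpoint and the { message } payload mirror the chat component later in this diff, while the function name and return shape are invented for the example.

async function streamChat(message, endpoint = "/api/chat") {
  const start = performance.now();
  const resp = await fetch(endpoint, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ message }),
  });
  if (!resp.ok || !resp.body) throw new Error(`chat request failed: ${resp.status}`);

  // Read the body incrementally and note the time to the first chunk.
  const reader = resp.body.getReader();
  const decoder = new TextDecoder();
  let text = "";
  let firstChunkMs = null;
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    if (firstChunkMs === null) firstChunkMs = Math.round(performance.now() - start);
    text += decoder.decode(value, { stream: true });
  }
  return { text, firstChunkMs };
}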
@ -1,470 +1,25 @@
|
|||||||
<template>
|
<template>
|
||||||
<div class="page">
|
<div class="page">
|
||||||
<section class="card hero glass">
|
<section class="card">
|
||||||
<div>
|
<h1>AI services (planned)</h1>
|
||||||
<p class="eyebrow">Atlas AI</p>
|
<p>Targets for chat.ai.bstein.dev, draw.ai.bstein.dev, and talk.ai.bstein.dev. These will land behind Keycloak once the pipelines are ready.</p>
|
||||||
<h1>Chat</h1>
|
<ul>
|
||||||
<p class="lede">
|
<li>Chat: conversational agent with SSO.</li>
|
||||||
Talk with Atlas AI. It knows a surprising amount about technology!
|
<li>Image: text-to-image workflows for user media.</li>
|
||||||
</p>
|
<li>Speech: voice-to-voice translation and dubbing.</li>
|
||||||
<div class="pill mono pill-live">Online</div>
|
</ul>
|
||||||
</div>
|
|
||||||
<div class="hero-facts">
|
|
||||||
<div class="fact">
|
|
||||||
<span class="label mono">Model</span>
|
|
||||||
<span class="value mono">{{ current.meta.model }}</span>
|
|
||||||
</div>
|
|
||||||
<div class="fact">
|
|
||||||
<span class="label mono">GPU</span>
|
|
||||||
<span class="value mono">{{ current.meta.gpu }}</span>
|
|
||||||
</div>
|
|
||||||
<div class="fact">
|
|
||||||
<span class="label mono">Endpoint</span>
|
|
||||||
<button class="endpoint-copy mono" type="button" @click="copyCurl">
|
|
||||||
{{ current.meta.endpoint || apiDisplay }}
|
|
||||||
<span v-if="copied" class="copied">copied</span>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="card chat-card">
|
|
||||||
<div class="profile-tabs">
|
|
||||||
<button
|
|
||||||
v-for="profile in profiles"
|
|
||||||
:key="profile.id"
|
|
||||||
type="button"
|
|
||||||
class="profile-tab mono"
|
|
||||||
:class="{ active: activeProfile === profile.id }"
|
|
||||||
@click="activeProfile = profile.id"
|
|
||||||
>
|
|
||||||
{{ profile.label }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div class="chat-window" ref="chatWindow">
|
|
||||||
<div v-for="(msg, idx) in current.messages" :key="idx" :class="['chat-row', msg.role]">
|
|
||||||
<div class="bubble" :class="{ streaming: msg.streaming }">
|
|
||||||
<div class="role mono">{{ msg.role === 'assistant' ? 'Atlas AI' : 'you' }}</div>
|
|
||||||
<p class="message">{{ msg.content }}</p>
|
|
||||||
<div v-if="msg.streaming" class="meta mono typing">streaming…</div>
|
|
||||||
<div v-else-if="msg.latency_ms" class="meta mono">{{ msg.latency_ms }} ms</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div v-if="current.error" class="chat-row error">
|
|
||||||
<div class="bubble">
|
|
||||||
<div class="role mono">error</div>
|
|
||||||
<p>{{ current.error }}</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<form class="chat-form" @submit.prevent="sendMessage">
|
|
||||||
<textarea
|
|
||||||
v-model="draft"
|
|
||||||
placeholder="Ask anything about the lab or general topics..."
|
|
||||||
rows="3"
|
|
||||||
@keydown="handleKeydown"
|
|
||||||
:disabled="sending"
|
|
||||||
/>
|
|
||||||
<div class="actions">
|
|
||||||
<span class="hint mono">Enter to send · Shift+Enter for newline</span>
|
|
||||||
<button class="primary" type="submit" :disabled="sending || !draft.trim()">
|
|
||||||
{{ sending ? "Sending..." : "Send" }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</section>
|
</section>
|
||||||
</div>
|
</div>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
<script setup>
|
|
||||||
import { computed, onMounted, onUpdated, reactive, ref, watch } from "vue";
|
|
||||||
|
|
||||||
const API_URL = (import.meta.env.VITE_AI_ENDPOINT || "/api/chat").trim();
|
|
||||||
const apiUrl = new URL(API_URL, window.location.href);
|
|
||||||
const apiDisplay = apiUrl.host + apiUrl.pathname;
|
|
||||||
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
||||||
|
|
||||||
const profiles = [
|
|
||||||
{ id: "atlas-quick", label: "Atlas Quick" },
|
|
||||||
{ id: "atlas-smart", label: "Atlas Smart" },
|
|
||||||
{ id: "stock-ai", label: "Stock AI" },
|
|
||||||
];
|
|
||||||
const activeProfile = ref("atlas-quick");
|
|
||||||
const profileState = reactive(
|
|
||||||
Object.fromEntries(
|
|
||||||
profiles.map((profile) => [
|
|
||||||
profile.id,
|
|
||||||
{
|
|
||||||
meta: {
|
|
||||||
model: "loading...",
|
|
||||||
gpu: "local GPU (dynamic)",
|
|
||||||
node: "unknown",
|
|
||||||
endpoint: apiUrl.toString(),
|
|
||||||
},
|
|
||||||
messages: [
|
|
||||||
{
|
|
||||||
role: "assistant",
|
|
||||||
content: "Hi! I'm Atlas AI. How can I help?",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
error: "",
|
|
||||||
},
|
|
||||||
])
|
|
||||||
)
|
|
||||||
);
|
|
||||||
const current = computed(() => profileState[activeProfile.value]);
|
|
||||||
const draft = ref("");
|
|
||||||
const sending = ref(false);
|
|
||||||
const chatWindow = ref(null);
|
|
||||||
const copied = ref(false);
|
|
||||||
const conversationIds = reactive({});
|
|
||||||
|
|
||||||
function ensureConversationId(profile) {
|
|
||||||
if (conversationIds[profile]) return conversationIds[profile];
|
|
||||||
const key = `atlas-ai-conversation:${profile}`;
|
|
||||||
let value = localStorage.getItem(key);
|
|
||||||
if (!value) {
|
|
||||||
const suffix =
|
|
||||||
typeof crypto !== "undefined" && crypto.randomUUID ? crypto.randomUUID() : `${Math.random()}`.slice(2);
|
|
||||||
value = `${profile}-${Date.now()}-${suffix}`;
|
|
||||||
localStorage.setItem(key, value);
|
|
||||||
}
|
|
||||||
conversationIds[profile] = value;
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
onMounted(() => fetchMeta(activeProfile.value));
|
|
||||||
watch(activeProfile, (profile) => fetchMeta(profile));
|
|
||||||
|
|
||||||
onUpdated(() => {
|
|
||||||
if (chatWindow.value) {
|
|
||||||
chatWindow.value.scrollTop = chatWindow.value.scrollHeight;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
async function fetchMeta(profile) {
|
|
||||||
try {
|
|
||||||
const resp = await fetch(`/api/ai/info?profile=${encodeURIComponent(profile)}`);
|
|
||||||
if (!resp.ok) return;
|
|
||||||
const data = await resp.json();
|
|
||||||
current.value.meta = {
|
|
||||||
model: data.model || current.value.meta.model,
|
|
||||||
gpu: data.gpu || current.value.meta.gpu,
|
|
||||||
node: data.node || current.value.meta.node,
|
|
||||||
endpoint: data.endpoint || current.value.meta.endpoint || apiDisplay,
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
// swallow
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function sendMessage() {
|
|
||||||
if (!draft.value.trim() || sending.value) return;
|
|
||||||
const text = draft.value.trim();
|
|
||||||
draft.value = "";
|
|
||||||
const state = current.value;
|
|
||||||
state.error = "";
|
|
||||||
const userEntry = { role: "user", content: text };
|
|
||||||
state.messages.push(userEntry);
|
|
||||||
const assistantEntry = { role: "assistant", content: "", streaming: true };
|
|
||||||
state.messages.push(assistantEntry);
|
|
||||||
sending.value = true;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const history = state.messages.filter((m) => !m.streaming).map((m) => ({ role: m.role, content: m.content }));
|
|
||||||
const conversation_id = ensureConversationId(activeProfile.value);
|
|
||||||
const start = performance.now();
|
|
||||||
const resp = await fetch(API_URL, {
|
|
||||||
method: "POST",
|
|
||||||
headers: { "Content-Type": "application/json" },
|
|
||||||
body: JSON.stringify({ message: text, history, profile: activeProfile.value, conversation_id }),
|
|
||||||
});
|
|
||||||
const contentType = resp.headers.get("content-type") || "";
|
|
||||||
|
|
||||||
if (!resp.ok) {
|
|
||||||
const data = await resp.json().catch(() => ({}));
|
|
||||||
throw new Error(data.error || resp.statusText || "Request failed");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prefer streaming if the server sends a stream; otherwise fall back to JSON body.
|
|
||||||
if (resp.body && !contentType.includes("application/json")) {
|
|
||||||
const reader = resp.body.getReader();
|
|
||||||
const decoder = new TextDecoder();
|
|
||||||
let firstChunk = true;
|
|
||||||
while (true) {
|
|
||||||
const { value, done } = await reader.read();
|
|
||||||
if (done) break;
|
|
||||||
const chunk = decoder.decode(value, { stream: true });
|
|
||||||
assistantEntry.content += chunk;
|
|
||||||
if (firstChunk) {
|
|
||||||
assistantEntry.latency_ms = Math.round(performance.now() - start);
|
|
||||||
firstChunk = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
assistantEntry.latency_ms = assistantEntry.latency_ms || Math.round(performance.now() - start);
|
|
||||||
assistantEntry.streaming = false;
|
|
||||||
} else {
|
|
||||||
const data = await resp.json();
|
|
||||||
const textReply = data.reply || "(empty response)";
|
|
||||||
assistantEntry.latency_ms = data.latency_ms ?? Math.round(performance.now() - start);
|
|
||||||
await typeReveal(assistantEntry, textReply);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
state.error = err.message || "Unexpected error";
|
|
||||||
assistantEntry.content = assistantEntry.content || "(no response)";
|
|
||||||
assistantEntry.streaming = false;
|
|
||||||
} finally {
|
|
||||||
sending.value = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function typeReveal(entry, text) {
|
|
||||||
entry.content = "";
|
|
||||||
entry.streaming = true;
|
|
||||||
const chunks = text.match(/.{1,14}/g) || [text];
|
|
||||||
for (const chunk of chunks) {
|
|
||||||
entry.content += chunk;
|
|
||||||
await sleep(15);
|
|
||||||
}
|
|
||||||
entry.streaming = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
function handleKeydown(e) {
|
|
||||||
if (e.key === "Enter" && !e.shiftKey) {
|
|
||||||
e.preventDefault();
|
|
||||||
sendMessage();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function copyCurl() {
|
|
||||||
const target = current.value.meta.endpoint || apiUrl.toString();
|
|
||||||
const curl = `curl -X POST ${target} -H 'content-type: application/json' -d '{\"message\":\"hi\"}'`;
|
|
||||||
try {
|
|
||||||
await navigator.clipboard.writeText(curl);
|
|
||||||
copied.value = true;
|
|
||||||
setTimeout(() => (copied.value = false), 1400);
|
|
||||||
} catch {
|
|
||||||
copied.value = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
<style scoped>
|
||||||
.page {
|
.page {
|
||||||
max-width: 1200px;
|
max-width: 900px;
|
||||||
margin: 0 auto;
|
margin: 0 auto;
|
||||||
padding: 32px 22px 72px;
|
padding: 32px 22px 72px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.hero {
|
ul {
|
||||||
display: grid;
|
|
||||||
grid-template-columns: 2fr 1fr;
|
|
||||||
gap: 18px;
|
|
||||||
align-items: start;
|
|
||||||
}
|
|
||||||
|
|
||||||
.hero-facts {
|
|
||||||
display: grid;
|
|
||||||
gap: 10px;
|
|
||||||
align-content: start;
|
|
||||||
}
|
|
||||||
|
|
||||||
.fact {
|
|
||||||
border: 1px solid var(--card-border);
|
|
||||||
border-radius: 10px;
|
|
||||||
padding: 10px 12px;
|
|
||||||
background: rgba(255, 255, 255, 0.02);
|
|
||||||
}
|
|
||||||
|
|
||||||
.label {
|
|
||||||
color: var(--text-muted);
|
color: var(--text-muted);
|
||||||
font-size: 12px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.value {
|
|
||||||
display: block;
|
|
||||||
margin-top: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.endpoint-copy {
|
|
||||||
background: none;
|
|
||||||
color: inherit;
|
|
||||||
border: 1px solid var(--card-border);
|
|
||||||
border-radius: 8px;
|
|
||||||
padding: 6px 8px;
|
|
||||||
width: 100%;
|
|
||||||
text-align: left;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
.endpoint-copy .copied {
|
|
||||||
float: right;
|
|
||||||
color: var(--accent-cyan);
|
|
||||||
font-size: 11px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.pill-live {
|
|
||||||
display: inline-block;
|
|
||||||
margin-top: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-card {
|
|
||||||
margin-top: 18px;
|
|
||||||
display: grid;
|
|
||||||
grid-template-rows: auto 1fr auto;
|
|
||||||
gap: 12px;
|
|
||||||
min-height: 60vh;
|
|
||||||
}
|
|
||||||
|
|
||||||
.profile-tabs {
|
|
||||||
display: flex;
|
|
||||||
flex-wrap: wrap;
|
|
||||||
gap: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.profile-tab {
|
|
||||||
border: 1px solid var(--card-border);
|
|
||||||
background: rgba(255, 255, 255, 0.03);
|
|
||||||
color: var(--text-muted);
|
|
||||||
padding: 6px 12px;
|
|
||||||
border-radius: 999px;
|
|
||||||
cursor: pointer;
|
|
||||||
transition: border-color 0.2s ease, color 0.2s ease, background 0.2s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.profile-tab.active {
|
|
||||||
border-color: rgba(0, 229, 197, 0.6);
|
|
||||||
color: var(--text-primary);
|
|
||||||
background: rgba(0, 229, 197, 0.12);
|
|
||||||
box-shadow: var(--glow-soft);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-window {
|
|
||||||
background: rgba(255, 255, 255, 0.02);
|
|
||||||
border: 1px solid var(--card-border);
|
|
||||||
border-radius: 12px;
|
|
||||||
padding: 14px;
|
|
||||||
min-height: 360px;
|
|
||||||
height: 100%;
|
|
||||||
overflow-y: auto;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 12px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-row {
|
|
||||||
display: flex;
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-row.user {
|
|
||||||
justify-content: flex-end;
|
|
||||||
}
|
|
||||||
|
|
||||||
.bubble {
|
|
||||||
max-width: 85%;
|
|
||||||
padding: 10px 12px;
|
|
||||||
border-radius: 12px;
|
|
||||||
border: 1px solid var(--card-border);
|
|
||||||
background: rgba(255, 255, 255, 0.04);
|
|
||||||
}
|
|
||||||
.message {
|
|
||||||
white-space: pre-wrap;
|
|
||||||
word-break: break-word;
|
|
||||||
}
|
|
||||||
.bubble.streaming {
|
|
||||||
border-color: rgba(0, 229, 197, 0.4);
|
|
||||||
box-shadow: var(--glow-soft);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-row.assistant .bubble {
|
|
||||||
background: rgba(80, 163, 255, 0.08);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-row.user .bubble {
|
|
||||||
background: rgba(255, 255, 255, 0.06);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-row.error .bubble {
|
|
||||||
background: rgba(255, 87, 87, 0.1);
|
|
||||||
border-color: rgba(255, 87, 87, 0.5);
|
|
||||||
}
|
|
||||||
|
|
||||||
.role {
|
|
||||||
font-size: 12px;
|
|
||||||
color: var(--text-muted);
|
|
||||||
margin-bottom: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.meta {
|
|
||||||
color: var(--text-muted);
|
|
||||||
font-size: 12px;
|
|
||||||
margin-top: 6px;
|
|
||||||
}
|
|
||||||
.meta.typing {
|
|
||||||
color: var(--accent-cyan);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chat-form {
|
|
||||||
margin-top: 0;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
textarea {
|
|
||||||
width: 100%;
|
|
||||||
border-radius: 12px;
|
|
||||||
border: 1px solid var(--card-border);
|
|
||||||
background: rgba(255, 255, 255, 0.03);
|
|
||||||
color: var(--text-primary);
|
|
||||||
padding: 10px 12px;
|
|
||||||
resize: vertical;
|
|
||||||
}
|
|
||||||
|
|
||||||
.actions {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: space-between;
|
|
||||||
gap: 12px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.hint {
|
|
||||||
color: var(--text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
button.primary {
|
|
||||||
background: linear-gradient(90deg, #4f8bff, #7dd0ff);
|
|
||||||
color: #0b1222;
|
|
||||||
padding: 10px 16px;
|
|
||||||
border: none;
|
|
||||||
border-radius: 10px;
|
|
||||||
cursor: pointer;
|
|
||||||
font-weight: 700;
|
|
||||||
}
|
|
||||||
|
|
||||||
button:disabled {
|
|
||||||
opacity: 0.6;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (max-width: 820px) {
|
|
||||||
.hero {
|
|
||||||
grid-template-columns: 1fr;
|
|
||||||
}
|
|
||||||
.chat-card {
|
|
||||||
min-height: 50vh;
|
|
||||||
}
|
|
||||||
.actions {
|
|
||||||
align-items: flex-start;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (min-width: 1100px) {
|
|
||||||
.chat-card {
|
|
||||||
min-height: calc(100vh - 260px);
|
|
||||||
}
|
|
||||||
.chat-window {
|
|
||||||
min-height: 480px;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
</style>
|
</style>
|
||||||
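One detail worth pulling out of the removed chat component above: it keeps a separate conversation id per model profile, created lazily and persisted in localStorage so a reload continues the same server-side conversation. A condensed sketch of that pattern follows; the key format and randomUUID fallback come from the original component, while the function name here is only illustrative.

function conversationIdFor(profile) {
  const key = `atlas-ai-conversation:${profile}`;
  let id = localStorage.getItem(key);
  if (!id) {
    // Prefer crypto.randomUUID when available, otherwise fall back to Math.random.
    const suffix =
      typeof crypto !== "undefined" && crypto.randomUUID ? crypto.randomUUID() : `${Math.random()}`.slice(2);
    id = `${profile}-${Date.now()}-${suffix}`;
    localStorage.setItem(key, id);
  }
  return id;
}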
|
|||||||
@ -1,326 +0,0 @@
|
|||||||
<template>
|
|
||||||
<div class="page">
|
|
||||||
<section class="card hero glass">
|
|
||||||
<div>
|
|
||||||
<p class="eyebrow">Atlas</p>
|
|
||||||
<h1>Apps</h1>
|
|
||||||
<p class="lede">
|
|
||||||
Service shortcuts for Atlas.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="section-grid">
|
|
||||||
<section v-for="section in sections" :key="section.title" class="card category">
|
|
||||||
<div class="section-head">
|
|
||||||
<div>
|
|
||||||
<h2>{{ section.title }}</h2>
|
|
||||||
<p class="muted">{{ section.description }}</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-for="group in section.groups" :key="group.title" class="group">
|
|
||||||
<div class="tiles">
|
|
||||||
<a
|
|
||||||
v-for="app in group.apps"
|
|
||||||
:key="app.name"
|
|
||||||
class="tile"
|
|
||||||
:href="app.url"
|
|
||||||
:target="app.target"
|
|
||||||
rel="noreferrer"
|
|
||||||
>
|
|
||||||
<div class="tile-title">{{ app.name }}</div>
|
|
||||||
<div class="tile-desc">{{ app.description }}</div>
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
</section>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup>
|
|
||||||
const sections = [
|
|
||||||
{
|
|
||||||
title: "Productivity",
|
|
||||||
description: "Docs, planning, cloud workspace, and personal finance for Atlas users.",
|
|
||||||
groups: [
|
|
||||||
{
|
|
||||||
title: "Workspace",
|
|
||||||
apps: [
|
|
||||||
{
|
|
||||||
name: "Nextcloud",
|
|
||||||
url: "https://cloud.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Storage, mail, photos, and office docs — the main Atlas hub.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Outline",
|
|
||||||
url: "https://notes.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Team docs and knowledge base.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Planka",
|
|
||||||
url: "https://tasks.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Kanban planning boards for projects.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Actual Budget",
|
|
||||||
url: "https://budget.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Local-first budgets and envelopes with SSO.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Firefly III",
|
|
||||||
url: "https://money.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Expense tracking with Abacus mobile sync.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Wger",
|
|
||||||
url: "https://health.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Workout + nutrition tracking with the wger mobile app.",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: "Dev",
|
|
||||||
description: "Build and ship: source control, CI, registry, and GitOps.",
|
|
||||||
groups: [
|
|
||||||
{
|
|
||||||
title: "Dev Stack",
|
|
||||||
apps: [
|
|
||||||
{ name: "Gitea", url: "https://scm.bstein.dev", target: "_blank", description: "Git hosting and collaboration." },
|
|
||||||
{ name: "Jenkins", url: "https://ci.bstein.dev", target: "_blank", description: "CI pipelines and automation." },
|
|
||||||
{ name: "Harbor", url: "https://registry.bstein.dev", target: "_blank", description: "Artifact registry." },
|
|
||||||
{ name: "GitOps", url: "https://cd.bstein.dev", target: "_blank", description: "GitOps UI for Flux." },
|
|
||||||
{ name: "OpenSearch", url: "https://logs.bstein.dev", target: "_blank", description: "Centralized logs powered by Fluent Bit." },
|
|
||||||
{ name: "Grafana", url: "https://metrics.bstein.dev", target: "_blank", description: "Dashboards and monitoring." },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: "Security",
|
|
||||||
description: "Passwords for humans, secrets for infrastructure.",
|
|
||||||
groups: [
|
|
||||||
{
|
|
||||||
title: "Personal",
|
|
||||||
apps: [
|
|
||||||
{
|
|
||||||
name: "Vaultwarden",
|
|
||||||
url: "https://vault.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Password manager (Bitwarden-compatible).",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Keycloak",
|
|
||||||
url: "https://sso.bstein.dev/realms/atlas/account/#/security/signing-in",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Account security + MFA (2FA) settings (Keycloak).",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: "Infrastructure",
|
|
||||||
apps: [
|
|
||||||
{
|
|
||||||
name: "Vault",
|
|
||||||
url: "https://secret.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Secrets management for infrastructure and apps.",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: "Communications",
|
|
||||||
description: "Chat rooms, calls, and bots. Element X (mobile) compatible.",
|
|
||||||
groups: [
|
|
||||||
{
|
|
||||||
title: "Chat",
|
|
||||||
apps: [
|
|
||||||
{
|
|
||||||
name: "Element X",
|
|
||||||
url: "https://live.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Matrix rooms with calls powered by Atlas infra.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "AI Chat",
|
|
||||||
url: "/ai/chat",
|
|
||||||
target: "_self",
|
|
||||||
description: "Chat with Atlas AI (GPU-accelerated).",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: "Streaming",
|
|
||||||
description: "Stream media and publish uploads into your library.",
|
|
||||||
groups: [
|
|
||||||
{
|
|
||||||
title: "Media",
|
|
||||||
apps: [
|
|
||||||
{
|
|
||||||
name: "Jellyfin",
|
|
||||||
url: "https://stream.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Stream videos to desktop, mobile, and TV.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Pegasus",
|
|
||||||
url: "https://pegasus.bstein.dev",
|
|
||||||
target: "_blank",
|
|
||||||
description: "Mobile-friendly upload/publish into Jellyfin.",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
title: "Crypto",
|
|
||||||
description: "Local infrastructure for crypto workloads.",
|
|
||||||
groups: [
|
|
||||||
{
|
|
||||||
title: "Monero",
|
|
||||||
apps: [
|
|
||||||
{
|
|
||||||
name: "Monero Node",
|
|
||||||
url: "/monero",
|
|
||||||
target: "_self",
|
|
||||||
description: "Faster sync using the Atlas Monero node.",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
];
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.page {
|
|
||||||
max-width: 1200px;
|
|
||||||
margin: 0 auto;
|
|
||||||
padding: 32px 22px 72px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-grid {
|
|
||||||
display: grid;
|
|
||||||
gap: 14px;
|
|
||||||
align-items: stretch;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (min-width: 820px) {
|
|
||||||
.section-grid {
|
|
||||||
grid-template-columns: 1fr 1fr;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.hero {
|
|
||||||
display: flex;
|
|
||||||
align-items: flex-start;
|
|
||||||
justify-content: space-between;
|
|
||||||
gap: 18px;
|
|
||||||
margin-bottom: 12px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.category {
|
|
||||||
padding: 18px;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-head {
|
|
||||||
display: flex;
|
|
||||||
align-items: flex-start;
|
|
||||||
justify-content: space-between;
|
|
||||||
gap: 18px;
|
|
||||||
margin-bottom: 14px;
|
|
||||||
min-height: 92px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.group + .group {
|
|
||||||
margin-top: 14px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.muted {
|
|
||||||
margin: 6px 0 0;
|
|
||||||
color: var(--text-muted);
|
|
||||||
max-width: 820px;
|
|
||||||
display: -webkit-box;
|
|
||||||
-webkit-line-clamp: 2;
|
|
||||||
-webkit-box-orient: vertical;
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tiles {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: 1fr;
|
|
||||||
gap: 12px;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (min-width: 680px) {
|
|
||||||
.tiles {
|
|
||||||
grid-template-columns: repeat(2, minmax(0, 1fr));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.tile {
|
|
||||||
display: block;
|
|
||||||
text-decoration: none;
|
|
||||||
padding: 14px 14px 12px;
|
|
||||||
border-radius: 14px;
|
|
||||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
background: rgba(255, 255, 255, 0.02);
|
|
||||||
transition: border-color 160ms ease, transform 160ms ease;
|
|
||||||
min-height: 120px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tile:hover {
|
|
||||||
border-color: rgba(120, 180, 255, 0.35);
|
|
||||||
transform: translateY(-1px);
|
|
||||||
}
|
|
||||||
|
|
||||||
.tile-title {
|
|
||||||
font-weight: 750;
|
|
||||||
color: var(--text-strong);
|
|
||||||
}
|
|
||||||
|
|
||||||
.tile-desc {
|
|
||||||
margin-top: 6px;
|
|
||||||
color: var(--text-muted);
|
|
||||||
font-size: 14px;
|
|
||||||
line-height: 1.4;
|
|
||||||
display: -webkit-box;
|
|
||||||
-webkit-line-clamp: 3;
|
|
||||||
-webkit-box-orient: vertical;
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
.eyebrow {
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.08em;
|
|
||||||
color: var(--text-muted);
|
|
||||||
margin: 0 0 6px;
|
|
||||||
font-size: 13px;
|
|
||||||
}
|
|
||||||
|
|
||||||
h1 {
|
|
||||||
margin: 0 0 6px;
|
|
||||||
font-size: 32px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.lede {
|
|
||||||
margin: 0;
|
|
||||||
color: var(--text-muted);
|
|
||||||
max-width: 640px;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
@@ -5,7 +5,7 @@
 <p class="eyebrow">Titan Lab</p>
 <h1>Overview</h1>
 <p class="lede">
-Titan Lab is a 26-node homelab with a production mindset. Atlas is its Kubernetes cluster that runs user and dev
+Titan Lab is a 25-node homelab with a production mindset. Atlas is its Kubernetes cluster that runs user and dev
 services. Oceanus is a dedicated SUI validator host. Underlying components such as Theia, the bastion, and Tethys, the link between
 Atlas and Oceanus, underpin the lab. Membership grants access to the services below.
 </p>
@@ -28,7 +28,7 @@
 <span class="pill mono">k3s cluster</span>
 </div>
 <iframe
-src="https://metrics.bstein.dev/d-solo/atlas-overview/atlas-overview?from=now-24h&to=now&refresh=1m&theme=dark&panelId=27&__feature.dashboardSceneSolo"
+src="https://metrics.bstein.dev/d-solo/atlas-overview/atlas-overview?from=now-24h&to=now&refresh=1m&orgId=1&theme=dark&panelId=27&__feature.dashboardSceneSolo"
 width="100%"
 height="180"
 frameborder="0"
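For context on the iframe change above: the d-solo path renders a single Grafana panel, and the query string carries the time range, refresh interval, org, theme, and panel id (orgId=1 is Grafana's default organization, which is presumably why the new side adds it explicitly). A small sketch of assembling that URL; the helper name and argument shape are invented for illustration.

function panelEmbedUrl({ uid, slug, panelId }) {
  const params = new URLSearchParams({
    from: "now-24h",
    to: "now",
    refresh: "1m",
    orgId: "1",
    theme: "dark",
    panelId: String(panelId),
  });
  // The page also appends the __feature.dashboardSceneSolo flag verbatim.
  return `https://metrics.bstein.dev/d-solo/${uid}/${slug}?${params}&__feature.dashboardSceneSolo`;
}

// e.g. panelEmbedUrl({ uid: "atlas-overview", slug: "atlas-overview", panelId: 27 })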
@@ -50,7 +50,7 @@
 <section class="card">
 <div class="section-head">
 <h2>Service Grid</h2>
-<span class="pill mono">ai + comms + storage + streaming + development</span>
+<span class="pill mono">email + storage + streaming + pipelines</span>
 </div>
 <ServiceGrid :services="displayServices" />
 </section>
@@ -100,14 +100,10 @@ const oceanusPillClass = computed(() => (props.labStatus?.oceanus?.up ? "pill-ok
 
 const metricItems = computed(() => {
 const items = [
-{ label: "Lab nodes", value: "26", note: "Workers: 8 rpi5s, 8 rpi4s, 2 jetsons,\n\t\t\t\t 1 minipc\nControl plane: 3 rpi5\nDedicated Hosts: oceanus, titan-db,\n\t\t\t\t\t\t\t\t tethys, theia" },
+{ label: "Lab nodes", value: "25", note: "26 total (titan-16 is down)\nWorkers: 8 rpi5s, 8 rpi4s, 2 jetsons,\n\t\t\t\t 1 minipc\nControl plane: 3 rpi5\nDedicated Hosts: oceanus, titan-db,\n\t\t\t\t\t\t\t\t tethys, theia" },
 { label: "CPU cores", value: "142", note: "32 arm64 cores @ 1.5Ghz\n12 arm64 cores @ 1.9Ghz\n52 arm64 cores @ 2.4Ghz\n10 amd64 cores @ 5.00Ghz\n12 amd64 cores @ 4.67Ghz\n24 amd64 cores @ 4.04Ghz" },
-{
-label: "Memory",
-value: "552 GB",
-note: "64GB Raspberry Pi 4\n104GB Raspberry Pi 5\n32GB NVIDIA Jetson Xavier\n352GB AMD64 Chipsets",
-},
-{ label: "Storage", value: "80 TB", note: "astreae: 32GB/4xRPI4\nasteria: 48GB/4xRPI4" },
+{ label: "Memory", value: "552 GB", note: "nominal\n(includes downed titan-16)" },
+{ label: "Atlas storage", value: "80 TB", note: "Longhorn astreae + asteria" },
 ];
 return items.map((item) => ({
 ...item,
@@ -131,10 +127,7 @@ function pickIcon(name) {
 const h = name.toLowerCase();
 if (h.includes("nextcloud")) return "☁️";
 if (h.includes("jellyfin")) return "🎞️";
-if (h.includes("matrix")) return "🗨️";
-if (h.includes("element")) return "🧩";
-if (h.includes("livekit")) return "🎥";
-if (h.includes("coturn") || h.includes("turn")) return "📞";
+if (h.includes("jitsi")) return "📡";
 if (h.includes("mail")) return "📮";
 if (h.includes("vaultwarden")) return "🔒";
 if (h.includes("vault")) return "🔑";
@@ -143,13 +136,11 @@ function pickIcon(name) {
 if (h.includes("harbor")) return "📦";
 if (h.includes("flux")) return "🔄";
 if (h.includes("monero")) return "⛏️";
-if (h.includes("sui")) return "💠";
 if (h.includes("keycloak")) return "🛡️";
-if (h.includes("translation")) return "🌐";
 if (h.includes("grafana")) return "📈";
 if (h.includes("pegasus")) return "🚀";
 if (h.includes("ai chat")) return "💬";
-if (h.includes("ai image") || h.includes("vision")) return "🖼️";
+if (h.includes("ai image")) return "🖼️";
 if (h.includes("ai speech")) return "🎙️";
 return "🛰️";
 }
|
|||||||
@ -1,867 +0,0 @@
|
|||||||
<template>
|
|
||||||
<div class="page">
|
|
||||||
<section class="card hero glass">
|
|
||||||
<div>
|
|
||||||
<p class="eyebrow">Atlas</p>
|
|
||||||
<h1>Request Access</h1>
|
|
||||||
<p class="lede">
|
|
||||||
Request access to Atlas. Approved accounts are provisioned from this form only.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="card module">
|
|
||||||
<div class="module-head">
|
|
||||||
<h2>Request form</h2>
|
|
||||||
<span class="pill mono" :class="submitted ? 'pill-ok' : 'pill-warn'">
|
|
||||||
{{ submitted ? "submitted" : "pending" }}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<p class="muted">
|
|
||||||
Requests require a verified external email so Keycloak can support account recovery. After verification, an admin can approve your account.
|
|
||||||
Your lab username becomes your Atlas identity (including your @{{ mailDomain }} mailbox).
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<form class="form" @submit.prevent="submit" v-if="!submitted">
|
|
||||||
<label class="field">
|
|
||||||
<span class="label mono">Lab Name (username)</span>
|
|
||||||
<input
|
|
||||||
v-model="form.username"
|
|
||||||
class="input mono"
|
|
||||||
type="text"
|
|
||||||
autocomplete="username"
|
|
||||||
placeholder="e.g. alice"
|
|
||||||
:disabled="submitting"
|
|
||||||
required
|
|
||||||
/>
|
|
||||||
<div v-if="availability.label" class="availability">
|
|
||||||
<span class="pill mono" :class="availability.pillClass">{{ availability.label }}</span>
|
|
||||||
<span v-if="availability.detail" class="hint mono">{{ availability.detail }}</span>
|
|
||||||
</div>
|
|
||||||
</label>
|
|
||||||
|
|
||||||
<label class="field">
|
|
||||||
<span class="label mono">Last name</span>
|
|
||||||
<input
|
|
||||||
v-model="form.last_name"
|
|
||||||
class="input"
|
|
||||||
type="text"
|
|
||||||
autocomplete="family-name"
|
|
||||||
placeholder="e.g. Stein"
|
|
||||||
:disabled="submitting"
|
|
||||||
required
|
|
||||||
/>
|
|
||||||
<span class="hint mono">Required for account provisioning.</span>
|
|
||||||
</label>
|
|
||||||
|
|
||||||
<label class="field">
|
|
||||||
<span class="label mono">First name (optional)</span>
|
|
||||||
<input
|
|
||||||
v-model="form.first_name"
|
|
||||||
class="input"
|
|
||||||
type="text"
|
|
||||||
autocomplete="given-name"
|
|
||||||
placeholder="e.g. Brad"
|
|
||||||
:disabled="submitting"
|
|
||||||
/>
|
|
||||||
</label>
|
|
||||||
|
|
||||||
<label class="field">
|
|
||||||
<span class="label mono">Email</span>
|
|
||||||
<input
|
|
||||||
v-model="form.email"
|
|
||||||
class="input mono"
|
|
||||||
type="email"
|
|
||||||
autocomplete="email"
|
|
||||||
placeholder="you@example.com"
|
|
||||||
:disabled="submitting"
|
|
||||||
required
|
|
||||||
/>
|
|
||||||
<span class="hint mono">Must be an external address (not @{{ mailDomain }})</span>
|
|
||||||
</label>
|
|
||||||
|
|
||||||
<label class="field">
|
|
||||||
<span class="label mono">Note (optional)</span>
|
|
||||||
<textarea
|
|
||||||
v-model="form.note"
|
|
||||||
class="textarea"
|
|
||||||
rows="4"
|
|
||||||
placeholder="What do you want access to?"
|
|
||||||
:disabled="submitting"
|
|
||||||
/>
|
|
||||||
</label>
|
|
||||||
|
|
||||||
<div class="actions">
|
|
||||||
<button
|
|
||||||
class="primary"
|
|
||||||
type="submit"
|
|
||||||
:disabled="submitting || !form.username.trim() || !form.last_name.trim() || availability.blockSubmit"
|
|
||||||
>
|
|
||||||
{{ submitting ? "Submitting..." : "Submit request" }}
|
|
||||||
</button>
|
|
||||||
<span class="hint mono">Requests are rate-limited.</span>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
<div v-else class="success-box">
|
|
||||||
<div class="mono">Request submitted.</div>
|
|
||||||
<div class="muted">
|
|
||||||
Save this request code. Check your email for a verification link, then use the code to track status. Once approved,
|
|
||||||
your status will provide an onboarding link to finish account setup.
|
|
||||||
</div>
|
|
||||||
<div class="request-code-row">
|
|
||||||
<span class="label mono">Request Code</span>
|
|
||||||
<button class="copy mono" type="button" @click="copyRequestCode">
|
|
||||||
{{ requestCode }}
|
|
||||||
<span v-if="copied" class="copied">copied</span>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="card module status-module">
|
|
||||||
<div class="module-head">
|
|
||||||
<h2>Check status</h2>
|
|
||||||
<span class="pill mono" :class="statusPillClass(status)">
|
|
||||||
{{ statusLabel(status) }}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<p class="muted">
|
|
||||||
Enter your request code to see whether it is awaiting approval, building accounts, awaiting onboarding, ready, or rejected.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<div class="status-form">
|
|
||||||
<input
|
|
||||||
v-model="statusForm.request_code"
|
|
||||||
class="input mono"
|
|
||||||
type="text"
|
|
||||||
placeholder="username~XXXXXXXXXX"
|
|
||||||
:disabled="checking"
|
|
||||||
/>
|
|
||||||
<button class="primary" type="button" @click="checkStatus" :disabled="checking || !statusForm.request_code.trim()">
|
|
||||||
{{ checking ? "Checking..." : "Check" }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="verifying" class="muted" style="margin-top: 10px;">
|
|
||||||
Verifying email…
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="verifyBanner" class="verify-box">
|
|
||||||
<div class="verify-title mono">{{ verifyBanner.title }}</div>
|
|
||||||
<div class="verify-body">{{ verifyBanner.body }}</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="status === 'pending_email_verification'" class="actions" style="margin-top: 10px;">
|
|
||||||
<button class="pill mono" type="button" :disabled="resending" @click="resendVerification">
|
|
||||||
{{ resending ? "Resending..." : "Resend verification email" }}
|
|
||||||
</button>
|
|
||||||
<span v-if="resendMessage" class="hint mono">{{ resendMessage }}</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="tasks.length" class="task-box">
|
|
||||||
<div class="module-head" style="margin-bottom: 10px;">
|
|
||||||
<h2>Automation</h2>
|
|
||||||
<span class="pill mono" :class="blocked ? 'pill-bad' : 'pill-ok'">
|
|
||||||
{{ blocked ? "blocked" : "running" }}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<ul class="task-list">
|
|
||||||
<li v-for="item in tasks" :key="item.task" class="task-row">
|
|
||||||
<span class="mono task-name">{{ item.task }}</span>
|
|
||||||
<span class="pill mono" :class="taskPillClass(item.status)">{{ item.status }}</span>
|
|
||||||
<span v-if="item.detail" class="mono task-detail">{{ item.detail }}</span>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p v-if="blocked" class="muted" style="margin-top: 10px;">
|
|
||||||
One or more automation steps failed. Fix the error above, then check again.
|
|
||||||
</p>
|
|
||||||
<div v-if="blocked" class="actions" style="margin-top: 10px;">
|
|
||||||
<button class="pill mono" type="button" :disabled="retrying" @click="retryProvisioning">
|
|
||||||
{{ retrying ? "Retrying..." : "Retry failed steps" }}
|
|
||||||
</button>
|
|
||||||
<span v-if="retryMessage" class="hint mono">{{ retryMessage }}</span>
|
|
||||||
</div>
|
|
||||||
<p v-if="blocked" class="muted" style="margin-top: 8px;">
|
|
||||||
If the error mentions rate limiting or a temporary outage, wait a few minutes and retry. If it keeps failing,
|
|
||||||
contact an admin.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div
|
|
||||||
v-if="onboardingUrl && (status === 'awaiting_onboarding' || status === 'ready')"
|
|
||||||
class="actions onboarding-actions"
|
|
||||||
>
|
|
||||||
<div class="onboarding-copy">
|
|
||||||
<p class="muted" style="margin: 0;">
|
|
||||||
Your accounts are ready. Continue onboarding to finish setup.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<a class="primary onboarding-cta" :href="onboardingUrl">Continue onboarding</a>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="error" class="error-box">
|
|
||||||
<div class="mono">{{ error }}</div>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup>
|
|
||||||
import { onMounted, reactive, ref, watch } from "vue";
|
|
||||||
import { useRoute } from "vue-router";
|
|
||||||
|
|
||||||
const route = useRoute();
|
|
||||||
|
|
||||||
function statusLabel(value) {
|
|
||||||
const key = (value || "").trim();
|
|
||||||
if (key === "pending_email_verification") return "confirm email";
|
|
||||||
if (key === "pending") return "awaiting approval";
|
|
||||||
if (key === "accounts_building") return "accounts building";
|
|
||||||
if (key === "awaiting_onboarding") return "awaiting onboarding";
|
|
||||||
if (key === "ready") return "ready";
|
|
||||||
if (key === "denied") return "rejected";
|
|
||||||
return key || "unknown";
|
|
||||||
}
|
|
||||||
|
|
||||||
function statusPillClass(value) {
|
|
||||||
const key = (value || "").trim();
|
|
||||||
if (key === "pending_email_verification") return "pill-warn";
|
|
||||||
if (key === "pending") return "pill-wait";
|
|
||||||
if (key === "accounts_building") return "pill-warn";
|
|
||||||
if (key === "awaiting_onboarding") return "pill-ok";
|
|
||||||
if (key === "ready") return "pill-info";
|
|
||||||
if (key === "denied") return "pill-bad";
|
|
||||||
return "pill-warn";
|
|
||||||
}
|
|
||||||
|
|
||||||
const form = reactive({
|
|
||||||
username: "",
|
|
||||||
first_name: "",
|
|
||||||
last_name: "",
|
|
||||||
email: "",
|
|
||||||
note: "",
|
|
||||||
});
|
|
||||||
|
|
||||||
const submitting = ref(false);
|
|
||||||
const submitted = ref(false);
|
|
||||||
const error = ref("");
|
|
||||||
const requestCode = ref("");
|
|
||||||
const copied = ref(false);
|
|
||||||
const verifying = ref(false);
|
|
||||||
const mailDomain = import.meta.env?.VITE_MAILU_DOMAIN || "bstein.dev";
|
|
||||||
const availability = reactive({
|
|
||||||
label: "",
|
|
||||||
detail: "",
|
|
||||||
pillClass: "",
|
|
||||||
checking: false,
|
|
||||||
blockSubmit: false,
|
|
||||||
});
|
|
||||||
let availabilityTimer = 0;
|
|
||||||
let availabilityToken = 0;
|
|
||||||
|
|
||||||
const statusForm = reactive({
|
|
||||||
request_code: "",
|
|
||||||
});
|
|
||||||
const checking = ref(false);
|
|
||||||
const status = ref("");
|
|
||||||
const onboardingUrl = ref("");
|
|
||||||
const tasks = ref([]);
|
|
||||||
const blocked = ref(false);
|
|
||||||
const retrying = ref(false);
|
|
||||||
const retryMessage = ref("");
|
|
||||||
const resending = ref(false);
|
|
||||||
const resendMessage = ref("");
|
|
||||||
const verifyBanner = ref(null);
|
|
||||||
|
|
||||||
function taskPillClass(status) {
|
|
||||||
const key = (status || "").trim();
|
|
||||||
if (key === "ok") return "pill-ok";
|
|
||||||
if (key === "error") return "pill-bad";
|
|
||||||
if (key === "pending") return "pill-warn";
|
|
||||||
return "pill-warn";
|
|
||||||
}
|
|
||||||
|
|
||||||
function resetAvailability() {
|
|
||||||
availability.label = "";
|
|
||||||
availability.detail = "";
|
|
||||||
availability.pillClass = "";
|
|
||||||
availability.blockSubmit = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
function setAvailability(state, detail = "") {
|
|
||||||
availability.detail = detail;
|
|
||||||
availability.blockSubmit = false;
|
|
||||||
if (state === "checking") {
|
|
||||||
availability.label = "checking";
|
|
||||||
availability.pillClass = "pill-warn";
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (state === "available") {
|
|
||||||
availability.label = "available";
|
|
||||||
availability.pillClass = "pill-ok";
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (state === "invalid") {
|
|
||||||
availability.label = "invalid";
|
|
||||||
availability.pillClass = "pill-bad";
|
|
||||||
availability.blockSubmit = true;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (state === "requested") {
|
|
||||||
availability.label = "requested";
|
|
||||||
availability.pillClass = "pill-warn";
|
|
||||||
availability.blockSubmit = true;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (state === "exists") {
|
|
||||||
availability.label = "taken";
|
|
||||||
availability.pillClass = "pill-bad";
|
|
||||||
availability.blockSubmit = true;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (state === "error") {
|
|
||||||
availability.label = "error";
|
|
||||||
availability.pillClass = "pill-warn";
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
resetAvailability();
|
|
||||||
}
|
|
||||||
|
|
||||||
async function checkAvailability(name) {
|
|
||||||
const token = (availabilityToken += 1);
|
|
||||||
setAvailability("checking");
|
|
||||||
availability.checking = true;
|
|
||||||
try {
|
|
||||||
const resp = await fetch(`/api/access/request/availability?username=${encodeURIComponent(name)}`, {
|
|
||||||
headers: { Accept: "application/json" },
|
|
||||||
cache: "no-store",
|
|
||||||
});
|
|
||||||
const data = await resp.json().catch(() => ({}));
|
|
||||||
if (token !== availabilityToken) return;
|
|
||||||
if (!resp.ok) throw new Error(data.error || `status ${resp.status}`);
|
|
||||||
if (data.available) {
|
|
||||||
setAvailability("available", "Username is available.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const reason = data.reason || "";
|
|
||||||
const status = data.status || "";
|
|
||||||
if (reason === "invalid") {
|
|
||||||
setAvailability("invalid", data.detail || "Use 3-32 characters (letters, numbers, . _ -).");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (reason === "exists") {
|
|
||||||
setAvailability("exists", "Already in use. Choose another name.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (reason === "requested") {
|
|
||||||
const label = status ? `Existing request: ${statusLabel(status)}` : "Request already exists.";
|
|
||||||
setAvailability("requested", label);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
setAvailability("error", "Unable to confirm availability.");
|
|
||||||
} catch (err) {
|
|
||||||
if (token !== availabilityToken) return;
|
|
||||||
setAvailability("error", err.message || "Availability check failed.");
|
|
||||||
} finally {
|
|
||||||
if (token === availabilityToken) availability.checking = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function submit() {
|
|
||||||
if (submitting.value) return;
|
|
||||||
error.value = "";
|
|
||||||
submitting.value = true;
|
|
||||||
try {
|
|
||||||
const resp = await fetch("/api/access/request", {
|
|
||||||
method: "POST",
|
|
||||||
headers: { "Content-Type": "application/json" },
|
|
||||||
cache: "no-store",
|
|
||||||
body: JSON.stringify({
|
|
||||||
username: form.username.trim(),
|
|
||||||
first_name: form.first_name.trim(),
|
|
||||||
last_name: form.last_name.trim(),
|
|
||||||
email: form.email.trim(),
|
|
||||||
note: form.note.trim(),
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
const data = await resp.json().catch(() => ({}));
|
|
||||||
if (!resp.ok) throw new Error(data.error || resp.statusText || `status ${resp.status}`);
|
|
||||||
submitted.value = true;
|
|
||||||
requestCode.value = data.request_code || "";
|
|
||||||
statusForm.request_code = requestCode.value;
|
|
||||||
status.value = data.status || "pending_email_verification";
|
|
||||||
} catch (err) {
|
|
||||||
error.value = err.message || "Failed to submit request";
|
|
||||||
} finally {
|
|
||||||
submitting.value = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
watch(
|
|
||||||
() => form.username,
|
|
||||||
(value) => {
|
|
||||||
const trimmed = value.trim();
|
|
||||||
if (availabilityTimer) {
|
|
||||||
window.clearTimeout(availabilityTimer);
|
|
||||||
availabilityTimer = 0;
|
|
||||||
}
|
|
||||||
availabilityToken += 1;
|
|
||||||
if (!trimmed) {
|
|
||||||
resetAvailability();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (trimmed.length < 3 || trimmed.length > 32) {
|
|
||||||
setAvailability("invalid", "Use 3-32 characters (letters, numbers, . _ -).");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (!/^[a-zA-Z0-9._-]+$/.test(trimmed)) {
|
|
||||||
setAvailability("invalid", "Use letters, numbers, and . _ - only.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
availabilityTimer = window.setTimeout(() => {
|
|
||||||
checkAvailability(trimmed);
|
|
||||||
}, 350);
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
async function copyRequestCode() {
|
|
||||||
if (!requestCode.value) return;
|
|
||||||
try {
|
|
||||||
if (navigator?.clipboard?.writeText) {
|
|
||||||
await navigator.clipboard.writeText(requestCode.value);
|
|
||||||
} else {
|
|
||||||
const textarea = document.createElement("textarea");
|
|
||||||
textarea.value = requestCode.value;
|
|
||||||
textarea.setAttribute("readonly", "");
|
|
||||||
textarea.style.position = "fixed";
|
|
||||||
textarea.style.top = "-9999px";
|
|
||||||
textarea.style.left = "-9999px";
|
|
||||||
document.body.appendChild(textarea);
|
|
||||||
textarea.select();
|
|
||||||
textarea.setSelectionRange(0, textarea.value.length);
|
|
||||||
document.execCommand("copy");
|
|
||||||
document.body.removeChild(textarea);
|
|
||||||
}
|
|
||||||
copied.value = true;
|
|
||||||
setTimeout(() => (copied.value = false), 1500);
|
|
||||||
} catch (err) {
|
|
||||||
error.value = err?.message || "Failed to copy request code";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function checkStatus() {
|
|
||||||
if (checking.value) return;
|
|
||||||
error.value = "";
|
|
||||||
verifyBanner.value = null;
|
|
||||||
const trimmed = statusForm.request_code.trim();
|
|
||||||
if (!trimmed) return;
|
|
||||||
if (!trimmed.includes("~")) {
|
|
||||||
error.value = "Request code should look like username~XXXXXXXXXX. Copy it from the submit step.";
|
|
||||||
status.value = "unknown";
|
|
||||||
onboardingUrl.value = "";
|
|
||||||
tasks.value = [];
|
|
||||||
blocked.value = false;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
checking.value = true;
|
|
||||||
try {
|
|
||||||
const resp = await fetch("/api/access/request/status", {
|
|
||||||
method: "POST",
|
|
||||||
headers: { "Content-Type": "application/json" },
|
|
||||||
cache: "no-store",
|
|
||||||
body: JSON.stringify({ request_code: trimmed }),
|
|
||||||
});
|
|
||||||
const data = await resp.json().catch(() => ({}));
|
|
||||||
if (!resp.ok) throw new Error(data.error || resp.statusText || `status ${resp.status}`);
|
|
||||||
status.value = data.status || "unknown";
|
|
||||||
onboardingUrl.value = data.onboarding_url || "";
|
|
||||||
tasks.value = Array.isArray(data.tasks) ? data.tasks : [];
|
|
||||||
blocked.value = Boolean(data.blocked);
|
|
||||||
if (data.email_verified && status.value === "pending") {
|
|
||||||
verifyBanner.value = {
|
|
||||||
title: "Email confirmed",
|
|
||||||
body: "Your request is now waiting for manual approval. Check back here after an admin reviews it.",
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
verifyBanner.value = null;
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
error.value = err.message || "Failed to check status";
|
|
||||||
status.value = "unknown";
|
|
||||||
onboardingUrl.value = "";
|
|
||||||
tasks.value = [];
|
|
||||||
blocked.value = false;
|
|
||||||
} finally {
|
|
||||||
checking.value = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function retryProvisioning() {
|
|
||||||
if (retrying.value) return;
|
|
||||||
retryMessage.value = "";
|
|
||||||
const code = statusForm.request_code.trim();
|
|
||||||
if (!code) return;
|
|
||||||
retrying.value = true;
|
|
||||||
try {
|
|
||||||
const retryTasks = tasks.value
|
|
||||||
.filter((item) => item.status === "error")
|
|
||||||
.map((item) => item.task)
|
|
||||||
.filter(Boolean);
|
|
||||||
const resp = await fetch("/api/access/request/retry", {
|
|
||||||
method: "POST",
|
|
||||||
headers: { "Content-Type": "application/json" },
|
|
||||||
cache: "no-store",
|
|
||||||
body: JSON.stringify({ request_code: code, tasks: retryTasks }),
|
|
||||||
});
|
|
||||||
const data = await resp.json().catch(() => ({}));
|
|
||||||
if (!resp.ok) throw new Error(data.error || resp.statusText || `status ${resp.status}`);
|
|
||||||
retryMessage.value = "Retry requested. Check again in a moment.";
|
|
||||||
await checkStatus();
|
|
||||||
} catch (err) {
|
|
||||||
retryMessage.value = err?.message || "Retry request failed.";
|
|
||||||
} finally {
|
|
||||||
retrying.value = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function verifyFromLink(code, token) {
|
|
||||||
verifying.value = true;
|
|
||||||
try {
|
|
||||||
const resp = await fetch("/api/access/request/verify", {
|
|
||||||
method: "POST",
|
|
||||||
headers: { "Content-Type": "application/json" },
|
|
||||||
cache: "no-store",
|
|
||||||
body: JSON.stringify({ request_code: code, token }),
|
|
||||||
});
|
|
||||||
const data = await resp.json().catch(() => ({}));
|
|
||||||
if (!resp.ok) throw new Error(data.error || resp.statusText || `status ${resp.status}`);
|
|
||||||
status.value = data.status || status.value;
|
|
||||||
if (status.value === "pending") {
|
|
||||||
verifyBanner.value = {
|
|
||||||
title: "Email confirmed",
|
|
||||||
body: "Your request is now waiting for manual approval. Check back here after an admin reviews it.",
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
verifyBanner.value = null;
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
verifying.value = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function resendVerification() {
|
|
||||||
if (resending.value) return;
|
|
||||||
const code = statusForm.request_code.trim();
|
|
||||||
if (!code) return;
|
|
||||||
resending.value = true;
|
|
||||||
resendMessage.value = "";
|
|
||||||
try {
|
|
||||||
const resp = await fetch("/api/access/request/resend", {
|
|
||||||
method: "POST",
|
|
||||||
headers: { "Content-Type": "application/json" },
|
|
||||||
cache: "no-store",
|
|
||||||
body: JSON.stringify({ request_code: code }),
|
|
||||||
});
|
|
||||||
const data = await resp.json().catch(() => ({}));
|
|
||||||
if (!resp.ok) throw new Error(data.error || resp.statusText || `status ${resp.status}`);
|
|
||||||
resendMessage.value = "Verification email sent.";
|
|
||||||
} catch (err) {
|
|
||||||
resendMessage.value = err?.message || "Failed to resend verification email.";
|
|
||||||
} finally {
|
|
||||||
resending.value = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
onMounted(async () => {
|
|
||||||
const code = typeof route.query.code === "string" ? route.query.code.trim() : "";
|
|
||||||
const token = typeof route.query.verify === "string" ? route.query.verify.trim() : "";
|
|
||||||
const verified = typeof route.query.verified === "string" ? route.query.verified.trim() : "";
|
|
||||||
const verifyError = typeof route.query.verify_error === "string" ? route.query.verify_error.trim() : "";
|
|
||||||
if (code) {
|
|
||||||
requestCode.value = code;
|
|
||||||
statusForm.request_code = code;
|
|
||||||
submitted.value = true;
|
|
||||||
}
|
|
||||||
if (code && token) {
|
|
||||||
try {
|
|
||||||
await verifyFromLink(code, token);
|
|
||||||
} catch (err) {
|
|
||||||
error.value = err?.message || "Failed to verify email";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (code) {
|
|
||||||
await checkStatus();
|
|
||||||
}
|
|
||||||
if (verified && status.value === "pending") {
|
|
||||||
verifyBanner.value = {
|
|
||||||
title: "Email confirmed",
|
|
||||||
body: "Your request is now waiting for manual approval. Check back here after an admin reviews it.",
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (verifyError) {
|
|
||||||
error.value = `Email verification failed: ${decodeURIComponent(verifyError)}`;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
.page {
  max-width: 960px;
  margin: 0 auto;
  padding: 32px 22px 72px;
}

.hero {
  display: flex;
  align-items: flex-start;
  justify-content: space-between;
  gap: 18px;
  margin-bottom: 12px;
}

.eyebrow {
  text-transform: uppercase;
  letter-spacing: 0.08em;
  color: var(--text-muted);
  margin: 0 0 6px;
  font-size: 13px;
}

h1 {
  margin: 0 0 6px;
  font-size: 32px;
}

.lede {
  margin: 0;
  color: var(--text-muted);
  max-width: 640px;
}

.module {
  padding: 18px;
}

.status-module {
  margin-top: 14px;
}

.module-head {
  display: flex;
  align-items: center;
  justify-content: space-between;
  gap: 12px;
}

.muted {
  color: var(--text-muted);
  margin: 10px 0 0;
}

.mono {
  font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
}

.form {
  margin-top: 14px;
  display: grid;
  gap: 12px;
}

.field {
  display: grid;
  gap: 6px;
}

.availability {
  display: flex;
  align-items: center;
  gap: 8px;
}

.label {
  color: var(--text-muted);
  font-size: 12px;
  letter-spacing: 0.04em;
  text-transform: uppercase;
}

.input,
.textarea {
  width: 100%;
  border-radius: 12px;
  border: 1px solid rgba(255, 255, 255, 0.1);
  background: rgba(0, 0, 0, 0.22);
  color: var(--text);
  padding: 10px 12px;
  outline: none;
}

.textarea {
  resize: vertical;
}

.actions {
  display: flex;
  align-items: center;
  gap: 12px;
  margin-top: 6px;
}

button.primary,
a.primary {
  background: linear-gradient(90deg, #4f8bff, #7dd0ff);
  color: #0b1222;
  padding: 10px 14px;
  border: none;
  border-radius: 10px;
  cursor: pointer;
  font-weight: 700;
  text-decoration: none;
  display: inline-flex;
  align-items: center;
  justify-content: center;
}

button.primary:disabled {
  opacity: 0.6;
  cursor: not-allowed;
}

.onboarding-actions {
  margin-top: 18px;
  flex-direction: column;
  align-items: stretch;
  padding: 14px;
  border-radius: 14px;
  border: 1px solid rgba(120, 180, 255, 0.2);
  background: rgba(0, 0, 0, 0.24);
}

.onboarding-copy {
  display: grid;
  gap: 6px;
}

.onboarding-cta {
  text-align: center;
  width: 100%;
}

.status-form {
  display: flex;
  gap: 10px;
  margin-top: 12px;
}

.hint {
  color: var(--text-muted);
  font-size: 12px;
}

.error-box {
  margin-top: 14px;
  border: 1px solid rgba(255, 120, 120, 0.35);
  background: rgba(255, 64, 64, 0.12);
  border-radius: 14px;
  padding: 12px;
}

.success-box {
  margin-top: 14px;
  border: 1px solid rgba(120, 255, 160, 0.25);
  background: rgba(48, 255, 160, 0.1);
  border-radius: 14px;
  padding: 12px;
}

.request-code-row {
  margin-top: 12px;
  display: flex;
  flex-direction: column;
  gap: 6px;
}

.copy {
  display: inline-flex;
  align-items: center;
  gap: 10px;
  border-radius: 12px;
  border: 1px solid rgba(255, 255, 255, 0.14);
  background: rgba(0, 0, 0, 0.22);
  color: var(--text);
  padding: 10px 12px;
  cursor: pointer;
}

.copied {
  font-size: 12px;
  color: rgba(120, 255, 160, 0.9);
}

.pill {
  padding: 6px 10px;
  border-radius: 999px;
  font-size: 12px;
}
</style>

<style scoped>
.verify-box {
  margin-top: 12px;
  padding: 12px 14px;
  border: 1px solid rgba(120, 200, 255, 0.35);
  border-radius: 14px;
  background: rgba(48, 120, 200, 0.16);
  display: grid;
  gap: 4px;
}

.verify-title {
  font-size: 12px;
  text-transform: uppercase;
  letter-spacing: 0.08em;
  color: rgba(150, 220, 255, 0.95);
}

.verify-body {
  font-size: 13px;
  color: var(--text);
}

.task-box {
  margin-top: 14px;
  padding: 14px;
  border: 1px solid rgba(255, 255, 255, 0.08);
  border-radius: 14px;
  background: rgba(0, 0, 0, 0.25);
}

.task-list {
  list-style: none;
  padding: 0;
  margin: 0;
  display: grid;
  gap: 10px;
}

.task-row {
  display: grid;
  gap: 6px;
  grid-template-columns: 1fr auto;
  align-items: center;
}

.task-name {
  color: var(--text);
}

.task-detail {
  grid-column: 1 / -1;
  color: var(--text-muted);
  font-size: 12px;
}
</style>
[38 binary image files are removed in this comparison; the diff viewer lists only their sizes, which range from 24 KiB to 1.0 MiB, and does not render the images.]