quality: add LOC ratchet and platform metrics
This commit is contained in:
parent
aa7098efad
commit
9faabcbc0e
1
Jenkinsfile
vendored
1
Jenkinsfile
vendored
@ -103,6 +103,7 @@ set -euo pipefail
|
|||||||
mkdir -p build
|
mkdir -p build
|
||||||
python -m pip install --no-cache-dir -r requirements.txt -r requirements-dev.txt
|
python -m pip install --no-cache-dir -r requirements.txt -r requirements-dev.txt
|
||||||
python -m ruff check ariadne --select PLR
|
python -m ruff check ariadne --select PLR
|
||||||
|
python scripts/check_file_sizes.py --roots ariadne scripts tests --max-lines 500 --waivers scripts/loc_hygiene_waivers.tsv
|
||||||
python -m slipcover \
|
python -m slipcover \
|
||||||
--json \
|
--json \
|
||||||
--out "${COVERAGE_JSON}" \
|
--out "${COVERAGE_JSON}" \
|
||||||
|
|||||||
86
scripts/check_file_sizes.py
Normal file
86
scripts/check_file_sizes.py
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Enforce a ratcheted source file line-budget contract.
|
||||||
|
|
||||||
|
The check fails when:
|
||||||
|
- a file exceeds the configured line budget and is not allowlisted; or
|
||||||
|
- an allowlist entry is stale (file removed or now within budget).
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def _iter_source_files(roots: list[str], exts: set[str]) -> list[Path]:
|
||||||
|
files: list[Path] = []
|
||||||
|
for root_text in roots:
|
||||||
|
root = Path(root_text)
|
||||||
|
if not root.exists():
|
||||||
|
continue
|
||||||
|
for path in root.rglob("*"):
|
||||||
|
if not path.is_file():
|
||||||
|
continue
|
||||||
|
if path.suffix not in exts:
|
||||||
|
continue
|
||||||
|
if "__pycache__" in path.parts or ".venv" in path.parts:
|
||||||
|
continue
|
||||||
|
files.append(path.resolve())
|
||||||
|
return sorted(files)
|
||||||
|
|
||||||
|
|
||||||
|
def _load_waivers(path: Path) -> dict[str, str]:
|
||||||
|
waivers: dict[str, str] = {}
|
||||||
|
if not path.exists():
|
||||||
|
return waivers
|
||||||
|
for raw_line in path.read_text(encoding="utf-8").splitlines():
|
||||||
|
line = raw_line.strip()
|
||||||
|
if not line or line.startswith("#"):
|
||||||
|
continue
|
||||||
|
parts = line.split("\t")
|
||||||
|
rel_path = parts[0].strip()
|
||||||
|
reason = parts[1].strip() if len(parts) > 1 else ""
|
||||||
|
if rel_path:
|
||||||
|
waivers[rel_path] = reason
|
||||||
|
return waivers
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
    """Run the ratcheted line-budget check; return 0 on success, 1 on violations.

    Fails when a file over the budget is missing from the waiver list, or
    when a waiver entry is stale (file gone or back within budget).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--roots", nargs="+", default=["ariadne", "scripts", "tests"])
    parser.add_argument("--max-lines", type=int, default=500)
    parser.add_argument("--waivers", default="scripts/loc_hygiene_waivers.tsv")
    args = parser.parse_args()

    repo_root = Path.cwd().resolve()
    waivers = _load_waivers(repo_root / args.waivers)

    # Measure every tracked source file against the budget.
    over_budget: dict[str, int] = {}
    for source in _iter_source_files(args.roots, {".py", ".sh"}):
        rel_name = source.relative_to(repo_root).as_posix()
        line_count = len(source.read_text(encoding="utf-8", errors="ignore").splitlines())
        if line_count > args.max_lines:
            over_budget[rel_name] = line_count

    unexpected = sorted(set(over_budget) - set(waivers))
    stale = sorted(set(waivers) - set(over_budget))

    if not unexpected and not stale:
        print(
            f"[hygiene] source line budget check passed "
            f"(limit={args.max_lines}, over_limit={len(over_budget)}, waivers={len(waivers)})"
        )
        return 0

    if unexpected:
        print("[hygiene] files over budget missing from waiver list:")
        for rel_name in unexpected:
            print(f"- {rel_name}: {over_budget[rel_name]} lines (limit {args.max_lines})")
    if stale:
        print("[hygiene] stale waiver entries (remove from waiver list):")
        for rel_name in stale:
            print(f"- {rel_name}")
    return 1
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Script entry point: propagate main()'s return value as the process exit code.
    raise SystemExit(main())
|
||||||
14
scripts/loc_hygiene_waivers.tsv
Normal file
14
scripts/loc_hygiene_waivers.tsv
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
# relative_path<TAB>why_it_is_allowlisted_for_now
|
||||||
|
ariadne/app.py core application router/orchestration pending decomposition
|
||||||
|
ariadne/manager/provisioning.py provisioning workflow hub pending modular extraction
|
||||||
|
ariadne/services/cluster_state.py legacy cluster-state monolith pending split (tracked by branch scope)
|
||||||
|
ariadne/services/comms.py legacy comms monolith pending split by concern
|
||||||
|
ariadne/services/firefly.py firefly integration handlers pending endpoint split
|
||||||
|
ariadne/services/nextcloud.py nextcloud integration surface pending staged decomposition
|
||||||
|
ariadne/services/vault.py vault integration flow pending dedicated auth/storage modules
|
||||||
|
ariadne/services/wger.py wger integration flow pending endpoint-layer split
|
||||||
|
ariadne/settings.py configuration map pending domain-specific config modules
|
||||||
|
tests/test_app.py broad integration assertions pending test-suite decomposition
|
||||||
|
tests/test_keycloak_admin.py keycloak contract tests pending helper extraction
|
||||||
|
tests/test_provisioning.py provisioning matrix tests pending split by workflow phase
|
||||||
|
tests/test_services.py service integration matrix pending split by service domain
|
||||||
|
Can't render this file because it has a wrong number of fields in line 2.
|
@ -5,10 +5,14 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
from pathlib import Path
|
||||||
import sys
|
import sys
|
||||||
import urllib.request
|
import urllib.request
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
# Repo-relative directories scanned for the line-budget metric.
SOURCE_SCAN_ROOTS = ("ariadne", "scripts", "tests")
# File suffixes counted as source files when measuring line budgets.
SOURCE_EXTENSIONS = {".py", ".sh"}
|
||||||
|
|
||||||
|
|
||||||
def _escape_label(value: str) -> str:
|
def _escape_label(value: str) -> str:
|
||||||
return value.replace("\\", "\\\\").replace("\n", "\\n").replace('"', '\\"')
|
return value.replace("\\", "\\\\").replace("\n", "\\n").replace('"', '\\"')
|
||||||
@ -97,7 +101,25 @@ def _fetch_existing_counter(pushgateway_url: str, metric: str, labels: dict[str,
|
|||||||
return 0.0
|
return 0.0
|
||||||
|
|
||||||
|
|
||||||
|
def _count_source_files_over_limit(repo_root: Path, max_lines: int = 500) -> int:
    """Count tracked source files whose line count exceeds *max_lines*.

    Mirrors scripts/check_file_sizes.py: walks SOURCE_SCAN_ROOTS for files
    with a SOURCE_EXTENSIONS suffix and compares their line count to the
    budget.  Skips ``__pycache__`` and ``.venv`` directories so the pushed
    metric agrees with the ratchet gate (the original version counted those
    files, which the gate ignores).

    :param repo_root: repository root the scan roots are resolved against.
    :param max_lines: line budget; files strictly above it are counted.
    :return: number of files over the budget.
    """
    count = 0
    for rel_root in SOURCE_SCAN_ROOTS:
        base = repo_root / rel_root
        if not base.exists():
            continue
        for path in base.rglob("*"):
            if not path.is_file():
                continue
            if path.suffix not in SOURCE_EXTENSIONS:
                continue
            # Keep parity with check_file_sizes.py: ignore caches/venvs.
            if "__pycache__" in path.parts or ".venv" in path.parts:
                continue
            lines = len(path.read_text(encoding="utf-8", errors="ignore").splitlines())
            if lines > max_lines:
                count += 1
    return count
|
||||||
|
|
||||||
|
|
||||||
def main() -> int:
|
def main() -> int:
|
||||||
|
repo_root = Path(__file__).resolve().parents[1]
|
||||||
coverage_path = os.getenv("COVERAGE_JSON", "build/coverage.json")
|
coverage_path = os.getenv("COVERAGE_JSON", "build/coverage.json")
|
||||||
junit_path = os.getenv("JUNIT_XML", "build/junit.xml")
|
junit_path = os.getenv("JUNIT_XML", "build/junit.xml")
|
||||||
pushgateway_url = os.getenv(
|
pushgateway_url = os.getenv(
|
||||||
@ -114,6 +136,7 @@ def main() -> int:
|
|||||||
raise RuntimeError(f"missing junit file {junit_path}")
|
raise RuntimeError(f"missing junit file {junit_path}")
|
||||||
|
|
||||||
coverage = _load_coverage(coverage_path)
|
coverage = _load_coverage(coverage_path)
|
||||||
|
source_lines_over_500 = _count_source_files_over_limit(repo_root, max_lines=500)
|
||||||
totals = _load_junit(junit_path)
|
totals = _load_junit(junit_path)
|
||||||
passed = max(totals["tests"] - totals["failures"] - totals["errors"] - totals["skipped"], 0)
|
passed = max(totals["tests"] - totals["failures"] - totals["errors"] - totals["skipped"], 0)
|
||||||
|
|
||||||
@ -154,6 +177,10 @@ def main() -> int:
|
|||||||
f'ariadne_quality_gate_tests_total{{suite="{suite}",result="skipped"}} {totals["skipped"]}',
|
f'ariadne_quality_gate_tests_total{{suite="{suite}",result="skipped"}} {totals["skipped"]}',
|
||||||
"# TYPE ariadne_quality_gate_coverage_percent gauge",
|
"# TYPE ariadne_quality_gate_coverage_percent gauge",
|
||||||
f'ariadne_quality_gate_coverage_percent{{suite="{suite}"}} {coverage:.3f}',
|
f'ariadne_quality_gate_coverage_percent{{suite="{suite}"}} {coverage:.3f}',
|
||||||
|
"# TYPE platform_quality_gate_workspace_line_coverage_percent gauge",
|
||||||
|
f'platform_quality_gate_workspace_line_coverage_percent{{suite="{suite}"}} {coverage:.3f}',
|
||||||
|
"# TYPE platform_quality_gate_source_lines_over_500_total gauge",
|
||||||
|
f'platform_quality_gate_source_lines_over_500_total{{suite="{suite}"}} {source_lines_over_500}',
|
||||||
"# TYPE ariadne_quality_gate_build_info gauge",
|
"# TYPE ariadne_quality_gate_build_info gauge",
|
||||||
f"ariadne_quality_gate_build_info{_label_str(labels)} 1",
|
f"ariadne_quality_gate_build_info{_label_str(labels)} 1",
|
||||||
]
|
]
|
||||||
@ -171,6 +198,7 @@ def main() -> int:
|
|||||||
"tests_errors": totals["errors"],
|
"tests_errors": totals["errors"],
|
||||||
"tests_skipped": totals["skipped"],
|
"tests_skipped": totals["skipped"],
|
||||||
"coverage_percent": round(coverage, 3),
|
"coverage_percent": round(coverage, 3),
|
||||||
|
"source_lines_over_500": source_lines_over_500,
|
||||||
"ok_counter": ok_count,
|
"ok_counter": ok_count,
|
||||||
"failed_counter": failed_count,
|
"failed_counter": failed_count,
|
||||||
},
|
},
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user