ci: use mirrored binfmt and final status metrics

This commit is contained in:
codex 2026-05-11 01:37:16 -03:00
parent 95068013fa
commit a343452233
2 changed files with 62 additions and 28 deletions

28
Jenkinsfile vendored
View File

@ -129,6 +129,7 @@ spec:
SONARQUBE_TOKEN = credentials('sonarqube-token') SONARQUBE_TOKEN = credentials('sonarqube-token')
QUALITY_GATE_SONARQUBE_REPORT = 'build/sonarqube-quality-gate.json' QUALITY_GATE_SONARQUBE_REPORT = 'build/sonarqube-quality-gate.json'
QUALITY_GATE_IRONBANK_REPORT = 'build/ironbank-compliance.json' QUALITY_GATE_IRONBANK_REPORT = 'build/ironbank-compliance.json'
BINFMT_IMAGE = 'registry.bstein.dev/bstein/binfmt@sha256:d3b963f787999e6c0219a48dba02978769286ff61a5f4d26245cb6a6e5567ea3'
} }
options { options {
disableConcurrentBuilds() disableConcurrentBuilds()
@ -345,17 +346,6 @@ PY
} }
} }
stage('Publish test metrics') {
steps {
container('publisher') {
sh '''
set -eu
python scripts/publish_test_metrics.py
'''
}
}
}
stage('Enforce quality gate') { stage('Enforce quality gate') {
steps { steps {
container('tester') { container('tester') {
@ -424,7 +414,7 @@ PY
docker version || true docker version || true
exit 1 exit 1
fi fi
docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64 docker run --privileged --rm "${BINFMT_IMAGE}" --install amd64,arm64
BUILDER_NAME="metis-builder-${BUILD_NUMBER}" BUILDER_NAME="metis-builder-${BUILD_NUMBER}"
docker buildx rm "${BUILDER_NAME}" >/dev/null 2>&1 || true docker buildx rm "${BUILDER_NAME}" >/dev/null 2>&1 || true
docker buildx create --name "${BUILDER_NAME}" --driver docker-container --driver-opt image=registry.bstein.dev/bstein/buildkit:buildx-stable-1 --use docker buildx create --name "${BUILDER_NAME}" --driver docker-container --driver-opt image=registry.bstein.dev/bstein/buildkit:buildx-stable-1 --use
@ -471,6 +461,20 @@ PY
} }
post { post {
always { always {
script {
if (fileExists(env.COVERAGE_JSON) && fileExists(env.JUNIT_XML)) {
withEnv(["QUALITY_GATE_FINAL_STATUS=${currentBuild.currentResult ?: 'SUCCESS'}"]) {
container('publisher') {
sh '''
set -eu
python scripts/publish_test_metrics.py
'''
}
}
} else {
echo 'quality metrics artifacts missing; skipping metrics publish'
}
}
script { script {
if (fileExists('build/junit.xml')) { if (fileExists('build/junit.xml')) {
try { try {

View File

@ -10,6 +10,7 @@ import urllib.request
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
QUALITY_SUCCESS_STATES = {"ok", "pass", "passed", "success", "compliant"} QUALITY_SUCCESS_STATES = {"ok", "pass", "passed", "success", "compliant"}
FINAL_SUCCESS_STATES = {"ok", "passed", "success"}
def _escape_label(value: str) -> str: def _escape_label(value: str) -> str:
@ -139,6 +140,18 @@ def _fetch_existing_counter(pushgateway_url: str, metric: str, labels: dict[str,
return 0.0 return 0.0
def _series_exists(pushgateway_url: str, metric: str, labels: dict[str, str]) -> bool:
    """Return True when the pushgateway already exposes a ``metric`` series carrying all of ``labels``.

    Scrapes the gateway's /metrics endpoint and looks for a sample line that
    starts with ``metric{`` and contains every ``name="value"`` pair as a
    substring. An unreachable/empty endpoint counts as "not recorded".
    NOTE(review): label values are matched verbatim — assumes they need no
    exposition-format escaping; confirm against how they were pushed.
    """
    body = _read_http(f"{pushgateway_url.rstrip('/')}/metrics")
    if not body:
        return False
    prefix = metric + "{"
    wanted = [f'{key}="{value}"' for key, value in labels.items()]
    return any(
        line.startswith(prefix) and all(pair in line for pair in wanted)
        for line in body.splitlines()
    )
def _count_source_files_over_limit(repo_root: Path, max_lines: int = 500) -> int: def _count_source_files_over_limit(repo_root: Path, max_lines: int = 500) -> int:
"""Count source files above the configured line budget.""" """Count source files above the configured line budget."""
@ -212,6 +225,7 @@ def main() -> int:
build_number = os.getenv("BUILD_NUMBER", "") build_number = os.getenv("BUILD_NUMBER", "")
jenkins_job = os.getenv("JOB_NAME", "metis") jenkins_job = os.getenv("JOB_NAME", "metis")
commit = os.getenv("GIT_COMMIT", "") commit = os.getenv("GIT_COMMIT", "")
final_status = os.getenv("QUALITY_GATE_FINAL_STATUS", "").strip().lower()
strict = os.getenv("METRICS_STRICT", "") == "1" strict = os.getenv("METRICS_STRICT", "") == "1"
repo_root = Path(__file__).resolve().parents[1] repo_root = Path(__file__).resolve().parents[1]
build_dir = repo_root / "build" build_dir = repo_root / "build"
@ -229,23 +243,45 @@ def main() -> int:
source_lines_over_500 = _count_source_files_over_limit(repo_root, max_lines=500) source_lines_over_500 = _count_source_files_over_limit(repo_root, max_lines=500)
passed = max(totals["tests"] - totals["failures"] - totals["errors"] - totals["skipped"], 0) passed = max(totals["tests"] - totals["failures"] - totals["errors"] - totals["skipped"], 0)
outcome = "ok" test_outcome = "ok"
if ( if (
(test_exit_code is not None and test_exit_code != 0) (test_exit_code is not None and test_exit_code != 0)
or totals["tests"] <= 0 or totals["tests"] <= 0
or totals["failures"] > 0 or totals["failures"] > 0
or totals["errors"] > 0 or totals["errors"] > 0
): ):
test_outcome = "failed"
outcome = test_outcome
pipeline_failed = bool(final_status) and final_status not in FINAL_SUCCESS_STATES
if pipeline_failed:
outcome = "failed" outcome = "failed"
checks = { checks = {
"tests": "ok" if outcome == "ok" else "failed", "tests": "ok" if test_outcome == "ok" else "failed",
"coverage": "ok" if coverage >= 95.0 else "failed", "coverage": "ok" if coverage >= 95.0 else "failed",
"loc": "ok" if source_lines_over_500 == 0 else "failed", "loc": "ok" if source_lines_over_500 == 0 else "failed",
"docs_naming": "ok" if docs_exit_code == 0 else "failed", "docs_naming": "ok" if docs_exit_code == 0 else "failed",
"gate_glue": "ok", "gate_glue": "failed" if pipeline_failed else "ok",
"sonarqube": _sonarqube_check_status(build_dir), "sonarqube": _sonarqube_check_status(build_dir),
"supply_chain": _supply_chain_check_status(build_dir), "supply_chain": _supply_chain_check_status(build_dir),
} }
labels = {
"job": "platform-quality-ci",
"suite": suite,
"branch": branch,
"build_number": build_number,
"jenkins_job": jenkins_job,
"commit": commit,
}
already_recorded = bool(build_number) and _series_exists(
pushgateway_url,
"platform_quality_gate_build_info",
{
"job": labels["job"],
"suite": suite,
"build_number": build_number,
"jenkins_job": jenkins_job,
},
)
ok_count = _fetch_existing_counter( ok_count = _fetch_existing_counter(
pushgateway_url, pushgateway_url,
"platform_quality_gate_runs_total", "platform_quality_gate_runs_total",
@ -256,19 +292,11 @@ def main() -> int:
"platform_quality_gate_runs_total", "platform_quality_gate_runs_total",
{"job": "platform-quality-ci", "suite": suite, "status": "failed"}, {"job": "platform-quality-ci", "suite": suite, "status": "failed"},
) )
if not already_recorded:
if outcome == "ok": if outcome == "ok":
ok_count += 1 ok_count += 1
else: else:
failed_count += 1 failed_count += 1
labels = {
"job": "platform-quality-ci",
"suite": suite,
"branch": branch,
"build_number": build_number,
"jenkins_job": jenkins_job,
"commit": commit,
}
test_case_base_labels = { test_case_base_labels = {
"suite": suite, "suite": suite,
"branch": branch, "branch": branch,
@ -331,6 +359,8 @@ def main() -> int:
"coverage_percent": round(coverage, 3), "coverage_percent": round(coverage, 3),
"source_lines_over_500": source_lines_over_500, "source_lines_over_500": source_lines_over_500,
"test_exit_code": test_exit_code, "test_exit_code": test_exit_code,
"final_status": final_status or None,
"already_recorded": already_recorded,
}, },
indent=2, indent=2,
) )