ci(data-prepper): avoid xml parser in metrics publish
This commit is contained in:
parent
be0d3e4300
commit
8a58132dd4
@ -214,16 +214,34 @@ EOF
|
||||
mkdir -p build
# Accumulators filled in by add_case: total failure count, the junit
# <testcase> XML snippets, and the Prometheus sample lines.
failures=0
cases=""
case_metrics=""
|
||||
|
||||
# Escape a raw string for use as a Prometheus label value: double every
# backslash, then escape double quotes.
# NOTE(review): unlike the Python label_value in the aggregation heredoc
# below, this does not escape embedded newlines -- callers appear to pass
# single-line values only; confirm before reusing for arbitrary input.
label_value() {
printf '%s' "$1" | sed -e 's/\\/\\\\/g' -e 's/"/\\"/g'
}
|
||||
|
||||
# Resolve and pre-escape the label values shared by every Prometheus
# sample emitted from this stage.
suite="${SUITE_NAME:-data_prepper}"
metric_branch_raw="${BRANCH_NAME:-${GIT_BRANCH:-unknown}}"
# Strip a leading "origin/" so remote-tracking and local branch names agree.
metric_branch_raw="${metric_branch_raw#origin/}"
metric_suite="$(label_value "${suite}")"
metric_branch="$(label_value "${metric_branch_raw}")"
metric_build_number="$(label_value "${BUILD_NUMBER:-unknown}")"
metric_jenkins_job="$(label_value "${JOB_NAME:-data-prepper}")"
|
||||
|
||||
# Record the outcome of one packaging test case.
#   $1 - test case name
#   $2 - failure message; an empty string means the case passed
# Side effects: appends a <testcase> element to $cases, increments
# $failures on failure, and appends one Prometheus sample to
# $case_metrics (the embedded literal newline inside the quoted string
# terminates the sample line -- do not reformat it).
add_case() {
name="$1"
message="$2"
status="passed"
if [ -n "${message}" ]; then
status="failed"
failures=$((failures + 1))
cases="${cases}<testcase classname=\"data_prepper.packaging\" name=\"${name}\"><failure message=\"${message}\" /></testcase>"
else
cases="${cases}<testcase classname=\"data_prepper.packaging\" name=\"${name}\" />"
fi
# NOTE(review): name/message are interpolated into XML attributes without
# XML escaping -- safe only while callers pass attribute-safe literals.
test_name="$(label_value "data_prepper.packaging::${name}")"
case_metrics="${case_metrics}platform_quality_gate_test_case_result{suite=\"${metric_suite}\",branch=\"${metric_branch}\",build_number=\"${metric_build_number}\",jenkins_job=\"${metric_jenkins_job}\",test=\"${test_name}\",status=\"${status}\"} 1
"
}
|
||||
|
||||
if [ -s dockerfiles/Dockerfile.data-prepper ]; then
|
||||
@ -249,6 +267,14 @@ EOF
|
||||
${cases}
|
||||
</testsuite>
|
||||
EOF
|
||||
# NOTE(review): the hard-coded 3 presumably matches the number of
# add_case invocations in this stage -- confirm and keep in sync if
# cases are added or removed.
passed=$((3 - failures))
cat > build/test-counts.env <<EOF
test_passed_count=${passed}
test_failed_count=${failures}
test_error_count=0
test_skipped_count=0
EOF
# Samples already end with newlines (see add_case), so no trailing one.
printf '%s' "${case_metrics}" > build/testcase-metrics.prom
# Fail the stage only after both artifact files have been written.
if [ "${failures}" -ne 0 ]; then
exit 1
fi
|
||||
@ -364,7 +390,7 @@ EOF
|
||||
container('git') {
|
||||
sh '''
|
||||
set -euo pipefail
|
||||
apk add --no-cache curl jq python3 >/dev/null 2>&1 || true
|
||||
apk add --no-cache curl jq >/dev/null 2>&1 || true
|
||||
suite="${SUITE_NAME}"
|
||||
gateway="${PUSHGATEWAY_URL}"
|
||||
status="${QUALITY_OUTCOME:-failed}"
|
||||
@ -428,73 +454,17 @@ EOF
|
||||
export METRIC_BRANCH_RAW="${metric_branch_raw}"
|
||||
export METRIC_BUILD_NUMBER_RAW="${BUILD_NUMBER:-unknown}"
|
||||
export METRIC_JENKINS_JOB_RAW="${JOB_NAME:-data-prepper}"
|
||||
python3 - <<'PY'
|
||||
import glob
import os
import xml.etree.ElementTree as ET
from pathlib import Path

# NOTE: this script is embedded in a Groovy sh-string heredoc, where
# literal backslashes in the source would be mangled by Groovy escape
# processing. Special characters are therefore built via chr() and no
# backslash appears anywhere in this script (comments included).
BACKSLASH = chr(92)
NEWLINE = chr(10)
QUOTE = chr(34)


def label_value(value: str) -> str:
    """Escape a raw string for use as a Prometheus label value.

    Per the Prometheus text exposition format: double each backslash,
    replace newlines with backslash-n, and escape double quotes.
    """
    escaped = value.replace(BACKSLASH, BACKSLASH * 2)
    escaped = escaped.replace(NEWLINE, BACKSLASH + "n")
    return escaped.replace(QUOTE, BACKSLASH + QUOTE)


def metric_line(labels: dict) -> str:
    """Render one platform_quality_gate_test_case_result sample line."""
    blob = ",".join(f'{key}="{label_value(value)}"' for key, value in labels.items())
    return "platform_quality_gate_test_case_result{" + blob + "} 1"


def main() -> None:
    """Aggregate build/junit-*.xml into count and per-case metric files.

    Writes build/test-counts.env (test_<status>_count=N lines) and
    build/testcase-metrics.prom (one Prometheus sample per test case).
    """
    totals = {"passed": 0, "failed": 0, "error": 0, "skipped": 0}
    case_lines = []
    base_labels = {
        "suite": os.environ["METRIC_SUITE"],
        "branch": os.environ["METRIC_BRANCH_RAW"],
        "build_number": os.environ["METRIC_BUILD_NUMBER_RAW"],
        "jenkins_job": os.environ["METRIC_JENKINS_JOB_RAW"],
    }

    for path in glob.glob("build/junit-*.xml"):
        try:
            root = ET.parse(path).getroot()
        except ET.ParseError:
            totals["error"] += 1
            # Fix: previously a parse error bumped the error count but
            # emitted no sample, so counts and metrics disagreed (and the
            # __no_test_cases__ marker could fire on top). Emit a matching
            # per-file error sample instead.
            case_lines.append(
                metric_line(
                    {**base_labels, "test": f"{Path(path).stem}::__parse_error__", "status": "error"}
                )
            )
            continue
        for case in root.findall(".//testcase"):
            name = case.get("name") or "unnamed"
            classname = case.get("classname") or Path(path).stem
            test_name = f"{classname}::{name}" if classname else name
            # Junit semantics: an <error> child outranks <failure>, which
            # outranks <skipped>; no child element means the case passed.
            status = "passed"
            if case.find("error") is not None:
                status = "error"
            elif case.find("failure") is not None:
                status = "failed"
            elif case.find("skipped") is not None:
                status = "skipped"
            totals[status] += 1
            case_lines.append(metric_line({**base_labels, "test": test_name, "status": status}))

    if not case_lines:
        # No junit files found at all: publish an explicit skipped marker
        # so dashboards can tell "no tests ran" apart from "no data".
        totals["skipped"] += 1
        case_lines.append(
            metric_line({**base_labels, "test": "__no_test_cases__", "status": "skipped"})
        )

    Path("build/test-counts.env").write_text(
        NEWLINE.join(f"test_{key}_count={value}" for key, value in totals.items()) + NEWLINE,
        encoding="utf-8",
    )
    Path("build/testcase-metrics.prom").write_text(
        NEWLINE.join(case_lines) + NEWLINE, encoding="utf-8"
    )


if __name__ == "__main__":
    main()
|
||||
PY
|
||||
# Fallback: if the Python aggregation produced nothing (missing or empty
# output files), write a "skipped" placeholder so downstream steps always
# have both artifacts to read.
if [ ! -s build/test-counts.env ] || [ ! -s build/testcase-metrics.prom ]; then
cat > build/test-counts.env <<EOF
test_passed_count=0
test_failed_count=0
test_error_count=0
test_skipped_count=1
EOF
# NOTE(review): ${suite} is interpolated unescaped here while the other
# labels use the pre-escaped metric_* variables -- confirm whether an
# escaped metric_suite exists in this scope and should be used instead.
cat > build/testcase-metrics.prom <<METRICS
platform_quality_gate_test_case_result{suite="${suite}",branch="${metric_branch}",build_number="${metric_build_number}",jenkins_job="${metric_jenkins_job}",test="__no_test_cases__",status="skipped"} 1
METRICS
fi
# Load test_passed_count / test_failed_count / etc. into shell variables.
. build/test-counts.env
tests_check="ok"
|
||||
if [ "$((test_failed_count + test_error_count))" -gt 0 ]; then
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user