// Jenkinsfile — Data Prepper packaging pipeline: quality evidence, gates, and image publish.
pipeline {
    // Ephemeral Kubernetes build pod: one container per tool, each idling on
    // `cat` with a TTY so Jenkins can exec build steps into it.
    agent {
        kubernetes {
            yaml """
apiVersion: v1
kind: Pod
spec:
  restartPolicy: Never
  serviceAccountName: jenkins
  nodeSelector:
    node-role.kubernetes.io/worker: "true"
  containers:
    - name: git
      image: alpine/git:2.47.1
      command:
        - cat
      tty: true
    - name: quality-tools
      image: registry.bstein.dev/bstein/quality-tools:sonar8.0.1-trivy0.70.0-db20260422-arm64
      command:
        - cat
      tty: true
    - name: kaniko
      # Debug variant is required: it ships a shell/busybox for `sh` steps.
      image: gcr.io/kaniko-project/executor:v1.23.2-debug
      command:
        - /busybox/cat
      tty: true
      # Only the image build gets explicit resources; the other containers
      # inherit namespace defaults.
      resources:
        requests:
          cpu: "100m"
          memory: "1Gi"
        limits:
          cpu: "1500m"
          memory: "2Gi"
"""
        }
    }
environment {
|
|
SUITE_NAME = 'data_prepper'
|
|
PUSHGATEWAY_URL = 'http://platform-quality-gateway.monitoring.svc.cluster.local:9091'
|
|
SONARQUBE_HOST_URL = 'http://sonarqube.quality.svc.cluster.local:9000'
|
|
SONARQUBE_PROJECT_KEY = 'data_prepper'
|
|
SONARQUBE_TOKEN = credentials('sonarqube-token')
|
|
QUALITY_GATE_SONARQUBE_ENFORCE = '1'
|
|
QUALITY_GATE_SONARQUBE_REPORT = 'build/sonarqube-quality-gate.json'
|
|
QUALITY_GATE_IRONBANK_ENFORCE = '1'
|
|
QUALITY_GATE_IRONBANK_REQUIRED = '1'
|
|
QUALITY_GATE_IRONBANK_REPORT = 'build/ironbank-compliance.json'
|
|
}
|
|
parameters {
|
|
string(name: 'HARBOR_REPO', defaultValue: 'registry.bstein.dev/streaming/data-prepper', description: 'Docker repository for Data Prepper')
|
|
string(name: 'IMAGE_TAG', defaultValue: '2.8.0', description: 'Image tag to publish')
|
|
booleanParam(name: 'PUSH_LATEST', defaultValue: true, description: 'Also push the latest tag')
|
|
}
|
|
options {
|
|
disableConcurrentBuilds()
|
|
buildDiscarder(logRotator(daysToKeepStr: '30', numToKeepStr: '200', artifactDaysToKeepStr: '30', artifactNumToKeepStr: '120'))
|
|
}
|
|
stages {
|
|
stage('Checkout') {
|
|
steps {
|
|
container('git') {
|
|
checkout scm
|
|
}
|
|
}
|
|
}
|
|
stage('Collect quality evidence') {
|
|
steps {
|
|
container('quality-tools') {
|
|
sh '''#!/usr/bin/env bash
|
|
set -euo pipefail
|
|
mkdir -p build
|
|
args=(
|
|
"-Dsonar.host.url=${SONARQUBE_HOST_URL}"
|
|
"-Dsonar.login=${SONARQUBE_TOKEN}"
|
|
"-Dsonar.projectKey=${SONARQUBE_PROJECT_KEY}"
|
|
"-Dsonar.projectName=${SONARQUBE_PROJECT_KEY}"
|
|
"-Dsonar.sources=services/logging,dockerfiles"
|
|
"-Dsonar.inclusions=services/logging/Jenkinsfile.data-prepper,dockerfiles/Dockerfile.data-prepper"
|
|
"-Dsonar.exclusions=**/.git/**,**/build/**,**/dist/**,**/node_modules/**,**/.venv/**,**/__pycache__/**"
|
|
)
|
|
set +e
|
|
sonar-scanner "${args[@]}" | tee build/sonar-scanner.log
|
|
sonar_rc=${PIPESTATUS[0]}
|
|
scan_root=build/data-prepper-supply-chain-scan
|
|
rm -rf "${scan_root}"
|
|
mkdir -p "${scan_root}/dockerfiles" "${scan_root}/services/logging"
|
|
cp dockerfiles/Dockerfile.data-prepper "${scan_root}/dockerfiles/Dockerfile.data-prepper"
|
|
cp services/logging/Jenkinsfile.data-prepper "${scan_root}/services/logging/Jenkinsfile.data-prepper"
|
|
trivy fs --cache-dir "${TRIVY_CACHE_DIR}" --skip-db-update --timeout 5m --no-progress --format json --output build/trivy-fs.json --scanners vuln,secret,misconfig --severity HIGH,CRITICAL "${scan_root}"
|
|
trivy_rc=$?
|
|
set -e
|
|
printf '%s\n' "${sonar_rc}" > build/sonarqube-analysis.rc
|
|
if [ ! -s build/trivy-fs.json ]; then
|
|
cat > build/ironbank-compliance.json <<EOF
|
|
{"status":"failed","compliant":false,"scanner":"trivy","scan_type":"filesystem","error":"trivy did not produce JSON output","trivy_rc":${trivy_rc}}
|
|
EOF
|
|
exit 0
|
|
fi
|
|
critical="$(jq '[.Results[]? | .Vulnerabilities[]? | select(.Severity=="CRITICAL")] | length' build/trivy-fs.json)"
|
|
high="$(jq '[.Results[]? | .Vulnerabilities[]? | select(.Severity=="HIGH")] | length' build/trivy-fs.json)"
|
|
secrets="$(jq '[.Results[]? | .Secrets[]?] | length' build/trivy-fs.json)"
|
|
misconfigs="$(jq '[.Results[]? | .Misconfigurations[]? | select(.Status=="FAIL" and (.Severity=="CRITICAL" or .Severity=="HIGH"))] | length' build/trivy-fs.json)"
|
|
status=ok
|
|
compliant=true
|
|
if [ "${critical}" -gt 0 ] || [ "${secrets}" -gt 0 ] || [ "${misconfigs}" -gt 0 ]; then
|
|
status=failed
|
|
compliant=false
|
|
fi
|
|
jq -n --arg status "${status}" --argjson compliant "${compliant}" --argjson critical "${critical}" --argjson high "${high}" --argjson secrets "${secrets}" --argjson misconfigs "${misconfigs}" --argjson trivy_rc "${trivy_rc}" \
|
|
'{status:$status, compliant:$compliant, category:"image_compliance", scan_type:"filesystem", scanner:"trivy", critical_vulnerabilities:$critical, high_vulnerabilities:$high, secrets:$secrets, high_or_critical_misconfigurations:$misconfigs, trivy_rc:$trivy_rc, high_vulnerability_policy:"observe"}' > build/ironbank-compliance.json
|
|
'''
|
|
}
|
|
container('git') {
|
|
sh '''
|
|
set -euo pipefail
|
|
apk add --no-cache curl jq >/dev/null 2>&1 || true
|
|
mkdir -p build
|
|
|
|
sonar_report="${QUALITY_GATE_SONARQUBE_REPORT:-build/sonarqube-quality-gate.json}"
|
|
if [ ! -f "${sonar_report}" ]; then
|
|
if [ -n "${SONARQUBE_HOST_URL:-}" ] && [ -n "${SONARQUBE_PROJECT_KEY:-}" ]; then
|
|
host="${SONARQUBE_HOST_URL%/}"
|
|
query="$(printf '%s' "${SONARQUBE_PROJECT_KEY}" | sed 's/ /%20/g')"
|
|
sonar_ok=0
|
|
if [ -n "${SONARQUBE_TOKEN:-}" ]; then
|
|
auth="$(printf '%s:' "${SONARQUBE_TOKEN}" | base64 | tr -d '\\n')"
|
|
if curl -fsS -H "Authorization: Basic ${auth}" "${host}/api/qualitygates/project_status?projectKey=${query}" > "${sonar_report}"; then
|
|
sonar_ok=1
|
|
fi
|
|
else
|
|
if curl -fsS "${host}/api/qualitygates/project_status?projectKey=${query}" > "${sonar_report}"; then
|
|
sonar_ok=1
|
|
fi
|
|
fi
|
|
if [ "${sonar_ok}" -ne 1 ]; then
|
|
cat > "${sonar_report}" <<EOF
|
|
{
|
|
"status": "ERROR",
|
|
"error": "sonarqube query failed"
|
|
}
|
|
EOF
|
|
fi
|
|
else
|
|
cat > "${sonar_report}" <<EOF
|
|
{
|
|
"status": "ERROR",
|
|
"note": "missing SONARQUBE_HOST_URL and/or SONARQUBE_PROJECT_KEY"
|
|
}
|
|
EOF
|
|
fi
|
|
fi
|
|
|
|
ironbank_report="${QUALITY_GATE_IRONBANK_REPORT:-build/ironbank-compliance.json}"
|
|
if [ ! -f "${ironbank_report}" ]; then
|
|
status="${IRONBANK_COMPLIANCE_STATUS:-unknown}"
|
|
compliant="${IRONBANK_COMPLIANT:-}"
|
|
if [ -n "${compliant}" ]; then
|
|
compliant_lc="$(printf '%s' "${compliant}" | tr '[:upper:]' '[:lower:]')"
|
|
compliant_json="null"
|
|
case "${compliant_lc}" in
|
|
1|true|yes|on) compliant_json="true" ;;
|
|
0|false|no|off) compliant_json="false" ;;
|
|
esac
|
|
cat > "${ironbank_report}" <<EOF
|
|
{
|
|
"status": "${status}",
|
|
"compliant": ${compliant_json},
|
|
"note": "Set IRONBANK_COMPLIANCE_STATUS/IRONBANK_COMPLIANT or write build/ironbank-compliance.json in image-building repos."
|
|
}
|
|
EOF
|
|
else
|
|
cat > "${ironbank_report}" <<EOF
|
|
{
|
|
"status": "${status}",
|
|
"note": "Set IRONBANK_COMPLIANCE_STATUS/IRONBANK_COMPLIANT or write build/ironbank-compliance.json in image-building repos."
|
|
}
|
|
EOF
|
|
fi
|
|
fi
|
|
'''
|
|
}
|
|
}
|
|
}
|
|
stage('Validation tests') {
|
|
steps {
|
|
container('git') {
|
|
sh '''#!/usr/bin/env sh
|
|
set -eu
|
|
mkdir -p build
|
|
failures=0
|
|
cases=""
|
|
|
|
add_case() {
|
|
name="$1"
|
|
message="$2"
|
|
if [ -n "${message}" ]; then
|
|
failures=$((failures + 1))
|
|
cases="${cases}<testcase classname=\"data_prepper.packaging\" name=\"${name}\"><failure message=\"${message}\" /></testcase>"
|
|
else
|
|
cases="${cases}<testcase classname=\"data_prepper.packaging\" name=\"${name}\" />"
|
|
fi
|
|
}
|
|
|
|
if [ -s dockerfiles/Dockerfile.data-prepper ]; then
|
|
add_case "dockerfile_present" ""
|
|
else
|
|
add_case "dockerfile_present" "dockerfiles/Dockerfile.data-prepper is missing or empty"
|
|
fi
|
|
|
|
if [ -s services/logging/scripts/data_prepper_pipelines.yaml ]; then
|
|
add_case "pipeline_config_present" ""
|
|
else
|
|
add_case "pipeline_config_present" "data_prepper_pipelines.yaml is missing or empty"
|
|
fi
|
|
|
|
if grep -q 'data-prepper-helmrelease.yaml' services/logging/kustomization.yaml; then
|
|
add_case "logging_kustomization_includes_data_prepper" ""
|
|
else
|
|
add_case "logging_kustomization_includes_data_prepper" "services/logging/kustomization.yaml does not include data-prepper HelmRelease"
|
|
fi
|
|
|
|
cat > build/junit-data-prepper.xml <<EOF
|
|
<testsuite name="data_prepper.packaging" tests="3" failures="${failures}" errors="0" skipped="0">
|
|
${cases}
|
|
</testsuite>
|
|
EOF
|
|
if [ "${failures}" -ne 0 ]; then
|
|
exit 1
|
|
fi
|
|
'''
|
|
}
|
|
}
|
|
}
|
|
stage('Enforce quality gate') {
|
|
steps {
|
|
container('git') {
|
|
sh '''
|
|
set -euo pipefail
|
|
apk add --no-cache jq >/dev/null 2>&1 || true
|
|
fail=0
|
|
enabled() {
|
|
case "$(printf '%s' "${1:-}" | tr '[:upper:]' '[:lower:]')" in
|
|
1|true|yes|on) return 0 ;;
|
|
*) return 1 ;;
|
|
esac
|
|
}
|
|
|
|
if enabled "${QUALITY_GATE_SONARQUBE_ENFORCE:-1}"; then
|
|
sonar_status="$(jq -r '.status // .projectStatus.status // .qualityGate.status // empty' build/sonarqube-quality-gate.json 2>/dev/null | tr '[:upper:]' '[:lower:]')"
|
|
[ -n "${sonar_status}" ] || sonar_status="missing"
|
|
case "${sonar_status}" in
|
|
ok|pass|passed|success) ;;
|
|
*)
|
|
echo "sonarqube gate failed: ${sonar_status}" >&2
|
|
fail=1
|
|
;;
|
|
esac
|
|
fi
|
|
|
|
if enabled "${QUALITY_GATE_IRONBANK_ENFORCE:-1}"; then
|
|
ironbank_required="${QUALITY_GATE_IRONBANK_REQUIRED:-1}"
|
|
compliant="$(jq -r '.compliant // empty' build/ironbank-compliance.json 2>/dev/null || true)"
|
|
supply_status=""
|
|
if [ "${compliant}" = "true" ]; then
|
|
supply_status="ok"
|
|
elif [ "${compliant}" = "false" ]; then
|
|
supply_status="failed"
|
|
else
|
|
supply_status="$(jq -r '.status // .result // .compliance // empty' build/ironbank-compliance.json 2>/dev/null | tr '[:upper:]' '[:lower:]')"
|
|
fi
|
|
[ -n "${supply_status}" ] || supply_status="missing"
|
|
case "${supply_status}" in
|
|
ok|pass|passed|success|compliant) ;;
|
|
not_applicable|na|n/a)
|
|
if enabled "${ironbank_required}"; then
|
|
echo "supply chain gate required but status=${supply_status}" >&2
|
|
fail=1
|
|
fi
|
|
;;
|
|
*)
|
|
if enabled "${ironbank_required}"; then
|
|
echo "supply chain gate failed: ${supply_status}" >&2
|
|
fail=1
|
|
else
|
|
echo "supply chain gate not passing (${supply_status}) but not required for this run" >&2
|
|
fi
|
|
;;
|
|
esac
|
|
fi
|
|
|
|
exit "${fail}"
|
|
'''
|
|
}
|
|
}
|
|
}
|
|
stage('Build & Push') {
|
|
steps {
|
|
container('kaniko') {
|
|
withCredentials([usernamePassword(credentialsId: 'harbor-robot-streaming', usernameVariable: 'HARBOR_USERNAME', passwordVariable: 'HARBOR_PASSWORD')]) {
|
|
sh '''
|
|
set -euo pipefail
|
|
IMAGE_TAG="${IMAGE_TAG:-2.8.0}"
|
|
PUSH_LATEST="${PUSH_LATEST:-true}"
|
|
if [ -z "${HARBOR_REPO:-}" ] || [ "${HARBOR_REPO}" = "registry.bstein.dev/monitoring/data-prepper" ]; then
|
|
HARBOR_REPO="registry.bstein.dev/streaming/data-prepper"
|
|
fi
|
|
mkdir -p /kaniko/.docker
|
|
ref_host="$(echo "${HARBOR_REPO}" | cut -d/ -f1)"
|
|
auth="$(printf "%s:%s" "${HARBOR_USERNAME}" "${HARBOR_PASSWORD}" | base64 | tr -d '\\n')"
|
|
cat > /kaniko/.docker/config.json <<EOF
|
|
{
|
|
"auths": {
|
|
"${ref_host}": {
|
|
"auth": "${auth}"
|
|
}
|
|
}
|
|
}
|
|
EOF
|
|
dest_args="--destination ${HARBOR_REPO}:${IMAGE_TAG}"
|
|
if [ "${PUSH_LATEST}" = "true" ]; then
|
|
dest_args="${dest_args} --destination ${HARBOR_REPO}:latest"
|
|
fi
|
|
/kaniko/executor \
|
|
--context "${WORKSPACE}" \
|
|
--dockerfile "${WORKSPACE}/dockerfiles/Dockerfile.data-prepper" \
|
|
--verbosity info \
|
|
${dest_args}
|
|
'''
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
post {
|
|
always {
|
|
script {
|
|
env.QUALITY_OUTCOME = currentBuild.currentResult == 'SUCCESS' ? 'ok' : 'failed'
|
|
}
|
|
container('git') {
|
|
sh '''
|
|
set -euo pipefail
|
|
apk add --no-cache curl jq python3 >/dev/null 2>&1 || true
|
|
suite="${SUITE_NAME}"
|
|
gateway="${PUSHGATEWAY_URL}"
|
|
status="${QUALITY_OUTCOME:-failed}"
|
|
fetch_counter() {
|
|
status_name="$1"
|
|
line="$(curl -fsS "${gateway}/metrics" 2>/dev/null | awk -v suite="${suite}" -v status="${status_name}" '
|
|
/platform_quality_gate_runs_total/ {
|
|
if (index($0, "job=\\"platform-quality-ci\\"") && index($0, "suite=\\"" suite "\\"") && index($0, "status=\\"" status "\\"")) {
|
|
print $2
|
|
exit
|
|
}
|
|
}
|
|
' || true)"
|
|
[ -n "${line}" ] && printf '%s\n' "${line}" || printf '0\n'
|
|
}
|
|
ok_count="$(fetch_counter ok)"
|
|
failed_count="$(fetch_counter failed)"
|
|
if [ "${status}" = "ok" ]; then
|
|
ok_count=$((ok_count + 1))
|
|
else
|
|
failed_count=$((failed_count + 1))
|
|
fi
|
|
sonarqube_check="not_applicable"
|
|
if [ -f build/sonarqube-quality-gate.json ]; then
|
|
sonar_status="$(jq -r '.status // .projectStatus.status // .qualityGate.status // empty' build/sonarqube-quality-gate.json 2>/dev/null | tr '[:upper:]' '[:lower:]')"
|
|
if [ -n "${sonar_status}" ]; then
|
|
case "${sonar_status}" in
|
|
ok|pass|passed|success) sonarqube_check="ok" ;;
|
|
*) sonarqube_check="failed" ;;
|
|
esac
|
|
else
|
|
sonarqube_check="failed"
|
|
fi
|
|
fi
|
|
supply_chain_check="not_applicable"
|
|
if [ -f build/ironbank-compliance.json ]; then
|
|
compliant="$(jq -r '.compliant // empty' build/ironbank-compliance.json 2>/dev/null)"
|
|
if [ "${compliant}" = "true" ]; then
|
|
supply_chain_check="ok"
|
|
elif [ "${compliant}" = "false" ]; then
|
|
supply_chain_check="failed"
|
|
else
|
|
ironbank_status="$(jq -r '.status // .result // .compliance // empty' build/ironbank-compliance.json 2>/dev/null | tr '[:upper:]' '[:lower:]')"
|
|
case "${ironbank_status}" in
|
|
ok|pass|passed|success|compliant) supply_chain_check="ok" ;;
|
|
"") supply_chain_check="failed" ;;
|
|
*) supply_chain_check="failed" ;;
|
|
esac
|
|
fi
|
|
fi
|
|
gate_glue_check="ok"
|
|
if [ "${status}" != "ok" ]; then
|
|
gate_glue_check="failed"
|
|
fi
|
|
metric_branch_raw="${BRANCH_NAME:-${GIT_BRANCH:-unknown}}"
|
|
metric_branch_raw="${metric_branch_raw#origin/}"
|
|
metric_branch="$(printf '%s' "${metric_branch_raw}" | jq -Rsa . | sed -e 's/^"//' -e 's/"$//')"
|
|
metric_build_number="$(printf '%s' "${BUILD_NUMBER:-unknown}" | jq -Rsa . | sed -e 's/^"//' -e 's/"$//')"
|
|
metric_jenkins_job="$(printf '%s' "${JOB_NAME:-data-prepper}" | jq -Rsa . | sed -e 's/^"//' -e 's/"$//')"
|
|
export METRIC_SUITE="${suite}"
|
|
export METRIC_BRANCH_RAW="${metric_branch_raw}"
|
|
export METRIC_BUILD_NUMBER_RAW="${BUILD_NUMBER:-unknown}"
|
|
export METRIC_JENKINS_JOB_RAW="${JOB_NAME:-data-prepper}"
|
|
python3 - <<'PY'
|
|
import glob
|
|
import os
|
|
import xml.etree.ElementTree as ET
|
|
from pathlib import Path
|
|
|
|
|
|
def label_value(value: str) -> str:
|
|
return value.replace("\\", "\\\\").replace("\n", "\\n").replace('"', '\\"')
|
|
|
|
|
|
totals = {"passed": 0, "failed": 0, "error": 0, "skipped": 0}
|
|
case_lines = []
|
|
suite = os.environ["METRIC_SUITE"]
|
|
branch = os.environ["METRIC_BRANCH_RAW"]
|
|
build_number = os.environ["METRIC_BUILD_NUMBER_RAW"]
|
|
jenkins_job = os.environ["METRIC_JENKINS_JOB_RAW"]
|
|
|
|
for path in glob.glob("build/junit-*.xml"):
|
|
try:
|
|
root = ET.parse(path).getroot()
|
|
except ET.ParseError:
|
|
totals["error"] += 1
|
|
continue
|
|
for case in root.findall(".//testcase"):
|
|
name = case.get("name") or "unnamed"
|
|
classname = case.get("classname") or Path(path).stem
|
|
test_name = f"{classname}::{name}" if classname else name
|
|
status = "passed"
|
|
if case.find("error") is not None:
|
|
status = "error"
|
|
elif case.find("failure") is not None:
|
|
status = "failed"
|
|
elif case.find("skipped") is not None:
|
|
status = "skipped"
|
|
totals[status] += 1
|
|
labels = {
|
|
"suite": suite,
|
|
"branch": branch,
|
|
"build_number": build_number,
|
|
"jenkins_job": jenkins_job,
|
|
"test": test_name,
|
|
"status": status,
|
|
}
|
|
label_blob = ",".join(f'{key}="{label_value(value)}"' for key, value in labels.items())
|
|
case_lines.append(f"platform_quality_gate_test_case_result{{{label_blob}}} 1")
|
|
|
|
if not case_lines:
|
|
totals["skipped"] += 1
|
|
labels = {
|
|
"suite": suite,
|
|
"branch": branch,
|
|
"build_number": build_number,
|
|
"jenkins_job": jenkins_job,
|
|
"test": "__no_test_cases__",
|
|
"status": "skipped",
|
|
}
|
|
label_blob = ",".join(f'{key}="{label_value(value)}"' for key, value in labels.items())
|
|
case_lines.append(f"platform_quality_gate_test_case_result{{{label_blob}}} 1")
|
|
|
|
Path("build/test-counts.env").write_text(
|
|
"\n".join(f"test_{key}_count={value}" for key, value in totals.items()) + "\n",
|
|
encoding="utf-8",
|
|
)
|
|
Path("build/testcase-metrics.prom").write_text("\n".join(case_lines) + "\n", encoding="utf-8")
|
|
PY
|
|
. build/test-counts.env
|
|
tests_check="ok"
|
|
if [ "$((test_failed_count + test_error_count))" -gt 0 ]; then
|
|
tests_check="failed"
|
|
fi
|
|
cat > build/platform-quality-metrics.prom <<METRICS
|
|
# TYPE platform_quality_gate_runs_total counter
|
|
platform_quality_gate_runs_total{suite="${suite}",status="ok"} ${ok_count}
|
|
platform_quality_gate_runs_total{suite="${suite}",status="failed"} ${failed_count}
|
|
# TYPE data_prepper_quality_gate_tests_total gauge
|
|
data_prepper_quality_gate_tests_total{suite="${suite}",result="passed"} ${test_passed_count}
|
|
data_prepper_quality_gate_tests_total{suite="${suite}",result="failed"} ${test_failed_count}
|
|
data_prepper_quality_gate_tests_total{suite="${suite}",result="error"} ${test_error_count}
|
|
data_prepper_quality_gate_tests_total{suite="${suite}",result="skipped"} ${test_skipped_count}
|
|
# TYPE platform_quality_gate_workspace_line_coverage_percent gauge
|
|
# No coverable project source is present in this packaging suite; report full
|
|
# non-applicable coverage so rollups do not confuse N/A with uncovered code.
|
|
platform_quality_gate_workspace_line_coverage_percent{suite="${suite}"} 100
|
|
# TYPE platform_quality_gate_source_lines_over_500_total gauge
|
|
platform_quality_gate_source_lines_over_500_total{suite="${suite}"} 0
|
|
# TYPE platform_quality_gate_build_info gauge
|
|
platform_quality_gate_build_info{suite="${suite}",branch="${metric_branch}",build_number="${metric_build_number}",jenkins_job="${metric_jenkins_job}"} 1
|
|
# TYPE data_prepper_quality_gate_checks_total gauge
|
|
data_prepper_quality_gate_checks_total{suite="${suite}",check="tests",result="${tests_check}"} 1
|
|
data_prepper_quality_gate_checks_total{suite="${suite}",check="coverage",result="not_applicable"} 1
|
|
data_prepper_quality_gate_checks_total{suite="${suite}",check="loc",result="not_applicable"} 1
|
|
data_prepper_quality_gate_checks_total{suite="${suite}",check="docs_naming",result="not_applicable"} 1
|
|
data_prepper_quality_gate_checks_total{suite="${suite}",check="gate_glue",result="${gate_glue_check}"} 1
|
|
data_prepper_quality_gate_checks_total{suite="${suite}",check="sonarqube",result="${sonarqube_check}"} 1
|
|
data_prepper_quality_gate_checks_total{suite="${suite}",check="supply_chain",result="${supply_chain_check}"} 1
|
|
# TYPE platform_quality_gate_test_case_result gauge
|
|
METRICS
|
|
cat build/testcase-metrics.prom >> build/platform-quality-metrics.prom
|
|
curl -fsS -X PUT --data-binary @build/platform-quality-metrics.prom "${gateway}/metrics/job/platform-quality-ci/suite/${suite}" >/dev/null || \
|
|
echo "warning: metrics push failed for suite=${suite}" >&2
|
|
'''
|
|
}
|
|
script {
|
|
if (fileExists('build/junit-data-prepper.xml')) {
|
|
junit allowEmptyResults: true, testResults: 'build/junit-*.xml'
|
|
}
|
|
}
|
|
archiveArtifacts artifacts: 'build/**', allowEmptyArchive: true, fingerprint: true
|
|
}
|
|
}
|
|
}
|