// NOTE(review): This is a Jenkins declarative pipeline (Groovy), not YAML —
// the extension-based language hint is wrong; only the embedded pod spec
// string is YAML.
//
// WARNING(review): the content below was collapsed onto a few very long lines
// by an extraction step, and several shell here-doc bodies appear to have been
// LOST, e.g. `cat > build/ironbank-compliance.json <` with no delimiter/body,
// the fallback JSON written to "${sonar_report}" / "${ironbank_report}", the
// /kaniko/.docker/config.json payload, and the build/platform-quality-metrics.prom
// metrics body. The embedded shell scripts are therefore NOT runnable as-is —
// recover the original file from version control before making functional
// edits. No comments could be added inside the scripts themselves: they are
// Groovy string literals, so inserting text would change runtime behavior.
//
// Visible structure (grounded in the code below):
//   * agent: Kubernetes pod (serviceAccountName jenkins, worker nodeSelector)
//     with three containers — `git` (alpine/git), `quality-tools`
//     (sonar-scanner + trivy image) and `kaniko` (executor debug image).
//   * environment: suite name, Pushgateway URL, SonarQube host/project/token
//     (token via credentials('sonarqube-token')), and Ironbank/SonarQube
//     quality-gate enforce flags and report paths.
//   * parameters: HARBOR_REPO, IMAGE_TAG (default '2.8.0'), PUSH_LATEST.
//   * options: disableConcurrentBuilds + logRotator retention.
//   * stages: 'Checkout' (checkout scm in git container) ->
//     'Collect quality evidence' (sonar-scanner with args array, trivy fs over
//     a copied scan root, rc files under build/; then a git-container step that
//     fetches the SonarQube quality-gate JSON via curl, assembles
//     build/ironbank-compliance.json, and synthesizes build/junit-data-prepper.xml
//     from presence checks on the Dockerfile, pipeline config and kustomization) ->
//     'Enforce quality gate' (jq over the two JSON reports; fails the build per
//     the QUALITY_GATE_* enforce/required flags) ->
//     'Build & Push' (kaniko with usernamePassword credentials
//     'harbor-robot-streaming'; writes docker auth config, presumably then runs
//     the executor — the invocation is in the truncated region, TODO confirm).
//   * tail section (presumably a post block — its opening is in the truncated
//     region, TODO confirm): fetches existing Pushgateway counters, increments
//     ok/failed, derives sonarqube/supply-chain/test check statuses, runs an
//     embedded Python heredoc that converts build/junit-*.xml into
//     platform_quality_gate_test_case_result metrics, PUTs the .prom file to
//     "${gateway}/metrics/job/platform-quality-ci/suite/${suite}", then
//     publishes JUnit results and archives build/**.
//
// Every non-comment token below is byte-identical to the reviewed revision.
pipeline { agent { kubernetes { yaml """ apiVersion: v1 kind: Pod spec: restartPolicy: Never serviceAccountName: jenkins nodeSelector: node-role.kubernetes.io/worker: "true" containers: - name: git image: alpine/git:2.47.1 command: - cat tty: true - name: quality-tools image: registry.bstein.dev/bstein/quality-tools:sonar8.0.1-trivy0.70.0-db20260422-arm64 command: - cat tty: true - name: kaniko image: gcr.io/kaniko-project/executor:v1.23.2-debug command: - /busybox/cat tty: true resources: requests: cpu: "100m" memory: "1Gi" limits: cpu: "1500m" memory: "2Gi" """ } } environment { SUITE_NAME = 'data_prepper' PUSHGATEWAY_URL = 'http://platform-quality-gateway.monitoring.svc.cluster.local:9091' SONARQUBE_HOST_URL = 'http://sonarqube.quality.svc.cluster.local:9000' SONARQUBE_PROJECT_KEY = 'data_prepper' SONARQUBE_TOKEN = credentials('sonarqube-token') QUALITY_GATE_SONARQUBE_ENFORCE = '1' QUALITY_GATE_SONARQUBE_REPORT = 'build/sonarqube-quality-gate.json' QUALITY_GATE_IRONBANK_ENFORCE = '1' QUALITY_GATE_IRONBANK_REQUIRED = '1' QUALITY_GATE_IRONBANK_REPORT = 'build/ironbank-compliance.json' } parameters { string(name: 'HARBOR_REPO', defaultValue: 'registry.bstein.dev/streaming/data-prepper', description: 'Docker repository for Data Prepper') string(name: 'IMAGE_TAG', defaultValue: '2.8.0', description: 'Image tag to publish') booleanParam(name: 'PUSH_LATEST', defaultValue: true, description: 'Also push the latest tag') } options { disableConcurrentBuilds() buildDiscarder(logRotator(daysToKeepStr: '30', numToKeepStr: '200', artifactDaysToKeepStr: '30', artifactNumToKeepStr: '120')) } stages { stage('Checkout') { steps { container('git') { checkout scm } } } stage('Collect quality evidence') { steps { container('quality-tools') { sh '''#!/usr/bin/env bash set -euo pipefail mkdir -p build args=( "-Dsonar.host.url=${SONARQUBE_HOST_URL}" "-Dsonar.login=${SONARQUBE_TOKEN}" "-Dsonar.projectKey=${SONARQUBE_PROJECT_KEY}" "-Dsonar.projectName=${SONARQUBE_PROJECT_KEY}" 
"-Dsonar.sources=services/logging,dockerfiles" "-Dsonar.inclusions=services/logging/Jenkinsfile.data-prepper,dockerfiles/Dockerfile.data-prepper" "-Dsonar.exclusions=**/.git/**,**/build/**,**/dist/**,**/node_modules/**,**/.venv/**,**/__pycache__/**" ) set +e sonar-scanner "${args[@]}" | tee build/sonar-scanner.log sonar_rc=${PIPESTATUS[0]} scan_root=build/data-prepper-supply-chain-scan rm -rf "${scan_root}" mkdir -p "${scan_root}/dockerfiles" "${scan_root}/services/logging" cp dockerfiles/Dockerfile.data-prepper "${scan_root}/dockerfiles/Dockerfile.data-prepper" cp services/logging/Jenkinsfile.data-prepper "${scan_root}/services/logging/Jenkinsfile.data-prepper" trivy fs --cache-dir "${TRIVY_CACHE_DIR}" --skip-db-update --timeout 5m --no-progress --format json --output build/trivy-fs.json --scanners vuln,secret,misconfig --severity HIGH,CRITICAL "${scan_root}" trivy_rc=$? set -e printf '%s\n' "${sonar_rc}" > build/sonarqube-analysis.rc if [ ! -s build/trivy-fs.json ]; then cat > build/ironbank-compliance.json < build/ironbank-compliance.json ''' } container('git') { sh ''' set -euo pipefail apk add --no-cache curl jq >/dev/null 2>&1 || true mkdir -p build sonar_report="${QUALITY_GATE_SONARQUBE_REPORT:-build/sonarqube-quality-gate.json}" if [ ! 
-f "${sonar_report}" ]; then if [ -n "${SONARQUBE_HOST_URL:-}" ] && [ -n "${SONARQUBE_PROJECT_KEY:-}" ]; then host="${SONARQUBE_HOST_URL%/}" query="$(printf '%s' "${SONARQUBE_PROJECT_KEY}" | sed 's/ /%20/g')" sonar_ok=0 if [ -n "${SONARQUBE_TOKEN:-}" ]; then auth="$(printf '%s:' "${SONARQUBE_TOKEN}" | base64 | tr -d '\\n')" if curl -fsS -H "Authorization: Basic ${auth}" "${host}/api/qualitygates/project_status?projectKey=${query}" > "${sonar_report}"; then sonar_ok=1 fi else if curl -fsS "${host}/api/qualitygates/project_status?projectKey=${query}" > "${sonar_report}"; then sonar_ok=1 fi fi if [ "${sonar_ok}" -ne 1 ]; then cat > "${sonar_report}" < "${sonar_report}" < "${ironbank_report}" < "${ironbank_report}" <" else cases="${cases}" fi } if [ -s dockerfiles/Dockerfile.data-prepper ]; then add_case "dockerfile_present" "" else add_case "dockerfile_present" "dockerfiles/Dockerfile.data-prepper is missing or empty" fi if [ -s services/logging/scripts/data_prepper_pipelines.yaml ]; then add_case "pipeline_config_present" "" else add_case "pipeline_config_present" "data_prepper_pipelines.yaml is missing or empty" fi if grep -q 'data-prepper-helmrelease.yaml' services/logging/kustomization.yaml; then add_case "logging_kustomization_includes_data_prepper" "" else add_case "logging_kustomization_includes_data_prepper" "services/logging/kustomization.yaml does not include data-prepper HelmRelease" fi cat > build/junit-data-prepper.xml < ${cases} EOF if [ "${failures}" -ne 0 ]; then exit 1 fi ''' } } } stage('Enforce quality gate') { steps { container('git') { sh ''' set -euo pipefail apk add --no-cache jq >/dev/null 2>&1 || true fail=0 enabled() { case "$(printf '%s' "${1:-}" | tr '[:upper:]' '[:lower:]')" in 1|true|yes|on) return 0 ;; *) return 1 ;; esac } if enabled "${QUALITY_GATE_SONARQUBE_ENFORCE:-1}"; then sonar_status="$(jq -r '.status // .projectStatus.status // .qualityGate.status // empty' build/sonarqube-quality-gate.json 2>/dev/null | tr '[:upper:]' 
'[:lower:]')" [ -n "${sonar_status}" ] || sonar_status="missing" case "${sonar_status}" in ok|pass|passed|success) ;; *) echo "sonarqube gate failed: ${sonar_status}" >&2 fail=1 ;; esac fi if enabled "${QUALITY_GATE_IRONBANK_ENFORCE:-1}"; then ironbank_required="${QUALITY_GATE_IRONBANK_REQUIRED:-1}" compliant="$(jq -r '.compliant // empty' build/ironbank-compliance.json 2>/dev/null || true)" supply_status="" if [ "${compliant}" = "true" ]; then supply_status="ok" elif [ "${compliant}" = "false" ]; then supply_status="failed" else supply_status="$(jq -r '.status // .result // .compliance // empty' build/ironbank-compliance.json 2>/dev/null | tr '[:upper:]' '[:lower:]')" fi [ -n "${supply_status}" ] || supply_status="missing" case "${supply_status}" in ok|pass|passed|success|compliant) ;; not_applicable|na|n/a) if enabled "${ironbank_required}"; then echo "supply chain gate required but status=${supply_status}" >&2 fail=1 fi ;; *) if enabled "${ironbank_required}"; then echo "supply chain gate failed: ${supply_status}" >&2 fail=1 else echo "supply chain gate not passing (${supply_status}) but not required for this run" >&2 fi ;; esac fi exit "${fail}" ''' } } } stage('Build & Push') { steps { container('kaniko') { withCredentials([usernamePassword(credentialsId: 'harbor-robot-streaming', usernameVariable: 'HARBOR_USERNAME', passwordVariable: 'HARBOR_PASSWORD')]) { sh ''' set -euo pipefail IMAGE_TAG="${IMAGE_TAG:-2.8.0}" PUSH_LATEST="${PUSH_LATEST:-true}" if [ -z "${HARBOR_REPO:-}" ] || [ "${HARBOR_REPO}" = "registry.bstein.dev/monitoring/data-prepper" ]; then HARBOR_REPO="registry.bstein.dev/streaming/data-prepper" fi mkdir -p /kaniko/.docker ref_host="$(echo "${HARBOR_REPO}" | cut -d/ -f1)" auth="$(printf "%s:%s" "${HARBOR_USERNAME}" "${HARBOR_PASSWORD}" | base64 | tr -d '\\n')" cat > /kaniko/.docker/config.json </dev/null 2>&1 || true suite="${SUITE_NAME}" gateway="${PUSHGATEWAY_URL}" status="${QUALITY_OUTCOME:-failed}" fetch_counter() { status_name="$1" 
line="$(curl -fsS "${gateway}/metrics" 2>/dev/null | awk -v suite="${suite}" -v status="${status_name}" ' /platform_quality_gate_runs_total/ { if (index($0, "job=\\"platform-quality-ci\\"") && index($0, "suite=\\"" suite "\\"") && index($0, "status=\\"" status "\\"")) { print $2 exit } } ' || true)" [ -n "${line}" ] && printf '%s\n' "${line}" || printf '0\n' } ok_count="$(fetch_counter ok)" failed_count="$(fetch_counter failed)" if [ "${status}" = "ok" ]; then ok_count=$((ok_count + 1)) else failed_count=$((failed_count + 1)) fi sonarqube_check="not_applicable" if [ -f build/sonarqube-quality-gate.json ]; then sonar_status="$(jq -r '.status // .projectStatus.status // .qualityGate.status // empty' build/sonarqube-quality-gate.json 2>/dev/null | tr '[:upper:]' '[:lower:]')" if [ -n "${sonar_status}" ]; then case "${sonar_status}" in ok|pass|passed|success) sonarqube_check="ok" ;; *) sonarqube_check="failed" ;; esac else sonarqube_check="failed" fi fi supply_chain_check="not_applicable" if [ -f build/ironbank-compliance.json ]; then compliant="$(jq -r '.compliant // empty' build/ironbank-compliance.json 2>/dev/null)" if [ "${compliant}" = "true" ]; then supply_chain_check="ok" elif [ "${compliant}" = "false" ]; then supply_chain_check="failed" else ironbank_status="$(jq -r '.status // .result // .compliance // empty' build/ironbank-compliance.json 2>/dev/null | tr '[:upper:]' '[:lower:]')" case "${ironbank_status}" in ok|pass|passed|success|compliant) supply_chain_check="ok" ;; "") supply_chain_check="failed" ;; *) supply_chain_check="failed" ;; esac fi fi gate_glue_check="ok" if [ "${status}" != "ok" ]; then gate_glue_check="failed" fi metric_branch_raw="${BRANCH_NAME:-${GIT_BRANCH:-unknown}}" metric_branch_raw="${metric_branch_raw#origin/}" metric_branch="$(printf '%s' "${metric_branch_raw}" | jq -Rsa . | sed -e 's/^"//' -e 's/"$//')" metric_build_number="$(printf '%s' "${BUILD_NUMBER:-unknown}" | jq -Rsa . 
| sed -e 's/^"//' -e 's/"$//')" metric_jenkins_job="$(printf '%s' "${JOB_NAME:-data-prepper}" | jq -Rsa . | sed -e 's/^"//' -e 's/"$//')" export METRIC_SUITE="${suite}" export METRIC_BRANCH_RAW="${metric_branch_raw}" export METRIC_BUILD_NUMBER_RAW="${BUILD_NUMBER:-unknown}" export METRIC_JENKINS_JOB_RAW="${JOB_NAME:-data-prepper}" python3 - <<'PY' import glob import os import xml.etree.ElementTree as ET from pathlib import Path def label_value(value: str) -> str: return value.replace("\\", "\\\\").replace("\n", "\\n").replace('"', '\\"') totals = {"passed": 0, "failed": 0, "error": 0, "skipped": 0} case_lines = [] suite = os.environ["METRIC_SUITE"] branch = os.environ["METRIC_BRANCH_RAW"] build_number = os.environ["METRIC_BUILD_NUMBER_RAW"] jenkins_job = os.environ["METRIC_JENKINS_JOB_RAW"] for path in glob.glob("build/junit-*.xml"): try: root = ET.parse(path).getroot() except ET.ParseError: totals["error"] += 1 continue for case in root.findall(".//testcase"): name = case.get("name") or "unnamed" classname = case.get("classname") or Path(path).stem test_name = f"{classname}::{name}" if classname else name status = "passed" if case.find("error") is not None: status = "error" elif case.find("failure") is not None: status = "failed" elif case.find("skipped") is not None: status = "skipped" totals[status] += 1 labels = { "suite": suite, "branch": branch, "build_number": build_number, "jenkins_job": jenkins_job, "test": test_name, "status": status, } label_blob = ",".join(f'{key}="{label_value(value)}"' for key, value in labels.items()) case_lines.append(f"platform_quality_gate_test_case_result{{{label_blob}}} 1") if not case_lines: totals["skipped"] += 1 labels = { "suite": suite, "branch": branch, "build_number": build_number, "jenkins_job": jenkins_job, "test": "__no_test_cases__", "status": "skipped", } label_blob = ",".join(f'{key}="{label_value(value)}"' for key, value in labels.items()) case_lines.append(f"platform_quality_gate_test_case_result{{{label_blob}}} 
1") Path("build/test-counts.env").write_text( "\n".join(f"test_{key}_count={value}" for key, value in totals.items()) + "\n", encoding="utf-8", ) Path("build/testcase-metrics.prom").write_text("\n".join(case_lines) + "\n", encoding="utf-8") PY . build/test-counts.env tests_check="ok" if [ "$((test_failed_count + test_error_count))" -gt 0 ]; then tests_check="failed" fi cat > build/platform-quality-metrics.prom <> build/platform-quality-metrics.prom curl -fsS -X PUT --data-binary @build/platform-quality-metrics.prom "${gateway}/metrics/job/platform-quality-ci/suite/${suite}" >/dev/null || \ echo "warning: metrics push failed for suite=${suite}" >&2 ''' } script { if (fileExists('build/junit-data-prepper.xml')) { junit allowEmptyResults: true, testResults: 'build/junit-*.xml' } } archiveArtifacts artifacts: 'build/**', allowEmptyArchive: true, fingerprint: true } } }