// Jenkins declarative pipeline for the Data Prepper packaging suite.
// What the visible code does:
//   - Runs on a Kubernetes agent pod (alpine/git + Kaniko debug containers,
//     scheduled onto worker nodes via nodeSelector).
//   - 'Checkout': checks out SCM in the git container.
//   - 'Collect quality evidence': fetches the SonarQube quality-gate status
//     (Basic-auth token if SONARQUBE_TOKEN is set) and writes
//     build/sonarqube-quality-gate.json and build/ironbank-compliance.json,
//     then evaluates both gates; enforcement is opt-in via the
//     QUALITY_GATE_*_ENFORCE env toggles (both currently '0', i.e. report-only).
//   - 'Build & Push': writes /kaniko/.docker/config.json from the
//     harbor-robot-streaming credential for a Kaniko build/push to HARBOR_REPO.
//   - A metrics script reads current counters from the Pushgateway, increments
//     ok/failed run counts, derives per-check results from the two JSON
//     reports, and pushes a METRICS here-doc payload; build/** is archived.
//
// NOTE(review): this file appears to have been damaged when it was flattened
// onto six physical lines — several shell here-docs have lost their bodies and
// delimiters and were fused onto adjacent text (e.g. `cat > "${sonar_report}"
// < "${sonar_report}" <`, `cat > /kaniko/.docker/config.json </dev/null`), and
// the Kaniko executor invocation plus some stage/post closings are missing
// entirely. Do NOT hand-edit this copy; recover the original Jenkinsfile from
// SCM history first. The content below is reproduced byte-for-byte as found.
pipeline { agent { kubernetes { yaml """ apiVersion: v1 kind: Pod spec: restartPolicy: Never serviceAccountName: jenkins nodeSelector: node-role.kubernetes.io/worker: "true" containers: - name: git image: alpine/git:2.47.1 command: - cat tty: true - name: kaniko image: gcr.io/kaniko-project/executor:v1.23.2-debug command: - /busybox/cat tty: true resources: requests: cpu: "100m" memory: "1Gi" limits: cpu: "1500m" memory: "2Gi" """ } } environment { SUITE_NAME = 'data_prepper' PUSHGATEWAY_URL = 'http://platform-quality-gateway.monitoring.svc.cluster.local:9091' SONARQUBE_HOST_URL = 'http://sonarqube.quality.svc.cluster.local:9000' SONARQUBE_PROJECT_KEY = 'data_prepper' SONARQUBE_TOKEN = credentials('sonarqube-token') QUALITY_GATE_SONARQUBE_ENFORCE = '0' QUALITY_GATE_SONARQUBE_REPORT = 'build/sonarqube-quality-gate.json' QUALITY_GATE_IRONBANK_ENFORCE = '0' QUALITY_GATE_IRONBANK_REQUIRED = '1' QUALITY_GATE_IRONBANK_REPORT = 'build/ironbank-compliance.json' } parameters { string(name: 'HARBOR_REPO', defaultValue: 'registry.bstein.dev/streaming/data-prepper', description: 'Docker repository for Data Prepper') string(name: 'IMAGE_TAG', defaultValue: '2.8.0', description: 'Image tag to publish') booleanParam(name: 'PUSH_LATEST', defaultValue: true, description: 'Also push the latest tag') } options { disableConcurrentBuilds() buildDiscarder(logRotator(daysToKeepStr: '30', numToKeepStr: '200', artifactDaysToKeepStr: '30', artifactNumToKeepStr: '120')) } stages { stage('Checkout') { steps { container('git') { checkout scm } } } stage('Collect quality evidence') { steps { container('git') { sh ''' set -euo pipefail apk add --no-cache curl jq >/dev/null 2>&1 || true mkdir -p build sonar_report="${QUALITY_GATE_SONARQUBE_REPORT:-build/sonarqube-quality-gate.json}" if [ ! 
-f "${sonar_report}" ]; then if [ -n "${SONARQUBE_HOST_URL:-}" ] && [ -n "${SONARQUBE_PROJECT_KEY:-}" ]; then host="${SONARQUBE_HOST_URL%/}" query="$(printf '%s' "${SONARQUBE_PROJECT_KEY}" | sed 's/ /%20/g')" sonar_ok=0 if [ -n "${SONARQUBE_TOKEN:-}" ]; then auth="$(printf '%s:' "${SONARQUBE_TOKEN}" | base64 | tr -d '\\n')" if curl -fsS -H "Authorization: Basic ${auth}" "${host}/api/qualitygates/project_status?projectKey=${query}" > "${sonar_report}"; then sonar_ok=1 fi else if curl -fsS "${host}/api/qualitygates/project_status?projectKey=${query}" > "${sonar_report}"; then sonar_ok=1 fi fi if [ "${sonar_ok}" -ne 1 ]; then cat > "${sonar_report}" < "${sonar_report}" < "${ironbank_report}" < "${ironbank_report}" </dev/null 2>&1 || true fail=0 enabled() { case "$(printf '%s' "${1:-}" | tr '[:upper:]' '[:lower:]')" in 1|true|yes|on) return 0 ;; *) return 1 ;; esac } if enabled "${QUALITY_GATE_SONARQUBE_ENFORCE:-1}"; then sonar_status="$(jq -r '.status // .projectStatus.status // .qualityGate.status // empty' build/sonarqube-quality-gate.json 2>/dev/null | tr '[:upper:]' '[:lower:]')" [ -n "${sonar_status}" ] || sonar_status="missing" case "${sonar_status}" in ok|pass|passed|success) ;; *) echo "sonarqube gate failed: ${sonar_status}" >&2 fail=1 ;; esac fi if enabled "${QUALITY_GATE_IRONBANK_ENFORCE:-1}"; then ironbank_required="${QUALITY_GATE_IRONBANK_REQUIRED:-1}" compliant="$(jq -r '.compliant // empty' build/ironbank-compliance.json 2>/dev/null || true)" supply_status="" if [ "${compliant}" = "true" ]; then supply_status="ok" elif [ "${compliant}" = "false" ]; then supply_status="failed" else supply_status="$(jq -r '.status // .result // .compliance // empty' build/ironbank-compliance.json 2>/dev/null | tr '[:upper:]' '[:lower:]')" fi [ -n "${supply_status}" ] || supply_status="missing" case "${supply_status}" in ok|pass|passed|success|compliant) ;; not_applicable|na|n/a) if enabled "${ironbank_required}"; then echo "supply chain gate required but 
status=${supply_status}" >&2 fail=1 fi ;; *) if enabled "${ironbank_required}"; then echo "supply chain gate failed: ${supply_status}" >&2 fail=1 else echo "supply chain gate not passing (${supply_status}) but not required for this run" >&2 fi ;; esac fi exit "${fail}" ''' } } } stage('Build & Push') { steps { container('kaniko') { withCredentials([usernamePassword(credentialsId: 'harbor-robot-streaming', usernameVariable: 'HARBOR_USERNAME', passwordVariable: 'HARBOR_PASSWORD')]) { sh ''' set -euo pipefail IMAGE_TAG="${IMAGE_TAG:-2.8.0}" PUSH_LATEST="${PUSH_LATEST:-true}" if [ -z "${HARBOR_REPO:-}" ] || [ "${HARBOR_REPO}" = "registry.bstein.dev/monitoring/data-prepper" ]; then HARBOR_REPO="registry.bstein.dev/streaming/data-prepper" fi mkdir -p /kaniko/.docker ref_host="$(echo "${HARBOR_REPO}" | cut -d/ -f1)" auth="$(printf "%s:%s" "${HARBOR_USERNAME}" "${HARBOR_PASSWORD}" | base64 | tr -d '\\n')" cat > /kaniko/.docker/config.json </dev/null 2>&1 || true suite="${SUITE_NAME}" gateway="${PUSHGATEWAY_URL}" status="${QUALITY_OUTCOME:-failed}" fetch_counter() { status_name="$1" line="$(curl -fsS "${gateway}/metrics" 2>/dev/null | awk -v suite="${suite}" -v status="${status_name}" ' /platform_quality_gate_runs_total/ { if (index($0, "job=\\"platform-quality-ci\\"") && index($0, "suite=\\"" suite "\\"") && index($0, "status=\\"" status "\\"")) { print $2 exit } } ' || true)" [ -n "${line}" ] && printf '%s\n' "${line}" || printf '0\n' } ok_count="$(fetch_counter ok)" failed_count="$(fetch_counter failed)" if [ "${status}" = "ok" ]; then ok_count=$((ok_count + 1)) else failed_count=$((failed_count + 1)) fi sonarqube_check="not_applicable" if [ -f build/sonarqube-quality-gate.json ]; then sonar_status="$(jq -r '.status // .projectStatus.status // .qualityGate.status // empty' build/sonarqube-quality-gate.json 2>/dev/null | tr '[:upper:]' '[:lower:]')" if [ -n "${sonar_status}" ]; then case "${sonar_status}" in ok|pass|passed|success) sonarqube_check="ok" ;; *) 
sonarqube_check="failed" ;; esac else sonarqube_check="failed" fi fi supply_chain_check="not_applicable" if [ -f build/ironbank-compliance.json ]; then compliant="$(jq -r '.compliant // empty' build/ironbank-compliance.json 2>/dev/null)" if [ "${compliant}" = "true" ]; then supply_chain_check="ok" elif [ "${compliant}" = "false" ]; then supply_chain_check="failed" else ironbank_status="$(jq -r '.status // .result // .compliance // empty' build/ironbank-compliance.json 2>/dev/null | tr '[:upper:]' '[:lower:]')" case "${ironbank_status}" in ok|pass|passed|success|compliant) supply_chain_check="ok" ;; "") supply_chain_check="failed" ;; *) supply_chain_check="failed" ;; esac fi fi gate_glue_check="ok" if [ "${status}" != "ok" ]; then gate_glue_check="failed" fi metric_branch_raw="${BRANCH_NAME:-${GIT_BRANCH:-unknown}}" metric_branch_raw="${metric_branch_raw#origin/}" metric_branch="$(printf '%s' "${metric_branch_raw}" | jq -Rsa . | sed -e 's/^"//' -e 's/"$//')" metric_build_number="$(printf '%s' "${BUILD_NUMBER:-unknown}" | jq -Rsa . | sed -e 's/^"//' -e 's/"$//')" metric_jenkins_job="$(printf '%s' "${JOB_NAME:-data-prepper}" | jq -Rsa . 
| sed -e 's/^"//' -e 's/"$//')" cat </dev/null || \ echo "warning: metrics push failed for suite=${suite}" >&2 # TYPE platform_quality_gate_runs_total counter platform_quality_gate_runs_total{suite="${suite}",status="ok"} ${ok_count} platform_quality_gate_runs_total{suite="${suite}",status="failed"} ${failed_count} # TYPE data_prepper_quality_gate_tests_total gauge data_prepper_quality_gate_tests_total{suite="${suite}",result="passed"} 0 data_prepper_quality_gate_tests_total{suite="${suite}",result="failed"} 0 data_prepper_quality_gate_tests_total{suite="${suite}",result="error"} 0 data_prepper_quality_gate_tests_total{suite="${suite}",result="skipped"} 0 # TYPE platform_quality_gate_workspace_line_coverage_percent gauge # No coverable project source is present in this packaging suite; report full # non-applicable coverage so rollups do not confuse N/A with uncovered code. platform_quality_gate_workspace_line_coverage_percent{suite="${suite}"} 100 # TYPE platform_quality_gate_source_lines_over_500_total gauge platform_quality_gate_source_lines_over_500_total{suite="${suite}"} 0 # TYPE platform_quality_gate_build_info gauge platform_quality_gate_build_info{suite="${suite}",branch="${metric_branch}",build_number="${metric_build_number}",jenkins_job="${metric_jenkins_job}"} 1 # TYPE data_prepper_quality_gate_checks_total gauge data_prepper_quality_gate_checks_total{suite="${suite}",check="tests",result="not_applicable"} 1 data_prepper_quality_gate_checks_total{suite="${suite}",check="coverage",result="not_applicable"} 1 data_prepper_quality_gate_checks_total{suite="${suite}",check="loc",result="not_applicable"} 1 data_prepper_quality_gate_checks_total{suite="${suite}",check="docs_naming",result="not_applicable"} 1 data_prepper_quality_gate_checks_total{suite="${suite}",check="gate_glue",result="${gate_glue_check}"} 1 data_prepper_quality_gate_checks_total{suite="${suite}",check="sonarqube",result="${sonarqube_check}"} 1 
data_prepper_quality_gate_checks_total{suite="${suite}",check="supply_chain",result="${supply_chain_check}"} 1 # TYPE platform_quality_gate_test_case_result gauge platform_quality_gate_test_case_result{suite="${suite}",branch="${metric_branch}",build_number="${metric_build_number}",jenkins_job="${metric_jenkins_job}",test="__no_test_cases__",status="skipped"} 1 METRICS ''' } archiveArtifacts artifacts: 'build/**', allowEmptyArchive: true, fingerprint: true } } }