diff --git a/.github/scripts/process-performance-metrics.sh b/.github/scripts/process-performance-metrics.sh
new file mode 100755
index 0000000000..03dfb73c51
--- /dev/null
+++ b/.github/scripts/process-performance-metrics.sh
@@ -0,0 +1,187 @@
+#!/bin/bash
+#
+# Process AIT Performance Metrics
+#
+# This script processes JSONL-format performance metrics files from AIT test runs
+# and generates a markdown file with comparison tables.
+#
+# Arguments:
+#   input-dir     Directory containing *-metrics-summary.json files (JSONL format)
+#   output-file   Path to output markdown file
+#   report-title  Title for the report (e.g., "Release9.0.0")
+#
+# JSONL Format:
+#   Each input file contains one JSON object per line (not a JSON array).
+#   Each JSON object represents one test result with metadata and metrics.
+#
+# Output Format:
+#   Markdown with nested collapsible structure:
+#   - Level 1: Directory (e.g., "server")
+#   - Level 2: Test File (e.g., "tomcat")
+#   - Level 3: Test Case (e.g., "test_tomcat")
+#   Each test case contains a comparison table showing all
+#   version combinations.
+#
+
+set -euo pipefail
+
+if [ $# -ne 3 ]; then
+  echo "ProcessPerformanceMetrics: Incorrect number of arguments" >&2
+  # NOTE(review): the placeholders below were stripped from the original
+  # source (HTML-like tokens lost in transit); restored from the header
+  # comment above — confirm they match the intended usage text.
+  echo "ProcessPerformanceMetrics: $0 <input-dir> <output-file> <report-title>" >&2
+  exit 1
+fi
+
+INPUT_DIR="$1"
+OUTPUT_FILE="$2"
+REPORT_TITLE="$3"
+
+if [ ! -d "$INPUT_DIR" ]; then
+  echo "ProcessPerformanceMetrics: Input directory does not exist: $INPUT_DIR" >&2
+  exit 1
+fi
+
+if ! command -v jq &> /dev/null; then
+  echo "ProcessPerformanceMetrics: jq is required but not installed" >&2
+  exit 1
+fi
+
+# Report header. $(date) and ${REPORT_TITLE} expand inside the here-doc.
+cat > "$OUTPUT_FILE" << EOF
+# Performance Metrics - ${REPORT_TITLE}
+
+Generated: $(date)
+
+EOF
+
+TEMP_FILE=$(mktemp)
+
+trap 'rm -f "$TEMP_FILE"' EXIT
+
+# format_number VALUE FORMAT FALLBACK
+# Print VALUE through printf FORMAT when VALUE is numeric, else print FALLBACK.
+# Rationale: `printf "%.2f" "$v" 2>/dev/null || echo fallback` is buggy —
+# bash printf still writes "0.00" (and returns non-zero) for a non-numeric
+# argument, so the command substitution would capture BOTH "0.00" and the
+# fallback concatenated. Guard with an explicit numeric check instead.
+format_number() {
+  local value="$1" fmt="$2" fallback="$3"
+  if [[ "$value" =~ ^-?[0-9]+([.][0-9]+)?$ ]]; then
+    # fmt is a trusted literal supplied by this script, never user input.
+    printf "$fmt" "$value"
+  else
+    printf '%s' "$fallback"
+  fi
+}
+
+FILE_COUNT=0
+LINE_COUNT=0
+
+for file in "$INPUT_DIR"/*-metrics-summary.json; do
+  if [ -f "$file" ]; then
+    FILE_COUNT=$((FILE_COUNT + 1))
+
+    # `|| [ -n "$test_result" ]` keeps a final JSONL line that lacks a
+    # trailing newline (read returns non-zero there but still fills the var).
+    while IFS= read -r test_result || [ -n "$test_result" ]; do
+      [ -z "$test_result" ] && continue
+
+      LINE_COUNT=$((LINE_COUNT + 1))
+
+      TEST_NAME=$(echo "$test_result" | jq -r '.metadata.test_name')
+      JAVA_VERSION=$(echo "$test_result" | jq -r '.metadata.java_version')
+      FRAMEWORK_VERSION=$(echo "$test_result" | jq -r '.metadata.framework_version // "N/A"')
+
+      # test_name looks like "<dir>-<file>.<case>", e.g. "server-tomcat.test_tomcat".
+      # Parameter expansion matches the original cut -f1 / -f2- / -f1 / -f2-
+      # semantics (whole string when the delimiter is absent) without forking.
+      DIR="${TEST_NAME%%-*}"
+      REST="${TEST_NAME#*-}"
+      TEST_FILE="${REST%%.*}"
+      TEST_CASE="${REST#*.}"
+
+      # dir|test_file|test_case|test_name|java_version|framework_version|json_data
+      # json_data is last on purpose: IFS='|' read assigns the remainder
+      # (including any embedded '|') to the final field.
+      echo "${DIR}|${TEST_FILE}|${TEST_CASE}|${TEST_NAME}|${JAVA_VERSION}|${FRAMEWORK_VERSION}|${test_result}" >> "$TEMP_FILE"
+    done < "$file"
+  fi
+done
+
+echo "ProcessPerformanceMetrics: processed ${LINE_COUNT} result(s) from ${FILE_COUNT} file(s)" >&2
+
+if [ ! -s "$TEMP_FILE" ]; then
+  echo "ProcessPerformanceMetrics: No test results found in input directory" >&2
+  echo "No test results found." >> "$OUTPUT_FILE"
+  exit 0
+fi
+
+# Group by dir, test file, test case; order rows by java version (numeric)
+# then framework version so every table lists combinations consistently.
+sort -t'|' -k1,1 -k2,2 -k3,3 -k5,5n -k6,6 "$TEMP_FILE" -o "$TEMP_FILE"
+
+# NOTE(review): the <details>/<summary>/<blockquote> markup emitted below was
+# reconstructed — the HTML tokens were stripped from the original source
+# (only empty echo statements remained). The open/close counts match the
+# original echo counts; verify the exact tag layout against a previously
+# rendered wiki page.
+
+# Close an open test-case section (no-op when none is open).
+close_test_case() {
+  if [ -n "$CURRENT_TEST_CASE" ]; then
+    echo "" >> "$OUTPUT_FILE"
+    echo "</details>" >> "$OUTPUT_FILE"
+    echo "</blockquote>" >> "$OUTPUT_FILE"
+    echo "" >> "$OUTPUT_FILE"
+  fi
+}
+
+# Close an open test-file section (no-op when none is open).
+close_test_file() {
+  if [ -n "$CURRENT_TEST_FILE" ]; then
+    echo "" >> "$OUTPUT_FILE"
+    echo "</details>" >> "$OUTPUT_FILE"
+    echo "</blockquote>" >> "$OUTPUT_FILE"
+    echo "" >> "$OUTPUT_FILE"
+  fi
+}
+
+# Close an open directory section (no-op when none is open).
+close_directory() {
+  if [ -n "$CURRENT_DIR" ]; then
+    echo "" >> "$OUTPUT_FILE"
+    echo "</details>" >> "$OUTPUT_FILE"
+    echo "" >> "$OUTPUT_FILE"
+  fi
+}
+
+CURRENT_DIR=""
+CURRENT_TEST_FILE=""
+CURRENT_TEST_CASE=""
+
+while IFS='|' read -r DIR TEST_FILE TEST_CASE TEST_NAME JAVA_VERSION FRAMEWORK_VERSION JSON_DATA; do
+
+  if [ "$DIR" != "$CURRENT_DIR" ]; then
+    close_test_case
+    close_test_file
+    close_directory
+
+    echo "<details>" >> "$OUTPUT_FILE"
+    echo "<summary> ${DIR}</summary>" >> "$OUTPUT_FILE"
+    echo "" >> "$OUTPUT_FILE"
+    CURRENT_DIR="$DIR"
+    CURRENT_TEST_FILE=""
+    CURRENT_TEST_CASE=""
+  fi
+
+  if [ "$TEST_FILE" != "$CURRENT_TEST_FILE" ]; then
+    close_test_case
+    close_test_file
+
+    echo "<blockquote>" >> "$OUTPUT_FILE"
+    echo "<details>" >> "$OUTPUT_FILE"
+    echo "<summary> ${TEST_FILE}</summary>" >> "$OUTPUT_FILE"
+    echo "" >> "$OUTPUT_FILE"
+    CURRENT_TEST_FILE="$TEST_FILE"
+    CURRENT_TEST_CASE=""
+  fi
+
+  if [ "$TEST_CASE" != "$CURRENT_TEST_CASE" ]; then
+    close_test_case
+
+    echo "<blockquote>" >> "$OUTPUT_FILE"
+    echo "<details>" >> "$OUTPUT_FILE"
+    echo "<summary> ${TEST_CASE}</summary>" >> "$OUTPUT_FILE"
+    echo "" >> "$OUTPUT_FILE"
+
+    echo "| Java | Framework | CPU Time (s) | Response Time (s) | Response Count | Throughput | Errors | Heap Max (%) |" >> "$OUTPUT_FILE"
+    echo "|------|-----------|--------------|-------------------|----------------|------------|--------|--------------|" >> "$OUTPUT_FILE"
+
+    CURRENT_TEST_CASE="$TEST_CASE"
+  fi
+
+  CPU_TIME=$(echo "$JSON_DATA" | jq -r '.metrics.cpu_time // "N/A"')
+  RESP_TIME=$(echo "$JSON_DATA" | jq -r '.metrics.response_time_total // "N/A"')
+  RESP_COUNT=$(echo "$JSON_DATA" | jq -r '.metrics.response_count // "N/A"')
+  THROUGHPUT=$(echo "$JSON_DATA" | jq -r '.metrics.throughput // "N/A"')
+  ERROR_COUNT=$(echo "$JSON_DATA" | jq -r '.metrics.error_count // "N/A"')
+  HEAP_UTIL=$(echo "$JSON_DATA" | jq -r '.metrics.heap_utilization_max // "0.0"')
+  HEAP_PRESENT=$(echo "$JSON_DATA" | jq -r '.metrics.heap_metrics_present')
+
+  # Format numeric values; non-numeric values (e.g. "N/A") pass through.
+  CPU_TIME_FMT=$(format_number "$CPU_TIME" '%.2f' "$CPU_TIME")
+  RESP_TIME_FMT=$(format_number "$RESP_TIME" '%.2f' "$RESP_TIME")
+  HEAP_UTIL_FMT=$(format_number "$HEAP_UTIL" '%.1f%%' "0.0%")
+  # Show a dash when heap metrics were absent or trivially zero.
+  if [ "$HEAP_PRESENT" = "false" ] || [ "$HEAP_UTIL" = "0.0" ]; then
+    HEAP_UTIL_FMT="-"
+  fi
+
+  echo "| ${JAVA_VERSION} | ${FRAMEWORK_VERSION} | ${CPU_TIME_FMT} | ${RESP_TIME_FMT} | ${RESP_COUNT} | ${THROUGHPUT} | ${ERROR_COUNT} | ${HEAP_UTIL_FMT} |" >> "$OUTPUT_FILE"
+
+done < "$TEMP_FILE"
+
+close_test_case
+close_test_file
+close_directory
diff --git a/.github/workflows/Test-AITs.yml b/.github/workflows/Test-AITs.yml
index 6c7105f562..4d9446a38c 100644
--- a/.github/workflows/Test-AITs.yml
+++ b/.github/workflows/Test-AITs.yml
@@ -19,9 +19,19 @@ on:
       description: "Specify a single test. If left blank, all tests will run. You can choose to specify a single test file or a test case in that file. 
For example you can write 'server/tomcat.py' to run all tomcat tests or run 'server/tomcat.py TomcatTest.test_tomcat' to run a specific test case." required: false default: '' - ingest-metrics-desc: - description: "(Optional) For site wiki ingest metrics: Specify a description for this run. This will be used as the title for the new wiki page. NO SPACES. Ex: Post8.25Release" + wiki-report-desc: + description: "(Optional) Description for wiki reports (used when publishing ingest metrics and/or metrics summary.) NO SPACES. Ex: Release9.0.0" required: false + publish-ingest-metrics: + description: "Publish ingest metrics to wiki" + required: false + default: false + type: boolean + publish-performance-metrics: + description: "Publish AIT performance metrics to wiki" + required: false + default: false + type: boolean workflow_call: inputs: agent-ref: @@ -44,10 +54,20 @@ on: required: false default: '' type: string - ingest-metrics-desc: - description: "(Optional) For site wiki ingest metrics: Specify a description for this run. This will be used as the title for the new wiki page. NO SPACES. Ex: Post8.25Release" + wiki-report-desc: + description: "(Optional) Description for wiki reports (used when publishing ingest metrics and/or metrics summary.) NO SPACES. 
Ex: Release9.0.0" required: false type: string + publish-ingest-metrics: + description: "Publish ingest metrics to wiki" + required: false + default: false + type: boolean + publish-performance-metrics: + description: "Publish performance metrics to wiki" + required: false + default: false + type: boolean jobs: build-agent: @@ -371,6 +391,12 @@ jobs: PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python \ ./bin/runtest.sh tests/java/functionality/${{ matrix.tests }} + - name: Export Test Name + if: ${{ always() }} + run: | + TEST_NAME=$(echo ${{ matrix.tests }} | sed 's|/|-|g') + echo "TEST_NAME=${TEST_NAME}" >> $GITHUB_ENV + # Rename matrix item to remove problem characters - name: Rename Matrix item run: | @@ -392,17 +418,25 @@ jobs: if-no-files-found: ignore - name: Upload raw ingest metric files - if: ${{ success() }} + if: ${{ success() && inputs.wiki-report-desc != '' && inputs.publish-ingest-metrics }} uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # pin@v4 with: name: ingest-raw-data-${{ env.MATRIX }} path: ${{ github.workspace }}/ingest-*.txt if-no-files-found: ignore + - name: Upload AIT performance files + if: ${{ success() && inputs.wiki-report-desc != '' && inputs.publish-performance-metrics }} + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # pin@v4 + with: + name: performance-metrics-${{ env.MATRIX }} + path: ${{ github.workspace }}/*-metrics-summary.json + if-no-files-found: ignore + process-ingest-metric-files: name: Process Ingest Metrics needs: [ tests ] - if: ${{ always() && inputs.ingest-metrics-desc != '' }} + if: ${{ always() && inputs.wiki-report-desc != '' && inputs.publish-ingest-metrics }} runs-on: ubuntu-22.04 permissions: contents: write @@ -455,10 +489,10 @@ jobs: if-no-files-found: ignore - name: Create markdown file - if: ${{ inputs.ingest-metrics-desc != '' }} + if: ${{ inputs.wiki-report-desc != '' }} run: | # Start with the sum as the first line - echo "# Ingest Metrics Summary - ${{ 
inputs.ingest-metrics-desc }}" > ingest-report.md + echo "# Ingest Metrics Summary - ${{ inputs.wiki-report-desc }}" > ingest-report.md echo "" >> ingest-report.md echo "## **Total Bytes Sent:** $(cat ingest-sum.txt)" >> ingest-report.md echo "" >> ingest-report.md @@ -471,27 +505,27 @@ jobs: echo "\`\`\`" >> ingest-report.md - name: Checkout wiki repository - if: ${{ inputs.ingest-metrics-desc != '' }} + if: ${{ inputs.wiki-report-desc != '' }} uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # pin@v4 with: repository: ${{ github.repository }}.wiki path: wiki - name: Commit and push to wiki - if: ${{ inputs.ingest-metrics-desc != '' }} + if: ${{ inputs.wiki-report-desc != '' }} run: | # Create ingest-metrics directory if it doesn't exist mkdir -p wiki/ingest-metrics # Copy the report using ingest-metrics-desc as filename - FILENAME="ingest-metrics-${{ inputs.ingest-metrics-desc }}.md" + FILENAME="ingest-metrics-${{ inputs.wiki-report-desc }}.md" cp ingest-report.md "wiki/ingest-metrics/${FILENAME}" cd wiki # Append link to Ingest-Metrics.md if it doesn't already exist - LINK_TEXT="- [Ingest Metrics ${{ inputs.ingest-metrics-desc }}](https://github.com/${{ github.repository }}/wiki/ingest-metrics-${{ inputs.ingest-metrics-desc }})" - if ! grep -q "ingest-metrics-${{ inputs.ingest-metrics-desc }})" Ingest-Metrics.md 2>/dev/null; then + LINK_TEXT="- [Ingest Metrics ${{ inputs.wiki-report-desc }}](https://github.com/${{ github.repository }}/wiki/ingest-metrics-${{ inputs.wiki-report-desc }})" + if ! 
grep -q "ingest-metrics-${{ inputs.wiki-report-desc }})" Ingest-Metrics.md 2>/dev/null; then echo "" >> Ingest-Metrics.md echo "${LINK_TEXT}" >> Ingest-Metrics.md fi @@ -499,9 +533,87 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" git add "ingest-metrics/${FILENAME}" Ingest-Metrics.md - git commit -m "Update ingest metrics report for ${{ inputs.ingest-metrics-desc }} from workflow run ${{ github.run_id }}" + git commit -m "Update ingest metrics report for ${{ inputs.wiki-report-desc }} from workflow run ${{ github.run_id }}" + git push + + process-performance-metrics: + name: Process AIT Performance Metrics + needs: [ tests ] + if: ${{ always() && inputs.wiki-report-desc != '' && inputs.publish-performance-metrics }} + runs-on: ubuntu-22.04 + permissions: + contents: write + + steps: + - name: Checkout repository for scripts + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # pin@v4 + + - name: Download all AIT performance files + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # pin@v4 + with: + pattern: performance-metrics-* + merge-multiple: true + path: all-performance-metrics + + - name: Process performance metrics and generate report + run: | + chmod +x .github/scripts/process-performance-metrics.sh + .github/scripts/process-performance-metrics.sh \ + all-performance-metrics \ + performance-metrics-report.md \ + "${{ inputs.wiki-report-desc }}" + + - name: Checkout wiki repository + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # pin@v4 + with: + repository: ${{ github.repository }}.wiki + path: wiki + + - name: Commit and push to wiki + run: | + mkdir -p wiki/performance-metrics + + FILENAME="performance-metrics-${{ inputs.wiki-report-desc }}.md" + cp performance-metrics-report.md "wiki/performance-metrics/${FILENAME}" + + cd wiki + + # Add link to index page + LINK_TEXT="- [Performance Metrics ${{ inputs.wiki-report-desc 
}}](https://github.com/${{ github.repository }}/wiki/performance-metrics-${{ inputs.wiki-report-desc }})"
+          if ! grep -q "performance-metrics-${{ inputs.wiki-report-desc }})" Performance-Metrics.md 2>/dev/null; then
+            echo "" >> Performance-Metrics.md
+            echo "${LINK_TEXT}" >> Performance-Metrics.md
+          fi
+
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git add "performance-metrics/${FILENAME}" Performance-Metrics.md
+          git commit -m "Add AIT performance metrics for ${{ inputs.wiki-report-desc }} from workflow run ${{ github.run_id }}"
+          git push
+
+      - name: Add summary link to workflow
+        run: |
+          if [ ! -d "all-performance-metrics" ] || [ -z "$(ls -A all-performance-metrics 2>/dev/null)" ]; then
+            echo "## Performance Metrics" >> $GITHUB_STEP_SUMMARY
+            echo "No AIT performance files found" >> $GITHUB_STEP_SUMMARY
+          else
+            # Count total test results across all JSONL files (one test per line)
+            TEST_COUNT=0
+            for file in all-performance-metrics/*-metrics-summary.json; do
+              if [ -f "$file" ]; then
+                # Count non-empty lines.
+                # BUGFIX: `grep -c` prints "0" AND exits non-zero when there
+                # are no matches, so `|| echo 0` captured "0<newline>0" and the
+                # arithmetic below failed with a syntax error. Capture the
+                # printed count and only default it when grep produced nothing.
+                COUNT=$(grep -c . "$file" 2>/dev/null || :)
+                TEST_COUNT=$((TEST_COUNT + ${COUNT:-0}))
+              fi
+            done
+
+            echo "## Performance Metrics" >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "**${TEST_COUNT} test(s)** with metrics reported" >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "View detailed metrics in [Wiki - Performance Metrics ${{ inputs.wiki-report-desc }}](https://github.com/${{ github.repository }}/wiki/performance-metrics-${{ inputs.wiki-report-desc }})" >> $GITHUB_STEP_SUMMARY
+          fi
+
   notify-if-negative-values:
     needs: [ tests ]
     runs-on: ubuntu-22.04