diff --git a/.github/workflows/full-build-ci.yml b/.github/workflows/full-build-ci.yml
index bb2defc0d77..8b53a392593 100644
--- a/.github/workflows/full-build-ci.yml
+++ b/.github/workflows/full-build-ci.yml
@@ -76,6 +76,16 @@ jobs:
     uses: ./.github/workflows/verify-liquibase-backward-compatibility.yml
     secrets: inherit
 
+  run-liquibase-ddl-safety:
+    needs: build-core
+    uses: ./.github/workflows/verify-liquibase-ddl-safety.yml
+    secrets: inherit
+
+  run-regression-safety-db-changes:
+    needs: build-core
+    uses: ./.github/workflows/regression-safety-db-changes.yml
+    secrets: inherit
+
   run-sonarqube:
     needs: build-core
     uses: ./.github/workflows/sonarqube.yml
diff --git a/.github/workflows/regression-safety-db-changes.yml b/.github/workflows/regression-safety-db-changes.yml
new file mode 100644
index 00000000000..ff0cffb2447
--- /dev/null
+++ b/.github/workflows/regression-safety-db-changes.yml
@@ -0,0 +1,407 @@
+name: Regression Safety - Database Changes
+
+on:
+  workflow_call:
+
+permissions:
+  contents: read
+
+jobs:
+  detect-db-changes:
+    runs-on: ubuntu-24.04
+    timeout-minutes: 5
+    outputs:
+      has_liquibase_changes: ${{ steps.check.outputs.has_changes }}
+    steps:
+      - name: Download workspace
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5
+        with:
+          name: fineract-workspace-${{ github.run_id }}
+          path: .
+
+      - name: Extract workspace
+        run: tar -xf fineract-workspace.tar
+
+      - name: Detect Liquibase changes
+        id: check
+        run: |
+          if [ "${{ github.event_name }}" != "pull_request" ]; then
+            echo "has_changes=false" >> "$GITHUB_OUTPUT"
+            echo "Not a pull request event."
+            exit 0
+          fi
+
+          git fetch origin "${{ github.event.pull_request.base.ref }}" --no-tags
+          MERGE_BASE=$(git merge-base "origin/${{ github.event.pull_request.base.ref }}" HEAD)
+          CHANGES=$(git diff --name-only "$MERGE_BASE"..HEAD -- '**/db/changelog/**/*.xml' || true)
+
+          if [ -n "$CHANGES" ]; then
+            echo "has_changes=true" >> "$GITHUB_OUTPUT"
+            echo "Liquibase changes detected:"
+            echo "$CHANGES"
+          else
+            echo "has_changes=false" >> "$GITHUB_OUTPUT"
+            echo "No Liquibase changes detected."
+          fi
+
+  regression-safety:
+    needs: detect-db-changes
+    if: needs.detect-db-changes.outputs.has_liquibase_changes == 'true'
+    runs-on: ubuntu-24.04
+    timeout-minutes: 60
+
+    services:
+      postgresql:
+        image: postgres:18.3
+        ports:
+          - 5432:5432
+        env:
+          POSTGRES_USER: root
+          POSTGRES_PASSWORD: postgres
+        options: >-
+          --health-cmd="pg_isready -q -d postgres -U root"
+          --health-interval=5s
+          --health-timeout=2s
+          --health-retries=5
+
+    env:
+      DB_USER: root
+      DB_PASSWORD: postgres
+      DB_NAME: fineract_default
+      TZ: Asia/Kolkata
+      DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
+      # E2E test configuration
+      BASE_URL: https://localhost:8443
+      TEST_USERNAME: mifos
+      TEST_PASSWORD: password
+      TEST_STRONG_PASSWORD: A1b2c3d4e5f$
+      TEST_TENANT_ID: default
+      INITIALIZATION_ENABLED: true
+      EVENT_VERIFICATION_ENABLED: false
+      # devRun connection args (reused across instances)
+      DEVRUN_ARGS: >-
+        --spring.datasource.hikari.driverClassName=org.postgresql.Driver
+        --spring.datasource.hikari.jdbcUrl=jdbc:postgresql://localhost:5432/fineract_tenants
+        --spring.datasource.hikari.username=root
+        --spring.datasource.hikari.password=postgres
+        --fineract.tenant.host=localhost
+        --fineract.tenant.port=5432
+        --fineract.tenant.username=root
+        --fineract.tenant.password=postgres
+
+    steps:
+      # ============================================================
+      # SETUP
+      # ============================================================
+      - name: Checkout base commit (baseline)
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+        with:
+          repository: ${{ github.event.pull_request.base.repo.full_name }}
+          ref: ${{ github.event.pull_request.base.sha }}
+          fetch-depth: 0
+          path: baseline
+
+      - name: Set up JDK 21
+        uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
+        with:
+          distribution: 'zulu'
+          java-version: '21'
+
+      - name: Download workspace
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5
+        with:
+          name: fineract-workspace-${{ github.run_id }}
+          path: .
+
+      - name: Extract workspace
+        run: tar -xf fineract-workspace.tar
+
+      - name: Setup Gradle
+        uses: gradle/actions/setup-gradle@0723195856401067f7a2779048b490ace7a47d7c # v5.0.2
+
+      - name: Wait for PostgreSQL
+        run: |
+          until pg_isready -h localhost -U $DB_USER; do
+            echo "Waiting for postgres..."
+            sleep 2
+          done
+
+      - name: Print checked out revisions
+        run: |
+          echo "Baseline revision:"
+          git -C baseline rev-parse HEAD
+          git -C baseline log -1 --oneline
+          echo "PR revision:"
+          git rev-parse HEAD
+          git log -1 --oneline
+
+      # ============================================================
+      # INSTANCE #1: BASELINE (develop code + fresh DB)
+      # Proves: golden path works before the PR
+      # ============================================================
+      - name: "Instance #1: Create fresh databases"
+        working-directory: baseline
+        run: |
+          ./gradlew --no-daemon createPGDB -PdbName=fineract_tenants
+          ./gradlew --no-daemon createPGDB -PdbName=$DB_NAME
+
+      - name: "Instance #1: Start baseline backend"
+        working-directory: baseline
+        run: |
+          ./gradlew :fineract-provider:devRun --args="${DEVRUN_ARGS}" &
+          BACKEND_PID=$!
+          echo $BACKEND_PID > backend.pid
+
+          ACTUATOR_URL="https://localhost:8443/fineract-provider/actuator/health"
+          TIMEOUT_SECONDS=600
+          INTERVAL_SECONDS=2
+
+          echo "Waiting for baseline backend Actuator: $ACTUATOR_URL (timeout ${TIMEOUT_SECONDS}s)..."
+          start_ts=$(date +%s)
+          while true; do
+            if ! kill -0 "$BACKEND_PID" 2>/dev/null; then
+              echo "Backend process exited before Actuator became available."
+              exit 1
+            fi
+            if curl -kfsS "$ACTUATOR_URL" >/dev/null 2>&1; then
+              echo "Actuator is up."
+              break
+            fi
+            now_ts=$(date +%s)
+            if [ $((now_ts - start_ts)) -ge "$TIMEOUT_SECONDS" ]; then
+              echo "Timed out waiting for Actuator."
+              exit 1
+            fi
+            sleep "$INTERVAL_SECONDS"
+          done
+
+      - name: "Instance #1: Run regression safety E2E test"
+        id: instance1
+        continue-on-error: true
+        run: |
+          ./gradlew --no-daemon --console=plain \
+            :fineract-e2e-tests-runner:cucumber \
+            -Pcucumber.tags="@RegressionSafety" \
+            -x spotlessCheck
+
+      - name: "Instance #1: Stop backend"
+        if: always()
+        working-directory: baseline
+        run: |
+          kill $(cat backend.pid) 2>/dev/null || true
+          sleep 10
+
+      - name: "Instance #1: Drop databases"
+        if: always()
+        run: |
+          PGPASSWORD=postgres psql -h localhost -U root -d postgres \
+            -c "DROP DATABASE IF EXISTS fineract_tenants;" \
+            -c "DROP DATABASE IF EXISTS $DB_NAME;"
+
+      # ============================================================
+      # INSTANCE #2: HYBRID (baseline code + PR schema)
+      # Proves: old code survives the PR's schema changes
+      # This is the KEY test for rolling deployment safety
+      # ============================================================
+      - name: "Instance #2: Create fresh databases"
+        run: |
+          PGPASSWORD=postgres psql -h localhost -U root -d postgres \
+            -c "CREATE DATABASE fineract_tenants;" \
+            -c "CREATE DATABASE $DB_NAME;"
+
+      - name: "Instance #2: Apply PR Liquibase migrations (schema only)"
+        run: |
+          echo "Applying all Liquibase changesets (base + PR) using PR code..."
+          # The liquibase-only profile sets web-application-type=none, so Spring Boot
+          # exits naturally after migrations complete. The 600s timeout is a safety net
+          # that FAILS the step if migrations hang — it is NOT treated as success.
+          timeout 600 ./gradlew --no-daemon :fineract-provider:bootRun --args="\
+            --spring.profiles.active=liquibase-only \
+            --spring.datasource.hikari.driverClassName=org.postgresql.Driver \
+            --spring.datasource.hikari.jdbcUrl=jdbc:postgresql://localhost:5432/fineract_tenants \
+            --spring.datasource.hikari.username=root \
+            --spring.datasource.hikari.password=postgres \
+            --fineract.tenant.host=localhost \
+            --fineract.tenant.port=5432 \
+            --fineract.tenant.username=root \
+            --fineract.tenant.password=postgres" || {
+              EXIT_CODE=$?
+              if [ $EXIT_CODE -eq 124 ]; then
+                echo "::error::Liquibase migrations timed out after 600s — migrations may be incomplete."
+                exit 1
+              else
+                echo "bootRun failed with exit code $EXIT_CODE"
+                exit 1
+              fi
+            }
+          echo "Liquibase migrations completed successfully (natural exit)."
+
+      - name: "Instance #2: Start baseline backend (old code, new schema)"
+        working-directory: baseline
+        run: |
+          echo "Starting baseline code against DB with PR schema (Liquibase disabled)..."
+          ./gradlew :fineract-provider:devRun --args="\
+            ${DEVRUN_ARGS} \
+            --spring.liquibase.enabled=false" &
+          BACKEND_PID=$!
+          echo $BACKEND_PID > backend.pid
+
+          ACTUATOR_URL="https://localhost:8443/fineract-provider/actuator/health"
+          TIMEOUT_SECONDS=600
+          INTERVAL_SECONDS=2
+
+          echo "Waiting for hybrid backend Actuator: $ACTUATOR_URL (timeout ${TIMEOUT_SECONDS}s)..."
+          start_ts=$(date +%s)
+          while true; do
+            if ! kill -0 "$BACKEND_PID" 2>/dev/null; then
+              echo "::error::Instance #2 FAILED: Baseline code could not start against PR schema."
+              echo "This likely means the PR's Liquibase changes break backward compatibility."
+              exit 1
+            fi
+            if curl -kfsS "$ACTUATOR_URL" >/dev/null 2>&1; then
+              echo "Actuator is up."
+              break
+            fi
+            now_ts=$(date +%s)
+            if [ $((now_ts - start_ts)) -ge "$TIMEOUT_SECONDS" ]; then
+              echo "Timed out waiting for Actuator."
+              exit 1
+            fi
+            sleep "$INTERVAL_SECONDS"
+          done
+
+      - name: "Instance #2: Run regression safety E2E test"
+        id: instance2
+        continue-on-error: true
+        run: |
+          ./gradlew --no-daemon --console=plain \
+            :fineract-e2e-tests-runner:cucumber \
+            -Pcucumber.tags="@RegressionSafety" \
+            -x spotlessCheck
+
+      - name: "Instance #2: Stop backend"
+        if: always()
+        working-directory: baseline
+        run: |
+          kill $(cat backend.pid) 2>/dev/null || true
+          sleep 10
+
+      - name: "Instance #2: Drop databases"
+        if: always()
+        run: |
+          PGPASSWORD=postgres psql -h localhost -U root -d postgres \
+            -c "DROP DATABASE IF EXISTS fineract_tenants;" \
+            -c "DROP DATABASE IF EXISTS $DB_NAME;"
+
+      # ============================================================
+      # INSTANCE #3: FULL PR (PR code + PR schema)
+      # Proves: golden path works with all PR changes
+      # ============================================================
+      - name: "Instance #3: Create fresh databases"
+        run: |
+          PGPASSWORD=postgres psql -h localhost -U root -d postgres \
+            -c "CREATE DATABASE fineract_tenants;" \
+            -c "CREATE DATABASE $DB_NAME;"
+
+      - name: "Instance #3: Start PR backend"
+        run: |
+          ./gradlew :fineract-provider:devRun --args="${DEVRUN_ARGS}" &
+          BACKEND_PID=$!
+          echo $BACKEND_PID > backend.pid
+
+          ACTUATOR_URL="https://localhost:8443/fineract-provider/actuator/health"
+          TIMEOUT_SECONDS=600
+          INTERVAL_SECONDS=2
+
+          echo "Waiting for PR backend Actuator: $ACTUATOR_URL (timeout ${TIMEOUT_SECONDS}s)..."
+          start_ts=$(date +%s)
+          while true; do
+            if ! kill -0 "$BACKEND_PID" 2>/dev/null; then
+              echo "Backend process exited before Actuator became available."
+              exit 1
+            fi
+            if curl -kfsS "$ACTUATOR_URL" >/dev/null 2>&1; then
+              echo "Actuator is up."
+              break
+            fi
+            now_ts=$(date +%s)
+            if [ $((now_ts - start_ts)) -ge "$TIMEOUT_SECONDS" ]; then
+              echo "Timed out waiting for Actuator."
+              exit 1
+            fi
+            sleep "$INTERVAL_SECONDS"
+          done
+
+      - name: "Instance #3: Run regression safety E2E test"
+        id: instance3
+        continue-on-error: true
+        run: |
+          ./gradlew --no-daemon --console=plain \
+            :fineract-e2e-tests-runner:cucumber \
+            -Pcucumber.tags="@RegressionSafety" \
+            -x spotlessCheck
+
+      - name: "Instance #3: Stop backend"
+        if: always()
+        run: |
+          kill $(cat backend.pid) 2>/dev/null || true
+          sleep 10
+
+      # ============================================================
+      # RESULTS
+      # ============================================================
+      - name: Generate results summary
+        if: always()
+        run: |
+          echo "## Regression Safety - Database Changes" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "| Instance | Description | Result |" >> $GITHUB_STEP_SUMMARY
+          echo "|----------|-------------|--------|" >> $GITHUB_STEP_SUMMARY
+
+          if [ "${{ steps.instance1.outcome }}" == "success" ]; then
+            echo "| #1 Baseline | develop code + fresh DB | PASSED |" >> $GITHUB_STEP_SUMMARY
+          else
+            echo "| #1 Baseline | develop code + fresh DB | FAILED |" >> $GITHUB_STEP_SUMMARY
+          fi
+
+          if [ "${{ steps.instance2.outcome }}" == "success" ]; then
+            echo "| #2 Hybrid | develop code + PR schema | PASSED |" >> $GITHUB_STEP_SUMMARY
+          else
+            echo "| #2 Hybrid | develop code + PR schema | **FAILED** — PR schema breaks old code |" >> $GITHUB_STEP_SUMMARY
+          fi
+
+          if [ "${{ steps.instance3.outcome }}" == "success" ]; then
+            echo "| #3 Full PR | PR code + PR schema | PASSED |" >> $GITHUB_STEP_SUMMARY
+          else
+            echo "| #3 Full PR | PR code + PR schema | FAILED |" >> $GITHUB_STEP_SUMMARY
+          fi
+
+      - name: Upload test results
+        if: always()
+        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
+        with:
+          name: regression-safety-results
+          path: |
+            fineract-e2e-tests-runner/build/reports/
+          retention-days: 1
+
+      - name: Fail if any instance failed
+        if: always()
+        run: |
+          FAILED=0
+          if [ "${{ steps.instance1.outcome }}" != "success" ]; then
+            echo "::error::Instance #1 (baseline) FAILED — golden path broken on develop"
+            FAILED=1
+          fi
+          if [ "${{ steps.instance2.outcome }}" != "success" ]; then
+            echo "::error::Instance #2 (hybrid) FAILED — PR's Liquibase changes break backward compatibility!"
+            FAILED=1
+          fi
+          if [ "${{ steps.instance3.outcome }}" != "success" ]; then
+            echo "::error::Instance #3 (full PR) FAILED — functional regression in the PR"
+            FAILED=1
+          fi
+          if [ $FAILED -ne 0 ]; then
+            exit 1
+          fi
+          echo "All instances passed."
diff --git a/.github/workflows/verify-liquibase-ddl-safety.yml b/.github/workflows/verify-liquibase-ddl-safety.yml
new file mode 100644
index 00000000000..bf2f6206f57
--- /dev/null
+++ b/.github/workflows/verify-liquibase-ddl-safety.yml
@@ -0,0 +1,125 @@
+name: Verify Liquibase DDL Safety
+
+on:
+  workflow_call:
+
+permissions:
+  contents: read
+  pull-requests: write
+
+jobs:
+  ddl-safety-check:
+    if: ${{ github.event_name == 'pull_request' }}
+    runs-on: ubuntu-24.04
+    timeout-minutes: 10
+
+    steps:
+      - name: Download workspace
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5
+        with:
+          name: fineract-workspace-${{ github.run_id }}
+          path: .
+
+      - name: Extract workspace
+        run: tar -xf fineract-workspace.tar
+
+      - name: Fetch base branch
+        run: git fetch origin "${{ github.event.pull_request.base.ref }}" --no-tags
+
+      - name: Detect Liquibase changes
+        id: changes
+        run: |
+          MERGE_BASE=$(git merge-base "origin/${{ github.event.pull_request.base.ref }}" HEAD)
+          CHANGES=$(git diff --name-only "$MERGE_BASE"..HEAD -- '**/db/changelog/**/*.xml' || true)
+          if [ -n "$CHANGES" ]; then
+            echo "has_changes=true" >> "$GITHUB_OUTPUT"
+            echo "$CHANGES"
+          else
+            echo "has_changes=false" >> "$GITHUB_OUTPUT"
+            echo "No Liquibase changes detected."
+          fi
+
+      - name: Check for override label
+        if: steps.changes.outputs.has_changes == 'true'
+        id: override
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          LABELS=$(gh pr view ${{ github.event.pull_request.number }} --json labels --jq '.labels[].name' 2>/dev/null || true)
+          if echo "$LABELS" | grep -q "ddl-safety-override"; then
+            echo "override=true" >> $GITHUB_OUTPUT
+            echo "Override label found — check will report but not block."
+          else
+            echo "override=false" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Run DDL safety check
+        if: steps.changes.outputs.has_changes == 'true'
+        id: ddl-check
+        continue-on-error: true
+        run: |
+          chmod +x scripts/check-liquibase-ddl-safety.sh
+          ./scripts/check-liquibase-ddl-safety.sh \
+            --base-ref origin/${{ github.event.pull_request.base.ref }} \
+            --head-ref HEAD \
+            --config config/liquibase \
+            --output-dir ${{ runner.temp }}/ddl-safety-report
+
+      - name: Write step summary
+        if: always()
+        run: |
+          if [ "${{ steps.changes.outputs.has_changes }}" != "true" ]; then
+            echo "## Liquibase DDL Safety Check - SKIPPED" >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "No Liquibase changelog XML changes detected." >> $GITHUB_STEP_SUMMARY
+          elif [ -f "${{ runner.temp }}/ddl-safety-report/report.md" ]; then
+            cat "${{ runner.temp }}/ddl-safety-report/report.md" >> $GITHUB_STEP_SUMMARY
+          else
+            echo "## Liquibase DDL Safety Check - PASSED" >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "No dangerous DDL changes detected." >> $GITHUB_STEP_SUMMARY
+          fi
+
+      - name: Comment on PR
+        if: always() && steps.changes.outputs.has_changes == 'true'
+        continue-on-error: true
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          PR_NUMBER: ${{ github.event.pull_request.number }}
+        run: |
+          MARKER="<!-- liquibase-ddl-safety-report -->"
+
+          # Find existing comment by marker
+          COMMENT_ID=$(gh api "repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \
+            --jq ".[] | select(.body | contains(\"${MARKER}\")) | .id" | head -1)
+
+          if [ "${{ steps.ddl-check.outcome }}" == "failure" ] && \
+             [ -f "${{ runner.temp }}/ddl-safety-report/report.md" ]; then
+            BODY="${MARKER}
+          $(cat ${{ runner.temp }}/ddl-safety-report/report.md)"
+
+            if [ -n "$COMMENT_ID" ]; then
+              gh api "repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" \
+                -X PATCH -f body="${BODY}"
+            else
+              gh pr comment "${PR_NUMBER}" --repo ${{ github.repository }} --body "${BODY}"
+            fi
+          elif [ -n "$COMMENT_ID" ]; then
+            # No violations anymore, delete the old comment
+            gh api "repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" -X DELETE
+          fi
+
+      - name: Archive report
+        if: always() && steps.changes.outputs.has_changes == 'true'
+        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
+        with:
+          name: ddl-safety-report
+          path: ${{ runner.temp }}/ddl-safety-report/
+          retention-days: 1
+
+      - name: Fail if critical violations found (unless overridden)
+        if: steps.changes.outputs.has_changes == 'true' && steps.ddl-check.outcome == 'failure' && steps.override.outputs.override != 'true'
+        run: |
+          echo "::error::Dangerous DDL changes detected on critical tables. See the report above for details."
+          echo "::error::Add the 'ddl-safety-override' label to bypass this check if the change is intentional."
+          exit 1
diff --git a/config/liquibase/critical-tables.txt b/config/liquibase/critical-tables.txt
new file mode 100644
index 00000000000..584ed43b2a3
--- /dev/null
+++ b/config/liquibase/critical-tables.txt
@@ -0,0 +1,34 @@
+# Critical tables for DDL safety checks.
+# Dangerous DDL operations on these tables BLOCK the PR.
+# These are high-traffic production tables where DDL can cause locks,
+# data loss, or break running instances during rolling deployments.
+#
+# One table name per line. Lines starting with # are comments.
+
+# Loan core tables
+m_loan
+m_loan_transaction
+m_loan_transaction_relation
+m_loan_transaction_repayment_schedule_mapping
+m_loan_repayment_schedule
+m_loan_charge
+m_loan_disbursement_detail
+m_loan_arrears_aging
+m_loan_repayment_schedule_history
+
+# Delinquency
+m_loan_delinquency_tag_history
+m_delinquency_range
+
+# Savings core tables
+m_savings_account
+m_savings_account_transaction
+
+# Accounting
+acc_gl_journal_entry
+
+# Client
+m_client
+
+# Payment
+m_payment_detail
diff --git a/config/liquibase/dangerous-operations.txt b/config/liquibase/dangerous-operations.txt
new file mode 100644
index 00000000000..80d4ba4251d
--- /dev/null
+++ b/config/liquibase/dangerous-operations.txt
@@ -0,0 +1,14 @@
+# Dangerous Liquibase DDL operations for safety checks.
+# Format: element|severity|risk description
+# Lines starting with # are comments.
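+#
+# How a rule is consumed (a minimal sketch mirroring the pipe-delimited parse
+# in scripts/check-liquibase-ddl-safety.sh; the echo is illustrative only):
+#   IFS='|' read -r element severity risk <<< 'dropColumn|CRITICAL|some risk text'
+#   echo "$element ($severity): $risk"   # -> dropColumn (CRITICAL): some risk text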
+#
+# Severity levels:
+#   CRITICAL - Blocks PR when targeting critical tables
+#   HIGH     - Blocks PR when targeting critical tables, warning otherwise
+
+dropColumn|CRITICAL|Dropping a column breaks old code that SELECTs or UPDATEs it during rolling deployment
+dropTable|CRITICAL|Dropping a table breaks old code that references it during rolling deployment
+renameColumn|CRITICAL|Renaming a column breaks old code that references the old name during rolling deployment
+renameTable|CRITICAL|Renaming a table breaks old code that references the old name during rolling deployment
+modifyDataType|HIGH|Changing a data type can cause a full table lock and type mismatches with old code
+addNotNullConstraint|HIGH|Adding a NOT NULL constraint breaks old-code INSERTs that omit the column
diff --git a/fineract-e2e-tests-runner/src/test/resources/features/LoanRegressionSafety.feature b/fineract-e2e-tests-runner/src/test/resources/features/LoanRegressionSafety.feature
new file mode 100644
index 00000000000..e90b3c0509a
--- /dev/null
+++ b/fineract-e2e-tests-runner/src/test/resources/features/LoanRegressionSafety.feature
@@ -0,0 +1,81 @@
+@RegressionSafety
+Feature: Loan Lifecycle Regression Safety
+  Exercises the complete loan lifecycle through all major state transitions.
+  Designed to be run against multiple Fineract instances with different DB states
+  to catch regressions introduced by database schema changes.
+  Based on proven patterns from 0_COB.feature (C2501, C2681).
+
+  @TestRailId:C76758
+  Scenario: Verify loan delinquency lifecycle (in-advance + late repayment) after DB changes
+    # --- Setup + disburse --------------------------------------------------
+    When Admin sets the business date to "01 January 2022"
+    When Admin creates a client with random data
+    When Admin creates a new default Loan with date: "01 January 2022"
+    And Admin successfully approves the loan on "01 January 2022" with "1000" amount and expected disbursement date on "01 January 2022"
+    And Admin successfully disburse the loan on "01 January 2022" with "1000" EUR transaction amount
+    Then Loan status will be "ACTIVE"
+    Then Loan has 1000 outstanding amount
+    Then Loan Repayment schedule has 1 periods, with the following data for periods:
+      | Nr | Days | Date            | Paid date | Balance of loan | Principal due | Interest | Fees | Penalties | Due    | Paid | In advance | Late | Outstanding |
+      |    |      | 01 January 2022 |           | 1000.0          |               |          | 0.0  |           | 0.0    | 0.0  |            |      |             |
+      | 1  | 30   | 31 January 2022 |           | 0.0             | 1000.0        | 0.0      | 0.0  | 0.0       | 1000.0 | 0.0  | 0.0        | 0.0  | 1000.0      |
+    Then Loan Repayment schedule has the following data in Total row:
+      | Principal due | Interest | Fees | Penalties | Due    | Paid | In advance | Late | Outstanding |
+      | 1000.0        | 0.0      | 0.0  | 0.0       | 1000.0 | 0.0  | 0.0        | 0.0  | 1000.0      |
+    Then Loan Transactions tab has the following data:
+      | Transaction date | Transaction Type | Amount | Principal | Interest | Fees | Penalties | Loan Balance |
+      | 01 January 2022  | Disbursement     | 1000.0 | 0.0       | 0.0      | 0.0  | 0.0       | 1000.0       |
+
+    # --- IN-ADVANCE partial repayment (15 Jan, before due 31 Jan) ---------
+    # Writes to m_loan_transaction, m_loan_transaction_repayment_schedule_mapping
+    # AND records the amount in the "In advance" column of the schedule.
+ When Admin sets the business date to "15 January 2022" + And Customer makes "AUTOPAY" repayment on "15 January 2022" with 500 EUR transaction amount + Then Repayment transaction is created with 500 amount and "AUTOPAY" type + Then Loan has 500 outstanding amount + Then Loan status will be "ACTIVE" + Then Loan Repayment schedule has 1 periods, with the following data for periods: + | Nr | Days | Date | Paid date | Balance of loan | Principal due | Interest | Fees | Penalties | Due | Paid | In advance | Late | Outstanding | + | | | 01 January 2022 | | 1000.0 | | | 0.0 | | 0.0 | 0.0 | | | | + | 1 | 30 | 31 January 2022 | | 0.0 | 1000.0 | 0.0 | 0.0 | 0.0 | 1000.0 | 500.0 | 500.0 | 0.0 | 500.0 | + Then Loan Repayment schedule has the following data in Total row: + | Principal due | Interest | Fees | Penalties | Due | Paid | In advance | Late | Outstanding | + | 1000.0 | 0.0 | 0.0 | 0.0 | 1000.0 | 500.0 | 500.0 | 0.0 | 500.0 | + Then Loan Transactions tab has the following data: + | Transaction date | Transaction Type | Amount | Principal | Interest | Fees | Penalties | Loan Balance | + | 01 January 2022 | Disbursement | 1000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 1000.0 | + | 15 January 2022 | Repayment | 500.0 | 500.0 | 0.0 | 0.0 | 0.0 | 500.0 | + + # --- Advance past due (04 Feb) + COB triggers delinquency -------------- + # Default LP1 has an arrears setting of 3 days -> delinquent from 03 Feb. + # Writes to m_loan (lastClosedBusinessDate), m_loan_delinquency_tag_history, + # m_delinquency_range. Interest-free, no charges -> no Accrual rows. + When Admin sets the business date to "04 February 2022" + When Admin runs COB job + Then Admin checks that delinquency range is: "RANGE_1" and has delinquentDate "2022-02-03" + Then Loan Repayment schedule has the following data in Total row: + | Principal due | Interest | Fees | Penalties | Due | Paid | In advance | Late | Outstanding | + | 1000.0 | 0.0 | 0.0 | 0.0 | 1000.0 | 500.0 | 500.0 | 0.0 | 500.0 | + Then Loan Transactions tab has the following data: + | Transaction date | Transaction Type | Amount | Principal | Interest | Fees | Penalties | Loan Balance | + | 01 January 2022 | Disbursement | 1000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 1000.0 | + | 15 January 2022 | Repayment | 500.0 | 500.0 | 0.0 | 0.0 | 0.0 | 500.0 | + + # --- LATE full payoff (04 Feb, after due) ----------------------------- + # This 500 lands in the "Late" column (paid after 31 Jan due date). + # Final schedule shows both in-advance 500 and late 500 amounts. 
+ And Customer makes "AUTOPAY" repayment on "04 February 2022" with 500 EUR transaction amount + Then Loan Repayment schedule has 1 periods, with the following data for periods: + | Nr | Days | Date | Paid date | Balance of loan | Principal due | Interest | Fees | Penalties | Due | Paid | In advance | Late | Outstanding | + | | | 01 January 2022 | | 1000.0 | | | 0.0 | | 0.0 | 0.0 | | | | + | 1 | 30 | 31 January 2022 | 04 February 2022 | 0.0 | 1000.0 | 0.0 | 0.0 | 0.0 | 1000.0 | 1000.0 | 500.0 | 500.0 | 0.0 | + Then Loan Repayment schedule has the following data in Total row: + | Principal due | Interest | Fees | Penalties | Due | Paid | In advance | Late | Outstanding | + | 1000.0 | 0.0 | 0.0 | 0.0 | 1000.0 | 1000.0 | 500.0 | 500.0 | 0.0 | + Then Loan Transactions tab has the following data: + | Transaction date | Transaction Type | Amount | Principal | Interest | Fees | Penalties | Loan Balance | + | 01 January 2022 | Disbursement | 1000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 1000.0 | + | 15 January 2022 | Repayment | 500.0 | 500.0 | 0.0 | 0.0 | 0.0 | 500.0 | + | 04 February 2022 | Repayment | 500.0 | 500.0 | 0.0 | 0.0 | 0.0 | 0.0 | + Then Loan has 0 outstanding amount + Then Loan status will be "CLOSED_OBLIGATIONS_MET" diff --git a/scripts/check-liquibase-ddl-safety.sh b/scripts/check-liquibase-ddl-safety.sh new file mode 100755 index 00000000000..6422974b314 --- /dev/null +++ b/scripts/check-liquibase-ddl-safety.sh @@ -0,0 +1,427 @@ +#!/bin/bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# --------------------------------------------------------------------------- +# check-liquibase-ddl-safety.sh +# +# Scans Liquibase XML changesets introduced in a PR for dangerous DDL +# operations on critical tables. Designed for CI but also runs locally. +# +# Uses only bash builtins + grep/sed (no xmllint dependency). +# +# Usage: +# ./scripts/check-liquibase-ddl-safety.sh \ +# --base-ref origin/develop \ +# --head-ref HEAD \ +# --config config/liquibase \ +# --output-dir /tmp/ddl-safety-report +# +# Exit codes: +# 0 — No blocking violations found (may have warnings) +# 1 — Blocking violations found on critical tables +# 2 — Script error / bad arguments +# --------------------------------------------------------------------------- +set -euo pipefail + +# ---- Defaults ---- +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/.." 
&& pwd)" +CONFIG_DIR="$REPO_ROOT/config/liquibase" +OUTPUT_DIR="" +BASE_REF="" +HEAD_REF="" +FILES_OVERRIDE="" # For local testing: pass specific files instead of using git diff + +# ---- Argument parsing ---- +while [[ $# -gt 0 ]]; do + case "$1" in + --base-ref) BASE_REF="$2"; shift 2 ;; + --head-ref) HEAD_REF="$2"; shift 2 ;; + --config) CONFIG_DIR="$2"; shift 2 ;; + --output-dir) OUTPUT_DIR="$2"; shift 2 ;; + --files) FILES_OVERRIDE="$2"; shift 2 ;; + -h|--help) + echo "Usage: $0 --base-ref --head-ref [--config ] [--output-dir ] [--files ]" + exit 0 + ;; + *) echo "Unknown argument: $1"; exit 2 ;; + esac +done + +# ---- Validate inputs ---- +if [[ -z "$FILES_OVERRIDE" ]] && { [[ -z "$BASE_REF" ]] || [[ -z "$HEAD_REF" ]]; }; then + echo "Error: Either --base-ref + --head-ref or --files is required." + exit 2 +fi + +if [[ -n "$OUTPUT_DIR" ]]; then + mkdir -p "$OUTPUT_DIR" +fi + +# ---- Load configuration ---- +CRITICAL_TABLES_FILE="$CONFIG_DIR/critical-tables.txt" +DANGEROUS_OPS_FILE="$CONFIG_DIR/dangerous-operations.txt" + +if [[ ! -f "$CRITICAL_TABLES_FILE" ]]; then + echo "Error: Critical tables config not found: $CRITICAL_TABLES_FILE" + exit 2 +fi +if [[ ! -f "$DANGEROUS_OPS_FILE" ]]; then + echo "Error: Dangerous operations config not found: $DANGEROUS_OPS_FILE" + exit 2 +fi + +# Load critical tables into array (skip comments and blank lines) +CRITICAL_TABLES=() +while IFS= read -r line; do + line="${line%%#*}" # strip inline comments + line="$(echo "$line" | tr -d '[:space:]')" + [[ -z "$line" ]] && continue + CRITICAL_TABLES+=("$line") +done < "$CRITICAL_TABLES_FILE" + +# Load dangerous operations into associative arrays +declare -A OP_SEVERITY +declare -A OP_RISK +while IFS='|' read -r element severity risk; do + element="${element%%#*}" + element="$(echo "$element" | tr -d '[:space:]')" + [[ -z "$element" ]] && continue + severity="$(echo "$severity" | tr -d '[:space:]')" + risk="$(echo "$risk" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')" + OP_SEVERITY["$element"]="$severity" + OP_RISK["$element"]="$risk" +done < "$DANGEROUS_OPS_FILE" + +# ---- Find changed XML files ---- +CHANGED_FILES=() +if [[ -n "$FILES_OVERRIDE" ]]; then + IFS=',' read -ra CHANGED_FILES <<< "$FILES_OVERRIDE" +else + MERGE_BASE=$(git -C "$REPO_ROOT" merge-base "$BASE_REF" "$HEAD_REF" 2>/dev/null || echo "$BASE_REF") + while IFS= read -r f; do + [[ -n "$f" ]] && CHANGED_FILES+=("$f") + done < <(git -C "$REPO_ROOT" diff --name-only --diff-filter=AM "$MERGE_BASE".."$HEAD_REF" -- '*.xml' | grep '/db/changelog/' || true) +fi + +if [[ ${#CHANGED_FILES[@]} -eq 0 ]]; then + echo "No Liquibase changelog XML files changed." + if [[ -n "$OUTPUT_DIR" ]]; then + echo "No Liquibase changelog XML files changed in this PR." > "$OUTPUT_DIR/report.md" + echo '{"violations":[],"summary":{"total":0,"critical":0,"warnings":0}}' > "$OUTPUT_DIR/report.json" + fi + exit 0 +fi + +echo "Found ${#CHANGED_FILES[@]} changed Liquibase XML file(s):" +printf " %s\n" "${CHANGED_FILES[@]}" +echo "" + +# ---- Helper functions ---- +is_critical_table() { + local table="$1" + for ct in "${CRITICAL_TABLES[@]}"; do + if [[ "$table" == "$ct" ]]; then + return 0 + fi + done + return 1 +} + +# ---- Helper: flatten multiline XML tags into single lines ---- +# Liquibase tags like span +# multiple lines. This function joins lines between '' or '/>' so that grep can match attributes regardless of line breaks. 
+flatten_xml_tags() {
+  local content="$1"
+  echo "$content" | awk '
+    BEGIN { buf="" }
+    {
+      if (buf != "") {
+        buf = buf " " $0
+        if (match(buf, />/)) {
+          print buf
+          buf = ""
+        }
+      } else if (match($0, /<[a-zA-Z]/) && !match($0, />/)) {
+        buf = $0
+      } else {
+        print $0
+      }
+    }
+    END { if (buf != "") print buf }
+  '
+}
+
+# Track tables created in the same PR (for false-positive mitigation)
+CREATED_TABLES=()
+for file in "${CHANGED_FILES[@]}"; do
+  local_file="$REPO_ROOT/$file"
+  [[ -f "$local_file" ]] || continue
+  flat=$(flatten_xml_tags "$(cat "$local_file")")
+  while IFS= read -r tbl; do
+    [[ -n "$tbl" ]] && CREATED_TABLES+=("$tbl")
+  done < <(echo "$flat" | grep -oP '<createTable[^>]*tableName="\K[^"]+' 2>/dev/null || true)
+done
+
+is_created_in_pr() {
+  local table="$1"
+  for ct in "${CREATED_TABLES[@]}"; do
+    if [[ "$table" == "$ct" ]]; then
+      return 0
+    fi
+  done
+  return 1
+}
+
+# ---- Violation tracking ----
+declare -a VIOLATIONS=()
+BLOCKING_COUNT=0
+WARNING_COUNT=0
+
+add_violation() {
+  local severity="$1"
+  local file="$2"
+  local operation="$3"
+  local table="$4"
+  local risk="$5"
+  local detail="${6:-}"
+
+  # False positive check: table created in same PR
+  if is_created_in_pr "$table"; then
+    severity="INFO"
+    risk="$risk (table created in same PR - likely safe)"
+  fi
+
+  local effective_severity="$severity"
+  if [[ "$severity" == "CRITICAL" ]] || [[ "$severity" == "HIGH" ]]; then
+    if is_critical_table "$table"; then
+      effective_severity="CRITICAL"
+      ((BLOCKING_COUNT++)) || true
+    else
+      effective_severity="WARNING"
+      ((WARNING_COUNT++)) || true
+    fi
+  elif [[ "$severity" == "INFO" ]]; then
+    ((WARNING_COUNT++)) || true
+  fi
+
+  local detail_str=""
+  if [[ -n "$detail" ]]; then
+    detail_str=" ($detail)"
+  fi
+
+  VIOLATIONS+=("${effective_severity}|${file}|${operation}|${table}${detail_str}|${risk}")
+}
+
+# ---- Scan each changed file ----
+for file in "${CHANGED_FILES[@]}"; do
+  local_file="$REPO_ROOT/$file"
+  if [[ ! -f "$local_file" ]]; then
+    echo "Warning: File not found (deleted?): $file"
+    continue
+  fi
+
+  # Validate it's actually a Liquibase changelog
+  if ! grep -q 'databaseChangeLog' "$local_file"; then
+    continue
+  fi
+
+  echo "Scanning: $file"
+
+  # Flatten multiline XML tags so attribute matching works across line breaks
+  FLAT_CONTENT=$(flatten_xml_tags "$(cat "$local_file")")
+
+  # ---- Check simple dangerous operations ----
+  for op in "${!OP_SEVERITY[@]}"; do
+    # Find all occurrences of <op tableName="..."> or <op ... tableName="...">
+    while IFS= read -r table; do
+      [[ -z "$table" ]] && continue
+
+      # Try to get extra detail depending on operation type
+      detail=""
+      case "$op" in
+        dropColumn)
+          # Either a columnName attribute or nested <column> elements
+          col=$(echo "$FLAT_CONTENT" | grep -oP "<dropColumn[^>]*tableName=\"${table}\"[^>]*columnName=\"\K[^\"]*" 2>/dev/null | head -1 || true)
+          if [[ -z "$col" ]]; then
+            col=$(echo "$FLAT_CONTENT" | grep -A5 "<dropColumn[^>]*tableName=\"${table}\"" 2>/dev/null | grep -oP '<column[^>]*name="\K[^"]+' | paste -sd',' || true)
+          fi
+          [[ -n "$col" ]] && detail="column: $col"
+          ;;
+        renameColumn)
+          old=$(echo "$FLAT_CONTENT" | grep -oP "<renameColumn[^>]*tableName=\"${table}\"[^>]*oldColumnName=\"\K[^\"]*" 2>/dev/null | head -1 || true)
+          new=$(echo "$FLAT_CONTENT" | grep -oP "<renameColumn[^>]*tableName=\"${table}\"[^>]*newColumnName=\"\K[^\"]*" 2>/dev/null | head -1 || true)
+          [[ -n "$old" ]] && detail="$old -> $new"
+          ;;
+        addNotNullConstraint)
+          col=$(echo "$FLAT_CONTENT" | grep -oP "<addNotNullConstraint[^>]*tableName=\"${table}\"[^>]*columnName=\"\K[^\"]*" 2>/dev/null | paste -sd',' || true)
+          [[ -n "$col" ]] && detail="column: $col"
+          ;;
+        modifyDataType)
+          col=$(echo "$FLAT_CONTENT" | grep -oP "<modifyDataType[^>]*tableName=\"${table}\"[^>]*columnName=\"\K[^\"]*" 2>/dev/null | head -1 || true)
+          [[ -n "$col" ]] && detail="column: $col"
+          ;;
+      esac
+
+      add_violation "${OP_SEVERITY[$op]}" "$file" "$op" "$table" "${OP_RISK[$op]}" "$detail"
+    done < <(echo "$FLAT_CONTENT" | grep -oP "<${op}[^>]*tableName=\"\K[^\"]*" 2>/dev/null | sort -u || true)
+  done
+
+  # ---- Check addColumn with NOT NULL and no default ----
+  # Strategy: find <addColumn> blocks, then check for nullable="false" without defaultValue
+  while IFS= read -r table; do
+    [[ -z "$table" ]] && continue
+    # Extract the block from <addColumn ...> to the next </addColumn> or />
+    block=$(sed -n "/<addColumn[^>]*tableName=\"${table}\"/,/<\/addColumn>/p" "$local_file" 2>/dev/null || true)
+    if [[ -z "$block" ]]; then
+      continue
+    fi
+    # Check for nullable="false"
+    if echo "$block" | grep -qi 'nullable="false"'; then
+      # Check if any defaultValue variant is present
+      if ! echo "$block" | grep -qiP 'defaultValue|defaultValueNumeric|defaultValueBoolean|defaultValueDate|defaultValueComputed|valueComputed'; then
+        add_violation "HIGH" "$file" "addColumn+NOT_NULL" "$table" \
+          "Adding a NOT NULL column without default value breaks old code INSERTs"
+      fi
+    fi
+  done < <(echo "$FLAT_CONTENT" | grep -oP '<addColumn[^>]*tableName="\K[^"]+' 2>/dev/null | sort -u || true)
+
+  # ---- Check raw SQL blocks for dangerous DDL ----
+  # Extract text between <sql> and </sql> tags
+  sql_content=$(sed -n '/<sql/,/<\/sql>/p' "$local_file" 2>/dev/null || true)
+  if [[ -n "$sql_content" ]]; then
+    while IFS= read -r sql_match; do
+      [[ -z "$sql_match" ]] && continue
+      sql_match="$(echo "$sql_match" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+      # Try to extract table name from the SQL match (e.g., "DROP TABLE m_loan" -> "m_loan")
+      sql_table=$(echo "$sql_match" | grep -ioP '(DROP\s+TABLE\s+|ALTER\s+TABLE\s+)\K\S+' | head -1 || true)
+      sql_table="${sql_table//\`/}" # strip backticks
+      sql_table="${sql_table//\"/}" # strip quotes
+      sql_table="${sql_table//;/}" # strip semicolons
+      [[ -z "$sql_table" ]] && sql_table="unknown"
+      add_violation "HIGH" "$file" "raw-SQL" "$sql_table" \
+        "Raw SQL contains potentially dangerous DDL: $sql_match"
+    done < <(echo "$sql_content" | grep -ioP '(DROP\s+(TABLE|COLUMN)\s+\S+|ALTER\s+TABLE\s+\S+\s+(RENAME|DROP)\s+\S+)' 2>/dev/null || true)
+  fi
+done
+
+echo ""
+
+# ---- Generate reports ----
+TOTAL_VIOLATIONS=${#VIOLATIONS[@]}
+
+if [[ $TOTAL_VIOLATIONS -eq 0 ]]; then
+  echo "No dangerous DDL changes detected."
+  if [[ -n "$OUTPUT_DIR" ]]; then
+    cat > "$OUTPUT_DIR/report.md" << 'MDEOF'
+## Liquibase DDL Safety Check - PASSED
+
+No dangerous DDL changes detected in this PR's Liquibase migrations.
+All migrations are safe for rolling deployments.
+MDEOF
+    echo '{"violations":[],"summary":{"total":0,"critical":0,"warnings":0}}' > "$OUTPUT_DIR/report.json"
+  fi
+  exit 0
+fi
+
+# Build markdown report
+MD_REPORT=""
+if [[ $BLOCKING_COUNT -gt 0 ]]; then
+  MD_REPORT+="## Liquibase DDL Safety Check - BLOCKED"$'\n\n'
+  MD_REPORT+="**${BLOCKING_COUNT} blocking violation(s) found** on critical tables."$'\n'
+  MD_REPORT+="These changes are dangerous for rolling deployments and must be reviewed."$'\n\n'
+else
+  MD_REPORT+="## Liquibase DDL Safety Check - WARNINGS"$'\n\n'
+  MD_REPORT+="**${WARNING_COUNT} warning(s) found** (non-blocking)."$'\n\n'
+fi
+
+MD_REPORT+="| Severity | File | Operation | Table | Risk |"$'\n'
+MD_REPORT+="|----------|------|-----------|-------|------|"$'\n'
+
+JSON_VIOLATIONS="["
+first_json=true
+
+for v in "${VIOLATIONS[@]}"; do
+  IFS='|' read -r sev vfile op tbl risk <<< "$v"
+
+  # Shorten file path for readability
+  short_file="${vfile#fineract-provider/src/main/resources/}"
+  short_file="${short_file#fineract-loan/src/main/resources/}"
+  short_file="${short_file#fineract-investor/src/main/resources/}"
+  short_file="${short_file#fineract-savings/src/main/resources/}"
+  short_file="${short_file#fineract-progressive-loan/src/main/resources/}"
+
+  MD_REPORT+="| $sev | \`$short_file\` | $op | \`$tbl\` | $risk |"$'\n'
+
+  # JSON
+  if [[ "$first_json" == "true" ]]; then
+    first_json=false
+  else
+    JSON_VIOLATIONS+=","
+  fi
+  # Escape JSON-special characters
+  json_risk="${risk//\\/\\\\}"
+  json_risk="${json_risk//\"/\\\"}"
+  json_risk="${json_risk//$'\n'/\\n}"
+  json_risk="${json_risk//$'\t'/\\t}"
+  json_tbl="${tbl//\\/\\\\}"
+  json_tbl="${json_tbl//\"/\\\"}"
+  JSON_VIOLATIONS+="{\"severity\":\"$sev\",\"file\":\"$vfile\",\"operation\":\"$op\",\"table\":\"$json_tbl\",\"risk\":\"$json_risk\"}"
+done
+
+JSON_VIOLATIONS+="]"
+
+MD_REPORT+=$'\n'
+MD_REPORT+="**Total: $TOTAL_VIOLATIONS violation(s) ($BLOCKING_COUNT blocking, $WARNING_COUNT warnings)**"$'\n\n'
+
+if [[ $BLOCKING_COUNT -gt 0 ]]; then
+  MD_REPORT+="### How to resolve"$'\n\n'
+  MD_REPORT+="If this change is **intentional** and has been reviewed for rolling deployment safety:"$'\n'
+  MD_REPORT+="- Add the \`ddl-safety-override\` label to this PR"$'\n\n'
+  MD_REPORT+="For **safe migration patterns**, consider:"$'\n'
+  MD_REPORT+="- **Instead of DROP COLUMN**: Mark as deprecated, remove in a future release after all instances are updated"$'\n'
+  MD_REPORT+="- **Instead of RENAME COLUMN**: Add new column, backfill, update code, drop old column in a later release"$'\n'
+  MD_REPORT+="- **Instead of ADD NOT NULL**: Add as nullable first, backfill defaults, add constraint in a later release"$'\n'
+fi
+
+# Print to console
+echo "========================================"
+echo "$MD_REPORT"
+echo "========================================"
+
+# Write to files
+if [[ -n "$OUTPUT_DIR" ]]; then
+  echo "$MD_REPORT" > "$OUTPUT_DIR/report.md"
+  cat > "$OUTPUT_DIR/report.json" << JSONEOF
+{"violations":$JSON_VIOLATIONS,"summary":{"total":$TOTAL_VIOLATIONS,"critical":$BLOCKING_COUNT,"warnings":$WARNING_COUNT}}
+JSONEOF
+  echo "Reports written to $OUTPUT_DIR/"
+fi
+
+# Exit code
+if [[ $BLOCKING_COUNT -gt 0 ]]; then
+  echo ""
+  echo "FAILED: $BLOCKING_COUNT blocking violation(s) on critical tables."
+  exit 1
+else
+  echo ""
+  echo "PASSED with $WARNING_COUNT warning(s) (non-blocking)."
+  exit 0
+fi
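+
+# Local smoke test (a sketch using the --files override; the changelog path
+# below is hypothetical and should be replaced with a real changed file):
+#   ./scripts/check-liquibase-ddl-safety.sh \
+#     --files fineract-provider/src/main/resources/db/changelog/tenant/parts/9999_example.xml \
+#     --output-dir /tmp/ddl-safety-report
+#   cat /tmp/ddl-safety-report/report.md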