Skip to content

Commit 9ad6411

Browse files
authored
harden testing workflows (env-var policy + permissions + SHA-pin) (#996)
- env-var-ize all ${{ }} expressions inside `run:` blocks across the test-warehouse, test-all-warehouses, test-all-warehouses-dbt-pre-releases, and cleanup-stale-schemas workflows
- validate inputs.max-age-hours (fail-closed on non-integer input)
- SHA-pin pmeier/pytest-results-action@v0.8.0
- deny GITHUB_TOKEN by default; grant the minimum permissions per job
1 parent 975bdaa commit 9ad6411

4 files changed

Lines changed: 92 additions & 34 deletions

File tree

.github/workflows/cleanup-stale-schemas.yml

Lines changed: 23 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,19 @@ on:
1212
default: "24"
1313
description: Drop schemas older than this many hours
1414

15+
permissions: {}
16+
1517
env:
1618
TESTS_DIR: ${{ github.workspace }}/dbt-data-reliability/integration_tests
1719

1820
jobs:
1921
cleanup:
2022
runs-on: ubuntu-latest
23+
permissions:
24+
contents: read
25+
env:
26+
WAREHOUSE: ${{ matrix.warehouse-type }}
27+
MAX_AGE_HOURS: ${{ inputs.max-age-hours || '24' }}
2128
strategy:
2229
fail-fast: false
2330
matrix:
@@ -28,6 +35,14 @@ jobs:
2835
- databricks_catalog
2936
- athena
3037
steps:
38+
- name: Validate max-age-hours input
39+
# Fail-closed on non-integer input before it reaches dbt run-operation.
40+
run: |
41+
if ! [[ "$MAX_AGE_HOURS" =~ ^[0-9]+$ ]]; then
42+
echo "::error::Invalid max-age-hours: '$MAX_AGE_HOURS' (must be a non-negative integer)"
43+
exit 1
44+
fi
45+
3146
- name: Checkout dbt package
3247
uses: actions/checkout@v6
3348
with:
@@ -40,10 +55,9 @@ jobs:
4055
cache: "pip"
4156

4257
- name: Install dbt
43-
run: >
44-
pip install
45-
"dbt-core"
46-
"dbt-${{ (matrix.warehouse-type == 'databricks_catalog' && 'databricks') || (matrix.warehouse-type == 'athena' && 'athena-community') || matrix.warehouse-type }}"
58+
env:
59+
DBT_ADAPTER_PKG: ${{ (matrix.warehouse-type == 'databricks_catalog' && 'databricks') || (matrix.warehouse-type == 'athena' && 'athena-community') || matrix.warehouse-type }}
60+
run: pip install "dbt-core" "dbt-${DBT_ADAPTER_PKG}"
4761

4862
- name: Write dbt profiles
4963
env:
@@ -61,13 +75,13 @@ jobs:
6175
run: dbt deps
6276

6377
- name: Symlink local elementary package
64-
run: ln -sfn ${{ github.workspace }}/dbt-data-reliability ${{ env.TESTS_DIR }}/dbt_project/dbt_packages/elementary
78+
run: ln -sfn "${{ github.workspace }}/dbt-data-reliability" "${{ env.TESTS_DIR }}/dbt_project/dbt_packages/elementary"
6579

6680
- name: Drop stale CI schemas
6781
working-directory: ${{ env.TESTS_DIR }}/dbt_project
6882
# Only dbt_ prefixed schemas are created in this repo's CI.
6983
# The elementary repo has its own workflow for py_ prefixed schemas.
70-
run: >
71-
dbt run-operation drop_stale_ci_schemas
72-
--args '{prefixes: ["dbt_"], max_age_hours: ${{ inputs.max-age-hours || '24' }}}'
73-
-t "${{ matrix.warehouse-type }}"
84+
run: |
85+
dbt run-operation drop_stale_ci_schemas \
86+
--args '{prefixes: ["dbt_"], max_age_hours: '"$MAX_AGE_HOURS"'}' \
87+
-t "$WAREHOUSE"

.github/workflows/test-all-warehouses-dbt-pre-releases.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,12 @@ name: Test all warehouse platforms on dbt pre-releases
22
on:
33
workflow_dispatch:
44

5+
permissions: {}
6+
57
jobs:
68
test:
9+
permissions:
10+
contents: read
711
uses: ./.github/workflows/test-all-warehouses.yml
812
secrets: inherit
913
with:

.github/workflows/test-all-warehouses.yml

Lines changed: 18 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,8 @@ on:
3434
type: string
3535
required: false
3636

37+
permissions: {}
38+
3739
jobs:
3840
# ── Local targets ─────────────────────────────────────────────────────
3941
# No secrets needed — run on pull_request (works for forks without approval).
@@ -42,6 +44,8 @@ jobs:
4244
# fully in-process adapters (duckdb).
4345
test-local:
4446
if: github.event_name != 'pull_request_target'
47+
permissions:
48+
contents: read
4549
strategy:
4650
fail-fast: false
4751
matrix:
@@ -88,35 +92,41 @@ jobs:
8892
# Determine if this is a fork PR and skip if wrong trigger is used
8993
check-fork-status:
9094
runs-on: ubuntu-latest
95+
permissions: {}
9196
outputs:
9297
is_fork: ${{ steps.check.outputs.is_fork }}
9398
should_skip: ${{ steps.check.outputs.should_skip }}
9499
steps:
95100
- name: Check if PR is from fork
96101
id: check
102+
env:
103+
EVENT_NAME: ${{ github.event_name }}
104+
PR_REPO: ${{ github.event.pull_request.head.repo.full_name }}
105+
BASE_REPO: ${{ github.repository }}
97106
run: |
98107
IS_FORK="false"
99108
SHOULD_SKIP="false"
100109
101-
if [[ "${{ github.event_name }}" == "pull_request" || "${{ github.event_name }}" == "pull_request_target" ]]; then
102-
if [[ "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]]; then
110+
if [[ "$EVENT_NAME" == "pull_request" || "$EVENT_NAME" == "pull_request_target" ]]; then
111+
if [[ "$PR_REPO" != "$BASE_REPO" ]]; then
103112
IS_FORK="true"
104113
fi
105114
106115
# Skip if: pull_request from fork (should use pull_request_target) OR pull_request_target from non-fork (should use pull_request)
107-
if [[ "${{ github.event_name }}" == "pull_request" && "$IS_FORK" == "true" ]]; then
116+
if [[ "$EVENT_NAME" == "pull_request" && "$IS_FORK" == "true" ]]; then
108117
SHOULD_SKIP="true"
109-
elif [[ "${{ github.event_name }}" == "pull_request_target" && "$IS_FORK" == "false" ]]; then
118+
elif [[ "$EVENT_NAME" == "pull_request_target" && "$IS_FORK" == "false" ]]; then
110119
SHOULD_SKIP="true"
111120
fi
112121
fi
113122
114-
echo "is_fork=$IS_FORK" >> $GITHUB_OUTPUT
115-
echo "should_skip=$SHOULD_SKIP" >> $GITHUB_OUTPUT
123+
echo "is_fork=$IS_FORK" >> "$GITHUB_OUTPUT"
124+
echo "should_skip=$SHOULD_SKIP" >> "$GITHUB_OUTPUT"
116125
117126
# Approval gate for fork PRs (only runs once for all platforms)
118127
approve-fork:
119128
runs-on: ubuntu-latest
129+
permissions: {}
120130
needs: [check-fork-status]
121131
if: needs.check-fork-status.outputs.should_skip != 'true' && needs.check-fork-status.outputs.is_fork == 'true'
122132
environment: elementary_test_env
@@ -126,6 +136,8 @@ jobs:
126136

127137
test-cloud:
128138
needs: [check-fork-status, approve-fork]
139+
permissions:
140+
contents: read
129141
if: |
130142
! cancelled() &&
131143
needs.check-fork-status.result == 'success' &&

.github/workflows/test-warehouse.yml

Lines changed: 47 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,8 @@ on:
5151
default: "latest_official"
5252
required: false
5353

54+
permissions: {}
55+
5456
env:
5557
BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
5658
TESTS_DIR: ${{ github.workspace }}/dbt-data-reliability/integration_tests
@@ -59,6 +61,11 @@ jobs:
5961
test:
6062
runs-on: ubuntu-latest
6163
timeout-minutes: 60
64+
permissions:
65+
contents: read
66+
env:
67+
WAREHOUSE: ${{ inputs.warehouse-type }}
68+
DBT_VERSION: ${{ inputs.dbt-version }}
6269
concurrency:
6370
# Serialises runs for the same warehouse × dbt-version × branch.
6471
# The schema name is derived from a hash of this group (see "Write dbt profiles").
@@ -160,22 +167,26 @@ jobs:
160167
161168
- name: Install dbt-vertica
162169
if: inputs.warehouse-type == 'vertica' && inputs.dbt-version != 'fusion'
170+
env:
171+
DBT_CORE_PIN: ${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}
163172
run: |
164173
# dbt-vertica pins dbt-core~=1.8 which lacks native support for the
165174
# "arguments" test property used by the integration-test framework.
166175
# Install dbt-vertica without deps, then install the requested
167176
# dbt-core version separately (dbt-vertica works fine with newer
168177
# dbt-core versions).
169178
pip install dbt-vertica --no-deps
170-
pip install vertica-python \
171-
"dbt-core${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}"
179+
pip install vertica-python "dbt-core${DBT_CORE_PIN}"
172180
173181
- name: Install dbt
174182
if: ${{ inputs.dbt-version != 'fusion' && inputs.warehouse-type != 'vertica' }}
175-
run:
176-
pip install${{ (inputs.dbt-version == 'latest_pre' && ' --pre') || '' }}
177-
"dbt-core${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}"
178-
"dbt-${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || (inputs.warehouse-type == 'spark' && 'spark[PyHive]') || (inputs.warehouse-type == 'athena' && 'athena-community') || inputs.warehouse-type }}${{ (!startsWith(inputs.dbt-version, 'latest') && format('~={0}', inputs.dbt-version)) || '' }}"
183+
env:
184+
PIP_PRE_FLAG: ${{ (inputs.dbt-version == 'latest_pre' && '--pre') || '' }}
185+
DBT_CORE_PIN: ${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}
186+
DBT_ADAPTER_PKG: ${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || (inputs.warehouse-type == 'spark' && 'spark[PyHive]') || (inputs.warehouse-type == 'athena' && 'athena-community') || inputs.warehouse-type }}
187+
DBT_ADAPTER_PIN: ${{ (!startsWith(inputs.dbt-version, 'latest') && format('~={0}', inputs.dbt-version)) || '' }}
188+
run: |
189+
pip install $PIP_PRE_FLAG "dbt-core${DBT_CORE_PIN}" "dbt-${DBT_ADAPTER_PKG}${DBT_ADAPTER_PIN}"
179190
180191
- name: Install dbt-fusion
181192
if: inputs.dbt-version == 'fusion'
@@ -187,11 +198,17 @@ jobs:
187198
# For Vertica, dbt-vertica is already installed with --no-deps above;
188199
# using ".[vertica]" would re-resolve dbt-vertica's deps and downgrade
189200
# dbt-core to ~=1.8. Install elementary without the adapter extra.
190-
if [ "${{ inputs.warehouse-type }}" = "vertica" ]; then
191-
pip install "./elementary"
192-
else
193-
pip install "./elementary[${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || inputs.warehouse-type }}]"
194-
fi
201+
case "$WAREHOUSE" in
202+
vertica)
203+
pip install "./elementary"
204+
;;
205+
databricks_catalog)
206+
pip install "./elementary[databricks]"
207+
;;
208+
*)
209+
pip install "./elementary[$WAREHOUSE]"
210+
;;
211+
esac
195212
196213
- name: Write dbt profiles
197214
env:
@@ -205,7 +222,7 @@ jobs:
205222
# Budget (PostgreSQL 63-char limit):
206223
# dbt_(4) + timestamp(13) + _(1) + branch(≤18) + _(1) + hash(8) = 45
207224
# + _elementary(11) + _gw7(4) = 60
208-
CONCURRENCY_GROUP="tests_${{ inputs.warehouse-type }}_dbt_${{ inputs.dbt-version }}_${BRANCH_NAME}"
225+
CONCURRENCY_GROUP="tests_${WAREHOUSE}_dbt_${DBT_VERSION}_${BRANCH_NAME}"
209226
SHORT_HASH=$(echo -n "$CONCURRENCY_GROUP" | sha256sum | head -c 8)
210227
SAFE_BRANCH=$(echo "${BRANCH_NAME}" | awk '{print tolower($0)}' | sed "s/[^a-z0-9]/_/g; s/__*/_/g" | head -c 18)
211228
DATE_STAMP=$(date -u +%y%m%d_%H%M%S)
@@ -221,8 +238,9 @@ jobs:
221238
- name: Install dependencies
222239
working-directory: ${{ env.TESTS_DIR }}
223240
run: |
224-
${{ (inputs.dbt-version == 'fusion' && '~/.local/bin/dbt') || 'dbt' }} deps --project-dir dbt_project
225-
ln -sfn ${{ github.workspace }}/dbt-data-reliability dbt_project/dbt_packages/elementary
241+
if [ "$DBT_VERSION" = "fusion" ]; then DBT_BIN="$HOME/.local/bin/dbt"; else DBT_BIN="dbt"; fi
242+
"$DBT_BIN" deps --project-dir dbt_project
243+
ln -sfn "${{ github.workspace }}/dbt-data-reliability" dbt_project/dbt_packages/elementary
226244
pip install -r requirements.txt
227245
228246
- name: Start Vertica
@@ -240,15 +258,24 @@ jobs:
240258
- name: Check DWH connection
241259
working-directory: ${{ env.TESTS_DIR }}
242260
run: |
243-
${{ (inputs.dbt-version == 'fusion' && '~/.local/bin/dbt') || 'dbt' }} debug -t "${{ inputs.warehouse-type }}"
261+
if [ "$DBT_VERSION" = "fusion" ]; then DBT_BIN="$HOME/.local/bin/dbt"; else DBT_BIN="dbt"; fi
262+
"$DBT_BIN" debug -t "$WAREHOUSE"
244263
245264
- name: Test
246265
working-directory: "${{ env.TESTS_DIR }}/tests"
247-
run: py.test -n${{ (inputs.warehouse-type == 'spark' && '4') || '8' }} -vvv --target "${{ inputs.warehouse-type }}" --junit-xml=test-results.xml --html=detailed_report_${{ inputs.warehouse-type }}_dbt_${{ inputs.dbt-version }}.html --self-contained-html --clear-on-end ${{ (inputs.dbt-version == 'fusion' && '--runner-method fusion') || '' }}
266+
env:
267+
PYTEST_PARALLEL: ${{ (inputs.warehouse-type == 'spark' && '4') || '8' }}
268+
FUSION_RUNNER_FLAG: ${{ (inputs.dbt-version == 'fusion' && '--runner-method fusion') || '' }}
269+
run: |
270+
py.test -n"$PYTEST_PARALLEL" -vvv --target "$WAREHOUSE" \
271+
--junit-xml=test-results.xml \
272+
--html="detailed_report_${WAREHOUSE}_dbt_${DBT_VERSION}.html" \
273+
--self-contained-html --clear-on-end $FUSION_RUNNER_FLAG
248274
249275
- name: Upload test results
250276
if: always()
251-
uses: pmeier/pytest-results-action@v0.8.0
277+
# pmeier/pytest-results-action v0.8.0, checked 2026-04-26.
278+
uses: pmeier/pytest-results-action@0841ca7226ab155943837380769373a5dd14d7ed
252279
with:
253280
path: ${{ env.TESTS_DIR }}/tests/test-results.xml
254281
summary: true
@@ -269,6 +296,7 @@ jobs:
269296
working-directory: ${{ env.TESTS_DIR }}
270297
continue-on-error: true
271298
run: |
272-
${{ (inputs.dbt-version == 'fusion' && '~/.local/bin/dbt') || 'dbt' }} run-operation elementary_tests.drop_test_schemas \
299+
if [ "$DBT_VERSION" = "fusion" ]; then DBT_BIN="$HOME/.local/bin/dbt"; else DBT_BIN="dbt"; fi
300+
"$DBT_BIN" run-operation elementary_tests.drop_test_schemas \
273301
--project-dir dbt_project \
274-
-t "${{ inputs.warehouse-type }}"
302+
-t "$WAREHOUSE"

0 commit comments

Comments (0)