-
Notifications
You must be signed in to change notification settings - Fork 192
222 lines (192 loc) · 9.46 KB
/
integration.yml
File metadata and controls
222 lines (192 loc) · 9.46 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
---
# Integration Tests for dbt-databricks
#
# This workflow runs integration tests that require Databricks secrets.
#
# For testing external contributions (PRs from forks):
# 1. Go to Actions tab -> Integration Tests -> Run workflow
# 2. Enter the PR number in the 'pr_number' field
# 3. Click "Run workflow"
#
# This approach is secure because:
# - The workflow runs in the databricks repository context (access to secrets)
# - The code to test is explicitly specified by maintainers
# - No automatic execution of untrusted code with secrets
name: Integration Tests

on:
  pull_request:
    # Run on PRs to the same repository (internal contributors).
    # Skip doc-only and unit-test-only changes; they are covered elsewhere.
    paths-ignore:
      - "**.MD"
      - "**.md"
      - "adapters/databricks/__version__.py"
      - "tests/unit/**"
      - ".github/workflows/main.yml"
      - ".github/workflows/stale.yml"
  workflow_dispatch:
    # Manual triggering for external contributions and ad-hoc testing
    inputs:
      pr_number:
        description: "PR number to test (for external contributions)"
        required: false
        type: string
      git_ref:
        description: "Git ref (branch/tag/commit) to test"
        required: false
        type: string

# Least-privilege token: OIDC id-token for cloud auth, read-only contents.
permissions:
  id-token: write
  contents: read

# One run per workflow+ref; a newer push cancels the in-flight run.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  # End-to-end tests against a Unity Catalog enabled all-purpose cluster.
  run-uc-cluster-e2e-tests:
    runs-on:
      group: databricks-protected-runner-group
      labels: linux-ubuntu-latest
    environment: azure-prod
    # Only run on internal PRs or manual dispatch - skip external forks to avoid secret access failures
    if: github.event_name == 'workflow_dispatch' || github.event.pull_request.head.repo.full_name == github.repository
    env:
      DBT_DATABRICKS_HOST_NAME: ${{ secrets.DATABRICKS_HOST }}
      DBT_DATABRICKS_CLIENT_ID: ${{ secrets.TEST_PECO_SP_ID }}
      DBT_DATABRICKS_CLIENT_SECRET: ${{ secrets.TEST_PECO_SP_SECRET }}
      DBT_DATABRICKS_UC_INITIAL_CATALOG: peco
      # Note: the secret value is concatenated with the literal suffix "test".
      DBT_DATABRICKS_LOCATION_ROOT: ${{ secrets.TEST_PECO_EXTERNAL_LOCATION }}test
      TEST_PECO_UC_CLUSTER_ID: ${{ secrets.TEST_PECO_UC_CLUSTER_ID }}
      UV_FROZEN: "1"
    steps:
      - name: Check out repository
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
        with:
          # For pull_request: checkout the PR head commit
          # For workflow_dispatch with pr_number: checkout that PR's head
          # For workflow_dispatch with git_ref: checkout that ref
          # Otherwise: checkout current branch
          ref: ${{ github.event.pull_request.head.sha || (github.event.inputs.pr_number && format('refs/pull/{0}/head', github.event.inputs.pr_number)) || github.event.inputs.git_ref || github.ref }}
          # Fetch enough history for PR testing
          fetch-depth: 0

      - name: Setup JFrog PyPI Proxy
        uses: ./.github/actions/setup-jfrog-pypi

      - name: Set up python
        id: setup-python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
        with:
          python-version: "3.10"

      # Resolves the cluster HTTP path for the run.
      # NOTE(review): presumably exports DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH
      # via GITHUB_ENV for the test step below — confirm in the script.
      - name: Get http path from environment
        run: python .github/workflows/build_cluster_http_path.py
        shell: sh

      - name: Install uv
        uses: astral-sh/setup-uv@38f3f104447c67c051c4a08e39b64a148898af3a # v4

      - name: Install Hatch
        id: install-dependencies
        uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # install

      - name: Run UC Cluster Functional Tests
        run: DBT_TEST_USER=notnecessaryformosttests@example.com DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run uc-cluster-e2e

      # Always collect dbt logs, even when the test step fails.
      - name: Upload UC Cluster Test Logs
        if: always()
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        with:
          name: uc-cluster-test-logs
          path: logs/
          retention-days: 5
run-sqlwarehouse-e2e-tests:
runs-on:
group: databricks-protected-runner-group
labels: linux-ubuntu-latest
environment: azure-prod
# Only run on internal PRs or manual dispatch - skip external forks to avoid secret access failures
if: github.event_name == 'workflow_dispatch' || github.event.pull_request.head.repo.full_name == github.repository
env:
DBT_DATABRICKS_HOST_NAME: ${{ secrets.DATABRICKS_HOST }}
DBT_DATABRICKS_CLIENT_ID: ${{ secrets.TEST_PECO_SP_ID }}
DBT_DATABRICKS_CLIENT_SECRET: ${{ secrets.TEST_PECO_SP_SECRET }}
DBT_DATABRICKS_HTTP_PATH: ${{ secrets.TEST_PECO_WAREHOUSE_HTTP_PATH }}
DBT_DATABRICKS_UC_INITIAL_CATALOG: peco
DBT_DATABRICKS_LOCATION_ROOT: ${{ secrets.TEST_PECO_EXTERNAL_LOCATION }}test
TEST_PECO_UC_CLUSTER_ID: ${{ secrets.TEST_PECO_UC_CLUSTER_ID }}
UV_FROZEN: "1"
steps:
- name: Check out repository
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
# For pull_request: checkout the PR head commit
# For workflow_dispatch with pr_number: checkout that PR's head
# For workflow_dispatch with git_ref: checkout that ref
# Otherwise: checkout current branch
ref: ${{ github.event.pull_request.head.sha || (github.event.inputs.pr_number && format('refs/pull/{0}/head', github.event.inputs.pr_number)) || github.event.inputs.git_ref || github.ref }}
# Fetch enough history for PR testing
fetch-depth: 0
- name: Setup JFrog PyPI Proxy
uses: ./.github/actions/setup-jfrog-pypi
- name: Set up python
id: setup-python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
with:
python-version: "3.10"
- name: Get http path from environment
run: python .github/workflows/build_cluster_http_path.py
shell: sh
- name: Install uv
uses: astral-sh/setup-uv@38f3f104447c67c051c4a08e39b64a148898af3a # v4
- name: Install Hatch
id: install-dependencies
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # install
- name: Run Sql Endpoint Functional Tests
run: DBT_TEST_USER=notnecessaryformosttests@example.com DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run sqlw-e2e
- name: Upload SQL Endpoint Test Logs
if: always()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: sql-endpoint-test-logs
path: logs/
retention-days: 5
run-cluster-e2e-tests:
runs-on:
group: databricks-protected-runner-group
labels: linux-ubuntu-latest
environment: azure-prod
# Only run on internal PRs or manual dispatch - skip external forks to avoid secret access failures
if: github.event_name == 'workflow_dispatch' || github.event.pull_request.head.repo.full_name == github.repository
env:
DBT_DATABRICKS_HOST_NAME: ${{ secrets.DATABRICKS_HOST }}
DBT_DATABRICKS_CLIENT_ID: ${{ secrets.TEST_PECO_SP_ID }}
DBT_DATABRICKS_CLIENT_SECRET: ${{ secrets.TEST_PECO_SP_SECRET }}
TEST_PECO_CLUSTER_ID: ${{ secrets.TEST_PECO_CLUSTER_ID }}
DBT_DATABRICKS_LOCATION_ROOT: ${{ secrets.TEST_PECO_EXTERNAL_LOCATION }}test
UV_FROZEN: "1"
steps:
- name: Check out repository
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
# For pull_request: checkout the PR head commit
# For workflow_dispatch with pr_number: checkout that PR's head
# For workflow_dispatch with git_ref: checkout that ref
# Otherwise: checkout current branch
ref: ${{ github.event.pull_request.head.sha || (github.event.inputs.pr_number && format('refs/pull/{0}/head', github.event.inputs.pr_number)) || github.event.inputs.git_ref || github.ref }}
# Fetch enough history for PR testing
fetch-depth: 0
- name: Setup JFrog PyPI Proxy
uses: ./.github/actions/setup-jfrog-pypi
- name: Set up python
id: setup-python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
with:
python-version: "3.10"
- name: Get http path from environment
run: python .github/workflows/build_cluster_http_path.py
shell: sh
- name: Install uv
uses: astral-sh/setup-uv@38f3f104447c67c051c4a08e39b64a148898af3a # v4
- name: Install Hatch
id: install-dependencies
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # install
- name: Run Cluster Functional Tests
run: DBT_TEST_USER=notnecessaryformosttests@example.com DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_HTTP_PATH=$DBT_DATABRICKS_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run cluster-e2e
- name: Upload Cluster Test Logs
if: always()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: cluster-test-logs
path: logs/
retention-days: 5