|
18 | 18 | - clickhouse |
19 | 19 | - dremio |
20 | 20 | - duckdb |
| 21 | + - fabric |
21 | 22 | elementary-ref: |
22 | 23 | type: string |
23 | 24 | required: false |
|
51 | 52 | env: |
52 | 53 | BRANCH_NAME: ${{ github.head_ref || github.ref_name }} |
53 | 54 | TESTS_DIR: ${{ github.workspace }}/dbt-data-reliability/integration_tests |
| 55 | + MSSQL_SA_PASSWORD: ${{ secrets.MSSQL_SA_PASSWORD || 'Elementary123!' }} |
54 | 56 |
|
55 | 57 | jobs: |
56 | 58 | test: |
@@ -100,6 +102,25 @@ jobs: |
100 | 102 | timeout 180 bash -c 'until [ "$(docker inspect -f {{.State.Health.Status}} dremio 2>/dev/null)" = "healthy" ]; do sleep 5; done' |
101 | 103 | echo "Dremio is healthy." |
102 | 104 |
|
| 105 | + - name: Start SQL Server |
| 106 | + if: inputs.warehouse-type == 'fabric' |
| 107 | + working-directory: ${{ env.TESTS_DIR }} |
| 108 | + env: |
| 109 | + MSSQL_SA_PASSWORD: ${{ env.MSSQL_SA_PASSWORD }} |
| 110 | + run: | |
| 111 | + docker compose -f docker-compose-sqlserver.yml up -d |
| 112 | + echo "Waiting for SQL Server to become healthy..." |
| 113 | + timeout 120 bash -c 'until [ "$(docker inspect -f {{.State.Health.Status}} sqlserver 2>/dev/null)" = "healthy" ]; do sleep 5; done' |
| 114 | + echo "SQL Server is healthy." |
| 115 | +
|
| 116 | + - name: Install ODBC Driver |
| 117 | + if: inputs.warehouse-type == 'fabric' |
| 118 | + run: | |
| 119 | + curl -fsSL https://packages.microsoft.com/keys/microsoft.asc | sudo tee /etc/apt/trusted.gpg.d/microsoft.asc > /dev/null
| 120 | + curl -fsSL https://packages.microsoft.com/config/ubuntu/$(lsb_release -rs)/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list > /dev/null
| 121 | + sudo apt-get update |
| 122 | + sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 unixodbc-dev |
| 123 | +
|
103 | 124 | - name: Start Spark |
104 | 125 | if: inputs.warehouse-type == 'spark' |
105 | 126 | working-directory: ${{ env.TESTS_DIR }} |
@@ -136,19 +157,20 @@ jobs: |
136 | 157 | run: |
137 | 158 | pip install${{ (inputs.dbt-version == 'latest_pre' && ' --pre') || '' }} |
138 | 159 | "dbt-core${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}" |
139 | | - "dbt-${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || (inputs.warehouse-type == 'spark' && 'spark[PyHive]') || (inputs.warehouse-type == 'athena' && 'athena-community') || inputs.warehouse-type }}${{ (!startsWith(inputs.dbt-version, 'latest') && format('~={0}', inputs.dbt-version)) || '' }}" |
| 160 | + "dbt-${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || (inputs.warehouse-type == 'spark' && 'spark[PyHive]') || (inputs.warehouse-type == 'athena' && 'athena-community') || (inputs.warehouse-type == 'fabric' && 'fabric') || inputs.warehouse-type }}${{ (!startsWith(inputs.dbt-version, 'latest') && format('~={0}', inputs.dbt-version)) || '' }}" |
140 | 161 |
|
141 | 162 | - name: Install dbt-fusion |
142 | 163 | if: inputs.dbt-version == 'fusion' |
143 | 164 | run: | |
144 | 165 | curl -fsSL https://public.cdn.getdbt.com/fs/install/install.sh | sh -s -- |
145 | 166 |
|
146 | 167 | - name: Install Elementary |
147 | | - run: pip install "./elementary[${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || inputs.warehouse-type }}]" |
| 168 | + run: pip install "./elementary[${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || (inputs.warehouse-type == 'fabric' && 'fabric') || inputs.warehouse-type }}]" |
148 | 169 |
|
149 | 170 | - name: Write dbt profiles |
150 | 171 | env: |
151 | 172 | CI_WAREHOUSE_SECRETS: ${{ secrets.CI_WAREHOUSE_SECRETS || '' }} |
| 173 | + MSSQL_SA_PASSWORD: ${{ env.MSSQL_SA_PASSWORD }} |
152 | 174 | run: | |
153 | 175 | # Schema name = dbt_<YYMMDD_HHMMSS>_<branch≤18>_<8-char hash> |
154 | 176 | # The hash prevents collisions across concurrent jobs; the branch |
|
0 commit comments