Skip to content

Commit 5cb4445

Browse files
authored
Merge pull request #1 from TobikoData/main
chore: Sync upstream `main`.
2 parents 52a9795 + 4f833af commit 5cb4445

File tree

22 files changed

+307
-62
lines changed

22 files changed

+307
-62
lines changed

.circleci/continue_config.yml

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -258,6 +258,10 @@ jobs:
258258
echo "export REDSHIFT_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
259259
echo "export GCP_POSTGRES_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
260260
echo "export FABRIC_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
261+
262+
# Make snowflake private key available
263+
echo $SNOWFLAKE_PRIVATE_KEY_RAW | base64 -d > /tmp/snowflake-keyfile.p8
264+
echo "export SNOWFLAKE_PRIVATE_KEY_FILE='/tmp/snowflake-keyfile.p8'" >> "$BASH_ENV"
261265
- run:
262266
name: Create test database
263267
command: ./.circleci/manage-test-db.sh << parameters.engine >> "$TEST_DB_NAME" up
@@ -309,7 +313,7 @@ workflows:
309313
matrix:
310314
parameters:
311315
engine:
312-
#- snowflake
316+
- snowflake
313317
- databricks
314318
- redshift
315319
- bigquery

.circleci/manage-test-db.sh

Lines changed: 1 addition & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ function_exists() {
2525
# Snowflake
2626
snowflake_init() {
2727
echo "Installing Snowflake CLI"
28-
pip install "snowflake-cli-labs<3.8.0"
28+
pip install "snowflake-cli"
2929
}
3030

3131
snowflake_up() {
@@ -40,20 +40,6 @@ snowflake_down() {
4040
databricks_init() {
4141
echo "Installing Databricks CLI"
4242
curl -fsSL https://raw.githubusercontent.com/databricks/setup-cli/main/install.sh | sudo sh || true
43-
44-
echo "Writing out Databricks CLI config file"
45-
echo -e "[DEFAULT]\nhost = $DATABRICKS_SERVER_HOSTNAME\ntoken = $DATABRICKS_ACCESS_TOKEN" > ~/.databrickscfg
46-
47-
# this takes a path like 'sql/protocolv1/o/2934659247569/0723-005339-foobar' and extracts '0723-005339-foobar' from it
48-
CLUSTER_ID=${DATABRICKS_HTTP_PATH##*/}
49-
50-
echo "Extracted cluster id: $CLUSTER_ID from '$DATABRICKS_HTTP_PATH'"
51-
52-
# Note: the cluster doesnt need to be running to create / drop catalogs, but it does need to be running to run the integration tests
53-
echo "Ensuring cluster is running"
54-
# the || true is to prevent the following error from causing an abort:
55-
# > Error: is in unexpected state Running.
56-
databricks clusters start $CLUSTER_ID || true
5743
}
5844

5945
databricks_up() {

.github/workflows/pr.yaml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,8 @@ jobs:
3030
test-vscode-e2e:
3131
runs-on:
3232
labels: [ubuntu-2204-8]
33+
# As at 2026-01-12 this job flakes 100% of the time. It needs investigation
34+
if: false
3335
steps:
3436
- uses: actions/checkout@v5
3537
- uses: actions/setup-node@v6

Makefile

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -212,14 +212,14 @@ risingwave-test: engine-risingwave-up
212212
# Cloud Engines #
213213
#################
214214

215-
snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER guard-SNOWFLAKE_PASSWORD engine-snowflake-install
215+
snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER engine-snowflake-install
216216
pytest -n auto -m "snowflake" --reruns 3 --junitxml=test-results/junit-snowflake.xml
217217

218218
bigquery-test: guard-BIGQUERY_KEYFILE engine-bigquery-install
219219
$(PIP) install -e ".[bigframes]"
220220
pytest -n auto -m "bigquery" --reruns 3 --junitxml=test-results/junit-bigquery.xml
221221

222-
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_ACCESS_TOKEN guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
222+
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
223223
$(PIP) install 'databricks-connect==${DATABRICKS_CONNECT_VERSION}'
224224
pytest -n auto -m "databricks" --reruns 3 --junitxml=test-results/junit-databricks.xml
225225

docs/integrations/engines/bigquery.md

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -193,6 +193,23 @@ If the `impersonated_service_account` argument is set, SQLMesh will:
193193

194194
The user account must have [sufficient permissions to impersonate the service account](https://cloud.google.com/docs/authentication/use-service-account-impersonation).
195195

196+
## Query Label
197+
198+
BigQuery supports a `query_label` session variable which is attached to query jobs and can be used for auditing / attribution.
199+
200+
SQLMesh supports setting it via `session_properties.query_label` on a model, as an array (or tuple) of key/value tuples.
201+
202+
Example:
203+
```sql
204+
MODEL (
205+
name my_project.my_dataset.my_model,
206+
dialect 'bigquery',
207+
session_properties (
208+
query_label = [('team', 'data_platform'), ('env', 'prod')]
209+
)
210+
);
211+
```
212+
196213
## Permissions Required
197214
With any of the above connection methods, ensure these BigQuery permissions are enabled to allow SQLMesh to work correctly.
198215

docs/integrations/engines/trino.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,7 @@ hive.metastore.glue.default-warehouse-dir=s3://my-bucket/
9090
| `http_scheme` | The HTTP scheme to use when connecting to your cluster. By default, it's `https` and can only be `http` for no-auth or basic auth. | string | N |
9191
| `port` | The port to connect to your cluster. By default, it's `443` for `https` scheme and `80` for `http` | int | N |
9292
| `roles` | Mapping of catalog name to a role | dict | N |
93+
| `source` | Value to send as Trino's `source` field for query attribution / auditing. Default: `sqlmesh`. | string | N |
9394
| `http_headers` | Additional HTTP headers to send with each request. | dict | N |
9495
| `session_properties` | Trino session properties. Run `SHOW SESSION` to see all options. | dict | N |
9596
| `retries` | Number of retries to attempt when a request fails. Default: `3` | int | N |

0 commit comments

Comments (0)