Skip to content

Commit 745791d

Browse files
authored
Feat: Nessie support (#3700)
1 parent f56d84c commit 745791d

8 files changed

Lines changed: 68 additions & 5 deletions

File tree

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -191,7 +191,7 @@ spark-test: engine-spark-up
191191
pytest -n auto -x -m "spark or pyspark" --retries 3 --junitxml=test-results/junit-spark.xml
192192

193193
trino-test: engine-trino-up
194-
pytest -n auto -x -m "trino or trino_iceberg or trino_delta" --retries 3 --junitxml=test-results/junit-trino.xml
194+
pytest -n auto -x -m "trino or trino_iceberg or trino_delta or trino_nessie" --retries 3 --junitxml=test-results/junit-trino.xml
195195

196196
#################
197197
# Cloud Engines #

docs/integrations/engines/trino.md

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,11 @@ iceberg.catalog.type=hive_metastore
4747

4848
**Note**: The Trino Iceberg Connector must be configured with an `iceberg.catalog.type` that supports views. At the time of this writing, these are `hive_metastore`, `glue`, and `rest`.
4949

50-
The `jdbc` and `nessie` catalogs do not support views and are thus incompatible with SQLMesh.
50+
The `jdbc` and `nessie` Iceberg catalog types do not support views and are thus incompatible with SQLMesh.
51+
52+
!!! info "Nessie"
53+
Nessie is supported when used as an Iceberg REST Catalog (`iceberg.catalog.type=rest`).
54+
For more information on how to configure the Trino Iceberg connector for this, see the [Nessie documentation](https://projectnessie.org/nessie-latest/trino/).
5155

5256
#### Delta Lake Connector Configuration
5357

tests/core/engine_adapter/integration/config.yaml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,17 @@ gateways:
4141
retries: 20
4242
state_connection:
4343
type: duckdb
44+
inttest_trino_nessie:
45+
connection:
46+
type: trino
47+
host: {{ env_var('DOCKER_HOSTNAME', 'localhost') }}
48+
port: 8080
49+
user: admin
50+
catalog: datalake_nessie
51+
http_scheme: http
52+
retries: 20
53+
state_connection:
54+
type: duckdb
4455
inttest_spark:
4556
connection:
4657
type: spark

tests/core/engine_adapter/integration/docker/_common-hive.yaml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,8 @@ services:
3535
/usr/bin/mc mb --quiet myminio/trino/testing_iceberg;
3636
/usr/bin/mc mb --quiet myminio/trino/testing_delta;
3737
/usr/bin/mc mb --quiet myminio/spark/datalake;
38-
/usr/bin/mc mb --quiet myminio/spark/testing
38+
/usr/bin/mc mb --quiet myminio/spark/testing;
39+
/usr/bin/mc mb --quiet myminio/nessie/warehouse;
3940
"
4041
depends_on:
4142
- minio

tests/core/engine_adapter/integration/docker/compose.trino.yaml

Lines changed: 25 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -83,4 +83,28 @@ services:
8383
HIVE_METASTORE_WAREHOUSE_DIR: s3://trino/datalake_delta
8484
<<: *hive_metastore_environments
8585
depends_on:
86-
- metastore
86+
- metastore
87+
88+
nessie:
89+
image: ghcr.io/projectnessie/nessie:0.102.2
90+
restart: on-failure
91+
ports:
92+
- '19120:19120'
93+
environment:
94+
nessie.version.store.type: JDBC2
95+
nessie.version.store.persist.jdbc.datasource: postgresql
96+
quarkus.datasource.postgresql.jdbc.url: jdbc:postgresql://metastore:5432/nessie
97+
quarkus.datasource.postgresql.username: hive
98+
quarkus.datasource.postgresql.password: hive
99+
nessie.catalog.default-warehouse: warehouse
100+
nessie.catalog.warehouses.warehouse.location: s3://nessie/warehouse
101+
nessie.catalog.service.s3.default-options.region: us-east-1
102+
nessie.catalog.service.s3.default-options.path-style-access: 'true'
103+
nessie.catalog.service.s3.default-options.access-key: urn:nessie-secret:quarkus:nessie.catalog.secrets.access-key
104+
nessie.catalog.secrets.access-key.name: minio
105+
nessie.catalog.secrets.access-key.secret: minio123
106+
nessie.catalog.service.s3.default-options.endpoint: http://minio:9000/
107+
108+
depends_on:
109+
- metastore
110+

tests/core/engine_adapter/integration/docker/init-metastore-db.sql

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,4 +10,5 @@ create database datalake_iceberg_metastore;
1010
create database datalake_delta_metastore;
1111
create database testing_metastore;
1212
create database testing_iceberg_metastore;
13-
create database testing_delta_metastore;
13+
create database testing_delta_metastore;
14+
create database nessie;
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
connector.name=iceberg
2+
iceberg.catalog.type=rest
3+
iceberg.rest-catalog.security=NONE
4+
iceberg.rest-catalog.uri=http://nessie:19120/iceberg/
5+
iceberg.rest-catalog.vended-credentials-enabled=false
6+
iceberg.metadata-cache.enabled=false
7+
iceberg.hive-catalog-name=datalake
8+
9+
fs.native-s3.enabled=true
10+
s3.endpoint=http://minio:9000
11+
s3.path-style-access=true
12+
s3.aws-access-key=minio
13+
s3.aws-secret-key=minio123
14+
s3.region=us-east-1

tests/core/engine_adapter/integration/test_integration.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -146,6 +146,14 @@ def test_type(request):
146146
pytest.mark.trino_delta,
147147
],
148148
),
149+
pytest.param(
150+
"trino_nessie",
151+
marks=[
152+
pytest.mark.docker,
153+
pytest.mark.engine,
154+
pytest.mark.trino_nessie,
155+
],
156+
),
149157
pytest.param(
150158
"spark",
151159
marks=[

0 commit comments

Comments
 (0)