11.PHONY : docs
22
3+ ifdef UV
4+ PIP := uv pip
5+ else
6+ PIP := pip3
7+ endif
8+
9+ UNAME_S := $(shell uname -s)
10+ ifeq ($(UNAME_S ) ,Darwin)
11+ SED_INPLACE = sed -i ''
12+ else
13+ SED_INPLACE = sed -i
14+ endif
15+
# Editable install of the project with dev extras, plus the example
# custom materializations package.
install-dev:
	$(PIP) install -e ".[dev,web,slack,dlt,lsp]" ./examples/custom_materializations
518
619install-doc :
7- pip3 install -r ./docs/requirements.txt
20+ $( PIP ) install -r ./docs/requirements.txt
821
922install-pre-commit :
1023 pre-commit install
1124
25+ install-dev-dbt-% :
26+ @version=" $* " ; \
27+ period_count=$$(echo "$$version" | tr -cd '.' | wc -c ) ; \
28+ if [ " $$ period_count" -eq 0 ]; then \
29+ version=" $$ {version:0:1}.$$ {version:1}" ; \
30+ elif [ " $$ period_count" -eq 1 ]; then \
31+ version=" $$ version.0" ; \
32+ fi ; \
33+ echo " Installing dbt version: $$ version" ; \
34+ cp pyproject.toml pyproject.toml.backup; \
35+ $(SED_INPLACE ) ' s/"pydantic>=2.0.0"/"pydantic"/g' pyproject.toml; \
36+ if [ " $$ version" = " 1.10.0" ]; then \
37+ echo " Applying special handling for dbt 1.10.0" ; \
38+ $(SED_INPLACE ) -E ' s/"(dbt-core)[^"]*"/"\1~=' " $$ version" ' "/g' pyproject.toml; \
39+ $(SED_INPLACE ) -E ' s/"(dbt-(bigquery|duckdb|snowflake|athena-community|clickhouse|databricks|redshift|trino))[^"]*"/"\1"/g' pyproject.toml; \
40+ else \
41+ echo " Applying version $$ version to all dbt packages" ; \
42+ $(SED_INPLACE ) -E ' s/"(dbt-[^"><=~!]+)[^"]*"/"\1~=' " $$ version" ' "/g' pyproject.toml; \
43+ fi ; \
44+ $(MAKE ) install-dev; \
45+ if [ " $$ version" = " 1.6.0" ]; then \
46+ echo " Applying overrides for dbt 1.6.0" ; \
47+ $(PIP ) install ' pydantic>=2.0.0' ' google-cloud-bigquery==3.30.0' ' databricks-sdk==0.28.0' --reinstall; \
48+ fi ; \
49+ if [ " $$ version" = " 1.7.0" ]; then \
50+ echo " Applying overrides for dbt 1.7.0" ; \
51+ $(PIP ) install ' databricks-sdk==0.28.0' --reinstall; \
52+ fi ; \
53+ mv pyproject.toml.backup pyproject.toml; \
54+ echo " Restored original pyproject.toml"
55+
1256style :
1357 pre-commit run --all-files
1458
@@ -22,16 +66,16 @@ doc-test:
2266 python -m pytest --doctest-modules sqlmesh/core sqlmesh/utils
2367
2468package :
25- pip3 install build && python3 -m build
69+ $( PIP ) install build && python3 -m build
2670
2771publish : package
28- pip3 install twine && python3 -m twine upload dist/*
72+ $( PIP ) install twine && python3 -m twine upload dist/*
2973
3074package-tests :
31- pip3 install build && cp pyproject.toml tests/sqlmesh_pyproject.toml && python3 -m build tests/
75+ $( PIP ) install build && cp pyproject.toml tests/sqlmesh_pyproject.toml && python3 -m build tests/
3276
3377publish-tests : package-tests
34- pip3 install twine && python3 -m twine upload -r tobiko-private tests/dist/*
78+ $( PIP ) install twine && python3 -m twine upload -r tobiko-private tests/dist/*
3579
3680docs-serve :
3781 mkdocs serve
@@ -93,6 +137,9 @@ engine-test:
93137dbt-test :
94138 pytest -n auto -m " dbt and not cicdonly"
95139
140+ dbt-fast-test :
141+ pytest -n auto -m " dbt and fast" --retries 3
142+
96143github-test :
97144 pytest -n auto -m " github"
98145
@@ -109,7 +156,7 @@ guard-%:
109156 fi
110157
111158engine-% -install :
112- pip3 install -e " .[dev,web,slack,lsp,${* } ]" ./examples/custom_materializations
159+ $( PIP ) install -e " .[dev,web,slack,lsp,${* } ]" ./examples/custom_materializations
113160
114161engine-docker-% -up :
115162 docker compose -f ./tests/core/engine_adapter/integration/docker/compose.${* } .yaml up -d
@@ -159,11 +206,11 @@ snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAK
159206 pytest -n auto -m " snowflake" --retries 3 --junitxml=test-results/junit-snowflake.xml
160207
161208bigquery-test : guard-BIGQUERY_KEYFILE engine-bigquery-install
162- pip install -e " .[bigframes]"
209+ $( PIP ) install -e " .[bigframes]"
163210 pytest -n auto -m " bigquery" --retries 3 --junitxml=test-results/junit-bigquery.xml
164211
165212databricks-test : guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_ACCESS_TOKEN guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
166- pip install ' databricks-connect==${DATABRICKS_CONNECT_VERSION}'
213+ $( PIP ) install ' databricks-connect==${DATABRICKS_CONNECT_VERSION}'
167214 pytest -n auto -m " databricks" --retries 3 --junitxml=test-results/junit-databricks.xml
168215
169216redshift-test : guard-REDSHIFT_HOST guard-REDSHIFT_USER guard-REDSHIFT_PASSWORD guard-REDSHIFT_DATABASE engine-redshift-install
@@ -176,7 +223,7 @@ athena-test: guard-AWS_ACCESS_KEY_ID guard-AWS_SECRET_ACCESS_KEY guard-ATHENA_S3
176223 pytest -n auto -m " athena" --retries 3 --junitxml=test-results/junit-athena.xml
177224
178225fabric-test : guard-FABRIC_HOST guard-FABRIC_CLIENT_ID guard-FABRIC_CLIENT_SECRET guard-FABRIC_DATABASE engine-fabric-install
179- pytest -n auto -m " fabric" --retries 3 --junitxml=test-results/junit-fabric.xml
226+ pytest -n auto -m " fabric" --retries 3 --junitxml=test-results/junit-fabric.xml
180227
181228gcp-postgres-test : guard-GCP_POSTGRES_INSTANCE_CONNECTION_STRING guard-GCP_POSTGRES_USER guard-GCP_POSTGRES_PASSWORD guard-GCP_POSTGRES_KEYFILE_JSON engine-gcppostgres-install
182229 pytest -n auto -m " gcp_postgres" --retries 3 --junitxml=test-results/junit-gcp-postgres.xml
0 commit comments