Skip to content

Commit 941c259

Browse files
committed
update test for doris
1 parent ff7c02b commit 941c259

File tree

6 files changed

+71
-117
lines changed

6 files changed

+71
-117
lines changed

.circleci/wait-for-db.sh

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -36,20 +36,26 @@ clickhouse_ready() {
3636

3737
doris_ready() {
3838
probe_port 9030
39-
40-
# Check that we have 3 alive backends
39+
4140
echo "Checking for 3 alive Doris backends..."
42-
41+
4342
while true; do
4443
echo "Checking Doris backends..."
45-
# Use docker compose exec to run mysql command inside the fe-01 container
46-
# Use timeout to prevent hanging, and handle connection errors gracefully
47-
ALIVE_BACKENDS=$(timeout 10 docker compose -f tests/core/engine_adapter/integration/docker/compose.doris.yaml exec -T fe-01 mysql -uroot -e "show backends \G" 2>/dev/null | grep -c "Alive: true" || echo "0")
44+
ALIVE_BACKENDS=$(timeout 10 docker compose exec -T doris-fe-01 mysql -uroot -e "show backends \G" 2>/dev/null | grep -c "Alive: true")
45+
46+
# Fall back to a safe value if we could not parse the backend count
47+
if ! [[ "$ALIVE_BACKENDS" =~ ^[0-9]+$ ]]; then
48+
echo "WARN: Unable to parse number of alive backends, got: '$ALIVE_BACKENDS'"
49+
ALIVE_BACKENDS=0
50+
fi
51+
4852
echo "Found $ALIVE_BACKENDS alive backends"
53+
4954
if [ "$ALIVE_BACKENDS" -ge 3 ]; then
5055
echo "Doris has 3 or more alive backends"
5156
break
5257
fi
58+
5359
echo "Waiting for more backends to become alive..."
5460
sleep 5
5561
done

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@ dev = [
7878
"pydantic",
7979
"PyAthena[Pandas]",
8080
"PyGithub>=2.6.0",
81+
"pymysql",
8182
"pyodbc>=5.0.0",
8283
"pyperf",
8384
"pyspark~=3.5.0",

sqlmesh/cli/project_init.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
from sqlmesh.integrations.dlt import generate_dlt_models_and_settings
88
from sqlmesh.utils.date import yesterday_ds
99
from sqlmesh.utils.errors import SQLMeshError
10-
from datetime import datetime
1110

1211
from sqlmesh.core.config.connection import (
1312
CONNECTION_CONFIG_TO_TYPE,
@@ -161,7 +160,7 @@ class ExampleObjects:
161160
python_macros: t.Dict[str, str]
162161

163162

164-
def _gen_example_objects(schema_name: str, start_year: int) -> ExampleObjects:
163+
def _gen_example_objects(schema_name: str, dialect: str) -> ExampleObjects:
165164
sql_models: t.Dict[str, str] = {}
166165
python_models: t.Dict[str, str] = {}
167166
seeds: t.Dict[str, str] = {}
@@ -193,9 +192,10 @@ def _gen_example_objects(schema_name: str, start_year: int) -> ExampleObjects:
193192
sql_models[incremental_model_name] = f"""MODEL (
194193
name {incremental_model_name},
195194
kind INCREMENTAL_BY_TIME_RANGE (
196-
time_column event_date
195+
time_column event_date,
196+
{"partition_by_time_column false" if dialect == "doris" else ""}
197197
),
198-
start '{start_year}-01-01',
198+
start '2020-01-01',
199199
cron '@daily',
200200
grain (id, event_date)
201201
);
@@ -224,14 +224,14 @@ def _gen_example_objects(schema_name: str, start_year: int) -> ExampleObjects:
224224
);
225225
"""
226226

227-
seeds["seed_data"] = f"""id,item_id,event_date
228-
1,2,{start_year}-01-01
229-
2,1,{start_year}-01-01
230-
3,3,{start_year}-01-03
231-
4,1,{start_year}-01-04
232-
5,1,{start_year}-01-05
233-
6,1,{start_year}-01-06
234-
7,1,{start_year}-01-07
227+
seeds["seed_data"] = """id,item_id,event_date
228+
1,2,2020-01-01
229+
2,1,2020-01-01
230+
3,3,2020-01-03
231+
4,1,2020-01-04
232+
5,1,2020-01-05
233+
6,1,2020-01-06
234+
7,1,2020-01-07
235235
"""
236236

237237
audits["assert_positive_order_ids"] = """AUDIT (
@@ -319,7 +319,7 @@ def init_example_project(
319319
settings = None
320320
start = None
321321
if engine_type and template == ProjectTemplate.DLT:
322-
project_dialect = dialect or DIALECT_TO_TYPE.get(engine_type)
322+
project_dialect = dialect if dialect else DIALECT_TO_TYPE.get(engine_type)
323323
if pipeline and project_dialect:
324324
dlt_models, settings, start = generate_dlt_models_and_settings(
325325
pipeline_name=pipeline, dialect=project_dialect, dlt_path=dlt_path
@@ -329,9 +329,6 @@ def init_example_project(
329329
"Please provide a DLT pipeline with the `--dlt-pipeline` flag to generate a SQLMesh project from DLT."
330330
)
331331

332-
if engine_type == "doris":
333-
start = datetime(datetime.now().year, 1, 1).strftime("%Y-%m-%d")
334-
335332
_create_config(config_path, engine_type, dialect, settings, start, template, cli_mode)
336333
if template == ProjectTemplate.DBT:
337334
return config_path
@@ -344,7 +341,10 @@ def init_example_project(
344341
)
345342
return config_path
346343

347-
example_objects = _gen_example_objects(schema_name=schema_name, start_year=datetime.now().year)
344+
example_objects = _gen_example_objects(
345+
schema_name=schema_name,
346+
dialect=dialect if dialect else DIALECT_TO_TYPE.get(engine_type, "duckdb"),
347+
)
348348

349349
if template != ProjectTemplate.EMPTY:
350350
_create_object_files(models_path, example_objects.sql_models, "sql")

sqlmesh/core/model/meta.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,10 @@
4848

4949
FunctionCall = t.Tuple[str, t.Dict[str, exp.Expression]]
5050

51+
import logging
52+
53+
logger = logging.getLogger(__name__)
54+
5155

5256
class ModelMeta(_Node):
5357
"""Metadata for models which can be defined in SQL."""
@@ -475,12 +479,9 @@ def physical_properties(self) -> t.Dict[str, exp.Expression]:
475479
and self.unique_key
476480
and self.dialect == "doris"
477481
):
478-
# Convert unique_key expressions to a format suitable for table_properties
479482
if len(self.unique_key) == 1:
480-
# Single column key
481483
properties["unique_key"] = self.unique_key[0]
482484
else:
483-
# Multiple column key - create a tuple expression
484485
properties["unique_key"] = exp.Tuple(expressions=self.unique_key)
485486

486487
return properties

tests/core/engine_adapter/integration/test_integration.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,10 @@
4444
DATA_TYPE = exp.DataType.Type
4545
VARCHAR_100 = exp.DataType.build("varchar(100)")
4646

47+
import logging
48+
49+
logger = logging.getLogger(__name__)
50+
4751

4852
class PlanResults(PydanticModel):
4953
plan: Plan
@@ -1920,6 +1924,7 @@ def _mutate_config(current_gateway_name: str, config: Config):
19201924
unique_key item_id,
19211925
batch_size 1
19221926
),
1927+
dialect {ctx.dialect},
19231928
{table_format}
19241929
start '2020-01-01',
19251930
end '2020-01-07',

0 commit comments

Comments (0)