
Commit 0302d3c

Fix: empty schema and schema name parsing (#1550)
* fix: empty schema and schema name parsing
1 parent 2519ee0 commit 0302d3c

5 files changed: 45 additions & 20 deletions


sqlmesh/core/engine_adapter/base.py

Lines changed: 1 addition & 1 deletion
@@ -652,7 +652,7 @@ def drop_schema(
         """Drop a schema from a name or qualified table name."""
         self.execute(
             exp.Drop(
-                this=exp.to_identifier(schema_name.split(".")[0]),
+                this=exp.table_(schema_name.split(".")[0]),
                 kind="SCHEMA",
                 exists=ignore_if_not_exists,
                 cascade=cascade,
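
The one-line change builds the DROP SCHEMA target with exp.table_ instead of exp.to_identifier, i.e. as a sqlglot Table node rather than a bare Identifier. A minimal sketch of the expression the method now constructs (argument values are illustrative placeholders, not sqlmesh defaults):

from sqlglot import exp

# Sketch of the expression drop_schema now builds; "my_schema.my_table" is a
# placeholder for the "name or qualified table name" the method accepts.
schema_name = "my_schema.my_table"

drop = exp.Drop(
    this=exp.table_(schema_name.split(".")[0]),  # Table node wrapping the name
    kind="SCHEMA",
    exists=True,   # ignore_if_not_exists
    cascade=True,
)
print(drop.sql())  # DROP SCHEMA IF EXISTS my_schema CASCADE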

sqlmesh/core/engine_adapter/bigquery.py

Lines changed: 13 additions & 10 deletions
@@ -541,21 +541,24 @@ def _get_data_objects(
         """
         Returns all the data objects that exist in the given schema and optionally catalog.
         """
+        from google.api_core.exceptions import NotFound
         from google.cloud.bigquery import DatasetReference
 
         dataset_ref = DatasetReference(
             project=catalog_name or self.client.project, dataset_id=schema_name
         )
-        all_tables = self._db_call(self.client.list_tables, dataset=dataset_ref)
-        return [
-            DataObject(
-                catalog=table.project,
-                schema=table.dataset_id,
-                name=table.table_id,
-                type=DataObjectType.from_str(table.table_type),
-            )
-            for table in all_tables
-        ]
+        try:
+            return [
+                DataObject(
+                    catalog=table.project,
+                    schema=table.dataset_id,
+                    name=table.table_id,
+                    type=DataObjectType.from_str(table.table_type),
+                )
+                for table in self._db_call(self.client.list_tables, dataset=dataset_ref)
+            ]
+        except NotFound:
+            return []
 
     @property
     def _query_data(self) -> t.Any:
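
With this change, listing objects in a BigQuery dataset that does not exist is reported as "no data objects" instead of surfacing an error. A standalone sketch of the same pattern against the google-cloud-bigquery client (project and dataset names are placeholders; sqlmesh's _db_call wrapper is omitted):

from google.api_core.exceptions import NotFound
from google.cloud import bigquery
from google.cloud.bigquery import DatasetReference

client = bigquery.Client()
dataset_ref = DatasetReference(project="my-project", dataset_id="missing_schema")

try:
    # The listing is fetched lazily, so iterate inside the try block.
    tables = list(client.list_tables(dataset=dataset_ref))
except NotFound:
    # Missing dataset: treat it the same as an empty one.
    tables = []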

sqlmesh/core/engine_adapter/snowflake.py

Lines changed: 8 additions & 1 deletion
@@ -89,9 +89,16 @@ def _get_data_objects(
         """
         Returns all the data objects that exist in the given schema and optionally catalog.
         """
+        from snowflake.connector.errors import ProgrammingError
+
         target = nullsafe_join(".", catalog_name, schema_name)
         sql = f"SHOW TERSE OBJECTS IN {target}"
-        df = self.fetchdf(sql, quote_identifiers=True)
+        try:
+            df = self.fetchdf(sql, quote_identifiers=True)
+        except ProgrammingError as e:
+            if "Object does not exist" in str(e):
+                return []
+            raise e
         if df.empty:
             return []
         return [
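
Snowflake signals a missing schema by raising ProgrammingError from SHOW TERSE OBJECTS, so the adapter matches on the message and converts only that case into an empty result. A standalone sketch with the raw connector (connection parameters and the schema name are placeholders):

import snowflake.connector
from snowflake.connector.errors import ProgrammingError

con = snowflake.connector.connect(account="my_account", user="me", password="...")
cur = con.cursor()
try:
    cur.execute("SHOW TERSE OBJECTS IN my_db.missing_schema")
    rows = cur.fetchall()
except ProgrammingError as e:
    if "Object does not exist" in str(e):
        rows = []  # a missing schema behaves like an empty one
    else:
        raise  # any other programming error still propagates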

sqlmesh/core/engine_adapter/spark.py

Lines changed: 11 additions & 5 deletions
@@ -271,11 +271,17 @@ def _get_data_objects(
     ) -> t.List[DataObject]:
         target = nullsafe_join(".", catalog_name, schema_name)
         sql = f"SHOW TABLE EXTENDED IN {target} LIKE '*'"
-        results = (
-            self.fetch_pyspark_df(sql).collect()
-            if self._use_spark_session
-            else self.fetchdf(sql).to_dict("records")
-        )
+        try:
+            results = (
+                self.fetch_pyspark_df(sql).collect()
+                if self._use_spark_session
+                else self.fetchdf(sql).to_dict("records")
+            )
+        # Improvement: Figure out all the different exceptions we could get from executing a query either with or
+        # without a Spark Session. In addition, Databricks would need to be updated to handle its own exceptions.
+        # Therefore just doing except Exception for now.
+        except Exception:
+            return []
         return [
             DataObject(
                 catalog=catalog_name,
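
As the in-diff comment notes, the exception type varies between the Spark-session and non-session code paths, so the adapter catches Exception broadly. A sketch of the session branch only (the schema name is a placeholder; on a real session a missing schema typically raises AnalysisException, which this broad handler also covers):

from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

try:
    results = spark.sql("SHOW TABLE EXTENDED IN missing_schema LIKE '*'").collect()
except Exception:
    # Broad by design, mirroring the diff: a missing schema yields no objects.
    results = []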

tests/core/engine_adapter/test_integration.py

Lines changed: 12 additions & 3 deletions
@@ -317,14 +317,23 @@ def test_materialized_view(ctx: TestContext):
 
 def test_drop_schema(ctx: TestContext):
     ctx.columns_to_types = {"one": "int"}
-    ctx.init()
-    ctx.engine_adapter.create_schema("test_schema")
+    schema = normalize_identifiers(
+        exp.to_identifier(TEST_SCHEMA), dialect=ctx.engine_adapter.dialect
+    ).sql(dialect=ctx.engine_adapter.dialect)
+    ctx.engine_adapter.drop_schema(schema, cascade=True)
+    results = ctx.get_metadata_results()
+    assert len(results.tables) == 0
+    assert len(results.views) == 0
 
+    ctx.engine_adapter.create_schema(schema)
     view = ctx.table("test_view")
     view_query = exp.Select().select(exp.Literal.number(1).as_("one"))
     ctx.engine_adapter.create_view(view, view_query, ctx.columns_to_types)
+    results = ctx.get_metadata_results(schema)
+    assert len(results.tables) == 0
+    assert len(results.views) == 1
 
-    ctx.engine_adapter.drop_schema("test_schema", cascade=True)
+    ctx.engine_adapter.drop_schema(schema, cascade=True)
     results = ctx.get_metadata_results()
     assert len(results.tables) == 0
     assert len(results.views) == 0
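
The test now normalizes the schema name for the engine's dialect before using it, since unquoted identifiers resolve differently per engine (TEST_SCHEMA is the test module's schema-name constant). A small illustration of that with sqlglot, using two dialects chosen for contrast:

from sqlglot import exp
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

# Snowflake resolves unquoted identifiers to uppercase, Postgres to lowercase.
ident = exp.to_identifier("Test_Schema")
print(normalize_identifiers(ident.copy(), dialect="snowflake").sql(dialect="snowflake"))  # TEST_SCHEMA
print(normalize_identifiers(ident.copy(), dialect="postgres").sql(dialect="postgres"))    # test_schema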
