Skip to content

Commit 5fafd8d

Browse files
feat: add dbt-fabricspark adapter support (ELE-5295) (#964)
* feat: add dbt-fabricspark adapter support (ELE-5295) Add fabricspark__ prefixed macros that delegate to spark__ implementations. dbt-fabricspark speaks Spark SQL but doesn't declare dependencies=["spark"] in its AdapterPlugin, so dispatch falls through to default__ instead of spark__. Macros added across 22 files covering: - Cross-DB utils (timestamps, dateadd/diff, to_char, safe_cast, etc.) - Data types (string, bool, timestamp, type lists, normalization) - Table operations (create, delete/insert, temp relations, etc.) - System utils (buckets CTE) - Metadata collection (information schema) - Test utils (clean test tables) Also adds a defensive fallback in get_elementary_relation.sql using api.Relation.create() when adapter.get_relation() returns None, working around a bug in dbt-fabricspark's list_relations_without_caching. Co-Authored-By: Itamar Hartstein <haritamar@gmail.com> * revert: remove get_elementary_relation.sql defensive fallback Per reviewer feedback, removing the api.Relation.create() fallback since callers rely on None returns to detect missing relations. Co-Authored-By: Itamar Hartstein <haritamar@gmail.com> * feat: dedicated fabricspark__generate_elementary_profile_args with Fabric Livy API params Co-Authored-By: Itamar Hartstein <haritamar@gmail.com> * fix: map lakehouse param to elementary_database instead of elementary_schema Co-Authored-By: Itamar Hartstein <haritamar@gmail.com> --------- Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com> Co-authored-by: Itamar Hartstein <haritamar@gmail.com>
1 parent a025161 commit 5fafd8d

21 files changed

+149
-0
lines changed

macros/edr/metadata_collection/get_columns_from_information_schema.sql

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -124,6 +124,18 @@
124124
{{ elementary.get_empty_columns_from_information_schema_table() }}
125125
{% endmacro %}
126126

127+
{# dbt-fabricspark speaks Spark SQL but does not declare a "spark" dispatch
   dependency, so dispatch would otherwise fall through to default__.
   Forward explicitly to the Spark implementation. #}
{% macro fabricspark__get_columns_from_information_schema(database_name, schema_name, table_name=none) %}
    {% do return(elementary.spark__get_columns_from_information_schema(database_name, schema_name, table_name)) %}
{% endmacro %}
138+
127139
{% macro get_empty_columns_from_information_schema_table() %}
128140
{{
129141
elementary.empty_table(

macros/edr/system/system_utils/buckets_cte.sql

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,14 @@
8484
{{ return(complete_buckets_cte) }}
8585
{% endmacro %}
8686

87+
{# Fabric Spark buckets CTE: identical to the Spark implementation,
   delegated because fabricspark does not dispatch to spark__ macros. #}
{% macro fabricspark__complete_buckets_cte(time_bucket, bucket_end_expr, min_bucket_start_expr, max_bucket_end_expr) %}
    {% do return(
        elementary.spark__complete_buckets_cte(time_bucket, bucket_end_expr, min_bucket_start_expr, max_bucket_end_expr)
    ) %}
{% endmacro %}
94+
8795
{% macro snowflake__complete_buckets_cte(
8896
time_bucket,
8997
bucket_end_expr,

macros/edr/tests/test_utils/clean_elementary_test_tables.sql

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,16 @@
6565
) %}
6666
{% endmacro %}
6767

68+
{# Cleanup queries for elementary test tables on Fabric Spark: reuse the
   Spark implementation verbatim. #}
{% macro fabricspark__get_clean_elementary_test_tables_queries(test_table_relations) %}
    {% do return(elementary.spark__get_clean_elementary_test_tables_queries(test_table_relations)) %}
{% endmacro %}
77+
6878
{% macro clickhouse__get_clean_elementary_test_tables_queries(test_table_relations) %}
6979
{# Self-hosted clustered ClickHouse installations require tables to be dropped on all cluster nodes explicitly #}
7080
{% set queries = [] %}

macros/utils/cross_db_utils/current_timestamp.sql

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,14 @@
9292
cast(sysutcdatetime() as datetime2(6))
9393
{%- endmacro -%}
9494

95+
{# Current timestamp on Fabric Spark: same expression as Spark. #}
{% macro fabricspark__edr_current_timestamp() %}
    {% do return(elementary.spark__edr_current_timestamp()) %}
{% endmacro %}
98+
99+
{# Current UTC timestamp on Fabric Spark: same expression as Spark. #}
{% macro fabricspark__edr_current_timestamp_in_utc() %}
    {% do return(elementary.spark__edr_current_timestamp_in_utc()) %}
{% endmacro %}
102+
95103
{% macro dremio__edr_current_timestamp() -%} current_timestamp() {%- endmacro -%}
96104

97105
{% macro dremio__edr_current_timestamp_in_utc() -%}

macros/utils/cross_db_utils/datediff.sql

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -240,6 +240,10 @@
240240

241241
{% endmacro %}
242242

243+
{# datediff on Fabric Spark: identical semantics to Spark, so delegate. #}
{% macro fabricspark__edr_datediff(first_date, second_date, datepart) %}
    {% do return(elementary.spark__edr_datediff(first_date, second_date, datepart)) %}
{% endmacro %}
246+
243247
{% macro athena__edr_datediff(first_date, second_date, date_part) %}
244248
{% set macro = dbt.datediff or dbt_utils.datediff %}
245249
{% if not macro %}

macros/utils/cross_db_utils/generate_elementary_profile_args.sql

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -311,6 +311,26 @@
311311
) %}
312312
{% endmacro %}
313313

314+
{# Profile scaffold for dbt-fabricspark, which connects via the Fabric Livy API.
   `lakehouse` is mapped to elementary_database and `schema` to elementary_schema;
   the workspace/lakehouse GUIDs are placeholders the user must fill in.
   NOTE(review): `method` is accepted for signature parity with the other
   adapters but is not used here — the connection method is fixed to "livy". #}
{% macro fabricspark__generate_elementary_profile_args(method, elementary_database, elementary_schema) %}
    {# `default` only kicks in when target.endpoint is undefined. #}
    {% set livy_endpoint = target.endpoint | default("https://api.fabric.microsoft.com/v1") %}
    {% set parameters = [] %}
    {% do parameters.append(_parameter("type", "fabricspark")) %}
    {% do parameters.append(_parameter("method", "livy")) %}
    {% do parameters.append(_parameter("authentication", "CLI")) %}
    {% do parameters.append(_parameter("endpoint", livy_endpoint)) %}
    {% do parameters.append(_parameter("workspaceid", "<WORKSPACE_GUID>")) %}
    {% do parameters.append(_parameter("lakehouseid", "<LAKEHOUSE_GUID>")) %}
    {% do parameters.append(_parameter("lakehouse", elementary_database)) %}
    {% do parameters.append(_parameter("schema", elementary_schema)) %}
    {% do parameters.append(_parameter("threads", target.threads)) %}
    {% do return(parameters) %}
{% endmacro %}
333+
314334
{% macro default__generate_elementary_profile_args(
315335
method, elementary_database, elementary_schema
316336
) %}

macros/utils/cross_db_utils/incremental_strategy.sql

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,10 @@
2424
{% do return("merge") %}
2525
{% endmacro %}
2626

27+
{# Default incremental strategy for Fabric Spark: delegate to Spark's choice.
   Fix: the opening tag used `{%-` (left whitespace trim) while every sibling
   adapter macro in this file uses plain `{% macro` — normalized for consistency. #}
{% macro fabricspark__get_default_incremental_strategy() %}
    {{ return(elementary.spark__get_default_incremental_strategy()) }}
{% endmacro %}
30+
2731
{% macro default__get_default_incremental_strategy() %}
2832
{% do return(none) %}
2933
{% endmacro %}

macros/utils/cross_db_utils/safe_cast.sql

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,3 +13,7 @@
1313
{% macro spark__edr_safe_cast(field, type) %}
1414
try_cast({{ field }} as {{ type }})
1515
{% endmacro %}
16+
17+
{# Safe cast on Fabric Spark: reuse Spark's try_cast-based implementation. #}
{% macro fabricspark__edr_safe_cast(field, type) %}
    {% do return(elementary.spark__edr_safe_cast(field, type)) %}
{% endmacro %}

macros/utils/cross_db_utils/target_database.sql

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,3 +28,7 @@
2828
{% macro sqlserver__target_database() %} {% do return(target.database) %} {% endmacro %}
2929

3030
{% macro vertica__target_database() %} {% do return(target.database) %} {% endmacro %}
31+
32+
{# Target database resolution on Fabric Spark: same rule as Spark. #}
{% macro fabricspark__target_database() %}
    {% do return(elementary.spark__target_database()) %}
{% endmacro %}

macros/utils/cross_db_utils/to_char.sql

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,10 @@
2424
{%- endif %}
2525
{% endmacro %}
2626

27+
{# to_char on Fabric Spark: delegate to the Spark formatting implementation. #}
{% macro fabricspark__edr_to_char(column, format) %}
    {% do return(elementary.spark__edr_to_char(column, format)) %}
{% endmacro %}
30+
2731
{% macro fabric__edr_to_char(column, format) %}
2832
convert(varchar, {{ column }}
2933
{%- if format %}, {{ format }})

0 commit comments

Comments
 (0)