This repository was archived by the owner on Apr 1, 2026. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 67
Expand file tree
/
Copy pathfunction.py
More file actions
340 lines (274 loc) · 11.1 KB
/
function.py
File metadata and controls
340 lines (274 loc) · 11.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import logging
from typing import Callable, cast, get_origin, Optional, TYPE_CHECKING
if TYPE_CHECKING:
from bigframes.session import Session
import bigframes.series
import google.api_core.exceptions
from google.cloud import bigquery
import bigframes.formatting_helpers as bf_formatting
from bigframes.functions import _function_session as bff_session
from bigframes.functions import _utils, function_typing, udf_def
logger = logging.getLogger(__name__)
class UnsupportedTypeError(ValueError):
    """Raised when a type is not supported for a BigQuery function.

    Attributes:
        type: The unsupported type that was requested.
        supported_types: The collection of types that are supported.
    """

    def __init__(self, type_, supported_types):
        # Pass a human-readable message to ValueError so str(e) is
        # informative; previously it was empty. Callers can still inspect
        # .type and .supported_types programmatically.
        super().__init__(
            f"Type {type_} not supported, supported types are {supported_types}."
        )
        self.type = type_
        self.supported_types = supported_types
class DatasetMissingError(ValueError):
    """Raised when a routine is referenced by bare name but no dataset is
    available to resolve it (e.g. no session was provided)."""

    pass
def get_routine_reference(
    routine_ref_str: str, bigquery_client: bigquery.Client, session: Optional[Session]
) -> bigquery.RoutineReference:
    """Resolve a routine string into a fully-qualified RoutineReference.

    Accepts "<project_id>.<dataset_name>.<routine_name>",
    "<dataset_name>.<routine_name>", or a bare "<routine_name>" (the last
    requires a session to supply the dataset).

    Raises:
        DatasetMissingError: A bare routine name was given without a session.
    """
    try:
        # Fully or partially qualified names parse directly; the project
        # defaults to the client's project when omitted.
        return bigquery.RoutineReference.from_string(
            routine_ref_str,
            default_project=bigquery_client.project,
        )
    except ValueError:
        # Bare routine name: resolve against the session's anonymous dataset.
        if not session:
            raise DatasetMissingError
        anonymous_dataset = bigquery.DatasetReference(
            bigquery_client.project, session._anonymous_dataset.dataset_id
        )
        return anonymous_dataset.routine(routine_ref_str)
def remote_function(*args, **kwargs):
    # Delegate to a fresh FunctionSession so module-level calls each get
    # their own session-scoped state.
    return bff_session.FunctionSession().remote_function(*args, **kwargs)


remote_function.__doc__ = bff_session.FunctionSession.remote_function.__doc__
def udf(*args, **kwargs):
    # Delegate to a fresh FunctionSession so module-level calls each get
    # their own session-scoped state.
    return bff_session.FunctionSession().udf(*args, **kwargs)


udf.__doc__ = bff_session.FunctionSession.udf.__doc__
def _try_import_routine(
    routine: bigquery.Routine, session: bigframes.Session
) -> BigqueryCallableRoutine:
    """Wrap a BigQuery routine as a scalar callable routine for this session."""
    definition = _routine_as_udf_def(routine)
    list_output_type = _get_output_type_override(routine)
    if list_output_type is not None:
        # STRING-returning function with a declared list output type: attach
        # a post-processing step that unnests the serialized result.
        return BigqueryCallableRoutine(
            definition,
            session,
            post_routine=_utils.build_unnest_post_routine(list_output_type),
        )
    # Routines without remote-function options are managed (in-BQ) functions.
    has_remote_options = hasattr(routine, "remote_function_options") and bool(
        routine.remote_function_options
    )
    return BigqueryCallableRoutine(definition, session, is_managed=not has_remote_options)
def _try_import_row_routine(
    routine: bigquery.Routine, session: bigframes.Session
) -> BigqueryCallableRowRoutine:
    """Wrap a BigQuery routine as a row-processor callable routine for this session."""
    definition = _routine_as_udf_def(routine)
    list_output_type = _get_output_type_override(routine)
    if list_output_type is not None:
        # STRING-returning function with a declared list output type: attach
        # a post-processing step that unnests the serialized result.
        return BigqueryCallableRowRoutine(
            definition,
            session,
            post_routine=_utils.build_unnest_post_routine(list_output_type),
        )
    # Routines without remote-function options are managed (in-BQ) functions.
    has_remote_options = hasattr(routine, "remote_function_options") and bool(
        routine.remote_function_options
    )
    return BigqueryCallableRowRoutine(definition, session, is_managed=not has_remote_options)
def _routine_as_udf_def(routine: bigquery.Routine) -> udf_def.BigqueryUdf:
    """Convert a BigQuery routine into a BigqueryUdf definition.

    Raises:
        ValueError: If the routine lacks a return type, or uses a type
            BigFrames does not support (with a feedback link attached).
    """
    try:
        return udf_def.BigqueryUdf.from_routine(routine)
    except udf_def.ReturnTypeMissingError as exc:
        # Chain explicitly (B904) so the original cause survives in the
        # traceback instead of relying on implicit context.
        raise bf_formatting.create_exception_with_feedback_link(
            ValueError, "Function return type must be specified."
        ) from exc
    except function_typing.UnsupportedTypeError as exc:
        raise bf_formatting.create_exception_with_feedback_link(
            ValueError,
            f"Type {exc.type} not supported, supported types are {exc.supported_types}.",
        ) from exc
def _get_output_type_override(routine: bigquery.Routine) -> Optional[type[list]]:
    """Return the Python list output type recorded in the routine's metadata.

    BigFrames serializes list-returning functions through a STRING column and
    records the intended Python output type in the routine description.

    Returns:
        The declared ``list[...]`` type, or None when no override is recorded.

    Raises:
        TypeError: The metadata declares an output type but the routine does
            not return STRING, or the declared type is not a list type.
    """
    description = routine.description
    # isinstance(None, str) is False, so the separate "is not None" check in
    # the old code was redundant; a single guard clause suffices.
    if not isinstance(description, str):
        return None
    python_output_type = _utils.get_python_output_type_from_bigframes_metadata(
        description
    )
    if not python_output_type:
        return None
    bq_return_type = cast(bigquery.StandardSqlDataType, routine.return_type)
    # The override is only meaningful for STRING-serialized results.
    if bq_return_type is None or bq_return_type.type_kind != "STRING":
        raise bf_formatting.create_exception_with_feedback_link(
            TypeError,
            "An explicit output_type should be provided only for a BigQuery function with STRING output.",
        )
    if get_origin(python_output_type) is not list:
        raise bf_formatting.create_exception_with_feedback_link(
            TypeError,
            "Currently only list of "
            "a type is supported as python output type.",
        )
    return python_output_type
# TODO(b/399894805): Support managed function.
def read_gbq_function(
    function_name: str,
    *,
    session: Session,
    is_row_processor: bool = False,
):
    """
    Read an existing BigQuery function and prepare it for use in future queries.
    """
    bq_client = session.bqclient
    try:
        routine_ref = get_routine_reference(function_name, bq_client, session)
    except DatasetMissingError:
        raise bf_formatting.create_exception_with_feedback_link(
            ValueError,
            "Project and dataset must be provided, either directly or via session.",
        )
    # Fetch the routine definition so its signature can be inspected.
    try:
        routine = bq_client.get_routine(routine_ref)
    except google.api_core.exceptions.NotFound:
        raise bf_formatting.create_exception_with_feedback_link(
            ValueError, f"Unknown function '{routine_ref}'."
        )
    # Row processors consume the whole row; scalar routines take column args.
    importer = _try_import_row_routine if is_row_processor else _try_import_routine
    return importer(routine, session)
class BigqueryCallableRoutine:
    """
    A reference to a routine in the context of a session.
    Can be used both directly as a callable, or as an input to dataframe ops that take a callable.
    """

    def __init__(
        self,
        udf_def: udf_def.BigqueryUdf,
        session: bigframes.Session,
        *,
        local_func: Optional[Callable] = None,
        cloud_function_ref: Optional[str] = None,
        post_routine: Optional[
            Callable[[bigframes.series.Series], bigframes.series.Series]
        ] = None,
        is_managed: bool = False,
    ):
        # Definition (signature + routine reference) of the BigQuery routine.
        self._udf_def = udf_def
        self._session = session
        # Optional transformation applied to result series (e.g. unnesting).
        self._post_routine = post_routine
        # When set, direct calls run this local Python function instead of
        # issuing a BigQuery query.
        self._local_fun = local_func
        self._cloud_function = cloud_function_ref
        self._is_managed = is_managed

    def __call__(self, *args, **kwargs):
        """Invoke the routine with literal arguments and return the scalar result."""
        if self._local_fun:
            return self._local_fun(*args, **kwargs)
        # avoid circular imports
        import bigframes.core.sql as bf_sql
        import bigframes.session._io.bigquery as bf_io_bigquery

        args_string = ", ".join(map(bf_sql.simple_literal, args))
        sql = f"SELECT `{str(self._udf_def.routine_ref)}`({args_string})"
        # Renamed from `iter` (shadowed the builtin); the unused job handle is
        # discarded explicitly.
        rows, _ = bf_io_bigquery.start_query_with_client(self._session.bqclient, sql=sql, query_with_job=True, job_config=bigquery.QueryJobConfig())  # type: ignore
        # The query yields one row with one column; extract that lone scalar.
        return list(rows.to_arrow().to_pydict().values())[0][0]

    @property
    def bigframes_bigquery_function(self) -> str:
        """Fully-qualified name of the underlying BigQuery routine."""
        return str(self._udf_def.routine_ref)

    @property
    def bigframes_remote_function(self):
        """Routine name for remote functions; None for managed functions."""
        return None if self._is_managed else str(self._udf_def.routine_ref)

    @property
    def is_row_processor(self) -> bool:
        # Scalar routine: applied per column value, not per row.
        return False

    @property
    def udf_def(self) -> udf_def.BigqueryUdf:
        return self._udf_def

    @property
    def bigframes_cloud_function(self) -> Optional[str]:
        """Cloud Functions resource backing the routine, if any."""
        return self._cloud_function

    @property
    def input_dtypes(self):
        return self.udf_def.signature.bf_input_types

    @property
    def output_dtype(self):
        return self.udf_def.signature.bf_output_type

    @property
    def bigframes_bigquery_function_output_dtype(self):
        return self.output_dtype

    def _post_process_series(
        self, series: bigframes.series.Series
    ) -> bigframes.series.Series:
        """Apply the configured post-routine to a result series, if any."""
        if self._post_routine is not None:
            return self._post_routine(series)
        return series
class BigqueryCallableRowRoutine:
    """
    A reference to a routine in the context of a session.
    Can be used both directly as a callable, or as an input to dataframe ops that take a callable.
    """

    def __init__(
        self,
        udf_def: udf_def.BigqueryUdf,
        session: bigframes.Session,
        *,
        local_func: Optional[Callable] = None,
        cloud_function_ref: Optional[str] = None,
        post_routine: Optional[
            Callable[[bigframes.series.Series], bigframes.series.Series]
        ] = None,
        is_managed: bool = False,
    ):
        # Definition (signature + routine reference) of the BigQuery routine.
        self._udf_def = udf_def
        self._session = session
        # Optional transformation applied to result series (e.g. unnesting).
        self._post_routine = post_routine
        # When set, direct calls run this local Python function instead of
        # issuing a BigQuery query.
        self._local_fun = local_func
        self._cloud_function = cloud_function_ref
        self._is_managed = is_managed

    def __call__(self, *args, **kwargs):
        """Invoke the routine with literal arguments and return the scalar result."""
        if self._local_fun:
            return self._local_fun(*args, **kwargs)
        # avoid circular imports
        import bigframes.core.sql as bf_sql
        import bigframes.session._io.bigquery as bf_io_bigquery

        args_string = ", ".join(map(bf_sql.simple_literal, args))
        sql = f"SELECT `{str(self._udf_def.routine_ref)}`({args_string})"
        # Renamed from `iter` (shadowed the builtin); the unused job handle is
        # discarded explicitly.
        rows, _ = bf_io_bigquery.start_query_with_client(self._session.bqclient, sql=sql, query_with_job=True, job_config=bigquery.QueryJobConfig())  # type: ignore
        # The query yields one row with one column; extract that lone scalar.
        return list(rows.to_arrow().to_pydict().values())[0][0]

    @property
    def bigframes_bigquery_function(self) -> str:
        """Fully-qualified name of the underlying BigQuery routine."""
        return str(self._udf_def.routine_ref)

    @property
    def bigframes_remote_function(self):
        """Routine name for remote functions; None for managed functions."""
        return None if self._is_managed else str(self._udf_def.routine_ref)

    @property
    def is_row_processor(self) -> bool:
        # Row processor: consumes a whole row rather than a single column.
        return True

    @property
    def udf_def(self) -> udf_def.BigqueryUdf:
        return self._udf_def

    @property
    def bigframes_cloud_function(self) -> Optional[str]:
        """Cloud Functions resource backing the routine, if any."""
        return self._cloud_function

    @property
    def input_dtypes(self):
        return self.udf_def.signature.bf_input_types

    @property
    def output_dtype(self):
        return self.udf_def.signature.bf_output_type

    @property
    def bigframes_bigquery_function_output_dtype(self):
        return self.output_dtype

    def _post_process_series(
        self, series: bigframes.series.Series
    ) -> bigframes.series.Series:
        """Apply the configured post-routine to a result series, if any."""
        if self._post_routine is not None:
            return self._post_routine(series)
        return series