Skip to content

Commit d0ff584

Browse files
committed
Merge remote-tracking branch 'upstream/main' into tswast-pandas-gbq
2 parents 6258aaf + 485b03a commit d0ff584

File tree

9 files changed

+237
-21
lines changed

9 files changed

+237
-21
lines changed

.github/CODEOWNERS

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,10 @@
1212
* @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team
1313

1414
/packages/bigframes/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/bigquery-dataframe-team
15+
/packages/bigquery-magics/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/bigquery-dataframe-team
16+
/packages/django-google-spanner/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/spanner-team
1517
/packages/google-auth/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/cloud-sdk-auth-team @googleapis/aion-team
18+
/packages/google-cloud-bigquery*/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/bigquery-dataframe-team
19+
/packages/google-cloud-spanner/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/spanner-team
20+
/packages/pandas-gbq/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/bigquery-dataframe-team
21+
/packages/sqlalchemy-bigquery/ @googleapis/cloud-sdk-python-team @googleapis/cloud-sdk-librarian-team @googleapis/bigquery-dataframe-team

packages/bigquery-magics/bigquery_magics/bigquery.py

Lines changed: 15 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -78,8 +78,10 @@
7878
the variable name (ex. ``$my_dict_var``). See ``In[6]`` and ``In[7]``
7979
in the Examples section below.
8080
* ``--engine <engine>`` (Optional[line argument]):
81-
Set the execution engine, either 'pandas' (default) or 'bigframes'
82-
(experimental).
81+
[Deprecated] Set the execution engine, either 'pandas' (default) or
82+
'bigframes'.
83+
Please use ``%load_ext bigframes`` and the ``%%bqsql`` magic instead.
84+
See: https://dataframes.bigquery.dev/notebooks/getting_started/magics.html
8385
* ``--pyformat`` (Optional[line argument]):
8486
Warning! Do not use with user-provided values.
8587
This doesn't escape values. Use --params instead for proper SQL escaping.
@@ -397,8 +399,10 @@ def _create_dataset_if_necessary(client, dataset_id):
397399
type=str,
398400
default=None,
399401
help=(
400-
"Set the execution engine, either 'pandas' or 'bigframes'."
401-
"Defaults to engine set in the query setting in console."
402+
"[Deprecated] Set the execution engine, either 'pandas' or 'bigframes'. "
403+
"Defaults to engine set in the query setting in console. "
404+
"Please use %%load_ext bigframes and the %%%%bqsql magic instead. "
405+
"See: https://dataframes.bigquery.dev/notebooks/getting_started/magics.html"
402406
),
403407
)
404408
@magic_arguments.argument(
@@ -510,6 +514,13 @@ def _split_args_line(line: str) -> Tuple[str, str]:
510514

511515

512516
def _query_with_bigframes(query: str, params: List[Any], args: Any):
517+
warnings.warn(
518+
"The bigframes engine is deprecated. Please use %load_ext bigframes "
519+
"and the %%bqsql magic instead. "
520+
"See: https://dataframes.bigquery.dev/notebooks/getting_started/magics.html",
521+
FutureWarning,
522+
stacklevel=2,
523+
)
513524
if args.dry_run:
514525
raise ValueError("Dry run is not supported by bigframes engine.")
515526

packages/bigquery-magics/bigquery_magics/config.py

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
from dataclasses import dataclass
1616
from typing import Optional
17+
import warnings
1718

1819
import google.api_core.client_options as client_options
1920
import google.cloud.bigquery as bigquery
@@ -183,10 +184,15 @@ def project(self, value):
183184

184185
@property
185186
def engine(self) -> str:
186-
"""Engine to run the query. Could either be "pandas" or "bigframes".
187+
"""[Deprecated] Engine to run the query. Could either be "pandas" or
188+
"bigframes".
187189
188190
If using "pandas", the query result will be stored in a Pandas dataframe.
189-
If using "bigframes", the query result will be stored in a bigframes dataframe instead.
191+
If using "bigframes", the query result will be stored in a bigframes
192+
dataframe instead.
193+
194+
Please use ``%load_ext bigframes`` and the ``%%bqsql`` magic instead.
195+
See: https://dataframes.bigquery.dev/notebooks/getting_started/magics.html
190196
191197
Example:
192198
Manually setting the content engine:
@@ -200,6 +206,14 @@ def engine(self) -> str:
200206
def engine(self, value):
201207
if value != "pandas" and value != "bigframes":
202208
raise ValueError("engine must be either 'pandas' or 'bigframes'")
209+
if value == "bigframes":
210+
warnings.warn(
211+
"The bigframes engine is deprecated. Please use %load_ext bigframes "
212+
"and the %%bqsql magic instead. "
213+
"See: https://dataframes.bigquery.dev/notebooks/getting_started/magics.html",
214+
FutureWarning,
215+
stacklevel=2,
216+
)
203217
self._engine = value
204218

205219

Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
# Copyright 2026 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import pytest
16+
17+
from bigquery_magics import bigquery as magics
18+
import bigquery_magics.config
19+
20+
21+
@pytest.fixture(autouse=True)
22+
def mock_bq_client_and_credentials(mock_credentials):
23+
from unittest import mock
24+
25+
with mock.patch("google.cloud.bigquery.Client", autospec=True):
26+
with mock.patch("bigquery_magics.core.create_bq_client", autospec=True):
27+
yield
28+
29+
30+
def test_config_engine_setter_warning():
31+
context = bigquery_magics.config.Context()
32+
with pytest.warns(FutureWarning, match="The bigframes engine is deprecated"):
33+
context.engine = "bigframes"
34+
35+
36+
def test_query_with_bigframes_warning(mock_ipython):
37+
# Mocking bigframes.pandas since it might not be installed
38+
from unittest import mock
39+
40+
with mock.patch("bigquery_magics.bigquery.bpd") as mock_bpd:
41+
mock_bpd.read_gbq_query.return_value = mock.MagicMock()
42+
43+
args = mock.MagicMock()
44+
args.engine = "bigframes"
45+
args.dry_run = False
46+
args.max_results = None
47+
args.destination_var = None
48+
args.destination_table = None
49+
50+
with pytest.warns(FutureWarning, match="The bigframes engine is deprecated"):
51+
magics._query_with_bigframes("SELECT 1", [], args)
52+
53+
54+
def test_cell_magic_engine_bigframes_warning(mock_ipython):
55+
from unittest import mock
56+
57+
from IPython.testing.globalipapp import get_ipython
58+
59+
ip = get_ipython()
60+
if ip is None:
61+
from IPython.testing.globalipapp import start_ipython
62+
63+
ip = start_ipython()
64+
65+
ip.extension_manager.load_extension("bigquery_magics")
66+
67+
# Mock the actual execution to avoid needing real credentials/data
68+
with mock.patch("bigquery_magics.bigquery.bpd") as mock_bpd:
69+
mock_bpd.read_gbq_query.return_value = mock.MagicMock()
70+
with pytest.warns(FutureWarning, match="The bigframes engine is deprecated"):
71+
ip.run_cell_magic("bigquery", "--engine bigframes", "SELECT 1")
72+
73+
74+
@pytest.fixture
75+
def mock_ipython():
76+
from unittest import mock
77+
78+
with mock.patch("bigquery_magics.bigquery.get_ipython") as mock_get_ipython:
79+
yield mock_get_ipython

packages/google-api-core/google/api_core/_python_version_support.py

Lines changed: 15 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616

1717
import datetime
1818
import enum
19+
import functools
1920
import logging
2021
import warnings
2122
import sys
@@ -152,15 +153,16 @@ def _get_pypi_package_name(module_name): # pragma: NO COVER
152153
else:
153154
from importlib import metadata
154155

156+
@functools.cache
157+
def _cached_packages_distributions():
158+
return metadata.packages_distributions()
159+
155160
def _get_pypi_package_name(module_name):
156161
"""Determine the PyPI package name for a given module name."""
157162
try:
158-
# Get the mapping of modules to distributions
159-
module_to_distributions = metadata.packages_distributions()
163+
module_to_distributions = _cached_packages_distributions()
160164

161-
# Check if the module is found in the mapping
162165
if module_name in module_to_distributions: # pragma: NO COVER
163-
# The value is a list of distribution names, take the first one
164166
return module_to_distributions[module_name][0]
165167
except Exception as e: # pragma: NO COVER
166168
_LOGGER.info(
@@ -195,7 +197,6 @@ def check_python_version(
195197
The support status of the current Python version.
196198
"""
197199
today = today or datetime.date.today()
198-
package_label, _ = _get_distribution_and_import_packages(package)
199200

200201
python_version = sys.version_info
201202
version_tuple = (python_version.major, python_version.minor)
@@ -221,7 +222,14 @@ def min_python(date: datetime.date) -> str:
221222
return f"{version[0]}.{version[1]}"
222223
return "at a currently supported version [https://devguide.python.org/versions]"
223224

225+
# Resolve the pretty package label lazily so we avoid any work on
226+
# the happy path (supported Python version, no warning needed).
227+
def get_package_label():
228+
label, _ = _get_distribution_and_import_packages(package)
229+
return label
230+
224231
if gapic_end < today:
232+
package_label = get_package_label()
225233
message = _flatten_message(
226234
f"""
227235
You are using a non-supported Python version ({py_version_str}).
@@ -236,6 +244,7 @@ def min_python(date: datetime.date) -> str:
236244

237245
eol_date = version_info.python_eol + EOL_GRACE_PERIOD
238246
if eol_date <= today <= gapic_end:
247+
package_label = get_package_label()
239248
message = _flatten_message(
240249
f"""
241250
You are using a Python version ({py_version_str})
@@ -250,6 +259,7 @@ def min_python(date: datetime.date) -> str:
250259
return PythonVersionStatus.PYTHON_VERSION_EOL
251260

252261
if gapic_deprecation <= today <= gapic_end:
262+
package_label = get_package_label()
253263
message = _flatten_message(
254264
f"""
255265
You are using a Python version ({py_version_str}) which Google will

packages/google-cloud-spanner/google/cloud/spanner_v1/_helpers.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
from google.api_core.exceptions import Aborted
2929
from google.cloud._helpers import _date_from_iso8601_date
3030
from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
31-
from google.protobuf.message import Message
31+
from google.protobuf.message import DecodeError, Message
3232
from google.protobuf.struct_pb2 import ListValue, Value
3333
from google.rpc.error_details_pb2 import RetryInfo
3434

@@ -76,7 +76,7 @@
7676

7777
GOOGLE_CLOUD_REGION_GLOBAL = "global"
7878

79-
log = logging.getLogger(__name__)
79+
_LOGGER = logging.getLogger(__name__)
8080

8181
_cloud_region: str = None
8282

@@ -122,7 +122,7 @@ def _get_cloud_region() -> str:
122122
else:
123123
_cloud_region = GOOGLE_CLOUD_REGION_GLOBAL
124124
except Exception as e:
125-
log.warning(
125+
_LOGGER.warning(
126126
"Failed to detect GCP resource location for Spanner metrics, defaulting to 'global'. Error: %s",
127127
e,
128128
)
@@ -603,8 +603,14 @@ def _parse_proto(value_pb, column_info, field_name):
603603
default_proto_message = column_info.get(field_name)
604604
if isinstance(default_proto_message, Message):
605605
proto_message = type(default_proto_message)()
606-
proto_message.ParseFromString(bytes_value)
607-
return proto_message
606+
try:
607+
proto_message.ParseFromString(bytes_value)
608+
return proto_message
609+
except (DecodeError, RecursionError):
610+
_LOGGER.warning(
611+
"Field could not be parsed as Proto due to excessive nesting/corruption. Returning raw bytes."
612+
)
613+
return bytes_value
608614
return bytes_value
609615

610616

packages/google-cloud-spanner/tests/unit/test__helpers.py

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -771,6 +771,59 @@ def test_w_proto_message(self):
771771
self._callFUT(value_pb, field_type, field_name, column_info), VALUE
772772
)
773773

774+
def test_w_proto_message_decode_error(self):
775+
import base64
776+
from unittest import mock
777+
778+
from google.protobuf.message import DecodeError
779+
from google.protobuf.struct_pb2 import Value
780+
781+
from google.cloud.spanner_v1 import Type, TypeCode
782+
783+
from .testdata import singer_pb2
784+
785+
VALUE = singer_pb2.SingerInfo(singer_id=1, nationality="Canadian")
786+
field_type = Type(code=TypeCode.PROTO)
787+
field_name = "proto_message_column"
788+
raw_bytes = VALUE.SerializeToString()
789+
value_pb = Value(string_value=base64.b64encode(raw_bytes).decode("utf-8"))
790+
column_info = {"proto_message_column": singer_pb2.SingerInfo()}
791+
792+
# Mock ParseFromString to raise DecodeError
793+
with mock.patch.object(
794+
singer_pb2.SingerInfo,
795+
"ParseFromString",
796+
side_effect=DecodeError("Mock Decode Error"),
797+
):
798+
result = self._callFUT(value_pb, field_type, field_name, column_info)
799+
# Should return raw bytes
800+
self.assertEqual(result, raw_bytes)
801+
802+
def test_w_proto_message_recursion_error(self):
803+
import base64
804+
from unittest import mock
805+
806+
from google.protobuf.struct_pb2 import Value
807+
808+
from google.cloud.spanner_v1 import Type, TypeCode
809+
810+
from .testdata import singer_pb2
811+
812+
VALUE = singer_pb2.SingerInfo(singer_id=1, nationality="Canadian")
813+
field_type = Type(code=TypeCode.PROTO)
814+
field_name = "proto_message_column"
815+
raw_bytes = VALUE.SerializeToString()
816+
value_pb = Value(string_value=base64.b64encode(raw_bytes).decode("utf-8"))
817+
column_info = {"proto_message_column": singer_pb2.SingerInfo()}
818+
819+
with mock.patch.object(
820+
singer_pb2.SingerInfo,
821+
"ParseFromString",
822+
side_effect=RecursionError("Mock Recursion Error"),
823+
):
824+
result = self._callFUT(value_pb, field_type, field_name, column_info)
825+
self.assertEqual(result, raw_bytes)
826+
774827
def test_w_proto_enum(self):
775828
from google.protobuf.struct_pb2 import Value
776829

packages/google-cloud-storage/cloudbuild/run_zonal_tests.sh

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,12 @@ echo '--- Installing git and cloning repository on VM ---'
44
sudo apt-get update && sudo apt-get install -y git python3-pip python3-venv
55

66
# Clone the repository and checkout the specific commit from the build trigger.
7-
git clone https://github.com/googleapis/python-storage.git
8-
cd python-storage
7+
git clone --no-checkout --depth 1 --sparse --filter=blob:none https://github.com/googleapis/google-cloud-python.git
8+
cd google-cloud-python
9+
git sparse-checkout set packages/google-cloud-storage
910
git fetch origin "refs/pull/${_PR_NUMBER}/head"
1011
git checkout ${COMMIT_SHA}
12+
cd packages/google-cloud-storage
1113

1214

1315
echo '--- Installing Python and dependencies on VM ---'
@@ -27,4 +29,3 @@ export GCE_METADATA_MTLS_MODE=None
2729
CURRENT_ULIMIT=$(ulimit -n)
2830
echo '--- Running Zonal tests on VM with ulimit set to ---' $CURRENT_ULIMIT
2931
pytest -vv -s --log-format='%(asctime)s %(levelname)s %(message)s' --log-date-format='%H:%M:%S' tests/system/test_zonal.py
30-
pytest -vv -s --log-format='%(asctime)s %(levelname)s %(message)s' --log-date-format='%H:%M:%S' samples/snippets/zonal_buckets/zonal_snippets_test.py

0 commit comments

Comments
 (0)