Skip to content

Commit c4bd938

Browse files
chore(bigtable): use ruff for generated sync files (#16713)
Bigtable uses CrossSync to auto-generate sync classes from async ones. Previously, Bigtable used black for formatting, so generated files were formatted in the same way. This PR changes the formatter to ruff, so generated files match the rest of the repo.
1 parent 025d152 commit c4bd938

21 files changed

Lines changed: 151 additions & 77 deletions

packages/google-cloud-bigtable/.cross_sync/generate.py

Lines changed: 45 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -51,18 +51,55 @@ def render(self, with_formatter=True, save_to_disk: bool = True) -> str:
5151
Render the file to a string, and optionally save to disk
5252
5353
Args:
54-
with_formatter: whether to run the output through black before returning
54+
with_formatter: whether to run the output through ruff before returning
5555
save_to_disk: whether to write the output to the file path
5656
"""
5757
full_str = self.header + ast.unparse(self.tree)
5858
if with_formatter:
59-
import black # type: ignore
60-
import autoflake # type: ignore
61-
62-
full_str = black.format_str(
63-
autoflake.fix_code(full_str, remove_all_unused_imports=True),
64-
mode=black.FileMode(),
65-
)
59+
import subprocess
60+
import sys
61+
62+
try:
63+
# Run ruff check
64+
result = subprocess.run(
65+
[
66+
sys.executable,
67+
"-m",
68+
"ruff",
69+
"check",
70+
"--select",
71+
"I,F401",
72+
"--fix",
73+
"--line-length=88",
74+
"-",
75+
],
76+
input=full_str,
77+
text=True,
78+
capture_output=True,
79+
check=True,
80+
)
81+
full_str = result.stdout
82+
83+
# Run ruff format
84+
result = subprocess.run(
85+
[
86+
sys.executable,
87+
"-m",
88+
"ruff",
89+
"format",
90+
"--line-length=88",
91+
"--config",
92+
"format.skip-magic-trailing-comma=true",
93+
"-",
94+
],
95+
input=full_str,
96+
text=True,
97+
capture_output=True,
98+
check=True,
99+
)
100+
full_str = result.stdout
101+
except subprocess.CalledProcessError as e:
102+
print(f"ruff formatting failed: {e.stderr}")
66103
if save_to_disk:
67104
import os
68105
os.makedirs(os.path.dirname(self.output_path), exist_ok=True)

packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,12 @@
1616
# This file is automatically generated by CrossSync. Do not edit manually.
1717

1818
from __future__ import annotations
19+
1920
from typing import TYPE_CHECKING, Sequence
21+
2022
from google.api_core import exceptions as core_exceptions
2123
from google.api_core import retry as retries
24+
2225
import google.cloud.bigtable.data.exceptions as bt_exceptions
2326
import google.cloud.bigtable_v2.types.bigtable as types_pb
2427
from google.cloud.bigtable.data._cross_sync import CrossSync
@@ -32,10 +35,10 @@
3235
)
3336

3437
if TYPE_CHECKING:
35-
from google.cloud.bigtable.data.mutations import RowMutationEntry
3638
from google.cloud.bigtable.data._sync_autogen.client import (
3739
_DataApiTarget as TargetType,
3840
)
41+
from google.cloud.bigtable.data.mutations import RowMutationEntry
3942
from google.cloud.bigtable_v2.services.bigtable.client import (
4043
BigtableClient as GapicClientType,
4144
)

packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,12 @@
1717
# This file is automatically generated by CrossSync. Do not edit manually.
1818

1919
from __future__ import annotations
20+
2021
from typing import TYPE_CHECKING, Sequence
22+
2123
from google.api_core import retry as retries
2224
from google.api_core.retry import exponential_sleep_generator
25+
2326
from google.cloud.bigtable.data._cross_sync import CrossSync
2427
from google.cloud.bigtable.data._helpers import (
2528
_attempt_timeout_generator,

packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_swappable_channel.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,10 @@
1616
# This file is automatically generated by CrossSync. Do not edit manually.
1717

1818
from __future__ import annotations
19+
1920
from typing import Callable
20-
from grpc import ChannelConnectivity
21-
from grpc import Channel
21+
22+
from grpc import Channel, ChannelConnectivity
2223

2324

2425
class _WrappedChannel(Channel):

packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py

Lines changed: 24 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -17,14 +17,16 @@
1717
# This file is automatically generated by CrossSync. Do not edit manually.
1818

1919
from __future__ import annotations
20+
2021
import abc
2122
import concurrent.futures
2223
import os
2324
import random
2425
import time
2526
import warnings
2627
from functools import partial
27-
from typing import TYPE_CHECKING, Any, Callable, Optional, Sequence, Set, cast
28+
from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional, Sequence, Set, cast
29+
2830
import google.auth._default
2931
import google.auth.credentials
3032
from google.api_core import client_options as client_options_lib
@@ -39,7 +41,8 @@
3941
from google.cloud.environment_vars import BIGTABLE_EMULATOR
4042
from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
4143
from google.protobuf.message import Message
42-
from grpc import Channel
44+
from grpc import Channel, insecure_channel, intercept_channel
45+
4346
from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT
4447
from google.cloud.bigtable.data._cross_sync import CrossSync
4548
from google.cloud.bigtable.data._helpers import (
@@ -55,6 +58,13 @@
5558
_WarmedInstanceKey,
5659
)
5760
from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController
61+
from google.cloud.bigtable.data._sync_autogen._swappable_channel import (
62+
SwappableChannel as SwappableChannelType,
63+
)
64+
from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import (
65+
BigtableMetricsInterceptor as MetricsInterceptorType,
66+
)
67+
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE
5868
from google.cloud.bigtable.data.exceptions import (
5969
FailedQueryShardError,
6070
ShardedReadRowsExceptionGroup,
@@ -78,6 +88,10 @@
7888
RowFilterChain,
7989
StripValueTransformerFilter,
8090
)
91+
from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient
92+
from google.cloud.bigtable_v2.services.bigtable.transports import (
93+
BigtableGrpcTransport as TransportType,
94+
)
8195
from google.cloud.bigtable_v2.services.bigtable.transports.base import (
8296
DEFAULT_CLIENT_INFO,
8397
)
@@ -88,19 +102,6 @@
88102
ReadModifyWriteRowRequest,
89103
SampleRowKeysRequest,
90104
)
91-
from typing import Iterable
92-
from grpc import insecure_channel, intercept_channel
93-
from google.cloud.bigtable.data._sync_autogen._swappable_channel import (
94-
SwappableChannel as SwappableChannelType,
95-
)
96-
from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import (
97-
BigtableMetricsInterceptor as MetricsInterceptorType,
98-
)
99-
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE
100-
from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient
101-
from google.cloud.bigtable_v2.services.bigtable.transports import (
102-
BigtableGrpcTransport as TransportType,
103-
)
104105

105106
if TYPE_CHECKING:
106107
from google.cloud.bigtable.data._helpers import RowKeySamples, ShardedQuery
@@ -297,8 +298,7 @@ def _ping_and_warm_instances(
297298
instance_key: if provided, only warm the instance associated with the key
298299
channel: grpc channel to warm. If none, warms `self.transport.grpc_channel`
299300
Returns:
300-
list[BaseException | None]: sequence of results or exceptions from the ping requests
301-
"""
301+
list[BaseException | None]: sequence of results or exceptions from the ping requests"""
302302
channel = channel or self.transport.grpc_channel
303303
instance_list = (
304304
[instance_key] if instance_key is not None else self._active_instances
@@ -654,8 +654,7 @@ def execute_query(
654654
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
655655
google.cloud.bigtable.data.exceptions.ParameterTypeInferenceFailed: Raised if
656656
a parameter is passed without an explicit type, and the type cannot be inferred
657-
google.protobuf.message.DecodeError: raised if the deserialization of a PROTO/ENUM value fails.
658-
"""
657+
google.protobuf.message.DecodeError: raised if the deserialization of a PROTO/ENUM value fails."""
659658
instance_name = self._gapic_client.instance_path(self.project, instance_id)
660659
converted_param_types = _to_param_types(parameters, parameter_types)
661660
prepare_request = {
@@ -893,8 +892,7 @@ def read_rows_stream(
893892
google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
894893
will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
895894
from any retries that failed
896-
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
897-
"""
895+
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error"""
898896
operation_timeout, attempt_timeout = _get_timeouts(
899897
operation_timeout, attempt_timeout, self
900898
)
@@ -944,8 +942,7 @@ def read_rows(
944942
google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
945943
will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
946944
from any retries that failed
947-
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
948-
"""
945+
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error"""
949946
row_generator = self.read_rows_stream(
950947
query,
951948
operation_timeout=operation_timeout,
@@ -987,8 +984,7 @@ def read_row(
987984
google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
988985
will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
989986
from any retries that failed
990-
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
991-
"""
987+
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error"""
992988
if row_key is None:
993989
raise ValueError("row_key must be string or bytes")
994990
query = ReadRowsQuery(row_keys=row_key, row_filter=row_filter, limit=1)
@@ -1121,8 +1117,7 @@ def row_exists(
11211117
google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
11221118
will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
11231119
from any retries that failed
1124-
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
1125-
"""
1120+
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error"""
11261121
if row_key is None:
11271122
raise ValueError("row_key must be string or bytes")
11281123
strip_filter = StripValueTransformerFilter(flag=True)
@@ -1172,8 +1167,7 @@ def sample_row_keys(
11721167
google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
11731168
will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
11741169
from any retries that failed
1175-
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
1176-
"""
1170+
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error"""
11771171
operation_timeout, attempt_timeout = _get_timeouts(
11781172
operation_timeout, attempt_timeout, self
11791173
)
@@ -1236,8 +1230,7 @@ def mutations_batcher(
12361230
batch_retryable_errors: a list of errors that will be retried if encountered.
12371231
Defaults to the Table's default_mutate_rows_retryable_errors.
12381232
Returns:
1239-
MutationsBatcher: a MutationsBatcher context manager that can batch requests
1240-
"""
1233+
MutationsBatcher: a MutationsBatcher context manager that can batch requests"""
12411234
return CrossSync._Sync_Impl.MutationsBatcher(
12421235
self,
12431236
flush_interval=flush_interval,

packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,15 +15,18 @@
1515
# This file is automatically generated by CrossSync. Do not edit manually.
1616

1717
from __future__ import annotations
18+
1819
import time
1920
from functools import wraps
2021
from typing import Sequence
22+
23+
from grpc import UnaryStreamClientInterceptor, UnaryUnaryClientInterceptor
24+
2125
from google.cloud.bigtable.data._metrics.data_model import (
2226
ActiveOperationMetric,
2327
OperationState,
2428
OperationType,
2529
)
26-
from grpc import UnaryStreamClientInterceptor, UnaryUnaryClientInterceptor
2730

2831

2932
def _with_active_operation(func):

packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,11 +16,13 @@
1616
# This file is automatically generated by CrossSync. Do not edit manually.
1717

1818
from __future__ import annotations
19+
1920
import atexit
2021
import concurrent.futures
2122
import warnings
2223
from collections import deque
2324
from typing import TYPE_CHECKING, Sequence, cast
25+
2426
from google.cloud.bigtable.data._cross_sync import CrossSync
2527
from google.cloud.bigtable.data._helpers import (
2628
TABLE_DEFAULT,
@@ -37,10 +39,10 @@
3739
)
3840

3941
if TYPE_CHECKING:
40-
from google.cloud.bigtable.data.mutations import RowMutationEntry
4142
from google.cloud.bigtable.data._sync_autogen.client import (
4243
_DataApiTarget as TargetType,
4344
)
45+
from google.cloud.bigtable.data.mutations import RowMutationEntry
4446
_MB_SIZE = 1024 * 1024
4547

4648

@@ -86,8 +88,7 @@ def _has_capacity(self, additional_count: int, additional_size: int) -> bool:
8688
additional_count: number of mutations in the pending entry
8789
additional_size: size of the pending entry
8890
Returns:
89-
bool: True if there is capacity to send the pending entry, False otherwise
90-
"""
91+
bool: True if there is capacity to send the pending entry, False otherwise"""
9192
acceptable_size = max(self._max_mutation_bytes, additional_size)
9293
acceptable_count = max(self._max_mutation_count, additional_count)
9394
new_size = self._in_flight_mutation_bytes + additional_size
@@ -440,8 +441,7 @@ def _wait_for_batch_results(
440441
list of Exceptions encountered by any of the tasks. Errors are expected
441442
to be FailedMutationEntryError, representing a failed mutation operation.
442443
If a task fails with a different exception, it will be included in the
443-
output list. Successful tasks will not be represented in the output list.
444-
"""
444+
output list. Successful tasks will not be represented in the output list."""
445445
if not tasks:
446446
return []
447447
exceptions: list[Exception] = []

packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,13 @@
1616
# This file is automatically generated by CrossSync. Do not edit manually.
1717

1818
from __future__ import annotations
19+
1920
from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Tuple
21+
2022
from google.api_core import retry as retries
2123
from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
2224
from google.protobuf.message import Message
25+
2326
from google.cloud.bigtable.data._cross_sync import CrossSync
2427
from google.cloud.bigtable.data._helpers import (
2528
_attempt_timeout_generator,
@@ -89,8 +92,7 @@ def __init__(
8992
for protobuf deserialization.
9093
Raises:
9194
None
92-
:class:`ValueError <exceptions.ValueError>` as a safeguard if data is processed in an unexpected state
93-
"""
95+
:class:`ValueError <exceptions.ValueError>` as a safeguard if data is processed in an unexpected state"""
9496
self._table_name = None
9597
self._app_profile_id = app_profile_id
9698
self._client = client
@@ -193,8 +195,7 @@ def _next_impl(self) -> CrossSync._Sync_Impl.Iterator[QueryResultRow]:
193195
def __next__(self) -> QueryResultRow:
194196
"""Yields QueryResultRows representing the results of the query.
195197
196-
:raises: :class:`ValueError <exceptions.ValueError>` as a safeguard if data is processed in an unexpected state
197-
"""
198+
:raises: :class:`ValueError <exceptions.ValueError>` as a safeguard if data is processed in an unexpected state"""
198199
if self._is_closed:
199200
raise CrossSync._Sync_Impl.StopIteration
200201
return self._result_generator.__next__()
@@ -242,8 +243,7 @@ def close(self) -> None:
242243
243244
Called automatically by iterator
244245
245-
:raises: :class:`ValueError <exceptions.ValueError>` if called in an invalid state
246-
"""
246+
:raises: :class:`ValueError <exceptions.ValueError>` if called in an invalid state"""
247247
self._close_internal()
248248

249249
def _close_internal(self) -> None:

packages/google-cloud-bigtable/noxfile.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -43,8 +43,7 @@
4343
"pytest",
4444
"pytest-cov",
4545
"pytest-asyncio",
46-
BLACK_VERSION,
47-
"autoflake",
46+
RUFF_VERSION,
4847
]
4948
UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = []
5049
UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = []
@@ -553,8 +552,7 @@ def generate_sync(session):
553552
"""
554553
Re-generate sync files for the library from CrossSync-annotated async source
555554
"""
556-
session.install(BLACK_VERSION)
557-
session.install("autoflake")
555+
session.install(RUFF_VERSION)
558556
session.run("python", ".cross_sync/generate.py", ".")
559557

560558

0 commit comments

Comments
 (0)