Skip to content

Commit 01a3f29

Browse files
committed
run nox -s format
1 parent e1f39f8 commit 01a3f29

File tree

292 files changed

+4075
-3730
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

292 files changed

+4075
-3730
lines changed

packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,8 @@
1414

1515
"""Google Cloud Bigtable API package."""
1616

17-
from google.cloud.bigtable.client import Client
18-
1917
from google.cloud.bigtable import gapic_version as package_version
18+
from google.cloud.bigtable.client import Client
2019

2120
__version__: str
2221

packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -14,13 +14,13 @@
1414

1515
"""User-friendly container for Google Cloud Bigtable AppProfile."""
1616

17-
1817
import re
1918

19+
from google.api_core.exceptions import NotFound
20+
from google.protobuf import field_mask_pb2
21+
2022
from google.cloud.bigtable.enums import RoutingPolicyType
2123
from google.cloud.bigtable_admin_v2.types import instance
22-
from google.protobuf import field_mask_pb2
23-
from google.api_core.exceptions import NotFound
2424

2525
_APP_PROFILE_NAME_RE = re.compile(
2626
r"^projects/(?P<project>[^/]+)/"
@@ -165,7 +165,7 @@ def from_pb(cls, app_profile_pb, instance):
165165
match_app_profile_name = _APP_PROFILE_NAME_RE.match(app_profile_pb.name)
166166
if match_app_profile_name is None:
167167
raise ValueError(
168-
"AppProfile protobuf name was not in the " "expected format.",
168+
"AppProfile protobuf name was not in the expected format.",
169169
app_profile_pb.name,
170170
)
171171
if match_app_profile_name.group("instance") != instance.instance_id:
@@ -175,8 +175,7 @@ def from_pb(cls, app_profile_pb, instance):
175175
)
176176
if match_app_profile_name.group("project") != instance._client.project:
177177
raise ValueError(
178-
"Project ID on app_profile does not match the "
179-
"project ID on the client"
178+
"Project ID on app_profile does not match the project ID on the client"
180179
)
181180
app_profile_id = match_app_profile_name.group("app_profile_id")
182181

packages/google-cloud-bigtable/google/cloud/bigtable/backup.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,14 @@
1717
import re
1818

1919
from google.cloud._helpers import _datetime_to_pb_timestamp # type: ignore
20-
from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient
21-
from google.cloud.bigtable_admin_v2.types import table
22-
from google.cloud.bigtable.encryption_info import EncryptionInfo
23-
from google.cloud.bigtable.policy import Policy
2420
from google.cloud.exceptions import NotFound # type: ignore
2521
from google.protobuf import field_mask_pb2
2622

23+
from google.cloud.bigtable.encryption_info import EncryptionInfo
24+
from google.cloud.bigtable.policy import Policy
25+
from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient
26+
from google.cloud.bigtable_admin_v2.types import table
27+
2728
_BACKUP_NAME_RE = re.compile(
2829
r"^projects/(?P<project>[^/]+)/"
2930
r"instances/(?P<instance_id>[a-z][-a-z0-9]*)/"

packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -13,15 +13,14 @@
1313
# limitations under the License.
1414

1515
"""User friendly container for Google Cloud Bigtable MutationBatcher."""
16-
import threading
17-
import queue
18-
import concurrent.futures
19-
import atexit
20-
2116

22-
from google.api_core.exceptions import from_grpc_status
17+
import atexit
18+
import concurrent.futures
19+
import queue
20+
import threading
2321
from dataclasses import dataclass
2422

23+
from google.api_core.exceptions import from_grpc_status
2524

2625
FLUSH_COUNT = 100 # after this many elements, send out the batch
2726

packages/google-cloud-bigtable/google/cloud/bigtable/client.py

Lines changed: 9 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -27,33 +27,27 @@
2727
* a :class:`~google.cloud.bigtable.table.Table` owns a
2828
:class:`~google.cloud.bigtable.row.Row` (and all the cells in the row)
2929
"""
30+
3031
import os
3132
import warnings
32-
import grpc # type: ignore
3333

34+
import grpc # type: ignore
3435
from google.api_core.gapic_v1 import client_info as client_info_lib
3536
from google.auth.credentials import AnonymousCredentials # type: ignore
37+
from google.cloud.client import ClientWithProject # type: ignore
38+
from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore
3639

37-
from google.cloud import bigtable_v2
38-
from google.cloud import bigtable_admin_v2
39-
from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport
40+
from google.cloud import bigtable, bigtable_admin_v2, bigtable_v2
41+
from google.cloud.bigtable.cluster import _CLUSTER_NAME_RE, Cluster
42+
from google.cloud.bigtable.instance import Instance
4043
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports import (
4144
BigtableInstanceAdminGrpcTransport,
4245
)
4346
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports import (
4447
BigtableTableAdminGrpcTransport,
4548
)
46-
47-
from google.cloud import bigtable
48-
from google.cloud.bigtable.instance import Instance
49-
from google.cloud.bigtable.cluster import Cluster
50-
51-
from google.cloud.client import ClientWithProject # type: ignore
52-
5349
from google.cloud.bigtable_admin_v2.types import instance
54-
from google.cloud.bigtable.cluster import _CLUSTER_NAME_RE
55-
from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore
56-
50+
from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport
5751

5852
INSTANCE_TYPE_PRODUCTION = instance.Instance.Type.PRODUCTION
5953
INSTANCE_TYPE_DEVELOPMENT = instance.Instance.Type.DEVELOPMENT
@@ -164,7 +158,7 @@ def __init__(
164158
)
165159
if read_only and admin:
166160
raise ValueError(
167-
"A read-only client cannot also perform" "administrative actions."
161+
"A read-only client cannot also perform administrative actions."
168162
)
169163

170164
# NOTE: We set the scopes **before** calling the parent constructor.

packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py

Lines changed: 11 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -14,12 +14,12 @@
1414

1515
"""User friendly container for Google Cloud Bigtable Cluster."""
1616

17-
1817
import re
19-
from google.cloud.bigtable_admin_v2.types import instance
18+
2019
from google.api_core.exceptions import NotFound
2120
from google.protobuf import field_mask_pb2
2221

22+
from google.cloud.bigtable_admin_v2.types import instance
2323

2424
_CLUSTER_NAME_RE = re.compile(
2525
r"^projects/(?P<project>[^/]+)/"
@@ -166,16 +166,16 @@ def from_pb(cls, cluster_pb, instance):
166166
match_cluster_name = _CLUSTER_NAME_RE.match(cluster_pb.name)
167167
if match_cluster_name is None:
168168
raise ValueError(
169-
"Cluster protobuf name was not in the " "expected format.",
169+
"Cluster protobuf name was not in the expected format.",
170170
cluster_pb.name,
171171
)
172172
if match_cluster_name.group("instance") != instance.instance_id:
173173
raise ValueError(
174-
"Instance ID on cluster does not match the " "instance ID on the client"
174+
"Instance ID on cluster does not match the instance ID on the client"
175175
)
176176
if match_cluster_name.group("project") != instance._client.project:
177177
raise ValueError(
178-
"Project ID on cluster does not match the " "project ID on the client"
178+
"Project ID on cluster does not match the project ID on the client"
179179
)
180180
cluster_id = match_cluster_name.group("cluster_id")
181181

@@ -191,15 +191,9 @@ def _update_from_pb(self, cluster_pb):
191191
self.location_id = cluster_pb.location.split("/")[-1]
192192
self.serve_nodes = cluster_pb.serve_nodes
193193

194-
self.min_serve_nodes = (
195-
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes
196-
)
197-
self.max_serve_nodes = (
198-
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes
199-
)
200-
self.cpu_utilization_percent = (
201-
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent
202-
)
194+
self.min_serve_nodes = cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes
195+
self.max_serve_nodes = cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes
196+
self.cpu_utilization_percent = cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent
203197

204198
self.default_storage_type = cluster_pb.default_storage_type
205199
if cluster_pb.encryption_config:
@@ -528,16 +522,10 @@ def _to_pb(self):
528522
)
529523

530524
if self.min_serve_nodes:
531-
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes = (
532-
self.min_serve_nodes
533-
)
525+
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes = self.min_serve_nodes
534526
if self.max_serve_nodes:
535-
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes = (
536-
self.max_serve_nodes
537-
)
527+
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes = self.max_serve_nodes
538528
if self.cpu_utilization_percent:
539-
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent = (
540-
self.cpu_utilization_percent
541-
)
529+
cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent = self.cpu_utilization_percent
542530

543531
return cluster_pb

packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,13 +14,13 @@
1414

1515
"""User friendly container for Google Cloud Bigtable Column Family."""
1616

17+
from google.api_core.gapic_v1.method import DEFAULT
1718

1819
from google.cloud import _helpers
19-
from google.cloud.bigtable_admin_v2.types import table as table_v2_pb2
2020
from google.cloud.bigtable_admin_v2.types import (
2121
bigtable_table_admin as table_admin_v2_pb2,
2222
)
23-
from google.api_core.gapic_v1.method import DEFAULT
23+
from google.cloud.bigtable_admin_v2.types import table as table_v2_pb2
2424

2525

2626
class GarbageCollectionRule(object):

packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py

Lines changed: 40 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -14,56 +14,56 @@
1414
# limitations under the License.
1515
#
1616
from google.cloud.bigtable import gapic_version as package_version
17-
18-
from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
19-
from google.cloud.bigtable.data._async.client import TableAsync
20-
from google.cloud.bigtable.data._async.client import AuthorizedViewAsync
17+
from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync
18+
from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
19+
from google.cloud.bigtable.data._async.client import (
20+
AuthorizedViewAsync,
21+
BigtableDataClientAsync,
22+
TableAsync,
23+
)
2124
from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync
22-
from google.cloud.bigtable.data._sync_autogen.client import BigtableDataClient
23-
from google.cloud.bigtable.data._sync_autogen.client import Table
24-
from google.cloud.bigtable.data._sync_autogen.client import AuthorizedView
25+
from google.cloud.bigtable.data._cross_sync import CrossSync
26+
from google.cloud.bigtable.data._helpers import (
27+
TABLE_DEFAULT,
28+
RowKeySamples,
29+
ShardedQuery,
30+
)
31+
from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation
32+
from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation
33+
from google.cloud.bigtable.data._sync_autogen.client import (
34+
AuthorizedView,
35+
BigtableDataClient,
36+
Table,
37+
)
2538
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import MutationsBatcher
26-
27-
from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
28-
from google.cloud.bigtable.data.read_rows_query import RowRange
29-
from google.cloud.bigtable.data.row import Row
30-
from google.cloud.bigtable.data.row import Cell
31-
32-
from google.cloud.bigtable.data.mutations import Mutation
33-
from google.cloud.bigtable.data.mutations import RowMutationEntry
34-
from google.cloud.bigtable.data.mutations import AddToCell
35-
from google.cloud.bigtable.data.mutations import SetCell
36-
from google.cloud.bigtable.data.mutations import DeleteRangeFromColumn
37-
from google.cloud.bigtable.data.mutations import DeleteAllFromFamily
38-
from google.cloud.bigtable.data.mutations import DeleteAllFromRow
39-
40-
from google.cloud.bigtable.data.exceptions import InvalidChunk
41-
from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
42-
from google.cloud.bigtable.data.exceptions import FailedQueryShardError
43-
44-
from google.cloud.bigtable.data.exceptions import RetryExceptionGroup
45-
from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
46-
from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
47-
from google.cloud.bigtable.data.exceptions import ParameterTypeInferenceFailed
48-
49-
from google.cloud.bigtable.data._helpers import TABLE_DEFAULT
50-
from google.cloud.bigtable.data._helpers import RowKeySamples
51-
from google.cloud.bigtable.data._helpers import ShardedQuery
39+
from google.cloud.bigtable.data.exceptions import (
40+
FailedMutationEntryError,
41+
FailedQueryShardError,
42+
InvalidChunk,
43+
MutationsExceptionGroup,
44+
ParameterTypeInferenceFailed,
45+
RetryExceptionGroup,
46+
ShardedReadRowsExceptionGroup,
47+
)
48+
from google.cloud.bigtable.data.mutations import (
49+
AddToCell,
50+
DeleteAllFromFamily,
51+
DeleteAllFromRow,
52+
DeleteRangeFromColumn,
53+
Mutation,
54+
RowMutationEntry,
55+
SetCell,
56+
)
57+
from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange
58+
from google.cloud.bigtable.data.row import Cell, Row
5259

5360
# setup custom CrossSync mappings for library
5461
from google.cloud.bigtable_v2.services.bigtable.async_client import (
5562
BigtableAsyncClient,
5663
)
57-
from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
58-
from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync
59-
6064
from google.cloud.bigtable_v2.services.bigtable.client import (
6165
BigtableClient,
6266
)
63-
from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation
64-
from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation
65-
66-
from google.cloud.bigtable.data._cross_sync import CrossSync
6767

6868
CrossSync.add_mapping("GapicClient", BigtableAsyncClient)
6969
CrossSync._Sync_Impl.add_mapping("GapicClient", BigtableClient)

packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,9 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
16-
from google.cloud.bigtable.data._async.client import TableAsync
17-
15+
from google.cloud.bigtable.data._async.client import BigtableDataClientAsync, TableAsync
1816
from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync
1917

20-
2118
__all__ = [
2219
"BigtableDataClientAsync",
2320
"TableAsync",

packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py

Lines changed: 18 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -14,38 +14,42 @@
1414
#
1515
from __future__ import annotations
1616

17-
from typing import Sequence, TYPE_CHECKING
17+
from typing import TYPE_CHECKING, Sequence
1818

1919
from google.api_core import exceptions as core_exceptions
2020
from google.api_core import retry as retries
21-
import google.cloud.bigtable_v2.types.bigtable as types_pb
21+
2222
import google.cloud.bigtable.data.exceptions as bt_exceptions
23-
from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
24-
from google.cloud.bigtable.data._helpers import _retry_exception_factory
23+
import google.cloud.bigtable_v2.types.bigtable as types_pb
24+
from google.cloud.bigtable.data._cross_sync import CrossSync
25+
from google.cloud.bigtable.data._helpers import (
26+
_attempt_timeout_generator,
27+
_retry_exception_factory,
28+
)
2529

2630
# mutate_rows requests are limited to this number of mutations
27-
from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
28-
from google.cloud.bigtable.data.mutations import _EntryWithProto
29-
30-
from google.cloud.bigtable.data._cross_sync import CrossSync
31+
from google.cloud.bigtable.data.mutations import (
32+
_MUTATE_ROWS_REQUEST_MUTATION_LIMIT,
33+
_EntryWithProto,
34+
)
3135

3236
if TYPE_CHECKING:
3337
from google.cloud.bigtable.data.mutations import RowMutationEntry
3438

3539
if CrossSync.is_async:
36-
from google.cloud.bigtable_v2.services.bigtable.async_client import (
37-
BigtableAsyncClient as GapicClientType,
38-
)
3940
from google.cloud.bigtable.data._async.client import ( # type: ignore
4041
_DataApiTargetAsync as TargetType,
4142
)
42-
else:
43-
from google.cloud.bigtable_v2.services.bigtable.client import ( # type: ignore
44-
BigtableClient as GapicClientType,
43+
from google.cloud.bigtable_v2.services.bigtable.async_client import (
44+
BigtableAsyncClient as GapicClientType,
4545
)
46+
else:
4647
from google.cloud.bigtable.data._sync_autogen.client import ( # type: ignore
4748
_DataApiTarget as TargetType,
4849
)
50+
from google.cloud.bigtable_v2.services.bigtable.client import ( # type: ignore
51+
BigtableClient as GapicClientType,
52+
)
4953

5054
__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen._mutate_rows"
5155

0 commit comments

Comments
 (0)