Skip to content
This repository was archived by the owner on Nov 12, 2025. It is now read-only.

Commit f7ae897

Browse files
committed
fix unit test
1 parent b3e1b49 commit f7ae897

2 files changed

Lines changed: 14 additions & 7 deletions

File tree

samples/pyarrow/append_rows_with_arrow.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -159,16 +159,14 @@ def generate_write_request_with_pyarrow(num_rows=TABLE_LENGTH):
159159
# Determine max_chunksize of the record batches. Because max size of
160160
# AppendRowsRequest is 10 MB, we need to split the table if it's too big.
161161
# See: https://github.com/googleapis/googleapis/blob/27296636cf8797026124cd67034b42190ab602a4/google/cloud/bigquery/storage/v1/storage.proto#L422
162-
max_request_bytes = 10 * 2**20 # 10 MB
162+
max_request_bytes = 10 * 2**20 # 10 MB
163163
chunk_num = int(table.nbytes / max_request_bytes) + 1
164164
chunk_size = int(table.num_rows / chunk_num)
165165

166166
# Construct request(s).
167167
for batch in table.to_batches(max_chunksize=chunk_size):
168168
request = gapic_types.AppendRowsRequest()
169-
request.arrow_rows.rows.serialized_record_batch = (
170-
batch.serialize().to_pybytes()
171-
)
169+
request.arrow_rows.rows.serialized_record_batch = batch.serialize().to_pybytes()
172170
yield request
173171

174172

tests/unit/test_writer_v1.py

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -203,12 +203,12 @@ def _make_mock_client():
203203
return mock.create_autospec(big_query_write.BigQueryWriteClient)
204204

205205
@staticmethod
206-
def _make_mock_stream():
206+
def _make_mock_stream(initial_template=REQUEST_TEMPLATE):
207207
from google.cloud.bigquery_storage_v1.writer import _process_request_template
208208

209209
writer = mock.Mock()
210210
template = mock.PropertyMock(
211-
return_value=_process_request_template(REQUEST_TEMPLATE)
211+
return_value=_process_request_template(initial_template)
212212
)
213213
type(writer)._initial_request_template = template
214214
return writer
@@ -251,8 +251,17 @@ def test_is_active(self):
251251
def test_initial_send(self, background_consumer, bidi_rpc):
252252
from google.cloud.bigquery_storage_v1.writer import AppendRowsFuture
253253

254+
initial_request_template = gapic_types.AppendRowsRequest(
255+
write_stream="stream-name-from-REQUEST_TEMPLATE",
256+
offset=0,
257+
proto_rows=gapic_types.AppendRowsRequest.ProtoData(
258+
writer_schema=gapic_types.ProtoSchema(
259+
proto_descriptor=descriptor_pb2.DescriptorProto()
260+
)
261+
),
262+
)
254263
mock_client = self._make_mock_client()
255-
mock_stream = self._make_mock_stream()
264+
mock_stream = self._make_mock_stream(initial_template=initial_request_template)
256265
connection = self._make_one(mock_client, mock_stream)
257266

258267
type(bidi_rpc.return_value).is_active = mock.PropertyMock(

0 commit comments

Comments (0)