Skip to content
This repository was archived by the owner on Nov 12, 2025. It is now read-only.

Commit 8e2ef3a

Browse files
committed
add test fixtures
1 parent 7945006 commit 8e2ef3a

3 files changed

Lines changed: 44 additions & 8 deletions

File tree

samples/conftest.py

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import datetime
1516
import os
1617

1718
import pytest
@@ -20,3 +21,37 @@
2021
@pytest.fixture(scope="session")
def project_id():
    """Return the Google Cloud project under test (session-scoped).

    Raises:
        KeyError: if the GOOGLE_CLOUD_PROJECT environment variable is unset,
            failing fast at collection time rather than mid-test.
    """
    project = os.environ["GOOGLE_CLOUD_PROJECT"]
    return project
24+
25+
26+
@pytest.fixture(scope="session")
def dataset_id(dataset):
    """Return the name of the temporary BigQuery dataset (session-scoped).

    Bug fixed: the original returned ``os.environ["GOOGLE_CLOUD_PROJECT"]``
    — a copy-paste of ``project_id`` — so any test building a
    ``project.dataset.table`` path got ``<project>.<project>.<table>``.
    Derive the id from the session ``dataset`` fixture defined in this
    file instead, so tests target the dataset actually created for them.
    """
    # ``Dataset.dataset_id`` is the bare dataset name (no project prefix),
    # which is what callers combine with ``project_id``.
    return dataset.dataset_id
29+
30+
def _make_dataset(project_id, bq_client, location, dataset_name=None):
    """Create a BigQuery dataset in ``location`` and return it.

    Bug fixed: the original called ``prefixer.create_prefix()``, but no
    ``prefixer`` is defined or imported anywhere in this file, so the
    helper raised NameError on every call. A unique timestamped name is
    generated instead, matching the naming scheme the ``dataset`` fixture
    in this file already uses.

    Args:
        project_id: Project that will own the dataset.
        bq_client: An initialized ``bigquery.Client``.
        location: BigQuery location for the dataset, e.g. ``"us-east7"``.
        dataset_name: Optional explicit dataset name; when omitted (the
            backward-compatible default) a unique name is generated.

    Returns:
        The created ``google.cloud.bigquery.Dataset``.
    """
    from google.cloud import bigquery

    if dataset_name is None:
        # Timestamp to the second keeps concurrent sessions from colliding.
        suffix = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
        dataset_name = "samples_tests_" + suffix

    dataset_id = "{}.{}".format(project_id, dataset_name)
    dataset = bigquery.Dataset(dataset_id)
    dataset.location = location
    created_dataset = bq_client.create_dataset(dataset)

    return created_dataset
41+
42+
43+
@pytest.fixture(scope="session")
def dataset(project_id):
    """Create a throwaway BigQuery dataset for the whole test session.

    Yields the created ``google.cloud.bigquery.Dataset``, then deletes it
    (including any tables created inside it) when the session ends.
    """
    from google.cloud import bigquery

    bq_client = bigquery.Client()

    # Timestamped name so concurrent or leftover sessions do not collide.
    suffix = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
    name = "samples_tests_" + suffix

    new_dataset = bigquery.Dataset("{}.{}".format(project_id, name))
    new_dataset.location = "us-east7"
    created = bq_client.create_dataset(new_dataset)

    yield created

    # Session teardown: remove the dataset and everything in it.
    bq_client.delete_dataset(created, delete_contents=True)

samples/pyarrow/append_rows_with_arrow.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,14 @@
1515
# limitations under the License.
1616
import datetime
1717
import decimal
18-
import pandas as pd
19-
import pyarrow as pa
2018

2119
from google.cloud import bigquery
2220
from google.cloud.bigquery import enums
23-
2421
from google.cloud.bigquery_storage_v1 import types as gapic_types
2522
from google.cloud.bigquery_storage_v1.writer import AppendRowsStream
23+
import pandas as pd
24+
25+
import pyarrow as pa
2626

2727

2828
def bqstorage_write_client():
@@ -31,7 +31,7 @@ def bqstorage_write_client():
3131
return bigquery_storage_v1.BigQueryWriteClient()
3232

3333

34-
def make_table(project_id, dataset, table_id, bq_client):
34+
def make_table(project_id, dataset, bq_client):
3535
schema = [
3636
bigquery.SchemaField("bool_col", enums.SqlTypeNames.BOOLEAN),
3737
bigquery.SchemaField("int64_col", enums.SqlTypeNames.INT64),
@@ -59,6 +59,7 @@ def make_table(project_id, dataset, table_id, bq_client):
5959
range_element_type="TIMESTAMP",
6060
),
6161
]
62+
table_id = "append_rows_w_arrow_test"
6263
table_id_full = f"{project_id}.{dataset}.{table_id}"
6364
bq_table = bigquery.Table(table_id_full, schema=schema)
6465
created_table = bq_client.create_table(bq_table)
@@ -167,8 +168,8 @@ def append_rows(bqstorage_write_client, table):
167168
print(e)
168169

169170

170-
def main(project_id, dataset_id, table_id):
171+
def main(project_id, dataset_id):
    """Create the sample table and stream Arrow-encoded rows into it.

    Args:
        project_id: Google Cloud project that owns the dataset.
        dataset_id: Existing BigQuery dataset in which to create the table.
    """
    bq_client = bigquery.Client()
    write_client = bqstorage_write_client()
    table = make_table(project_id, dataset_id, bq_client)
    append_rows(write_client, table)

samples/pyarrow/append_rows_with_arrow_test.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from . import append_rows_with_arrow
1616

1717

18-
def test_read_query_results(capsys, project_id, dataset_id, table_id):
19-
append_rows_with_arrow.main(project_id, dataset_id, table_id)
18+
def test_read_query_results(capsys, project_id, dataset_id):
    """End-to-end check: run the sample and verify it printed an append result."""
    append_rows_with_arrow.main(project_id, dataset_id)
    captured = capsys.readouterr()
    assert "append_result" in captured.out

0 commit comments

Comments
 (0)