Skip to content

Commit b1e4402

Browse files
committed
fix conflict
1 parent caf6617 commit b1e4402

2 files changed

Lines changed: 61 additions & 1 deletion

File tree

sdks/python/apache_beam/yaml/examples/testing/examples_test.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -683,7 +683,8 @@ def _kafka_test_preprocessor(
683683
'test_wordCountInclude_yaml',
684684
'test_wordCountImport_yaml',
685685
'test_wordCountInheritance_yaml',
686-
'test_iceberg_to_alloydb_yaml'
686+
'test_iceberg_to_alloydb_yaml',
687+
'test_bigquery_write_yaml'
687688
])
688689
def _io_write_test_preprocessor(
689690
test_spec: dict, expected: List[str], env: TestEnvironment):
Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
# coding=utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Example Beam YAML pipeline: write rows to BigQuery using Dynamic
# Destinations, routing each record to a per-country table via the
# {country} placeholder in the table spec (see
# https://cloud.google.com/dataflow/docs/guides/managed-io#dynamic-destinations).
#
# Prerequisites:
# - A GCP project
# - A BigQuery dataset
# - Google Cloud credentials set up
#
# Usage:
# python -m apache_beam.yaml.main --yaml_pipeline_file=<path_to_bigquery_write.yaml> \
#   --project=<project_id> \
#   --temp_location=gs://<bucket_name>/temp

pipeline:
  type: chain
  transforms:
    # Produce three in-memory sample rows to write.
    - type: Create
      name: CreateSampleData
      config:
        elements:
          - {id: 1, name: "Alice", country: "CN"}
          - {id: 2, name: "Bob", country: "UK"}
          - {id: 3, name: "Charlie", country: "US"}
    # Write each row to users_<country>; tables are created on demand
    # and rows are appended to any existing data.
    - type: WriteToBigQuery
      name: WriteToTable
      config:
        table: "<project_id>:<dataset_id>.users_{country}"
        create_disposition: CREATE_IF_NEEDED
        write_disposition: WRITE_APPEND
        num_streams: 1

options:
  temp_location: "gs://<bucket_name>/temp"

# Expected:
#  Row(id=1, name='Alice', country='CN')
#  Row(id=2, name='Bob', country='UK')
#  Row(id=3, name='Charlie', country='US')

0 commit comments

Comments
 (0)