Fix CI errors (apache#26693)
RyuSA committed Jun 21, 2023
1 parent 5058da6 commit ee20836
Showing 2 changed files with 5 additions and 28 deletions.
30 changes: 4 additions & 26 deletions sdks/python/apache_beam/examples/snippets/snippets.py
@@ -1189,32 +1189,6 @@ def model_bigqueryio(
      create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED)
  # [END model_bigqueryio_write]

  # [START model_bigqueryio_write_with_storage_write_api]
  quotes | "WriteTableWithStorageAPI" >> beam.io.WriteToBigQuery(
      table_spec,
      schema=table_schema,
      method=beam.io.WriteToBigQuery.Method.STORAGE_WRITE_API)
  # [END model_bigqueryio_write_with_storage_write_api]

  # [START model_bigqueryio_write_schema]
  table_schema = {
      'fields': [{
          "name": "request_ts", "type": "TIMESTAMP", "mode": "REQUIRED"
      }, {
          "name": "user_name", "type": "STRING", "mode": "REQUIRED"
      }]
  }
  # [END model_bigqueryio_write_schema]

  # [START model_bigqueryio_storage_write_api_with_frequency]
  # The SDK for Python does not support `withNumStorageWriteApiStreams`
  quotes | "StorageWriteAPIWithFrequency" >> beam.io.WriteToBigQuery(
      table_spec,
      schema=table_schema,
      method=beam.io.WriteToBigQuery.Method.STORAGE_WRITE_API,
      triggering_frequency=5)
  # [END model_bigqueryio_storage_write_api_with_frequency]

  # [START model_bigqueryio_write_dynamic_destinations]
  fictional_characters_view = beam.pvalue.AsDict(
      pipeline | 'CreateCharacters' >> beam.Create([('Yoda', True),
@@ -1245,6 +1219,7 @@ def table_fn(element, fictional_characters):
      }})
  # [END model_bigqueryio_time_partitioning]


def model_bigqueryio_xlang(
    pipeline, write_project='', write_dataset='', write_table=''):
  """Examples for cross-language BigQuery sources and sinks."""
@@ -1253,13 +1228,15 @@ def model_bigqueryio_xlang(
  if write_project and write_dataset and write_table:
    table_spec = '{}:{}.{}'.format(write_project, write_dataset, write_table)

  # [START model_bigqueryio_write_schema]
  table_schema = {
      'fields': [{
          'name': 'source', 'type': 'STRING', 'mode': 'NULLABLE'
      }, {
          'name': 'quote', 'type': 'STRING', 'mode': 'REQUIRED'
      }]
  }
  # [END model_bigqueryio_write_schema]

  quotes = pipeline | beam.Create([
      {
@@ -1286,6 +1263,7 @@
      method=beam.io.WriteToBigQuery.Method.STORAGE_WRITE_API)
  # [END model_bigqueryio_write_with_storage_write_api]


def model_composite_transform_example(contents, output_path):
"""Example of a composite transform.
@@ -786,8 +786,7 @@ BigQuery Storage Write API for Python SDK currently has some limitations on supp
{{< /paragraph >}}

{{< paragraph class="language-py" >}}
**Note:** If you want to run WriteToBigQuery with Storage Write API from the source code, you need to run `./gradlew :sdks:java:io:google-cloud-platform:expansion-service:build` to build the expansion-service jar. If you are running from a released Beam SDK, the jar will already be included.

{{< /paragraph >}}
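For context, the write path this note refers to looks roughly like the sketch below, adapted from the `model_bigqueryio_xlang` snippet in the diff above. The table spec `'PROJECT:DATASET.TABLE'` is a placeholder for your own table reference; the schema and sample data mirror those used in the Beam example.

```py
import apache_beam as beam

with beam.Pipeline() as pipeline:
  # Sample input, matching the quotes used in the Beam snippets.
  quotes = pipeline | beam.Create([
      {'source': 'Mahatma Gandhi', 'quote': 'My life is my message.'},
  ])

  # Placeholder table reference; replace with your own project, dataset,
  # and table.
  table_spec = 'PROJECT:DATASET.TABLE'

  table_schema = {
      'fields': [{
          'name': 'source', 'type': 'STRING', 'mode': 'NULLABLE'
      }, {
          'name': 'quote', 'type': 'STRING', 'mode': 'REQUIRED'
      }]
  }

  # Write with the BigQuery Storage Write API, which is implemented as a
  # cross-language transform and uses the expansion service mentioned in
  # the note above.
  quotes | 'WriteWithStorageAPI' >> beam.io.WriteToBigQuery(
      table_spec,
      schema=table_schema,
      method=beam.io.WriteToBigQuery.Method.STORAGE_WRITE_API)
```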

