Skip to content

Commit

Permalink
docs: improve documentation for write client
Browse files Browse the repository at this point in the history
feat: update default timeout/retry information
feat: update parent annotation for BatchCommitWriteStreamsRequest
feat: expose additional StorageError enum values

PiperOrigin-RevId: 431973595
  • Loading branch information
Google APIs authored and copybara-github committed Mar 2, 2022
1 parent c392eb0 commit 25d691b
Show file tree
Hide file tree
Showing 3 changed files with 60 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,38 @@
{
"service": "google.cloud.bigquery.storage.v1.BigQueryRead",
"method": "CreateReadSession"
},
}
],
"timeout": "600s",
"retryPolicy": {
"initialBackoff": "0.100s",
"maxBackoff": "60s",
"backoffMultiplier": 1.3,
"retryableStatusCodes": [
"DEADLINE_EXCEEDED",
"UNAVAILABLE"
]
}
},
{
"name": [
{
"service": "google.cloud.bigquery.storage.v1.BigQueryRead",
"method": "ReadRows"
}
],
"timeout": "86400s",
"retryPolicy": {
"initialBackoff": "0.100s",
"maxBackoff": "60s",
"backoffMultiplier": 1.3,
"retryableStatusCodes": [
"UNAVAILABLE"
]
}
},
{
"name": [
{
"service": "google.cloud.bigquery.storage.v1.BigQueryRead",
"method": "SplitReadStream"
Expand All @@ -25,8 +56,8 @@
{
"name": [
{
"service": "google.cloud.bigquery.storage.v1.BigQueryRead",
"method": "ReadRows"
"service": "google.cloud.bigquery.storage.v1.BigQueryWrite",
"method": "AppendRows"
}
],
"timeout": "86400s",
Expand All @@ -38,7 +69,8 @@
"UNAVAILABLE"
]
}
}, {
},
{
"name": [
{
"service": "google.cloud.bigquery.storage.v1.BigQueryWrite",
Expand Down Expand Up @@ -71,22 +103,6 @@
"UNAVAILABLE"
]
}
}, {
"name": [
{
"service": "google.cloud.bigquery.storage.v1.BigQueryWrite",
"method": "AppendRows"
}
],
"timeout": "86400s",
"retryPolicy": {
"initialBackoff": "0.100s",
"maxBackoff": "60s",
"backoffMultiplier": 1.3,
"retryableStatusCodes": [
"UNAVAILABLE"
]
}
}
]
}
12 changes: 2 additions & 10 deletions google/cloud/bigquery/storage/v1/bigquerystorage_v1.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,10 @@ backend:
deadline: 21600.0
- selector: google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream
deadline: 120.0
- selector: google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream
deadline: 120.0
- selector: google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream
deadline: 120.0
- selector: google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams
deadline: 120.0
- selector: google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream
deadline: 120.0
- selector: google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows
- selector: 'google.cloud.bigquery.storage.v1.BigQueryWrite.*'
deadline: 120.0
- selector: google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows
deadline: 86400.0
deadline: 21600.0

authentication:
rules:
Expand Down
26 changes: 22 additions & 4 deletions google/cloud/bigquery/storage/v1/storage.proto
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
// Copyright 2021 Google LLC
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -167,6 +167,13 @@ service BigQueryWrite {
// * For PENDING streams, data is not made visible until the stream itself is
// finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly
// committed via the `BatchCommitWriteStreams` rpc.
//
// Note: For users coding against the gRPC API directly, it may be
// necessary to supply the `x-goog-request-params` system parameter
// with `write_stream=<full_write_stream_name>`.
//
// More information about system parameters:
// https://cloud.google.com/apis/docs/system-parameters
rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) {
option (google.api.http) = {
post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}"
Expand Down Expand Up @@ -408,10 +415,12 @@ message AppendRowsRequest {
// request.
//
// For explicitly created write streams, the format is:
// `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
//
// * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
//
// For the special default stream, the format is:
// `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
//
// * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`.
string write_stream = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
Expand Down Expand Up @@ -498,7 +507,10 @@ message BatchCommitWriteStreamsRequest {
// Required. Parent table that all the streams should belong to, in the form of
// `projects/{project}/datasets/{dataset}/tables/{table}`.
string parent = 1 [
(google.api.field_behavior) = REQUIRED
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
type: "bigquery.googleapis.com/Table"
}
];

// Required. The group of streams that will be committed atomically.
Expand Down Expand Up @@ -593,6 +605,12 @@ message StorageError {
// There is a schema mismatch, caused by the user schema having an extra
// field that is not present in the BigQuery table schema.
SCHEMA_MISMATCH_EXTRA_FIELDS = 7;

// Offset already exists.
OFFSET_ALREADY_EXISTS = 8;

// Offset out of range.
OFFSET_OUT_OF_RANGE = 9;
}

// BigQuery Storage specific error code.
Expand Down

0 comments on commit 25d691b

Please sign in to comment.