diff --git a/lib/gcloud.rb b/lib/gcloud.rb
index 022425cafeed..e3fd1bbf73a8 100644
--- a/lib/gcloud.rb
+++ b/lib/gcloud.rb
@@ -222,7 +222,7 @@ def pubsub scope: nil, retries: nil, timeout: nil
 # * `https://www.googleapis.com/auth/bigquery`
 # @param [Integer] retries Number of times to retry requests on server error.
 #   The default value is `3`. Optional.
- # @param [Integer] timeout Default timeout to use in requests. Optional.
+ # @param [Integer] timeout Default request timeout in seconds. Optional.
 #
 # @return [Gcloud::Bigquery::Project]
 #
diff --git a/lib/gcloud/bigquery.rb b/lib/gcloud/bigquery.rb
index b3f93779f4ca..1679aea5dfc9 100644
--- a/lib/gcloud/bigquery.rb
+++ b/lib/gcloud/bigquery.rb
@@ -369,24 +369,23 @@ def self.bigquery project = nil, keyfile = nil, scope: nil, retries: nil,
 #   BigQuery](https://cloud.google.com/bigquery/exporting-data-from-bigquery)
 #   for details.
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # bigquery = gcloud.bigquery retries: 10, timeout: 120
 # ```
 #
 # See the [BigQuery error
diff --git a/lib/gcloud/datastore.rb b/lib/gcloud/datastore.rb
index 657bd696906e..5a966fe688e5 100644
--- a/lib/gcloud/datastore.rb
+++ b/lib/gcloud/datastore.rb
@@ -519,24 +519,23 @@ def self.datastore project = nil, keyfile = nil, scope: nil, retries: nil,
 #   end
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # datastore = gcloud.datastore retries: 10, timeout: 120
 # ```
 #
 # See the [Datastore error
diff --git a/lib/gcloud/dns.rb b/lib/gcloud/dns.rb
index f89c9248fbb6..75d0667a8e74 100644
--- a/lib/gcloud/dns.rb
+++ b/lib/gcloud/dns.rb
@@ -312,24 +312,23 @@ def self.dns project = nil, keyfile = nil, scope: nil, retries: nil,
 #   zone.export "path/to/db.example.com"
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # dns = gcloud.dns retries: 10, timeout: 120
 # ```
 #
 module Dns
diff --git a/lib/gcloud/logging.rb b/lib/gcloud/logging.rb
index 0ccef67f9b9a..be4ba7d36d69 100644
--- a/lib/gcloud/logging.rb
+++ b/lib/gcloud/logging.rb
@@ -314,24 +314,23 @@ def self.logging project = nil, keyfile = nil, scope: nil, retries: nil,
 #   logger.info "Job started."
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # logging = gcloud.logging retries: 10, timeout: 120
 # ```
 #
 module Logging
diff --git a/lib/gcloud/pubsub.rb b/lib/gcloud/pubsub.rb
index 467d4b9d1bc1..1acbef5988a7 100644
--- a/lib/gcloud/pubsub.rb
+++ b/lib/gcloud/pubsub.rb
@@ -395,24 +395,23 @@ def self.pubsub project = nil, keyfile = nil, scope: nil, retries: nil,
 #   end
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # pubsub = gcloud.pubsub retries: 10, timeout: 120
 # ```
 #
 # See the [Pub/Sub error codes](https://cloud.google.com/pubsub/error-codes)
diff --git a/lib/gcloud/resource_manager.rb b/lib/gcloud/resource_manager.rb
index d5e11fdc7739..2acb7bac91d6 100644
--- a/lib/gcloud/resource_manager.rb
+++ b/lib/gcloud/resource_manager.rb
@@ -207,24 +207,23 @@ def self.resource_manager keyfile = nil, scope: nil, retries: nil,
 #   resource_manager.undelete "tokyo-rain-123"
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # resource_manager = gcloud.resource_manager retries: 10, timeout: 120
 # ```
 #
 # See the [Resource Manager error
diff --git a/lib/gcloud/storage.rb b/lib/gcloud/storage.rb
index 1ca8668eccce..f398b236f2a3 100644
--- a/lib/gcloud/storage.rb
+++ b/lib/gcloud/storage.rb
@@ -427,24 +427,23 @@ def self.storage project = nil, keyfile = nil, scope: nil, retries: nil,
 #   file.acl.public!
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # storage = gcloud.storage retries: 10, timeout: 120
 # ```
 #
 # See the [Storage status and error
diff --git a/lib/gcloud/storage/bucket.rb b/lib/gcloud/storage/bucket.rb
index 32cb04263871..798c9586df33 100644
--- a/lib/gcloud/storage/bucket.rb
+++ b/lib/gcloud/storage/bucket.rb
@@ -327,7 +327,7 @@ def update
 # The bucket must be empty before it can be deleted.
 #
 # The API call to delete the bucket may be retried under certain
- # conditions. See {Gcloud::Backoff} to control this behavior.
+ # conditions. See {Gcloud#storage} to control this behavior.
 #
 # @return [Boolean] Returns `true` if the bucket was deleted.
 #
diff --git a/lib/gcloud/storage/project.rb b/lib/gcloud/storage/project.rb
index efbffb5e3e8f..c99e33a9aa08 100644
--- a/lib/gcloud/storage/project.rb
+++ b/lib/gcloud/storage/project.rb
@@ -167,7 +167,7 @@ def bucket bucket_name
 #   bucket. See {Bucket::Cors} for details.
 #
 # The API call to create the bucket may be retried under certain
- # conditions. See {Gcloud::Backoff} to control this behavior.
+ # conditions. See {Gcloud#storage} to control this behavior.
 #
 # You can pass [website
 #   settings](https://cloud.google.com/storage/docs/website-configuration)
diff --git a/lib/gcloud/translate.rb b/lib/gcloud/translate.rb
index 460f35968322..d55ae0957d67 100644
--- a/lib/gcloud/translate.rb
+++ b/lib/gcloud/translate.rb
@@ -242,24 +242,23 @@ def self.translate key = nil, retries: nil, timeout: nil
 #   languages[0].name #=> "Afrikaans"
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # translate = gcloud.translate retries: 10, timeout: 120
 # ```
 #
 module Translate
diff --git a/lib/gcloud/vision.rb b/lib/gcloud/vision.rb
index 7957d73ddb2b..68fa68d0eade 100644
--- a/lib/gcloud/vision.rb
+++ b/lib/gcloud/vision.rb
@@ -249,24 +249,23 @@ def self.vision project = nil, keyfile = nil, scope: nil, retries: nil,
 #   annotation = vision.annotate image, faces: 5
 # ```
 #
- # ## Configuring Backoff
+ # ## Configuring retries and timeout
 #
- # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
- # requests are automatically retried in the case of some errors, such as a
- # `500` or `503` status code, or a specific internal error code such as
- # `rateLimitExceeded`.
+ # You can configure how many times API requests may be automatically retried.
+ # When an API request fails, the response will be inspected to see if the
+ # request meets criteria indicating that it may succeed on retry, such as
+ # `500` and `503` status codes or a specific internal error code such as
+ # `rateLimitExceeded`. If it meets the criteria, the request will be retried
+ # after a delay. If another error occurs, the delay will be increased before a
+ # subsequent attempt, until the `retries` limit is reached.
 #
- # If an API call fails, the response will be inspected to see if the call
- # should be retried. If the response matches the criteria, then the request
- # will be retried after a delay. If another error occurs, the delay will be
- # increased incrementally before a subsequent attempt. The first retry will be
- # delayed one second, the second retry two seconds, and so on.
+ # You can also set the request `timeout` value in seconds.
 #
 # ```ruby
 # require "gcloud"
- # require "gcloud/backoff"
 #
- # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+ # gcloud = Gcloud.new
+ # vision = gcloud.vision retries: 10, timeout: 120
 # ```
 #
 module Vision
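A minimal end-to-end sketch of the per-service `retries`/`timeout` configuration documented above, using Storage as the example service; the bucket name is only a placeholder, and the option values simply mirror the ones used in the documentation:

```ruby
require "gcloud"

gcloud = Gcloud.new

# Allow up to 10 retries and a 120-second timeout for requests made through
# this service object, rather than configuring Gcloud::Backoff globally.
storage = gcloud.storage retries: 10, timeout: 120

# Both of these calls may be retried under the conditions described in the
# documentation; "my-example-bucket" is a hypothetical bucket name.
bucket = storage.create_bucket "my-example-bucket"
bucket.delete
```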