From 7078ad5a50d4ef24f5693d4f140ad9a11bae0c17 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Tue, 29 Mar 2022 00:06:29 +0000
Subject: [PATCH] chore(python): use black==22.3.0 (#87)

Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe
---
 .../.github/.OwlBot.lock.yaml                 |   2 +-
 .../google-cloud-dataflow-client/docs/conf.py |   5 +-
 .../flex_templates_service/async_client.py    |   7 +-
 .../services/flex_templates_service/client.py |  46 +-
 .../flex_templates_service/transports/base.py |   6 +-
 .../flex_templates_service/transports/grpc.py |   3 +-
 .../services/jobs_v1_beta3/async_client.py    |  59 +-
 .../services/jobs_v1_beta3/client.py          | 100 ++-
 .../services/jobs_v1_beta3/transports/base.py |  30 +-
 .../services/jobs_v1_beta3/transports/grpc.py |   3 +-
 .../messages_v1_beta3/async_client.py         |  12 +-
 .../services/messages_v1_beta3/client.py      |  53 +-
 .../messages_v1_beta3/transports/base.py      |  10 +-
 .../messages_v1_beta3/transports/grpc.py      |   3 +-
 .../services/metrics_v1_beta3/async_client.py |  31 +-
 .../services/metrics_v1_beta3/client.py       |  72 +-
 .../metrics_v1_beta3/transports/base.py       |  10 +-
 .../metrics_v1_beta3/transports/grpc.py       |   3 +-
 .../snapshots_v1_beta3/async_client.py        |  21 +-
 .../services/snapshots_v1_beta3/client.py     |  62 +-
 .../snapshots_v1_beta3/transports/base.py     |  18 +-
 .../snapshots_v1_beta3/transports/grpc.py     |   3 +-
 .../templates_service/async_client.py         |  21 +-
 .../services/templates_service/client.py      |  62 +-
 .../templates_service/transports/base.py      |  14 +-
 .../templates_service/transports/grpc.py      |   3 +-
 .../dataflow_v1beta3/types/environment.py     | 388 +++++++++--
 .../cloud/dataflow_v1beta3/types/jobs.py      | 659 ++++++++++++++----
 .../cloud/dataflow_v1beta3/types/messages.py  | 142 +++-
 .../cloud/dataflow_v1beta3/types/metrics.py   | 310 ++++++--
 .../cloud/dataflow_v1beta3/types/snapshots.py | 120 +++-
 .../cloud/dataflow_v1beta3/types/streaming.py | 211 ++++--
 .../cloud/dataflow_v1beta3/types/templates.py | 516 +++++++++---
 .../google-cloud-dataflow-client/noxfile.py   |   9 +-
 .../test_flex_templates_service.py            |  80 ++-
 .../dataflow_v1beta3/test_jobs_v1_beta3.py    | 462 +++++++++---
 .../test_messages_v1_beta3.py                 | 160 ++++-
 .../dataflow_v1beta3/test_metrics_v1_beta3.py | 282 ++++++--
 .../test_snapshots_v1_beta3.py                | 120 +++-
 .../test_templates_service.py                 | 118 +++-
 40 files changed, 3344 insertions(+), 892 deletions(-)

diff --git a/packages/google-cloud-dataflow-client/.github/.OwlBot.lock.yaml b/packages/google-cloud-dataflow-client/.github/.OwlBot.lock.yaml
index 44c78f7cc12d..87dd00611576 100644
--- a/packages/google-cloud-dataflow-client/.github/.OwlBot.lock.yaml
+++ b/packages/google-cloud-dataflow-client/.github/.OwlBot.lock.yaml
@@ -13,4 +13,4 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3
+  digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe
diff --git a/packages/google-cloud-dataflow-client/docs/conf.py b/packages/google-cloud-dataflow-client/docs/conf.py
index bb4d36359905..62673d85b477 100644
--- a/packages/google-cloud-dataflow-client/docs/conf.py
+++ b/packages/google-cloud-dataflow-client/docs/conf.py
@@ -361,7 +361,10 @@
 intersphinx_mapping = {
     "python": ("https://python.readthedocs.org/en/latest/", None),
     "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
-    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
     "grpc": ("https://grpc.github.io/grpc/python/", None),
     "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
     "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py
index 321402679ddf..bb72c3ca5a3c 100644
--- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py
+++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py
@@ -259,7 +259,12 @@ def sample_launch_flex_template():
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py
index 83d5cea6f17b..df9a8b99c5a3 100644
--- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py
+++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py
@@ -56,7 +56,8 @@ class FlexTemplatesServiceClientMeta(type):
     _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport
 
     def get_transport_class(
-        cls, label: str = None,
+        cls,
+        label: str = None,
     ) -> Type[FlexTemplatesServiceTransport]:
         """Returns an appropriate transport class.
 
@@ -164,7 +165,9 @@ def transport(self) -> FlexTemplatesServiceTransport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -177,9 +180,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -188,9 +195,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -199,9 +210,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -210,10 +225,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -444,7 +463,12 @@ def sample_launch_flex_template(): rpc = self._transport._wrapped_methods[self._transport.launch_flex_template] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py index e2524877f181..f10c31f55153 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py @@ -135,9 +135,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py index 9c598261ed1e..c8958257f7a7 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py index 0720a6a222be..eb7423bdc8fe 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py @@ -266,7 +266,12 @@ def sample_create_job(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -334,7 +339,12 @@ def sample_get_job(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -403,7 +413,12 @@ def sample_update_job(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -483,12 +498,20 @@ def sample_list_jobs(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListJobsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -560,12 +583,20 @@ def sample_aggregated_list_jobs(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.AggregatedListJobsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -627,7 +658,12 @@ def sample_check_active_jobs(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -686,7 +722,12 @@ def sample_snapshot_job(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py index af5658b4bd22..6684bbabdfb5 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -57,7 +57,10 @@ class JobsV1Beta3ClientMeta(type): _transport_registry["grpc"] = JobsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[JobsV1Beta3Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[JobsV1Beta3Transport]: """Returns an appropriate transport class. Args: @@ -165,7 +168,9 @@ def transport(self) -> JobsV1Beta3Transport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -178,9 +183,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -189,9 +198,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -200,9 +213,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -211,10 +228,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location 
string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -452,7 +473,12 @@ def sample_create_job(): rpc = self._transport._wrapped_methods[self._transport.create_job] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -521,7 +547,12 @@ def sample_get_job(): rpc = self._transport._wrapped_methods[self._transport.get_job] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -591,7 +622,12 @@ def sample_update_job(): rpc = self._transport._wrapped_methods[self._transport.update_job] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -672,12 +708,20 @@ def sample_list_jobs(): rpc = self._transport._wrapped_methods[self._transport.list_jobs] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListJobsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -750,12 +794,20 @@ def sample_aggregated_list_jobs(): rpc = self._transport._wrapped_methods[self._transport.aggregated_list_jobs] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.AggregatedListJobsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -818,7 +870,12 @@ def sample_check_active_jobs(): rpc = self._transport._wrapped_methods[self._transport.check_active_jobs] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -878,7 +935,12 @@ def sample_snapshot_job(): rpc = self._transport._wrapped_methods[self._transport.snapshot_job] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py index c7f555ef75a4..65fcf8f5f7cd 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py @@ -127,16 +127,24 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.create_job: gapic_v1.method.wrap_method( - self.create_job, default_timeout=None, client_info=client_info, + self.create_job, + default_timeout=None, + client_info=client_info, ), self.get_job: gapic_v1.method.wrap_method( - self.get_job, default_timeout=None, client_info=client_info, + self.get_job, + default_timeout=None, + client_info=client_info, ), self.update_job: gapic_v1.method.wrap_method( - self.update_job, default_timeout=None, client_info=client_info, + self.update_job, + default_timeout=None, + client_info=client_info, ), self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, default_timeout=None, client_info=client_info, + self.list_jobs, + default_timeout=None, + client_info=client_info, ), self.aggregated_list_jobs: gapic_v1.method.wrap_method( self.aggregated_list_jobs, @@ -144,19 +152,23 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.check_active_jobs: gapic_v1.method.wrap_method( - self.check_active_jobs, default_timeout=None, client_info=client_info, + self.check_active_jobs, + default_timeout=None, + client_info=client_info, ), self.snapshot_job: gapic_v1.method.wrap_method( - self.snapshot_job, default_timeout=None, client_info=client_info, + self.snapshot_job, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py index 0cae85dfd5ec..1b86f41ee277 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py @@ -227,8 +227,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py index 41b399ff34b3..eeb60c5a43fe 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py @@ -274,12 +274,20 @@ def sample_list_job_messages(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListJobMessagesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py index 47b60237eb6f..4f86a0878acc 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -55,7 +55,10 @@ class MessagesV1Beta3ClientMeta(type): _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[MessagesV1Beta3Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[MessagesV1Beta3Transport]: """Returns an appropriate transport class. 
Args: @@ -162,7 +165,9 @@ def transport(self) -> MessagesV1Beta3Transport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -175,9 +180,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -186,9 +195,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -197,9 +210,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -208,10 +225,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -458,12 +479,20 @@ def sample_list_job_messages(): rpc = self._transport._wrapped_methods[self._transport.list_job_messages] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListJobMessagesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. 
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py index 23c97d77fd29..1d462612b6a3 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py @@ -126,16 +126,18 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.list_job_messages: gapic_v1.method.wrap_method( - self.list_job_messages, default_timeout=None, client_info=client_info, + self.list_job_messages, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py index 748256edde21..0ca2bc701a97 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py index 50b6d5c085b0..778ce22df034 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py @@ -276,7 +276,12 @@ def sample_get_job_metrics(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -345,12 +350,20 @@ def sample_get_job_execution_details(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.GetJobExecutionDetailsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -422,12 +435,20 @@ def sample_get_stage_execution_details(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.GetStageExecutionDetailsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py index 4c101742d9f2..a57112f4a516 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -56,7 +56,10 @@ class MetricsV1Beta3ClientMeta(type): _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[MetricsV1Beta3Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[MetricsV1Beta3Transport]: """Returns an appropriate transport class. Args: @@ -163,7 +166,9 @@ def transport(self) -> MetricsV1Beta3Transport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -176,9 +181,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -187,9 +196,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -198,9 +211,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -209,10 +226,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a 
fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -460,7 +481,12 @@ def sample_get_job_metrics(): rpc = self._transport._wrapped_methods[self._transport.get_job_metrics] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -532,12 +558,20 @@ def sample_get_job_execution_details(): ] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.GetJobExecutionDetailsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -612,12 +646,20 @@ def sample_get_stage_execution_details(): ] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.GetStageExecutionDetailsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py index ce1b39b41056..0e3b707d39c7 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py @@ -126,7 +126,9 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.get_job_metrics: gapic_v1.method.wrap_method( - self.get_job_metrics, default_timeout=None, client_info=client_info, + self.get_job_metrics, + default_timeout=None, + client_info=client_info, ), self.get_job_execution_details: gapic_v1.method.wrap_method( self.get_job_execution_details, @@ -143,9 +145,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py index f92f6faf612f..72154ab99b3d 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py index b7623be25027..4d2e389f90ab 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py @@ -257,7 +257,12 @@ def sample_get_snapshot(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -315,7 +320,12 @@ def sample_delete_snapshot(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -373,7 +383,12 @@ def sample_list_snapshots(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py index edf271a172ea..fffbf68b6898 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -56,7 +56,10 @@ class SnapshotsV1Beta3ClientMeta(type): _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[SnapshotsV1Beta3Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[SnapshotsV1Beta3Transport]: """Returns an appropriate transport class. 
Args: @@ -163,7 +166,9 @@ def transport(self) -> SnapshotsV1Beta3Transport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -176,9 +181,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -187,9 +196,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -198,9 +211,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -209,10 +226,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -441,7 +462,12 @@ def sample_get_snapshot(): rpc = self._transport._wrapped_methods[self._transport.get_snapshot] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -500,7 +526,12 @@ def sample_delete_snapshot(): rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -559,7 +590,12 @@ def sample_list_snapshots(): rpc = self._transport._wrapped_methods[self._transport.list_snapshots] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py index 9e03199dc828..de0aeae1ef2d 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py @@ -126,22 +126,28 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.get_snapshot: gapic_v1.method.wrap_method( - self.get_snapshot, default_timeout=None, client_info=client_info, + self.get_snapshot, + default_timeout=None, + client_info=client_info, ), self.delete_snapshot: gapic_v1.method.wrap_method( - self.delete_snapshot, default_timeout=None, client_info=client_info, + self.delete_snapshot, + default_timeout=None, + client_info=client_info, ), self.list_snapshots: gapic_v1.method.wrap_method( - self.list_snapshots, default_timeout=None, client_info=client_info, + self.list_snapshots, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py index 144fbb34d9ea..13a5cd81609b 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py index 3ce20697ec86..5cfd6ebbd5cd 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py @@ -262,7 +262,12 @@ def sample_create_job_from_template(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -323,7 +328,12 @@ def sample_launch_template(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -385,7 +395,12 @@ def sample_get_template(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py index 41ea1e231832..ceb10c512459 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -58,7 +58,10 @@ class TemplatesServiceClientMeta(type): _transport_registry["grpc"] = TemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[TemplatesServiceTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[TemplatesServiceTransport]: """Returns an appropriate transport class. Args: @@ -165,7 +168,9 @@ def transport(self) -> TemplatesServiceTransport: return self._transport @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -178,9 +183,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -189,9 +198,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -200,9 +213,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -211,10 +228,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ 
-446,7 +467,12 @@ def sample_create_job_from_template(): rpc = self._transport._wrapped_methods[self._transport.create_job_from_template] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -508,7 +534,12 @@ def sample_launch_template(): rpc = self._transport._wrapped_methods[self._transport.launch_template] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -571,7 +602,12 @@ def sample_get_template(): rpc = self._transport._wrapped_methods[self._transport.get_template] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py index 986f5872905f..d125dee57b21 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py @@ -132,19 +132,23 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.launch_template: gapic_v1.method.wrap_method( - self.launch_template, default_timeout=None, client_info=client_info, + self.launch_template, + default_timeout=None, + client_info=client_info, ), self.get_template: gapic_v1.method.wrap_method( - self.get_template, default_timeout=None, client_info=client_info, + self.get_template, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py index 09f8b896bcbe..ca247daa9035 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py @@ -226,8 +226,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py index 280e1ff22174..a9e119248262 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py @@ -213,27 +213,82 @@ class Environment(proto.Message): job. """ - temp_storage_prefix = proto.Field(proto.STRING, number=1,) - cluster_manager_api_service = proto.Field(proto.STRING, number=2,) - experiments = proto.RepeatedField(proto.STRING, number=3,) - service_options = proto.RepeatedField(proto.STRING, number=16,) - service_kms_key_name = proto.Field(proto.STRING, number=12,) - worker_pools = proto.RepeatedField(proto.MESSAGE, number=4, message="WorkerPool",) - user_agent = proto.Field(proto.MESSAGE, number=5, message=struct_pb2.Struct,) - version = proto.Field(proto.MESSAGE, number=6, message=struct_pb2.Struct,) - dataset = proto.Field(proto.STRING, number=7,) + temp_storage_prefix = proto.Field( + proto.STRING, + number=1, + ) + cluster_manager_api_service = proto.Field( + proto.STRING, + number=2, + ) + experiments = proto.RepeatedField( + proto.STRING, + number=3, + ) + service_options = proto.RepeatedField( + proto.STRING, + number=16, + ) + service_kms_key_name = proto.Field( + proto.STRING, + number=12, + ) + worker_pools = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="WorkerPool", + ) + user_agent = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + version = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) + dataset = proto.Field( + proto.STRING, + number=7, + ) sdk_pipeline_options = proto.Field( - proto.MESSAGE, number=8, message=struct_pb2.Struct, + proto.MESSAGE, + number=8, + message=struct_pb2.Struct, + ) + internal_experiments = proto.Field( + proto.MESSAGE, + number=9, + message=any_pb2.Any, + ) + service_account_email = proto.Field( + proto.STRING, + number=10, ) - internal_experiments = proto.Field(proto.MESSAGE, number=9, message=any_pb2.Any,) - service_account_email = proto.Field(proto.STRING, number=10,) flex_resource_scheduling_goal = proto.Field( - proto.ENUM, number=11, enum="FlexResourceSchedulingGoal", + proto.ENUM, + number=11, + enum="FlexResourceSchedulingGoal", + ) + worker_region = proto.Field( + proto.STRING, + number=13, + ) + worker_zone = proto.Field( + proto.STRING, + number=14, + ) + shuffle_mode = proto.Field( + proto.ENUM, + number=15, + enum="ShuffleMode", + ) + debug_options = proto.Field( + proto.MESSAGE, + number=17, + message="DebugOptions", ) - worker_region = proto.Field(proto.STRING, number=13,) - worker_zone = proto.Field(proto.STRING, number=14,) - shuffle_mode = proto.Field(proto.ENUM, number=15, enum="ShuffleMode",) - debug_options = proto.Field(proto.MESSAGE, number=17, message="DebugOptions",) class Package(proto.Message): @@ -259,8 +314,14 @@ class Package(proto.Message): bucket.storage.googleapis.com/ """ - name = proto.Field(proto.STRING, number=1,) - location = proto.Field(proto.STRING, number=2,) + name = proto.Field( + proto.STRING, + number=1, + ) + location = proto.Field( + proto.STRING, + number=2, + ) class Disk(proto.Message): @@ -299,9 +360,18 @@ class Disk(proto.Message): Directory in a VM where disk is mounted. 
""" - size_gb = proto.Field(proto.INT32, number=1,) - disk_type = proto.Field(proto.STRING, number=2,) - mount_point = proto.Field(proto.STRING, number=3,) + size_gb = proto.Field( + proto.INT32, + number=1, + ) + disk_type = proto.Field( + proto.STRING, + number=2, + ) + mount_point = proto.Field( + proto.STRING, + number=3, + ) class WorkerSettings(proto.Message): @@ -343,12 +413,30 @@ class WorkerSettings(proto.Message): bucket.storage.googleapis.com/{object} """ - base_url = proto.Field(proto.STRING, number=1,) - reporting_enabled = proto.Field(proto.BOOL, number=2,) - service_path = proto.Field(proto.STRING, number=3,) - shuffle_service_path = proto.Field(proto.STRING, number=4,) - worker_id = proto.Field(proto.STRING, number=5,) - temp_storage_prefix = proto.Field(proto.STRING, number=6,) + base_url = proto.Field( + proto.STRING, + number=1, + ) + reporting_enabled = proto.Field( + proto.BOOL, + number=2, + ) + service_path = proto.Field( + proto.STRING, + number=3, + ) + shuffle_service_path = proto.Field( + proto.STRING, + number=4, + ) + worker_id = proto.Field( + proto.STRING, + number=5, + ) + temp_storage_prefix = proto.Field( + proto.STRING, + number=6, + ) class TaskRunnerSettings(proto.Message): @@ -428,27 +516,83 @@ class TaskRunnerSettings(proto.Message): The streaming worker main class name. """ - task_user = proto.Field(proto.STRING, number=1,) - task_group = proto.Field(proto.STRING, number=2,) - oauth_scopes = proto.RepeatedField(proto.STRING, number=3,) - base_url = proto.Field(proto.STRING, number=4,) - dataflow_api_version = proto.Field(proto.STRING, number=5,) + task_user = proto.Field( + proto.STRING, + number=1, + ) + task_group = proto.Field( + proto.STRING, + number=2, + ) + oauth_scopes = proto.RepeatedField( + proto.STRING, + number=3, + ) + base_url = proto.Field( + proto.STRING, + number=4, + ) + dataflow_api_version = proto.Field( + proto.STRING, + number=5, + ) parallel_worker_settings = proto.Field( - proto.MESSAGE, number=6, message="WorkerSettings", - ) - base_task_dir = proto.Field(proto.STRING, number=7,) - continue_on_exception = proto.Field(proto.BOOL, number=8,) - log_to_serialconsole = proto.Field(proto.BOOL, number=9,) - alsologtostderr = proto.Field(proto.BOOL, number=10,) - log_upload_location = proto.Field(proto.STRING, number=11,) - log_dir = proto.Field(proto.STRING, number=12,) - temp_storage_prefix = proto.Field(proto.STRING, number=13,) - harness_command = proto.Field(proto.STRING, number=14,) - workflow_file_name = proto.Field(proto.STRING, number=15,) - commandlines_file_name = proto.Field(proto.STRING, number=16,) - vm_id = proto.Field(proto.STRING, number=17,) - language_hint = proto.Field(proto.STRING, number=18,) - streaming_worker_main_class = proto.Field(proto.STRING, number=19,) + proto.MESSAGE, + number=6, + message="WorkerSettings", + ) + base_task_dir = proto.Field( + proto.STRING, + number=7, + ) + continue_on_exception = proto.Field( + proto.BOOL, + number=8, + ) + log_to_serialconsole = proto.Field( + proto.BOOL, + number=9, + ) + alsologtostderr = proto.Field( + proto.BOOL, + number=10, + ) + log_upload_location = proto.Field( + proto.STRING, + number=11, + ) + log_dir = proto.Field( + proto.STRING, + number=12, + ) + temp_storage_prefix = proto.Field( + proto.STRING, + number=13, + ) + harness_command = proto.Field( + proto.STRING, + number=14, + ) + workflow_file_name = proto.Field( + proto.STRING, + number=15, + ) + commandlines_file_name = proto.Field( + proto.STRING, + number=16, + ) + vm_id = proto.Field( + 
proto.STRING, + number=17, + ) + language_hint = proto.Field( + proto.STRING, + number=18, + ) + streaming_worker_main_class = proto.Field( + proto.STRING, + number=19, + ) class AutoscalingSettings(proto.Message): @@ -462,8 +606,15 @@ class AutoscalingSettings(proto.Message): at. """ - algorithm = proto.Field(proto.ENUM, number=1, enum="AutoscalingAlgorithm",) - max_num_workers = proto.Field(proto.INT32, number=2,) + algorithm = proto.Field( + proto.ENUM, + number=1, + enum="AutoscalingAlgorithm", + ) + max_num_workers = proto.Field( + proto.INT32, + number=2, + ) class SdkHarnessContainerImage(proto.Message): @@ -492,10 +643,22 @@ class SdkHarnessContainerImage(proto.Message): https://github.com/apache/beam/blob/master/model/pipeline/src/main/proto/beam_runner_api.proto """ - container_image = proto.Field(proto.STRING, number=1,) - use_single_core_per_container = proto.Field(proto.BOOL, number=2,) - environment_id = proto.Field(proto.STRING, number=3,) - capabilities = proto.RepeatedField(proto.STRING, number=4,) + container_image = proto.Field( + proto.STRING, + number=1, + ) + use_single_core_per_container = proto.Field( + proto.BOOL, + number=2, + ) + environment_id = proto.Field( + proto.STRING, + number=3, + ) + capabilities = proto.RepeatedField( + proto.STRING, + number=4, + ) class WorkerPool(proto.Message): @@ -607,35 +770,103 @@ class WorkerPool(proto.Message): entries. """ - kind = proto.Field(proto.STRING, number=1,) - num_workers = proto.Field(proto.INT32, number=2,) - packages = proto.RepeatedField(proto.MESSAGE, number=3, message="Package",) - default_package_set = proto.Field(proto.ENUM, number=4, enum="DefaultPackageSet",) - machine_type = proto.Field(proto.STRING, number=5,) - teardown_policy = proto.Field(proto.ENUM, number=6, enum="TeardownPolicy",) - disk_size_gb = proto.Field(proto.INT32, number=7,) - disk_type = proto.Field(proto.STRING, number=16,) - disk_source_image = proto.Field(proto.STRING, number=8,) - zone = proto.Field(proto.STRING, number=9,) + kind = proto.Field( + proto.STRING, + number=1, + ) + num_workers = proto.Field( + proto.INT32, + number=2, + ) + packages = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Package", + ) + default_package_set = proto.Field( + proto.ENUM, + number=4, + enum="DefaultPackageSet", + ) + machine_type = proto.Field( + proto.STRING, + number=5, + ) + teardown_policy = proto.Field( + proto.ENUM, + number=6, + enum="TeardownPolicy", + ) + disk_size_gb = proto.Field( + proto.INT32, + number=7, + ) + disk_type = proto.Field( + proto.STRING, + number=16, + ) + disk_source_image = proto.Field( + proto.STRING, + number=8, + ) + zone = proto.Field( + proto.STRING, + number=9, + ) taskrunner_settings = proto.Field( - proto.MESSAGE, number=10, message="TaskRunnerSettings", + proto.MESSAGE, + number=10, + message="TaskRunnerSettings", + ) + on_host_maintenance = proto.Field( + proto.STRING, + number=11, + ) + data_disks = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="Disk", + ) + metadata = proto.MapField( + proto.STRING, + proto.STRING, + number=13, ) - on_host_maintenance = proto.Field(proto.STRING, number=11,) - data_disks = proto.RepeatedField(proto.MESSAGE, number=12, message="Disk",) - metadata = proto.MapField(proto.STRING, proto.STRING, number=13,) autoscaling_settings = proto.Field( - proto.MESSAGE, number=14, message="AutoscalingSettings", + proto.MESSAGE, + number=14, + message="AutoscalingSettings", + ) + pool_args = proto.Field( + proto.MESSAGE, + number=15, + message=any_pb2.Any, + ) + 
network = proto.Field( + proto.STRING, + number=17, + ) + subnetwork = proto.Field( + proto.STRING, + number=19, + ) + worker_harness_container_image = proto.Field( + proto.STRING, + number=18, + ) + num_threads_per_worker = proto.Field( + proto.INT32, + number=20, ) - pool_args = proto.Field(proto.MESSAGE, number=15, message=any_pb2.Any,) - network = proto.Field(proto.STRING, number=17,) - subnetwork = proto.Field(proto.STRING, number=19,) - worker_harness_container_image = proto.Field(proto.STRING, number=18,) - num_threads_per_worker = proto.Field(proto.INT32, number=20,) ip_configuration = proto.Field( - proto.ENUM, number=21, enum="WorkerIPAddressConfiguration", + proto.ENUM, + number=21, + enum="WorkerIPAddressConfiguration", ) sdk_harness_container_images = proto.RepeatedField( - proto.MESSAGE, number=22, message="SdkHarnessContainerImage", + proto.MESSAGE, + number=22, + message="SdkHarnessContainerImage", ) @@ -649,7 +880,10 @@ class DebugOptions(proto.Message): hot key to the user's Cloud Logging. """ - enable_hot_key_logging = proto.Field(proto.BOOL, number=1,) + enable_hot_key_logging = proto.Field( + proto.BOOL, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py index 68cbece1f457..7ff6818176db 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py @@ -252,41 +252,120 @@ class Job(proto.Message): if it is set in any requests. """ - id = proto.Field(proto.STRING, number=1,) - project_id = proto.Field(proto.STRING, number=2,) - name = proto.Field(proto.STRING, number=3,) - type_ = proto.Field(proto.ENUM, number=4, enum=gd_environment.JobType,) + id = proto.Field( + proto.STRING, + number=1, + ) + project_id = proto.Field( + proto.STRING, + number=2, + ) + name = proto.Field( + proto.STRING, + number=3, + ) + type_ = proto.Field( + proto.ENUM, + number=4, + enum=gd_environment.JobType, + ) environment = proto.Field( - proto.MESSAGE, number=5, message=gd_environment.Environment, + proto.MESSAGE, + number=5, + message=gd_environment.Environment, + ) + steps = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Step", + ) + steps_location = proto.Field( + proto.STRING, + number=24, + ) + current_state = proto.Field( + proto.ENUM, + number=7, + enum="JobState", ) - steps = proto.RepeatedField(proto.MESSAGE, number=6, message="Step",) - steps_location = proto.Field(proto.STRING, number=24,) - current_state = proto.Field(proto.ENUM, number=7, enum="JobState",) current_state_time = proto.Field( - proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + requested_state = proto.Field( + proto.ENUM, + number=9, + enum="JobState", + ) + execution_info = proto.Field( + proto.MESSAGE, + number=10, + message="JobExecutionInfo", ) - requested_state = proto.Field(proto.ENUM, number=9, enum="JobState",) - execution_info = proto.Field(proto.MESSAGE, number=10, message="JobExecutionInfo",) create_time = proto.Field( - proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, - ) - replace_job_id = proto.Field(proto.STRING, number=12,) - transform_name_mapping = proto.MapField(proto.STRING, proto.STRING, number=13,) - client_request_id = proto.Field(proto.STRING, number=14,) - replaced_by_job_id = 
proto.Field(proto.STRING, number=15,) - temp_files = proto.RepeatedField(proto.STRING, number=16,) - labels = proto.MapField(proto.STRING, proto.STRING, number=17,) - location = proto.Field(proto.STRING, number=18,) + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + replace_job_id = proto.Field( + proto.STRING, + number=12, + ) + transform_name_mapping = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + client_request_id = proto.Field( + proto.STRING, + number=14, + ) + replaced_by_job_id = proto.Field( + proto.STRING, + number=15, + ) + temp_files = proto.RepeatedField( + proto.STRING, + number=16, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + location = proto.Field( + proto.STRING, + number=18, + ) pipeline_description = proto.Field( - proto.MESSAGE, number=19, message="PipelineDescription", + proto.MESSAGE, + number=19, + message="PipelineDescription", ) stage_states = proto.RepeatedField( - proto.MESSAGE, number=20, message="ExecutionStageState", + proto.MESSAGE, + number=20, + message="ExecutionStageState", + ) + job_metadata = proto.Field( + proto.MESSAGE, + number=21, + message="JobMetadata", + ) + start_time = proto.Field( + proto.MESSAGE, + number=22, + message=timestamp_pb2.Timestamp, + ) + created_from_snapshot_id = proto.Field( + proto.STRING, + number=23, + ) + satisfies_pzs = proto.Field( + proto.BOOL, + number=25, ) - job_metadata = proto.Field(proto.MESSAGE, number=21, message="JobMetadata",) - start_time = proto.Field(proto.MESSAGE, number=22, message=timestamp_pb2.Timestamp,) - created_from_snapshot_id = proto.Field(proto.STRING, number=23,) - satisfies_pzs = proto.Field(proto.BOOL, number=25,) class DatastoreIODetails(proto.Message): @@ -299,8 +378,14 @@ class DatastoreIODetails(proto.Message): ProjectId accessed in the connection. """ - namespace = proto.Field(proto.STRING, number=1,) - project_id = proto.Field(proto.STRING, number=2,) + namespace = proto.Field( + proto.STRING, + number=1, + ) + project_id = proto.Field( + proto.STRING, + number=2, + ) class PubSubIODetails(proto.Message): @@ -313,8 +398,14 @@ class PubSubIODetails(proto.Message): Subscription used in the connection. """ - topic = proto.Field(proto.STRING, number=1,) - subscription = proto.Field(proto.STRING, number=2,) + topic = proto.Field( + proto.STRING, + number=1, + ) + subscription = proto.Field( + proto.STRING, + number=2, + ) class FileIODetails(proto.Message): @@ -326,7 +417,10 @@ class FileIODetails(proto.Message): connector. """ - file_pattern = proto.Field(proto.STRING, number=1,) + file_pattern = proto.Field( + proto.STRING, + number=1, + ) class BigTableIODetails(proto.Message): @@ -341,9 +435,18 @@ class BigTableIODetails(proto.Message): TableId accessed in the connection. """ - project_id = proto.Field(proto.STRING, number=1,) - instance_id = proto.Field(proto.STRING, number=2,) - table_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + instance_id = proto.Field( + proto.STRING, + number=2, + ) + table_id = proto.Field( + proto.STRING, + number=3, + ) class BigQueryIODetails(proto.Message): @@ -360,10 +463,22 @@ class BigQueryIODetails(proto.Message): Query used to access data in the connection. 
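
These ``*IODetails`` messages surface on ``Job.job_metadata`` when a job is read back from the service. A minimal sketch, assuming a project, region, and job ID of your own (all placeholders here); requesting the full view also populates fields like ``steps``:

    from google.cloud import dataflow_v1beta3

    client = dataflow_v1beta3.JobsV1Beta3Client()
    job = client.get_job(
        request=dataflow_v1beta3.GetJobRequest(
            project_id="my-project",
            job_id="my-job-id",
            location="us-central1",
            view=dataflow_v1beta3.JobView.JOB_VIEW_ALL,
        )
    )
    for bq in job.job_metadata.bigquery_details:
        print(bq.project_id, bq.dataset, bq.table)
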
""" - table = proto.Field(proto.STRING, number=1,) - dataset = proto.Field(proto.STRING, number=2,) - project_id = proto.Field(proto.STRING, number=3,) - query = proto.Field(proto.STRING, number=4,) + table = proto.Field( + proto.STRING, + number=1, + ) + dataset = proto.Field( + proto.STRING, + number=2, + ) + project_id = proto.Field( + proto.STRING, + number=3, + ) + query = proto.Field( + proto.STRING, + number=4, + ) class SpannerIODetails(proto.Message): @@ -378,9 +493,18 @@ class SpannerIODetails(proto.Message): DatabaseId accessed in the connection. """ - project_id = proto.Field(proto.STRING, number=1,) - instance_id = proto.Field(proto.STRING, number=2,) - database_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + instance_id = proto.Field( + proto.STRING, + number=2, + ) + database_id = proto.Field( + proto.STRING, + number=3, + ) class SdkVersion(proto.Message): @@ -404,9 +528,19 @@ class SdkSupportStatus(proto.Enum): DEPRECATED = 3 UNSUPPORTED = 4 - version = proto.Field(proto.STRING, number=1,) - version_display_name = proto.Field(proto.STRING, number=2,) - sdk_support_status = proto.Field(proto.ENUM, number=3, enum=SdkSupportStatus,) + version = proto.Field( + proto.STRING, + number=1, + ) + version_display_name = proto.Field( + proto.STRING, + number=2, + ) + sdk_support_status = proto.Field( + proto.ENUM, + number=3, + enum=SdkSupportStatus, + ) class JobMetadata(proto.Message): @@ -436,24 +570,40 @@ class JobMetadata(proto.Message): the Dataflow job. """ - sdk_version = proto.Field(proto.MESSAGE, number=1, message="SdkVersion",) + sdk_version = proto.Field( + proto.MESSAGE, + number=1, + message="SdkVersion", + ) spanner_details = proto.RepeatedField( - proto.MESSAGE, number=2, message="SpannerIODetails", + proto.MESSAGE, + number=2, + message="SpannerIODetails", ) bigquery_details = proto.RepeatedField( - proto.MESSAGE, number=3, message="BigQueryIODetails", + proto.MESSAGE, + number=3, + message="BigQueryIODetails", ) big_table_details = proto.RepeatedField( - proto.MESSAGE, number=4, message="BigTableIODetails", + proto.MESSAGE, + number=4, + message="BigTableIODetails", ) pubsub_details = proto.RepeatedField( - proto.MESSAGE, number=5, message="PubSubIODetails", + proto.MESSAGE, + number=5, + message="PubSubIODetails", ) file_details = proto.RepeatedField( - proto.MESSAGE, number=6, message="FileIODetails", + proto.MESSAGE, + number=6, + message="FileIODetails", ) datastore_details = proto.RepeatedField( - proto.MESSAGE, number=7, message="DatastoreIODetails", + proto.MESSAGE, + number=7, + message="DatastoreIODetails", ) @@ -472,10 +622,19 @@ class ExecutionStageState(proto.Message): this state. 
""" - execution_stage_name = proto.Field(proto.STRING, number=1,) - execution_stage_state = proto.Field(proto.ENUM, number=2, enum="JobState",) + execution_stage_name = proto.Field( + proto.STRING, + number=1, + ) + execution_stage_state = proto.Field( + proto.ENUM, + number=2, + enum="JobState", + ) current_state_time = proto.Field( - proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, ) @@ -497,12 +656,20 @@ class PipelineDescription(proto.Message): """ original_pipeline_transform = proto.RepeatedField( - proto.MESSAGE, number=1, message="TransformSummary", + proto.MESSAGE, + number=1, + message="TransformSummary", ) execution_pipeline_stage = proto.RepeatedField( - proto.MESSAGE, number=2, message="ExecutionStageSummary", + proto.MESSAGE, + number=2, + message="ExecutionStageSummary", + ) + display_data = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DisplayData", ) - display_data = proto.RepeatedField(proto.MESSAGE, number=3, message="DisplayData",) class TransformSummary(proto.Message): @@ -527,12 +694,32 @@ class TransformSummary(proto.Message): transform. """ - kind = proto.Field(proto.ENUM, number=1, enum="KindType",) - id = proto.Field(proto.STRING, number=2,) - name = proto.Field(proto.STRING, number=3,) - display_data = proto.RepeatedField(proto.MESSAGE, number=4, message="DisplayData",) - output_collection_name = proto.RepeatedField(proto.STRING, number=5,) - input_collection_name = proto.RepeatedField(proto.STRING, number=6,) + kind = proto.Field( + proto.ENUM, + number=1, + enum="KindType", + ) + id = proto.Field( + proto.STRING, + number=2, + ) + name = proto.Field( + proto.STRING, + number=3, + ) + display_data = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="DisplayData", + ) + output_collection_name = proto.RepeatedField( + proto.STRING, + number=5, + ) + input_collection_name = proto.RepeatedField( + proto.STRING, + number=6, + ) class ExecutionStageSummary(proto.Message): @@ -582,10 +769,22 @@ class StageSource(proto.Message): Size of the source, if measurable. """ - user_name = proto.Field(proto.STRING, number=1,) - name = proto.Field(proto.STRING, number=2,) - original_transform_or_collection = proto.Field(proto.STRING, number=3,) - size_bytes = proto.Field(proto.INT64, number=4,) + user_name = proto.Field( + proto.STRING, + number=1, + ) + name = proto.Field( + proto.STRING, + number=2, + ) + original_transform_or_collection = proto.Field( + proto.STRING, + number=3, + ) + size_bytes = proto.Field( + proto.INT64, + number=4, + ) class ComponentTransform(proto.Message): r"""Description of a transform executed as part of an execution @@ -604,9 +803,18 @@ class ComponentTransform(proto.Message): associated. """ - user_name = proto.Field(proto.STRING, number=1,) - name = proto.Field(proto.STRING, number=2,) - original_transform = proto.Field(proto.STRING, number=3,) + user_name = proto.Field( + proto.STRING, + number=1, + ) + name = proto.Field( + proto.STRING, + number=2, + ) + original_transform = proto.Field( + proto.STRING, + number=3, + ) class ComponentSource(proto.Message): r"""Description of an interstitial value between transforms in an @@ -625,21 +833,55 @@ class ComponentSource(proto.Message): closely associated. 
""" - user_name = proto.Field(proto.STRING, number=1,) - name = proto.Field(proto.STRING, number=2,) - original_transform_or_collection = proto.Field(proto.STRING, number=3,) - - name = proto.Field(proto.STRING, number=1,) - id = proto.Field(proto.STRING, number=2,) - kind = proto.Field(proto.ENUM, number=3, enum="KindType",) - input_source = proto.RepeatedField(proto.MESSAGE, number=4, message=StageSource,) - output_source = proto.RepeatedField(proto.MESSAGE, number=5, message=StageSource,) - prerequisite_stage = proto.RepeatedField(proto.STRING, number=8,) + user_name = proto.Field( + proto.STRING, + number=1, + ) + name = proto.Field( + proto.STRING, + number=2, + ) + original_transform_or_collection = proto.Field( + proto.STRING, + number=3, + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + id = proto.Field( + proto.STRING, + number=2, + ) + kind = proto.Field( + proto.ENUM, + number=3, + enum="KindType", + ) + input_source = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=StageSource, + ) + output_source = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=StageSource, + ) + prerequisite_stage = proto.RepeatedField( + proto.STRING, + number=8, + ) component_transform = proto.RepeatedField( - proto.MESSAGE, number=6, message=ComponentTransform, + proto.MESSAGE, + number=6, + message=ComponentTransform, ) component_source = proto.RepeatedField( - proto.MESSAGE, number=7, message=ComponentSource, + proto.MESSAGE, + number=7, + message=ComponentSource, ) @@ -713,22 +955,63 @@ class name or programming language namespace the element. """ - key = proto.Field(proto.STRING, number=1,) - namespace = proto.Field(proto.STRING, number=2,) - str_value = proto.Field(proto.STRING, number=4, oneof="Value",) - int64_value = proto.Field(proto.INT64, number=5, oneof="Value",) - float_value = proto.Field(proto.FLOAT, number=6, oneof="Value",) - java_class_value = proto.Field(proto.STRING, number=7, oneof="Value",) + key = proto.Field( + proto.STRING, + number=1, + ) + namespace = proto.Field( + proto.STRING, + number=2, + ) + str_value = proto.Field( + proto.STRING, + number=4, + oneof="Value", + ) + int64_value = proto.Field( + proto.INT64, + number=5, + oneof="Value", + ) + float_value = proto.Field( + proto.FLOAT, + number=6, + oneof="Value", + ) + java_class_value = proto.Field( + proto.STRING, + number=7, + oneof="Value", + ) timestamp_value = proto.Field( - proto.MESSAGE, number=8, oneof="Value", message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=8, + oneof="Value", + message=timestamp_pb2.Timestamp, ) duration_value = proto.Field( - proto.MESSAGE, number=9, oneof="Value", message=duration_pb2.Duration, + proto.MESSAGE, + number=9, + oneof="Value", + message=duration_pb2.Duration, + ) + bool_value = proto.Field( + proto.BOOL, + number=10, + oneof="Value", + ) + short_str_value = proto.Field( + proto.STRING, + number=11, + ) + url = proto.Field( + proto.STRING, + number=12, + ) + label = proto.Field( + proto.STRING, + number=13, ) - bool_value = proto.Field(proto.BOOL, number=10, oneof="Value",) - short_str_value = proto.Field(proto.STRING, number=11,) - url = proto.Field(proto.STRING, number=12,) - label = proto.Field(proto.STRING, number=13,) class Step(proto.Message): @@ -771,9 +1054,19 @@ class Step(proto.Message): be provided on Create. Only retrieved with JOB_VIEW_ALL. 
""" - kind = proto.Field(proto.STRING, number=1,) - name = proto.Field(proto.STRING, number=2,) - properties = proto.Field(proto.MESSAGE, number=3, message=struct_pb2.Struct,) + kind = proto.Field( + proto.STRING, + number=1, + ) + name = proto.Field( + proto.STRING, + number=2, + ) + properties = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) class JobExecutionInfo(proto.Message): @@ -787,7 +1080,10 @@ class JobExecutionInfo(proto.Message): """ stages = proto.MapField( - proto.STRING, proto.MESSAGE, number=1, message="JobExecutionStageInfo", + proto.STRING, + proto.MESSAGE, + number=1, + message="JobExecutionStageInfo", ) @@ -804,7 +1100,10 @@ class JobExecutionStageInfo(proto.Message): one stage. """ - step_name = proto.RepeatedField(proto.STRING, number=1,) + step_name = proto.RepeatedField( + proto.STRING, + number=1, + ) class CreateJobRequest(proto.Message): @@ -828,11 +1127,28 @@ class CreateJobRequest(proto.Message): that contains this job. """ - project_id = proto.Field(proto.STRING, number=1,) - job = proto.Field(proto.MESSAGE, number=2, message="Job",) - view = proto.Field(proto.ENUM, number=3, enum="JobView",) - replace_job_id = proto.Field(proto.STRING, number=4,) - location = proto.Field(proto.STRING, number=5,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job = proto.Field( + proto.MESSAGE, + number=2, + message="Job", + ) + view = proto.Field( + proto.ENUM, + number=3, + enum="JobView", + ) + replace_job_id = proto.Field( + proto.STRING, + number=4, + ) + location = proto.Field( + proto.STRING, + number=5, + ) class GetJobRequest(proto.Message): @@ -853,10 +1169,23 @@ class GetJobRequest(proto.Message): that contains this job. """ - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=2,) - view = proto.Field(proto.ENUM, number=3, enum="JobView",) - location = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + view = proto.Field( + proto.ENUM, + number=3, + enum="JobView", + ) + location = proto.Field( + proto.STRING, + number=4, + ) class UpdateJobRequest(proto.Message): @@ -878,10 +1207,23 @@ class UpdateJobRequest(proto.Message): that contains this job. 
""" - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=2,) - job = proto.Field(proto.MESSAGE, number=3, message="Job",) - location = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + job = proto.Field( + proto.MESSAGE, + number=3, + message="Job", + ) + location = proto.Field( + proto.STRING, + number=4, + ) class ListJobsRequest(proto.Message): @@ -918,12 +1260,32 @@ class Filter(proto.Enum): TERMINATED = 2 ACTIVE = 3 - filter = proto.Field(proto.ENUM, number=5, enum=Filter,) - project_id = proto.Field(proto.STRING, number=1,) - view = proto.Field(proto.ENUM, number=2, enum="JobView",) - page_size = proto.Field(proto.INT32, number=3,) - page_token = proto.Field(proto.STRING, number=4,) - location = proto.Field(proto.STRING, number=17,) + filter = proto.Field( + proto.ENUM, + number=5, + enum=Filter, + ) + project_id = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.ENUM, + number=2, + enum="JobView", + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + location = proto.Field( + proto.STRING, + number=17, + ) class FailedLocation(proto.Message): @@ -938,7 +1300,10 @@ class FailedLocation(proto.Message): that failed to respond. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ListJobsResponse(proto.Message): @@ -964,10 +1329,19 @@ class ListJobsResponse(proto.Message): def raw_page(self): return self - jobs = proto.RepeatedField(proto.MESSAGE, number=1, message="Job",) - next_page_token = proto.Field(proto.STRING, number=2,) + jobs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Job", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) failed_location = proto.RepeatedField( - proto.MESSAGE, number=3, message="FailedLocation", + proto.MESSAGE, + number=3, + message="FailedLocation", ) @@ -992,12 +1366,31 @@ class SnapshotJobRequest(proto.Message): Maybe empty. """ - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=2,) - ttl = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration,) - location = proto.Field(proto.STRING, number=4,) - snapshot_sources = proto.Field(proto.BOOL, number=5,) - description = proto.Field(proto.STRING, number=6,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + ttl = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + location = proto.Field( + proto.STRING, + number=4, + ) + snapshot_sources = proto.Field( + proto.BOOL, + number=5, + ) + description = proto.Field( + proto.STRING, + number=6, + ) class CheckActiveJobsRequest(proto.Message): @@ -1008,7 +1401,10 @@ class CheckActiveJobsRequest(proto.Message): The project which owns the jobs. """ - project_id = proto.Field(proto.STRING, number=1,) + project_id = proto.Field( + proto.STRING, + number=1, + ) class CheckActiveJobsResponse(proto.Message): @@ -1020,7 +1416,10 @@ class CheckActiveJobsResponse(proto.Message): False otherwise. 
""" - active_jobs_exist = proto.Field(proto.BOOL, number=1,) + active_jobs_exist = proto.Field( + proto.BOOL, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py index 379c0d8fbdeb..533082c043c1 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py @@ -56,10 +56,24 @@ class JobMessage(proto.Message): Importance level of the message. """ - id = proto.Field(proto.STRING, number=1,) - time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - message_text = proto.Field(proto.STRING, number=3,) - message_importance = proto.Field(proto.ENUM, number=4, enum="JobMessageImportance",) + id = proto.Field( + proto.STRING, + number=1, + ) + time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + message_text = proto.Field( + proto.STRING, + number=3, + ) + message_importance = proto.Field( + proto.ENUM, + number=4, + enum="JobMessageImportance", + ) class StructuredMessage(proto.Message): @@ -89,12 +103,29 @@ class Parameter(proto.Message): Value for this parameter. """ - key = proto.Field(proto.STRING, number=1,) - value = proto.Field(proto.MESSAGE, number=2, message=struct_pb2.Value,) - - message_text = proto.Field(proto.STRING, number=1,) - message_key = proto.Field(proto.STRING, number=2,) - parameters = proto.RepeatedField(proto.MESSAGE, number=3, message=Parameter,) + key = proto.Field( + proto.STRING, + number=1, + ) + value = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + + message_text = proto.Field( + proto.STRING, + number=1, + ) + message_key = proto.Field( + proto.STRING, + number=2, + ) + parameters = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Parameter, + ) class AutoscalingEvent(proto.Message): @@ -130,12 +161,33 @@ class AutoscalingEventType(proto.Enum): ACTUATION_FAILURE = 3 NO_CHANGE = 4 - current_num_workers = proto.Field(proto.INT64, number=1,) - target_num_workers = proto.Field(proto.INT64, number=2,) - event_type = proto.Field(proto.ENUM, number=3, enum=AutoscalingEventType,) - description = proto.Field(proto.MESSAGE, number=4, message="StructuredMessage",) - time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) - worker_pool = proto.Field(proto.STRING, number=7,) + current_num_workers = proto.Field( + proto.INT64, + number=1, + ) + target_num_workers = proto.Field( + proto.INT64, + number=2, + ) + event_type = proto.Field( + proto.ENUM, + number=3, + enum=AutoscalingEventType, + ) + description = proto.Field( + proto.MESSAGE, + number=4, + message="StructuredMessage", + ) + time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + worker_pool = proto.Field( + proto.STRING, + number=7, + ) class ListJobMessagesRequest(proto.Message): @@ -175,14 +227,41 @@ class ListJobMessagesRequest(proto.Message): that contains the job specified by job_id. 
""" - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=2,) - minimum_importance = proto.Field(proto.ENUM, number=3, enum="JobMessageImportance",) - page_size = proto.Field(proto.INT32, number=4,) - page_token = proto.Field(proto.STRING, number=5,) - start_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) - location = proto.Field(proto.STRING, number=8,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + minimum_importance = proto.Field( + proto.ENUM, + number=3, + enum="JobMessageImportance", + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) + start_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + location = proto.Field( + proto.STRING, + number=8, + ) class ListJobMessagesResponse(proto.Message): @@ -203,10 +282,19 @@ class ListJobMessagesResponse(proto.Message): def raw_page(self): return self - job_messages = proto.RepeatedField(proto.MESSAGE, number=1, message="JobMessage",) - next_page_token = proto.Field(proto.STRING, number=2,) + job_messages = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="JobMessage", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) autoscaling_events = proto.RepeatedField( - proto.MESSAGE, number=3, message="AutoscalingEvent", + proto.MESSAGE, + number=3, + message="AutoscalingEvent", ) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py index ac8ce9d70ffb..206bb059e04d 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py @@ -71,9 +71,19 @@ class MetricStructuredName(proto.Message): PCollections in the SDK will have context['pcollection'] = . """ - origin = proto.Field(proto.STRING, number=1,) - name = proto.Field(proto.STRING, number=2,) - context = proto.MapField(proto.STRING, proto.STRING, number=3,) + origin = proto.Field( + proto.STRING, + number=1, + ) + name = proto.Field( + proto.STRING, + number=2, + ) + context = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) class MetricUpdate(proto.Message): @@ -138,17 +148,59 @@ class MetricUpdate(proto.Message): the metrics API. 
""" - name = proto.Field(proto.MESSAGE, number=1, message="MetricStructuredName",) - kind = proto.Field(proto.STRING, number=2,) - cumulative = proto.Field(proto.BOOL, number=3,) - scalar = proto.Field(proto.MESSAGE, number=4, message=struct_pb2.Value,) - mean_sum = proto.Field(proto.MESSAGE, number=5, message=struct_pb2.Value,) - mean_count = proto.Field(proto.MESSAGE, number=6, message=struct_pb2.Value,) - set_ = proto.Field(proto.MESSAGE, number=7, message=struct_pb2.Value,) - distribution = proto.Field(proto.MESSAGE, number=11, message=struct_pb2.Value,) - gauge = proto.Field(proto.MESSAGE, number=12, message=struct_pb2.Value,) - internal = proto.Field(proto.MESSAGE, number=8, message=struct_pb2.Value,) - update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) + name = proto.Field( + proto.MESSAGE, + number=1, + message="MetricStructuredName", + ) + kind = proto.Field( + proto.STRING, + number=2, + ) + cumulative = proto.Field( + proto.BOOL, + number=3, + ) + scalar = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Value, + ) + mean_sum = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Value, + ) + mean_count = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Value, + ) + set_ = proto.Field( + proto.MESSAGE, + number=7, + message=struct_pb2.Value, + ) + distribution = proto.Field( + proto.MESSAGE, + number=11, + message=struct_pb2.Value, + ) + gauge = proto.Field( + proto.MESSAGE, + number=12, + message=struct_pb2.Value, + ) + internal = proto.Field( + proto.MESSAGE, + number=8, + message=struct_pb2.Value, + ) + update_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) class GetJobMetricsRequest(proto.Message): @@ -169,10 +221,23 @@ class GetJobMetricsRequest(proto.Message): that contains the job specified by job_id. """ - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=2,) - start_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) - location = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + start_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + location = proto.Field( + proto.STRING, + number=4, + ) class JobMetrics(proto.Message): @@ -192,8 +257,16 @@ class JobMetrics(proto.Message): All metrics for this job. """ - metric_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - metrics = proto.RepeatedField(proto.MESSAGE, number=2, message="MetricUpdate",) + metric_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="MetricUpdate", + ) class GetJobExecutionDetailsRequest(proto.Message): @@ -220,11 +293,26 @@ class GetJobExecutionDetailsRequest(proto.Message): of results to be returned. 
""" - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=2,) - location = proto.Field(proto.STRING, number=3,) - page_size = proto.Field(proto.INT32, number=4,) - page_token = proto.Field(proto.STRING, number=5,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + location = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) class ProgressTimeseries(proto.Message): @@ -249,11 +337,25 @@ class Point(proto.Message): The value of the point. """ - time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - value = proto.Field(proto.DOUBLE, number=2,) - - current_progress = proto.Field(proto.DOUBLE, number=1,) - data_points = proto.RepeatedField(proto.MESSAGE, number=2, message=Point,) + time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + value = proto.Field( + proto.DOUBLE, + number=2, + ) + + current_progress = proto.Field( + proto.DOUBLE, + number=1, + ) + data_points = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Point, + ) class StageSummary(proto.Message): @@ -278,12 +380,35 @@ class StageSummary(proto.Message): Metrics for this stage. """ - stage_id = proto.Field(proto.STRING, number=1,) - state = proto.Field(proto.ENUM, number=2, enum="ExecutionState",) - start_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) - progress = proto.Field(proto.MESSAGE, number=5, message="ProgressTimeseries",) - metrics = proto.RepeatedField(proto.MESSAGE, number=6, message="MetricUpdate",) + stage_id = proto.Field( + proto.STRING, + number=1, + ) + state = proto.Field( + proto.ENUM, + number=2, + enum="ExecutionState", + ) + start_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + progress = proto.Field( + proto.MESSAGE, + number=5, + message="ProgressTimeseries", + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="MetricUpdate", + ) class JobExecutionDetails(proto.Message): @@ -302,8 +427,15 @@ class JobExecutionDetails(proto.Message): def raw_page(self): return self - stages = proto.RepeatedField(proto.MESSAGE, number=1, message="StageSummary",) - next_page_token = proto.Field(proto.STRING, number=2,) + stages = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="StageSummary", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class GetStageExecutionDetailsRequest(proto.Message): @@ -339,14 +471,40 @@ class GetStageExecutionDetailsRequest(proto.Message): start time. 
""" - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=2,) - location = proto.Field(proto.STRING, number=3,) - stage_id = proto.Field(proto.STRING, number=4,) - page_size = proto.Field(proto.INT32, number=5,) - page_token = proto.Field(proto.STRING, number=6,) - start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + location = proto.Field( + proto.STRING, + number=3, + ) + stage_id = proto.Field( + proto.STRING, + number=4, + ) + page_size = proto.Field( + proto.INT32, + number=5, + ) + page_token = proto.Field( + proto.STRING, + number=6, + ) + start_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) class WorkItemDetails(proto.Message): @@ -372,13 +530,39 @@ class WorkItemDetails(proto.Message): Metrics for this work item. """ - task_id = proto.Field(proto.STRING, number=1,) - attempt_id = proto.Field(proto.STRING, number=2,) - start_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) - state = proto.Field(proto.ENUM, number=5, enum="ExecutionState",) - progress = proto.Field(proto.MESSAGE, number=6, message="ProgressTimeseries",) - metrics = proto.RepeatedField(proto.MESSAGE, number=7, message="MetricUpdate",) + task_id = proto.Field( + proto.STRING, + number=1, + ) + attempt_id = proto.Field( + proto.STRING, + number=2, + ) + start_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=5, + enum="ExecutionState", + ) + progress = proto.Field( + proto.MESSAGE, + number=6, + message="ProgressTimeseries", + ) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="MetricUpdate", + ) class WorkerDetails(proto.Message): @@ -392,9 +576,14 @@ class WorkerDetails(proto.Message): by time. """ - worker_name = proto.Field(proto.STRING, number=1,) + worker_name = proto.Field( + proto.STRING, + number=1, + ) work_items = proto.RepeatedField( - proto.MESSAGE, number=2, message="WorkItemDetails", + proto.MESSAGE, + number=2, + message="WorkItemDetails", ) @@ -414,8 +603,15 @@ class StageExecutionDetails(proto.Message): def raw_page(self): return self - workers = proto.RepeatedField(proto.MESSAGE, number=1, message="WorkerDetails",) - next_page_token = proto.Field(proto.STRING, number=2,) + workers = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="WorkerDetails", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py index 5a4979047cfd..8b20e8ad4b09 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py @@ -56,9 +56,19 @@ class PubsubSnapshotMetadata(proto.Message): The expire time of the Pubsub snapshot. 
""" - topic_name = proto.Field(proto.STRING, number=1,) - snapshot_name = proto.Field(proto.STRING, number=2,) - expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + topic_name = proto.Field( + proto.STRING, + number=1, + ) + snapshot_name = proto.Field( + proto.STRING, + number=2, + ) + expire_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) class Snapshot(proto.Message): @@ -91,20 +101,50 @@ class Snapshot(proto.Message): e.g., "us-central1". """ - id = proto.Field(proto.STRING, number=1,) - project_id = proto.Field(proto.STRING, number=2,) - source_job_id = proto.Field(proto.STRING, number=3,) + id = proto.Field( + proto.STRING, + number=1, + ) + project_id = proto.Field( + proto.STRING, + number=2, + ) + source_job_id = proto.Field( + proto.STRING, + number=3, + ) creation_time = proto.Field( - proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + ttl = proto.Field( + proto.MESSAGE, + number=5, + message=duration_pb2.Duration, + ) + state = proto.Field( + proto.ENUM, + number=6, + enum="SnapshotState", ) - ttl = proto.Field(proto.MESSAGE, number=5, message=duration_pb2.Duration,) - state = proto.Field(proto.ENUM, number=6, enum="SnapshotState",) pubsub_metadata = proto.RepeatedField( - proto.MESSAGE, number=7, message="PubsubSnapshotMetadata", + proto.MESSAGE, + number=7, + message="PubsubSnapshotMetadata", + ) + description = proto.Field( + proto.STRING, + number=8, + ) + disk_size_bytes = proto.Field( + proto.INT64, + number=9, + ) + region = proto.Field( + proto.STRING, + number=10, ) - description = proto.Field(proto.STRING, number=8,) - disk_size_bytes = proto.Field(proto.INT64, number=9,) - region = proto.Field(proto.STRING, number=10,) class GetSnapshotRequest(proto.Message): @@ -120,9 +160,18 @@ class GetSnapshotRequest(proto.Message): The location that contains this snapshot. """ - project_id = proto.Field(proto.STRING, number=1,) - snapshot_id = proto.Field(proto.STRING, number=2,) - location = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + snapshot_id = proto.Field( + proto.STRING, + number=2, + ) + location = proto.Field( + proto.STRING, + number=3, + ) class DeleteSnapshotRequest(proto.Message): @@ -138,14 +187,22 @@ class DeleteSnapshotRequest(proto.Message): The location that contains this snapshot. """ - project_id = proto.Field(proto.STRING, number=1,) - snapshot_id = proto.Field(proto.STRING, number=2,) - location = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + snapshot_id = proto.Field( + proto.STRING, + number=2, + ) + location = proto.Field( + proto.STRING, + number=3, + ) class DeleteSnapshotResponse(proto.Message): - r"""Response from deleting a snapshot. - """ + r"""Response from deleting a snapshot.""" class ListSnapshotsRequest(proto.Message): @@ -161,9 +218,18 @@ class ListSnapshotsRequest(proto.Message): The location to list snapshots in. """ - project_id = proto.Field(proto.STRING, number=1,) - job_id = proto.Field(proto.STRING, number=3,) - location = proto.Field(proto.STRING, number=2,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=3, + ) + location = proto.Field( + proto.STRING, + number=2, + ) class ListSnapshotsResponse(proto.Message): @@ -174,7 +240,11 @@ class ListSnapshotsResponse(proto.Message): Returned snapshots. 
""" - snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message="Snapshot",) + snapshots = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Snapshot", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py index bcc28120d408..e067637bbd20 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py @@ -59,16 +59,28 @@ class TopologyConfig(proto.Message): """ computations = proto.RepeatedField( - proto.MESSAGE, number=1, message="ComputationTopology", + proto.MESSAGE, + number=1, + message="ComputationTopology", ) data_disk_assignments = proto.RepeatedField( - proto.MESSAGE, number=2, message="DataDiskAssignment", + proto.MESSAGE, + number=2, + message="DataDiskAssignment", ) user_stage_to_computation_name_map = proto.MapField( - proto.STRING, proto.STRING, number=3, + proto.STRING, + proto.STRING, + number=3, + ) + forwarding_key_bits = proto.Field( + proto.INT32, + number=4, + ) + persistent_state_version = proto.Field( + proto.INT32, + number=5, ) - forwarding_key_bits = proto.Field(proto.INT32, number=4,) - persistent_state_version = proto.Field(proto.INT32, number=5,) class PubsubLocation(proto.Message): @@ -102,13 +114,34 @@ class PubsubLocation(proto.Message): pubsub attributes. """ - topic = proto.Field(proto.STRING, number=1,) - subscription = proto.Field(proto.STRING, number=2,) - timestamp_label = proto.Field(proto.STRING, number=3,) - id_label = proto.Field(proto.STRING, number=4,) - drop_late_data = proto.Field(proto.BOOL, number=5,) - tracking_subscription = proto.Field(proto.STRING, number=6,) - with_attributes = proto.Field(proto.BOOL, number=7,) + topic = proto.Field( + proto.STRING, + number=1, + ) + subscription = proto.Field( + proto.STRING, + number=2, + ) + timestamp_label = proto.Field( + proto.STRING, + number=3, + ) + id_label = proto.Field( + proto.STRING, + number=4, + ) + drop_late_data = proto.Field( + proto.BOOL, + number=5, + ) + tracking_subscription = proto.Field( + proto.STRING, + number=6, + ) + with_attributes = proto.Field( + proto.BOOL, + number=7, + ) class StreamingStageLocation(proto.Message): @@ -121,7 +154,10 @@ class StreamingStageLocation(proto.Message): streaming Dataflow job. """ - stream_id = proto.Field(proto.STRING, number=1,) + stream_id = proto.Field( + proto.STRING, + number=1, + ) class StreamingSideInputLocation(proto.Message): @@ -136,8 +172,14 @@ class StreamingSideInputLocation(proto.Message): input is stored. """ - tag = proto.Field(proto.STRING, number=1,) - state_family = proto.Field(proto.STRING, number=2,) + tag = proto.Field( + proto.STRING, + number=1, + ) + state_family = proto.Field( + proto.STRING, + number=2, + ) class CustomSourceLocation(proto.Message): @@ -148,7 +190,10 @@ class CustomSourceLocation(proto.Message): Whether this source is stateful. 
""" - stateful = proto.Field(proto.BOOL, number=1,) + stateful = proto.Field( + proto.BOOL, + number=1, + ) class StreamLocation(proto.Message): @@ -183,16 +228,28 @@ class StreamLocation(proto.Message): """ streaming_stage_location = proto.Field( - proto.MESSAGE, number=1, oneof="location", message="StreamingStageLocation", + proto.MESSAGE, + number=1, + oneof="location", + message="StreamingStageLocation", ) pubsub_location = proto.Field( - proto.MESSAGE, number=2, oneof="location", message="PubsubLocation", + proto.MESSAGE, + number=2, + oneof="location", + message="PubsubLocation", ) side_input_location = proto.Field( - proto.MESSAGE, number=3, oneof="location", message="StreamingSideInputLocation", + proto.MESSAGE, + number=3, + oneof="location", + message="StreamingSideInputLocation", ) custom_source_location = proto.Field( - proto.MESSAGE, number=4, oneof="location", message="CustomSourceLocation", + proto.MESSAGE, + number=4, + oneof="location", + message="CustomSourceLocation", ) @@ -207,8 +264,14 @@ class StateFamilyConfig(proto.Message): operation. """ - state_family = proto.Field(proto.STRING, number=1,) - is_read = proto.Field(proto.BOOL, number=2,) + state_family = proto.Field( + proto.STRING, + number=1, + ) + is_read = proto.Field( + proto.BOOL, + number=2, + ) class ComputationTopology(proto.Message): @@ -229,15 +292,33 @@ class ComputationTopology(proto.Message): The state family values. """ - system_stage_name = proto.Field(proto.STRING, number=1,) - computation_id = proto.Field(proto.STRING, number=5,) + system_stage_name = proto.Field( + proto.STRING, + number=1, + ) + computation_id = proto.Field( + proto.STRING, + number=5, + ) key_ranges = proto.RepeatedField( - proto.MESSAGE, number=2, message="KeyRangeLocation", + proto.MESSAGE, + number=2, + message="KeyRangeLocation", + ) + inputs = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="StreamLocation", + ) + outputs = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="StreamLocation", ) - inputs = proto.RepeatedField(proto.MESSAGE, number=3, message="StreamLocation",) - outputs = proto.RepeatedField(proto.MESSAGE, number=4, message="StreamLocation",) state_families = proto.RepeatedField( - proto.MESSAGE, number=7, message="StateFamilyConfig", + proto.MESSAGE, + number=7, + message="StateFamilyConfig", ) @@ -268,11 +349,26 @@ class KeyRangeLocation(proto.Message): in the worker local filesystem. """ - start = proto.Field(proto.STRING, number=1,) - end = proto.Field(proto.STRING, number=2,) - delivery_endpoint = proto.Field(proto.STRING, number=3,) - data_disk = proto.Field(proto.STRING, number=5,) - deprecated_persistent_directory = proto.Field(proto.STRING, number=4,) + start = proto.Field( + proto.STRING, + number=1, + ) + end = proto.Field( + proto.STRING, + number=2, + ) + delivery_endpoint = proto.Field( + proto.STRING, + number=3, + ) + data_disk = proto.Field( + proto.STRING, + number=5, + ) + deprecated_persistent_directory = proto.Field( + proto.STRING, + number=4, + ) class MountedDataDisk(proto.Message): @@ -287,7 +383,10 @@ class MountedDataDisk(proto.Message): "myproject-1014-104817-4c2-harness-0-disk-1". """ - data_disk = proto.Field(proto.STRING, number=1,) + data_disk = proto.Field( + proto.STRING, + number=1, + ) class DataDiskAssignment(proto.Message): @@ -308,8 +407,14 @@ class DataDiskAssignment(proto.Message): }. 
""" - vm_instance = proto.Field(proto.STRING, number=1,) - data_disks = proto.RepeatedField(proto.STRING, number=2,) + vm_instance = proto.Field( + proto.STRING, + number=1, + ) + data_disks = proto.RepeatedField( + proto.STRING, + number=2, + ) class KeyRangeDataDiskAssignment(proto.Message): @@ -332,9 +437,18 @@ class KeyRangeDataDiskAssignment(proto.Message): "myproject-1014-104817-4c2-harness-0-disk-1". """ - start = proto.Field(proto.STRING, number=1,) - end = proto.Field(proto.STRING, number=2,) - data_disk = proto.Field(proto.STRING, number=3,) + start = proto.Field( + proto.STRING, + number=1, + ) + end = proto.Field( + proto.STRING, + number=2, + ) + data_disk = proto.Field( + proto.STRING, + number=3, + ) class StreamingComputationRanges(proto.Message): @@ -349,9 +463,14 @@ class StreamingComputationRanges(proto.Message): computation. """ - computation_id = proto.Field(proto.STRING, number=1,) + computation_id = proto.Field( + proto.STRING, + number=1, + ) range_assignments = proto.RepeatedField( - proto.MESSAGE, number=2, message="KeyRangeDataDiskAssignment", + proto.MESSAGE, + number=2, + message="KeyRangeDataDiskAssignment", ) @@ -367,8 +486,14 @@ class StreamingApplianceSnapshotConfig(proto.Message): appliance state. """ - snapshot_id = proto.Field(proto.STRING, number=1,) - import_state_endpoint = proto.Field(proto.STRING, number=2,) + snapshot_id = proto.Field( + proto.STRING, + number=1, + ) + import_state_endpoint = proto.Field( + proto.STRING, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py index 5bd9298bc6b1..7fd84332c7f2 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py @@ -72,7 +72,11 @@ class LaunchFlexTemplateResponse(proto.Message): launched. """ - job = proto.Field(proto.MESSAGE, number=1, message=jobs.Job,) + job = proto.Field( + proto.MESSAGE, + number=1, + message=jobs.Job, + ) class ContainerSpec(proto.Message): @@ -91,11 +95,24 @@ class ContainerSpec(proto.Message): Default runtime environment for the job. """ - image = proto.Field(proto.STRING, number=1,) - metadata = proto.Field(proto.MESSAGE, number=2, message="TemplateMetadata",) - sdk_info = proto.Field(proto.MESSAGE, number=3, message="SDKInfo",) + image = proto.Field( + proto.STRING, + number=1, + ) + metadata = proto.Field( + proto.MESSAGE, + number=2, + message="TemplateMetadata", + ) + sdk_info = proto.Field( + proto.MESSAGE, + number=3, + message="SDKInfo", + ) default_environment = proto.Field( - proto.MESSAGE, number=4, message="FlexTemplateRuntimeEnvironment", + proto.MESSAGE, + number=4, + message="FlexTemplateRuntimeEnvironment", ) @@ -142,18 +159,45 @@ class LaunchFlexTemplateParameter(proto.Message): update jobs. 
Ex:{"oldTransformName":"newTransformName",...}' """ - job_name = proto.Field(proto.STRING, number=1,) + job_name = proto.Field( + proto.STRING, + number=1, + ) container_spec = proto.Field( - proto.MESSAGE, number=4, oneof="template", message="ContainerSpec", + proto.MESSAGE, + number=4, + oneof="template", + message="ContainerSpec", + ) + container_spec_gcs_path = proto.Field( + proto.STRING, + number=5, + oneof="template", + ) + parameters = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + launch_options = proto.MapField( + proto.STRING, + proto.STRING, + number=6, ) - container_spec_gcs_path = proto.Field(proto.STRING, number=5, oneof="template",) - parameters = proto.MapField(proto.STRING, proto.STRING, number=2,) - launch_options = proto.MapField(proto.STRING, proto.STRING, number=6,) environment = proto.Field( - proto.MESSAGE, number=7, message="FlexTemplateRuntimeEnvironment", + proto.MESSAGE, + number=7, + message="FlexTemplateRuntimeEnvironment", + ) + update = proto.Field( + proto.BOOL, + number=8, + ) + transform_name_mappings = proto.MapField( + proto.STRING, + proto.STRING, + number=9, ) - update = proto.Field(proto.BOOL, number=8,) - transform_name_mappings = proto.MapField(proto.STRING, proto.STRING, number=9,) class FlexTemplateRuntimeEnvironment(proto.Message): @@ -259,35 +303,102 @@ class FlexTemplateRuntimeEnvironment(proto.Message): job. The default is n1-standard-1. """ - num_workers = proto.Field(proto.INT32, number=1,) - max_workers = proto.Field(proto.INT32, number=2,) - zone = proto.Field(proto.STRING, number=3,) - service_account_email = proto.Field(proto.STRING, number=4,) - temp_location = proto.Field(proto.STRING, number=5,) - machine_type = proto.Field(proto.STRING, number=6,) - additional_experiments = proto.RepeatedField(proto.STRING, number=7,) - network = proto.Field(proto.STRING, number=8,) - subnetwork = proto.Field(proto.STRING, number=9,) - additional_user_labels = proto.MapField(proto.STRING, proto.STRING, number=10,) - kms_key_name = proto.Field(proto.STRING, number=11,) + num_workers = proto.Field( + proto.INT32, + number=1, + ) + max_workers = proto.Field( + proto.INT32, + number=2, + ) + zone = proto.Field( + proto.STRING, + number=3, + ) + service_account_email = proto.Field( + proto.STRING, + number=4, + ) + temp_location = proto.Field( + proto.STRING, + number=5, + ) + machine_type = proto.Field( + proto.STRING, + number=6, + ) + additional_experiments = proto.RepeatedField( + proto.STRING, + number=7, + ) + network = proto.Field( + proto.STRING, + number=8, + ) + subnetwork = proto.Field( + proto.STRING, + number=9, + ) + additional_user_labels = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + kms_key_name = proto.Field( + proto.STRING, + number=11, + ) ip_configuration = proto.Field( - proto.ENUM, number=12, enum=gd_environment.WorkerIPAddressConfiguration, + proto.ENUM, + number=12, + enum=gd_environment.WorkerIPAddressConfiguration, + ) + worker_region = proto.Field( + proto.STRING, + number=13, + ) + worker_zone = proto.Field( + proto.STRING, + number=14, + ) + enable_streaming_engine = proto.Field( + proto.BOOL, + number=15, ) - worker_region = proto.Field(proto.STRING, number=13,) - worker_zone = proto.Field(proto.STRING, number=14,) - enable_streaming_engine = proto.Field(proto.BOOL, number=15,) flexrs_goal = proto.Field( - proto.ENUM, number=16, enum=gd_environment.FlexResourceSchedulingGoal, + proto.ENUM, + number=16, + enum=gd_environment.FlexResourceSchedulingGoal, + ) + staging_location = 
proto.Field( + proto.STRING, + number=17, + ) + sdk_container_image = proto.Field( + proto.STRING, + number=18, + ) + disk_size_gb = proto.Field( + proto.INT32, + number=20, ) - staging_location = proto.Field(proto.STRING, number=17,) - sdk_container_image = proto.Field(proto.STRING, number=18,) - disk_size_gb = proto.Field(proto.INT32, number=20,) autoscaling_algorithm = proto.Field( - proto.ENUM, number=21, enum=gd_environment.AutoscalingAlgorithm, + proto.ENUM, + number=21, + enum=gd_environment.AutoscalingAlgorithm, + ) + dump_heap_on_oom = proto.Field( + proto.BOOL, + number=22, + ) + save_heap_dumps_to_gcs_path = proto.Field( + proto.STRING, + number=23, + ) + launcher_machine_type = proto.Field( + proto.STRING, + number=24, ) - dump_heap_on_oom = proto.Field(proto.BOOL, number=22,) - save_heap_dumps_to_gcs_path = proto.Field(proto.STRING, number=23,) - launcher_machine_type = proto.Field(proto.STRING, number=24,) class LaunchFlexTemplateRequest(proto.Message): @@ -309,12 +420,23 @@ class LaunchFlexTemplateRequest(proto.Message): actually executed. Defaults to false. """ - project_id = proto.Field(proto.STRING, number=1,) + project_id = proto.Field( + proto.STRING, + number=1, + ) launch_parameter = proto.Field( - proto.MESSAGE, number=2, message="LaunchFlexTemplateParameter", + proto.MESSAGE, + number=2, + message="LaunchFlexTemplateParameter", + ) + location = proto.Field( + proto.STRING, + number=3, + ) + validate_only = proto.Field( + proto.BOOL, + number=4, ) - location = proto.Field(proto.STRING, number=3,) - validate_only = proto.Field(proto.BOOL, number=4,) class RuntimeEnvironment(proto.Message): @@ -394,24 +516,72 @@ class RuntimeEnvironment(proto.Message): job. """ - num_workers = proto.Field(proto.INT32, number=11,) - max_workers = proto.Field(proto.INT32, number=1,) - zone = proto.Field(proto.STRING, number=2,) - service_account_email = proto.Field(proto.STRING, number=3,) - temp_location = proto.Field(proto.STRING, number=4,) - bypass_temp_dir_validation = proto.Field(proto.BOOL, number=5,) - machine_type = proto.Field(proto.STRING, number=6,) - additional_experiments = proto.RepeatedField(proto.STRING, number=7,) - network = proto.Field(proto.STRING, number=8,) - subnetwork = proto.Field(proto.STRING, number=9,) - additional_user_labels = proto.MapField(proto.STRING, proto.STRING, number=10,) - kms_key_name = proto.Field(proto.STRING, number=12,) + num_workers = proto.Field( + proto.INT32, + number=11, + ) + max_workers = proto.Field( + proto.INT32, + number=1, + ) + zone = proto.Field( + proto.STRING, + number=2, + ) + service_account_email = proto.Field( + proto.STRING, + number=3, + ) + temp_location = proto.Field( + proto.STRING, + number=4, + ) + bypass_temp_dir_validation = proto.Field( + proto.BOOL, + number=5, + ) + machine_type = proto.Field( + proto.STRING, + number=6, + ) + additional_experiments = proto.RepeatedField( + proto.STRING, + number=7, + ) + network = proto.Field( + proto.STRING, + number=8, + ) + subnetwork = proto.Field( + proto.STRING, + number=9, + ) + additional_user_labels = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + kms_key_name = proto.Field( + proto.STRING, + number=12, + ) ip_configuration = proto.Field( - proto.ENUM, number=14, enum=gd_environment.WorkerIPAddressConfiguration, + proto.ENUM, + number=14, + enum=gd_environment.WorkerIPAddressConfiguration, + ) + worker_region = proto.Field( + proto.STRING, + number=15, + ) + worker_zone = proto.Field( + proto.STRING, + number=16, + ) + enable_streaming_engine = 
proto.Field( + proto.BOOL, + number=17, ) - worker_region = proto.Field(proto.STRING, number=15,) - worker_zone = proto.Field(proto.STRING, number=16,) - enable_streaming_engine = proto.Field(proto.BOOL, number=17,) class ParameterMetadata(proto.Message): @@ -440,13 +610,36 @@ class ParameterMetadata(proto.Message): this parameter. """ - name = proto.Field(proto.STRING, number=1,) - label = proto.Field(proto.STRING, number=2,) - help_text = proto.Field(proto.STRING, number=3,) - is_optional = proto.Field(proto.BOOL, number=4,) - regexes = proto.RepeatedField(proto.STRING, number=5,) - param_type = proto.Field(proto.ENUM, number=6, enum="ParameterType",) - custom_metadata = proto.MapField(proto.STRING, proto.STRING, number=7,) + name = proto.Field( + proto.STRING, + number=1, + ) + label = proto.Field( + proto.STRING, + number=2, + ) + help_text = proto.Field( + proto.STRING, + number=3, + ) + is_optional = proto.Field( + proto.BOOL, + number=4, + ) + regexes = proto.RepeatedField( + proto.STRING, + number=5, + ) + param_type = proto.Field( + proto.ENUM, + number=6, + enum="ParameterType", + ) + custom_metadata = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) class TemplateMetadata(proto.Message): @@ -461,10 +654,18 @@ class TemplateMetadata(proto.Message): The parameters for the template. """ - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=2,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) parameters = proto.RepeatedField( - proto.MESSAGE, number=3, message="ParameterMetadata", + proto.MESSAGE, + number=3, + message="ParameterMetadata", ) @@ -484,8 +685,15 @@ class Language(proto.Enum): JAVA = 1 PYTHON = 2 - language = proto.Field(proto.ENUM, number=1, enum=Language,) - version = proto.Field(proto.STRING, number=2,) + language = proto.Field( + proto.ENUM, + number=1, + enum=Language, + ) + version = proto.Field( + proto.STRING, + number=2, + ) class RuntimeMetadata(proto.Message): @@ -498,9 +706,15 @@ class RuntimeMetadata(proto.Message): The parameters for the template. """ - sdk_info = proto.Field(proto.MESSAGE, number=1, message="SDKInfo",) + sdk_info = proto.Field( + proto.MESSAGE, + number=1, + message="SDKInfo", + ) parameters = proto.RepeatedField( - proto.MESSAGE, number=2, message="ParameterMetadata", + proto.MESSAGE, + number=2, + message="ParameterMetadata", ) @@ -532,12 +746,33 @@ class CreateJobFromTemplateRequest(proto.Message): to which to direct the request. 
""" - project_id = proto.Field(proto.STRING, number=1,) - job_name = proto.Field(proto.STRING, number=4,) - gcs_path = proto.Field(proto.STRING, number=2, oneof="template",) - parameters = proto.MapField(proto.STRING, proto.STRING, number=3,) - environment = proto.Field(proto.MESSAGE, number=5, message="RuntimeEnvironment",) - location = proto.Field(proto.STRING, number=6,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_name = proto.Field( + proto.STRING, + number=4, + ) + gcs_path = proto.Field( + proto.STRING, + number=2, + oneof="template", + ) + parameters = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + environment = proto.Field( + proto.MESSAGE, + number=5, + message="RuntimeEnvironment", + ) + location = proto.Field( + proto.STRING, + number=6, + ) class GetTemplateRequest(proto.Message): @@ -568,10 +803,24 @@ class TemplateView(proto.Enum): r"""The various views of a template that may be retrieved.""" METADATA_ONLY = 0 - project_id = proto.Field(proto.STRING, number=1,) - gcs_path = proto.Field(proto.STRING, number=2, oneof="template",) - view = proto.Field(proto.ENUM, number=3, enum=TemplateView,) - location = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + gcs_path = proto.Field( + proto.STRING, + number=2, + oneof="template", + ) + view = proto.Field( + proto.ENUM, + number=3, + enum=TemplateView, + ) + location = proto.Field( + proto.STRING, + number=4, + ) class GetTemplateResponse(proto.Message): @@ -597,10 +846,26 @@ class TemplateType(proto.Enum): LEGACY = 1 FLEX = 2 - status = proto.Field(proto.MESSAGE, number=1, message=status_pb2.Status,) - metadata = proto.Field(proto.MESSAGE, number=2, message="TemplateMetadata",) - template_type = proto.Field(proto.ENUM, number=3, enum=TemplateType,) - runtime_metadata = proto.Field(proto.MESSAGE, number=4, message="RuntimeMetadata",) + status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + metadata = proto.Field( + proto.MESSAGE, + number=2, + message="TemplateMetadata", + ) + template_type = proto.Field( + proto.ENUM, + number=3, + enum=TemplateType, + ) + runtime_metadata = proto.Field( + proto.MESSAGE, + number=4, + message="RuntimeMetadata", + ) class LaunchTemplateParameters(proto.Message): @@ -625,11 +890,29 @@ class LaunchTemplateParameters(proto.Message): the new job. """ - job_name = proto.Field(proto.STRING, number=1,) - parameters = proto.MapField(proto.STRING, proto.STRING, number=2,) - environment = proto.Field(proto.MESSAGE, number=3, message="RuntimeEnvironment",) - update = proto.Field(proto.BOOL, number=4,) - transform_name_mapping = proto.MapField(proto.STRING, proto.STRING, number=5,) + job_name = proto.Field( + proto.STRING, + number=1, + ) + parameters = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + environment = proto.Field( + proto.MESSAGE, + number=3, + message="RuntimeEnvironment", + ) + update = proto.Field( + proto.BOOL, + number=4, + ) + transform_name_mapping = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) class LaunchTemplateRequest(proto.Message): @@ -670,9 +953,19 @@ class LaunchTemplateRequest(proto.Message): to which to direct the request. 
""" - project_id = proto.Field(proto.STRING, number=1,) - validate_only = proto.Field(proto.BOOL, number=2,) - gcs_path = proto.Field(proto.STRING, number=3, oneof="template",) + project_id = proto.Field( + proto.STRING, + number=1, + ) + validate_only = proto.Field( + proto.BOOL, + number=2, + ) + gcs_path = proto.Field( + proto.STRING, + number=3, + oneof="template", + ) dynamic_template = proto.Field( proto.MESSAGE, number=6, @@ -680,9 +973,14 @@ class LaunchTemplateRequest(proto.Message): message="DynamicTemplateLaunchParams", ) launch_parameters = proto.Field( - proto.MESSAGE, number=4, message="LaunchTemplateParameters", + proto.MESSAGE, + number=4, + message="LaunchTemplateParameters", + ) + location = proto.Field( + proto.STRING, + number=5, ) - location = proto.Field(proto.STRING, number=5,) class LaunchTemplateResponse(proto.Message): @@ -695,7 +993,11 @@ class LaunchTemplateResponse(proto.Message): launched. """ - job = proto.Field(proto.MESSAGE, number=1, message=jobs.Job,) + job = proto.Field( + proto.MESSAGE, + number=1, + message=jobs.Job, + ) class InvalidTemplateParameters(proto.Message): @@ -719,11 +1021,19 @@ class ParameterViolation(proto.Message): validate. """ - parameter = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=2,) + parameter = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) parameter_violations = proto.RepeatedField( - proto.MESSAGE, number=1, message=ParameterViolation, + proto.MESSAGE, + number=1, + message=ParameterViolation, ) @@ -741,8 +1051,14 @@ class DynamicTemplateLaunchParams(proto.Message): Cloud Storage URL, beginning with ``gs://``. """ - gcs_path = proto.Field(proto.STRING, number=1,) - staging_location = proto.Field(proto.STRING, number=2,) + gcs_path = proto.Field( + proto.STRING, + number=1, + ) + staging_location = proto.Field( + proto.STRING, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/noxfile.py b/packages/google-cloud-dataflow-client/noxfile.py index 2a2001c49998..3addb4ed9431 100644 --- a/packages/google-cloud-dataflow-client/noxfile.py +++ b/packages/google-cloud-dataflow-client/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -57,7 +57,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +69,8 @@ def blacken(session): """Run black. 
Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index 9e476bcb06d6..0ce68dba72cc 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -91,7 +91,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [FlexTemplatesServiceClient, FlexTemplatesServiceAsyncClient,] + "client_class", + [ + FlexTemplatesServiceClient, + FlexTemplatesServiceAsyncClient, + ], ) def test_flex_templates_service_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -133,7 +137,11 @@ def test_flex_templates_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [FlexTemplatesServiceClient, FlexTemplatesServiceAsyncClient,] + "client_class", + [ + FlexTemplatesServiceClient, + FlexTemplatesServiceAsyncClient, + ], ) def test_flex_templates_service_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -515,7 +523,9 @@ def test_flex_templates_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -660,10 +670,17 @@ def test_flex_templates_service_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [templates.LaunchFlexTemplateRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + templates.LaunchFlexTemplateRequest, + dict, + ], +) def test_launch_flex_template(request_type, transport: str = "grpc"): client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -691,7 +708,8 @@ def test_launch_flex_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -709,7 +727,8 @@ async def test_launch_flex_template_async( transport: str = "grpc_asyncio", request_type=templates.LaunchFlexTemplateRequest ): client = FlexTemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -747,7 +766,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -768,7 +788,8 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = FlexTemplatesServiceClient( - client_options=options, transport=transport, + client_options=options, + transport=transport, ) # It is an error to provide an api_key and a credential. @@ -785,7 +806,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = FlexTemplatesServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -833,7 +855,10 @@ def test_transport_grpc_default(): client = FlexTemplatesServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) - assert isinstance(client.transport, transports.FlexTemplatesServiceGrpcTransport,) + assert isinstance( + client.transport, + transports.FlexTemplatesServiceGrpcTransport, + ) def test_flex_templates_service_base_transport_error(): @@ -876,7 +901,8 @@ def test_flex_templates_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.FlexTemplatesServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1056,7 +1082,8 @@ def test_flex_templates_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.FlexTemplatesServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1068,7 +1095,8 @@ def test_flex_templates_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.FlexTemplatesServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1197,7 +1225,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = FlexTemplatesServiceClient.common_folder_path(folder) assert expected == actual @@ -1215,7 +1245,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = FlexTemplatesServiceClient.common_organization_path(organization) assert expected == actual @@ -1233,7 +1265,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = FlexTemplatesServiceClient.common_project_path(project) assert expected == actual @@ -1253,7 +1287,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = FlexTemplatesServiceClient.common_location_path(project, location) assert expected == actual @@ -1278,7 +1313,8 @@ def test_client_with_default_client_info(): transports.FlexTemplatesServiceTransport, "_prep_wrapped_messages" ) as prep: client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1287,7 +1323,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = FlexTemplatesServiceClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1295,7 +1332,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = FlexTemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index ec978a6db9db..40fb0948ba4d 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -87,7 +87,13 @@ def test__get_default_mtls_endpoint(): assert JobsV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [JobsV1Beta3Client, JobsV1Beta3AsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + JobsV1Beta3Client, + 
JobsV1Beta3AsyncClient, + ], +) def test_jobs_v1_beta3_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -127,7 +133,13 @@ def test_jobs_v1_beta3_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [JobsV1Beta3Client, JobsV1Beta3AsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + JobsV1Beta3Client, + JobsV1Beta3AsyncClient, + ], +) def test_jobs_v1_beta3_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -482,7 +494,9 @@ def test_jobs_v1_beta3_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -615,10 +629,17 @@ def test_jobs_v1_beta3_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [jobs.CreateJobRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + jobs.CreateJobRequest, + dict, + ], +) def test_create_job(request_type, transport: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -673,7 +694,8 @@ def test_create_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -689,7 +711,8 @@ async def test_create_job_async( transport: str = "grpc_asyncio", request_type=jobs.CreateJobRequest ): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -747,10 +770,17 @@ async def test_create_job_async_from_dict(): await test_create_job_async(request_type=dict) -@pytest.mark.parametrize("request_type", [jobs.GetJobRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + jobs.GetJobRequest, + dict, + ], +) def test_get_job(request_type, transport: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -805,7 +835,8 @@ def test_get_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -821,7 +852,8 @@ async def test_get_job_async( transport: str = "grpc_asyncio", request_type=jobs.GetJobRequest ): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -879,10 +911,17 @@ async def test_get_job_async_from_dict(): await test_get_job_async(request_type=dict) -@pytest.mark.parametrize("request_type", [jobs.UpdateJobRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + jobs.UpdateJobRequest, + dict, + ], +) def test_update_job(request_type, transport: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -937,7 +976,8 @@ def test_update_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -953,7 +993,8 @@ async def test_update_job_async( transport: str = "grpc_asyncio", request_type=jobs.UpdateJobRequest ): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1011,10 +1052,17 @@ async def test_update_job_async_from_dict(): await test_update_job_async(request_type=dict) -@pytest.mark.parametrize("request_type", [jobs.ListJobsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) def test_list_jobs(request_type, transport: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1043,7 +1091,8 @@ def test_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1059,7 +1108,8 @@ async def test_list_jobs_async( transport: str = "grpc_asyncio", request_type=jobs.ListJobsRequest ): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1070,7 +1120,9 @@ async def test_list_jobs_async( with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - jobs.ListJobsResponse(next_page_token="next_page_token_value",) + jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_jobs(request) @@ -1091,7 +1143,8 @@ async def test_list_jobs_async_from_dict(): def test_list_jobs_pager(transport_name: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1099,11 +1152,29 @@ def test_list_jobs_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) @@ -1119,7 +1190,8 @@ def test_list_jobs_pager(transport_name: str = "grpc"): def test_list_jobs_pages(transport_name: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1127,11 +1199,29 @@ def test_list_jobs_pages(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) pages = list(client.list_jobs(request={}).pages) @@ -1141,7 +1231,9 @@ def test_list_jobs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_jobs_async_pager(): - client = JobsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1150,14 +1242,34 @@ async def test_list_jobs_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) - async_pager = await client.list_jobs(request={},) + async_pager = await client.list_jobs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1169,7 +1281,9 @@ async def test_list_jobs_async_pager(): @pytest.mark.asyncio async def test_list_jobs_async_pages(): - client = JobsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1178,11 +1292,29 @@ async def test_list_jobs_async_pages(): # Set the response to a series of pages. call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) pages = [] @@ -1192,10 +1324,17 @@ async def test_list_jobs_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [jobs.ListJobsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) def test_aggregated_list_jobs(request_type, transport: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1226,7 +1365,8 @@ def test_aggregated_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1244,7 +1384,8 @@ async def test_aggregated_list_jobs_async( transport: str = "grpc_asyncio", request_type=jobs.ListJobsRequest ): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1257,7 +1398,9 @@ async def test_aggregated_list_jobs_async( ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - jobs.ListJobsResponse(next_page_token="next_page_token_value",) + jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.aggregated_list_jobs(request) @@ -1278,7 +1421,8 @@ async def test_aggregated_list_jobs_async_from_dict(): def test_aggregated_list_jobs_pager(transport_name: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1288,11 +1432,29 @@ def test_aggregated_list_jobs_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) @@ -1308,7 +1470,8 @@ def test_aggregated_list_jobs_pager(transport_name: str = "grpc"): def test_aggregated_list_jobs_pages(transport_name: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1318,11 +1481,29 @@ def test_aggregated_list_jobs_pages(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) pages = list(client.aggregated_list_jobs(request={}).pages) @@ -1332,7 +1513,9 @@ def test_aggregated_list_jobs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_aggregated_list_jobs_async_pager(): - client = JobsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1343,14 +1526,34 @@ async def test_aggregated_list_jobs_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) - async_pager = await client.aggregated_list_jobs(request={},) + async_pager = await client.aggregated_list_jobs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1362,7 +1565,9 @@ async def test_aggregated_list_jobs_async_pager(): @pytest.mark.asyncio async def test_aggregated_list_jobs_async_pages(): - client = JobsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1373,11 +1578,29 @@ async def test_aggregated_list_jobs_async_pages(): # Set the response to a series of pages. call.side_effect = ( jobs.ListJobsResponse( - jobs=[jobs.Job(), jobs.Job(), jobs.Job(),], next_page_token="abc", + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], ), - jobs.ListJobsResponse(jobs=[], next_page_token="def",), - jobs.ListJobsResponse(jobs=[jobs.Job(),], next_page_token="ghi",), - jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job(),],), RuntimeError, ) pages = [] @@ -1387,10 +1610,17 @@ async def test_aggregated_list_jobs_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [jobs.CheckActiveJobsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + jobs.CheckActiveJobsRequest, + dict, + ], +) def test_check_active_jobs(request_type, transport: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1402,7 +1632,9 @@ def test_check_active_jobs(request_type, transport: str = "grpc"): type(client.transport.check_active_jobs), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = jobs.CheckActiveJobsResponse(active_jobs_exist=True,) + call.return_value = jobs.CheckActiveJobsResponse( + active_jobs_exist=True, + ) response = client.check_active_jobs(request) # Establish that the underlying gRPC stub method was called. @@ -1419,7 +1651,8 @@ def test_check_active_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1437,7 +1670,8 @@ async def test_check_active_jobs_async( transport: str = "grpc_asyncio", request_type=jobs.CheckActiveJobsRequest ): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1450,7 +1684,9 @@ async def test_check_active_jobs_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - jobs.CheckActiveJobsResponse(active_jobs_exist=True,) + jobs.CheckActiveJobsResponse( + active_jobs_exist=True, + ) ) response = await client.check_active_jobs(request) @@ -1469,10 +1705,17 @@ async def test_check_active_jobs_async_from_dict(): await test_check_active_jobs_async(request_type=dict) -@pytest.mark.parametrize("request_type", [jobs.SnapshotJobRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + jobs.SnapshotJobRequest, + dict, + ], +) def test_snapshot_job(request_type, transport: str = "grpc"): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1513,7 +1756,8 @@ def test_snapshot_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1529,7 +1773,8 @@ async def test_snapshot_job_async( transport: str = "grpc_asyncio", request_type=jobs.SnapshotJobRequest ): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1580,7 +1825,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1600,7 +1846,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = JobsV1Beta3Client(client_options=options, transport=transport,) + client = JobsV1Beta3Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. 
options = mock.Mock() @@ -1616,7 +1865,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1646,7 +1896,10 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport,], + [ + transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3GrpcAsyncIOTransport, + ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1658,8 +1911,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = JobsV1Beta3Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.JobsV1Beta3GrpcTransport,) + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.JobsV1Beta3GrpcTransport, + ) def test_jobs_v1_beta3_base_transport_error(): @@ -1710,7 +1968,8 @@ def test_jobs_v1_beta3_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.JobsV1Beta3Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1755,7 +2014,10 @@ def test_jobs_v1_beta3_auth_adc(): @pytest.mark.parametrize( "transport_class", - [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport,], + [ + transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3GrpcAsyncIOTransport, + ], ) def test_jobs_v1_beta3_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use @@ -1882,7 +2144,8 @@ def test_jobs_v1_beta3_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.JobsV1Beta3GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1894,7 +2157,8 @@ def test_jobs_v1_beta3_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.JobsV1Beta3GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2015,7 +2279,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = JobsV1Beta3Client.common_folder_path(folder) assert expected == actual @@ -2033,7 +2299,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = JobsV1Beta3Client.common_organization_path(organization) assert expected == actual @@ -2051,7 +2319,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = JobsV1Beta3Client.common_project_path(project) assert expected == actual @@ -2071,7 +2341,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = JobsV1Beta3Client.common_location_path(project, location) assert expected == actual @@ -2096,7 +2367,8 @@ def test_client_with_default_client_info(): transports.JobsV1Beta3Transport, "_prep_wrapped_messages" ) as prep: client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2105,7 +2377,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = JobsV1Beta3Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2113,7 +2386,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index fbc90e074792..5eed376f2453 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -90,7 +90,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [MessagesV1Beta3Client, MessagesV1Beta3AsyncClient,] + "client_class", + [ + MessagesV1Beta3Client, + MessagesV1Beta3AsyncClient, + ], ) def test_messages_v1_beta3_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -132,7 +136,11 
@@ def test_messages_v1_beta3_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [MessagesV1Beta3Client, MessagesV1Beta3AsyncClient,] + "client_class", + [ + MessagesV1Beta3Client, + MessagesV1Beta3AsyncClient, + ], ) def test_messages_v1_beta3_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -506,7 +514,9 @@ def test_messages_v1_beta3_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -651,10 +661,17 @@ def test_messages_v1_beta3_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [messages.ListJobMessagesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + messages.ListJobMessagesRequest, + dict, + ], +) def test_list_job_messages(request_type, transport: str = "grpc"): client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -685,7 +702,8 @@ def test_list_job_messages_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -703,7 +721,8 @@ async def test_list_job_messages_async( transport: str = "grpc_asyncio", request_type=messages.ListJobMessagesRequest ): client = MessagesV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -716,7 +735,9 @@ async def test_list_job_messages_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - messages.ListJobMessagesResponse(next_page_token="next_page_token_value",) + messages.ListJobMessagesResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_job_messages(request) @@ -737,7 +758,8 @@ async def test_list_job_messages_async_from_dict(): def test_list_job_messages_pager(transport_name: str = "grpc"): client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -754,12 +776,21 @@ def test_list_job_messages_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - messages.ListJobMessagesResponse(job_messages=[], next_page_token="def",), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(),], next_page_token="ghi", + job_messages=[], + next_page_token="def", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + ], + next_page_token="ghi", ), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(), messages.JobMessage(),], + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + ], ), RuntimeError, ) @@ -776,7 +807,8 @@ def test_list_job_messages_pager(transport_name: str = "grpc"): def test_list_job_messages_pages(transport_name: str = "grpc"): client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -793,12 +825,21 @@ def test_list_job_messages_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - messages.ListJobMessagesResponse(job_messages=[], next_page_token="def",), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(),], next_page_token="ghi", + job_messages=[], + next_page_token="def", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + ], + next_page_token="ghi", ), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(), messages.JobMessage(),], + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + ], ), RuntimeError, ) @@ -829,16 +870,27 @@ async def test_list_job_messages_async_pager(): ], next_page_token="abc", ), - messages.ListJobMessagesResponse(job_messages=[], next_page_token="def",), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(),], next_page_token="ghi", + job_messages=[], + next_page_token="def", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + ], + next_page_token="ghi", ), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(), messages.JobMessage(),], + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + ], ), RuntimeError, ) - async_pager = await client.list_job_messages(request={},) + async_pager = await client.list_job_messages( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -870,12 +922,21 @@ async def test_list_job_messages_async_pages(): ], next_page_token="abc", ), - messages.ListJobMessagesResponse(job_messages=[], next_page_token="def",), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(),], next_page_token="ghi", + job_messages=[], + next_page_token="def", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + ], + next_page_token="ghi", ), messages.ListJobMessagesResponse( - job_messages=[messages.JobMessage(), messages.JobMessage(),], + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + ], ), RuntimeError, ) @@ -893,7 +954,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
@@ -913,7 +975,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = MessagesV1Beta3Client(client_options=options, transport=transport,) + client = MessagesV1Beta3Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -929,7 +994,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MessagesV1Beta3Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -974,8 +1040,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = MessagesV1Beta3Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.MessagesV1Beta3GrpcTransport,) + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MessagesV1Beta3GrpcTransport, + ) def test_messages_v1_beta3_base_transport_error(): @@ -1018,7 +1089,8 @@ def test_messages_v1_beta3_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MessagesV1Beta3Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1196,7 +1268,8 @@ def test_messages_v1_beta3_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.MessagesV1Beta3GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1208,7 +1281,8 @@ def test_messages_v1_beta3_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.MessagesV1Beta3GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1337,7 +1411,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MessagesV1Beta3Client.common_folder_path(folder) assert expected == actual @@ -1355,7 +1431,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MessagesV1Beta3Client.common_organization_path(organization) assert expected == actual @@ -1373,7 +1451,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = MessagesV1Beta3Client.common_project_path(project) assert expected == actual @@ -1393,7 +1473,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = MessagesV1Beta3Client.common_location_path(project, location) assert expected == actual @@ -1418,7 +1499,8 @@ def test_client_with_default_client_info(): transports.MessagesV1Beta3Transport, "_prep_wrapped_messages" ) as prep: client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1427,7 +1509,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = MessagesV1Beta3Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1435,7 +1518,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = MessagesV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index e9b70c2d98da..81e16571dd36 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -88,7 +88,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [MetricsV1Beta3Client, MetricsV1Beta3AsyncClient,] + "client_class", + [ + MetricsV1Beta3Client, + MetricsV1Beta3AsyncClient, + ], ) def test_metrics_v1_beta3_client_from_service_account_info(client_class): creds = 
ga_credentials.AnonymousCredentials() @@ -130,7 +134,11 @@ def test_metrics_v1_beta3_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [MetricsV1Beta3Client, MetricsV1Beta3AsyncClient,] + "client_class", + [ + MetricsV1Beta3Client, + MetricsV1Beta3AsyncClient, + ], ) def test_metrics_v1_beta3_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -494,7 +502,9 @@ def test_metrics_v1_beta3_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -639,10 +649,17 @@ def test_metrics_v1_beta3_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [metrics.GetJobMetricsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetJobMetricsRequest, + dict, + ], +) def test_get_job_metrics(request_type, transport: str = "grpc"): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -668,7 +685,8 @@ def test_get_job_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -684,7 +702,8 @@ async def test_get_job_metrics_async( transport: str = "grpc_asyncio", request_type=metrics.GetJobMetricsRequest ): client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -711,10 +730,17 @@ async def test_get_job_metrics_async_from_dict(): await test_get_job_metrics_async(request_type=dict) -@pytest.mark.parametrize("request_type", [metrics.GetJobExecutionDetailsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetJobExecutionDetailsRequest, + dict, + ], +) def test_get_job_execution_details(request_type, transport: str = "grpc"): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -745,7 +771,8 @@ def test_get_job_execution_details_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
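[Editor's note] The parametrize hunks above list both the proto request class and dict, because the generated clients accept either a message or a plain mapping; black only reflows the list onto multiple lines. The shape of the pattern, with dict and list standing in here for the proto classes:

    import pytest

    @pytest.mark.parametrize(
        "request_type",
        [
            dict,
            list,  # stand-ins for e.g. metrics.GetJobMetricsRequest
        ],
    )
    def test_request_type_constructs(request_type):
        # each case builds an empty request, as the GAPIC tests do
        assert request_type() == request_type()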
@@ -763,7 +790,8 @@ async def test_get_job_execution_details_async( transport: str = "grpc_asyncio", request_type=metrics.GetJobExecutionDetailsRequest ): client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -776,7 +804,9 @@ async def test_get_job_execution_details_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - metrics.JobExecutionDetails(next_page_token="next_page_token_value",) + metrics.JobExecutionDetails( + next_page_token="next_page_token_value", + ) ) response = await client.get_job_execution_details(request) @@ -797,7 +827,8 @@ async def test_get_job_execution_details_async_from_dict(): def test_get_job_execution_details_pager(transport_name: str = "grpc"): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -814,12 +845,21 @@ def test_get_job_execution_details_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - metrics.JobExecutionDetails(stages=[], next_page_token="def",), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(),], next_page_token="ghi", + stages=[], + next_page_token="def", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token="ghi", ), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(), metrics.StageSummary(),], + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], ), RuntimeError, ) @@ -836,7 +876,8 @@ def test_get_job_execution_details_pager(transport_name: str = "grpc"): def test_get_job_execution_details_pages(transport_name: str = "grpc"): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -853,12 +894,21 @@ def test_get_job_execution_details_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - metrics.JobExecutionDetails(stages=[], next_page_token="def",), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(),], next_page_token="ghi", + stages=[], + next_page_token="def", ), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(), metrics.StageSummary(),], + stages=[ + metrics.StageSummary(), + ], + next_page_token="ghi", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], ), RuntimeError, ) @@ -869,7 +919,9 @@ def test_get_job_execution_details_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_get_job_execution_details_async_pager(): - client = MetricsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -887,16 +939,27 @@ async def test_get_job_execution_details_async_pager(): ], next_page_token="abc", ), - metrics.JobExecutionDetails(stages=[], next_page_token="def",), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(),], next_page_token="ghi", + stages=[], + next_page_token="def", ), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(), metrics.StageSummary(),], + stages=[ + metrics.StageSummary(), + ], + next_page_token="ghi", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], ), RuntimeError, ) - async_pager = await client.get_job_execution_details(request={},) + async_pager = await client.get_job_execution_details( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -908,7 +971,9 @@ async def test_get_job_execution_details_async_pager(): @pytest.mark.asyncio async def test_get_job_execution_details_async_pages(): - client = MetricsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -926,12 +991,21 @@ async def test_get_job_execution_details_async_pages(): ], next_page_token="abc", ), - metrics.JobExecutionDetails(stages=[], next_page_token="def",), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(),], next_page_token="ghi", + stages=[], + next_page_token="def", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token="ghi", ), metrics.JobExecutionDetails( - stages=[metrics.StageSummary(), metrics.StageSummary(),], + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], ), RuntimeError, ) @@ -943,11 +1017,16 @@ async def test_get_job_execution_details_async_pages(): @pytest.mark.parametrize( - "request_type", [metrics.GetStageExecutionDetailsRequest, dict,] + "request_type", + [ + metrics.GetStageExecutionDetailsRequest, + dict, + ], ) def test_get_stage_execution_details(request_type, transport: str = "grpc"): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -978,7 +1057,8 @@ def test_get_stage_execution_details_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -997,7 +1077,8 @@ async def test_get_stage_execution_details_async( request_type=metrics.GetStageExecutionDetailsRequest, ): client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1010,7 +1091,9 @@ async def test_get_stage_execution_details_async( ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - metrics.StageExecutionDetails(next_page_token="next_page_token_value",) + metrics.StageExecutionDetails( + next_page_token="next_page_token_value", + ) ) response = await client.get_stage_execution_details(request) @@ -1031,7 +1114,8 @@ async def test_get_stage_execution_details_async_from_dict(): def test_get_stage_execution_details_pager(transport_name: str = "grpc"): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1048,12 +1132,21 @@ def test_get_stage_execution_details_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - metrics.StageExecutionDetails(workers=[], next_page_token="def",), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(),], next_page_token="ghi", + workers=[], + next_page_token="def", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token="ghi", ), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(), metrics.WorkerDetails(),], + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], ), RuntimeError, ) @@ -1070,7 +1163,8 @@ def test_get_stage_execution_details_pager(transport_name: str = "grpc"): def test_get_stage_execution_details_pages(transport_name: str = "grpc"): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1087,12 +1181,21 @@ def test_get_stage_execution_details_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - metrics.StageExecutionDetails(workers=[], next_page_token="def",), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(),], next_page_token="ghi", + workers=[], + next_page_token="def", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token="ghi", ), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(), metrics.WorkerDetails(),], + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], ), RuntimeError, ) @@ -1103,7 +1206,9 @@ def test_get_stage_execution_details_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_get_stage_execution_details_async_pager(): - client = MetricsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1121,16 +1226,27 @@ async def test_get_stage_execution_details_async_pager(): ], next_page_token="abc", ), - metrics.StageExecutionDetails(workers=[], next_page_token="def",), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(),], next_page_token="ghi", + workers=[], + next_page_token="def", ), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(), metrics.WorkerDetails(),], + workers=[ + metrics.WorkerDetails(), + ], + next_page_token="ghi", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], ), RuntimeError, ) - async_pager = await client.get_stage_execution_details(request={},) + async_pager = await client.get_stage_execution_details( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1142,7 +1258,9 @@ async def test_get_stage_execution_details_async_pager(): @pytest.mark.asyncio async def test_get_stage_execution_details_async_pages(): - client = MetricsV1Beta3AsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1160,12 +1278,21 @@ async def test_get_stage_execution_details_async_pages(): ], next_page_token="abc", ), - metrics.StageExecutionDetails(workers=[], next_page_token="def",), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(),], next_page_token="ghi", + workers=[], + next_page_token="def", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token="ghi", ), metrics.StageExecutionDetails( - workers=[metrics.WorkerDetails(), metrics.WorkerDetails(),], + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], ), RuntimeError, ) @@ -1183,7 +1310,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1203,7 +1331,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = MetricsV1Beta3Client(client_options=options, transport=transport,) + client = MetricsV1Beta3Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1219,7 +1350,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MetricsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1264,8 +1396,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = MetricsV1Beta3Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.MetricsV1Beta3GrpcTransport,) + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsV1Beta3GrpcTransport, + ) def test_metrics_v1_beta3_base_transport_error(): @@ -1312,7 +1449,8 @@ def test_metrics_v1_beta3_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsV1Beta3Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1490,7 +1628,8 @@ def test_metrics_v1_beta3_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.MetricsV1Beta3GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1502,7 +1641,8 @@ def test_metrics_v1_beta3_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.MetricsV1Beta3GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1631,7 +1771,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MetricsV1Beta3Client.common_folder_path(folder) assert expected == actual @@ -1649,7 +1791,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MetricsV1Beta3Client.common_organization_path(organization) assert expected == actual @@ -1667,7 +1811,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = MetricsV1Beta3Client.common_project_path(project) assert expected == actual @@ -1687,7 +1833,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = MetricsV1Beta3Client.common_location_path(project, location) assert expected == actual @@ -1712,7 +1859,8 @@ def test_client_with_default_client_info(): transports.MetricsV1Beta3Transport, "_prep_wrapped_messages" ) as prep: client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1721,7 +1869,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = MetricsV1Beta3Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + 
credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1729,7 +1878,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index eee8da513bcc..e07411030412 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -91,7 +91,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [SnapshotsV1Beta3Client, SnapshotsV1Beta3AsyncClient,] + "client_class", + [ + SnapshotsV1Beta3Client, + SnapshotsV1Beta3AsyncClient, + ], ) def test_snapshots_v1_beta3_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -133,7 +137,11 @@ def test_snapshots_v1_beta3_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [SnapshotsV1Beta3Client, SnapshotsV1Beta3AsyncClient,] + "client_class", + [ + SnapshotsV1Beta3Client, + SnapshotsV1Beta3AsyncClient, + ], ) def test_snapshots_v1_beta3_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -507,7 +515,9 @@ def test_snapshots_v1_beta3_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -652,10 +662,17 @@ def test_snapshots_v1_beta3_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [snapshots.GetSnapshotRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + snapshots.GetSnapshotRequest, + dict, + ], +) def test_get_snapshot(request_type, transport: str = "grpc"): client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -696,7 +713,8 @@ def test_get_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
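[Editor's note] The *_empty_call tests above and below pin down a small contract: invoking an RPC method with no request must still send a default-constructed request to the stub. A toy model of that failsafe, assuming nothing from the real client:

    from unittest import mock

    stub = mock.Mock(return_value="response")

    def get_snapshot(request=None):
        # mirror the generated clients: a missing request becomes the default
        request = request if request is not None else {}
        return stub(request)

    assert get_snapshot() == "response"
    stub.assert_called_once_with({})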
@@ -712,7 +730,8 @@ async def test_get_snapshot_async( transport: str = "grpc_asyncio", request_type=snapshots.GetSnapshotRequest ): client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -756,10 +775,17 @@ async def test_get_snapshot_async_from_dict(): await test_get_snapshot_async(request_type=dict) -@pytest.mark.parametrize("request_type", [snapshots.DeleteSnapshotRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + snapshots.DeleteSnapshotRequest, + dict, + ], +) def test_delete_snapshot(request_type, transport: str = "grpc"): client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -785,7 +811,8 @@ def test_delete_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -801,7 +828,8 @@ async def test_delete_snapshot_async( transport: str = "grpc_asyncio", request_type=snapshots.DeleteSnapshotRequest ): client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -830,10 +858,17 @@ async def test_delete_snapshot_async_from_dict(): await test_delete_snapshot_async(request_type=dict) -@pytest.mark.parametrize("request_type", [snapshots.ListSnapshotsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + snapshots.ListSnapshotsRequest, + dict, + ], +) def test_list_snapshots(request_type, transport: str = "grpc"): client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -859,7 +894,8 @@ def test_list_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
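[Editor's note] The snapshots file continues below with test_credentials_transport_error, the same guard every client in this patch asserts: a ready-made transport may not be combined with separate credentials, a credentials file, scopes, or an api_key. A toy model of the first of those rules, independent of the real clients:

    import pytest

    class ToyClient:
        def __init__(self, credentials=None, transport=None):
            if transport is not None and credentials is not None:
                # the transport instance already carries its own credentials
                raise ValueError("provide credentials or a transport, not both")

    def test_credentials_transport_error():
        with pytest.raises(ValueError):
            ToyClient(credentials=object(), transport=object())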
@@ -875,7 +911,8 @@ async def test_list_snapshots_async( transport: str = "grpc_asyncio", request_type=snapshots.ListSnapshotsRequest ): client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -911,7 +948,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -931,7 +969,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = SnapshotsV1Beta3Client(client_options=options, transport=transport,) + client = SnapshotsV1Beta3Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -947,7 +988,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SnapshotsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -992,8 +1034,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = SnapshotsV1Beta3Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.SnapshotsV1Beta3GrpcTransport,) + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SnapshotsV1Beta3GrpcTransport, + ) def test_snapshots_v1_beta3_base_transport_error(): @@ -1040,7 +1087,8 @@ def test_snapshots_v1_beta3_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SnapshotsV1Beta3Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1218,7 +1266,8 @@ def test_snapshots_v1_beta3_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.SnapshotsV1Beta3GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1230,7 +1279,8 @@ def test_snapshots_v1_beta3_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.SnapshotsV1Beta3GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1359,7 +1409,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = SnapshotsV1Beta3Client.common_folder_path(folder) assert expected == actual @@ -1377,7 +1429,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = SnapshotsV1Beta3Client.common_organization_path(organization) assert expected == actual @@ -1395,7 +1449,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = SnapshotsV1Beta3Client.common_project_path(project) assert expected == actual @@ -1415,7 +1471,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = SnapshotsV1Beta3Client.common_location_path(project, location) assert expected == actual @@ -1440,7 +1497,8 @@ def test_client_with_default_client_info(): transports.SnapshotsV1Beta3Transport, "_prep_wrapped_messages" ) as prep: client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1449,7 +1507,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = SnapshotsV1Beta3Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1457,7 +1516,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index ada0bb8b0fbd..1727a6ee2511 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -93,7 +93,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [TemplatesServiceClient, TemplatesServiceAsyncClient,] + "client_class", + [ + TemplatesServiceClient, + TemplatesServiceAsyncClient, + ], ) def test_templates_service_client_from_service_account_info(client_class): creds = 
ga_credentials.AnonymousCredentials() @@ -135,7 +139,11 @@ def test_templates_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [TemplatesServiceClient, TemplatesServiceAsyncClient,] + "client_class", + [ + TemplatesServiceClient, + TemplatesServiceAsyncClient, + ], ) def test_templates_service_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -509,7 +517,9 @@ def test_templates_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -655,11 +665,16 @@ def test_templates_service_client_create_channel_credentials_file( @pytest.mark.parametrize( - "request_type", [templates.CreateJobFromTemplateRequest, dict,] + "request_type", + [ + templates.CreateJobFromTemplateRequest, + dict, + ], ) def test_create_job_from_template(request_type, transport: str = "grpc"): client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -716,7 +731,8 @@ def test_create_job_from_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -734,7 +750,8 @@ async def test_create_job_from_template_async( transport: str = "grpc_asyncio", request_type=templates.CreateJobFromTemplateRequest ): client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -794,10 +811,17 @@ async def test_create_job_from_template_async_from_dict(): await test_create_job_from_template_async(request_type=dict) -@pytest.mark.parametrize("request_type", [templates.LaunchTemplateRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + templates.LaunchTemplateRequest, + dict, + ], +) def test_launch_template(request_type, transport: str = "grpc"): client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -823,7 +847,8 @@ def test_launch_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
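[Editor's note] The common_*_path helper hunks (in the snapshots file above, and again for the templates client below) are pure str.format round-trips; black only moves each keyword argument onto its own line. The invariant those tests assert fits in two lines:

    folder = "whelk"
    assert "folders/{folder}".format(folder=folder) == "folders/whelk"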
@@ -839,7 +864,8 @@ async def test_launch_template_async( transport: str = "grpc_asyncio", request_type=templates.LaunchTemplateRequest ): client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -868,10 +894,17 @@ async def test_launch_template_async_from_dict(): await test_launch_template_async(request_type=dict) -@pytest.mark.parametrize("request_type", [templates.GetTemplateRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + templates.GetTemplateRequest, + dict, + ], +) def test_get_template(request_type, transport: str = "grpc"): client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -900,7 +933,8 @@ def test_get_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -916,7 +950,8 @@ async def test_get_template_async( transport: str = "grpc_asyncio", request_type=templates.GetTemplateRequest ): client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -955,7 +990,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -975,7 +1011,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = TemplatesServiceClient(client_options=options, transport=transport,) + client = TemplatesServiceClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -991,7 +1030,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = TemplatesServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1036,8 +1076,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = TemplatesServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.TemplatesServiceGrpcTransport,) + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TemplatesServiceGrpcTransport, + ) def test_templates_service_base_transport_error(): @@ -1084,7 +1129,8 @@ def test_templates_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.TemplatesServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1262,7 +1308,8 @@ def test_templates_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.TemplatesServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1274,7 +1321,8 @@ def test_templates_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.TemplatesServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1403,7 +1451,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = TemplatesServiceClient.common_folder_path(folder) assert expected == actual @@ -1421,7 +1471,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = TemplatesServiceClient.common_organization_path(organization) assert expected == actual @@ -1439,7 +1491,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = TemplatesServiceClient.common_project_path(project) assert expected == actual @@ -1459,7 +1513,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = TemplatesServiceClient.common_location_path(project, location) assert expected == actual @@ -1484,7 +1539,8 @@ def test_client_with_default_client_info(): transports.TemplatesServiceTransport, "_prep_wrapped_messages" ) as prep: client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1493,7 +1549,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = TemplatesServiceClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + 
credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1501,7 +1558,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close"