diff --git a/.coveragerc b/.coveragerc
index b178b094a..dd39c8546 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,3 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Generated by synthtool. DO NOT EDIT!
[run]
branch = True
diff --git a/.flake8 b/.flake8
index 0268ecc9c..ed9316381 100644
--- a/.flake8
+++ b/.flake8
@@ -1,3 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Generated by synthtool. DO NOT EDIT!
[flake8]
ignore = E203, E266, E501, W503
@@ -5,6 +21,8 @@ exclude =
# Exclude generated code.
**/proto/**
**/gapic/**
+ **/services/**
+ **/types/**
*_pb2.py
# Standard linting exemptions.
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 222dc82a4..5b5339350 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -11,8 +11,7 @@ Thanks for stopping by to let us know something could be better!
Please run down the following list and make sure you've tried the usual "quick fixes":
- Search the issues already opened: https://github.com/googleapis/python-bigquery/issues
- - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python
- - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python
+ - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python
If you are still having issues, please be sure to include as much information as possible:
diff --git a/.gitignore b/.gitignore
index 3fb06e09c..b87e1ed58 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@
dist
build
eggs
+.eggs
parts
bin
var
@@ -49,6 +50,7 @@ bigquery/docs/generated
# Virtual environment
env/
coverage.xml
+sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index de3549ef8..309212789 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Disable buffering, so that the logs stream through.
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 55233bd89..0e58f0640 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Start the releasetool reporter
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 000000000..3e41df313
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "lint"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 000000000..a56768eae
--- /dev/null
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.6"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 000000000..7218af149
--- /dev/null
+++ b/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 000000000..c93747180
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.7"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 000000000..9808f15e3
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.8"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 000000000..905732a40
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero code
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-bigquery
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+ git checkout $LATEST_RELEASE
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the Build Cop Bot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c812edbd1..3366287d6 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions: 2.7,
- 3.5, 3.6, and 3.7 on both UNIX and Windows.
+ 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -214,26 +214,18 @@ We support:
- `Python 3.5`_
- `Python 3.6`_
- `Python 3.7`_
+- `Python 3.8`_
.. _Python 3.5: https://docs.python.org/3.5/
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
Supported versions can be found in our ``noxfile.py`` `config`_.
.. _config: https://github.com/googleapis/python-bigquery/blob/master/noxfile.py
-We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_
-and lack of continuous integration `support`_.
-
-.. _Python 2.5: https://docs.python.org/2.5/
-.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/
-.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/
-
-We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no
-longer supported by the core development team.
-
Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
We also explicitly decided to support Python 3 beginning with version
@@ -247,7 +239,6 @@ We also explicitly decided to support Python 3 beginning with version
.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
.. _projects: http://flask.pocoo.org/docs/0.10/python3/
.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
-.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995
**********
Versioning
diff --git a/MANIFEST.in b/MANIFEST.in
index cd011be27..e9e29d120 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1,25 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
recursive-include google *.json *.proto
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
\ No newline at end of file
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 9a6f9f8dd..0abaf229f 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,4 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index de457b2c2..6316a537f 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -1,3 +1,4 @@
+
{% extends "!layout.html" %}
{%- block content %}
{%- if theme_fixed_sidebar|lower == 'true' %}
@@ -20,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/docs/conf.py b/docs/conf.py
index 30dcac564..332b81b10 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -337,7 +337,7 @@
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
}
diff --git a/google/cloud/bigquery_v2/gapic/enums.py b/google/cloud/bigquery_v2/gapic/enums.py
index 97059414f..10d7c2517 100644
--- a/google/cloud/bigquery_v2/gapic/enums.py
+++ b/google/cloud/bigquery_v2/gapic/enums.py
@@ -30,7 +30,7 @@ class DataSplitMethod(enum.IntEnum):
CUSTOM (int): Splits data with the user provided tags.
SEQUENTIAL (int): Splits data sequentially.
NO_SPLIT (int): Data split will be skipped.
- AUTO_SPLIT (int): Splits data automatically: Uses NO\_SPLIT if the data size is small.
+ AUTO_SPLIT (int): Splits data automatically: Uses NO_SPLIT if the data size is small.
Otherwise uses RANDOM.
"""
@@ -125,7 +125,7 @@ class KmeansInitializationMethod(enum.IntEnum):
KMEANS_INITIALIZATION_METHOD_UNSPECIFIED (int)
RANDOM (int): Initializes the centroids randomly.
CUSTOM (int): Initializes the centroids using data specified in
- kmeans\_initialization\_column.
+ kmeans_initialization_column.
"""
KMEANS_INITIALIZATION_METHOD_UNSPECIFIED = 0
@@ -150,9 +150,9 @@ class TypeKind(enum.IntEnum):
DATETIME (int): Encoded as RFC 3339 full-date "T" partial-time: 1985-04-12T23:20:50.52
GEOGRAPHY (int): Encoded as WKT
NUMERIC (int): Encoded as a decimal string.
- ARRAY (int): Encoded as a list with types matching Type.array\_type.
- STRUCT (int): Encoded as a list with fields of type Type.struct\_type[i]. List is used
- because a JSON object cannot have duplicate field names.
+ ARRAY (int): Encoded as a list with types matching Type.array_type.
+ STRUCT (int): Encoded as a list with fields of type Type.struct_type[i]. List is
+ used because a JSON object cannot have duplicate field names.
"""
TYPE_KIND_UNSPECIFIED = 0
diff --git a/google/cloud/bigquery_v2/proto/encryption_config_pb2.py b/google/cloud/bigquery_v2/proto/encryption_config_pb2.py
index f7b26be55..5ae21ea6f 100644
--- a/google/cloud/bigquery_v2/proto/encryption_config_pb2.py
+++ b/google/cloud/bigquery_v2/proto/encryption_config_pb2.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigquery_v2/proto/encryption_config.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -24,12 +21,9 @@
name="google/cloud/bigquery_v2/proto/encryption_config.proto",
package="google.cloud.bigquery.v2",
syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.cloud.bigquery.v2B\025EncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"
- ),
- serialized_pb=_b(
- '\n6google/cloud/bigquery_v2/proto/encryption_config.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"R\n\x17\x45ncryptionConfiguration\x12\x37\n\x0ckms_key_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x01\x42w\n\x1c\x63om.google.cloud.bigquery.v2B\x15\x45ncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3'
- ),
+ serialized_options=b"\n\034com.google.cloud.bigquery.v2B\025EncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n6google/cloud/bigquery_v2/proto/encryption_config.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"R\n\x17\x45ncryptionConfiguration\x12\x37\n\x0ckms_key_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x01\x42w\n\x1c\x63om.google.cloud.bigquery.v2B\x15\x45ncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3',
dependencies=[
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
@@ -44,6 +38,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="kms_key_name",
@@ -60,8 +55,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\001"),
+ serialized_options=b"\340A\001",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -85,10 +81,10 @@
EncryptionConfiguration = _reflection.GeneratedProtocolMessageType(
"EncryptionConfiguration",
(_message.Message,),
- dict(
- DESCRIPTOR=_ENCRYPTIONCONFIGURATION,
- __module__="google.cloud.bigquery_v2.proto.encryption_config_pb2",
- __doc__="""Encryption configuration.
+ {
+ "DESCRIPTOR": _ENCRYPTIONCONFIGURATION,
+ "__module__": "google.cloud.bigquery_v2.proto.encryption_config_pb2",
+ "__doc__": """Encryption configuration.
Attributes:
kms_key_name:
@@ -98,7 +94,7 @@
to this encryption key.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.EncryptionConfiguration)
- ),
+ },
)
_sym_db.RegisterMessage(EncryptionConfiguration)
diff --git a/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py b/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py
index 07cb78fe0..8a9393943 100644
--- a/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py
+++ b/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py
@@ -1,2 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
diff --git a/google/cloud/bigquery_v2/proto/model_pb2.py b/google/cloud/bigquery_v2/proto/model_pb2.py
index 0b4e9d23e..7b66be8f7 100644
--- a/google/cloud/bigquery_v2/proto/model_pb2.py
+++ b/google/cloud/bigquery_v2/proto/model_pb2.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigquery_v2/proto/model.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -36,12 +33,9 @@
name="google/cloud/bigquery_v2/proto/model.proto",
package="google.cloud.bigquery.v2",
syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"
- ),
- serialized_pb=_b(
- '\n*google/cloud/bigquery_v2/proto/model.proto\x12\x18google.cloud.bigquery.v2\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x36google/cloud/bigquery_v2/proto/encryption_config.proto\x1a\x34google/cloud/bigquery_v2/proto/model_reference.proto\x1a\x31google/cloud/bigquery_v2/proto/standard_sql.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\x9b\x35\n\x05Model\x12\x11\n\x04\x65tag\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x46\n\x0fmodel_reference\x18\x02 \x01(\x0b\x32(.google.cloud.bigquery.v2.ModelReferenceB\x03\xe0\x41\x02\x12\x1a\n\rcreation_time\x18\x05 \x01(\x03\x42\x03\xe0\x41\x03\x12\x1f\n\x12last_modified_time\x18\x06 \x01(\x03\x42\x03\xe0\x41\x03\x12\x18\n\x0b\x64\x65scription\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rfriendly_name\x18\x0e \x01(\tB\x03\xe0\x41\x01\x12;\n\x06labels\x18\x0f \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.LabelsEntry\x12\x1c\n\x0f\x65xpiration_time\x18\x10 \x01(\x03\x42\x03\xe0\x41\x01\x12\x15\n\x08location\x18\r \x01(\tB\x03\xe0\x41\x03\x12S\n\x18\x65ncryption_configuration\x18\x11 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.EncryptionConfiguration\x12\x42\n\nmodel_type\x18\x07 \x01(\x0e\x32).google.cloud.bigquery.v2.Model.ModelTypeB\x03\xe0\x41\x03\x12G\n\rtraining_runs\x18\t \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.TrainingRunB\x03\xe0\x41\x03\x12H\n\x0f\x66\x65\x61ture_columns\x18\n \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x12\x46\n\rlabel_columns\x18\x0b \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x1aq\n\x0bKmeansEnums"b\n\x1aKmeansInitializationMethod\x12,\n(KMEANS_INITIALIZATION_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x1a\xb4\x02\n\x11RegressionMetrics\x12\x39\n\x13mean_absolute_error\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x38\n\x12mean_squared_error\x18\x02 
\x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12<\n\x16mean_squared_log_error\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15median_absolute_error\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tr_squared\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\xef\x02\n\x1e\x41ggregateClassificationMetrics\x12/\n\tprecision\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tthreshold\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08log_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12-\n\x07roc_auc\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x9f\x06\n\x1b\x42inaryClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12w\n\x1c\x62inary_confusion_matrix_list\x18\x02 \x03(\x0b\x32Q.google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix\x12\x16\n\x0epositive_label\x18\x03 \x01(\t\x12\x16\n\x0enegative_label\x18\x04 \x01(\t\x1a\xec\x03\n\x15\x42inaryConfusionMatrix\x12>\n\x18positive_class_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x33\n\x0etrue_positives\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_positives\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0etrue_negatives\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_negatives\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\tprecision\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\t 
\x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x87\x05\n\x1fMultiClassClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12n\n\x15\x63onfusion_matrix_list\x18\x02 \x03(\x0b\x32O.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix\x1a\x89\x03\n\x0f\x43onfusionMatrix\x12:\n\x14\x63onfidence_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x61\n\x04rows\x18\x02 \x03(\x0b\x32S.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row\x1aQ\n\x05\x45ntry\x12\x17\n\x0fpredicted_label\x18\x01 \x01(\t\x12/\n\nitem_count\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\x83\x01\n\x03Row\x12\x14\n\x0c\x61\x63tual_label\x18\x01 \x01(\t\x12\x66\n\x07\x65ntries\x18\x02 \x03(\x0b\x32U.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry\x1a\xcb\x06\n\x11\x43lusteringMetrics\x12:\n\x14\x64\x61vies_bouldin_index\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15mean_squared_distance\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12K\n\x08\x63lusters\x18\x03 \x03(\x0b\x32\x39.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster\x1a\xef\x04\n\x07\x43luster\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12^\n\x0e\x66\x65\x61ture_values\x18\x02 \x03(\x0b\x32\x46.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue\x12*\n\x05\x63ount\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\xc2\x03\n\x0c\x46\x65\x61tureValue\x12\x16\n\x0e\x66\x65\x61ture_column\x18\x01 \x01(\t\x12\x37\n\x0fnumerical_value\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueH\x00\x12t\n\x11\x63\x61tegorical_value\x18\x03 \x01(\x0b\x32W.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValueH\x00\x1a\xe1\x01\n\x10\x43\x61tegoricalValue\x12~\n\x0f\x63\x61tegory_counts\x18\x01 
\x03(\x0b\x32\x65.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount\x1aM\n\rCategoryCount\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12*\n\x05\x63ount\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x07\n\x05value\x1a\x95\x03\n\x11\x45valuationMetrics\x12O\n\x12regression_metrics\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.RegressionMetricsH\x00\x12\x64\n\x1d\x62inary_classification_metrics\x18\x02 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.BinaryClassificationMetricsH\x00\x12m\n"multi_class_classification_metrics\x18\x03 \x01(\x0b\x32?.google.cloud.bigquery.v2.Model.MultiClassClassificationMetricsH\x00\x12O\n\x12\x63lustering_metrics\x18\x04 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.ClusteringMetricsH\x00\x42\t\n\x07metrics\x1a\xab\x0f\n\x0bTrainingRun\x12U\n\x10training_options\x18\x01 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions\x12.\n\nstart_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12L\n\x07results\x18\x06 \x03(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult\x12M\n\x12\x65valuation_metrics\x18\x07 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.EvaluationMetrics\x1a\x9d\t\n\x0fTrainingOptions\x12\x16\n\x0emax_iterations\x18\x01 \x01(\x03\x12;\n\tloss_type\x18\x02 \x01(\x0e\x32(.google.cloud.bigquery.v2.Model.LossType\x12\x12\n\nlearn_rate\x18\x03 \x01(\x01\x12\x37\n\x11l1_regularization\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x37\n\x11l2_regularization\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15min_relative_progress\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\nwarm_start\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12.\n\nearly_stop\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x1b\n\x13input_label_columns\x18\t \x03(\t\x12J\n\x11\x64\x61ta_split_method\x18\n \x01(\x0e\x32/.google.cloud.bigquery.v2.Model.DataSplitMethod\x12 
\n\x18\x64\x61ta_split_eval_fraction\x18\x0b \x01(\x01\x12\x19\n\x11\x64\x61ta_split_column\x18\x0c \x01(\t\x12N\n\x13learn_rate_strategy\x18\r \x01(\x0e\x32\x31.google.cloud.bigquery.v2.Model.LearnRateStrategy\x12\x1a\n\x12initial_learn_rate\x18\x10 \x01(\x01\x12o\n\x13label_class_weights\x18\x11 \x03(\x0b\x32R.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry\x12\x43\n\rdistance_type\x18\x14 \x01(\x0e\x32,.google.cloud.bigquery.v2.Model.DistanceType\x12\x14\n\x0cnum_clusters\x18\x15 \x01(\x03\x12\x11\n\tmodel_uri\x18\x16 \x01(\t\x12S\n\x15optimization_strategy\x18\x17 \x01(\x0e\x32\x34.google.cloud.bigquery.v2.Model.OptimizationStrategy\x12l\n\x1ckmeans_initialization_method\x18! \x01(\x0e\x32\x46.google.cloud.bigquery.v2.Model.KmeansEnums.KmeansInitializationMethod\x12$\n\x1ckmeans_initialization_column\x18" \x01(\t\x1a\x38\n\x16LabelClassWeightsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\xd7\x03\n\x0fIterationResult\x12*\n\x05index\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0b\x64uration_ms\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\rtraining_loss\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\teval_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x12\n\nlearn_rate\x18\x07 \x01(\x01\x12^\n\rcluster_infos\x18\x08 \x03(\x0b\x32G.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo\x1a\x8b\x01\n\x0b\x43lusterInfo\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12\x34\n\x0e\x63luster_radius\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x31\n\x0c\x63luster_size\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"s\n\tModelType\x12\x1a\n\x16MODEL_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11LINEAR_REGRESSION\x10\x01\x12\x17\n\x13LOGISTIC_REGRESSION\x10\x02\x12\n\n\x06KMEANS\x10\x03\x12\x0e\n\nTENSORFLOW\x10\x06"O\n\x08LossType\x12\x19\n\x15LOSS_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11MEAN_SQUARED_LOSS\x10\x01\x12\x11\n\rMEAN_LOG_LOSS\x10\x02"H\n\x0c\x44istanceType\x12\x1d\n\x19\x44ISTANCE_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tEUCLIDEAN\x10\x01\x12\n\n\x06\x43OSINE\x10\x02"z\n\x0f\x44\x61taSplitMethod\x12!\n\x1d\x44\x41TA_SPLIT_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x12\x0e\n\nSEQUENTIAL\x10\x03\x12\x0c\n\x08NO_SPLIT\x10\x04\x12\x0e\n\nAUTO_SPLIT\x10\x05"W\n\x11LearnRateStrategy\x12#\n\x1fLEARN_RATE_STRATEGY_UNSPECIFIED\x10\x00\x12\x0f\n\x0bLINE_SEARCH\x10\x01\x12\x0c\n\x08\x43ONSTANT\x10\x02"n\n\x14OptimizationStrategy\x12%\n!OPTIMIZATION_STRATEGY_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x42\x41TCH_GRADIENT_DESCENT\x10\x01\x12\x13\n\x0fNORMAL_EQUATION\x10\x02"Z\n\x0fGetModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x91\x01\n\x11PatchModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x05model\x18\x04 \x01(\x0b\x32\x1f.google.cloud.bigquery.v2.ModelB\x03\xe0\x41\x02"]\n\x12\x44\x65leteModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x8c\x01\n\x11ListModelsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x31\n\x0bmax_results\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x12\n\npage_token\x18\x04 \x01(\t"^\n\x12ListModelsResponse\x12/\n\x06models\x18\x01 
\x03(\x0b\x32\x1f.google.cloud.bigquery.v2.Model\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xfa\x05\n\x0cModelService\x12y\n\x08GetModel\x12).google.cloud.bigquery.v2.GetModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"!\xda\x41\x1eproject_id,dataset_id,model_id\x12\x8d\x01\n\nListModels\x12+.google.cloud.bigquery.v2.ListModelsRequest\x1a,.google.cloud.bigquery.v2.ListModelsResponse"$\xda\x41!project_id,dataset_id,max_results\x12\x83\x01\n\nPatchModel\x12+.google.cloud.bigquery.v2.PatchModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"\'\xda\x41$project_id,dataset_id,model_id,model\x12v\n\x0b\x44\x65leteModel\x12,.google.cloud.bigquery.v2.DeleteModelRequest\x1a\x16.google.protobuf.Empty"!\xda\x41\x1eproject_id,dataset_id,model_id\x1a\xe0\x01\xca\x41\x17\x62igquery.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-onlyBl\n\x1c\x63om.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3'
- ),
+ serialized_options=b"\n\034com.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n*google/cloud/bigquery_v2/proto/model.proto\x12\x18google.cloud.bigquery.v2\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x36google/cloud/bigquery_v2/proto/encryption_config.proto\x1a\x34google/cloud/bigquery_v2/proto/model_reference.proto\x1a\x31google/cloud/bigquery_v2/proto/standard_sql.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\x9b\x35\n\x05Model\x12\x11\n\x04\x65tag\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x46\n\x0fmodel_reference\x18\x02 \x01(\x0b\x32(.google.cloud.bigquery.v2.ModelReferenceB\x03\xe0\x41\x02\x12\x1a\n\rcreation_time\x18\x05 \x01(\x03\x42\x03\xe0\x41\x03\x12\x1f\n\x12last_modified_time\x18\x06 \x01(\x03\x42\x03\xe0\x41\x03\x12\x18\n\x0b\x64\x65scription\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rfriendly_name\x18\x0e \x01(\tB\x03\xe0\x41\x01\x12;\n\x06labels\x18\x0f \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.LabelsEntry\x12\x1c\n\x0f\x65xpiration_time\x18\x10 \x01(\x03\x42\x03\xe0\x41\x01\x12\x15\n\x08location\x18\r \x01(\tB\x03\xe0\x41\x03\x12S\n\x18\x65ncryption_configuration\x18\x11 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.EncryptionConfiguration\x12\x42\n\nmodel_type\x18\x07 \x01(\x0e\x32).google.cloud.bigquery.v2.Model.ModelTypeB\x03\xe0\x41\x03\x12G\n\rtraining_runs\x18\t \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.TrainingRunB\x03\xe0\x41\x03\x12H\n\x0f\x66\x65\x61ture_columns\x18\n \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x12\x46\n\rlabel_columns\x18\x0b \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x1aq\n\x0bKmeansEnums"b\n\x1aKmeansInitializationMethod\x12,\n(KMEANS_INITIALIZATION_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x1a\xb4\x02\n\x11RegressionMetrics\x12\x39\n\x13mean_absolute_error\x18\x01 
\x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x38\n\x12mean_squared_error\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12<\n\x16mean_squared_log_error\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15median_absolute_error\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tr_squared\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\xef\x02\n\x1e\x41ggregateClassificationMetrics\x12/\n\tprecision\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tthreshold\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08log_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12-\n\x07roc_auc\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x9f\x06\n\x1b\x42inaryClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12w\n\x1c\x62inary_confusion_matrix_list\x18\x02 \x03(\x0b\x32Q.google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix\x12\x16\n\x0epositive_label\x18\x03 \x01(\t\x12\x16\n\x0enegative_label\x18\x04 \x01(\t\x1a\xec\x03\n\x15\x42inaryConfusionMatrix\x12>\n\x18positive_class_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x33\n\x0etrue_positives\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_positives\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0etrue_negatives\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_negatives\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\tprecision\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x07 
\x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x87\x05\n\x1fMultiClassClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12n\n\x15\x63onfusion_matrix_list\x18\x02 \x03(\x0b\x32O.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix\x1a\x89\x03\n\x0f\x43onfusionMatrix\x12:\n\x14\x63onfidence_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x61\n\x04rows\x18\x02 \x03(\x0b\x32S.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row\x1aQ\n\x05\x45ntry\x12\x17\n\x0fpredicted_label\x18\x01 \x01(\t\x12/\n\nitem_count\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\x83\x01\n\x03Row\x12\x14\n\x0c\x61\x63tual_label\x18\x01 \x01(\t\x12\x66\n\x07\x65ntries\x18\x02 \x03(\x0b\x32U.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry\x1a\xcb\x06\n\x11\x43lusteringMetrics\x12:\n\x14\x64\x61vies_bouldin_index\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15mean_squared_distance\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12K\n\x08\x63lusters\x18\x03 \x03(\x0b\x32\x39.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster\x1a\xef\x04\n\x07\x43luster\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12^\n\x0e\x66\x65\x61ture_values\x18\x02 \x03(\x0b\x32\x46.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue\x12*\n\x05\x63ount\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\xc2\x03\n\x0c\x46\x65\x61tureValue\x12\x16\n\x0e\x66\x65\x61ture_column\x18\x01 \x01(\t\x12\x37\n\x0fnumerical_value\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueH\x00\x12t\n\x11\x63\x61tegorical_value\x18\x03 
\x01(\x0b\x32W.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValueH\x00\x1a\xe1\x01\n\x10\x43\x61tegoricalValue\x12~\n\x0f\x63\x61tegory_counts\x18\x01 \x03(\x0b\x32\x65.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount\x1aM\n\rCategoryCount\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12*\n\x05\x63ount\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x07\n\x05value\x1a\x95\x03\n\x11\x45valuationMetrics\x12O\n\x12regression_metrics\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.RegressionMetricsH\x00\x12\x64\n\x1d\x62inary_classification_metrics\x18\x02 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.BinaryClassificationMetricsH\x00\x12m\n"multi_class_classification_metrics\x18\x03 \x01(\x0b\x32?.google.cloud.bigquery.v2.Model.MultiClassClassificationMetricsH\x00\x12O\n\x12\x63lustering_metrics\x18\x04 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.ClusteringMetricsH\x00\x42\t\n\x07metrics\x1a\xab\x0f\n\x0bTrainingRun\x12U\n\x10training_options\x18\x01 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions\x12.\n\nstart_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12L\n\x07results\x18\x06 \x03(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult\x12M\n\x12\x65valuation_metrics\x18\x07 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.EvaluationMetrics\x1a\x9d\t\n\x0fTrainingOptions\x12\x16\n\x0emax_iterations\x18\x01 \x01(\x03\x12;\n\tloss_type\x18\x02 \x01(\x0e\x32(.google.cloud.bigquery.v2.Model.LossType\x12\x12\n\nlearn_rate\x18\x03 \x01(\x01\x12\x37\n\x11l1_regularization\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x37\n\x11l2_regularization\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15min_relative_progress\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\nwarm_start\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12.\n\nearly_stop\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x1b\n\x13input_label_columns\x18\t \x03(\t\x12J\n\x11\x64\x61ta_split_method\x18\n \x01(\x0e\x32/.google.cloud.bigquery.v2.Model.DataSplitMethod\x12 \n\x18\x64\x61ta_split_eval_fraction\x18\x0b \x01(\x01\x12\x19\n\x11\x64\x61ta_split_column\x18\x0c \x01(\t\x12N\n\x13learn_rate_strategy\x18\r \x01(\x0e\x32\x31.google.cloud.bigquery.v2.Model.LearnRateStrategy\x12\x1a\n\x12initial_learn_rate\x18\x10 \x01(\x01\x12o\n\x13label_class_weights\x18\x11 \x03(\x0b\x32R.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry\x12\x43\n\rdistance_type\x18\x14 \x01(\x0e\x32,.google.cloud.bigquery.v2.Model.DistanceType\x12\x14\n\x0cnum_clusters\x18\x15 \x01(\x03\x12\x11\n\tmodel_uri\x18\x16 \x01(\t\x12S\n\x15optimization_strategy\x18\x17 \x01(\x0e\x32\x34.google.cloud.bigquery.v2.Model.OptimizationStrategy\x12l\n\x1ckmeans_initialization_method\x18! \x01(\x0e\x32\x46.google.cloud.bigquery.v2.Model.KmeansEnums.KmeansInitializationMethod\x12$\n\x1ckmeans_initialization_column\x18" \x01(\t\x1a\x38\n\x16LabelClassWeightsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\xd7\x03\n\x0fIterationResult\x12*\n\x05index\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0b\x64uration_ms\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\rtraining_loss\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\teval_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x12\n\nlearn_rate\x18\x07 \x01(\x01\x12^\n\rcluster_infos\x18\x08 \x03(\x0b\x32G.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo\x1a\x8b\x01\n\x0b\x43lusterInfo\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12\x34\n\x0e\x63luster_radius\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x31\n\x0c\x63luster_size\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"s\n\tModelType\x12\x1a\n\x16MODEL_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11LINEAR_REGRESSION\x10\x01\x12\x17\n\x13LOGISTIC_REGRESSION\x10\x02\x12\n\n\x06KMEANS\x10\x03\x12\x0e\n\nTENSORFLOW\x10\x06"O\n\x08LossType\x12\x19\n\x15LOSS_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11MEAN_SQUARED_LOSS\x10\x01\x12\x11\n\rMEAN_LOG_LOSS\x10\x02"H\n\x0c\x44istanceType\x12\x1d\n\x19\x44ISTANCE_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tEUCLIDEAN\x10\x01\x12\n\n\x06\x43OSINE\x10\x02"z\n\x0f\x44\x61taSplitMethod\x12!\n\x1d\x44\x41TA_SPLIT_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x12\x0e\n\nSEQUENTIAL\x10\x03\x12\x0c\n\x08NO_SPLIT\x10\x04\x12\x0e\n\nAUTO_SPLIT\x10\x05"W\n\x11LearnRateStrategy\x12#\n\x1fLEARN_RATE_STRATEGY_UNSPECIFIED\x10\x00\x12\x0f\n\x0bLINE_SEARCH\x10\x01\x12\x0c\n\x08\x43ONSTANT\x10\x02"n\n\x14OptimizationStrategy\x12%\n!OPTIMIZATION_STRATEGY_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x42\x41TCH_GRADIENT_DESCENT\x10\x01\x12\x13\n\x0fNORMAL_EQUATION\x10\x02"Z\n\x0fGetModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x91\x01\n\x11PatchModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x05model\x18\x04 \x01(\x0b\x32\x1f.google.cloud.bigquery.v2.ModelB\x03\xe0\x41\x02"]\n\x12\x44\x65leteModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x8c\x01\n\x11ListModelsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x31\n\x0bmax_results\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x12\n\npage_token\x18\x04 \x01(\t"^\n\x12ListModelsResponse\x12/\n\x06models\x18\x01 
\x03(\x0b\x32\x1f.google.cloud.bigquery.v2.Model\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xfa\x05\n\x0cModelService\x12y\n\x08GetModel\x12).google.cloud.bigquery.v2.GetModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"!\xda\x41\x1eproject_id,dataset_id,model_id\x12\x8d\x01\n\nListModels\x12+.google.cloud.bigquery.v2.ListModelsRequest\x1a,.google.cloud.bigquery.v2.ListModelsResponse"$\xda\x41!project_id,dataset_id,max_results\x12\x83\x01\n\nPatchModel\x12+.google.cloud.bigquery.v2.PatchModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"\'\xda\x41$project_id,dataset_id,model_id,model\x12v\n\x0b\x44\x65leteModel\x12,.google.cloud.bigquery.v2.DeleteModelRequest\x1a\x16.google.protobuf.Empty"!\xda\x41\x1eproject_id,dataset_id,model_id\x1a\xe0\x01\xca\x41\x17\x62igquery.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-onlyBl\n\x1c\x63om.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3',
dependencies=[
google_dot_api_dot_client__pb2.DESCRIPTOR,
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
@@ -61,6 +55,7 @@
full_name="google.cloud.bigquery.v2.Model.KmeansEnums.KmeansInitializationMethod",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="KMEANS_INITIALIZATION_METHOD_UNSPECIFIED",
@@ -68,12 +63,23 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="RANDOM", index=1, number=1, serialized_options=None, type=None
+ name="RANDOM",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="CUSTOM", index=2, number=2, serialized_options=None, type=None
+ name="CUSTOM",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -88,6 +94,7 @@
full_name="google.cloud.bigquery.v2.Model.ModelType",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="MODEL_TYPE_UNSPECIFIED",
@@ -95,6 +102,7 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
name="LINEAR_REGRESSION",
@@ -102,6 +110,7 @@
number=1,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
name="LOGISTIC_REGRESSION",
@@ -109,12 +118,23 @@
number=2,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="KMEANS", index=3, number=3, serialized_options=None, type=None
+ name="KMEANS",
+ index=3,
+ number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="TENSORFLOW", index=4, number=6, serialized_options=None, type=None
+ name="TENSORFLOW",
+ index=4,
+ number=6,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -129,6 +149,7 @@
full_name="google.cloud.bigquery.v2.Model.LossType",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="LOSS_TYPE_UNSPECIFIED",
@@ -136,6 +157,7 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
name="MEAN_SQUARED_LOSS",
@@ -143,9 +165,15 @@
number=1,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="MEAN_LOG_LOSS", index=2, number=2, serialized_options=None, type=None
+ name="MEAN_LOG_LOSS",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -160,6 +188,7 @@
full_name="google.cloud.bigquery.v2.Model.DistanceType",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="DISTANCE_TYPE_UNSPECIFIED",
@@ -167,12 +196,23 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="EUCLIDEAN", index=1, number=1, serialized_options=None, type=None
+ name="EUCLIDEAN",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="COSINE", index=2, number=2, serialized_options=None, type=None
+ name="COSINE",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -187,6 +227,7 @@
full_name="google.cloud.bigquery.v2.Model.DataSplitMethod",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="DATA_SPLIT_METHOD_UNSPECIFIED",
@@ -194,21 +235,47 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="RANDOM", index=1, number=1, serialized_options=None, type=None
+ name="RANDOM",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="CUSTOM", index=2, number=2, serialized_options=None, type=None
+ name="CUSTOM",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="SEQUENTIAL", index=3, number=3, serialized_options=None, type=None
+ name="SEQUENTIAL",
+ index=3,
+ number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="NO_SPLIT", index=4, number=4, serialized_options=None, type=None
+ name="NO_SPLIT",
+ index=4,
+ number=4,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="AUTO_SPLIT", index=5, number=5, serialized_options=None, type=None
+ name="AUTO_SPLIT",
+ index=5,
+ number=5,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -223,6 +290,7 @@
full_name="google.cloud.bigquery.v2.Model.LearnRateStrategy",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="LEARN_RATE_STRATEGY_UNSPECIFIED",
@@ -230,12 +298,23 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="LINE_SEARCH", index=1, number=1, serialized_options=None, type=None
+ name="LINE_SEARCH",
+ index=1,
+ number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="CONSTANT", index=2, number=2, serialized_options=None, type=None
+ name="CONSTANT",
+ index=2,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -250,6 +329,7 @@
full_name="google.cloud.bigquery.v2.Model.OptimizationStrategy",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="OPTIMIZATION_STRATEGY_UNSPECIFIED",
@@ -257,6 +337,7 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
name="BATCH_GRADIENT_DESCENT",
@@ -264,6 +345,7 @@
number=1,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
name="NORMAL_EQUATION",
@@ -271,6 +353,7 @@
number=2,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -287,6 +370,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[],
extensions=[],
nested_types=[],
@@ -306,6 +390,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="mean_absolute_error",
@@ -324,6 +409,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="mean_squared_error",
@@ -342,6 +428,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="mean_squared_log_error",
@@ -360,6 +447,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="median_absolute_error",
@@ -378,6 +466,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="r_squared",
@@ -396,6 +485,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -416,6 +506,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="precision",
@@ -434,6 +525,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="recall",
@@ -452,6 +544,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="accuracy",
@@ -470,6 +563,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="threshold",
@@ -488,6 +582,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="f1_score",
@@ -506,6 +601,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="log_loss",
@@ -524,6 +620,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="roc_auc",
@@ -542,6 +639,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -562,6 +660,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="positive_class_threshold",
@@ -580,6 +679,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="true_positives",
@@ -598,6 +698,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="false_positives",
@@ -616,6 +717,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="true_negatives",
@@ -634,6 +736,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="false_negatives",
@@ -652,6 +755,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="precision",
@@ -670,6 +774,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="recall",
@@ -688,6 +793,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="f1_score",
@@ -706,6 +812,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="accuracy",
@@ -724,6 +831,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -744,6 +852,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="aggregate_classification_metrics",
@@ -762,6 +871,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="binary_confusion_matrix_list",
@@ -780,6 +890,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="positive_label",
@@ -790,7 +901,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -798,6 +909,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="negative_label",
@@ -808,7 +920,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -816,6 +928,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -836,6 +949,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="predicted_label",
@@ -846,7 +960,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -854,6 +968,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="item_count",
@@ -872,6 +987,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -892,6 +1008,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="actual_label",
@@ -902,7 +1019,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -910,6 +1027,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="entries",
@@ -928,6 +1046,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -948,6 +1067,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="confidence_threshold",
@@ -966,6 +1086,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="rows",
@@ -984,6 +1105,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1007,6 +1129,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="aggregate_classification_metrics",
@@ -1025,6 +1148,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="confusion_matrix_list",
@@ -1043,6 +1167,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1063,6 +1188,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="category",
@@ -1073,7 +1199,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -1081,6 +1207,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="count",
@@ -1099,6 +1226,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1119,6 +1247,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="category_counts",
@@ -1137,6 +1266,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1159,6 +1289,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="feature_column",
@@ -1169,7 +1300,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -1177,6 +1308,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="numerical_value",
@@ -1195,6 +1327,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="categorical_value",
@@ -1213,6 +1346,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1228,6 +1362,7 @@
full_name="google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.value",
index=0,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[],
),
],
@@ -1241,6 +1376,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="centroid_id",
@@ -1259,6 +1395,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="feature_values",
@@ -1277,6 +1414,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="count",
@@ -1295,6 +1433,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1315,6 +1454,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="davies_bouldin_index",
@@ -1333,6 +1473,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="mean_squared_distance",
@@ -1351,6 +1492,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="clusters",
@@ -1369,6 +1511,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1389,6 +1532,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="regression_metrics",
@@ -1407,6 +1551,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="binary_classification_metrics",
@@ -1425,6 +1570,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="multi_class_classification_metrics",
@@ -1443,6 +1589,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="clustering_metrics",
@@ -1461,6 +1608,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1476,6 +1624,7 @@
full_name="google.cloud.bigquery.v2.Model.EvaluationMetrics.metrics",
index=0,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[],
),
],
@@ -1489,6 +1638,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
@@ -1499,7 +1649,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -1507,6 +1657,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="value",
@@ -1525,12 +1676,13 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
- serialized_options=_b("8\001"),
+ serialized_options=b"8\001",
is_extendable=False,
syntax="proto3",
extension_ranges=[],
@@ -1545,6 +1697,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="max_iterations",
@@ -1563,6 +1716,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="loss_type",
@@ -1581,6 +1735,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="learn_rate",
@@ -1599,6 +1754,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="l1_regularization",
@@ -1617,6 +1773,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="l2_regularization",
@@ -1635,6 +1792,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="min_relative_progress",
@@ -1653,6 +1811,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="warm_start",
@@ -1671,6 +1830,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="early_stop",
@@ -1689,6 +1849,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="input_label_columns",
@@ -1707,6 +1868,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="data_split_method",
@@ -1725,6 +1887,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="data_split_eval_fraction",
@@ -1743,6 +1906,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="data_split_column",
@@ -1753,7 +1917,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -1761,6 +1925,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="learn_rate_strategy",
@@ -1779,6 +1944,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="initial_learn_rate",
@@ -1797,6 +1963,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="label_class_weights",
@@ -1815,6 +1982,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="distance_type",
@@ -1833,6 +2001,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="num_clusters",
@@ -1851,6 +2020,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model_uri",
@@ -1861,7 +2031,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -1869,6 +2039,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="optimization_strategy",
@@ -1887,6 +2058,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="kmeans_initialization_method",
@@ -1905,6 +2077,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="kmeans_initialization_column",
@@ -1915,7 +2088,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -1923,6 +2096,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -1943,6 +2117,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="centroid_id",
@@ -1961,6 +2136,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="cluster_radius",
@@ -1979,6 +2155,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="cluster_size",
@@ -1997,6 +2174,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2017,6 +2195,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="index",
@@ -2035,6 +2214,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="duration_ms",
@@ -2053,6 +2233,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="training_loss",
@@ -2071,6 +2252,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="eval_loss",
@@ -2089,6 +2271,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="learn_rate",
@@ -2107,6 +2290,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="cluster_infos",
@@ -2125,6 +2309,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2145,6 +2330,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="training_options",
@@ -2163,6 +2349,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="start_time",
@@ -2181,6 +2368,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="results",
@@ -2199,6 +2387,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="evaluation_metrics",
@@ -2217,6 +2406,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2240,6 +2430,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
@@ -2250,7 +2441,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -2258,6 +2449,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="value",
@@ -2268,7 +2460,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -2276,12 +2468,13 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
- serialized_options=_b("8\001"),
+ serialized_options=b"8\001",
is_extendable=False,
syntax="proto3",
extension_ranges=[],
@@ -2296,6 +2489,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="etag",
@@ -2306,14 +2500,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model_reference",
@@ -2330,8 +2525,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="creation_time",
@@ -2348,8 +2544,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="last_modified_time",
@@ -2366,8 +2563,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="description",
@@ -2378,14 +2576,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\001"),
+ serialized_options=b"\340A\001",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="friendly_name",
@@ -2396,14 +2595,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\001"),
+ serialized_options=b"\340A\001",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="labels",
@@ -2422,6 +2622,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="expiration_time",
@@ -2438,8 +2639,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\001"),
+ serialized_options=b"\340A\001",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="location",
@@ -2450,14 +2652,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="encryption_configuration",
@@ -2476,6 +2679,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model_type",
@@ -2492,8 +2696,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="training_runs",
@@ -2510,8 +2715,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="feature_columns",
@@ -2528,8 +2734,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="label_columns",
@@ -2546,8 +2753,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\003"),
+ serialized_options=b"\340A\003",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2586,6 +2794,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
@@ -2596,14 +2805,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="dataset_id",
@@ -2614,14 +2824,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model_id",
@@ -2632,14 +2843,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2661,6 +2873,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
@@ -2671,14 +2884,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="dataset_id",
@@ -2689,14 +2903,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model_id",
@@ -2707,14 +2922,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model",
@@ -2731,8 +2947,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2754,6 +2971,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
@@ -2764,14 +2982,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="dataset_id",
@@ -2782,14 +3001,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model_id",
@@ -2800,14 +3020,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2829,6 +3050,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
@@ -2839,14 +3061,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="dataset_id",
@@ -2857,14 +3080,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="max_results",
@@ -2883,6 +3107,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="page_token",
@@ -2893,7 +3118,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -2901,6 +3126,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -2922,6 +3148,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="models",
@@ -2940,6 +3167,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="next_page_token",
@@ -2950,7 +3178,7 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
@@ -2958,6 +3186,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -3294,25 +3523,24 @@
Model = _reflection.GeneratedProtocolMessageType(
"Model",
(_message.Message,),
- dict(
- KmeansEnums=_reflection.GeneratedProtocolMessageType(
+ {
+ "KmeansEnums": _reflection.GeneratedProtocolMessageType(
"KmeansEnums",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_KMEANSENUMS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2"
+ {
+ "DESCRIPTOR": _MODEL_KMEANSENUMS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.KmeansEnums)
- ),
+ },
),
- RegressionMetrics=_reflection.GeneratedProtocolMessageType(
+ "RegressionMetrics": _reflection.GeneratedProtocolMessageType(
"RegressionMetrics",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_REGRESSIONMETRICS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Evaluation metrics for regression and explicit feedback
- type matrix factorization models.
-
+ {
+ "DESCRIPTOR": _MODEL_REGRESSIONMETRICS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Evaluation metrics for regression and explicit feedback type matrix
+ factorization models.
Attributes:
mean_absolute_error:
@@ -3327,22 +3555,21 @@
R^2 score.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.RegressionMetrics)
- ),
+ },
),
- AggregateClassificationMetrics=_reflection.GeneratedProtocolMessageType(
+ "AggregateClassificationMetrics": _reflection.GeneratedProtocolMessageType(
"AggregateClassificationMetrics",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_AGGREGATECLASSIFICATIONMETRICS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Aggregate metrics for classification/classifier models.
- For multi-class models, the metrics are either macro-averaged or
- micro-averaged. When macro-averaged, the metrics are calculated for each
- label and then an unweighted average is taken of those values. When
- micro-averaged, the metric is calculated globally by counting the total
+ {
+ "DESCRIPTOR": _MODEL_AGGREGATECLASSIFICATIONMETRICS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Aggregate metrics for classification/classifier models. For multi-
+ class models, the metrics are either macro-averaged or micro-averaged.
+ When macro-averaged, the metrics are calculated for each label and
+ then an unweighted average is taken of those values. When micro-
+ averaged, the metric is calculated globally by counting the total
number of correctly predicted rows.
-
Attributes:
precision:
Precision is the fraction of actual positive predictions that
@@ -3371,20 +3598,19 @@
averaged metric.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.AggregateClassificationMetrics)
- ),
+ },
),
- BinaryClassificationMetrics=_reflection.GeneratedProtocolMessageType(
+ "BinaryClassificationMetrics": _reflection.GeneratedProtocolMessageType(
"BinaryClassificationMetrics",
(_message.Message,),
- dict(
- BinaryConfusionMatrix=_reflection.GeneratedProtocolMessageType(
+ {
+ "BinaryConfusionMatrix": _reflection.GeneratedProtocolMessageType(
"BinaryConfusionMatrix",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Confusion matrix for binary classification models.
-
+ {
+ "DESCRIPTOR": _MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Confusion matrix for binary classification models.
Attributes:
positive_class_threshold:
@@ -3410,13 +3636,11 @@
The fraction of predictions given the correct label.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix)
- ),
+ },
),
- DESCRIPTOR=_MODEL_BINARYCLASSIFICATIONMETRICS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Evaluation metrics for binary classification/classifier
- models.
-
+ "DESCRIPTOR": _MODEL_BINARYCLASSIFICATIONMETRICS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Evaluation metrics for binary classification/classifier models.
Attributes:
aggregate_classification_metrics:
@@ -3429,44 +3653,42 @@
Label representing the negative class.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.BinaryClassificationMetrics)
- ),
+ },
),
- MultiClassClassificationMetrics=_reflection.GeneratedProtocolMessageType(
+ "MultiClassClassificationMetrics": _reflection.GeneratedProtocolMessageType(
"MultiClassClassificationMetrics",
(_message.Message,),
- dict(
- ConfusionMatrix=_reflection.GeneratedProtocolMessageType(
+ {
+ "ConfusionMatrix": _reflection.GeneratedProtocolMessageType(
"ConfusionMatrix",
(_message.Message,),
- dict(
- Entry=_reflection.GeneratedProtocolMessageType(
+ {
+ "Entry": _reflection.GeneratedProtocolMessageType(
"Entry",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""A single entry in the confusion matrix.
-
+ {
+ "DESCRIPTOR": _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """A single entry in the confusion matrix.
Attributes:
predicted_label:
- The predicted label. For confidence\_threshold > 0, we will
+ The predicted label. For confidence_threshold > 0, we will
also add an entry indicating the number of items under the
confidence threshold.
item_count:
Number of items being predicted as this label.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry)
- ),
+ },
),
- Row=_reflection.GeneratedProtocolMessageType(
+ "Row": _reflection.GeneratedProtocolMessageType(
"Row",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""A single row in the confusion matrix.
-
+ {
+ "DESCRIPTOR": _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """A single row in the confusion matrix.
Attributes:
actual_label:
@@ -3475,12 +3697,11 @@
Info describing predicted label distribution.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row)
- ),
+ },
),
- DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Confusion matrix for multi-class classification models.
-
+ "DESCRIPTOR": _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Confusion matrix for multi-class classification models.
Attributes:
confidence_threshold:
@@ -3490,13 +3711,11 @@
One row per actual label.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix)
- ),
+ },
),
- DESCRIPTOR=_MODEL_MULTICLASSCLASSIFICATIONMETRICS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Evaluation metrics for multi-class
- classification/classifier models.
-
+ "DESCRIPTOR": _MODEL_MULTICLASSCLASSIFICATIONMETRICS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Evaluation metrics for multi-class classification/classifier models.
Attributes:
aggregate_classification_metrics:
@@ -3505,33 +3724,31 @@
Confusion matrix at different thresholds.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics)
- ),
+ },
),
- ClusteringMetrics=_reflection.GeneratedProtocolMessageType(
+ "ClusteringMetrics": _reflection.GeneratedProtocolMessageType(
"ClusteringMetrics",
(_message.Message,),
- dict(
- Cluster=_reflection.GeneratedProtocolMessageType(
+ {
+ "Cluster": _reflection.GeneratedProtocolMessageType(
"Cluster",
(_message.Message,),
- dict(
- FeatureValue=_reflection.GeneratedProtocolMessageType(
+ {
+ "FeatureValue": _reflection.GeneratedProtocolMessageType(
"FeatureValue",
(_message.Message,),
- dict(
- CategoricalValue=_reflection.GeneratedProtocolMessageType(
+ {
+ "CategoricalValue": _reflection.GeneratedProtocolMessageType(
"CategoricalValue",
(_message.Message,),
- dict(
- CategoryCount=_reflection.GeneratedProtocolMessageType(
+ {
+ "CategoryCount": _reflection.GeneratedProtocolMessageType(
"CategoryCount",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Represents the count of a single category within the
- cluster.
-
+ {
+ "DESCRIPTOR": _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Represents the count of a single category within the cluster.
Attributes:
category:
@@ -3541,27 +3758,25 @@
cluster.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount)
- ),
+ },
),
- DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Representative value of a categorical feature.
-
+ "DESCRIPTOR": _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Representative value of a categorical feature.
Attributes:
category_counts:
Counts of all categories for the categorical feature. If there
are more than ten categories, we return top ten (by count) and
- return one more CategoryCount with category "*OTHER*" and
+ return one more CategoryCount with category ``*OTHER*`` and
count as aggregate counts of remaining categories.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue)
- ),
+ },
),
- DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Representative value of a single feature within the cluster.
-
+ "DESCRIPTOR": _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Representative value of a single feature within the cluster.
Attributes:
feature_column:
@@ -3573,12 +3788,11 @@
The categorical feature value.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue)
- ),
+ },
),
- DESCRIPTOR=_MODEL_CLUSTERINGMETRICS_CLUSTER,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Message containing the information about one cluster.
-
+ "DESCRIPTOR": _MODEL_CLUSTERINGMETRICS_CLUSTER,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Message containing the information about one cluster.
Attributes:
centroid_id:
@@ -3590,12 +3804,11 @@
cluster.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster)
- ),
+ },
),
- DESCRIPTOR=_MODEL_CLUSTERINGMETRICS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Evaluation metrics for clustering models.
-
+ "DESCRIPTOR": _MODEL_CLUSTERINGMETRICS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Evaluation metrics for clustering models.
Attributes:
davies_bouldin_index:
@@ -3607,18 +3820,17 @@
[Beta] Information for all clusters.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics)
- ),
+ },
),
- EvaluationMetrics=_reflection.GeneratedProtocolMessageType(
+ "EvaluationMetrics": _reflection.GeneratedProtocolMessageType(
"EvaluationMetrics",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_EVALUATIONMETRICS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Evaluation metrics of a model. These are either computed
- on all training data or just the eval data based on whether eval data
- was used during training. These are not present for imported models.
-
+ {
+ "DESCRIPTOR": _MODEL_EVALUATIONMETRICS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Evaluation metrics of a model. These are either computed on all
+ training data or just the eval data based on whether eval data was
+ used during training. These are not present for imported models.
Attributes:
regression_metrics:
@@ -3632,28 +3844,28 @@
Populated for clustering models.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.EvaluationMetrics)
- ),
+ },
),
- TrainingRun=_reflection.GeneratedProtocolMessageType(
+ "TrainingRun": _reflection.GeneratedProtocolMessageType(
"TrainingRun",
(_message.Message,),
- dict(
- TrainingOptions=_reflection.GeneratedProtocolMessageType(
+ {
+ "TrainingOptions": _reflection.GeneratedProtocolMessageType(
"TrainingOptions",
(_message.Message,),
- dict(
- LabelClassWeightsEntry=_reflection.GeneratedProtocolMessageType(
+ {
+ "LabelClassWeightsEntry": _reflection.GeneratedProtocolMessageType(
"LabelClassWeightsEntry",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY,
- __module__="google.cloud.bigquery_v2.proto.model_pb2"
+ {
+ "DESCRIPTOR": _MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry)
- ),
+ },
),
- DESCRIPTOR=_MODEL_TRAININGRUN_TRAININGOPTIONS,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Protocol buffer.
+ "DESCRIPTOR": _MODEL_TRAININGRUN_TRAININGOPTIONS,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Protocol buffer.
Attributes:
max_iterations:
@@ -3669,31 +3881,31 @@
l2_regularization:
L2 regularization coefficient.
min_relative_progress:
- When early\_stop is true, stops training when accuracy
- improvement is less than 'min\_relative\_progress'. Used only
+ When early_stop is true, stops training when accuracy
+ improvement is less than ‘min_relative_progress’. Used only
for iterative training algorithms.
warm_start:
Whether to train a model from the last checkpoint.
early_stop:
- Whether to stop early when the loss doesn't improve
- significantly any more (compared to min\_relative\_progress).
+ Whether to stop early when the loss doesn’t improve
+ significantly any more (compared to min_relative_progress).
Used only for iterative training algorithms.
input_label_columns:
Name of input label columns in training data.
data_split_method:
- The data split type for training and evaluation, e.g. RANDOM.
+ The data split type for training and evaluation, e.g. RANDOM.
data_split_eval_fraction:
The fraction of evaluation data over the whole input data. The
rest of data will be used as training data. The format should
be double. Accurate to two decimal places. Default value is
0.2.
data_split_column:
- The column to split data with. This column won't be used as a
- feature. 1. When data\_split\_method is CUSTOM, the
+ The column to split data with. This column won’t be used as a
+ feature. 1. When data_split_method is CUSTOM, the
corresponding column should be boolean. The rows with true
value tag are eval data, and the false are training data. 2.
- When data\_split\_method is SEQ, the first
- DATA\_SPLIT\_EVAL\_FRACTION rows (from smallest to largest) in
+ When data_split_method is SEQ, the first
+ DATA_SPLIT_EVAL_FRACTION rows (from smallest to largest) in
the corresponding column are used as training data, and the
rest are eval data. It respects the order in Orderable data
types:
@@ -3722,23 +3934,22 @@
algorithm.
kmeans_initialization_column:
The column used to provide the initial centroids for kmeans
- algorithm when kmeans\_initialization\_method is CUSTOM.
+ algorithm when kmeans_initialization_method is CUSTOM.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions)
- ),
+ },
),
- IterationResult=_reflection.GeneratedProtocolMessageType(
+ "IterationResult": _reflection.GeneratedProtocolMessageType(
"IterationResult",
(_message.Message,),
- dict(
- ClusterInfo=_reflection.GeneratedProtocolMessageType(
+ {
+ "ClusterInfo": _reflection.GeneratedProtocolMessageType(
"ClusterInfo",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Information about a single cluster for clustering model.
-
+ {
+ "DESCRIPTOR": _MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Information about a single cluster for clustering model.
Attributes:
centroid_id:
@@ -3751,12 +3962,11 @@
cluster.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo)
- ),
+ },
),
- DESCRIPTOR=_MODEL_TRAININGRUN_ITERATIONRESULT,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Information about a single iteration of the training run.
-
+ "DESCRIPTOR": _MODEL_TRAININGRUN_ITERATIONRESULT,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Information about a single iteration of the training run.
Attributes:
index:
@@ -3773,13 +3983,11 @@
Information about top clusters for clustering models.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun.IterationResult)
- ),
+ },
),
- DESCRIPTOR=_MODEL_TRAININGRUN,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Information about a single training query run for the
- model.
-
+ "DESCRIPTOR": _MODEL_TRAININGRUN,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Information about a single training query run for the model.
Attributes:
training_options:
@@ -3789,26 +3997,26 @@
The start time of this training run.
results:
Output of each iteration run, results.size() <=
- max\_iterations.
+ max_iterations.
evaluation_metrics:
The evaluation metrics over training/eval data that were
computed at the end of training.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.TrainingRun)
- ),
+ },
),
- LabelsEntry=_reflection.GeneratedProtocolMessageType(
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
"LabelsEntry",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODEL_LABELSENTRY,
- __module__="google.cloud.bigquery_v2.proto.model_pb2"
+ {
+ "DESCRIPTOR": _MODEL_LABELSENTRY,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.LabelsEntry)
- ),
+ },
),
- DESCRIPTOR=_MODEL,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Protocol buffer.
+ "DESCRIPTOR": _MODEL,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Protocol buffer.
Attributes:
etag:
@@ -3851,17 +4059,17 @@
Output only. Type of the model resource.
training_runs:
Output only. Information for all training runs in increasing
- order of start\_time.
+ order of start_time.
feature_columns:
Output only. Input feature columns that were used to train
this model.
label_columns:
Output only. Label columns that were used to train this model.
- The output of the model will have a "predicted\_" prefix to
+ The output of the model will have a ``predicted\_`` prefix to
these columns.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model)
- ),
+ },
)
_sym_db.RegisterMessage(Model)
_sym_db.RegisterMessage(Model.KmeansEnums)
@@ -3891,10 +4099,10 @@
GetModelRequest = _reflection.GeneratedProtocolMessageType(
"GetModelRequest",
(_message.Message,),
- dict(
- DESCRIPTOR=_GETMODELREQUEST,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Protocol buffer.
+ {
+ "DESCRIPTOR": _GETMODELREQUEST,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Protocol buffer.
Attributes:
project_id:
@@ -3905,17 +4113,17 @@
Required. Model ID of the requested model.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.GetModelRequest)
- ),
+ },
)
_sym_db.RegisterMessage(GetModelRequest)
PatchModelRequest = _reflection.GeneratedProtocolMessageType(
"PatchModelRequest",
(_message.Message,),
- dict(
- DESCRIPTOR=_PATCHMODELREQUEST,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Protocol buffer.
+ {
+ "DESCRIPTOR": _PATCHMODELREQUEST,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Protocol buffer.
Attributes:
project_id:
@@ -3930,17 +4138,17 @@
set to default value.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.PatchModelRequest)
- ),
+ },
)
_sym_db.RegisterMessage(PatchModelRequest)
DeleteModelRequest = _reflection.GeneratedProtocolMessageType(
"DeleteModelRequest",
(_message.Message,),
- dict(
- DESCRIPTOR=_DELETEMODELREQUEST,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Protocol buffer.
+ {
+ "DESCRIPTOR": _DELETEMODELREQUEST,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Protocol buffer.
Attributes:
project_id:
@@ -3951,17 +4159,17 @@
Required. Model ID of the model to delete.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.DeleteModelRequest)
- ),
+ },
)
_sym_db.RegisterMessage(DeleteModelRequest)
ListModelsRequest = _reflection.GeneratedProtocolMessageType(
"ListModelsRequest",
(_message.Message,),
- dict(
- DESCRIPTOR=_LISTMODELSREQUEST,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Protocol buffer.
+ {
+ "DESCRIPTOR": _LISTMODELSREQUEST,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Protocol buffer.
Attributes:
project_id:
@@ -3977,28 +4185,28 @@
page of results
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.ListModelsRequest)
- ),
+ },
)
_sym_db.RegisterMessage(ListModelsRequest)
ListModelsResponse = _reflection.GeneratedProtocolMessageType(
"ListModelsResponse",
(_message.Message,),
- dict(
- DESCRIPTOR=_LISTMODELSRESPONSE,
- __module__="google.cloud.bigquery_v2.proto.model_pb2",
- __doc__="""Protocol buffer.
+ {
+ "DESCRIPTOR": _LISTMODELSRESPONSE,
+ "__module__": "google.cloud.bigquery_v2.proto.model_pb2",
+ "__doc__": """Protocol buffer.
Attributes:
models:
Models in the requested dataset. Only the following fields are
- populated: model\_reference, model\_type, creation\_time,
- last\_modified\_time and labels.
+ populated: model_reference, model_type, creation_time,
+ last_modified_time and labels.
next_page_token:
A token to request the next page of results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.ListModelsResponse)
- ),
+ },
)
_sym_db.RegisterMessage(ListModelsResponse)
@@ -4036,9 +4244,8 @@
full_name="google.cloud.bigquery.v2.ModelService",
file=DESCRIPTOR,
index=0,
- serialized_options=_b(
- "\312A\027bigquery.googleapis.com\322A\302\001https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only"
- ),
+ serialized_options=b"\312A\027bigquery.googleapis.com\322A\302\001https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only",
+ create_key=_descriptor._internal_create_key,
serialized_start=7804,
serialized_end=8566,
methods=[
@@ -4049,7 +4256,8 @@
containing_service=None,
input_type=_GETMODELREQUEST,
output_type=_MODEL,
- serialized_options=_b("\332A\036project_id,dataset_id,model_id"),
+ serialized_options=b"\332A\036project_id,dataset_id,model_id",
+ create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name="ListModels",
@@ -4058,7 +4266,8 @@
containing_service=None,
input_type=_LISTMODELSREQUEST,
output_type=_LISTMODELSRESPONSE,
- serialized_options=_b("\332A!project_id,dataset_id,max_results"),
+ serialized_options=b"\332A!project_id,dataset_id,max_results",
+ create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name="PatchModel",
@@ -4067,7 +4276,8 @@
containing_service=None,
input_type=_PATCHMODELREQUEST,
output_type=_MODEL,
- serialized_options=_b("\332A$project_id,dataset_id,model_id,model"),
+ serialized_options=b"\332A$project_id,dataset_id,model_id,model",
+ create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name="DeleteModel",
@@ -4076,7 +4286,8 @@
containing_service=None,
input_type=_DELETEMODELREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b("\332A\036project_id,dataset_id,model_id"),
+ serialized_options=b"\332A\036project_id,dataset_id,model_id",
+ create_key=_descriptor._internal_create_key,
),
],
)
diff --git a/google/cloud/bigquery_v2/proto/model_pb2_grpc.py b/google/cloud/bigquery_v2/proto/model_pb2_grpc.py
index 5abcdf0f2..13db95717 100644
--- a/google/cloud/bigquery_v2/proto/model_pb2_grpc.py
+++ b/google/cloud/bigquery_v2/proto/model_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.bigquery_v2.proto import (
@@ -8,15 +9,14 @@
class ModelServiceStub(object):
- # missing associated documentation comment in .proto file
- pass
+ """Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.GetModel = channel.unary_unary(
"/google.cloud.bigquery.v2.ModelService/GetModel",
request_serializer=google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.GetModelRequest.SerializeToString,
@@ -40,34 +40,33 @@ def __init__(self, channel):
class ModelServiceServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ """Missing associated documentation comment in .proto file."""
def GetModel(self, request, context):
"""Gets the specified model resource by model ID.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListModels(self, request, context):
"""Lists all models in the specified dataset. Requires the READER dataset
- role.
- """
+ role.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def PatchModel(self, request, context):
"""Patch specific fields in the specified model.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteModel(self, request, context):
"""Deletes the model specified by modelId from the dataset.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -100,3 +99,116 @@ def add_ModelServiceServicer_to_server(servicer, server):
"google.cloud.bigquery.v2.ModelService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class ModelService(object):
+ """Missing associated documentation comment in .proto file."""
+
+ @staticmethod
+ def GetModel(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.bigquery.v2.ModelService/GetModel",
+ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.GetModelRequest.SerializeToString,
+ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.Model.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListModels(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.bigquery.v2.ModelService/ListModels",
+ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.ListModelsRequest.SerializeToString,
+ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.ListModelsResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def PatchModel(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.bigquery.v2.ModelService/PatchModel",
+ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.PatchModelRequest.SerializeToString,
+ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.Model.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteModel(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.bigquery.v2.ModelService/DeleteModel",
+ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__pb2.DeleteModelRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
diff --git a/google/cloud/bigquery_v2/proto/model_reference_pb2.py b/google/cloud/bigquery_v2/proto/model_reference_pb2.py
index 01e6e2952..2411c4863 100644
--- a/google/cloud/bigquery_v2/proto/model_reference_pb2.py
+++ b/google/cloud/bigquery_v2/proto/model_reference_pb2.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigquery_v2/proto/model_reference.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -23,12 +20,9 @@
name="google/cloud/bigquery_v2/proto/model_reference.proto",
package="google.cloud.bigquery.v2",
syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.cloud.bigquery.v2B\023ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"
- ),
- serialized_pb=_b(
- '\n4google/cloud/bigquery_v2/proto/model_reference.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"Y\n\x0eModelReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x42u\n\x1c\x63om.google.cloud.bigquery.v2B\x13ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3'
- ),
+ serialized_options=b"\n\034com.google.cloud.bigquery.v2B\023ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n4google/cloud/bigquery_v2/proto/model_reference.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"Y\n\x0eModelReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x42u\n\x1c\x63om.google.cloud.bigquery.v2B\x13ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3',
dependencies=[
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
@@ -42,6 +36,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
@@ -52,14 +47,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="dataset_id",
@@ -70,14 +66,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="model_id",
@@ -88,14 +85,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -116,11 +114,10 @@
ModelReference = _reflection.GeneratedProtocolMessageType(
"ModelReference",
(_message.Message,),
- dict(
- DESCRIPTOR=_MODELREFERENCE,
- __module__="google.cloud.bigquery_v2.proto.model_reference_pb2",
- __doc__="""Id path of a model.
-
+ {
+ "DESCRIPTOR": _MODELREFERENCE,
+ "__module__": "google.cloud.bigquery_v2.proto.model_reference_pb2",
+ "__doc__": """Id path of a model.
Attributes:
project_id:
@@ -129,11 +126,11 @@
Required. The ID of the dataset containing this model.
model_id:
Required. The ID of the model. The ID must contain only
- letters (a-z, A-Z), numbers (0-9), or underscores (\_). The
+ letters (a-z, A-Z), numbers (0-9), or underscores (_). The
maximum length is 1,024 characters.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.ModelReference)
- ),
+ },
)
_sym_db.RegisterMessage(ModelReference)
diff --git a/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py b/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py
index 07cb78fe0..8a9393943 100644
--- a/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py
+++ b/google/cloud/bigquery_v2/proto/model_reference_pb2_grpc.py
@@ -1,2 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
diff --git a/google/cloud/bigquery_v2/proto/standard_sql_pb2.py b/google/cloud/bigquery_v2/proto/standard_sql_pb2.py
index ca0201405..bfe77f934 100644
--- a/google/cloud/bigquery_v2/proto/standard_sql_pb2.py
+++ b/google/cloud/bigquery_v2/proto/standard_sql_pb2.py
@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigquery_v2/proto/standard_sql.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -23,12 +20,9 @@
name="google/cloud/bigquery_v2/proto/standard_sql.proto",
package="google.cloud.bigquery.v2",
syntax="proto3",
- serialized_options=_b(
- "\n\034com.google.cloud.bigquery.v2B\020StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"
- ),
- serialized_pb=_b(
- '\n1google/cloud/bigquery_v2/proto/standard_sql.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xcb\x03\n\x13StandardSqlDataType\x12N\n\ttype_kind\x18\x01 \x01(\x0e\x32\x36.google.cloud.bigquery.v2.StandardSqlDataType.TypeKindB\x03\xe0\x41\x02\x12K\n\x12\x61rray_element_type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeH\x00\x12\x46\n\x0bstruct_type\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.v2.StandardSqlStructTypeH\x00"\xc2\x01\n\x08TypeKind\x12\x19\n\x15TYPE_KIND_UNSPECIFIED\x10\x00\x12\t\n\x05INT64\x10\x02\x12\x08\n\x04\x42OOL\x10\x05\x12\x0b\n\x07\x46LOAT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\t\n\x05\x42YTES\x10\t\x12\r\n\tTIMESTAMP\x10\x13\x12\x08\n\x04\x44\x41TE\x10\n\x12\x08\n\x04TIME\x10\x14\x12\x0c\n\x08\x44\x41TETIME\x10\x15\x12\r\n\tGEOGRAPHY\x10\x16\x12\x0b\n\x07NUMERIC\x10\x17\x12\t\n\x05\x41RRAY\x10\x10\x12\n\n\x06STRUCT\x10\x11\x42\n\n\x08sub_type"g\n\x10StandardSqlField\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12@\n\x04type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeB\x03\xe0\x41\x01"S\n\x15StandardSqlStructType\x12:\n\x06\x66ields\x18\x01 \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldBr\n\x1c\x63om.google.cloud.bigquery.v2B\x10StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3'
- ),
+ serialized_options=b"\n\034com.google.cloud.bigquery.v2B\020StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery",
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n1google/cloud/bigquery_v2/proto/standard_sql.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xcb\x03\n\x13StandardSqlDataType\x12N\n\ttype_kind\x18\x01 \x01(\x0e\x32\x36.google.cloud.bigquery.v2.StandardSqlDataType.TypeKindB\x03\xe0\x41\x02\x12K\n\x12\x61rray_element_type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeH\x00\x12\x46\n\x0bstruct_type\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.v2.StandardSqlStructTypeH\x00"\xc2\x01\n\x08TypeKind\x12\x19\n\x15TYPE_KIND_UNSPECIFIED\x10\x00\x12\t\n\x05INT64\x10\x02\x12\x08\n\x04\x42OOL\x10\x05\x12\x0b\n\x07\x46LOAT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\t\n\x05\x42YTES\x10\t\x12\r\n\tTIMESTAMP\x10\x13\x12\x08\n\x04\x44\x41TE\x10\n\x12\x08\n\x04TIME\x10\x14\x12\x0c\n\x08\x44\x41TETIME\x10\x15\x12\r\n\tGEOGRAPHY\x10\x16\x12\x0b\n\x07NUMERIC\x10\x17\x12\t\n\x05\x41RRAY\x10\x10\x12\n\n\x06STRUCT\x10\x11\x42\n\n\x08sub_type"g\n\x10StandardSqlField\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12@\n\x04type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeB\x03\xe0\x41\x01"S\n\x15StandardSqlStructType\x12:\n\x06\x66ields\x18\x01 \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldBr\n\x1c\x63om.google.cloud.bigquery.v2B\x10StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3',
dependencies=[
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
@@ -41,6 +35,7 @@
full_name="google.cloud.bigquery.v2.StandardSqlDataType.TypeKind",
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="TYPE_KIND_UNSPECIFIED",
@@ -48,45 +43,111 @@
number=0,
serialized_options=None,
type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="INT64", index=1, number=2, serialized_options=None, type=None
+ name="INT64",
+ index=1,
+ number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="BOOL", index=2, number=5, serialized_options=None, type=None
+ name="BOOL",
+ index=2,
+ number=5,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="FLOAT64", index=3, number=7, serialized_options=None, type=None
+ name="FLOAT64",
+ index=3,
+ number=7,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="STRING", index=4, number=8, serialized_options=None, type=None
+ name="STRING",
+ index=4,
+ number=8,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="BYTES", index=5, number=9, serialized_options=None, type=None
+ name="BYTES",
+ index=5,
+ number=9,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="TIMESTAMP", index=6, number=19, serialized_options=None, type=None
+ name="TIMESTAMP",
+ index=6,
+ number=19,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="DATE", index=7, number=10, serialized_options=None, type=None
+ name="DATE",
+ index=7,
+ number=10,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="TIME", index=8, number=20, serialized_options=None, type=None
+ name="TIME",
+ index=8,
+ number=20,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="DATETIME", index=9, number=21, serialized_options=None, type=None
+ name="DATETIME",
+ index=9,
+ number=21,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="GEOGRAPHY", index=10, number=22, serialized_options=None, type=None
+ name="GEOGRAPHY",
+ index=10,
+ number=22,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="NUMERIC", index=11, number=23, serialized_options=None, type=None
+ name="NUMERIC",
+ index=11,
+ number=23,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="ARRAY", index=12, number=16, serialized_options=None, type=None
+ name="ARRAY",
+ index=12,
+ number=16,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
- name="STRUCT", index=13, number=17, serialized_options=None, type=None
+ name="STRUCT",
+ index=13,
+ number=17,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
@@ -103,6 +164,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="type_kind",
@@ -119,8 +181,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\002"),
+ serialized_options=b"\340A\002",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="array_element_type",
@@ -139,6 +202,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="struct_type",
@@ -157,6 +221,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -172,6 +237,7 @@
full_name="google.cloud.bigquery.v2.StandardSqlDataType.sub_type",
index=0,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[],
),
],
@@ -186,6 +252,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="name",
@@ -196,14 +263,15 @@
cpp_type=9,
label=1,
has_default_value=False,
- default_value=_b("").decode("utf-8"),
+ default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\001"),
+ serialized_options=b"\340A\001",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="type",
@@ -220,8 +288,9 @@
containing_type=None,
is_extension=False,
extension_scope=None,
- serialized_options=_b("\340A\001"),
+ serialized_options=b"\340A\001",
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -243,6 +312,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="fields",
@@ -261,6 +331,7 @@
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
),
],
extensions=[],
@@ -305,39 +376,37 @@
StandardSqlDataType = _reflection.GeneratedProtocolMessageType(
"StandardSqlDataType",
(_message.Message,),
- dict(
- DESCRIPTOR=_STANDARDSQLDATATYPE,
- __module__="google.cloud.bigquery_v2.proto.standard_sql_pb2",
- __doc__="""The type of a variable, e.g., a function argument.
- Examples: INT64: {type\_kind="INT64"} ARRAY: {type\_kind="ARRAY",
- array\_element\_type="STRING"} STRUCT
:
- {type\_kind="STRUCT", struct\_type={fields=[ {name="x",
- type={type\_kind="STRING"}}, {name="y", type={type\_kind="ARRAY",
- array\_element\_type="DATE"}} ]}}
-
+ {
+ "DESCRIPTOR": _STANDARDSQLDATATYPE,
+ "__module__": "google.cloud.bigquery_v2.proto.standard_sql_pb2",
+ "__doc__": """The type of a variable, e.g., a function argument. Examples: INT64:
+ {type_kind=``INT64``} ARRAY: {type_kind=``ARRAY``,
+ array_element_type=``STRING``} STRUCT:
+ {type_kind=``STRUCT``, struct_type={fields=[ {name=``x``,
+ type={type_kind=``STRING``}}, {name=``y``, type={type_kind=``ARRAY``,
+ array_element_type=``DATE``}} ]}}
Attributes:
type_kind:
Required. The top level type of this field. Can be any
- standard SQL data type (e.g., "INT64", "DATE", "ARRAY").
+ standard SQL data type (e.g., ``INT64``, ``DATE``, ``ARRAY``).
array_element_type:
- The type of the array's elements, if type\_kind = "ARRAY".
+ The type of the array’s elements, if type_kind = ``ARRAY``.
struct_type:
- The fields of this struct, in order, if type\_kind = "STRUCT".
+ The fields of this struct, in order, if type_kind = ``STRUCT``.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.StandardSqlDataType)
- ),
+ },
)
_sym_db.RegisterMessage(StandardSqlDataType)
StandardSqlField = _reflection.GeneratedProtocolMessageType(
"StandardSqlField",
(_message.Message,),
- dict(
- DESCRIPTOR=_STANDARDSQLFIELD,
- __module__="google.cloud.bigquery_v2.proto.standard_sql_pb2",
- __doc__="""A field or a column.
-
+ {
+ "DESCRIPTOR": _STANDARDSQLFIELD,
+ "__module__": "google.cloud.bigquery_v2.proto.standard_sql_pb2",
+ "__doc__": """A field or a column.
Attributes:
name:
@@ -347,21 +416,21 @@
Optional. The type of this parameter. Absent if not explicitly
specified (e.g., CREATE FUNCTION statement can omit the return
type; in this case the output parameter does not have this
- "type" field).
+ ``type`` field).
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.StandardSqlField)
- ),
+ },
)
_sym_db.RegisterMessage(StandardSqlField)
StandardSqlStructType = _reflection.GeneratedProtocolMessageType(
"StandardSqlStructType",
(_message.Message,),
- dict(
- DESCRIPTOR=_STANDARDSQLSTRUCTTYPE,
- __module__="google.cloud.bigquery_v2.proto.standard_sql_pb2"
+ {
+ "DESCRIPTOR": _STANDARDSQLSTRUCTTYPE,
+ "__module__": "google.cloud.bigquery_v2.proto.standard_sql_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.StandardSqlStructType)
- ),
+ },
)
_sym_db.RegisterMessage(StandardSqlStructType)
diff --git a/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py b/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py
index 07cb78fe0..8a9393943 100644
--- a/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py
+++ b/google/cloud/bigquery_v2/proto/standard_sql_pb2_grpc.py
@@ -1,2 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000..ff599eb2a
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ > testing/test-env.sh
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-service-account" \
+ > testing/service-account.json
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-client-secrets" \
+ > testing/client-secrets.json
\ No newline at end of file
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
new file mode 100644
index 000000000..d309d6e97
--- /dev/null
+++ b/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+ trim_blocks=True,
+ loader=jinja2.FileSystemLoader(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+ return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('source')
+ parser.add_argument('--destination', default='README.rst')
+
+ args = parser.parse_args()
+
+ source = os.path.abspath(args.source)
+ root = os.path.dirname(source)
+ destination = os.path.join(root, args.destination)
+
+ jinja_env.globals['get_help'] = get_help
+
+ with io.open(source, 'r') as f:
+ config = yaml.load(f)
+
+ # This allows get_help to execute in the right directory.
+ os.chdir(root)
+
+ output = README_TMPL.render(config)
+
+ with io.open(destination, 'w') as f:
+ f.write(output)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 000000000..4fd239765
--- /dev/null
+++ b/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+ become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python {{sample.file}}
+{% if sample.show_help %}
+
+ {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000..1446b94a5
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000..11957ce27
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+ one for your project.
+
+.. _API Key:
+ https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000..a0406dba8
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000..5ea33d18c
--- /dev/null
+++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+ brew install portaudio
+
+ **Note**: if you encounter an error when running `pip install` that indicates
+ it can't find `portaudio.h`, try running `pip install` with the following
+ flags::
+
+ pip install --global-option='build_ext' \
+ --global-option='-I/usr/local/include' \
+ --global-option='-L/usr/local/lib' \
+ pyaudio
+
+* For Debian / Ubuntu Linux::
+
+ apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+ installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+ https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/setup.cfg b/setup.cfg
index 3bd555500..c3a2b39f6 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,3 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Generated by synthtool. DO NOT EDIT!
[bdist_wheel]
universal = 1
diff --git a/synth.metadata b/synth.metadata
index 86ecc1ffa..f131790f2 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -1,26 +1,25 @@
{
- "updateTime": "2020-02-04T11:46:46.343511Z",
"sources": [
{
- "generator": {
- "name": "artman",
- "version": "0.44.4",
- "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8"
+ "git": {
+ "name": ".",
+ "remote": "git@github.com:googleapis/python-bigquery",
+ "sha": "0946a5c460b0d675f6dbe4f053a7801edba36443"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "69d9945330a5721cd679f17331a78850e2618226",
- "internalRef": "293080182"
+ "sha": "e6ab0a55f2195169feded73dd684574dd4bd9dfa",
+ "internalRef": "319180144"
}
},
{
- "template": {
- "name": "python_split_library",
- "origin": "synthtool.gcp",
- "version": "2019.10.17"
+ "git": {
+ "name": "synthtool",
+ "remote": "https://github.com/googleapis/synthtool.git",
+ "sha": "303271797a360f8a439203413f13a160f2f5b3b4"
}
}
],
@@ -31,8 +30,7 @@
"apiName": "bigquery",
"apiVersion": "v2",
"language": "python",
- "generator": "gapic",
- "config": "google/cloud/bigquery/artman_bigquery_v2.yaml"
+ "generator": "bazel"
}
}
]
diff --git a/synth.py b/synth.py
index 7fba81a5c..2bc3798ea 100644
--- a/synth.py
+++ b/synth.py
@@ -59,7 +59,9 @@
# Add templated files
# ----------------------------------------------------------------------------
templated_files = common.py_library(cov_level=100)
-s.move(templated_files, excludes=["noxfile.py"])
+
+# BigQuery has a custom multiprocessing note
+s.move(templated_files, excludes=["noxfile.py", "docs/multiprocessing.rst"])
s.replace(
"docs/conf.py",
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 000000000..b05fbd630
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file