From 5aa389fe7a53daee37168e392f9b148b1912802b Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 21 Nov 2024 22:42:32 +0000
Subject: [PATCH 1/5] chore: fix `npm` for Node v18 samples tests

chore: fix `npm` for samples tests

Source-Link: https://github.com/googleapis/synthtool/commit/4d752428d93b18b69c28acdbd9aa821a517db73a
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:0d39e59663287ae929c1d4ccf8ebf7cef9946826c9b86eda7e85d8d752dbb584
---
 .github/.OwlBot.lock.yaml                  |  4 ++--
 .github/release-trigger.yml                |  1 +
 .github/workflows/ci.yaml                  | 24 +++++++++++-----------
 .kokoro/common.cfg                         |  2 +-
 .kokoro/continuous/node18/common.cfg       | 24 ++++++++++++++++++++++
 .kokoro/continuous/node18/lint.cfg         |  4 ++++
 .kokoro/continuous/node18/samples-test.cfg | 12 +++++++++++
 .kokoro/continuous/node18/system-test.cfg  | 12 +++++++++++
 .kokoro/continuous/node18/test.cfg         |  0
 .kokoro/presubmit/node18/common.cfg        | 24 ++++++++++++++++++++++
 .kokoro/presubmit/node18/samples-test.cfg  | 12 +++++++++++
 .kokoro/presubmit/node18/system-test.cfg   | 12 +++++++++++
 .kokoro/presubmit/node18/test.cfg          |  0
 .kokoro/release/docs-devsite.cfg           |  2 +-
 .kokoro/release/docs.cfg                   |  2 +-
 .kokoro/release/docs.sh                    |  2 +-
 .kokoro/release/publish.cfg                |  2 +-
 .kokoro/samples-test.sh                    |  6 ++++--
 .kokoro/system-test.sh                     |  2 +-
 .kokoro/test.bat                           |  2 +-
 .kokoro/test.sh                            |  2 +-
 .kokoro/trampoline_v2.sh                   |  2 +-
 src/types.d.ts                             | 18 +++++++++++-------
 23 files changed, 139 insertions(+), 32 deletions(-)
 create mode 100644 .kokoro/continuous/node18/common.cfg
 create mode 100644 .kokoro/continuous/node18/lint.cfg
 create mode 100644 .kokoro/continuous/node18/samples-test.cfg
 create mode 100644 .kokoro/continuous/node18/system-test.cfg
 create mode 100644 .kokoro/continuous/node18/test.cfg
 create mode 100644 .kokoro/presubmit/node18/common.cfg
 create mode 100644 .kokoro/presubmit/node18/samples-test.cfg
 create mode 100644 .kokoro/presubmit/node18/system-test.cfg
 create mode 100644 .kokoro/presubmit/node18/test.cfg

diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 24943e11..39a62ca6 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest
-  digest: sha256:609822e3c09b7a1bd90b99655904609f162cc15acb4704f1edf778284c36f429
-# created: 2024-10-01T19:34:30.797530443Z
+  digest: sha256:0d39e59663287ae929c1d4ccf8ebf7cef9946826c9b86eda7e85d8d752dbb584
+# created: 2024-11-21T22:39:44.342569463Z
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
index d4ca9418..73144bc0 100644
--- a/.github/release-trigger.yml
+++ b/.github/release-trigger.yml
@@ -1 +1,2 @@
 enabled: true
+multiScmName: nodejs-bigquery
\ No newline at end of file
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 4892eb2c..e2075983 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -9,10 +9,10 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        node: [14, 16, 18, 20]
+        node: [14, 16, 18]
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
           node-version: ${{ matrix.node }}
       - run: node --version
@@ -29,10 +29,10 @@ jobs:
   windows:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
-          node-version: 14
+          node-version: 18
       - run: npm install --engine-strict
       - run: npm test
         env:
@@ -40,19 +40,19 @@ jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
-          node-version: 14
+          node-version: 18
       - run: npm install
       - run: npm run lint
   docs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
-          node-version: 14
+          node-version: 18
       - run: npm install
       - run: npm run docs
       - uses: JustinBeckwith/linkinator-action@v1
diff --git a/.kokoro/common.cfg b/.kokoro/common.cfg
index 68f910dd..8687a020 100644
--- a/.kokoro/common.cfg
+++ b/.kokoro/common.cfg
@@ -16,7 +16,7 @@ build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh"
 # Configure the docker image for kokoro-trampoline.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user"
+  value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user"
 }
 env_vars: {
   key: "TRAMPOLINE_BUILD_FILE"
diff --git a/.kokoro/continuous/node18/common.cfg b/.kokoro/continuous/node18/common.cfg
new file mode 100644
index 00000000..8687a020
--- /dev/null
+++ b/.kokoro/continuous/node18/common.cfg
@@ -0,0 +1,24 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user"
+}
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/nodejs-bigquery/.kokoro/test.sh"
+}
diff --git a/.kokoro/continuous/node18/lint.cfg b/.kokoro/continuous/node18/lint.cfg
new file mode 100644
index 00000000..80b86f14
--- /dev/null
+++ b/.kokoro/continuous/node18/lint.cfg
@@ -0,0 +1,4 @@
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/nodejs-bigquery/.kokoro/lint.sh"
+}
diff --git a/.kokoro/continuous/node18/samples-test.cfg b/.kokoro/continuous/node18/samples-test.cfg
new file mode 100644
index 00000000..54748bab
--- /dev/null
+++ b/.kokoro/continuous/node18/samples-test.cfg
@@ -0,0 +1,12 @@
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/nodejs-bigquery/.kokoro/samples-test.sh"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "long-door-651-kokoro-system-test-service-account"
+}
\ No newline at end of file
diff --git a/.kokoro/continuous/node18/system-test.cfg b/.kokoro/continuous/node18/system-test.cfg
new file mode 100644
index 00000000..c89e9937
--- /dev/null
+++ b/.kokoro/continuous/node18/system-test.cfg
@@ -0,0 +1,12 @@
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/nodejs-bigquery/.kokoro/system-test.sh"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "long-door-651-kokoro-system-test-service-account"
+}
\ No newline at end of file
diff --git a/.kokoro/continuous/node18/test.cfg b/.kokoro/continuous/node18/test.cfg
new file mode 100644
index 00000000..e69de29b
diff --git a/.kokoro/presubmit/node18/common.cfg b/.kokoro/presubmit/node18/common.cfg
new file mode 100644
index 00000000..8687a020
--- /dev/null
+++ b/.kokoro/presubmit/node18/common.cfg
@@ -0,0 +1,24 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user"
+}
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/nodejs-bigquery/.kokoro/test.sh"
+}
diff --git a/.kokoro/presubmit/node18/samples-test.cfg b/.kokoro/presubmit/node18/samples-test.cfg
new file mode 100644
index 00000000..54748bab
--- /dev/null
+++ b/.kokoro/presubmit/node18/samples-test.cfg
@@ -0,0 +1,12 @@
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/nodejs-bigquery/.kokoro/samples-test.sh"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "long-door-651-kokoro-system-test-service-account"
+}
\ No newline at end of file
diff --git a/.kokoro/presubmit/node18/system-test.cfg b/.kokoro/presubmit/node18/system-test.cfg
new file mode 100644
index 00000000..c89e9937
--- /dev/null
+++ b/.kokoro/presubmit/node18/system-test.cfg
@@ -0,0 +1,12 @@
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/nodejs-bigquery/.kokoro/system-test.sh"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "long-door-651-kokoro-system-test-service-account"
+}
\ No newline at end of file
diff --git a/.kokoro/presubmit/node18/test.cfg b/.kokoro/presubmit/node18/test.cfg
new file mode 100644
index 00000000..e69de29b
diff --git a/.kokoro/release/docs-devsite.cfg b/.kokoro/release/docs-devsite.cfg
index 5eb4cf6a..682c8277 100644
--- a/.kokoro/release/docs-devsite.cfg
+++ b/.kokoro/release/docs-devsite.cfg
@@ -11,7 +11,7 @@ before_action {
 # doc publications use a Python image.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user"
+  value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user"
 }
 
 # Download trampoline resources.
diff --git a/.kokoro/release/docs.cfg b/.kokoro/release/docs.cfg
index 83eb5b10..dd568cd6 100644
--- a/.kokoro/release/docs.cfg
+++ b/.kokoro/release/docs.cfg
@@ -11,7 +11,7 @@ before_action {
 # doc publications use a Python image.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user"
+  value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user"
 }
 
 # Download trampoline resources.
diff --git a/.kokoro/release/docs.sh b/.kokoro/release/docs.sh
index 1d8f3f49..e9079a60 100755
--- a/.kokoro/release/docs.sh
+++ b/.kokoro/release/docs.sh
@@ -16,7 +16,7 @@
 
 set -eo pipefail
 
-# build jsdocs (Python is installed on the Node 10 docker image).
+# build jsdocs (Python is installed on the Node 18 docker image).
 if [[ -z "$CREDENTIALS" ]]; then
   # if CREDENTIALS are explicitly set, assume we're testing locally
   # and don't set NPM_CONFIG_PREFIX.
diff --git a/.kokoro/release/publish.cfg b/.kokoro/release/publish.cfg
index 3dfef4f9..15dd939c 100644
--- a/.kokoro/release/publish.cfg
+++ b/.kokoro/release/publish.cfg
@@ -30,7 +30,7 @@ build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh"
 # Configure the docker image for kokoro-trampoline.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user"
+  value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user"
 }
 
 env_vars: {
diff --git a/.kokoro/samples-test.sh b/.kokoro/samples-test.sh
index 8c5d108c..52877539 100755
--- a/.kokoro/samples-test.sh
+++ b/.kokoro/samples-test.sh
@@ -16,7 +16,9 @@
 
 set -eo pipefail
 
-export NPM_CONFIG_PREFIX=${HOME}/.npm-global
+# Ensure the npm global directory is writable, otherwise rebuild `npm`
+mkdir -p $NPM_CONFIG_PREFIX
+npm config -g ls || npm i -g npm@`npm --version`
 
 # Setup service account credentials.
 export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account
@@ -56,7 +58,7 @@ fi
 
 # codecov combines coverage across integration and unit tests. Include
 # the logic below for any environment you wish to collect coverage for:
-COVERAGE_NODE=14
+COVERAGE_NODE=18
 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then
   NYC_BIN=./node_modules/nyc/bin/nyc.js
   if [ -f "$NYC_BIN" ]; then
diff --git a/.kokoro/system-test.sh b/.kokoro/system-test.sh
index 0b3043d2..a90d5cfe 100755
--- a/.kokoro/system-test.sh
+++ b/.kokoro/system-test.sh
@@ -49,7 +49,7 @@ npm run system-test
 
 # codecov combines coverage across integration and unit tests. Include
 # the logic below for any environment you wish to collect coverage for:
-COVERAGE_NODE=14
+COVERAGE_NODE=18
 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then
   NYC_BIN=./node_modules/nyc/bin/nyc.js
   if [ -f "$NYC_BIN" ]; then
diff --git a/.kokoro/test.bat b/.kokoro/test.bat
index 0bb12405..caf82565 100644
--- a/.kokoro/test.bat
+++ b/.kokoro/test.bat
@@ -21,7 +21,7 @@ cd ..
 @rem we upgrade Node.js in the image:
 SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm
 
-call nvm use v14.17.3
+call nvm use 18
 call which node
 
 call npm install || goto :error
diff --git a/.kokoro/test.sh b/.kokoro/test.sh
index 862d478d..0d9f6392 100755
--- a/.kokoro/test.sh
+++ b/.kokoro/test.sh
@@ -39,7 +39,7 @@ npm test
 
 # codecov combines coverage across integration and unit tests. Include
 # the logic below for any environment you wish to collect coverage for:
-COVERAGE_NODE=14
+COVERAGE_NODE=18
 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then
   NYC_BIN=./node_modules/nyc/bin/nyc.js
   if [ -f "$NYC_BIN" ]; then
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index 4d031121..5d6cfcca 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -44,7 +44,7 @@
 # the project root.
 #
 # Here is an example for running this script.
-# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:10-user \
+# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:18-user \
 # TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \
 # .kokoro/trampoline_v2.sh
 
diff --git a/src/types.d.ts b/src/types.d.ts
index f765ffd7..19b042fd 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -13,7 +13,7 @@
 // limitations under the License.
 
 /**
- * Discovery Revision: 20241013
+ * Discovery Revision: 20241111
  */
 
 /**
@@ -77,7 +77,7 @@ declare namespace bigquery {
      */
    argumentKind?: 'ARGUMENT_KIND_UNSPECIFIED' | 'FIXED_TYPE' | 'ANY_TYPE';
    /**
-     * Required unless argument_kind = ANY_TYPE.
+     * Set if argument_kind == FIXED_TYPE.
      */
    dataType?: IStandardSqlDataType;
    /**
@@ -407,19 +407,19 @@ declare namespace bigquery {
    */
  type IBigLakeConfiguration = {
    /**
-     * Required. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
+     * Optional. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
      */
    connectionId?: string;
    /**
-     * Required. The file format the table data is stored in.
+     * Optional. The file format the table data is stored in.
      */
    fileFormat?: 'FILE_FORMAT_UNSPECIFIED' | 'PARQUET';
    /**
-     * Required. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
+     * Optional. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
      */
    storageUri?: string;
    /**
-     * Required. The table format the metadata only snapshots are stored in.
+     * Optional. The table format the metadata only snapshots are stored in.
      */
    tableFormat?: 'TABLE_FORMAT_UNSPECIFIED' | 'ICEBERG';
  };
@@ -4863,6 +4863,10 @@ declare namespace bigquery {
      * Output only. The geographic location where the table resides. This value is inherited from the dataset.
      */
    location?: string;
+    /**
+     * Optional. If set, overrides the default managed table type configured in the dataset.
+     */
+    managedTableType?: 'MANAGED_TABLE_TYPE_UNSPECIFIED' | 'NATIVE' | 'ICEBERG';
    /**
      * Optional. The materialized view definition.
      */
@@ -5518,7 +5522,7 @@ declare namespace bigquery {
      */
    colsampleBytree?: number;
    /**
-     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as "SUM(x)" or "SUM(x)/SUM(y)", where x and y are column names from the base table.
+     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as `SUM(x)` or `SUM(x)/SUM(y)`, where x and y are column names from the base table.
      */
    contributionMetric?: string;
    /**

From a9b0e3d88ae3fd9546261d434a9105f7d52b34e0 Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 21 Nov 2024 22:48:30 +0000
Subject: [PATCH 2/5] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?=
 =?UTF-8?q?st-processor?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
---
 src/types.d.ts | 18 +++++++-----------
 1 file changed, 7 insertions(+), 11 deletions(-)

diff --git a/src/types.d.ts b/src/types.d.ts
index 19b042fd..f765ffd7 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -13,7 +13,7 @@
 // limitations under the License.
 
 /**
- * Discovery Revision: 20241111
+ * Discovery Revision: 20241013
  */
 
 /**
@@ -77,7 +77,7 @@ declare namespace bigquery {
      */
    argumentKind?: 'ARGUMENT_KIND_UNSPECIFIED' | 'FIXED_TYPE' | 'ANY_TYPE';
    /**
-     * Set if argument_kind == FIXED_TYPE.
+     * Required unless argument_kind = ANY_TYPE.
      */
    dataType?: IStandardSqlDataType;
    /**
@@ -407,19 +407,19 @@ declare namespace bigquery {
    */
  type IBigLakeConfiguration = {
    /**
-     * Optional. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
+     * Required. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
      */
    connectionId?: string;
    /**
-     * Optional. The file format the table data is stored in.
+     * Required. The file format the table data is stored in.
      */
    fileFormat?: 'FILE_FORMAT_UNSPECIFIED' | 'PARQUET';
    /**
-     * Optional. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
+     * Required. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
      */
    storageUri?: string;
    /**
-     * Optional. The table format the metadata only snapshots are stored in.
+     * Required. The table format the metadata only snapshots are stored in.
      */
    tableFormat?: 'TABLE_FORMAT_UNSPECIFIED' | 'ICEBERG';
  };
@@ -4863,10 +4863,6 @@ declare namespace bigquery {
      * Output only. The geographic location where the table resides. This value is inherited from the dataset.
      */
    location?: string;
-    /**
-     * Optional. If set, overrides the default managed table type configured in the dataset.
-     */
-    managedTableType?: 'MANAGED_TABLE_TYPE_UNSPECIFIED' | 'NATIVE' | 'ICEBERG';
    /**
      * Optional. The materialized view definition.
      */
@@ -5522,7 +5518,7 @@ declare namespace bigquery {
      */
    colsampleBytree?: number;
    /**
-     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as `SUM(x)` or `SUM(x)/SUM(y)`, where x and y are column names from the base table.
+     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as "SUM(x)" or "SUM(x)/SUM(y)", where x and y are column names from the base table.
      */
    contributionMetric?: string;
    /**

From c700d590e19ed3634df337092db276d108584d6e Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 21 Nov 2024 22:51:40 +0000
Subject: [PATCH 3/5] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?=
 =?UTF-8?q?st-processor?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
---
 src/types.d.ts | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/src/types.d.ts b/src/types.d.ts
index f765ffd7..19b042fd 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -13,7 +13,7 @@
 // limitations under the License.
 
 /**
- * Discovery Revision: 20241013
+ * Discovery Revision: 20241111
  */
 
 /**
@@ -77,7 +77,7 @@ declare namespace bigquery {
      */
    argumentKind?: 'ARGUMENT_KIND_UNSPECIFIED' | 'FIXED_TYPE' | 'ANY_TYPE';
    /**
-     * Required unless argument_kind = ANY_TYPE.
+     * Set if argument_kind == FIXED_TYPE.
      */
    dataType?: IStandardSqlDataType;
    /**
@@ -407,19 +407,19 @@ declare namespace bigquery {
    */
  type IBigLakeConfiguration = {
    /**
-     * Required. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
+     * Optional. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
      */
    connectionId?: string;
    /**
-     * Required. The file format the table data is stored in.
+     * Optional. The file format the table data is stored in.
      */
    fileFormat?: 'FILE_FORMAT_UNSPECIFIED' | 'PARQUET';
    /**
-     * Required. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
+     * Optional. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
      */
    storageUri?: string;
    /**
-     * Required. The table format the metadata only snapshots are stored in.
+     * Optional. The table format the metadata only snapshots are stored in.
      */
    tableFormat?: 'TABLE_FORMAT_UNSPECIFIED' | 'ICEBERG';
  };
@@ -4863,6 +4863,10 @@ declare namespace bigquery {
      * Output only. The geographic location where the table resides. This value is inherited from the dataset.
      */
    location?: string;
+    /**
+     * Optional. If set, overrides the default managed table type configured in the dataset.
+     */
+    managedTableType?: 'MANAGED_TABLE_TYPE_UNSPECIFIED' | 'NATIVE' | 'ICEBERG';
    /**
      * Optional. The materialized view definition.
      */
@@ -5518,7 +5522,7 @@ declare namespace bigquery {
      */
    colsampleBytree?: number;
    /**
-     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as "SUM(x)" or "SUM(x)/SUM(y)", where x and y are column names from the base table.
+     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as `SUM(x)` or `SUM(x)/SUM(y)`, where x and y are column names from the base table.
      */
    contributionMetric?: string;
    /**

From deae88fce5df6f5ad8653ec58ec1585035c0f881 Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 21 Nov 2024 22:54:51 +0000
Subject: [PATCH 4/5] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?=
 =?UTF-8?q?st-processor?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
---
 src/types.d.ts | 18 +++++++-----------
 1 file changed, 7 insertions(+), 11 deletions(-)

diff --git a/src/types.d.ts b/src/types.d.ts
index 19b042fd..f765ffd7 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -13,7 +13,7 @@
 // limitations under the License.
 
 /**
- * Discovery Revision: 20241111
+ * Discovery Revision: 20241013
 */
 
 /**
@@ -77,7 +77,7 @@ declare namespace bigquery {
      */
    argumentKind?: 'ARGUMENT_KIND_UNSPECIFIED' | 'FIXED_TYPE' | 'ANY_TYPE';
    /**
-     * Set if argument_kind == FIXED_TYPE.
+     * Required unless argument_kind = ANY_TYPE.
      */
    dataType?: IStandardSqlDataType;
    /**
@@ -407,19 +407,19 @@ declare namespace bigquery {
    */
  type IBigLakeConfiguration = {
    /**
-     * Optional. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
+     * Required. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
      */
    connectionId?: string;
    /**
-     * Optional. The file format the table data is stored in.
+     * Required. The file format the table data is stored in.
      */
    fileFormat?: 'FILE_FORMAT_UNSPECIFIED' | 'PARQUET';
    /**
-     * Optional. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
+     * Required. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
      */
    storageUri?: string;
    /**
-     * Optional. The table format the metadata only snapshots are stored in.
+     * Required. The table format the metadata only snapshots are stored in.
      */
    tableFormat?: 'TABLE_FORMAT_UNSPECIFIED' | 'ICEBERG';
  };
@@ -4863,10 +4863,6 @@ declare namespace bigquery {
      * Output only. The geographic location where the table resides. This value is inherited from the dataset.
      */
    location?: string;
-    /**
-     * Optional. If set, overrides the default managed table type configured in the dataset.
-     */
-    managedTableType?: 'MANAGED_TABLE_TYPE_UNSPECIFIED' | 'NATIVE' | 'ICEBERG';
    /**
      * Optional. The materialized view definition.
      */
@@ -5522,7 +5518,7 @@ declare namespace bigquery {
      */
    colsampleBytree?: number;
    /**
-     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as `SUM(x)` or `SUM(x)/SUM(y)`, where x and y are column names from the base table.
+     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as "SUM(x)" or "SUM(x)/SUM(y)", where x and y are column names from the base table.
      */
    contributionMetric?: string;
    /**

From 33bc33572ee6689474d333cea5325c6bd520b271 Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 21 Nov 2024 22:57:56 +0000
Subject: [PATCH 5/5] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?=
 =?UTF-8?q?st-processor?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
---
 src/types.d.ts | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/src/types.d.ts b/src/types.d.ts
index f765ffd7..19b042fd 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -13,7 +13,7 @@
 // limitations under the License.
 
 /**
- * Discovery Revision: 20241013
+ * Discovery Revision: 20241111
 */
 
 /**
@@ -77,7 +77,7 @@ declare namespace bigquery {
      */
    argumentKind?: 'ARGUMENT_KIND_UNSPECIFIED' | 'FIXED_TYPE' | 'ANY_TYPE';
    /**
-     * Required unless argument_kind = ANY_TYPE.
+     * Set if argument_kind == FIXED_TYPE.
      */
    dataType?: IStandardSqlDataType;
    /**
@@ -407,19 +407,19 @@ declare namespace bigquery {
    */
  type IBigLakeConfiguration = {
    /**
-     * Required. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
+     * Optional. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}".
      */
    connectionId?: string;
    /**
-     * Required. The file format the table data is stored in.
+     * Optional. The file format the table data is stored in.
      */
    fileFormat?: 'FILE_FORMAT_UNSPECIFIED' | 'PARQUET';
    /**
-     * Required. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
+     * Optional. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`
      */
    storageUri?: string;
    /**
-     * Required. The table format the metadata only snapshots are stored in.
+     * Optional. The table format the metadata only snapshots are stored in.
      */
    tableFormat?: 'TABLE_FORMAT_UNSPECIFIED' | 'ICEBERG';
  };
@@ -4863,6 +4863,10 @@ declare namespace bigquery {
      * Output only. The geographic location where the table resides. This value is inherited from the dataset.
      */
    location?: string;
+    /**
+     * Optional. If set, overrides the default managed table type configured in the dataset.
+     */
+    managedTableType?: 'MANAGED_TABLE_TYPE_UNSPECIFIED' | 'NATIVE' | 'ICEBERG';
    /**
      * Optional. The materialized view definition.
      */
@@ -5518,7 +5522,7 @@ declare namespace bigquery {
      */
    colsampleBytree?: number;
    /**
-     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as "SUM(x)" or "SUM(x)/SUM(y)", where x and y are column names from the base table.
+     * The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as `SUM(x)` or `SUM(x)/SUM(y)`, where x and y are column names from the base table.
      */
    contributionMetric?: string;
    /**