diff --git a/.circleci/config.yml b/.circleci/config.yml
index 8dfd895a63..31076e984f 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -3,7 +3,7 @@ jobs:
testGenerator:
docker:
- image: circleci/node:10-browsers
- working_directory: /tmp/workspace/gapic-generator-typescript
+ working_directory: /home/circleci/project/gapic-generator-typescript
steps:
- checkout
- run:
@@ -31,17 +31,14 @@ jobs:
- run:
name: copy protos to generated client library
command: |
- cp -r typescript/test/protos .test-out-showcase
- cp -r typescript/test/protos .test-out-keymanager
- cp -r typescript/test/protos .test-out-translate
- cp -r typescript/test/protos .test-out-monitoring
- cp -r typescript/test/protos .test-out-texttospeech
- cp -r typescript/test/protos .test-out-dlp
+ for dir in .test-out-* ; do
+ cp -r test-fixtures/protos $dir
+ done
- persist_to_workspace:
- root: /tmp
+ root: /home/circleci/project
paths:
- - workspace
- showcaseTsTestApplication:
+ - gapic-generator-typescript
+ showcaseTestApplications:
docker:
- image: circleci/node:10-browsers
steps:
@@ -50,23 +47,14 @@ jobs:
at: workspace
- run:
name: Run showcase test for Typescript users
- command: |
- cd workspace/workspace/gapic-generator-typescript
- npm run ts-test-application
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript
+ command: npm run ts-test-application
environment:
NPM_CONFIG_PREFIX: /tmp/.npm-global
- showcaseJsTestApplication:
- docker:
- - image: circleci/node:10-browsers
- steps:
- - checkout
- - attach_workspace:
- at: workspace
- run:
name: Run showcase test for JavaScript users
- command: |
- cd workspace/workspace/gapic-generator-typescript
- npm run js-test-application
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript
+ command: npm run js-test-application
environment:
NPM_CONFIG_PREFIX: /tmp/.npm-global
showcaseLibTest:
@@ -78,15 +66,14 @@ jobs:
at: workspace
- run:
name: Run unit tests, system tests, jsdoc generation, and gts fix of the generated Showcase library
- command: |
- cd workspace/workspace/gapic-generator-typescript/.test-out-showcase
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript/.test-out-showcase
+ command: &alltests |
npm install
npm test
npm run fix
npm run compile
npm run system-test
npm run docs
- npm run docs-test
kmsLibTest:
docker:
- image: circleci/node:10-browsers
@@ -96,15 +83,8 @@ jobs:
at: workspace
- run:
name: Run unit tests, system tests, jsdoc generation, and gts fix of the generated KMS library
- command: |
- cd workspace/workspace/gapic-generator-typescript/.test-out-keymanager
- npm install
- npm test
- npm run fix
- npm run compile
- npm run system-test
- npm run docs
- npm run docs-test
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript/.test-out-kms
+ command: *alltests
translateLibTest:
docker:
- image: circleci/node:10-browsers
@@ -114,15 +94,8 @@ jobs:
at: workspace
- run:
name: Run unit tests, system tests, jsdoc generation, and gts fix of the generated Translate library
- command: |
- cd workspace/workspace/gapic-generator-typescript/.test-out-translate
- npm install
- npm test
- npm run fix
- npm run compile
- npm run system-test
- npm run docs
- npm run docs-test
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript/.test-out-translate
+ command: *alltests
monitoringLibTest:
docker:
- image: circleci/node:10-browsers
@@ -132,13 +105,8 @@ jobs:
at: workspace
- run:
name: Run unit tests, system tests, jsdoc generation, and gts fix of the generated Monitoring library
- command: |
- cd workspace/workspace/gapic-generator-typescript/.test-out-monitoring
- npm install
- npm test
- npm run fix
- npm run compile
- npm run system-test
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript/.test-out-monitoring
+ command: *alltests
dlpLibTest:
docker:
- image: circleci/node:10-browsers
@@ -147,15 +115,9 @@ jobs:
- attach_workspace:
at: workspace
- run:
- name: Run unit tests, system tests, jsdoc generation, and gts fix of the generated dlp library
- command: |
- cd workspace/workspace/gapic-generator-typescript/.test-out-dlp
- npm install
- npm test
- npm run fix
- npm run compile
- npm run system-test
- npm run docs
+ name: Run unit tests, system tests, jsdoc generation, and gts fix of the generated DLP library
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript/.test-out-dlp
+ command: *alltests
ttsLibTest:
docker:
- image: circleci/node:10-browsers
@@ -165,24 +127,14 @@ jobs:
at: workspace
- run:
name: Run unit tests, system tests, jsdoc generation, and gts fix of the generated Text-to-Speech library
- command: |
- cd workspace/workspace/gapic-generator-typescript/.test-out-texttospeech
- npm install
- npm test
- npm run fix
- npm run compile
- npm run system-test
- npm run docs
- npm run docs-test
+ working_directory: /home/circleci/project/workspace/gapic-generator-typescript/.test-out-texttospeech
+ command: *alltests
workflows:
version: 2
tests:
jobs:
- testGenerator
- - showcaseTsTestApplication:
- requires:
- - testGenerator
- - showcaseJsTestApplication:
+ - showcaseTestApplications:
requires:
- testGenerator
- showcaseLibTest:
diff --git a/typescript/test/testdata/dlp/.gitignore.baseline b/baselines/dlp/.gitignore.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/.gitignore.baseline
rename to baselines/dlp/.gitignore.baseline
diff --git a/typescript/test/testdata/dlp/.jsdoc.js.baseline b/baselines/dlp/.jsdoc.js.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/.jsdoc.js.baseline
rename to baselines/dlp/.jsdoc.js.baseline
diff --git a/typescript/test/testdata/dlp/.mocharc.json.baseline b/baselines/dlp/.mocharc.json.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/.mocharc.json.baseline
rename to baselines/dlp/.mocharc.json.baseline
diff --git a/typescript/test/testdata/dlp/linkinator.config.json.baseline b/baselines/dlp/linkinator.config.json.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/linkinator.config.json.baseline
rename to baselines/dlp/linkinator.config.json.baseline
diff --git a/typescript/test/testdata/dlp/package.json.baseline b/baselines/dlp/package.json.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/package.json.baseline
rename to baselines/dlp/package.json.baseline
diff --git a/typescript/test/testdata/dlp/proto.list.baseline b/baselines/dlp/proto.list.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/proto.list.baseline
rename to baselines/dlp/proto.list.baseline
diff --git a/typescript/test/protos/google/cloud/common_resources.proto b/baselines/dlp/protos/google/cloud/common_resources.proto.baseline
similarity index 100%
rename from typescript/test/protos/google/cloud/common_resources.proto
rename to baselines/dlp/protos/google/cloud/common_resources.proto.baseline
diff --git a/typescript/test/protos/google/privacy/dlp/v2/dlp.proto b/baselines/dlp/protos/google/privacy/dlp/v2/dlp.proto.baseline
similarity index 100%
rename from typescript/test/protos/google/privacy/dlp/v2/dlp.proto
rename to baselines/dlp/protos/google/privacy/dlp/v2/dlp.proto.baseline
diff --git a/typescript/test/protos/google/privacy/dlp/v2/storage.proto b/baselines/dlp/protos/google/privacy/dlp/v2/storage.proto.baseline
similarity index 100%
rename from typescript/test/protos/google/privacy/dlp/v2/storage.proto
rename to baselines/dlp/protos/google/privacy/dlp/v2/storage.proto.baseline
diff --git a/typescript/test/testdata/dlp/src/index.ts.baseline b/baselines/dlp/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/src/index.ts.baseline
rename to baselines/dlp/src/index.ts.baseline
diff --git a/typescript/test/testdata/dlp/src/v2/dlp_service_client.ts.baseline b/baselines/dlp/src/v2/dlp_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/src/v2/dlp_service_client.ts.baseline
rename to baselines/dlp/src/v2/dlp_service_client.ts.baseline
diff --git a/typescript/test/testdata/dlp/src/v2/dlp_service_client_config.json.baseline b/baselines/dlp/src/v2/dlp_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/src/v2/dlp_service_client_config.json.baseline
rename to baselines/dlp/src/v2/dlp_service_client_config.json.baseline
diff --git a/typescript/test/testdata/dlp/src/v2/dlp_service_proto_list.json.baseline b/baselines/dlp/src/v2/dlp_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/src/v2/dlp_service_proto_list.json.baseline
rename to baselines/dlp/src/v2/dlp_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/dlp/src/v2/index.ts.baseline b/baselines/dlp/src/v2/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/src/v2/index.ts.baseline
rename to baselines/dlp/src/v2/index.ts.baseline
diff --git a/typescript/test/testdata/dlp/system-test/fixtures/sample/src/index.js.baseline b/baselines/dlp/system-test/fixtures/sample/src/index.js.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/system-test/fixtures/sample/src/index.js.baseline
rename to baselines/dlp/system-test/fixtures/sample/src/index.js.baseline
diff --git a/typescript/test/testdata/dlp/system-test/fixtures/sample/src/index.ts.baseline b/baselines/dlp/system-test/fixtures/sample/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/system-test/fixtures/sample/src/index.ts.baseline
rename to baselines/dlp/system-test/fixtures/sample/src/index.ts.baseline
diff --git a/typescript/test/testdata/dlp/system-test/install.ts.baseline b/baselines/dlp/system-test/install.ts.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/system-test/install.ts.baseline
rename to baselines/dlp/system-test/install.ts.baseline
diff --git a/typescript/test/testdata/dlp/test/gapic-dlp_service-v2.ts.baseline b/baselines/dlp/test/gapic-dlp_service-v2.ts.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/test/gapic-dlp_service-v2.ts.baseline
rename to baselines/dlp/test/gapic-dlp_service-v2.ts.baseline
diff --git a/typescript/test/testdata/dlp/tsconfig.json.baseline b/baselines/dlp/tsconfig.json.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/tsconfig.json.baseline
rename to baselines/dlp/tsconfig.json.baseline
diff --git a/typescript/test/testdata/dlp/tslint.json.baseline b/baselines/dlp/tslint.json.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/tslint.json.baseline
rename to baselines/dlp/tslint.json.baseline
diff --git a/typescript/test/testdata/dlp/webpack.config.js.baseline b/baselines/dlp/webpack.config.js.baseline
similarity index 100%
rename from typescript/test/testdata/dlp/webpack.config.js.baseline
rename to baselines/dlp/webpack.config.js.baseline
diff --git a/typescript/test/testdata/keymanager/.gitignore.baseline b/baselines/kms/.gitignore.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/.gitignore.baseline
rename to baselines/kms/.gitignore.baseline
diff --git a/typescript/test/testdata/keymanager/.jsdoc.js.baseline b/baselines/kms/.jsdoc.js.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/.jsdoc.js.baseline
rename to baselines/kms/.jsdoc.js.baseline
diff --git a/typescript/test/testdata/keymanager/.mocharc.json.baseline b/baselines/kms/.mocharc.json.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/.mocharc.json.baseline
rename to baselines/kms/.mocharc.json.baseline
diff --git a/typescript/test/testdata/keymanager/linkinator.config.json.baseline b/baselines/kms/linkinator.config.json.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/linkinator.config.json.baseline
rename to baselines/kms/linkinator.config.json.baseline
diff --git a/typescript/test/testdata/keymanager/package.json.baseline b/baselines/kms/package.json.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/package.json.baseline
rename to baselines/kms/package.json.baseline
diff --git a/typescript/test/testdata/keymanager/proto.list.baseline b/baselines/kms/proto.list.baseline
similarity index 74%
rename from typescript/test/testdata/keymanager/proto.list.baseline
rename to baselines/kms/proto.list.baseline
index 3dc867ea2f..8fd04cb8b4 100644
--- a/typescript/test/testdata/keymanager/proto.list.baseline
+++ b/baselines/kms/proto.list.baseline
@@ -3,7 +3,7 @@ google/protobuf/descriptor.proto
google/api/annotations.proto
google/protobuf/duration.proto
google/protobuf/timestamp.proto
-google/kms/v1/resources.proto
+google/cloud/kms/v1/resources.proto
google/protobuf/field_mask.proto
google/api/client.proto
-google/kms/v1/service.proto
+google/cloud/kms/v1/service.proto
diff --git a/typescript/test/protos/google/kms/v1/resources.proto b/baselines/kms/protos/google/cloud/kms/v1/resources.proto.baseline
similarity index 100%
rename from typescript/test/protos/google/kms/v1/resources.proto
rename to baselines/kms/protos/google/cloud/kms/v1/resources.proto.baseline
diff --git a/baselines/kms/protos/google/cloud/kms/v1/service.proto.baseline b/baselines/kms/protos/google/cloud/kms/v1/service.proto.baseline
new file mode 100644
index 0000000000..0f1266dfe8
--- /dev/null
+++ b/baselines/kms/protos/google/cloud/kms/v1/service.proto.baseline
@@ -0,0 +1,718 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.kms.v1;
+
+import "google/api/annotations.proto";
+import "google/cloud/kms/v1/resources.proto";
+import "google/protobuf/field_mask.proto";
+import "google/api/client.proto";
+
+option cc_enable_arenas = true;
+option csharp_namespace = "Google.Cloud.Kms.V1";
+option go_package = "google.golang.org/genproto/googleapis/cloud/kms/v1;kms";
+option java_multiple_files = true;
+option java_outer_classname = "KmsProto";
+option java_package = "com.google.cloud.kms.v1";
+option php_namespace = "Google\\Cloud\\Kms\\V1";
+
+// Google Cloud Key Management Service
+//
+// Manages cryptographic keys and operations using those keys. Implements a REST
+// model with the following objects:
+//
+// * [KeyRing][google.cloud.kms.v1.KeyRing]
+// * [CryptoKey][google.cloud.kms.v1.CryptoKey]
+// * [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]
+//
+// If you are using manual gRPC libraries, see
+// [Using gRPC with Cloud KMS](https://cloud.google.com/kms/docs/grpc).
+service KeyManagementService {
+ option (google.api.default_host) = "cloudkms.googleapis.com";
+ option (google.api.oauth_scopes) =
+ "https://www.googleapis.com/auth/cloud-platform,"
+ "https://www.googleapis.com/auth/cloudkms";
+
+ // Lists [KeyRings][google.cloud.kms.v1.KeyRing].
+ rpc ListKeyRings(ListKeyRingsRequest) returns (ListKeyRingsResponse) {
+ option (google.api.http) = {
+ get: "/v1/{parent=projects/*/locations/*}/keyRings"
+ };
+ }
+
+ // Lists [CryptoKeys][google.cloud.kms.v1.CryptoKey].
+ rpc ListCryptoKeys(ListCryptoKeysRequest) returns (ListCryptoKeysResponse) {
+ option (google.api.http) = {
+ get: "/v1/{parent=projects/*/locations/*/keyRings/*}/cryptoKeys"
+ };
+ }
+
+ // Lists [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion].
+ rpc ListCryptoKeyVersions(ListCryptoKeyVersionsRequest) returns (ListCryptoKeyVersionsResponse) {
+ option (google.api.http) = {
+ get: "/v1/{parent=projects/*/locations/*/keyRings/*/cryptoKeys/*}/cryptoKeyVersions"
+ };
+ }
+
+ // Lists [ImportJobs][google.cloud.kms.v1.ImportJob].
+ rpc ListImportJobs(ListImportJobsRequest) returns (ListImportJobsResponse) {
+ option (google.api.http) = {
+ get: "/v1/{parent=projects/*/locations/*/keyRings/*}/importJobs"
+ };
+ }
+
+ // Returns metadata for a given [KeyRing][google.cloud.kms.v1.KeyRing].
+ rpc GetKeyRing(GetKeyRingRequest) returns (KeyRing) {
+ option (google.api.http) = {
+ get: "/v1/{name=projects/*/locations/*/keyRings/*}"
+ };
+ }
+
+ // Returns metadata for a given [CryptoKey][google.cloud.kms.v1.CryptoKey], as well as its
+ // [primary][google.cloud.kms.v1.CryptoKey.primary] [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
+ rpc GetCryptoKey(GetCryptoKeyRequest) returns (CryptoKey) {
+ option (google.api.http) = {
+ get: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*}"
+ };
+ }
+
+ // Returns metadata for a given [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
+ rpc GetCryptoKeyVersion(GetCryptoKeyVersionRequest) returns (CryptoKeyVersion) {
+ option (google.api.http) = {
+ get: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}"
+ };
+ }
+
+ // Returns the public key for the given [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. The
+ // [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] must be
+ // [ASYMMETRIC_SIGN][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_SIGN] or
+ // [ASYMMETRIC_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_DECRYPT].
+ rpc GetPublicKey(GetPublicKeyRequest) returns (PublicKey) {
+ option (google.api.http) = {
+ get: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}/publicKey"
+ };
+ }
+
+ // Returns metadata for a given [ImportJob][google.cloud.kms.v1.ImportJob].
+ rpc GetImportJob(GetImportJobRequest) returns (ImportJob) {
+ option (google.api.http) = {
+ get: "/v1/{name=projects/*/locations/*/keyRings/*/importJobs/*}"
+ };
+ }
+
+ // Create a new [KeyRing][google.cloud.kms.v1.KeyRing] in a given Project and Location.
+ rpc CreateKeyRing(CreateKeyRingRequest) returns (KeyRing) {
+ option (google.api.http) = {
+ post: "/v1/{parent=projects/*/locations/*}/keyRings"
+ body: "key_ring"
+ };
+ }
+
+ // Create a new [CryptoKey][google.cloud.kms.v1.CryptoKey] within a [KeyRing][google.cloud.kms.v1.KeyRing].
+ //
+ // [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] and
+ // [CryptoKey.version_template.algorithm][google.cloud.kms.v1.CryptoKeyVersionTemplate.algorithm]
+ // are required.
+ rpc CreateCryptoKey(CreateCryptoKeyRequest) returns (CryptoKey) {
+ option (google.api.http) = {
+ post: "/v1/{parent=projects/*/locations/*/keyRings/*}/cryptoKeys"
+ body: "crypto_key"
+ };
+ }
+
+ // Create a new [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in a [CryptoKey][google.cloud.kms.v1.CryptoKey].
+ //
+ // The server will assign the next sequential id. If unset,
+ // [state][google.cloud.kms.v1.CryptoKeyVersion.state] will be set to
+ // [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED].
+ rpc CreateCryptoKeyVersion(CreateCryptoKeyVersionRequest) returns (CryptoKeyVersion) {
+ option (google.api.http) = {
+ post: "/v1/{parent=projects/*/locations/*/keyRings/*/cryptoKeys/*}/cryptoKeyVersions"
+ body: "crypto_key_version"
+ };
+ }
+
+ // Imports a new [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] into an existing [CryptoKey][google.cloud.kms.v1.CryptoKey] using the
+ // wrapped key material provided in the request.
+ //
+ // The version ID will be assigned the next sequential id within the
+ // [CryptoKey][google.cloud.kms.v1.CryptoKey].
+ rpc ImportCryptoKeyVersion(ImportCryptoKeyVersionRequest) returns (CryptoKeyVersion) {
+ option (google.api.http) = {
+ post: "/v1/{parent=projects/*/locations/*/keyRings/*/cryptoKeys/*}/cryptoKeyVersions:import"
+ body: "*"
+ };
+ }
+
+ // Create a new [ImportJob][google.cloud.kms.v1.ImportJob] within a [KeyRing][google.cloud.kms.v1.KeyRing].
+ //
+ // [ImportJob.import_method][google.cloud.kms.v1.ImportJob.import_method] is required.
+ rpc CreateImportJob(CreateImportJobRequest) returns (ImportJob) {
+ option (google.api.http) = {
+ post: "/v1/{parent=projects/*/locations/*/keyRings/*}/importJobs"
+ body: "import_job"
+ };
+ }
+
+ // Update a [CryptoKey][google.cloud.kms.v1.CryptoKey].
+ rpc UpdateCryptoKey(UpdateCryptoKeyRequest) returns (CryptoKey) {
+ option (google.api.http) = {
+ patch: "/v1/{crypto_key.name=projects/*/locations/*/keyRings/*/cryptoKeys/*}"
+ body: "crypto_key"
+ };
+ }
+
+ // Update a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]'s metadata.
+ //
+ // [state][google.cloud.kms.v1.CryptoKeyVersion.state] may be changed between
+ // [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] and
+ // [DISABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DISABLED] using this
+ // method. See [DestroyCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DestroyCryptoKeyVersion] and [RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion] to
+ // move between other states.
+ rpc UpdateCryptoKeyVersion(UpdateCryptoKeyVersionRequest) returns (CryptoKeyVersion) {
+ option (google.api.http) = {
+ patch: "/v1/{crypto_key_version.name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}"
+ body: "crypto_key_version"
+ };
+ }
+
+ // Encrypts data, so that it can only be recovered by a call to [Decrypt][google.cloud.kms.v1.KeyManagementService.Decrypt].
+ // The [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] must be
+ // [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
+ rpc Encrypt(EncryptRequest) returns (EncryptResponse) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/**}:encrypt"
+ body: "*"
+ };
+ }
+
+ // Decrypts data that was protected by [Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. The [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // must be [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
+ rpc Decrypt(DecryptRequest) returns (DecryptResponse) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*}:decrypt"
+ body: "*"
+ };
+ }
+
+ // Signs data using a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // ASYMMETRIC_SIGN, producing a signature that can be verified with the public
+ // key retrieved from [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey].
+ rpc AsymmetricSign(AsymmetricSignRequest) returns (AsymmetricSignResponse) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}:asymmetricSign"
+ body: "*"
+ };
+ }
+
+ // Decrypts data that was encrypted with a public key retrieved from
+ // [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey] corresponding to a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with
+ // [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] ASYMMETRIC_DECRYPT.
+ rpc AsymmetricDecrypt(AsymmetricDecryptRequest) returns (AsymmetricDecryptResponse) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}:asymmetricDecrypt"
+ body: "*"
+ };
+ }
+
+ // Update the version of a [CryptoKey][google.cloud.kms.v1.CryptoKey] that will be used in [Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt].
+ //
+ // Returns an error if called on an asymmetric key.
+ rpc UpdateCryptoKeyPrimaryVersion(UpdateCryptoKeyPrimaryVersionRequest) returns (CryptoKey) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*}:updatePrimaryVersion"
+ body: "*"
+ };
+ }
+
+ // Schedule a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] for destruction.
+ //
+ // Upon calling this method, [CryptoKeyVersion.state][google.cloud.kms.v1.CryptoKeyVersion.state] will be set to
+ // [DESTROY_SCHEDULED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROY_SCHEDULED]
+ // and [destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time] will be set to a time 24
+ // hours in the future, at which point the [state][google.cloud.kms.v1.CryptoKeyVersion.state]
+ // will be changed to
+ // [DESTROYED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROYED], and the key
+ // material will be irrevocably destroyed.
+ //
+ // Before the [destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time] is reached,
+ // [RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion] may be called to reverse the process.
+ rpc DestroyCryptoKeyVersion(DestroyCryptoKeyVersionRequest) returns (CryptoKeyVersion) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}:destroy"
+ body: "*"
+ };
+ }
+
+ // Restore a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in the
+ // [DESTROY_SCHEDULED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROY_SCHEDULED]
+ // state.
+ //
+ // Upon restoration of the CryptoKeyVersion, [state][google.cloud.kms.v1.CryptoKeyVersion.state]
+ // will be set to [DISABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DISABLED],
+ // and [destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time] will be cleared.
+ rpc RestoreCryptoKeyVersion(RestoreCryptoKeyVersionRequest) returns (CryptoKeyVersion) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*}:restore"
+ body: "*"
+ };
+ }
+}
+
+// Request message for [KeyManagementService.ListKeyRings][google.cloud.kms.v1.KeyManagementService.ListKeyRings].
+message ListKeyRingsRequest {
+ // Required. The resource name of the location associated with the
+ // [KeyRings][google.cloud.kms.v1.KeyRing], in the format `projects/*/locations/*`.
+ string parent = 1;
+
+ // Optional limit on the number of [KeyRings][google.cloud.kms.v1.KeyRing] to include in the
+ // response. Further [KeyRings][google.cloud.kms.v1.KeyRing] can subsequently be obtained by
+ // including the [ListKeyRingsResponse.next_page_token][google.cloud.kms.v1.ListKeyRingsResponse.next_page_token] in a subsequent
+ // request. If unspecified, the server will pick an appropriate default.
+ int32 page_size = 2;
+
+ // Optional pagination token, returned earlier via
+ // [ListKeyRingsResponse.next_page_token][google.cloud.kms.v1.ListKeyRingsResponse.next_page_token].
+ string page_token = 3;
+
+ // Optional. Only include resources that match the filter in the response.
+ string filter = 4;
+
+ // Optional. Specify how the results should be sorted. If not specified, the
+ // results will be sorted in the default order.
+ string order_by = 5;
+}
+
+// Request message for [KeyManagementService.ListCryptoKeys][google.cloud.kms.v1.KeyManagementService.ListCryptoKeys].
+message ListCryptoKeysRequest {
+ // Required. The resource name of the [KeyRing][google.cloud.kms.v1.KeyRing] to list, in the format
+ // `projects/*/locations/*/keyRings/*`.
+ string parent = 1;
+
+ // Optional limit on the number of [CryptoKeys][google.cloud.kms.v1.CryptoKey] to include in the
+ // response. Further [CryptoKeys][google.cloud.kms.v1.CryptoKey] can subsequently be obtained by
+ // including the [ListCryptoKeysResponse.next_page_token][google.cloud.kms.v1.ListCryptoKeysResponse.next_page_token] in a subsequent
+ // request. If unspecified, the server will pick an appropriate default.
+ int32 page_size = 2;
+
+ // Optional pagination token, returned earlier via
+ // [ListCryptoKeysResponse.next_page_token][google.cloud.kms.v1.ListCryptoKeysResponse.next_page_token].
+ string page_token = 3;
+
+ // The fields of the primary version to include in the response.
+ CryptoKeyVersion.CryptoKeyVersionView version_view = 4;
+
+ // Optional. Only include resources that match the filter in the response.
+ string filter = 5;
+
+ // Optional. Specify how the results should be sorted. If not specified, the
+ // results will be sorted in the default order.
+ string order_by = 6;
+}
+
+// Request message for [KeyManagementService.ListCryptoKeyVersions][google.cloud.kms.v1.KeyManagementService.ListCryptoKeyVersions].
+message ListCryptoKeyVersionsRequest {
+ // Required. The resource name of the [CryptoKey][google.cloud.kms.v1.CryptoKey] to list, in the format
+ // `projects/*/locations/*/keyRings/*/cryptoKeys/*`.
+ string parent = 1;
+
+ // Optional limit on the number of [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] to
+ // include in the response. Further [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] can
+ // subsequently be obtained by including the
+ // [ListCryptoKeyVersionsResponse.next_page_token][google.cloud.kms.v1.ListCryptoKeyVersionsResponse.next_page_token] in a subsequent request.
+ // If unspecified, the server will pick an appropriate default.
+ int32 page_size = 2;
+
+ // Optional pagination token, returned earlier via
+ // [ListCryptoKeyVersionsResponse.next_page_token][google.cloud.kms.v1.ListCryptoKeyVersionsResponse.next_page_token].
+ string page_token = 3;
+
+ // The fields to include in the response.
+ CryptoKeyVersion.CryptoKeyVersionView view = 4;
+
+ // Optional. Only include resources that match the filter in the response.
+ string filter = 5;
+
+ // Optional. Specify how the results should be sorted. If not specified, the
+ // results will be sorted in the default order.
+ string order_by = 6;
+}
+
+// Request message for [KeyManagementService.ListImportJobs][google.cloud.kms.v1.KeyManagementService.ListImportJobs].
+message ListImportJobsRequest {
+ // Required. The resource name of the [KeyRing][google.cloud.kms.v1.KeyRing] to list, in the format
+ // `projects/*/locations/*/keyRings/*`.
+ string parent = 1;
+
+ // Optional limit on the number of [ImportJobs][google.cloud.kms.v1.ImportJob] to include in the
+ // response. Further [ImportJobs][google.cloud.kms.v1.ImportJob] can subsequently be obtained by
+ // including the [ListImportJobsResponse.next_page_token][google.cloud.kms.v1.ListImportJobsResponse.next_page_token] in a subsequent
+ // request. If unspecified, the server will pick an appropriate default.
+ int32 page_size = 2;
+
+ // Optional pagination token, returned earlier via
+ // [ListImportJobsResponse.next_page_token][google.cloud.kms.v1.ListImportJobsResponse.next_page_token].
+ string page_token = 3;
+
+ // Optional. Only include resources that match the filter in the response.
+ string filter = 4;
+
+ // Optional. Specify how the results should be sorted. If not specified, the
+ // results will be sorted in the default order.
+ string order_by = 5;
+}
+
+// Response message for [KeyManagementService.ListKeyRings][google.cloud.kms.v1.KeyManagementService.ListKeyRings].
+message ListKeyRingsResponse {
+ // The list of [KeyRings][google.cloud.kms.v1.KeyRing].
+ repeated KeyRing key_rings = 1;
+
+ // A token to retrieve next page of results. Pass this value in
+ // [ListKeyRingsRequest.page_token][google.cloud.kms.v1.ListKeyRingsRequest.page_token] to retrieve the next page of results.
+ string next_page_token = 2;
+
+ // The total number of [KeyRings][google.cloud.kms.v1.KeyRing] that matched the query.
+ int32 total_size = 3;
+}
+
+// Response message for [KeyManagementService.ListCryptoKeys][google.cloud.kms.v1.KeyManagementService.ListCryptoKeys].
+message ListCryptoKeysResponse {
+ // The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey].
+ repeated CryptoKey crypto_keys = 1;
+
+ // A token to retrieve next page of results. Pass this value in
+ // [ListCryptoKeysRequest.page_token][google.cloud.kms.v1.ListCryptoKeysRequest.page_token] to retrieve the next page of results.
+ string next_page_token = 2;
+
+ // The total number of [CryptoKeys][google.cloud.kms.v1.CryptoKey] that matched the query.
+ int32 total_size = 3;
+}
+
+// Response message for [KeyManagementService.ListCryptoKeyVersions][google.cloud.kms.v1.KeyManagementService.ListCryptoKeyVersions].
+message ListCryptoKeyVersionsResponse {
+ // The list of [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion].
+ repeated CryptoKeyVersion crypto_key_versions = 1;
+
+ // A token to retrieve next page of results. Pass this value in
+ // [ListCryptoKeyVersionsRequest.page_token][google.cloud.kms.v1.ListCryptoKeyVersionsRequest.page_token] to retrieve the next page of
+ // results.
+ string next_page_token = 2;
+
+ // The total number of [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] that matched the
+ // query.
+ int32 total_size = 3;
+}
+
+// Response message for [KeyManagementService.ListImportJobs][google.cloud.kms.v1.KeyManagementService.ListImportJobs].
+message ListImportJobsResponse {
+ // The list of [ImportJobs][google.cloud.kms.v1.ImportJob].
+ repeated ImportJob import_jobs = 1;
+
+ // A token to retrieve next page of results. Pass this value in
+ // [ListImportJobsRequest.page_token][google.cloud.kms.v1.ListImportJobsRequest.page_token] to retrieve the next page of results.
+ string next_page_token = 2;
+
+ // The total number of [ImportJobs][google.cloud.kms.v1.ImportJob] that matched the query.
+ int32 total_size = 3;
+}
+
+// Request message for [KeyManagementService.GetKeyRing][google.cloud.kms.v1.KeyManagementService.GetKeyRing].
+message GetKeyRingRequest {
+ // The [name][google.cloud.kms.v1.KeyRing.name] of the [KeyRing][google.cloud.kms.v1.KeyRing] to get.
+ string name = 1;
+}
+
+// Request message for [KeyManagementService.GetCryptoKey][google.cloud.kms.v1.KeyManagementService.GetCryptoKey].
+message GetCryptoKeyRequest {
+ // The [name][google.cloud.kms.v1.CryptoKey.name] of the [CryptoKey][google.cloud.kms.v1.CryptoKey] to get.
+ string name = 1;
+}
+
+// Request message for [KeyManagementService.GetCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.GetCryptoKeyVersion].
+message GetCryptoKeyVersionRequest {
+ // The [name][google.cloud.kms.v1.CryptoKeyVersion.name] of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] to get.
+ string name = 1;
+}
+
+// Request message for [KeyManagementService.GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey].
+message GetPublicKeyRequest {
+ // The [name][google.cloud.kms.v1.CryptoKeyVersion.name] of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] public key to
+ // get.
+ string name = 1;
+}
+
+// Request message for [KeyManagementService.GetImportJob][google.cloud.kms.v1.KeyManagementService.GetImportJob].
+message GetImportJobRequest {
+ // The [name][google.cloud.kms.v1.ImportJob.name] of the [ImportJob][google.cloud.kms.v1.ImportJob] to get.
+ string name = 1;
+}
+
+// Request message for [KeyManagementService.CreateKeyRing][google.cloud.kms.v1.KeyManagementService.CreateKeyRing].
+message CreateKeyRingRequest {
+ // Required. The resource name of the location associated with the
+ // [KeyRings][google.cloud.kms.v1.KeyRing], in the format `projects/*/locations/*`.
+ string parent = 1;
+
+ // Required. It must be unique within a location and match the regular
+ // expression `[a-zA-Z0-9_-]{1,63}`
+ string key_ring_id = 2;
+
+ // A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field values.
+ KeyRing key_ring = 3;
+}
+
+// Request message for [KeyManagementService.CreateCryptoKey][google.cloud.kms.v1.KeyManagementService.CreateCryptoKey].
+message CreateCryptoKeyRequest {
+ // Required. The [name][google.cloud.kms.v1.KeyRing.name] of the KeyRing associated with the
+ // [CryptoKeys][google.cloud.kms.v1.CryptoKey].
+ string parent = 1;
+
+ // Required. It must be unique within a KeyRing and match the regular
+ // expression `[a-zA-Z0-9_-]{1,63}`
+ string crypto_key_id = 2;
+
+ // A [CryptoKey][google.cloud.kms.v1.CryptoKey] with initial field values.
+ CryptoKey crypto_key = 3;
+
+ // If set to true, the request will create a [CryptoKey][google.cloud.kms.v1.CryptoKey] without any
+ // [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion]. You must manually call
+ // [CreateCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.CreateCryptoKeyVersion] or
+ // [ImportCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.ImportCryptoKeyVersion]
+ // before you can use this [CryptoKey][google.cloud.kms.v1.CryptoKey].
+ bool skip_initial_version_creation = 5;
+}
+
+// Request message for [KeyManagementService.CreateCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.CreateCryptoKeyVersion].
+message CreateCryptoKeyVersionRequest {
+ // Required. The [name][google.cloud.kms.v1.CryptoKey.name] of the [CryptoKey][google.cloud.kms.v1.CryptoKey] associated with
+ // the [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion].
+ string parent = 1;
+
+ // A [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with initial field values.
+ CryptoKeyVersion crypto_key_version = 2;
+}
+
+// Request message for [KeyManagementService.ImportCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.ImportCryptoKeyVersion].
+message ImportCryptoKeyVersionRequest {
+ // Required. The [name][google.cloud.kms.v1.CryptoKey.name] of the [CryptoKey][google.cloud.kms.v1.CryptoKey] to
+ // be imported into.
+ string parent = 1;
+
+ // Required. The [algorithm][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionAlgorithm] of
+ // the key being imported. This does not need to match the
+ // [version_template][google.cloud.kms.v1.CryptoKey.version_template] of the [CryptoKey][google.cloud.kms.v1.CryptoKey] this
+ // version imports into.
+ CryptoKeyVersion.CryptoKeyVersionAlgorithm algorithm = 2;
+
+ // Required. The [name][google.cloud.kms.v1.ImportJob.name] of the [ImportJob][google.cloud.kms.v1.ImportJob] that was used to
+ // wrap this key material.
+ string import_job = 4;
+
+ // Required. The incoming wrapped key material that is to be imported.
+ oneof wrapped_key_material {
+ // Wrapped key material produced with
+ // [RSA_OAEP_3072_SHA1_AES_256][google.cloud.kms.v1.ImportJob.ImportMethod.RSA_OAEP_3072_SHA1_AES_256]
+ // or
+ // [RSA_OAEP_4096_SHA1_AES_256][google.cloud.kms.v1.ImportJob.ImportMethod.RSA_OAEP_4096_SHA1_AES_256].
+ //
+ // This field contains the concatenation of two wrapped keys:
+ //
+ // - An ephemeral AES-256 wrapping key wrapped with the
+ // [public_key][google.cloud.kms.v1.ImportJob.public_key] using RSAES-OAEP with SHA-1,
+ // MGF1 with SHA-1, and an empty label.
+ //
+ // - The key to be imported, wrapped with the ephemeral AES-256 key
+ // using AES-KWP (RFC 5649).
+ //
+ //
+ //
+ // This format is the same as the format produced by PKCS#11 mechanism
+ // CKM_RSA_AES_KEY_WRAP.
+ bytes rsa_aes_wrapped_key = 5;
+ }
+}
+
+// Request message for [KeyManagementService.CreateImportJob][google.cloud.kms.v1.KeyManagementService.CreateImportJob].
+message CreateImportJobRequest {
+ // Required. The [name][google.cloud.kms.v1.KeyRing.name] of the [KeyRing][google.cloud.kms.v1.KeyRing] associated with the
+ // [ImportJobs][google.cloud.kms.v1.ImportJob].
+ string parent = 1;
+
+ // Required. It must be unique within a KeyRing and match the regular
+ // expression `[a-zA-Z0-9_-]{1,63}`
+ string import_job_id = 2;
+
+ // Required. An [ImportJob][google.cloud.kms.v1.ImportJob] with initial field values.
+ ImportJob import_job = 3;
+}
+
+// Request message for [KeyManagementService.UpdateCryptoKey][google.cloud.kms.v1.KeyManagementService.UpdateCryptoKey].
+message UpdateCryptoKeyRequest {
+ // [CryptoKey][google.cloud.kms.v1.CryptoKey] with updated values.
+ CryptoKey crypto_key = 1;
+
+ // Required list of fields to be updated in this request.
+ google.protobuf.FieldMask update_mask = 2;
+}
+
+// Request message for [KeyManagementService.UpdateCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.UpdateCryptoKeyVersion].
+message UpdateCryptoKeyVersionRequest {
+ // [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with updated values.
+ CryptoKeyVersion crypto_key_version = 1;
+
+ // Required list of fields to be updated in this request.
+ google.protobuf.FieldMask update_mask = 2;
+}
+
+// Request message for [KeyManagementService.Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt].
+message EncryptRequest {
+ // Required. The resource name of the [CryptoKey][google.cloud.kms.v1.CryptoKey] or [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]
+ // to use for encryption.
+ //
+ // If a [CryptoKey][google.cloud.kms.v1.CryptoKey] is specified, the server will use its
+ // [primary version][google.cloud.kms.v1.CryptoKey.primary].
+ string name = 1;
+
+ // Required. The data to encrypt. Must be no larger than 64KiB.
+ //
+ // The maximum size depends on the key version's
+ // [protection_level][google.cloud.kms.v1.CryptoKeyVersionTemplate.protection_level]. For
+ // [SOFTWARE][google.cloud.kms.v1.ProtectionLevel.SOFTWARE] keys, the plaintext must be no larger
+ // than 64KiB. For [HSM][google.cloud.kms.v1.ProtectionLevel.HSM] keys, the combined length of the
+ // plaintext and additional_authenticated_data fields must be no larger than
+ // 8KiB.
+ bytes plaintext = 2;
+
+ // Optional data that, if specified, must also be provided during decryption
+ // through [DecryptRequest.additional_authenticated_data][google.cloud.kms.v1.DecryptRequest.additional_authenticated_data].
+ //
+ // The maximum size depends on the key version's
+ // [protection_level][google.cloud.kms.v1.CryptoKeyVersionTemplate.protection_level]. For
+ // [SOFTWARE][google.cloud.kms.v1.ProtectionLevel.SOFTWARE] keys, the AAD must be no larger than
+ // 64KiB. For [HSM][google.cloud.kms.v1.ProtectionLevel.HSM] keys, the combined length of the
+ // plaintext and additional_authenticated_data fields must be no larger than
+ // 8KiB.
+ bytes additional_authenticated_data = 3;
+}
+
+// Request message for [KeyManagementService.Decrypt][google.cloud.kms.v1.KeyManagementService.Decrypt].
+message DecryptRequest {
+ // Required. The resource name of the [CryptoKey][google.cloud.kms.v1.CryptoKey] to use for decryption.
+ // The server will choose the appropriate version.
+ string name = 1;
+
+ // Required. The encrypted data originally returned in
+ // [EncryptResponse.ciphertext][google.cloud.kms.v1.EncryptResponse.ciphertext].
+ bytes ciphertext = 2;
+
+ // Optional data that must match the data originally supplied in
+ // [EncryptRequest.additional_authenticated_data][google.cloud.kms.v1.EncryptRequest.additional_authenticated_data].
+ bytes additional_authenticated_data = 3;
+}
+
+// Request message for [KeyManagementService.AsymmetricSign][google.cloud.kms.v1.KeyManagementService.AsymmetricSign].
+message AsymmetricSignRequest {
+ // Required. The resource name of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] to use for signing.
+ string name = 1;
+
+ // Required. The digest of the data to sign. The digest must be produced with
+ // the same digest algorithm as specified by the key version's
+ // [algorithm][google.cloud.kms.v1.CryptoKeyVersion.algorithm].
+ Digest digest = 3;
+}
+
+// Request message for [KeyManagementService.AsymmetricDecrypt][google.cloud.kms.v1.KeyManagementService.AsymmetricDecrypt].
+message AsymmetricDecryptRequest {
+ // Required. The resource name of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] to use for
+ // decryption.
+ string name = 1;
+
+ // Required. The data encrypted with the named [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]'s public
+ // key using OAEP.
+ bytes ciphertext = 3;
+}
+
+// Response message for [KeyManagementService.Decrypt][google.cloud.kms.v1.KeyManagementService.Decrypt].
+message DecryptResponse {
+ // The decrypted data originally supplied in [EncryptRequest.plaintext][google.cloud.kms.v1.EncryptRequest.plaintext].
+ bytes plaintext = 1;
+}
+
+// Response message for [KeyManagementService.Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt].
+message EncryptResponse {
+ // The resource name of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] used in encryption.
+ string name = 1;
+
+ // The encrypted data.
+ bytes ciphertext = 2;
+}
+
+// Response message for [KeyManagementService.AsymmetricSign][google.cloud.kms.v1.KeyManagementService.AsymmetricSign].
+message AsymmetricSignResponse {
+ // The created signature.
+ bytes signature = 1;
+}
+
+// Response message for [KeyManagementService.AsymmetricDecrypt][google.cloud.kms.v1.KeyManagementService.AsymmetricDecrypt].
+message AsymmetricDecryptResponse {
+ // The decrypted data originally encrypted with the matching public key.
+ bytes plaintext = 1;
+}
+
+// Request message for [KeyManagementService.UpdateCryptoKeyPrimaryVersion][google.cloud.kms.v1.KeyManagementService.UpdateCryptoKeyPrimaryVersion].
+message UpdateCryptoKeyPrimaryVersionRequest {
+ // The resource name of the [CryptoKey][google.cloud.kms.v1.CryptoKey] to update.
+ string name = 1;
+
+ // The id of the child [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] to use as primary.
+ string crypto_key_version_id = 2;
+}
+
+// Request message for [KeyManagementService.DestroyCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DestroyCryptoKeyVersion].
+message DestroyCryptoKeyVersionRequest {
+ // The resource name of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] to destroy.
+ string name = 1;
+}
+
+// Request message for [KeyManagementService.RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion].
+message RestoreCryptoKeyVersionRequest {
+ // The resource name of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] to restore.
+ string name = 1;
+}
+
+// A [Digest][google.cloud.kms.v1.Digest] holds a cryptographic message digest.
+message Digest {
+ // Required. The message digest.
+ oneof digest {
+ // A message digest produced with the SHA-256 algorithm.
+ bytes sha256 = 1;
+
+ // A message digest produced with the SHA-384 algorithm.
+ bytes sha384 = 2;
+
+ // A message digest produced with the SHA-512 algorithm.
+ bytes sha512 = 3;
+ }
+}
+
+// Cloud KMS metadata for the given [google.cloud.location.Location][google.cloud.location.Location].
+message LocationMetadata {
+ // Indicates whether [CryptoKeys][google.cloud.kms.v1.CryptoKey] with
+ // [protection_level][google.cloud.kms.v1.CryptoKeyVersionTemplate.protection_level]
+ // [HSM][google.cloud.kms.v1.ProtectionLevel.HSM] can be created in this location.
+ bool hsm_available = 1;
+}
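Note (illustration only, not part of the patch): the baselines renamed below correspond to a TypeScript client generated from the KMS service definition above. A minimal usage sketch follows, assuming the generated library has been compiled; the relative import path and the project/location values are placeholders, and the camelCase method name mirrors the `ListKeyRings` RPC.

```ts
// Hypothetical usage of the generated KMS client; the import path,
// project id, and location below are placeholders for illustration.
import {KeyManagementServiceClient} from './src';

async function listKeyRings(projectId: string, locationId: string): Promise<void> {
  const client = new KeyManagementServiceClient();
  const parent = `projects/${projectId}/locations/${locationId}`;
  // Generated paginated methods resolve to [resources, nextRequest, rawResponse];
  // only the first element (the list of KeyRings) is needed here.
  const [keyRings] = await client.listKeyRings({parent});
  for (const keyRing of keyRings) {
    console.log(keyRing.name);
  }
}

listKeyRings('my-project', 'us-east1').catch(console.error);
```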
diff --git a/typescript/test/testdata/keymanager/src/index.ts.baseline b/baselines/kms/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/src/index.ts.baseline
rename to baselines/kms/src/index.ts.baseline
diff --git a/typescript/test/testdata/keymanager/src/v1/index.ts.baseline b/baselines/kms/src/v1/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/src/v1/index.ts.baseline
rename to baselines/kms/src/v1/index.ts.baseline
diff --git a/typescript/test/testdata/keymanager/src/v1/key_management_service_client.ts.baseline b/baselines/kms/src/v1/key_management_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/src/v1/key_management_service_client.ts.baseline
rename to baselines/kms/src/v1/key_management_service_client.ts.baseline
diff --git a/typescript/test/testdata/keymanager/src/v1/key_management_service_client_config.json.baseline b/baselines/kms/src/v1/key_management_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/src/v1/key_management_service_client_config.json.baseline
rename to baselines/kms/src/v1/key_management_service_client_config.json.baseline
diff --git a/baselines/kms/src/v1/key_management_service_proto_list.json.baseline b/baselines/kms/src/v1/key_management_service_proto_list.json.baseline
new file mode 100644
index 0000000000..b7e20599ad
--- /dev/null
+++ b/baselines/kms/src/v1/key_management_service_proto_list.json.baseline
@@ -0,0 +1,4 @@
+[
+ "../../protos/google/cloud/kms/v1/resources.proto",
+ "../../protos/google/cloud/kms/v1/service.proto"
+]
diff --git a/typescript/test/testdata/keymanager/system-test/fixtures/sample/src/index.js.baseline b/baselines/kms/system-test/fixtures/sample/src/index.js.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/system-test/fixtures/sample/src/index.js.baseline
rename to baselines/kms/system-test/fixtures/sample/src/index.js.baseline
diff --git a/typescript/test/testdata/keymanager/system-test/fixtures/sample/src/index.ts.baseline b/baselines/kms/system-test/fixtures/sample/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/system-test/fixtures/sample/src/index.ts.baseline
rename to baselines/kms/system-test/fixtures/sample/src/index.ts.baseline
diff --git a/typescript/test/testdata/keymanager/system-test/install.ts.baseline b/baselines/kms/system-test/install.ts.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/system-test/install.ts.baseline
rename to baselines/kms/system-test/install.ts.baseline
diff --git a/typescript/test/testdata/keymanager/test/gapic-key_management_service-v1.ts.baseline b/baselines/kms/test/gapic-key_management_service-v1.ts.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/test/gapic-key_management_service-v1.ts.baseline
rename to baselines/kms/test/gapic-key_management_service-v1.ts.baseline
diff --git a/typescript/test/testdata/keymanager/tsconfig.json.baseline b/baselines/kms/tsconfig.json.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/tsconfig.json.baseline
rename to baselines/kms/tsconfig.json.baseline
diff --git a/typescript/test/testdata/keymanager/tslint.json.baseline b/baselines/kms/tslint.json.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/tslint.json.baseline
rename to baselines/kms/tslint.json.baseline
diff --git a/typescript/test/testdata/keymanager/webpack.config.js.baseline b/baselines/kms/webpack.config.js.baseline
similarity index 100%
rename from typescript/test/testdata/keymanager/webpack.config.js.baseline
rename to baselines/kms/webpack.config.js.baseline
diff --git a/typescript/test/testdata/monitoring/.gitignore.baseline b/baselines/monitoring/.gitignore.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/.gitignore.baseline
rename to baselines/monitoring/.gitignore.baseline
diff --git a/typescript/test/testdata/monitoring/.jsdoc.js.baseline b/baselines/monitoring/.jsdoc.js.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/.jsdoc.js.baseline
rename to baselines/monitoring/.jsdoc.js.baseline
diff --git a/typescript/test/testdata/monitoring/.mocharc.json.baseline b/baselines/monitoring/.mocharc.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/.mocharc.json.baseline
rename to baselines/monitoring/.mocharc.json.baseline
diff --git a/typescript/test/testdata/monitoring/linkinator.config.json.baseline b/baselines/monitoring/linkinator.config.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/linkinator.config.json.baseline
rename to baselines/monitoring/linkinator.config.json.baseline
diff --git a/typescript/test/testdata/monitoring/package.json.baseline b/baselines/monitoring/package.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/package.json.baseline
rename to baselines/monitoring/package.json.baseline
diff --git a/typescript/test/testdata/monitoring/proto.list.baseline b/baselines/monitoring/proto.list.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/proto.list.baseline
rename to baselines/monitoring/proto.list.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/alert.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/alert.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/alert.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/alert.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/alert_service.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/alert_service.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/alert_service.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/alert_service.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/common.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/common.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/common.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/common.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/dropped_labels.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/dropped_labels.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/dropped_labels.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/dropped_labels.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/group.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/group.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/group.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/group.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/group_service.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/group_service.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/group_service.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/group_service.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/metric.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/metric.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/metric.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/metric.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/metric_service.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/metric_service.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/metric_service.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/metric_service.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/mutation_record.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/mutation_record.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/mutation_record.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/mutation_record.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/notification.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/notification.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/notification.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/notification.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/notification_service.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/notification_service.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/notification_service.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/notification_service.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/service.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/service.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/service.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/service.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/service_service.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/service_service.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/service_service.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/service_service.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/span_context.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/span_context.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/span_context.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/span_context.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/uptime.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/uptime.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/uptime.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/uptime.proto.baseline
diff --git a/typescript/test/testdata/monitoring/protos/google/monitoring/v3/uptime_service.proto.baseline b/baselines/monitoring/protos/google/monitoring/v3/uptime_service.proto.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/protos/google/monitoring/v3/uptime_service.proto.baseline
rename to baselines/monitoring/protos/google/monitoring/v3/uptime_service.proto.baseline
diff --git a/typescript/test/testdata/monitoring/src/index.ts.baseline b/baselines/monitoring/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/index.ts.baseline
rename to baselines/monitoring/src/index.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/alert_policy_service_client.ts.baseline b/baselines/monitoring/src/v3/alert_policy_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/alert_policy_service_client.ts.baseline
rename to baselines/monitoring/src/v3/alert_policy_service_client.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/alert_policy_service_client_config.json.baseline b/baselines/monitoring/src/v3/alert_policy_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/alert_policy_service_client_config.json.baseline
rename to baselines/monitoring/src/v3/alert_policy_service_client_config.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/alert_policy_service_proto_list.json.baseline b/baselines/monitoring/src/v3/alert_policy_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/alert_policy_service_proto_list.json.baseline
rename to baselines/monitoring/src/v3/alert_policy_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/group_service_client.ts.baseline b/baselines/monitoring/src/v3/group_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/group_service_client.ts.baseline
rename to baselines/monitoring/src/v3/group_service_client.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/group_service_client_config.json.baseline b/baselines/monitoring/src/v3/group_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/group_service_client_config.json.baseline
rename to baselines/monitoring/src/v3/group_service_client_config.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/group_service_proto_list.json.baseline b/baselines/monitoring/src/v3/group_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/group_service_proto_list.json.baseline
rename to baselines/monitoring/src/v3/group_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/index.ts.baseline b/baselines/monitoring/src/v3/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/index.ts.baseline
rename to baselines/monitoring/src/v3/index.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/metric_service_client.ts.baseline b/baselines/monitoring/src/v3/metric_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/metric_service_client.ts.baseline
rename to baselines/monitoring/src/v3/metric_service_client.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/metric_service_client_config.json.baseline b/baselines/monitoring/src/v3/metric_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/metric_service_client_config.json.baseline
rename to baselines/monitoring/src/v3/metric_service_client_config.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/metric_service_proto_list.json.baseline b/baselines/monitoring/src/v3/metric_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/metric_service_proto_list.json.baseline
rename to baselines/monitoring/src/v3/metric_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/notification_channel_service_client.ts.baseline b/baselines/monitoring/src/v3/notification_channel_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/notification_channel_service_client.ts.baseline
rename to baselines/monitoring/src/v3/notification_channel_service_client.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/notification_channel_service_client_config.json.baseline b/baselines/monitoring/src/v3/notification_channel_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/notification_channel_service_client_config.json.baseline
rename to baselines/monitoring/src/v3/notification_channel_service_client_config.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/notification_channel_service_proto_list.json.baseline b/baselines/monitoring/src/v3/notification_channel_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/notification_channel_service_proto_list.json.baseline
rename to baselines/monitoring/src/v3/notification_channel_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/service_monitoring_service_client.ts.baseline b/baselines/monitoring/src/v3/service_monitoring_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/service_monitoring_service_client.ts.baseline
rename to baselines/monitoring/src/v3/service_monitoring_service_client.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/service_monitoring_service_client_config.json.baseline b/baselines/monitoring/src/v3/service_monitoring_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/service_monitoring_service_client_config.json.baseline
rename to baselines/monitoring/src/v3/service_monitoring_service_client_config.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/service_monitoring_service_proto_list.json.baseline b/baselines/monitoring/src/v3/service_monitoring_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/service_monitoring_service_proto_list.json.baseline
rename to baselines/monitoring/src/v3/service_monitoring_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/uptime_check_service_client.ts.baseline b/baselines/monitoring/src/v3/uptime_check_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/uptime_check_service_client.ts.baseline
rename to baselines/monitoring/src/v3/uptime_check_service_client.ts.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/uptime_check_service_client_config.json.baseline b/baselines/monitoring/src/v3/uptime_check_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/uptime_check_service_client_config.json.baseline
rename to baselines/monitoring/src/v3/uptime_check_service_client_config.json.baseline
diff --git a/typescript/test/testdata/monitoring/src/v3/uptime_check_service_proto_list.json.baseline b/baselines/monitoring/src/v3/uptime_check_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/src/v3/uptime_check_service_proto_list.json.baseline
rename to baselines/monitoring/src/v3/uptime_check_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/monitoring/system-test/fixtures/sample/src/index.js.baseline b/baselines/monitoring/system-test/fixtures/sample/src/index.js.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/system-test/fixtures/sample/src/index.js.baseline
rename to baselines/monitoring/system-test/fixtures/sample/src/index.js.baseline
diff --git a/typescript/test/testdata/monitoring/system-test/fixtures/sample/src/index.ts.baseline b/baselines/monitoring/system-test/fixtures/sample/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/system-test/fixtures/sample/src/index.ts.baseline
rename to baselines/monitoring/system-test/fixtures/sample/src/index.ts.baseline
diff --git a/typescript/test/testdata/monitoring/system-test/install.ts.baseline b/baselines/monitoring/system-test/install.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/system-test/install.ts.baseline
rename to baselines/monitoring/system-test/install.ts.baseline
diff --git a/typescript/test/testdata/monitoring/test/gapic-alert_policy_service-v3.ts.baseline b/baselines/monitoring/test/gapic-alert_policy_service-v3.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/test/gapic-alert_policy_service-v3.ts.baseline
rename to baselines/monitoring/test/gapic-alert_policy_service-v3.ts.baseline
diff --git a/typescript/test/testdata/monitoring/test/gapic-group_service-v3.ts.baseline b/baselines/monitoring/test/gapic-group_service-v3.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/test/gapic-group_service-v3.ts.baseline
rename to baselines/monitoring/test/gapic-group_service-v3.ts.baseline
diff --git a/typescript/test/testdata/monitoring/test/gapic-metric_service-v3.ts.baseline b/baselines/monitoring/test/gapic-metric_service-v3.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/test/gapic-metric_service-v3.ts.baseline
rename to baselines/monitoring/test/gapic-metric_service-v3.ts.baseline
diff --git a/typescript/test/testdata/monitoring/test/gapic-notification_channel_service-v3.ts.baseline b/baselines/monitoring/test/gapic-notification_channel_service-v3.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/test/gapic-notification_channel_service-v3.ts.baseline
rename to baselines/monitoring/test/gapic-notification_channel_service-v3.ts.baseline
diff --git a/typescript/test/testdata/monitoring/test/gapic-service_monitoring_service-v3.ts.baseline b/baselines/monitoring/test/gapic-service_monitoring_service-v3.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/test/gapic-service_monitoring_service-v3.ts.baseline
rename to baselines/monitoring/test/gapic-service_monitoring_service-v3.ts.baseline
diff --git a/typescript/test/testdata/monitoring/test/gapic-uptime_check_service-v3.ts.baseline b/baselines/monitoring/test/gapic-uptime_check_service-v3.ts.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/test/gapic-uptime_check_service-v3.ts.baseline
rename to baselines/monitoring/test/gapic-uptime_check_service-v3.ts.baseline
diff --git a/typescript/test/testdata/monitoring/tsconfig.json.baseline b/baselines/monitoring/tsconfig.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/tsconfig.json.baseline
rename to baselines/monitoring/tsconfig.json.baseline
diff --git a/typescript/test/testdata/monitoring/tslint.json.baseline b/baselines/monitoring/tslint.json.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/tslint.json.baseline
rename to baselines/monitoring/tslint.json.baseline
diff --git a/typescript/test/testdata/monitoring/webpack.config.js.baseline b/baselines/monitoring/webpack.config.js.baseline
similarity index 100%
rename from typescript/test/testdata/monitoring/webpack.config.js.baseline
rename to baselines/monitoring/webpack.config.js.baseline
diff --git a/typescript/test/testdata/redis/.gitignore.baseline b/baselines/redis/.gitignore.baseline
similarity index 100%
rename from typescript/test/testdata/redis/.gitignore.baseline
rename to baselines/redis/.gitignore.baseline
diff --git a/typescript/test/testdata/redis/.jsdoc.js.baseline b/baselines/redis/.jsdoc.js.baseline
similarity index 100%
rename from typescript/test/testdata/redis/.jsdoc.js.baseline
rename to baselines/redis/.jsdoc.js.baseline
diff --git a/typescript/test/testdata/redis/.mocharc.json.baseline b/baselines/redis/.mocharc.json.baseline
similarity index 100%
rename from typescript/test/testdata/redis/.mocharc.json.baseline
rename to baselines/redis/.mocharc.json.baseline
diff --git a/typescript/test/testdata/redis/linkinator.config.json.baseline b/baselines/redis/linkinator.config.json.baseline
similarity index 100%
rename from typescript/test/testdata/redis/linkinator.config.json.baseline
rename to baselines/redis/linkinator.config.json.baseline
diff --git a/typescript/test/testdata/redis/package.json.baseline b/baselines/redis/package.json.baseline
similarity index 100%
rename from typescript/test/testdata/redis/package.json.baseline
rename to baselines/redis/package.json.baseline
diff --git a/typescript/test/testdata/redis/proto.list.baseline b/baselines/redis/proto.list.baseline
similarity index 100%
rename from typescript/test/testdata/redis/proto.list.baseline
rename to baselines/redis/proto.list.baseline
diff --git a/baselines/redis/protos/google/cloud/common_resources.proto.baseline b/baselines/redis/protos/google/cloud/common_resources.proto.baseline
new file mode 100644
index 0000000000..56c9f800d5
--- /dev/null
+++ b/baselines/redis/protos/google/cloud/common_resources.proto.baseline
@@ -0,0 +1,52 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file contains stub messages for common resources in GCP.
+// It is not intended to be directly generated, and is instead used by
+// other tooling to be able to match common resource patterns.
+syntax = "proto3";
+
+package google.cloud;
+
+import "google/api/resource.proto";
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ pattern: "projects/{project}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Organization"
+ pattern: "organizations/{organization}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Folder"
+ pattern: "folders/{folder}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudbilling.googleapis.com/BillingAccount"
+ pattern: "billingAccounts/{billing_account}"
+};
+
+option (google.api.resource_definition) = {
+ type: "locations.googleapis.com/Location"
+ pattern: "projects/{project}/locations/{location}"
+};
+
diff --git a/typescript/test/protos/google/cloud/redis/v1beta1/cloud_redis.proto b/baselines/redis/protos/google/cloud/redis/v1beta1/cloud_redis.proto.baseline
similarity index 100%
rename from typescript/test/protos/google/cloud/redis/v1beta1/cloud_redis.proto
rename to baselines/redis/protos/google/cloud/redis/v1beta1/cloud_redis.proto.baseline
diff --git a/typescript/test/testdata/redis/src/index.ts.baseline b/baselines/redis/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/redis/src/index.ts.baseline
rename to baselines/redis/src/index.ts.baseline
diff --git a/typescript/test/testdata/redis/src/v1beta1/cloud_redis_client.ts.baseline b/baselines/redis/src/v1beta1/cloud_redis_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/redis/src/v1beta1/cloud_redis_client.ts.baseline
rename to baselines/redis/src/v1beta1/cloud_redis_client.ts.baseline
diff --git a/typescript/test/testdata/redis/src/v1beta1/cloud_redis_client_config.json.baseline b/baselines/redis/src/v1beta1/cloud_redis_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/redis/src/v1beta1/cloud_redis_client_config.json.baseline
rename to baselines/redis/src/v1beta1/cloud_redis_client_config.json.baseline
diff --git a/typescript/test/testdata/redis/src/v1beta1/cloud_redis_proto_list.json.baseline b/baselines/redis/src/v1beta1/cloud_redis_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/redis/src/v1beta1/cloud_redis_proto_list.json.baseline
rename to baselines/redis/src/v1beta1/cloud_redis_proto_list.json.baseline
diff --git a/typescript/test/testdata/redis/src/v1beta1/index.ts.baseline b/baselines/redis/src/v1beta1/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/redis/src/v1beta1/index.ts.baseline
rename to baselines/redis/src/v1beta1/index.ts.baseline
diff --git a/typescript/test/testdata/redis/system-test/fixtures/sample/src/index.js.baseline b/baselines/redis/system-test/fixtures/sample/src/index.js.baseline
similarity index 100%
rename from typescript/test/testdata/redis/system-test/fixtures/sample/src/index.js.baseline
rename to baselines/redis/system-test/fixtures/sample/src/index.js.baseline
diff --git a/typescript/test/testdata/redis/system-test/fixtures/sample/src/index.ts.baseline b/baselines/redis/system-test/fixtures/sample/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/redis/system-test/fixtures/sample/src/index.ts.baseline
rename to baselines/redis/system-test/fixtures/sample/src/index.ts.baseline
diff --git a/typescript/test/testdata/redis/system-test/install.ts.baseline b/baselines/redis/system-test/install.ts.baseline
similarity index 100%
rename from typescript/test/testdata/redis/system-test/install.ts.baseline
rename to baselines/redis/system-test/install.ts.baseline
diff --git a/typescript/test/testdata/redis/test/gapic-cloud_redis-v1beta1.ts.baseline b/baselines/redis/test/gapic-cloud_redis-v1beta1.ts.baseline
similarity index 100%
rename from typescript/test/testdata/redis/test/gapic-cloud_redis-v1beta1.ts.baseline
rename to baselines/redis/test/gapic-cloud_redis-v1beta1.ts.baseline
diff --git a/typescript/test/testdata/redis/tsconfig.json.baseline b/baselines/redis/tsconfig.json.baseline
similarity index 100%
rename from typescript/test/testdata/redis/tsconfig.json.baseline
rename to baselines/redis/tsconfig.json.baseline
diff --git a/typescript/test/testdata/redis/tslint.json.baseline b/baselines/redis/tslint.json.baseline
similarity index 100%
rename from typescript/test/testdata/redis/tslint.json.baseline
rename to baselines/redis/tslint.json.baseline
diff --git a/typescript/test/testdata/redis/webpack.config.js.baseline b/baselines/redis/webpack.config.js.baseline
similarity index 100%
rename from typescript/test/testdata/redis/webpack.config.js.baseline
rename to baselines/redis/webpack.config.js.baseline
diff --git a/typescript/test/testdata/showcase/.gitignore.baseline b/baselines/showcase/.gitignore.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/.gitignore.baseline
rename to baselines/showcase/.gitignore.baseline
diff --git a/typescript/test/testdata/showcase/.jsdoc.js.baseline b/baselines/showcase/.jsdoc.js.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/.jsdoc.js.baseline
rename to baselines/showcase/.jsdoc.js.baseline
diff --git a/typescript/test/testdata/showcase/.mocharc.json.baseline b/baselines/showcase/.mocharc.json.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/.mocharc.json.baseline
rename to baselines/showcase/.mocharc.json.baseline
diff --git a/typescript/test/testdata/showcase/linkinator.config.json.baseline b/baselines/showcase/linkinator.config.json.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/linkinator.config.json.baseline
rename to baselines/showcase/linkinator.config.json.baseline
diff --git a/typescript/test/testdata/showcase/package.json.baseline b/baselines/showcase/package.json.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/package.json.baseline
rename to baselines/showcase/package.json.baseline
diff --git a/typescript/test/testdata/showcase/proto.list.baseline b/baselines/showcase/proto.list.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/proto.list.baseline
rename to baselines/showcase/proto.list.baseline
diff --git a/typescript/test/testdata/showcase/protos/google/showcase/v1beta1/echo.proto.baseline b/baselines/showcase/protos/google/showcase/v1beta1/echo.proto.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/protos/google/showcase/v1beta1/echo.proto.baseline
rename to baselines/showcase/protos/google/showcase/v1beta1/echo.proto.baseline
diff --git a/typescript/test/testdata/showcase/src/index.ts.baseline b/baselines/showcase/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/src/index.ts.baseline
rename to baselines/showcase/src/index.ts.baseline
diff --git a/typescript/test/testdata/showcase/src/v1beta1/echo_client.ts.baseline b/baselines/showcase/src/v1beta1/echo_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/src/v1beta1/echo_client.ts.baseline
rename to baselines/showcase/src/v1beta1/echo_client.ts.baseline
diff --git a/typescript/test/testdata/showcase/src/v1beta1/echo_client_config.json.baseline b/baselines/showcase/src/v1beta1/echo_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/src/v1beta1/echo_client_config.json.baseline
rename to baselines/showcase/src/v1beta1/echo_client_config.json.baseline
diff --git a/typescript/test/testdata/showcase/src/v1beta1/echo_proto_list.json.baseline b/baselines/showcase/src/v1beta1/echo_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/src/v1beta1/echo_proto_list.json.baseline
rename to baselines/showcase/src/v1beta1/echo_proto_list.json.baseline
diff --git a/typescript/test/testdata/showcase/src/v1beta1/index.ts.baseline b/baselines/showcase/src/v1beta1/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/src/v1beta1/index.ts.baseline
rename to baselines/showcase/src/v1beta1/index.ts.baseline
diff --git a/typescript/test/testdata/showcase/system-test/fixtures/sample/src/index.js.baseline b/baselines/showcase/system-test/fixtures/sample/src/index.js.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/system-test/fixtures/sample/src/index.js.baseline
rename to baselines/showcase/system-test/fixtures/sample/src/index.js.baseline
diff --git a/typescript/test/testdata/showcase/system-test/fixtures/sample/src/index.ts.baseline b/baselines/showcase/system-test/fixtures/sample/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/system-test/fixtures/sample/src/index.ts.baseline
rename to baselines/showcase/system-test/fixtures/sample/src/index.ts.baseline
diff --git a/typescript/test/testdata/showcase/system-test/install.ts.baseline b/baselines/showcase/system-test/install.ts.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/system-test/install.ts.baseline
rename to baselines/showcase/system-test/install.ts.baseline
diff --git a/typescript/test/testdata/showcase/test/gapic-echo-v1beta1.ts.baseline b/baselines/showcase/test/gapic-echo-v1beta1.ts.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/test/gapic-echo-v1beta1.ts.baseline
rename to baselines/showcase/test/gapic-echo-v1beta1.ts.baseline
diff --git a/typescript/test/testdata/showcase/tsconfig.json.baseline b/baselines/showcase/tsconfig.json.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/tsconfig.json.baseline
rename to baselines/showcase/tsconfig.json.baseline
diff --git a/typescript/test/testdata/showcase/tslint.json.baseline b/baselines/showcase/tslint.json.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/tslint.json.baseline
rename to baselines/showcase/tslint.json.baseline
diff --git a/typescript/test/testdata/showcase/webpack.config.js.baseline b/baselines/showcase/webpack.config.js.baseline
similarity index 100%
rename from typescript/test/testdata/showcase/webpack.config.js.baseline
rename to baselines/showcase/webpack.config.js.baseline
diff --git a/typescript/test/testdata/texttospeech/.gitignore.baseline b/baselines/texttospeech/.gitignore.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/.gitignore.baseline
rename to baselines/texttospeech/.gitignore.baseline
diff --git a/typescript/test/testdata/texttospeech/.jsdoc.js.baseline b/baselines/texttospeech/.jsdoc.js.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/.jsdoc.js.baseline
rename to baselines/texttospeech/.jsdoc.js.baseline
diff --git a/typescript/test/testdata/texttospeech/.mocharc.json.baseline b/baselines/texttospeech/.mocharc.json.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/.mocharc.json.baseline
rename to baselines/texttospeech/.mocharc.json.baseline
diff --git a/typescript/test/testdata/texttospeech/linkinator.config.json.baseline b/baselines/texttospeech/linkinator.config.json.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/linkinator.config.json.baseline
rename to baselines/texttospeech/linkinator.config.json.baseline
diff --git a/typescript/test/testdata/texttospeech/package.json.baseline b/baselines/texttospeech/package.json.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/package.json.baseline
rename to baselines/texttospeech/package.json.baseline
diff --git a/typescript/test/testdata/texttospeech/proto.list.baseline b/baselines/texttospeech/proto.list.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/proto.list.baseline
rename to baselines/texttospeech/proto.list.baseline
diff --git a/typescript/test/testdata/texttospeech/protos/google/cloud/texttospeech/v1/cloud_tts.proto.baseline b/baselines/texttospeech/protos/google/cloud/texttospeech/v1/cloud_tts.proto.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/protos/google/cloud/texttospeech/v1/cloud_tts.proto.baseline
rename to baselines/texttospeech/protos/google/cloud/texttospeech/v1/cloud_tts.proto.baseline
diff --git a/typescript/test/testdata/texttospeech/src/index.ts.baseline b/baselines/texttospeech/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/src/index.ts.baseline
rename to baselines/texttospeech/src/index.ts.baseline
diff --git a/typescript/test/testdata/texttospeech/src/v1/index.ts.baseline b/baselines/texttospeech/src/v1/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/src/v1/index.ts.baseline
rename to baselines/texttospeech/src/v1/index.ts.baseline
diff --git a/typescript/test/testdata/texttospeech/src/v1/text_to_speech_client.ts.baseline b/baselines/texttospeech/src/v1/text_to_speech_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/src/v1/text_to_speech_client.ts.baseline
rename to baselines/texttospeech/src/v1/text_to_speech_client.ts.baseline
diff --git a/typescript/test/testdata/texttospeech/src/v1/text_to_speech_client_config.json.baseline b/baselines/texttospeech/src/v1/text_to_speech_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/src/v1/text_to_speech_client_config.json.baseline
rename to baselines/texttospeech/src/v1/text_to_speech_client_config.json.baseline
diff --git a/typescript/test/testdata/texttospeech/src/v1/text_to_speech_proto_list.json.baseline b/baselines/texttospeech/src/v1/text_to_speech_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/src/v1/text_to_speech_proto_list.json.baseline
rename to baselines/texttospeech/src/v1/text_to_speech_proto_list.json.baseline
diff --git a/typescript/test/testdata/texttospeech/system-test/fixtures/sample/src/index.js.baseline b/baselines/texttospeech/system-test/fixtures/sample/src/index.js.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/system-test/fixtures/sample/src/index.js.baseline
rename to baselines/texttospeech/system-test/fixtures/sample/src/index.js.baseline
diff --git a/typescript/test/testdata/texttospeech/system-test/fixtures/sample/src/index.ts.baseline b/baselines/texttospeech/system-test/fixtures/sample/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/system-test/fixtures/sample/src/index.ts.baseline
rename to baselines/texttospeech/system-test/fixtures/sample/src/index.ts.baseline
diff --git a/typescript/test/testdata/texttospeech/system-test/install.ts.baseline b/baselines/texttospeech/system-test/install.ts.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/system-test/install.ts.baseline
rename to baselines/texttospeech/system-test/install.ts.baseline
diff --git a/typescript/test/testdata/texttospeech/test/gapic-text_to_speech-v1.ts.baseline b/baselines/texttospeech/test/gapic-text_to_speech-v1.ts.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/test/gapic-text_to_speech-v1.ts.baseline
rename to baselines/texttospeech/test/gapic-text_to_speech-v1.ts.baseline
diff --git a/typescript/test/testdata/texttospeech/tsconfig.json.baseline b/baselines/texttospeech/tsconfig.json.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/tsconfig.json.baseline
rename to baselines/texttospeech/tsconfig.json.baseline
diff --git a/typescript/test/testdata/texttospeech/tslint.json.baseline b/baselines/texttospeech/tslint.json.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/tslint.json.baseline
rename to baselines/texttospeech/tslint.json.baseline
diff --git a/typescript/test/testdata/texttospeech/webpack.config.js.baseline b/baselines/texttospeech/webpack.config.js.baseline
similarity index 100%
rename from typescript/test/testdata/texttospeech/webpack.config.js.baseline
rename to baselines/texttospeech/webpack.config.js.baseline
diff --git a/typescript/test/testdata/translate/.gitignore.baseline b/baselines/translate/.gitignore.baseline
similarity index 100%
rename from typescript/test/testdata/translate/.gitignore.baseline
rename to baselines/translate/.gitignore.baseline
diff --git a/typescript/test/testdata/translate/.jsdoc.js.baseline b/baselines/translate/.jsdoc.js.baseline
similarity index 100%
rename from typescript/test/testdata/translate/.jsdoc.js.baseline
rename to baselines/translate/.jsdoc.js.baseline
diff --git a/typescript/test/testdata/translate/.mocharc.json.baseline b/baselines/translate/.mocharc.json.baseline
similarity index 100%
rename from typescript/test/testdata/translate/.mocharc.json.baseline
rename to baselines/translate/.mocharc.json.baseline
diff --git a/typescript/test/testdata/translate/linkinator.config.json.baseline b/baselines/translate/linkinator.config.json.baseline
similarity index 100%
rename from typescript/test/testdata/translate/linkinator.config.json.baseline
rename to baselines/translate/linkinator.config.json.baseline
diff --git a/typescript/test/testdata/translate/package.json.baseline b/baselines/translate/package.json.baseline
similarity index 100%
rename from typescript/test/testdata/translate/package.json.baseline
rename to baselines/translate/package.json.baseline
diff --git a/typescript/test/testdata/translate/proto.list.baseline b/baselines/translate/proto.list.baseline
similarity index 100%
rename from typescript/test/testdata/translate/proto.list.baseline
rename to baselines/translate/proto.list.baseline
diff --git a/baselines/translate/protos/google/cloud/common_resources.proto.baseline b/baselines/translate/protos/google/cloud/common_resources.proto.baseline
new file mode 100644
index 0000000000..56c9f800d5
--- /dev/null
+++ b/baselines/translate/protos/google/cloud/common_resources.proto.baseline
@@ -0,0 +1,52 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file contains stub messages for common resources in GCP.
+// It is not intended to be directly generated, and is instead used by
+// other tooling to be able to match common resource patterns.
+syntax = "proto3";
+
+package google.cloud;
+
+import "google/api/resource.proto";
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ pattern: "projects/{project}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Organization"
+ pattern: "organizations/{organization}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Folder"
+ pattern: "folders/{folder}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudbilling.googleapis.com/BillingAccount"
+ pattern: "billingAccounts/{billing_account}"
+};
+
+option (google.api.resource_definition) = {
+ type: "locations.googleapis.com/Location"
+ pattern: "projects/{project}/locations/{location}"
+};
+
diff --git a/typescript/test/protos/google/cloud/translate/v3beta1/translation_service.proto b/baselines/translate/protos/google/cloud/translate/v3beta1/translation_service.proto.baseline
similarity index 100%
rename from typescript/test/protos/google/cloud/translate/v3beta1/translation_service.proto
rename to baselines/translate/protos/google/cloud/translate/v3beta1/translation_service.proto.baseline
diff --git a/typescript/test/testdata/translate/src/index.ts.baseline b/baselines/translate/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/translate/src/index.ts.baseline
rename to baselines/translate/src/index.ts.baseline
diff --git a/typescript/test/testdata/translate/src/v3beta1/index.ts.baseline b/baselines/translate/src/v3beta1/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/translate/src/v3beta1/index.ts.baseline
rename to baselines/translate/src/v3beta1/index.ts.baseline
diff --git a/typescript/test/testdata/translate/src/v3beta1/translation_service_client.ts.baseline b/baselines/translate/src/v3beta1/translation_service_client.ts.baseline
similarity index 100%
rename from typescript/test/testdata/translate/src/v3beta1/translation_service_client.ts.baseline
rename to baselines/translate/src/v3beta1/translation_service_client.ts.baseline
diff --git a/typescript/test/testdata/translate/src/v3beta1/translation_service_client_config.json.baseline b/baselines/translate/src/v3beta1/translation_service_client_config.json.baseline
similarity index 100%
rename from typescript/test/testdata/translate/src/v3beta1/translation_service_client_config.json.baseline
rename to baselines/translate/src/v3beta1/translation_service_client_config.json.baseline
diff --git a/typescript/test/testdata/translate/src/v3beta1/translation_service_proto_list.json.baseline b/baselines/translate/src/v3beta1/translation_service_proto_list.json.baseline
similarity index 100%
rename from typescript/test/testdata/translate/src/v3beta1/translation_service_proto_list.json.baseline
rename to baselines/translate/src/v3beta1/translation_service_proto_list.json.baseline
diff --git a/typescript/test/testdata/translate/system-test/fixtures/sample/src/index.js.baseline b/baselines/translate/system-test/fixtures/sample/src/index.js.baseline
similarity index 100%
rename from typescript/test/testdata/translate/system-test/fixtures/sample/src/index.js.baseline
rename to baselines/translate/system-test/fixtures/sample/src/index.js.baseline
diff --git a/typescript/test/testdata/translate/system-test/fixtures/sample/src/index.ts.baseline b/baselines/translate/system-test/fixtures/sample/src/index.ts.baseline
similarity index 100%
rename from typescript/test/testdata/translate/system-test/fixtures/sample/src/index.ts.baseline
rename to baselines/translate/system-test/fixtures/sample/src/index.ts.baseline
diff --git a/typescript/test/testdata/translate/system-test/install.ts.baseline b/baselines/translate/system-test/install.ts.baseline
similarity index 100%
rename from typescript/test/testdata/translate/system-test/install.ts.baseline
rename to baselines/translate/system-test/install.ts.baseline
diff --git a/typescript/test/testdata/translate/test/gapic-translation_service-v3beta1.ts.baseline b/baselines/translate/test/gapic-translation_service-v3beta1.ts.baseline
similarity index 100%
rename from typescript/test/testdata/translate/test/gapic-translation_service-v3beta1.ts.baseline
rename to baselines/translate/test/gapic-translation_service-v3beta1.ts.baseline
diff --git a/typescript/test/testdata/translate/tsconfig.json.baseline b/baselines/translate/tsconfig.json.baseline
similarity index 100%
rename from typescript/test/testdata/translate/tsconfig.json.baseline
rename to baselines/translate/tsconfig.json.baseline
diff --git a/typescript/test/testdata/translate/tslint.json.baseline b/baselines/translate/tslint.json.baseline
similarity index 100%
rename from typescript/test/testdata/translate/tslint.json.baseline
rename to baselines/translate/tslint.json.baseline
diff --git a/typescript/test/testdata/translate/webpack.config.js.baseline b/baselines/translate/webpack.config.js.baseline
similarity index 100%
rename from typescript/test/testdata/translate/webpack.config.js.baseline
rename to baselines/translate/webpack.config.js.baseline
diff --git a/package.json b/package.json
index e8959448c9..a141a2dc28 100644
--- a/package.json
+++ b/package.json
@@ -18,18 +18,18 @@
"templates"
],
"bin": {
- "gapic-generator-typescript": "build/src/start_script.js",
+ "gapic-generator-typescript": "build/src/start-script.js",
"protoc-gen-typescript_gapic": "build/src/cli.js"
},
"scripts": {
"baseline": "node build/tools/update-baselines.js",
"clean": "gts clean",
"codecov": "c8 --reporter=lcov mocha build/test/unit && c8 report",
- "compile": "tsc -p . && cp -r typescript/test/protos build/test/",
+ "compile": "tsc -p .",
"compile-protos": "pbjs -p protos -p node_modules/google-gax/protos -t static-module -o pbjs-genfiles/plugin.js google/protobuf/compiler/plugin.proto google/api/annotations.proto google/api/field_behavior.proto google/api/resource.proto google/api/client.proto google/longrunning/operations.proto service_config.proto && pbts pbjs-genfiles/plugin.js -o pbjs-genfiles/plugin.d.ts",
"docker-test": "sh docker/test.sh",
- "ts-test-application": "mocha build/test/test_application/test_ts",
- "js-test-application": "mocha build/test/test_application/test_js",
+ "ts-test-application": "mocha build/test/test-application/test-ts",
+ "js-test-application": "mocha build/test/test-application/test-js",
"fix": "gts fix",
"lint": "gts check",
"prepare": "npm run compile-protos && npm run compile",
diff --git a/test-fixtures/protos/google/cloud/common_resources.proto b/test-fixtures/protos/google/cloud/common_resources.proto
new file mode 100644
index 0000000000..56c9f800d5
--- /dev/null
+++ b/test-fixtures/protos/google/cloud/common_resources.proto
@@ -0,0 +1,52 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file contains stub messages for common resources in GCP.
+// It is not intended to be directly generated, and is instead used by
+// other tooling to be able to match common resource patterns.
+syntax = "proto3";
+
+package google.cloud;
+
+import "google/api/resource.proto";
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ pattern: "projects/{project}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Organization"
+ pattern: "organizations/{organization}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudresourcemanager.googleapis.com/Folder"
+ pattern: "folders/{folder}"
+};
+
+
+option (google.api.resource_definition) = {
+ type: "cloudbilling.googleapis.com/BillingAccount"
+ pattern: "billingAccounts/{billing_account}"
+};
+
+option (google.api.resource_definition) = {
+ type: "locations.googleapis.com/Location"
+ pattern: "projects/{project}/locations/{location}"
+};
+
diff --git a/test-fixtures/protos/google/cloud/kms/v1/resources.proto b/test-fixtures/protos/google/cloud/kms/v1/resources.proto
new file mode 100644
index 0000000000..09baaf1bec
--- /dev/null
+++ b/test-fixtures/protos/google/cloud/kms/v1/resources.proto
@@ -0,0 +1,533 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.kms.v1;
+
+import "google/api/annotations.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/timestamp.proto";
+
+option cc_enable_arenas = true;
+option csharp_namespace = "Google.Cloud.Kms.V1";
+option go_package = "google.golang.org/genproto/googleapis/cloud/kms/v1;kms";
+option java_multiple_files = true;
+option java_outer_classname = "KmsResourcesProto";
+option java_package = "com.google.cloud.kms.v1";
+option php_namespace = "Google\\Cloud\\Kms\\V1";
+
+// A [KeyRing][google.cloud.kms.v1.KeyRing] is a toplevel logical grouping of [CryptoKeys][google.cloud.kms.v1.CryptoKey].
+message KeyRing {
+ // Output only. The resource name for the [KeyRing][google.cloud.kms.v1.KeyRing] in the format
+ // `projects/*/locations/*/keyRings/*`.
+ string name = 1;
+
+ // Output only. The time at which this [KeyRing][google.cloud.kms.v1.KeyRing] was created.
+ google.protobuf.Timestamp create_time = 2;
+}
+
+// A [CryptoKey][google.cloud.kms.v1.CryptoKey] represents a logical key that can be used for cryptographic
+// operations.
+//
+// A [CryptoKey][google.cloud.kms.v1.CryptoKey] is made up of one or more [versions][google.cloud.kms.v1.CryptoKeyVersion], which
+// represent the actual key material used in cryptographic operations.
+message CryptoKey {
+ // [CryptoKeyPurpose][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose] describes the cryptographic capabilities of a
+ // [CryptoKey][google.cloud.kms.v1.CryptoKey]. A given key can only be used for the operations allowed by
+ // its purpose. For more information, see
+ // [Key purposes](https://cloud.google.com/kms/docs/algorithms#key_purposes).
+ enum CryptoKeyPurpose {
+ // Not specified.
+ CRYPTO_KEY_PURPOSE_UNSPECIFIED = 0;
+
+ // [CryptoKeys][google.cloud.kms.v1.CryptoKey] with this purpose may be used with
+ // [Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt] and
+ // [Decrypt][google.cloud.kms.v1.KeyManagementService.Decrypt].
+ ENCRYPT_DECRYPT = 1;
+
+ // [CryptoKeys][google.cloud.kms.v1.CryptoKey] with this purpose may be used with
+ // [AsymmetricSign][google.cloud.kms.v1.KeyManagementService.AsymmetricSign] and
+ // [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey].
+ ASYMMETRIC_SIGN = 5;
+
+ // [CryptoKeys][google.cloud.kms.v1.CryptoKey] with this purpose may be used with
+ // [AsymmetricDecrypt][google.cloud.kms.v1.KeyManagementService.AsymmetricDecrypt] and
+ // [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey].
+ ASYMMETRIC_DECRYPT = 6;
+ }
+
+ // Output only. The resource name for this [CryptoKey][google.cloud.kms.v1.CryptoKey] in the format
+ // `projects/*/locations/*/keyRings/*/cryptoKeys/*`.
+ string name = 1;
+
+ // Output only. A copy of the "primary" [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] that will be used
+ // by [Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt] when this [CryptoKey][google.cloud.kms.v1.CryptoKey] is given
+ // in [EncryptRequest.name][google.cloud.kms.v1.EncryptRequest.name].
+ //
+ // The [CryptoKey][google.cloud.kms.v1.CryptoKey]'s primary version can be updated via
+ // [UpdateCryptoKeyPrimaryVersion][google.cloud.kms.v1.KeyManagementService.UpdateCryptoKeyPrimaryVersion].
+ //
+ // All keys with [purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT] have a
+ // primary. For other keys, this field will be omitted.
+ CryptoKeyVersion primary = 2;
+
+ // The immutable purpose of this [CryptoKey][google.cloud.kms.v1.CryptoKey].
+ CryptoKeyPurpose purpose = 3;
+
+ // Output only. The time at which this [CryptoKey][google.cloud.kms.v1.CryptoKey] was created.
+ google.protobuf.Timestamp create_time = 5;
+
+ // At [next_rotation_time][google.cloud.kms.v1.CryptoKey.next_rotation_time], the Key Management Service will automatically:
+ //
+ // 1. Create a new version of this [CryptoKey][google.cloud.kms.v1.CryptoKey].
+ // 2. Mark the new version as primary.
+ //
+ // Key rotations performed manually via
+ // [CreateCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.CreateCryptoKeyVersion] and
+ // [UpdateCryptoKeyPrimaryVersion][google.cloud.kms.v1.KeyManagementService.UpdateCryptoKeyPrimaryVersion]
+ // do not affect [next_rotation_time][google.cloud.kms.v1.CryptoKey.next_rotation_time].
+ //
+ // Keys with [purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT] support
+ // automatic rotation. For other keys, this field must be omitted.
+ google.protobuf.Timestamp next_rotation_time = 7;
+
+ // Controls the rate of automatic rotation.
+ oneof rotation_schedule {
+ // [next_rotation_time][google.cloud.kms.v1.CryptoKey.next_rotation_time] will be advanced by this period when the service
+ // automatically rotates a key. Must be at least one day.
+ //
+ // If [rotation_period][google.cloud.kms.v1.CryptoKey.rotation_period] is set, [next_rotation_time][google.cloud.kms.v1.CryptoKey.next_rotation_time] must also be set.
+ //
+ // Keys with [purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT] support
+ // automatic rotation. For other keys, this field must be omitted.
+ google.protobuf.Duration rotation_period = 8;
+ }
+
+ // A template describing settings for new [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] instances.
+ // The properties of new [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] instances created by either
+ // [CreateCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.CreateCryptoKeyVersion] or
+ // auto-rotation are controlled by this template.
+ CryptoKeyVersionTemplate version_template = 11;
+
+ // Labels with user-defined metadata. For more information, see
+ // [Labeling Keys](/kms/docs/labeling-keys).
+  map<string, string> labels = 10;
+}
+
+// A [CryptoKeyVersionTemplate][google.cloud.kms.v1.CryptoKeyVersionTemplate] specifies the properties to use when creating
+// a new [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion], either manually with
+// [CreateCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.CreateCryptoKeyVersion] or
+// automatically as a result of auto-rotation.
+message CryptoKeyVersionTemplate {
+ // [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] to use when creating a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] based on
+ // this template. Immutable. Defaults to [SOFTWARE][google.cloud.kms.v1.ProtectionLevel.SOFTWARE].
+ ProtectionLevel protection_level = 1;
+
+ // Required. [Algorithm][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionAlgorithm] to use
+ // when creating a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] based on this template.
+ //
+ // For backwards compatibility, GOOGLE_SYMMETRIC_ENCRYPTION is implied if both
+ // this field is omitted and [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] is
+ // [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
+ CryptoKeyVersion.CryptoKeyVersionAlgorithm algorithm = 3;
+}
+
+// Contains an HSM-generated attestation about a key operation. For more
+// information, see [Verifying attestations]
+// (https://cloud.google.com/kms/docs/attest-key).
+message KeyOperationAttestation {
+ // Attestation formats provided by the HSM.
+ enum AttestationFormat {
+ // Not specified.
+ ATTESTATION_FORMAT_UNSPECIFIED = 0;
+
+ // Cavium HSM attestation compressed with gzip. Note that this format is
+ // defined by Cavium and subject to change at any time.
+ CAVIUM_V1_COMPRESSED = 3;
+
+ // Cavium HSM attestation V2 compressed with gzip. This is a new format
+ // introduced in Cavium's version 3.2-08.
+ CAVIUM_V2_COMPRESSED = 4;
+ }
+
+ // Output only. The format of the attestation data.
+ AttestationFormat format = 4;
+
+ // Output only. The attestation data provided by the HSM when the key
+ // operation was performed.
+ bytes content = 5;
+}
+
+// A [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] represents an individual cryptographic key, and the
+// associated key material.
+//
+// An [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] version can be
+// used for cryptographic operations.
+//
+// For security reasons, the raw cryptographic key material represented by a
+// [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] can never be viewed or exported. It can only be used to
+// encrypt, decrypt, or sign data when an authorized user or application invokes
+// Cloud KMS.
+message CryptoKeyVersion {
+ // The algorithm of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion], indicating what
+ // parameters must be used for each cryptographic operation.
+ //
+ // The
+ // [GOOGLE_SYMMETRIC_ENCRYPTION][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION]
+ // algorithm is usable with [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
+ //
+ // Algorithms beginning with "RSA_SIGN_" are usable with [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // [ASYMMETRIC_SIGN][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_SIGN].
+ //
+ // The fields in the name after "RSA_SIGN_" correspond to the following
+ // parameters: padding algorithm, modulus bit length, and digest algorithm.
+ //
+ // For PSS, the salt length used is equal to the length of digest
+ // algorithm. For example,
+ // [RSA_SIGN_PSS_2048_SHA256][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionAlgorithm.RSA_SIGN_PSS_2048_SHA256]
+ // will use PSS with a salt length of 256 bits or 32 bytes.
+ //
+ // Algorithms beginning with "RSA_DECRYPT_" are usable with
+ // [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // [ASYMMETRIC_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_DECRYPT].
+ //
+ // The fields in the name after "RSA_DECRYPT_" correspond to the following
+ // parameters: padding algorithm, modulus bit length, and digest algorithm.
+ //
+ // Algorithms beginning with "EC_SIGN_" are usable with [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
+ // [ASYMMETRIC_SIGN][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_SIGN].
+ //
+ // The fields in the name after "EC_SIGN_" correspond to the following
+ // parameters: elliptic curve, digest algorithm.
+ //
+ // For more information, see [Key purposes and algorithms]
+ // (https://cloud.google.com/kms/docs/algorithms).
+ enum CryptoKeyVersionAlgorithm {
+ // Not specified.
+ CRYPTO_KEY_VERSION_ALGORITHM_UNSPECIFIED = 0;
+
+ // Creates symmetric encryption keys.
+ GOOGLE_SYMMETRIC_ENCRYPTION = 1;
+
+ // RSASSA-PSS 2048 bit key with a SHA256 digest.
+ RSA_SIGN_PSS_2048_SHA256 = 2;
+
+ // RSASSA-PSS 3072 bit key with a SHA256 digest.
+ RSA_SIGN_PSS_3072_SHA256 = 3;
+
+ // RSASSA-PSS 4096 bit key with a SHA256 digest.
+ RSA_SIGN_PSS_4096_SHA256 = 4;
+
+ // RSASSA-PSS 4096 bit key with a SHA512 digest.
+ RSA_SIGN_PSS_4096_SHA512 = 15;
+
+ // RSASSA-PKCS1-v1_5 with a 2048 bit key and a SHA256 digest.
+ RSA_SIGN_PKCS1_2048_SHA256 = 5;
+
+ // RSASSA-PKCS1-v1_5 with a 3072 bit key and a SHA256 digest.
+ RSA_SIGN_PKCS1_3072_SHA256 = 6;
+
+ // RSASSA-PKCS1-v1_5 with a 4096 bit key and a SHA256 digest.
+ RSA_SIGN_PKCS1_4096_SHA256 = 7;
+
+ // RSASSA-PKCS1-v1_5 with a 4096 bit key and a SHA512 digest.
+ RSA_SIGN_PKCS1_4096_SHA512 = 16;
+
+ // RSAES-OAEP 2048 bit key with a SHA256 digest.
+ RSA_DECRYPT_OAEP_2048_SHA256 = 8;
+
+ // RSAES-OAEP 3072 bit key with a SHA256 digest.
+ RSA_DECRYPT_OAEP_3072_SHA256 = 9;
+
+ // RSAES-OAEP 4096 bit key with a SHA256 digest.
+ RSA_DECRYPT_OAEP_4096_SHA256 = 10;
+
+ // RSAES-OAEP 4096 bit key with a SHA512 digest.
+ RSA_DECRYPT_OAEP_4096_SHA512 = 17;
+
+ // ECDSA on the NIST P-256 curve with a SHA256 digest.
+ EC_SIGN_P256_SHA256 = 12;
+
+ // ECDSA on the NIST P-384 curve with a SHA384 digest.
+ EC_SIGN_P384_SHA384 = 13;
+ }
+
+ // The state of a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion], indicating if it can be used.
+ enum CryptoKeyVersionState {
+ // Not specified.
+ CRYPTO_KEY_VERSION_STATE_UNSPECIFIED = 0;
+
+ // This version is still being generated. It may not be used, enabled,
+ // disabled, or destroyed yet. Cloud KMS will automatically mark this
+ // version [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] as soon as the version is ready.
+ PENDING_GENERATION = 5;
+
+ // This version may be used for cryptographic operations.
+ ENABLED = 1;
+
+ // This version may not be used, but the key material is still available,
+ // and the version can be placed back into the [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] state.
+ DISABLED = 2;
+
+ // This version is destroyed, and the key material is no longer stored.
+ // A version may not leave this state once entered.
+ DESTROYED = 3;
+
+ // This version is scheduled for destruction, and will be destroyed soon.
+ // Call
+ // [RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion]
+ // to put it back into the [DISABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DISABLED] state.
+ DESTROY_SCHEDULED = 4;
+
+ // This version is still being imported. It may not be used, enabled,
+ // disabled, or destroyed yet. Cloud KMS will automatically mark this
+ // version [ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] as soon as the version is ready.
+ PENDING_IMPORT = 6;
+
+ // This version was not imported successfully. It may not be used, enabled,
+ // disabled, or destroyed. The submitted key material has been discarded.
+ // Additional details can be found in
+ // [CryptoKeyVersion.import_failure_reason][google.cloud.kms.v1.CryptoKeyVersion.import_failure_reason].
+ IMPORT_FAILED = 7;
+ }
+
+ // A view for [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]s. Controls the level of detail returned
+ // for [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] in
+ // [KeyManagementService.ListCryptoKeyVersions][google.cloud.kms.v1.KeyManagementService.ListCryptoKeyVersions] and
+ // [KeyManagementService.ListCryptoKeys][google.cloud.kms.v1.KeyManagementService.ListCryptoKeys].
+ enum CryptoKeyVersionView {
+ // Default view for each [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. Does not include
+ // the [attestation][google.cloud.kms.v1.CryptoKeyVersion.attestation] field.
+ CRYPTO_KEY_VERSION_VIEW_UNSPECIFIED = 0;
+
+ // Provides all fields in each [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion], including the
+ // [attestation][google.cloud.kms.v1.CryptoKeyVersion.attestation].
+ FULL = 1;
+ }
+
+ // Output only. The resource name for this [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in the format
+ // `projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*`.
+ string name = 1;
+
+ // The current state of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
+ CryptoKeyVersionState state = 3;
+
+ // Output only. The [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] describing how crypto operations are
+ // performed with this [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
+ ProtectionLevel protection_level = 7;
+
+ // Output only. The [CryptoKeyVersionAlgorithm][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionAlgorithm] that this
+ // [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] supports.
+ CryptoKeyVersionAlgorithm algorithm = 10;
+
+ // Output only. Statement that was generated and signed by the HSM at key
+ // creation time. Use this statement to verify attributes of the key as stored
+ // on the HSM, independently of Google. Only provided for key versions with
+ // [protection_level][google.cloud.kms.v1.CryptoKeyVersion.protection_level] [HSM][google.cloud.kms.v1.ProtectionLevel.HSM].
+ KeyOperationAttestation attestation = 8;
+
+ // Output only. The time at which this [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] was created.
+ google.protobuf.Timestamp create_time = 4;
+
+ // Output only. The time this [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]'s key material was
+ // generated.
+ google.protobuf.Timestamp generate_time = 11;
+
+ // Output only. The time this [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]'s key material is scheduled
+ // for destruction. Only present if [state][google.cloud.kms.v1.CryptoKeyVersion.state] is
+ // [DESTROY_SCHEDULED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROY_SCHEDULED].
+ google.protobuf.Timestamp destroy_time = 5;
+
+ // Output only. The time this CryptoKeyVersion's key material was
+ // destroyed. Only present if [state][google.cloud.kms.v1.CryptoKeyVersion.state] is
+ // [DESTROYED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROYED].
+ google.protobuf.Timestamp destroy_event_time = 6;
+
+ // Output only. The name of the [ImportJob][google.cloud.kms.v1.ImportJob] used to import this
+ // [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. Only present if the underlying key material was
+ // imported.
+ string import_job = 14;
+
+ // Output only. The time at which this [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]'s key material
+ // was imported.
+ google.protobuf.Timestamp import_time = 15;
+
+ // Output only. The root cause of an import failure. Only present if
+ // [state][google.cloud.kms.v1.CryptoKeyVersion.state] is
+ // [IMPORT_FAILED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.IMPORT_FAILED].
+ string import_failure_reason = 16;
+}
+
+// The public key for a given [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. Obtained via
+// [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey].
+message PublicKey {
+ // The public key, encoded in PEM format. For more information, see the
+ // [RFC 7468](https://tools.ietf.org/html/rfc7468) sections for
+ // [General Considerations](https://tools.ietf.org/html/rfc7468#section-2) and
+ // [Textual Encoding of Subject Public Key Info]
+ // (https://tools.ietf.org/html/rfc7468#section-13).
+ string pem = 1;
+
+ // The [Algorithm][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionAlgorithm] associated
+ // with this key.
+ CryptoKeyVersion.CryptoKeyVersionAlgorithm algorithm = 2;
+}
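Since PublicKey.pem is a standard RFC 7468 encoding, it can be handed straight to Node's crypto module. A hedged sketch (package and class names are assumptions):

  import {createPublicKey} from 'crypto';
  import {KeyManagementServiceClient} from '@google-cloud/kms';

  async function loadPublicKey(versionName: string) {
    // versionName: projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*
    const client = new KeyManagementServiceClient();
    const [publicKey] = await client.getPublicKey({name: versionName});
    // The PEM is a Subject Public Key Info block, so createPublicKey parses it directly.
    return createPublicKey(publicKey.pem!);
  }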
+
+// [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] specifies how cryptographic operations are performed.
+// For more information, see [Protection levels]
+// (https://cloud.google.com/kms/docs/algorithms#protection_levels).
+enum ProtectionLevel {
+ // Not specified.
+ PROTECTION_LEVEL_UNSPECIFIED = 0;
+
+ // Crypto operations are performed in software.
+ SOFTWARE = 1;
+
+ // Crypto operations are performed in a Hardware Security Module.
+ HSM = 2;
+}
+
+// An [ImportJob][google.cloud.kms.v1.ImportJob] can be used to create [CryptoKeys][google.cloud.kms.v1.CryptoKey] and
+// [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] using pre-existing key material,
+// generated outside of Cloud KMS.
+//
+// When an [ImportJob][google.cloud.kms.v1.ImportJob] is created, Cloud KMS will generate a "wrapping key",
+// which is a public/private key pair. You use the wrapping key to encrypt (also
+// known as wrap) the pre-existing key material to protect it during the import
+// process. The nature of the wrapping key depends on the choice of
+// [import_method][google.cloud.kms.v1.ImportJob.import_method]. When the wrapping key generation
+// is complete, the [state][google.cloud.kms.v1.ImportJob.state] will be set to
+// [ACTIVE][google.cloud.kms.v1.ImportJob.ImportJobState.ACTIVE] and the [public_key][google.cloud.kms.v1.ImportJob.public_key]
+// can be fetched. The fetched public key can then be used to wrap your
+// pre-existing key material.
+//
+// Once the key material is wrapped, it can be imported into a new
+// [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in an existing [CryptoKey][google.cloud.kms.v1.CryptoKey] by calling
+// [ImportCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.ImportCryptoKeyVersion].
+// Multiple [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] can be imported with a single
+// [ImportJob][google.cloud.kms.v1.ImportJob]. Cloud KMS uses the private key portion of the wrapping key to
+// unwrap the key material. Only Cloud KMS has access to the private key.
+//
+// An [ImportJob][google.cloud.kms.v1.ImportJob] expires 3 days after it is created. Once expired, Cloud KMS
+// will no longer be able to import or unwrap any key material that was wrapped
+// with the [ImportJob][google.cloud.kms.v1.ImportJob]'s public key.
+//
+// For more information, see
+// [Importing a key](https://cloud.google.com/kms/docs/importing-a-key).
+message ImportJob {
+ // The public key component of the wrapping key. For details of the type of
+ // key this public key corresponds to, see the [ImportMethod][google.cloud.kms.v1.ImportJob.ImportMethod].
+ message WrappingPublicKey {
+ // The public key, encoded in PEM format. For more information, see the [RFC
+ // 7468](https://tools.ietf.org/html/rfc7468) sections for [General
+ // Considerations](https://tools.ietf.org/html/rfc7468#section-2) and
+ // [Textual Encoding of Subject Public Key Info]
+ // (https://tools.ietf.org/html/rfc7468#section-13).
+ string pem = 1;
+ }
+
+ // [ImportMethod][google.cloud.kms.v1.ImportJob.ImportMethod] describes the key wrapping method chosen for this
+ // [ImportJob][google.cloud.kms.v1.ImportJob].
+ enum ImportMethod {
+ // Not specified.
+ IMPORT_METHOD_UNSPECIFIED = 0;
+
+ // This ImportMethod represents the CKM_RSA_AES_KEY_WRAP key wrapping
+ // scheme defined in the PKCS #11 standard. In summary, this involves
+ // wrapping the raw key with an ephemeral AES key, and wrapping the
+ // ephemeral AES key with a 3072 bit RSA key. For more details, see
+ // [RSA AES key wrap
+ // mechanism](http://docs.oasis-open.org/pkcs11/pkcs11-curr/v2.40/cos01/pkcs11-curr-v2.40-cos01.html#_Toc408226908).
+ RSA_OAEP_3072_SHA1_AES_256 = 1;
+
+ // This ImportMethod represents the CKM_RSA_AES_KEY_WRAP key wrapping
+ // scheme defined in the PKCS #11 standard. In summary, this involves
+ // wrapping the raw key with an ephemeral AES key, and wrapping the
+ // ephemeral AES key with a 4096 bit RSA key. For more details, see
+ // [RSA AES key wrap
+ // mechanism](http://docs.oasis-open.org/pkcs11/pkcs11-curr/v2.40/cos01/pkcs11-curr-v2.40-cos01.html#_Toc408226908).
+ RSA_OAEP_4096_SHA1_AES_256 = 2;
+ }
+
+ // The state of the [ImportJob][google.cloud.kms.v1.ImportJob], indicating if it can be used.
+ enum ImportJobState {
+ // Not specified.
+ IMPORT_JOB_STATE_UNSPECIFIED = 0;
+
+ // The wrapping key for this job is still being generated. It may not be
+ // used. Cloud KMS will automatically mark this job as
+ // [ACTIVE][google.cloud.kms.v1.ImportJob.ImportJobState.ACTIVE] as soon as the wrapping key is generated.
+ PENDING_GENERATION = 1;
+
+ // This job may be used in
+ // [CreateCryptoKey][google.cloud.kms.v1.KeyManagementService.CreateCryptoKey] and
+ // [CreateCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.CreateCryptoKeyVersion]
+ // requests.
+ ACTIVE = 2;
+
+ // This job can no longer be used and may not leave this state once entered.
+ EXPIRED = 3;
+ }
+
+ // Output only. The resource name for this [ImportJob][google.cloud.kms.v1.ImportJob] in the format
+ // `projects/*/locations/*/keyRings/*/importJobs/*`.
+ string name = 1;
+
+ // Required and immutable. The wrapping method to be used for incoming
+ // key material.
+ ImportMethod import_method = 2;
+
+ // Required and immutable. The protection level of the [ImportJob][google.cloud.kms.v1.ImportJob]. This
+ // must match the
+ // [protection_level][google.cloud.kms.v1.CryptoKeyVersionTemplate.protection_level] of the
+ // [version_template][google.cloud.kms.v1.CryptoKey.version_template] on the [CryptoKey][google.cloud.kms.v1.CryptoKey] you
+ // attempt to import into.
+ ProtectionLevel protection_level = 9;
+
+ // Output only. The time at which this [ImportJob][google.cloud.kms.v1.ImportJob] was created.
+ google.protobuf.Timestamp create_time = 3;
+
+ // Output only. The time this [ImportJob][google.cloud.kms.v1.ImportJob]'s key material was generated.
+ google.protobuf.Timestamp generate_time = 4;
+
+ // Output only. The time at which this [ImportJob][google.cloud.kms.v1.ImportJob] is scheduled for
+ // expiration and can no longer be used to import key material.
+ google.protobuf.Timestamp expire_time = 5;
+
+ // Output only. The time this [ImportJob][google.cloud.kms.v1.ImportJob] expired. Only present if
+ // [state][google.cloud.kms.v1.ImportJob.state] is [EXPIRED][google.cloud.kms.v1.ImportJob.ImportJobState.EXPIRED].
+ google.protobuf.Timestamp expire_event_time = 10;
+
+ // Output only. The current state of the [ImportJob][google.cloud.kms.v1.ImportJob], indicating if it can
+ // be used.
+ ImportJobState state = 6;
+
+ // Output only. The public key with which to wrap key material prior to
+ // import. Only returned if [state][google.cloud.kms.v1.ImportJob.state] is
+ // [ACTIVE][google.cloud.kms.v1.ImportJob.ImportJobState.ACTIVE].
+ WrappingPublicKey public_key = 7;
+
+ // Output only. Statement that was generated and signed by the key creator
+ // (for example, an HSM) at key creation time. Use this statement to verify
+ // attributes of the key as stored on the HSM, independently of Google.
+ // Only present if the chosen [ImportMethod][google.cloud.kms.v1.ImportJob.ImportMethod] is one with a protection
+ // level of [HSM][google.cloud.kms.v1.ProtectionLevel.HSM].
+ KeyOperationAttestation attestation = 8;
+}
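The import workflow described above boils down to three steps on the caller's side. A rough sketch against an assumed generated TypeScript client (package name, job id, and enum spellings are illustrative; the wrapping step itself happens outside Cloud KMS and is elided):

  import {KeyManagementServiceClient} from '@google-cloud/kms';

  async function prepareImport(keyRing: string) {
    const client = new KeyManagementServiceClient();
    // 1. Create the ImportJob; Cloud KMS starts generating the wrapping key pair.
    const [job] = await client.createImportJob({
      parent: keyRing, // projects/*/locations/*/keyRings/*
      importJobId: 'my-import-job',
      importJob: {
        importMethod: 'RSA_OAEP_3072_SHA1_AES_256',
        protectionLevel: 'HSM',
      },
    });
    // 2. The job starts in PENDING_GENERATION; once it reports ACTIVE,
    //    public_key.pem holds the wrapping key.
    const [current] = await client.getImportJob({name: job.name!});
    console.log(current.state, current.publicKey?.pem);
    // 3. Wrap the pre-existing key material with that PEM, then call
    //    importCryptoKeyVersion with the wrapped bytes (request fields omitted
    //    here) before the job expires three days after creation.
  }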
diff --git a/typescript/test/protos/google/kms/v1/service.proto b/test-fixtures/protos/google/cloud/kms/v1/service.proto
similarity index 99%
rename from typescript/test/protos/google/kms/v1/service.proto
rename to test-fixtures/protos/google/cloud/kms/v1/service.proto
index 6210923ead..0f1266dfe8 100644
--- a/typescript/test/protos/google/kms/v1/service.proto
+++ b/test-fixtures/protos/google/cloud/kms/v1/service.proto
@@ -18,7 +18,7 @@ syntax = "proto3";
package google.cloud.kms.v1;
import "google/api/annotations.proto";
-import "google/kms/v1/resources.proto";
+import "google/cloud/kms/v1/resources.proto";
import "google/protobuf/field_mask.proto";
import "google/api/client.proto";
diff --git a/test-fixtures/protos/google/cloud/redis/v1beta1/cloud_redis.proto b/test-fixtures/protos/google/cloud/redis/v1beta1/cloud_redis.proto
new file mode 100644
index 0000000000..eafd8eb4ea
--- /dev/null
+++ b/test-fixtures/protos/google/cloud/redis/v1beta1/cloud_redis.proto
@@ -0,0 +1,555 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.redis.v1beta1;
+
+import "google/api/annotations.proto";
+import "google/api/client.proto";
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/longrunning/operations.proto";
+import "google/protobuf/field_mask.proto";
+import "google/protobuf/timestamp.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redis";
+option java_multiple_files = true;
+option java_outer_classname = "CloudRedisServiceBetaProto";
+option java_package = "com.google.cloud.redis.v1beta1";
+
+// Configures and manages Cloud Memorystore for Redis instances
+//
+// Google Cloud Memorystore for Redis v1beta1
+//
+// The `redis.googleapis.com` service implements the Google Cloud Memorystore
+// for Redis API and defines the following resource model for managing Redis
+// instances:
+// * The service works with a collection of cloud projects, named: `/projects/*`
+// * Each project has a collection of available locations, named: `/locations/*`
+// * Each location has a collection of Redis instances, named: `/instances/*`
+// * As such, Redis instances are resources of the form:
+// `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+//
+// Note that location_id must be referring to a GCP `region`; for example:

+// * `projects/redpepper-1290/locations/us-central1/instances/my-redis`
+service CloudRedis {
+ option (google.api.default_host) = "redis.googleapis.com";
+ option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform";
+
+ // Lists all Redis instances owned by a project in either the specified
+ // location (region) or all locations.
+ //
+ // The location should have the following format:
+ // * `projects/{project_id}/locations/{location_id}`
+ //
+ // If `location_id` is specified as `-` (wildcard), then all regions
+ // available to the project are queried, and the results are aggregated.
+ rpc ListInstances(ListInstancesRequest) returns (ListInstancesResponse) {
+ option (google.api.http) = {
+ get: "/v1beta1/{parent=projects/*/locations/*}/instances"
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Gets the details of a specific Redis instance.
+ rpc GetInstance(GetInstanceRequest) returns (Instance) {
+ option (google.api.http) = {
+ get: "/v1beta1/{name=projects/*/locations/*/instances/*}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Creates a Redis instance based on the specified tier and memory size.
+ //
+ // By default, the instance is accessible from the project's
+ // [default network](/compute/docs/networks-and-firewalls#networks).
+ //
+ // The creation is executed asynchronously and callers may check the returned
+ // operation to track its progress. Once the operation is completed the Redis
+ // instance will be fully functional. Completed longrunning.Operation will
+ // contain the new instance object in the response field.
+ //
+ // The returned operation is automatically deleted after a few hours, so there
+ // is no need to call DeleteOperation.
+ rpc CreateInstance(CreateInstanceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1beta1/{parent=projects/*/locations/*}/instances"
+ body: "instance"
+ };
+ option (google.api.method_signature) = "parent,instance_id,instance";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.cloud.redis.v1beta1.Instance"
+ metadata_type: "google.protobuf.Any"
+ };
+ }
+
+ // Updates the metadata and configuration of a specific Redis instance.
+ //
+ // Completed longrunning.Operation will contain the new instance object
+ // in the response field. The returned operation is automatically deleted
+ // after a few hours, so there is no need to call DeleteOperation.
+ rpc UpdateInstance(UpdateInstanceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ patch: "/v1beta1/{instance.name=projects/*/locations/*/instances/*}"
+ body: "instance"
+ };
+ option (google.api.method_signature) = "update_mask,instance";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.cloud.redis.v1beta1.Instance"
+ metadata_type: "google.protobuf.Any"
+ };
+ }
+
+ // Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
+ //
+ // Redis may stop serving during this operation. Instance state will be
+ // IMPORTING for entire operation. When complete, the instance will contain
+ // only data from the imported file.
+ //
+ // The returned operation is automatically deleted after a few hours, so
+ // there is no need to call DeleteOperation.
+ rpc ImportInstance(ImportInstanceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1beta1/{name=projects/*/locations/*/instances/*}:import"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name,input_config";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.cloud.redis.v1beta1.Instance"
+ metadata_type: "google.protobuf.Any"
+ };
+ }
+
+ // Export Redis instance data into a Redis RDB format file in Cloud Storage.
+ //
+ // Redis will continue serving during this operation.
+ //
+ // The returned operation is automatically deleted after a few hours, so
+ // there is no need to call DeleteOperation.
+ rpc ExportInstance(ExportInstanceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1beta1/{name=projects/*/locations/*/instances/*}:export"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name,output_config";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.cloud.redis.v1beta1.Instance"
+ metadata_type: "google.protobuf.Any"
+ };
+ }
+
+ // Initiates a failover of the master node to current replica node for a
+ // specific STANDARD tier Cloud Memorystore for Redis instance.
+ rpc FailoverInstance(FailoverInstanceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1beta1/{name=projects/*/locations/*/instances/*}:failover"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name,data_protection_mode";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.cloud.redis.v1beta1.Instance"
+ metadata_type: "google.protobuf.Any"
+ };
+ }
+
+ // Deletes a specific Redis instance. Instance stops serving and data is
+ // deleted.
+ rpc DeleteInstance(DeleteInstanceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ delete: "/v1beta1/{name=projects/*/locations/*/instances/*}"
+ };
+ option (google.api.method_signature) = "name";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.protobuf.Empty"
+ metadata_type: "google.protobuf.Any"
+ };
+ }
+}
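The wildcard behaviour called out above ("-" as location_id) is the interesting part of ListInstances. A minimal sketch, assuming the generated TypeScript client is published as `@google-cloud/redis` with a `CloudRedisClient` class and the usual GAPIC auto-pagination:

  import {CloudRedisClient} from '@google-cloud/redis';

  async function listAllInstances(projectId: string) {
    const client = new CloudRedisClient();
    // `-` aggregates results from every region available to the project.
    const [instances] = await client.listInstances({
      parent: `projects/${projectId}/locations/-`,
    });
    for (const instance of instances) {
      console.log(instance.name, instance.host, instance.port, instance.state);
    }
  }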
+
+// A Google Cloud Redis instance.
+message Instance {
+ option (google.api.resource) = {
+ type: "redis.googleapis.com/Instance"
+ pattern: "projects/{project}/locations/{location}/instances/{instance}"
+ };
+
+ // Represents the different states of a Redis instance.
+ enum State {
+ // Not set.
+ STATE_UNSPECIFIED = 0;
+
+ // Redis instance is being created.
+ CREATING = 1;
+
+ // Redis instance has been created and is fully usable.
+ READY = 2;
+
+ // Redis instance configuration is being updated. Certain kinds of updates
+ // may cause the instance to become unusable while the update is in
+ // progress.
+ UPDATING = 3;
+
+ // Redis instance is being deleted.
+ DELETING = 4;
+
+ // Redis instance is being repaired and may be unusable.
+ REPAIRING = 5;
+
+ // Maintenance is being performed on this Redis instance.
+ MAINTENANCE = 6;
+
+ // Redis instance is importing data (availability may be affected).
+ IMPORTING = 8;
+
+ // Redis instance is failing over (availability may be affected).
+ FAILING_OVER = 10;
+ }
+
+ // Available service tiers to choose from
+ enum Tier {
+ // Not set.
+ TIER_UNSPECIFIED = 0;
+
+ // BASIC tier: standalone instance
+ BASIC = 1;
+
+ // STANDARD_HA tier: highly available primary/replica instances
+ STANDARD_HA = 3;
+ }
+
+ // Required. Unique name of the resource in this scope including project and
+ // location using the form:
+ // `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+ //
+ // Note: Redis instances are managed and addressed at regional level so
+ // location_id here refers to a GCP region; however, users may choose which
+ // specific zone (or collection of zones for cross-zone instances) an instance
+ // should be provisioned in. Refer to [location_id] and
+ // [alternative_location_id] fields for more details.
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // An arbitrary and optional user-provided name for the instance.
+ string display_name = 2;
+
+ // Resource labels to represent user provided metadata
+ map<string, string> labels = 3;
+
+ // Optional. The zone where the instance will be provisioned. If not provided,
+ // the service will choose a zone for the instance. For STANDARD_HA tier,
+ // instances will be created across two zones for protection against zonal
+ // failures. If [alternative_location_id] is also provided, it must be
+ // different from [location_id].
+ string location_id = 4 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Only applicable to STANDARD_HA tier which protects the instance
+ // against zonal failures by provisioning it across two zones. If provided, it
+ // must be a different zone from the one provided in [location_id].
+ string alternative_location_id = 5 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The version of Redis software.
+ // If not provided, latest supported version will be used. Updating the
+ // version will perform an upgrade/downgrade to the new version. Currently,
+ // the supported values are:
+ //
+ // * `REDIS_4_0` for Redis 4.0 compatibility (default)
+ // * `REDIS_3_2` for Redis 3.2 compatibility
+ string redis_version = 7 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The CIDR range of internal addresses that are reserved for this
+ // instance. If not provided, the service will choose an unused /29 block,
+ // for example, 10.0.0.0/29 or 192.168.0.0/29. Ranges must be unique
+ // and non-overlapping with existing subnets in an authorized network.
+ string reserved_ip_range = 9 [(google.api.field_behavior) = OPTIONAL];
+
+ // Output only. Hostname or IP address of the exposed Redis endpoint used by
+ // clients to connect to the service.
+ string host = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The port number of the exposed Redis endpoint.
+ int32 port = 11 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The current zone where the Redis endpoint is placed. For Basic
+ // Tier instances, this will always be the same as the [location_id]
+ // provided by the user at creation time. For Standard Tier instances,
+ // this can be either [location_id] or [alternative_location_id] and can
+ // change after a failover event.
+ string current_location_id = 12 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The time the instance was created.
+ google.protobuf.Timestamp create_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The current state of this instance.
+ State state = 14 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. Additional information about the current status of this
+ // instance, if available.
+ string status_message = 15 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Optional. Redis configuration parameters, according to
+ // http://redis.io/topics/config. Currently, the only supported parameters
+ // are:
+ //
+ // Redis 3.2 and above:
+ //
+ // * maxmemory-policy
+ // * notify-keyspace-events
+ //
+ // Redis 4.0 and above:
+ //
+ // * activedefrag
+ // * lfu-log-factor
+ // * lfu-decay-time
+ map<string, string> redis_configs = 16 [(google.api.field_behavior) = OPTIONAL];
+
+ // Required. The service tier of the instance.
+ Tier tier = 17 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Redis memory size in GiB.
+ int32 memory_size_gb = 18 [(google.api.field_behavior) = REQUIRED];
+
+ // Optional. The full name of the Google Compute Engine
+ // [network](/compute/docs/networks-and-firewalls#networks) to which the
+ // instance is connected. If left unspecified, the `default` network
+ // will be used.
+ string authorized_network = 20 [(google.api.field_behavior) = OPTIONAL];
+
+ // Output only. Cloud IAM identity used by import / export operations to
+ // transfer data to/from Cloud Storage. Format is
+ // "serviceAccount:<service_account_email>". The value may change over time
+ // for a given instance so should be checked before each import/export
+ // operation.
+ string persistence_iam_identity = 21 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// Request for [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+message ListInstancesRequest {
+ // Required. The resource name of the instance location using the form:
+ // `projects/{project_id}/locations/{location_id}`
+ // where `location_id` refers to a GCP region.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // The maximum number of items to return.
+ //
+ // If not specified, a default value of 1000 will be used by the service.
+ // Regardless of the page_size value, the response may include a partial list
+ // and a caller should only rely on response's
+ // [next_page_token][CloudRedis.ListInstancesResponse.next_page_token]
+ // to determine if there are more instances left to be queried.
+ int32 page_size = 2;
+
+ // The next_page_token value returned from a previous List request,
+ // if any.
+ string page_token = 3;
+}
+
+// Response for [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+message ListInstancesResponse {
+ // A list of Redis instances in the project in the specified location,
+ // or across all locations.
+ //
+ // If the `location_id` in the parent field of the request is "-", all regions
+ // available to the project are queried, and the results aggregated.
+ // If in such an aggregated query a location is unavailable, a dummy Redis
+ // entry is included in the response with the "name" field set to a value of
+ // the form projects/{project_id}/locations/{location_id}/instances/- and the
+ // "status" field set to ERROR and "status_message" field set to "location not
+ // available for ListInstances".
+ repeated Instance instances = 1;
+
+ // Token to retrieve the next page of results, or empty if there are no more
+ // results in the list.
+ string next_page_token = 2;
+
+ // Locations that could not be reached.
+ repeated string unreachable = 3;
+}
+
+// Request for [GetInstance][google.cloud.redis.v1beta1.CloudRedis.GetInstance].
+message GetInstanceRequest {
+ // Required. Redis instance resource name using the form:
+ // `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+ // where `location_id` refers to a GCP region.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "redis.googleapis.com/Instance"
+ }
+ ];
+}
+
+// Request for [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.CreateInstance].
+message CreateInstanceRequest {
+ // Required. The resource name of the instance location using the form:
+ // `projects/{project_id}/locations/{location_id}`
+ // where `location_id` refers to a GCP region.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // Required. The logical name of the Redis instance in the customer project
+ // with the following restrictions:
+ //
+ // * Must contain only lowercase letters, numbers, and hyphens.
+ // * Must start with a letter.
+ // * Must be between 1-40 characters.
+ // * Must end with a number or a letter.
+ // * Must be unique within the customer project / location
+ string instance_id = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. A Redis [Instance] resource
+ Instance instance = 3 [(google.api.field_behavior) = REQUIRED];
+}
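A hedged sketch of CreateInstance as a long-running operation, keeping the instance_id restrictions from the comment above in mind (package/class names and the instance id are assumptions):

  import {CloudRedisClient} from '@google-cloud/redis';

  async function createBasicInstance(projectId: string, locationId: string) {
    const client = new CloudRedisClient();
    const [operation] = await client.createInstance({
      parent: `projects/${projectId}/locations/${locationId}`,
      // Lowercase letters, numbers and hyphens; starts with a letter; 1-40
      // characters; ends with a letter or number; unique per project/location.
      instanceId: 'my-redis',
      instance: {tier: 'BASIC', memorySizeGb: 1},
    });
    // The operation's response field carries the finished Instance.
    const [instance] = await operation.promise();
    console.log(instance.name, instance.host, instance.port);
  }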
+
+// Request for [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.UpdateInstance].
+message UpdateInstanceRequest {
+ // Required. Mask of fields to update. At least one path must be supplied in
+ // this field. The elements of the repeated paths field may only include these
+ // fields from [Instance][google.cloud.redis.v1beta1.Instance]:
+ //
+ // * `displayName`
+ // * `labels`
+ // * `memorySizeGb`
+ // * `redisConfig`
+ google.protobuf.FieldMask update_mask = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Update description.
+ // Only fields specified in update_mask are updated.
+ Instance instance = 2 [(google.api.field_behavior) = REQUIRED];
+}
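UpdateInstance only touches the paths named in the mask, so the sketch below (same assumed client as above) resizes memory and relabels the instance without changing anything else:

  import {CloudRedisClient} from '@google-cloud/redis';

  async function resizeInstance(name: string) {
    const client = new CloudRedisClient();
    const [operation] = await client.updateInstance({
      // Only the listed paths (displayName, labels, memorySizeGb, redisConfig)
      // are accepted; other fields set on `instance` are ignored.
      updateMask: {paths: ['memorySizeGb', 'labels']},
      instance: {name, memorySizeGb: 2, labels: {team: 'storage'}},
    });
    await operation.promise();
  }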
+
+// Request for [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.DeleteInstance].
+message DeleteInstanceRequest {
+ // Required. Redis instance resource name using the form:
+ // `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+ // where `location_id` refers to a GCP region.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "redis.googleapis.com/Instance"
+ }
+ ];
+}
+
+// The Cloud Storage location for the input content
+message GcsSource {
+ // Required. Source data URI. (e.g. 'gs://my_bucket/my_object').
+ string uri = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// The input content
+message InputConfig {
+ // Required. Specify source location of input data
+ oneof source {
+ // Google Cloud Storage location where input content is located.
+ GcsSource gcs_source = 1;
+ }
+}
+
+// Request for [Import][google.cloud.redis.v1beta1.CloudRedis.ImportInstance].
+message ImportInstanceRequest {
+ // Required. Redis instance resource name using the form:
+ // `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+ // where `location_id` refers to a GCP region.
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Specify data to be imported.
+ InputConfig input_config = 3 [(google.api.field_behavior) = REQUIRED];
+}
+
+// The Cloud Storage location for the output content
+message GcsDestination {
+ // Required. Data destination URI (e.g.
+ // 'gs://my_bucket/my_object'). Existing files will be overwritten.
+ string uri = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// The output content
+message OutputConfig {
+ // Required. Specify destination location of output data
+ oneof destination {
+ // Google Cloud Storage destination for output content.
+ GcsDestination gcs_destination = 1;
+ }
+}
+
+// Request for [Export][google.cloud.redis.v1beta1.CloudRedis.ExportInstance].
+message ExportInstanceRequest {
+ // Required. Redis instance resource name using the form:
+ // `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+ // where `location_id` refers to a GCP region.
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Specify data to be exported.
+ OutputConfig output_config = 3 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Request for [Failover][google.cloud.redis.v1beta1.CloudRedis.FailoverInstance].
+message FailoverInstanceRequest {
+ enum DataProtectionMode {
+ // Defaults to LIMITED_DATA_LOSS if a data protection mode is not
+ // specified.
+ DATA_PROTECTION_MODE_UNSPECIFIED = 0;
+
+ // Instance failover will be protected with data loss control. More
+ // specifically, the failover will only be performed if the current
+ // replication offset diff between master and replica is under a certain
+ // threshold.
+ LIMITED_DATA_LOSS = 1;
+
+ // Instance failover will be performed without data loss control.
+ FORCE_DATA_LOSS = 2;
+ }
+
+ // Required. Redis instance resource name using the form:
+ // `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+ // where `location_id` refers to a GCP region.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "redis.googleapis.com/Instance"
+ }
+ ];
+
+ // Optional. Available data protection modes that the user can choose. If it's
+ // unspecified, data protection mode will be LIMITED_DATA_LOSS by default.
+ DataProtectionMode data_protection_mode = 2 [(google.api.field_behavior) = OPTIONAL];
+}
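FailoverInstance defaults to LIMITED_DATA_LOSS when no mode is given, which the sketch below makes explicit (same assumed client; the enum is passed by name, as GAPIC clients generally accept):

  import {CloudRedisClient} from '@google-cloud/redis';

  async function failover(name: string) {
    const client = new CloudRedisClient();
    const [operation] = await client.failoverInstance({
      name, // projects/*/locations/*/instances/*, STANDARD_HA tier only
      // Omitting dataProtectionMode also yields LIMITED_DATA_LOSS.
      dataProtectionMode: 'LIMITED_DATA_LOSS',
    });
    await operation.promise();
  }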
+
+// This location metadata represents additional configuration options for a
+// given location where a Redis instance may be created. All fields are output
+// only. It is returned as content of the
+// `google.cloud.location.Location.metadata` field.
+message LocationMetadata {
+ // Output only. The set of available zones in the location. The map is keyed
+ // by the lowercase ID of each zone, as defined by GCE. These keys can be
+ // specified in `location_id` or `alternative_location_id` fields when
+ // creating a Redis instance.
+ map<string, ZoneMetadata> available_zones = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// Defines specific information for a particular zone. Currently empty and
+// reserved for future use only.
+message ZoneMetadata {
+
+}
diff --git a/typescript/test/protos/google/cloud/texttospeech/v1/cloud_tts.proto b/test-fixtures/protos/google/cloud/texttospeech/v1/cloud_tts.proto
similarity index 100%
rename from typescript/test/protos/google/cloud/texttospeech/v1/cloud_tts.proto
rename to test-fixtures/protos/google/cloud/texttospeech/v1/cloud_tts.proto
diff --git a/typescript/test/protos/google/cloud/texttospeech/v1/texttospeech_grpc_service_config.json b/test-fixtures/protos/google/cloud/texttospeech/v1/texttospeech_grpc_service_config.json
similarity index 100%
rename from typescript/test/protos/google/cloud/texttospeech/v1/texttospeech_grpc_service_config.json
rename to test-fixtures/protos/google/cloud/texttospeech/v1/texttospeech_grpc_service_config.json
diff --git a/test-fixtures/protos/google/cloud/translate/v3beta1/translation_service.proto b/test-fixtures/protos/google/cloud/translate/v3beta1/translation_service.proto
new file mode 100644
index 0000000000..e62a508838
--- /dev/null
+++ b/test-fixtures/protos/google/cloud/translate/v3beta1/translation_service.proto
@@ -0,0 +1,902 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.translation.v3beta1;
+
+import "google/api/annotations.proto";
+import "google/api/client.proto";
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/longrunning/operations.proto";
+import "google/protobuf/timestamp.proto";
+
+option cc_enable_arenas = true;
+option csharp_namespace = "Google.Cloud.Translate.V3Beta1";
+option go_package = "google.golang.org/genproto/googleapis/cloud/translate/v3beta1;translate";
+option java_multiple_files = true;
+option java_outer_classname = "TranslationServiceProto";
+option java_package = "com.google.cloud.translate.v3beta1";
+option php_namespace = "Google\\Cloud\\Translate\\V3beta1";
+option ruby_package = "Google::Cloud::Translate::V3beta1";
+
+// Proto file for the Cloud Translation API (v3beta1).
+
+// Provides natural language translation operations.
+service TranslationService {
+ option (google.api.default_host) = "translate.googleapis.com";
+ option (google.api.oauth_scopes) =
+ "https://www.googleapis.com/auth/cloud-platform,"
+ "https://www.googleapis.com/auth/cloud-translation";
+
+ // Translates input text and returns translated text.
+ rpc TranslateText(TranslateTextRequest) returns (TranslateTextResponse) {
+ option (google.api.http) = {
+ post: "/v3beta1/{parent=projects/*/locations/*}:translateText"
+ body: "*"
+ additional_bindings {
+ post: "/v3beta1/{parent=projects/*}:translateText"
+ body: "*"
+ }
+ };
+ }
+
+ // Detects the language of text within a request.
+ rpc DetectLanguage(DetectLanguageRequest) returns (DetectLanguageResponse) {
+ option (google.api.http) = {
+ post: "/v3beta1/{parent=projects/*/locations/*}:detectLanguage"
+ body: "*"
+ additional_bindings {
+ post: "/v3beta1/{parent=projects/*}:detectLanguage"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "parent,model,mime_type";
+ }
+
+ // Returns a list of supported languages for translation.
+ rpc GetSupportedLanguages(GetSupportedLanguagesRequest) returns (SupportedLanguages) {
+ option (google.api.http) = {
+ get: "/v3beta1/{parent=projects/*/locations/*}/supportedLanguages"
+ additional_bindings {
+ get: "/v3beta1/{parent=projects/*}/supportedLanguages"
+ }
+ };
+ option (google.api.method_signature) = "parent,display_language_code,model";
+ }
+
+ // Translates a large volume of text in asynchronous batch mode.
+ // This function provides real-time output as the inputs are being processed.
+ // If caller cancels a request, the partial results (for an input file, it's
+ // all or nothing) may still be available on the specified output location.
+ //
+ // This call returns immediately and you can
+ // use google.longrunning.Operation.name to poll the status of the call.
+ rpc BatchTranslateText(BatchTranslateTextRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v3beta1/{parent=projects/*/locations/*}:batchTranslateText"
+ body: "*"
+ };
+ option (google.longrunning.operation_info) = {
+ response_type: "BatchTranslateResponse"
+ metadata_type: "BatchTranslateMetadata"
+ };
+ }
+
+ // Creates a glossary and returns the long-running operation. Returns
+ // NOT_FOUND, if the project doesn't exist.
+ rpc CreateGlossary(CreateGlossaryRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v3beta1/{parent=projects/*/locations/*}/glossaries"
+ body: "glossary"
+ };
+ option (google.api.method_signature) = "parent,glossary";
+ option (google.longrunning.operation_info) = {
+ response_type: "Glossary"
+ metadata_type: "CreateGlossaryMetadata"
+ };
+ }
+
+ // Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't
+ // exist.
+ rpc ListGlossaries(ListGlossariesRequest) returns (ListGlossariesResponse) {
+ option (google.api.http) = {
+ get: "/v3beta1/{parent=projects/*/locations/*}/glossaries"
+ };
+ option (google.api.method_signature) = "parent";
+ option (google.api.method_signature) = "parent,filter";
+ }
+
+ // Gets a glossary. Returns NOT_FOUND, if the glossary doesn't
+ // exist.
+ rpc GetGlossary(GetGlossaryRequest) returns (Glossary) {
+ option (google.api.http) = {
+ get: "/v3beta1/{name=projects/*/locations/*/glossaries/*}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Deletes a glossary, or cancels glossary construction
+ // if the glossary isn't created yet.
+ // Returns NOT_FOUND, if the glossary doesn't exist.
+ rpc DeleteGlossary(DeleteGlossaryRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ delete: "/v3beta1/{name=projects/*/locations/*/glossaries/*}"
+ };
+ option (google.api.method_signature) = "name";
+ option (google.longrunning.operation_info) = {
+ response_type: "DeleteGlossaryResponse"
+ metadata_type: "DeleteGlossaryMetadata"
+ };
+ }
+}
+
+// Configures which glossary should be used for a specific target language,
+// and defines options for applying that glossary.
+message TranslateTextGlossaryConfig {
+ // Required. Specifies the glossary used for this translation. Use
+ // this format: projects/*/locations/*/glossaries/*
+ string glossary = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Optional. Indicates match is case-insensitive.
+ // Default value is false if missing.
+ bool ignore_case = 2 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// The request message for synchronous translation.
+message TranslateTextRequest {
+ // Required. The content of the input in string format.
+ // We recommend the total content be less than 30k codepoints.
+ // Use BatchTranslateText for larger text.
+ repeated string contents = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Optional. The format of the source text, for example, "text/html",
+ // "text/plain". If left blank, the MIME type defaults to "text/html".
+ string mime_type = 3 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The BCP-47 language code of the input text if
+ // known, for example, "en-US" or "sr-Latn". Supported language codes are
+ // listed in Language Support. If the source language isn't specified, the API
+ // attempts to identify the source language automatically and returns the
+ // source language within the response.
+ string source_language_code = 4 [(google.api.field_behavior) = OPTIONAL];
+
+ // Required. The BCP-47 language code to use for translation of the input
+ // text, set to one of the language codes listed in Language Support.
+ string target_language_code = 5 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Project or location to make a call. Must refer to a caller's
+ // project.
+ //
+ // Format: `projects/{project-id}` or
+ // `projects/{project-id}/locations/{location-id}`.
+ //
+ // For global calls, use `projects/{project-id}/locations/global` or
+ // `projects/{project-id}`.
+ //
+ // Non-global location is required for requests using AutoML models or
+ // custom glossaries.
+ //
+ // Models and glossaries must be within the same region (have same
+ // location-id), otherwise an INVALID_ARGUMENT (400) error is returned.
+ string parent = 8 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // Optional. The `model` type requested for this translation.
+ //
+ // The format depends on model type:
+ //
+ // - AutoML Translation models:
+ // `projects/{project-id}/locations/{location-id}/models/{model-id}`
+ //
+ // - General (built-in) models:
+ // `projects/{project-id}/locations/{location-id}/models/general/nmt`,
+ // `projects/{project-id}/locations/{location-id}/models/general/base`
+ //
+ //
+ // For global (non-regionalized) requests, use `location-id` `global`.
+ // For example,
+ // `projects/{project-id}/locations/global/models/general/nmt`.
+ //
+ // If missing, the system decides which google base model to use.
+ string model = 6 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Glossary to be applied. The glossary must be
+ // within the same region (have the same location-id) as the model, otherwise
+ // an INVALID_ARGUMENT (400) error is returned.
+ TranslateTextGlossaryConfig glossary_config = 7 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The labels with user-defined metadata for the request.
+ //
+ // Label keys and values can be no longer than 63 characters
+ // (Unicode codepoints), can only contain lowercase letters, numeric
+ // characters, underscores and dashes. International characters are allowed.
+ // Label values are optional. Label keys must start with a letter.
+ //
+ // See https://cloud.google.com/translate/docs/labels for more information.
+ map<string, string> labels = 10 [(google.api.field_behavior) = OPTIONAL];
+}
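The parent/model/glossary rules above are easiest to read off a concrete call. A minimal sketch, assuming the v3beta1 surface is exposed as `TranslationServiceClient` in `@google-cloud/translate` (package and class names are assumptions):

  import {TranslationServiceClient} from '@google-cloud/translate';

  async function translate(projectId: string) {
    const client = new TranslationServiceClient();
    const [response] = await client.translateText({
      // `global` works for built-in models; AutoML models and glossaries
      // need a specific regional location instead.
      parent: `projects/${projectId}/locations/global`,
      contents: ['Hello, world!'],
      mimeType: 'text/plain',
      targetLanguageCode: 'de',
    });
    for (const t of response.translations ?? []) {
      console.log(t.translatedText, t.detectedLanguageCode);
    }
  }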
+
+message TranslateTextResponse {
+ // Text translation responses with no glossary applied.
+ // This field has the same length as
+ // [`contents`][google.cloud.translation.v3beta1.TranslateTextRequest.contents].
+ repeated Translation translations = 1;
+
+ // Text translation responses if a glossary is provided in the request.
+ // This can be the same as
+ // [`translations`][google.cloud.translation.v3beta1.TranslateTextResponse.translations] if no terms apply.
+ // This field has the same length as
+ // [`contents`][google.cloud.translation.v3beta1.TranslateTextRequest.contents].
+ repeated Translation glossary_translations = 3;
+}
+
+// A single translation response.
+message Translation {
+ // Text translated into the target language.
+ string translated_text = 1;
+
+ // Only present when `model` is present in the request.
+ // This is same as `model` provided in the request.
+ string model = 2;
+
+ // The BCP-47 language code of source text in the initial request, detected
+ // automatically, if no source language was passed within the initial
+ // request. If the source language was passed, auto-detection of the language
+ // does not occur and this field is empty.
+ string detected_language_code = 4;
+
+ // The `glossary_config` used for this translation.
+ TranslateTextGlossaryConfig glossary_config = 3;
+}
+
+// The request message for language detection.
+message DetectLanguageRequest {
+ // Required. Project or location to make a call. Must refer to a caller's
+ // project.
+ //
+ // Format: `projects/{project-id}/locations/{location-id}` or
+ // `projects/{project-id}`.
+ //
+ // For global calls, use `projects/{project-id}/locations/global` or
+ // `projects/{project-id}`.
+ //
+ // Only models within the same region (has same location-id) can be used.
+ // Otherwise an INVALID_ARGUMENT (400) error is returned.
+ string parent = 5 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // Optional. The language detection model to be used.
+ //
+ // Format:
+ // `projects/{project-id}/locations/{location-id}/models/language-detection/{model-id}`
+ //
+ // Only one language detection model is currently supported:
+ // `projects/{project-id}/locations/{location-id}/models/language-detection/default`.
+ //
+ // If not specified, the default model is used.
+ string model = 4 [(google.api.field_behavior) = OPTIONAL];
+
+ // Required. The source of the document from which to detect the language.
+ oneof source {
+ // The content of the input stored as a string.
+ string content = 1;
+ }
+
+ // Optional. The format of the source text, for example, "text/html",
+ // "text/plain". If left blank, the MIME type defaults to "text/html".
+ string mime_type = 3 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The labels with user-defined metadata for the request.
+ //
+ // Label keys and values can be no longer than 63 characters
+ // (Unicode codepoints), can only contain lowercase letters, numeric
+ // characters, underscores and dashes. International characters are allowed.
+ // Label values are optional. Label keys must start with a letter.
+ //
+ // See https://cloud.google.com/translate/docs/labels for more information.
+ map<string, string> labels = 6;
+}
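DetectLanguage takes the text through the `source` oneof; a short sketch with the same assumed client:

  import {TranslationServiceClient} from '@google-cloud/translate';

  async function detect(projectId: string, text: string) {
    const client = new TranslationServiceClient();
    const [response] = await client.detectLanguage({
      parent: `projects/${projectId}/locations/global`,
      content: text,          // the `source` oneof
      mimeType: 'text/plain', // defaults to text/html when omitted
    });
    // Results arrive sorted by confidence, most probable language first.
    console.log(response.languages?.[0]?.languageCode);
  }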
+
+// The response message for language detection.
+message DetectedLanguage {
+ // The BCP-47 language code of source content in the request, detected
+ // automatically.
+ string language_code = 1;
+
+ // The confidence of the detection result for this language.
+ float confidence = 2;
+}
+
+// The response message for language detection.
+message DetectLanguageResponse {
+ // A list of detected languages sorted by detection confidence in descending
+ // order. The most probable language first.
+ repeated DetectedLanguage languages = 1;
+}
+
+// The request message for discovering supported languages.
+message GetSupportedLanguagesRequest {
+ // Required. Project or location to make a call. Must refer to a caller's
+ // project.
+ //
+ // Format: `projects/{project-id}` or
+ // `projects/{project-id}/locations/{location-id}`.
+ //
+ // For global calls, use `projects/{project-id}/locations/global` or
+ // `projects/{project-id}`.
+ //
+ // Non-global location is required for AutoML models.
+ //
+ // Only models within the same region (have same location-id) can be used,
+ // otherwise an INVALID_ARGUMENT (400) error is returned.
+ string parent = 3 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // Optional. The language to use to return localized, human readable names
+ // of supported languages. If missing, then display names are not returned
+ // in a response.
+ string display_language_code = 1 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Get supported languages of this model.
+ //
+ // The format depends on model type:
+ //
+ // - AutoML Translation models:
+ // `projects/{project-id}/locations/{location-id}/models/{model-id}`
+ //
+ // - General (built-in) models:
+ // `projects/{project-id}/locations/{location-id}/models/general/nmt`,
+ // `projects/{project-id}/locations/{location-id}/models/general/base`
+ //
+ //
+ // Returns languages supported by the specified model.
+ // If missing, we get supported languages of Google general base (PBMT) model.
+ string model = 2 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// The response message for discovering supported languages.
+message SupportedLanguages {
+ // A list of supported language responses. This list contains an entry
+ // for each language the Translation API supports.
+ repeated SupportedLanguage languages = 1;
+}
+
+// A single supported language response corresponds to information related
+// to one supported language.
+message SupportedLanguage {
+ // Supported language code, generally consisting of its ISO 639-1
+ // identifier, for example, 'en', 'ja'. In certain cases, BCP-47 codes
+ // including language and region identifiers are returned (for example,
+ // 'zh-TW' and 'zh-CN')
+ string language_code = 1;
+
+ // Human readable name of the language localized in the display language
+ // specified in the request.
+ string display_name = 2;
+
+ // Can be used as source language.
+ bool support_source = 3;
+
+ // Can be used as target language.
+ bool support_target = 4;
+}
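GetSupportedLanguages with a display_language_code returns localized names alongside the source/target flags; a hedged sketch with the same assumed client:

  import {TranslationServiceClient} from '@google-cloud/translate';

  async function supportedLanguages(projectId: string) {
    const client = new TranslationServiceClient();
    const [response] = await client.getSupportedLanguages({
      parent: `projects/${projectId}/locations/global`,
      displayLanguageCode: 'en', // omit to skip display names entirely
    });
    for (const lang of response.languages ?? []) {
      console.log(lang.languageCode, lang.displayName,
                  lang.supportSource, lang.supportTarget);
    }
  }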
+
+// The Google Cloud Storage location for the input content.
+message GcsSource {
+ // Required. Source data URI. For example, `gs://my_bucket/my_object`.
+ string input_uri = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Input configuration for BatchTranslateText request.
+message InputConfig {
+ // Optional. Can be "text/plain" or "text/html".
+ // For `.tsv`, "text/html" is used if mime_type is missing.
+ // For `.html`, this field must be "text/html" or empty.
+ // For `.txt`, this field must be "text/plain" or empty.
+ string mime_type = 1 [(google.api.field_behavior) = OPTIONAL];
+
+ // Required. Specify the input.
+ oneof source {
+ // Required. Google Cloud Storage location for the source input.
+ // This can be a single file (for example,
+ // `gs://translation-test/input.tsv`) or a wildcard (for example,
+ // `gs://translation-test/*`). If a file extension is `.tsv`, it can
+ // contain either one or two columns. The first column (optional) is the id
+ // of the text request. If the first column is missing, we use the row
+ // number (0-based) from the input file as the ID in the output file. The
+ // second column is the actual text to be
+ // translated. We recommend each row be <= 10K Unicode codepoints,
+ // otherwise an error might be returned.
+ // Note that the input tsv must be RFC 4180 compliant.
+ //
+ // You could use https://github.com/Clever/csvlint to check potential
+ // formatting errors in your tsv file.
+ // csvlint --delimiter='\t' your_input_file.tsv
+ //
+ // The other supported file extensions are `.txt` or `.html`, which is
+ // treated as a single large chunk of text.
+ GcsSource gcs_source = 2;
+ }
+}
+
+// The Google Cloud Storage location for the output content.
+message GcsDestination {
+ // Required. There must be no files under 'output_uri_prefix'.
+ // 'output_uri_prefix' must end with "/" and start with "gs://", otherwise an
+ // INVALID_ARGUMENT (400) error is returned.
+ string output_uri_prefix = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Output configuration for BatchTranslateText request.
+message OutputConfig {
+ // Required. The destination of output.
+ oneof destination {
+ // Google Cloud Storage destination for output content.
+ // For every single input file (for example, gs://a/b/c.[extension]), we
+ // generate at most 2 * n output files. (n is the # of target_language_codes
+ // in the BatchTranslateTextRequest).
+ //
+ // Output files (tsv) generated are compliant with RFC 4180 except that
+ // record delimiters are '\n' instead of '\r\n'. We don't provide any way to
+ // change record delimiters.
+ //
+ // While the input files are being processed, we write/update an index file
+ // 'index.csv' under 'output_uri_prefix' (for example,
+ // gs://translation-test/index.csv) The index file is generated/updated as
+ // new files are being translated. The format is:
+ //
+ // input_file,target_language_code,translations_file,errors_file,
+ // glossary_translations_file,glossary_errors_file
+ //
+ // input_file is one file we matched using gcs_source.input_uri.
+ // target_language_code is provided in the request.
+ // translations_file contains the translations. (details provided below)
+ // errors_file contains the errors during processing of the file. (details
+ // below). Both translations_file and errors_file could be empty
+ // strings if we have no content to output.
+ // glossary_translations_file and glossary_errors_file are always empty
+ // strings if the input_file is tsv. They could also be empty if we have no
+ // content to output.
+ //
+ // Once a row is present in index.csv, the input/output matching never
+ // changes. Callers should also expect all the content in input_file are
+ // processed and ready to be consumed (that is, no partial output file is
+ // written).
+ //
+ // The format of translations_file (for target language code 'trg') is:
+ // gs://translation_test/a_b_c_'trg'_translations.[extension]
+ //
+ // If the input file extension is tsv, the output has the following
+ // columns:
+ // Column 1: ID of the request provided in the input, if it's not
+ // provided in the input, then the input row number is used (0-based).
+ // Column 2: source sentence.
+ // Column 3: translation without applying a glossary. Empty string if there
+ // is an error.
+ // Column 4 (only present if a glossary is provided in the request):
+ // translation after applying the glossary. Empty string if there is an
+ // error applying the glossary. Could be same string as column 3 if there is
+ // no glossary applied.
+ //
+ // If input file extension is a txt or html, the translation is directly
+ // written to the output file. If glossary is requested, a separate
+ // glossary_translations_file has format of
+ // gs://translation_test/a_b_c_'trg'_glossary_translations.[extension]
+ //
+ // The format of errors_file (for target language code 'trg') is:
+ // gs://translation_test/a_b_c_'trg'_errors.[extension]
+ //
+ // If the input file extension is tsv, errors_file contains the following:
+ // Column 1: ID of the request provided in the input; if it's not
+ // provided in the input, then the input row number is used (0-based).
+ // Column 2: source sentence.
+ // Column 3: Error detail for the translation. Could be empty.
+ // Column 4 (only present if a glossary is provided in the request):
+ // Error when applying the glossary.
+ //
+ // If the input file extension is txt or html, a glossary_error_file
+ // containing error details is generated. glossary_error_file has the format
+ // gs://translation_test/a_b_c_'trg'_glossary_errors.[extension]
+ GcsDestination gcs_destination = 1;
+ }
+}
+
+// The batch translation request.
+message BatchTranslateTextRequest {
+ // Required. Location to make a call. Must refer to a caller's project.
+ //
+ // Format: `projects/{project-id}/locations/{location-id}`.
+ //
+ // The `global` location is not supported for batch translation.
+ //
+ // Only AutoML Translation models or glossaries within the same region
+ // (having the same location-id) can be used, otherwise an INVALID_ARGUMENT
+ // (400) error is returned.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // Required. Source language code.
+ string source_language_code = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Specify up to 10 language codes here.
+ repeated string target_language_codes = 3 [(google.api.field_behavior) = REQUIRED];
+
+ // Optional. The models to use for translation. Map's key is target language
+ // code. Map's value is model name. Value can be a built-in general model,
+ // or an AutoML Translation model.
+ //
+ // The value format depends on model type:
+ //
+ // - AutoML Translation models:
+ // `projects/{project-id}/locations/{location-id}/models/{model-id}`
+ //
+ // - General (built-in) models:
+ // `projects/{project-id}/locations/{location-id}/models/general/nmt`,
+ // `projects/{project-id}/locations/{location-id}/models/general/base`
+ //
+ //
+ // If the map is empty or a specific model is not requested for a language
+ // pair, then the default Google model (nmt) is used.
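+ //
+ // As an illustration (hypothetical project and location), the entry
+ // "ja" -> "projects/my-project/locations/us-central1/models/general/nmt"
+ // requests the built-in NMT model for Japanese output.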
+ map<string, string> models = 4 [(google.api.field_behavior) = OPTIONAL];
+
+ // Required. Input configurations.
+ // The total number of files matched should be <= 1000.
+ // The total content size should be <= 100M Unicode codepoints.
+ // The files must use UTF-8 encoding.
+ repeated InputConfig input_configs = 5 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Output configuration.
+ // If 2 input configs match the same file (that is, the same input path),
+ // we don't generate output for duplicate inputs.
+ OutputConfig output_config = 6 [(google.api.field_behavior) = REQUIRED];
+
+ // Optional. Glossaries to be applied for translation.
+ // It's keyed by target language code.
+ map<string, TranslateTextGlossaryConfig> glossaries = 7 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The labels with user-defined metadata for the request.
+ //
+ // Label keys and values can be no longer than 63 characters
+ // (Unicode codepoints), can only contain lowercase letters, numeric
+ // characters, underscores and dashes. International characters are allowed.
+ // Label values are optional. Label keys must start with a letter.
+ //
+ // See https://cloud.google.com/translate/docs/labels for more information.
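+ //
+ // An illustrative (hypothetical) label entry: "environment" -> "batch-test".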
+ map<string, string> labels = 9 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// State metadata for the batch translation operation.
+message BatchTranslateMetadata {
+ // State of the job.
+ enum State {
+ // Invalid.
+ STATE_UNSPECIFIED = 0;
+
+ // Request is being processed.
+ RUNNING = 1;
+
+ // The batch is processed, and at least one item was successfully
+ // processed.
+ SUCCEEDED = 2;
+
+ // The batch is done and no item was successfully processed.
+ FAILED = 3;
+
+ // Request is in the process of being canceled after the caller invoked
+ // longrunning.Operations.CancelOperation on the request id.
+ CANCELLING = 4;
+
+ // The batch is done after the user has called the
+ // longrunning.Operations.CancelOperation. Any records processed before the
+ // cancel command are output as specified in the request.
+ CANCELLED = 5;
+ }
+
+ // The state of the operation.
+ State state = 1;
+
+ // Number of successfully translated characters so far (Unicode codepoints).
+ int64 translated_characters = 2;
+
+ // Number of characters that have failed to process so far (Unicode
+ // codepoints).
+ int64 failed_characters = 3;
+
+ // Total number of characters (Unicode codepoints).
+ // This is the total number of codepoints from input files times the number of
+ // target languages and appears here shortly after the call is submitted.
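+ // For example (illustrative numbers), 1,000,000 input codepoints translated
+ // into 3 target languages yields total_characters = 3,000,000.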
+ int64 total_characters = 4;
+
+ // Time when the operation was submitted.
+ google.protobuf.Timestamp submit_time = 5;
+}
+
+// Stored in the [google.longrunning.Operation.response][google.longrunning.Operation.response] field returned by
+// BatchTranslateText if at least one sentence is translated successfully.
+message BatchTranslateResponse {
+ // Total number of characters (Unicode codepoints).
+ int64 total_characters = 1;
+
+ // Number of successfully translated characters (Unicode codepoints).
+ int64 translated_characters = 2;
+
+ // Number of characters that have failed to process (Unicode codepoints).
+ int64 failed_characters = 3;
+
+ // Time when the operation was submitted.
+ google.protobuf.Timestamp submit_time = 4;
+
+ // The time when the operation is finished and
+ // [google.longrunning.Operation.done][google.longrunning.Operation.done] is set to true.
+ google.protobuf.Timestamp end_time = 5;
+}
+
+// Input configuration for glossaries.
+message GlossaryInputConfig {
+ // Required. Specify the input.
+ oneof source {
+ // Required. Google Cloud Storage location of glossary data.
+ // File format is determined based on the filename extension. API returns
+ // [google.rpc.Code.INVALID_ARGUMENT] for unsupported URIs and file
+ // formats. Wildcards are not allowed. This must be a single file in one of
+ // the following formats:
+ //
+ // For unidirectional glossaries:
+ //
+ // - TSV/CSV (`.tsv`/`.csv`): 2 column file, tab- or comma-separated.
+ // The first column is source text. The second column is target text.
+ // The file must not contain headers. That is, the first row is data, not
+ // column names.
+ //
+ // - TMX (`.tmx`): TMX file with parallel data defining source/target term
+ // pairs.
+ //
+ // For equivalent term sets glossaries:
+ //
+ // - CSV (`.csv`): Multi-column CSV file defining equivalent glossary terms
+ // in multiple languages. The format is defined for Google Translation
+ // Toolkit and documented in [Use a
+ // glossary](https://support.google.com/translatortoolkit/answer/6306379?hl=en).
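+ //
+ // As an illustration (hypothetical terms), a unidirectional `.tsv` glossary
+ // for English to Spanish could contain rows such as:
+ //
+ // account<TAB>cuenta
+ // directory<TAB>directorio
+ //
+ // where <TAB> stands for a literal tab character.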
+ GcsSource gcs_source = 1;
+ }
+}
+
+// Represents a glossary built from user provided data.
+message Glossary {
+ option (google.api.resource) = {
+ type: "translate.googleapis.com/Glossary"
+ pattern: "projects/{project}/locations/{location}/glossaries/{glossary}"
+ };
+
+ // Used with unidirectional glossaries.
+ message LanguageCodePair {
+ // Required. The BCP-47 language code of the input text, for example,
+ // "en-US". Expected to be an exact match for GlossaryTerm.language_code.
+ string source_language_code = 1;
+
+ // Required. The BCP-47 language code for translation output, for example,
+ // "zh-CN". Expected to be an exact match for GlossaryTerm.language_code.
+ string target_language_code = 2;
+ }
+
+ // Used with equivalent term set glossaries.
+ message LanguageCodesSet {
+ // The BCP-47 language code(s) for terms defined in the glossary.
+ // All entries are unique. The list contains at least two entries.
+ // Expected to be an exact match for GlossaryTerm.language_code.
+ repeated string language_codes = 1;
+ }
+
+ // Required. The resource name of the glossary. Glossary names have the form
+ // `projects/{project-id}/locations/{location-id}/glossaries/{glossary-id}`.
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Languages supported by the glossary.
+ oneof languages {
+ // Used with unidirectional glossaries.
+ LanguageCodePair language_pair = 3;
+
+ // Used with equivalent term set glossaries.
+ LanguageCodesSet language_codes_set = 4;
+ }
+
+ // Required. Provides examples to build the glossary from.
+ // Total glossary must not exceed 10M Unicode codepoints.
+ GlossaryInputConfig input_config = 5;
+
+ // Output only. The number of entries defined in the glossary.
+ int32 entry_count = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. When CreateGlossary was called.
+ google.protobuf.Timestamp submit_time = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. When the glossary creation was finished.
+ google.protobuf.Timestamp end_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// Request message for CreateGlossary.
+message CreateGlossaryRequest {
+ // Required. The project name.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // Required. The glossary to create.
+ Glossary glossary = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Request message for GetGlossary.
+message GetGlossaryRequest {
+ // Required. The name of the glossary to retrieve.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "translate.googleapis.com/Glossary"
+ }
+ ];
+}
+
+// Request message for DeleteGlossary.
+message DeleteGlossaryRequest {
+ // Required. The name of the glossary to delete.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "translate.googleapis.com/Glossary"
+ }
+ ];
+}
+
+// Request message for ListGlossaries.
+message ListGlossariesRequest {
+ // Required. The name of the project from which to list all of the glossaries.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "locations.googleapis.com/Location"
+ }
+ ];
+
+ // Optional. Requested page size. The server may return fewer glossaries than
+ // requested. If unspecified, the server picks an appropriate default.
+ int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. A token identifying a page of results the server should return.
+ // Typically, this is the value of [ListGlossariesResponse.next_page_token]
+ // returned from the previous call to the `ListGlossaries` method.
+ // The first page is returned if `page_token` is empty or missing.
+ string page_token = 3 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Filter specifying constraints of a list operation.
+ // Filtering is not supported yet, and the parameter currently has no effect.
+ // If missing, no filtering is performed.
+ string filter = 4 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Response message for ListGlossaries.
+message ListGlossariesResponse {
+ // The list of glossaries for a project.
+ repeated Glossary glossaries = 1;
+
+ // A token to retrieve a page of results. Pass this value in the
+ // [ListGlossariesRequest.page_token] field in the subsequent call to
+ // the `ListGlossaries` method to retrieve the next page of results.
+ string next_page_token = 2;
+}
+
+// Stored in the [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] field returned by
+// CreateGlossary.
+message CreateGlossaryMetadata {
+ // Enumerates the possible states that the creation request can be in.
+ enum State {
+ // Invalid.
+ STATE_UNSPECIFIED = 0;
+
+ // Request is being processed.
+ RUNNING = 1;
+
+ // The glossary was successfully created.
+ SUCCEEDED = 2;
+
+ // Failed to create the glossary.
+ FAILED = 3;
+
+ // Request is in the process of being canceled after the caller invoked
+ // longrunning.Operations.CancelOperation on the request id.
+ CANCELLING = 4;
+
+ // The glossary creation request was successfully canceled.
+ CANCELLED = 5;
+ }
+
+ // The name of the glossary that is being created.
+ string name = 1;
+
+ // The current state of the glossary creation operation.
+ State state = 2;
+
+ // The time when the operation was submitted to the server.
+ google.protobuf.Timestamp submit_time = 3;
+}
+
+// Stored in the [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] field returned by
+// DeleteGlossary.
+message DeleteGlossaryMetadata {
+ // Enumerates the possible states that the deletion request can be in.
+ enum State {
+ // Invalid.
+ STATE_UNSPECIFIED = 0;
+
+ // Request is being processed.
+ RUNNING = 1;
+
+ // The glossary was successfully deleted.
+ SUCCEEDED = 2;
+
+ // Failed to delete the glossary.
+ FAILED = 3;
+
+ // Request is in the process of being canceled after the caller invoked
+ // longrunning.Operations.CancelOperation on the request id.
+ CANCELLING = 4;
+
+ // The glossary deletion request was successfully canceled.
+ CANCELLED = 5;
+ }
+
+ // The name of the glossary that is being deleted.
+ string name = 1;
+
+ // The current state of the glossary deletion operation.
+ State state = 2;
+
+ // The time when the operation was submitted to the server.
+ google.protobuf.Timestamp submit_time = 3;
+}
+
+// Stored in the [google.longrunning.Operation.response][google.longrunning.Operation.response] field returned by
+// DeleteGlossary.
+message DeleteGlossaryResponse {
+ // The name of the deleted glossary.
+ string name = 1;
+
+ // The time when the operation was submitted to the server.
+ google.protobuf.Timestamp submit_time = 2;
+
+ // The time when the glossary deletion is finished and
+ // [google.longrunning.Operation.done][google.longrunning.Operation.done] is set to true.
+ google.protobuf.Timestamp end_time = 3;
+}
diff --git a/typescript/test/protos/google/monitoring/v3/alert.proto b/test-fixtures/protos/google/monitoring/v3/alert.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/alert.proto
rename to test-fixtures/protos/google/monitoring/v3/alert.proto
diff --git a/typescript/test/protos/google/monitoring/v3/alert_service.proto b/test-fixtures/protos/google/monitoring/v3/alert_service.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/alert_service.proto
rename to test-fixtures/protos/google/monitoring/v3/alert_service.proto
diff --git a/typescript/test/protos/google/monitoring/v3/common.proto b/test-fixtures/protos/google/monitoring/v3/common.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/common.proto
rename to test-fixtures/protos/google/monitoring/v3/common.proto
diff --git a/typescript/test/protos/google/monitoring/v3/dropped_labels.proto b/test-fixtures/protos/google/monitoring/v3/dropped_labels.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/dropped_labels.proto
rename to test-fixtures/protos/google/monitoring/v3/dropped_labels.proto
diff --git a/typescript/test/protos/google/monitoring/v3/group.proto b/test-fixtures/protos/google/monitoring/v3/group.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/group.proto
rename to test-fixtures/protos/google/monitoring/v3/group.proto
diff --git a/typescript/test/protos/google/monitoring/v3/group_service.proto b/test-fixtures/protos/google/monitoring/v3/group_service.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/group_service.proto
rename to test-fixtures/protos/google/monitoring/v3/group_service.proto
diff --git a/typescript/test/protos/google/monitoring/v3/metric.proto b/test-fixtures/protos/google/monitoring/v3/metric.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/metric.proto
rename to test-fixtures/protos/google/monitoring/v3/metric.proto
diff --git a/typescript/test/protos/google/monitoring/v3/metric_service.proto b/test-fixtures/protos/google/monitoring/v3/metric_service.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/metric_service.proto
rename to test-fixtures/protos/google/monitoring/v3/metric_service.proto
diff --git a/typescript/test/protos/google/monitoring/v3/mutation_record.proto b/test-fixtures/protos/google/monitoring/v3/mutation_record.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/mutation_record.proto
rename to test-fixtures/protos/google/monitoring/v3/mutation_record.proto
diff --git a/typescript/test/protos/google/monitoring/v3/notification.proto b/test-fixtures/protos/google/monitoring/v3/notification.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/notification.proto
rename to test-fixtures/protos/google/monitoring/v3/notification.proto
diff --git a/typescript/test/protos/google/monitoring/v3/notification_service.proto b/test-fixtures/protos/google/monitoring/v3/notification_service.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/notification_service.proto
rename to test-fixtures/protos/google/monitoring/v3/notification_service.proto
diff --git a/typescript/test/protos/google/monitoring/v3/service.proto b/test-fixtures/protos/google/monitoring/v3/service.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/service.proto
rename to test-fixtures/protos/google/monitoring/v3/service.proto
diff --git a/typescript/test/protos/google/monitoring/v3/service_service.proto b/test-fixtures/protos/google/monitoring/v3/service_service.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/service_service.proto
rename to test-fixtures/protos/google/monitoring/v3/service_service.proto
diff --git a/typescript/test/protos/google/monitoring/v3/span_context.proto b/test-fixtures/protos/google/monitoring/v3/span_context.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/span_context.proto
rename to test-fixtures/protos/google/monitoring/v3/span_context.proto
diff --git a/typescript/test/protos/google/monitoring/v3/uptime.proto b/test-fixtures/protos/google/monitoring/v3/uptime.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/uptime.proto
rename to test-fixtures/protos/google/monitoring/v3/uptime.proto
diff --git a/typescript/test/protos/google/monitoring/v3/uptime_service.proto b/test-fixtures/protos/google/monitoring/v3/uptime_service.proto
similarity index 100%
rename from typescript/test/protos/google/monitoring/v3/uptime_service.proto
rename to test-fixtures/protos/google/monitoring/v3/uptime_service.proto
diff --git a/test-fixtures/protos/google/privacy/dlp/v2/dlp.proto b/test-fixtures/protos/google/privacy/dlp/v2/dlp.proto
new file mode 100644
index 0000000000..0dc4119b8a
--- /dev/null
+++ b/test-fixtures/protos/google/privacy/dlp/v2/dlp.proto
@@ -0,0 +1,3775 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.privacy.dlp.v2;
+
+import "google/api/annotations.proto";
+import "google/api/client.proto";
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/privacy/dlp/v2/storage.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/empty.proto";
+import "google/protobuf/field_mask.proto";
+import "google/protobuf/timestamp.proto";
+import "google/rpc/status.proto";
+import "google/type/date.proto";
+import "google/type/dayofweek.proto";
+import "google/type/timeofday.proto";
+
+option csharp_namespace = "Google.Cloud.Dlp.V2";
+option go_package = "google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp";
+option java_multiple_files = true;
+option java_outer_classname = "DlpProto";
+option java_package = "com.google.privacy.dlp.v2";
+option php_namespace = "Google\\Cloud\\Dlp\\V2";
+
+// The Cloud Data Loss Prevention (DLP) API is a service that allows clients
+// to detect the presence of Personally Identifiable Information (PII) and other
+// privacy-sensitive data in user-supplied, unstructured data streams, like text
+// blocks or images.
+// The service also includes methods for sensitive data redaction and
+// scheduling of data scans on Google Cloud Platform based data sets.
+//
+// To learn more about concepts and find how-to guides see
+// https://cloud.google.com/dlp/docs/.
+service DlpService {
+ option (google.api.default_host) = "dlp.googleapis.com";
+ option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform";
+
+ // Finds potentially sensitive info in content.
+ // This method has limits on input size, processing time, and output size.
+ //
+ // When no InfoTypes or CustomInfoTypes are specified in this request, the
+ // system will automatically choose what detectors to run. By default this may
+ // be all types, but may change over time as detectors are updated.
+ //
+ // For how-to guides, see https://cloud.google.com/dlp/docs/inspecting-images
+ // and https://cloud.google.com/dlp/docs/inspecting-text.
+ rpc InspectContent(InspectContentRequest) returns (InspectContentResponse) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*}/content:inspect"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/content:inspect"
+ body: "*"
+ }
+ };
+ }
+
+ // Redacts potentially sensitive info from an image.
+ // This method has limits on input size, processing time, and output size.
+ // See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to
+ // learn more.
+ //
+ // When no InfoTypes or CustomInfoTypes are specified in this request, the
+ // system will automatically choose what detectors to run. By default this may
+ // be all types, but may change over time as detectors are updated.
+ rpc RedactImage(RedactImageRequest) returns (RedactImageResponse) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*}/image:redact"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/image:redact"
+ body: "*"
+ }
+ };
+ }
+
+ // De-identifies potentially sensitive info from a ContentItem.
+ // This method has limits on input size and output size.
+ // See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to
+ // learn more.
+ //
+ // When no InfoTypes or CustomInfoTypes are specified in this request, the
+ // system will automatically choose what detectors to run. By default this may
+ // be all types, but may change over time as detectors are updated.
+ rpc DeidentifyContent(DeidentifyContentRequest) returns (DeidentifyContentResponse) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*}/content:deidentify"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/content:deidentify"
+ body: "*"
+ }
+ };
+ }
+
+ // Re-identifies content that has been de-identified.
+ // See
+ // https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example
+ // to learn more.
+ rpc ReidentifyContent(ReidentifyContentRequest) returns (ReidentifyContentResponse) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*}/content:reidentify"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/content:reidentify"
+ body: "*"
+ }
+ };
+ }
+
+ // Returns a list of the sensitive information types that the DLP API
+ // supports. See https://cloud.google.com/dlp/docs/infotypes-reference to
+ // learn more.
+ rpc ListInfoTypes(ListInfoTypesRequest) returns (ListInfoTypesResponse) {
+ option (google.api.http) = {
+ get: "/v2/infoTypes"
+ additional_bindings {
+ get: "/v2/locations/{location_id}/infoTypes"
+ }
+ };
+ option (google.api.method_signature) = "location_id";
+ }
+
+ // Creates an InspectTemplate for re-using frequently used configuration
+ // for inspecting content, images, and storage.
+ // See https://cloud.google.com/dlp/docs/creating-templates to learn more.
+ rpc CreateInspectTemplate(CreateInspectTemplateRequest) returns (InspectTemplate) {
+ option (google.api.http) = {
+ post: "/v2/{parent=organizations/*}/inspectTemplates"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=organizations/*}/locations/{location_id}/inspectTemplates"
+ body: "*"
+ }
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/inspectTemplates"
+ body: "*"
+ }
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/inspectTemplates"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "parent,inspect_template";
+ option (google.api.method_signature) = "parent,inspect_template,location_id";
+ }
+
+ // Updates the InspectTemplate.
+ // See https://cloud.google.com/dlp/docs/creating-templates to learn more.
+ rpc UpdateInspectTemplate(UpdateInspectTemplateRequest) returns (InspectTemplate) {
+ option (google.api.http) = {
+ patch: "/v2/{name=organizations/*/inspectTemplates/*}"
+ body: "*"
+ additional_bindings {
+ patch: "/v2/{name=organizations/*/locations/*/inspectTemplates/*}"
+ body: "*"
+ }
+ additional_bindings {
+ patch: "/v2/{name=projects/*/inspectTemplates/*}"
+ body: "*"
+ }
+ additional_bindings {
+ patch: "/v2/{name=projects/*/locations/*/inspectTemplates/*}"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "name,inspect_template,update_mask";
+ }
+
+ // Gets an InspectTemplate.
+ // See https://cloud.google.com/dlp/docs/creating-templates to learn more.
+ rpc GetInspectTemplate(GetInspectTemplateRequest) returns (InspectTemplate) {
+ option (google.api.http) = {
+ get: "/v2/{name=organizations/*/inspectTemplates/*}"
+ additional_bindings {
+ get: "/v2/{name=organizations/*/locations/*/inspectTemplates/*}"
+ }
+ additional_bindings {
+ get: "/v2/{name=projects/*/inspectTemplates/*}"
+ }
+ additional_bindings {
+ get: "/v2/{name=projects/*/locations/*/inspectTemplates/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Lists InspectTemplates.
+ // See https://cloud.google.com/dlp/docs/creating-templates to learn more.
+ rpc ListInspectTemplates(ListInspectTemplatesRequest) returns (ListInspectTemplatesResponse) {
+ option (google.api.http) = {
+ get: "/v2/{parent=organizations/*}/inspectTemplates"
+ additional_bindings {
+ get: "/v2/{parent=organizations/*}/locations/{location_id}/inspectTemplates"
+ }
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/inspectTemplates"
+ }
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/locations/{location_id}/inspectTemplates"
+ }
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Deletes an InspectTemplate.
+ // See https://cloud.google.com/dlp/docs/creating-templates to learn more.
+ rpc DeleteInspectTemplate(DeleteInspectTemplateRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v2/{name=organizations/*/inspectTemplates/*}"
+ additional_bindings {
+ delete: "/v2/{name=organizations/*/locations/*/inspectTemplates/*}"
+ }
+ additional_bindings {
+ delete: "/v2/{name=projects/*/inspectTemplates/*}"
+ }
+ additional_bindings {
+ delete: "/v2/{name=projects/*/locations/*/inspectTemplates/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Creates a DeidentifyTemplate for re-using frequently used configuration
+ // for de-identifying content, images, and storage.
+ // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
+ // more.
+ rpc CreateDeidentifyTemplate(CreateDeidentifyTemplateRequest) returns (DeidentifyTemplate) {
+ option (google.api.http) = {
+ post: "/v2/{parent=organizations/*}/deidentifyTemplates"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=organizations/*}/locations/{location_id}/deidentifyTemplates"
+ body: "*"
+ }
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/deidentifyTemplates"
+ body: "*"
+ }
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/deidentifyTemplates"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "parent,deidentify_template";
+ option (google.api.method_signature) = "parent,deidentify_template,location_id";
+ }
+
+ // Updates the DeidentifyTemplate.
+ // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
+ // more.
+ rpc UpdateDeidentifyTemplate(UpdateDeidentifyTemplateRequest) returns (DeidentifyTemplate) {
+ option (google.api.http) = {
+ patch: "/v2/{name=organizations/*/deidentifyTemplates/*}"
+ body: "*"
+ additional_bindings {
+ patch: "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}"
+ body: "*"
+ }
+ additional_bindings {
+ patch: "/v2/{name=projects/*/deidentifyTemplates/*}"
+ body: "*"
+ }
+ additional_bindings {
+ patch: "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "name,deidentify_template,update_mask";
+ }
+
+ // Gets a DeidentifyTemplate.
+ // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
+ // more.
+ rpc GetDeidentifyTemplate(GetDeidentifyTemplateRequest) returns (DeidentifyTemplate) {
+ option (google.api.http) = {
+ get: "/v2/{name=organizations/*/deidentifyTemplates/*}"
+ additional_bindings {
+ get: "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}"
+ }
+ additional_bindings {
+ get: "/v2/{name=projects/*/deidentifyTemplates/*}"
+ }
+ additional_bindings {
+ get: "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Lists DeidentifyTemplates.
+ // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
+ // more.
+ rpc ListDeidentifyTemplates(ListDeidentifyTemplatesRequest) returns (ListDeidentifyTemplatesResponse) {
+ option (google.api.http) = {
+ get: "/v2/{parent=organizations/*}/deidentifyTemplates"
+ additional_bindings {
+ get: "/v2/{parent=organizations/*}/locations/{location_id}/deidentifyTemplates"
+ }
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/deidentifyTemplates"
+ }
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/locations/{location_id}/deidentifyTemplates"
+ }
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Deletes a DeidentifyTemplate.
+ // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
+ // more.
+ rpc DeleteDeidentifyTemplate(DeleteDeidentifyTemplateRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v2/{name=organizations/*/deidentifyTemplates/*}"
+ additional_bindings {
+ delete: "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}"
+ }
+ additional_bindings {
+ delete: "/v2/{name=projects/*/deidentifyTemplates/*}"
+ }
+ additional_bindings {
+ delete: "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Creates a job trigger to run DLP actions such as scanning storage for
+ // sensitive information on a set schedule.
+ // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.
+ rpc CreateJobTrigger(CreateJobTriggerRequest) returns (JobTrigger) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*}/jobTriggers"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/jobTriggers"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "parent,job_trigger";
+ option (google.api.method_signature) = "parent,job_trigger,location_id";
+ }
+
+ // Updates a job trigger.
+ // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.
+ rpc UpdateJobTrigger(UpdateJobTriggerRequest) returns (JobTrigger) {
+ option (google.api.http) = {
+ patch: "/v2/{name=projects/*/jobTriggers/*}"
+ body: "*"
+ additional_bindings {
+ patch: "/v2/{name=projects/*/locations/*/jobTriggers/*}"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "name,job_trigger,update_mask";
+ }
+
+ // Gets a job trigger.
+ // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.
+ rpc GetJobTrigger(GetJobTriggerRequest) returns (JobTrigger) {
+ option (google.api.http) = {
+ get: "/v2/{name=projects/*/jobTriggers/*}"
+ additional_bindings {
+ get: "/v2/{name=projects/*/locations/*/jobTriggers/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Lists job triggers.
+ // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.
+ rpc ListJobTriggers(ListJobTriggersRequest) returns (ListJobTriggersResponse) {
+ option (google.api.http) = {
+ get: "/v2/{parent=projects/*}/jobTriggers"
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/locations/{location_id}/jobTriggers"
+ }
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Deletes a job trigger.
+ // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.
+ rpc DeleteJobTrigger(DeleteJobTriggerRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v2/{name=projects/*/jobTriggers/*}"
+ additional_bindings {
+ delete: "/v2/{name=projects/*/locations/*/jobTriggers/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Activate a job trigger. Causes the immediate execution of a trigger
+ // instead of waiting on the trigger event to occur.
+ rpc ActivateJobTrigger(ActivateJobTriggerRequest) returns (DlpJob) {
+ option (google.api.http) = {
+ post: "/v2/{name=projects/*/jobTriggers/*}:activate"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{name=projects/*/locations/*/jobTriggers/*}:activate"
+ body: "*"
+ }
+ };
+ }
+
+ // Creates a new job to inspect storage or calculate risk metrics.
+ // See https://cloud.google.com/dlp/docs/inspecting-storage and
+ // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.
+ //
+ // When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the
+ // system will automatically choose what detectors to run. By default this may
+ // be all types, but may change over time as detectors are updated.
+ rpc CreateDlpJob(CreateDlpJobRequest) returns (DlpJob) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*}/dlpJobs"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/dlpJobs"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "parent,inspect_job";
+ option (google.api.method_signature) = "parent,inspect_job,location_id";
+ option (google.api.method_signature) = "parent,risk_job";
+ option (google.api.method_signature) = "parent,risk_job,location_id";
+ }
+
+ // Lists DlpJobs that match the specified filter in the request.
+ // See https://cloud.google.com/dlp/docs/inspecting-storage and
+ // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.
+ rpc ListDlpJobs(ListDlpJobsRequest) returns (ListDlpJobsResponse) {
+ option (google.api.http) = {
+ get: "/v2/{parent=projects/*}/dlpJobs"
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/locations/{location_id}/dlpJobs"
+ }
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Gets the latest state of a long-running DlpJob.
+ // See https://cloud.google.com/dlp/docs/inspecting-storage and
+ // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.
+ rpc GetDlpJob(GetDlpJobRequest) returns (DlpJob) {
+ option (google.api.http) = {
+ get: "/v2/{name=projects/*/dlpJobs/*}"
+ additional_bindings {
+ get: "/v2/{name=projects/*/locations/*/dlpJobs/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Deletes a long-running DlpJob. This method indicates that the client is
+ // no longer interested in the DlpJob result. The job will be cancelled if
+ // possible.
+ // See https://cloud.google.com/dlp/docs/inspecting-storage and
+ // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.
+ rpc DeleteDlpJob(DeleteDlpJobRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v2/{name=projects/*/dlpJobs/*}"
+ additional_bindings {
+ delete: "/v2/{name=projects/*/locations/*/dlpJobs/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Starts asynchronous cancellation on a long-running DlpJob. The server
+ // makes a best effort to cancel the DlpJob, but success is not
+ // guaranteed.
+ // See https://cloud.google.com/dlp/docs/inspecting-storage and
+ // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.
+ rpc CancelDlpJob(CancelDlpJobRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ post: "/v2/{name=projects/*/dlpJobs/*}:cancel"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel"
+ body: "*"
+ }
+ };
+ }
+
+ // Creates a pre-built stored infoType to be used for inspection.
+ // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
+ // learn more.
+ rpc CreateStoredInfoType(CreateStoredInfoTypeRequest) returns (StoredInfoType) {
+ option (google.api.http) = {
+ post: "/v2/{parent=organizations/*}/storedInfoTypes"
+ body: "*"
+ additional_bindings {
+ post: "/v2/{parent=organizations/*}/locations/{location_id}/storedInfoTypes"
+ body: "*"
+ }
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/storedInfoTypes"
+ body: "*"
+ }
+ additional_bindings {
+ post: "/v2/{parent=projects/*}/locations/{location_id}/storedInfoTypes"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "parent,config";
+ option (google.api.method_signature) = "parent,config,location_id";
+ }
+
+ // Updates the stored infoType by creating a new version. The existing version
+ // will continue to be used until the new version is ready.
+ // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
+ // learn more.
+ rpc UpdateStoredInfoType(UpdateStoredInfoTypeRequest) returns (StoredInfoType) {
+ option (google.api.http) = {
+ patch: "/v2/{name=organizations/*/storedInfoTypes/*}"
+ body: "*"
+ additional_bindings {
+ patch: "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}"
+ body: "*"
+ }
+ additional_bindings {
+ patch: "/v2/{name=projects/*/storedInfoTypes/*}"
+ body: "*"
+ }
+ additional_bindings {
+ patch: "/v2/{name=projects/*/locations/*/storedInfoTypes/*}"
+ body: "*"
+ }
+ };
+ option (google.api.method_signature) = "name,config,update_mask";
+ }
+
+ // Gets a stored infoType.
+ // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
+ // learn more.
+ rpc GetStoredInfoType(GetStoredInfoTypeRequest) returns (StoredInfoType) {
+ option (google.api.http) = {
+ get: "/v2/{name=organizations/*/storedInfoTypes/*}"
+ additional_bindings {
+ get: "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}"
+ }
+ additional_bindings {
+ get: "/v2/{name=projects/*/storedInfoTypes/*}"
+ }
+ additional_bindings {
+ get: "/v2/{name=projects/*/locations/*/storedInfoTypes/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Lists stored infoTypes.
+ // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
+ // learn more.
+ rpc ListStoredInfoTypes(ListStoredInfoTypesRequest) returns (ListStoredInfoTypesResponse) {
+ option (google.api.http) = {
+ get: "/v2/{parent=organizations/*}/storedInfoTypes"
+ additional_bindings {
+ get: "/v2/{parent=organizations/*}/locations/{location_id}/storedInfoTypes"
+ }
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/storedInfoTypes"
+ }
+ additional_bindings {
+ get: "/v2/{parent=projects/*}/locations/{location_id}/storedInfoTypes"
+ }
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Deletes a stored infoType.
+ // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
+ // learn more.
+ rpc DeleteStoredInfoType(DeleteStoredInfoTypeRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v2/{name=organizations/*/storedInfoTypes/*}"
+ additional_bindings {
+ delete: "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}"
+ }
+ additional_bindings {
+ delete: "/v2/{name=projects/*/storedInfoTypes/*}"
+ }
+ additional_bindings {
+ delete: "/v2/{name=projects/*/locations/*/storedInfoTypes/*}"
+ }
+ };
+ option (google.api.method_signature) = "name";
+ }
+}
+
+// List of exclude infoTypes.
+message ExcludeInfoTypes {
+ // The infoType list in an ExclusionRule drops a finding when it overlaps
+ // with, or is contained within, a finding of an infoType from this list.
+ // For example, with `InspectionRuleSet.info_types` containing "PHONE_NUMBER"
+ // and `exclusion_rule` containing `exclude_info_types.info_types` with
+ // "EMAIL_ADDRESS", phone number findings are dropped if they overlap with
+ // an EMAIL_ADDRESS finding.
+ // As a result, "555-222-2222@example.org" generates only a single finding,
+ // namely the email address.
+ repeated InfoType info_types = 1;
+}
+
+// The rule that specifies conditions when findings of infoTypes specified in
+// `InspectionRuleSet` are removed from results.
+message ExclusionRule {
+ // Exclusion rule types.
+ oneof type {
+ // Dictionary which defines the rule.
+ CustomInfoType.Dictionary dictionary = 1;
+
+ // Regular expression which defines the rule.
+ CustomInfoType.Regex regex = 2;
+
+ // Set of infoTypes for which findings would affect this rule.
+ ExcludeInfoTypes exclude_info_types = 3;
+ }
+
+ // How the rule is applied, see MatchingType documentation for details.
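+ // For example (illustrative), a Regex rule used with
+ // MATCHING_TYPE_FULL_MATCH drops only findings whose text matches the
+ // regular expression in full.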
+ MatchingType matching_type = 4;
+}
+
+// A single inspection rule to be applied to infoTypes, specified in
+// `InspectionRuleSet`.
+message InspectionRule {
+ // Inspection rule types.
+ oneof type {
+ // Hotword-based detection rule.
+ CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
+
+ // Exclusion rule.
+ ExclusionRule exclusion_rule = 2;
+ }
+}
+
+// Rule set for modifying a set of infoTypes to alter behavior under certain
+// circumstances, depending on the specific details of the rules within the set.
+message InspectionRuleSet {
+ // List of infoTypes this rule set is applied to.
+ repeated InfoType info_types = 1;
+
+ // Set of rules to be applied to infoTypes. The rules are applied in order.
+ repeated InspectionRule rules = 2;
+}
+
+// Configuration description of the scanning process.
+// When used with redactContent, only info_types and min_likelihood are
+// currently used.
+message InspectConfig {
+ // Configuration to control the number of findings returned.
+ message FindingLimits {
+ // Max findings configuration per infoType, per content item or long
+ // running DlpJob.
+ message InfoTypeLimit {
+ // Type of information the findings limit applies to. Only one limit per
+ // info_type should be provided. If InfoTypeLimit does not have an
+ // info_type, the DLP API applies the limit against all info_types that
+ // are found but not specified in another InfoTypeLimit.
+ InfoType info_type = 1;
+
+ // Max findings limit for the given infoType.
+ int32 max_findings = 2;
+ }
+
+ // Max number of findings that will be returned for each item scanned.
+ // When set within `InspectDataSourceRequest`,
+ // the maximum returned is 2000 regardless of whether this is set higher.
+ // When set within `InspectContentRequest`, this field is ignored.
+ int32 max_findings_per_item = 1;
+
+ // Max number of findings that will be returned per request/job.
+ // When set within `InspectContentRequest`, the maximum returned is 2000
+ // regardless of whether this is set higher.
+ int32 max_findings_per_request = 2;
+
+ // Configuration of findings limit given for specified infoTypes.
+ repeated InfoTypeLimit max_findings_per_info_type = 3;
+ }
+
+ // Restricts what info_types to look for. The values must correspond to
+ // InfoType values returned by ListInfoTypes or listed at
+ // https://cloud.google.com/dlp/docs/infotypes-reference.
+ //
+ // When no InfoTypes or CustomInfoTypes are specified in a request, the
+ // system may automatically choose what detectors to run. By default this may
+ // be all types, but may change over time as detectors are updated.
+ //
+ // If you need precise control and predictability as to what detectors are
+ // run, you should specify the specific InfoTypes listed in the reference;
+ // otherwise a default list will be used, which may change over time.
+ repeated InfoType info_types = 1;
+
+ // Only returns findings equal to or above this threshold. The default is
+ // POSSIBLE.
+ // See https://cloud.google.com/dlp/docs/likelihood to learn more.
+ Likelihood min_likelihood = 2;
+
+ // Configuration to control the number of findings returned.
+ FindingLimits limits = 3;
+
+ // When true, a contextual quote from the data that triggered a finding is
+ // included in the response; see Finding.quote.
+ bool include_quote = 4;
+
+ // When true, excludes type information of the findings.
+ bool exclude_info_types = 5;
+
+ // CustomInfoTypes provided by the user. See
+ // https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more.
+ repeated CustomInfoType custom_info_types = 6;
+
+ // List of options defining data content to scan.
+ // If empty, text, images, and other content will be included.
+ repeated ContentOption content_options = 8;
+
+ // Set of rules to apply to the findings for this InspectConfig.
+ // Exclusion rules contained in the set are executed last; other rules are
+ // executed in the order they are specified for each info type.
+ repeated InspectionRuleSet rule_set = 10;
+}
+
+// Container for bytes to inspect or redact.
+message ByteContentItem {
+ // The type of data being sent in `data`.
+ enum BytesType {
+ // Unused
+ BYTES_TYPE_UNSPECIFIED = 0;
+
+ // Any image type.
+ IMAGE = 6;
+
+ // jpeg
+ IMAGE_JPEG = 1;
+
+ // bmp
+ IMAGE_BMP = 2;
+
+ // png
+ IMAGE_PNG = 3;
+
+ // svg
+ IMAGE_SVG = 4;
+
+ // plain text
+ TEXT_UTF8 = 5;
+
+ // avro
+ AVRO = 11;
+ }
+
+ // The type of data stored in the bytes string. Default will be TEXT_UTF8.
+ BytesType type = 1;
+
+ // Content data to inspect or redact.
+ bytes data = 2;
+}
+
+// Container structure for the content to inspect.
+message ContentItem {
+ // Data of the item in byte-array form, as a UTF-8 string, or as a table.
+ oneof data_item {
+ // String data to inspect or redact.
+ string value = 3;
+
+ // Structured content for inspection. See
+ // https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table to
+ // learn more.
+ Table table = 4;
+
+ // Content data to inspect or redact. Replaces `type` and `data`.
+ ByteContentItem byte_item = 5;
+ }
+}
+
+// Structured content to inspect. Up to 50,000 `Value`s per request allowed.
+// See https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table to
+// learn more.
+message Table {
+ // Values of the row.
+ message Row {
+ // Individual cells.
+ repeated Value values = 1;
+ }
+
+ // Headers of the table.
+ repeated FieldId headers = 1;
+
+ // Rows of the table.
+ repeated Row rows = 2;
+}
+
+// All the findings for a single scanned item.
+message InspectResult {
+ // List of findings for an item.
+ repeated Finding findings = 1;
+
+ // If true, then this item might have more findings than were returned,
+ // and the findings returned are an arbitrary subset of all findings.
+ // The findings list might be truncated because the input items were too
+ // large, or because the server reached the maximum amount of resources
+ // allowed for a single API call. For best results, divide the input into
+ // smaller batches.
+ bool findings_truncated = 2;
+}
+
+// Represents a piece of potentially sensitive content.
+message Finding {
+ // The content that was found. Even if the content is not textual, it
+ // may be converted to a textual representation here.
+ // Provided if `include_quote` is true and the finding is
+ // less than or equal to 4096 bytes long. If the finding exceeds 4096 bytes
+ // in length, the quote may be omitted.
+ string quote = 1;
+
+ // The type of content that might have been found.
+ // Provided if `exclude_info_types` is false.
+ InfoType info_type = 2;
+
+ // Confidence of how likely it is that the `info_type` is correct.
+ Likelihood likelihood = 3;
+
+ // Where the content was found.
+ Location location = 4;
+
+ // Timestamp when finding was detected.
+ google.protobuf.Timestamp create_time = 6;
+
+ // Contains data parsed from quotes. Only populated if include_quote was set
+ // to true and a supported infoType was requested. Currently supported
+ // infoTypes: DATE, DATE_OF_BIRTH and TIME.
+ QuoteInfo quote_info = 7;
+}
+
+// Specifies the location of the finding.
+message Location {
+ // Zero-based byte offsets delimiting the finding.
+ // These are relative to the finding's containing element.
+ // Note that when the content is not textual, this references
+ // the UTF-8 encoded textual representation of the content.
+ // Omitted if content is an image.
+ Range byte_range = 1;
+
+ // Unicode character offsets delimiting the finding.
+ // These are relative to the finding's containing element.
+ // Provided when the content is text.
+ Range codepoint_range = 2;
+
+ // List of nested objects pointing to the precise location of the finding
+ // within the file or record.
+ repeated ContentLocation content_locations = 7;
+}
+
+// Findings container location data.
+message ContentLocation {
+ // Name of the container where the finding is located.
+ // The top level name is the source file name or table name. Names of some
+ // common storage containers are formatted as follows:
+ //
+ // * BigQuery tables: `{project_id}:{dataset_id}.{table_id}`
+ // * Cloud Storage files: `gs://{bucket}/{path}`
+ // * Datastore namespace: `{namespace}`
+ //
+ // Nested names could be absent if the embedded object has no string
+ // identifier (for example, an image contained within a document).
+ string container_name = 1;
+
+ // Type of the container within the file with location of the finding.
+ oneof location {
+ // Location within a row or record of a database table.
+ RecordLocation record_location = 2;
+
+ // Location within an image's pixels.
+ ImageLocation image_location = 3;
+
+ // Location data for document files.
+ DocumentLocation document_location = 5;
+ }
+
+ // Findings container modification timestamp, if applicable.
+ // For Google Cloud Storage contains last file modification timestamp.
+ // For BigQuery table contains last_modified_time property.
+ // For Datastore - not populated.
+ google.protobuf.Timestamp container_timestamp = 6;
+
+ // Findings container version, if available
+ // ("generation" for Google Cloud Storage).
+ string container_version = 7;
+}
+
+// Location of a finding within a document.
+message DocumentLocation {
+ // Offset of the line, from the beginning of the file, where the finding
+ // is located.
+ int64 file_offset = 1;
+}
+
+// Location of a finding within a row or record.
+message RecordLocation {
+ // Key of the finding.
+ RecordKey record_key = 1;
+
+ // Field id of the field containing the finding.
+ FieldId field_id = 2;
+
+ // Location within a `ContentItem.Table`.
+ TableLocation table_location = 3;
+}
+
+// Location of a finding within a table.
+message TableLocation {
+ // The zero-based index of the row where the finding is located.
+ int64 row_index = 1;
+}
+
+// Generic half-open interval [start, end)
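+// For example (illustrative offsets), in the text "hello world" a finding
+// covering "world" has start = 6 and end = 11.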
+message Range {
+ // Index of the first character of the range (inclusive).
+ int64 start = 1;
+
+ // Index of the last character of the range (exclusive).
+ int64 end = 2;
+}
+
+// Location of the finding within an image.
+message ImageLocation {
+ // Bounding boxes locating the pixels within the image containing the finding.
+ repeated BoundingBox bounding_boxes = 1;
+}
+
+// Bounding box encompassing detected text within an image.
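+// For example (illustrative values), top = 10, left = 20, width = 100,
+// height = 50 describes a 100x50-pixel box whose upper-left corner is at
+// (x = 20, y = 10).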
+message BoundingBox {
+ // Top coordinate of the bounding box. (0,0) is upper left.
+ int32 top = 1;
+
+ // Left coordinate of the bounding box. (0,0) is upper left.
+ int32 left = 2;
+
+ // Width of the bounding box in pixels.
+ int32 width = 3;
+
+ // Height of the bounding box in pixels.
+ int32 height = 4;
+}
+
+// Request to search for potentially sensitive info in an image and redact it
+// by covering it with a colored rectangle.
+message RedactImageRequest {
+ // Configuration for determining how redaction of images should occur.
+ message ImageRedactionConfig {
+ // Type of information to redact from images.
+ oneof target {
+ // Only one per info_type should be provided per request. If not
+ // specified, and redact_all_text is false, the DLP API will redact all
+ // text that matches any info_type that is found but not specified in
+ // another ImageRedactionConfig.
+ InfoType info_type = 1;
+
+ // If true, all text found in the image, regardless of whether it matches an
+ // info_type, is redacted. Only one should be provided.
+ bool redact_all_text = 2;
+ }
+
+ // The color to use when redacting content from an image. If not specified,
+ // the default is black.
+ Color redaction_color = 3;
+ }
+
+ // The parent resource name, for example projects/my-project-id.
+ string parent = 1 [(google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }];
+
+ // The geographic location to process the request. Reserved for future
+ // extensions.
+ string location_id = 8;
+
+ // Configuration for the inspector.
+ InspectConfig inspect_config = 2;
+
+ // The configuration for specifying what content to redact from images.
+ repeated ImageRedactionConfig image_redaction_configs = 5;
+
+ // Whether the response should include findings along with the redacted
+ // image.
+ bool include_findings = 6;
+
+ // The content must be PNG, JPEG, SVG or BMP.
+ ByteContentItem byte_item = 7;
+}
+
+// Represents a color in the RGB color space.
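+// For example (illustrative values), pure red is red = 1.0, green = 0.0,
+// blue = 0.0.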
+message Color {
+ // The amount of red in the color as a value in the interval [0, 1].
+ float red = 1;
+
+ // The amount of green in the color as a value in the interval [0, 1].
+ float green = 2;
+
+ // The amount of blue in the color as a value in the interval [0, 1].
+ float blue = 3;
+}
+
+// Results of redacting an image.
+message RedactImageResponse {
+ // The redacted image. The type will be the same as the original image.
+ bytes redacted_image = 1;
+
+ // If an image was being inspected and the InspectConfig's include_quote was
+ // set to true, then this field will include all text, if any, that was found
+ // in the image.
+ string extracted_text = 2;
+
+ // The findings. Populated when include_findings in the request is true.
+ InspectResult inspect_result = 3;
+}
+
+// Request to de-identify a list of items.
+message DeidentifyContentRequest {
+ // The parent resource name, for example projects/my-project-id.
+ string parent = 1 [(google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }];
+
+ // Configuration for the de-identification of the content item.
+ // Items specified here will override the template referenced by the
+ // deidentify_template_name argument.
+ DeidentifyConfig deidentify_config = 2;
+
+ // Configuration for the inspector.
+ // Items specified here will override the template referenced by the
+ // inspect_template_name argument.
+ InspectConfig inspect_config = 3;
+
+ // The item to de-identify. Will be treated as text.
+ ContentItem item = 4;
+
+ // Template to use. Any configuration directly specified in
+ // inspect_config will override those set in the template. Singular fields
+ // that are set in this request will replace their corresponding fields in the
+ // template. Repeated fields are appended. Singular sub-messages and groups
+ // are recursively merged.
+ string inspect_template_name = 5;
+
+ // Template to use. Any configuration directly specified in
+ // deidentify_config will override those set in the template. Singular fields
+ // that are set in this request will replace their corresponding fields in the
+ // template. Repeated fields are appended. Singular sub-messages and groups
+ // are recursively merged.
+ string deidentify_template_name = 6;
+
+ // The geographic location to process de-identification. Reserved for future
+ // extensions.
+ string location_id = 7;
+}
+
+// Results of de-identifying a ContentItem.
+message DeidentifyContentResponse {
+ // The de-identified item.
+ ContentItem item = 1;
+
+ // An overview of the changes that were made on the `item`.
+ TransformationOverview overview = 2;
+}
+
+// Request to re-identify an item.
+message ReidentifyContentRequest {
+ // Required. The parent resource name.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }
+ ];
+
+ // Configuration for the re-identification of the content item.
+ // This field shares the same proto message type that is used for
+ // de-identification, however its usage here is for the reversal of the
+ // previous de-identification. Re-identification is performed by examining
+ // the transformations used to de-identify the items and executing the
+ // reverse. This requires that only reversible transformations
+ // be provided here. The reversible transformations are:
+ //
+ // - `CryptoDeterministicConfig`
+ // - `CryptoReplaceFfxFpeConfig`
+ DeidentifyConfig reidentify_config = 2;
+
+ // Configuration for the inspector.
+ InspectConfig inspect_config = 3;
+
+ // The item to re-identify. Will be treated as text.
+ ContentItem item = 4;
+
+ // Template to use. Any configuration directly specified in
+ // `inspect_config` will override those set in the template. Singular fields
+ // that are set in this request will replace their corresponding fields in the
+ // template. Repeated fields are appended. Singular sub-messages and groups
+ // are recursively merged.
+ string inspect_template_name = 5;
+
+ // Template to use. References an instance of `DeidentifyTemplate`.
+ // Any configuration directly specified in `reidentify_config` or
+ // `inspect_config` will override those set in the template. Singular fields
+ // that are set in this request will replace their corresponding fields in the
+ // template. Repeated fields are appended. Singular sub-messages and groups
+ // are recursively merged.
+ string reidentify_template_name = 6;
+
+ // The geographic location to process content reidentification. Reserved for
+ // future extensions.
+ string location_id = 7;
+}
+
+// Results of re-identifying an item.
+message ReidentifyContentResponse {
+ // The re-identified item.
+ ContentItem item = 1;
+
+ // An overview of the changes that were made to the `item`.
+ TransformationOverview overview = 2;
+}
+
+// Request to search for potentially sensitive info in a ContentItem.
+message InspectContentRequest {
+ // The parent resource name, for example projects/my-project-id.
+ string parent = 1 [(google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }];
+
+  // Configuration for the inspector. What is specified here will override
+  // the template referenced by the inspect_template_name argument.
+ InspectConfig inspect_config = 2;
+
+ // The item to inspect.
+ ContentItem item = 3;
+
+ // Template to use. Any configuration directly specified in
+ // inspect_config will override those set in the template. Singular fields
+ // that are set in this request will replace their corresponding fields in the
+ // template. Repeated fields are appended. Singular sub-messages and groups
+ // are recursively merged.
+ string inspect_template_name = 4;
+
+ // The geographic location to process content inspection. Reserved for future
+ // extensions.
+ string location_id = 5;
+}
+
+// Results of inspecting an item.
+message InspectContentResponse {
+ // The findings.
+ InspectResult result = 1;
+}
+
+// Cloud repository for storing output.
+message OutputStorageConfig {
+ // Predefined schemas for storing findings.
+ enum OutputSchema {
+ // Unused.
+ OUTPUT_SCHEMA_UNSPECIFIED = 0;
+
+ // Basic schema including only `info_type`, `quote`, `certainty`, and
+ // `timestamp`.
+ BASIC_COLUMNS = 1;
+
+ // Schema tailored to findings from scanning Google Cloud Storage.
+ GCS_COLUMNS = 2;
+
+ // Schema tailored to findings from scanning Google Datastore.
+ DATASTORE_COLUMNS = 3;
+
+ // Schema tailored to findings from scanning Google BigQuery.
+ BIG_QUERY_COLUMNS = 4;
+
+ // Schema containing all columns.
+ ALL_COLUMNS = 5;
+ }
+
+ // Output storage types.
+ oneof type {
+ // Store findings in an existing table or a new table in an existing
+ // dataset. If table_id is not set, a new one will be generated
+ // for you with the following format:
+ // dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific timezone will be used for
+ // generating the date details.
+ //
+ // For Inspect, each column in an existing output table must have the same
+ // name, type, and mode of a field in the `Finding` object.
+ //
+ // For Risk, an existing output table should be the output of a previous
+ // Risk analysis job run on the same source table, with the same privacy
+ // metric and quasi-identifiers. Risk jobs that analyze the same table but
+ // compute a different privacy metric, or use different sets of
+ // quasi-identifiers, cannot store their results in the same table.
+ BigQueryTable table = 1;
+ }
+
+ // Schema used for writing the findings for Inspect jobs. This field is only
+ // used for Inspect and must be unspecified for Risk jobs. Columns are derived
+ // from the `Finding` object. If appending to an existing table, any columns
+ // from the predefined schema that are missing will be added. No columns in
+ // the existing table will be deleted.
+ //
+ // If unspecified, then all available columns will be used for a new table or
+ // an (existing) table with no schema, and no changes will be made to an
+ // existing table that has a schema.
+ OutputSchema output_schema = 3;
+}
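+
+// A minimal sketch of an OutputStorageConfig in proto text format (values are
+// illustrative; project_id, dataset_id, and table_id are the BigQueryTable
+// fields defined in storage.proto):
+//
+//   table {
+//     project_id: "my-project-id"
+//     dataset_id: "dlp_results"
+//     table_id: "findings"
+//   }
+//   output_schema: BASIC_COLUMNS
+//
+// If table_id is omitted, a dlp_googleapis_yyyy_mm_dd_[dlp_job_id] table is
+// generated, as described above.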
+
+// Statistics regarding a specific InfoType.
+message InfoTypeStats {
+ // The type of finding this stat is for.
+ InfoType info_type = 1;
+
+ // Number of findings for this infoType.
+ int64 count = 2;
+}
+
+// The results of an inspect DataSource job.
+message InspectDataSourceDetails {
+ // Snapshot of the inspection configuration.
+ message RequestedOptions {
+ // If run with an InspectTemplate, a snapshot of its state at the time of
+ // this run.
+ InspectTemplate snapshot_inspect_template = 1;
+
+ // Inspect config.
+ InspectJobConfig job_config = 3;
+ }
+
+ // All result fields mentioned below are updated while the job is processing.
+ message Result {
+ // Total size in bytes that were processed.
+ int64 processed_bytes = 1;
+
+ // Estimate of the number of bytes to process.
+ int64 total_estimated_bytes = 2;
+
+ // Statistics of how many instances of each info type were found during
+ // the inspect job.
+ repeated InfoTypeStats info_type_stats = 3;
+ }
+
+ // The configuration used for this job.
+ RequestedOptions requested_options = 2;
+
+ // A summary of the outcome of this inspect job.
+ Result result = 3;
+}
+
+// InfoType description.
+message InfoTypeDescription {
+ // Internal name of the infoType.
+ string name = 1;
+
+ // Human readable form of the infoType name.
+ string display_name = 2;
+
+  // Which parts of the API support this InfoType.
+ repeated InfoTypeSupportedBy supported_by = 3;
+
+ // Description of the infotype. Translated when language is provided in the
+ // request.
+ string description = 4;
+}
+
+// Request for the list of infoTypes.
+message ListInfoTypesRequest {
+ // BCP-47 language code for localized infoType friendly
+ // names. If omitted, or if localized strings are not available,
+ // en-US strings will be returned.
+ string language_code = 1;
+
+  // Filter to only return infoTypes supported by certain parts of the
+ // API. Defaults to supported_by=INSPECT.
+ string filter = 2;
+
+ // The geographic location to list info types. Reserved for future
+ // extensions.
+ string location_id = 3;
+}
+
+// Response to the ListInfoTypes request.
+message ListInfoTypesResponse {
+ // Set of sensitive infoTypes.
+ repeated InfoTypeDescription info_types = 1;
+}
+
+// Configuration for a risk analysis job. See
+// https://cloud.google.com/dlp/docs/concepts-risk-analysis to learn more.
+message RiskAnalysisJobConfig {
+ // Privacy metric to compute.
+ PrivacyMetric privacy_metric = 1;
+
+ // Input dataset to compute metrics over.
+ BigQueryTable source_table = 2;
+
+  // Actions to execute at the completion of the job, in the order provided.
+ repeated Action actions = 3;
+}
+
+// A column with a semantic tag attached.
+message QuasiId {
+ // Required. Identifies the column.
+ FieldId field = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Semantic tag that identifies what a column contains, to determine which
+ // statistical model to use to estimate the reidentifiability of each
+ // value. [required]
+ oneof tag {
+ // A column can be tagged with an InfoType to use the relevant public
+ // dataset as a statistical model of population, if available. We
+ // currently support US ZIP codes, region codes, ages and genders.
+ // To programmatically obtain the list of supported InfoTypes, use
+ // ListInfoTypes with the supported_by=RISK_ANALYSIS filter.
+ InfoType info_type = 2;
+
+ // A column can be tagged with a custom tag. In this case, the user must
+ // indicate an auxiliary table that contains statistical information on
+ // the possible values of this column (below).
+ string custom_tag = 3;
+
+ // If no semantic tag is indicated, we infer the statistical model from
+ // the distribution of values in the input data.
+ google.protobuf.Empty inferred = 4;
+ }
+}
+
+// An auxiliary table containing statistical information on the relative
+// frequency of different quasi-identifiers values. It has one or several
+// quasi-identifiers columns, and one column that indicates the relative
+// frequency of each quasi-identifier tuple.
+// If a tuple is present in the data but not in the auxiliary table, the
+// corresponding relative frequency is assumed to be zero (and thus, the
+// tuple is highly reidentifiable).
+message StatisticalTable {
+ // A quasi-identifier column has a custom_tag, used to know which column
+ // in the data corresponds to which column in the statistical model.
+ message QuasiIdentifierField {
+ // Identifies the column.
+ FieldId field = 1;
+
+ // A column can be tagged with a custom tag. In this case, the user must
+ // indicate an auxiliary table that contains statistical information on
+ // the possible values of this column (below).
+ string custom_tag = 2;
+ }
+
+ // Required. Auxiliary table location.
+ BigQueryTable table = 3 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Quasi-identifier columns.
+ repeated QuasiIdentifierField quasi_ids = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The relative frequency column must contain a floating-point number
+ // between 0 and 1 (inclusive). Null values are assumed to be zero.
+ FieldId relative_frequency = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Privacy metric to compute for reidentification risk analysis.
+message PrivacyMetric {
+ // Compute numerical stats over an individual column, including
+ // min, max, and quantiles.
+ message NumericalStatsConfig {
+ // Field to compute numerical stats on. Supported types are
+ // integer, float, date, datetime, timestamp, time.
+ FieldId field = 1;
+ }
+
+ // Compute numerical stats over an individual column, including
+ // number of distinct values and value count distribution.
+ message CategoricalStatsConfig {
+ // Field to compute categorical stats on. All column types are
+ // supported except for arrays and structs. However, it may be more
+ // informative to use NumericalStats when the field type is supported,
+ // depending on the data.
+ FieldId field = 1;
+ }
+
+ // k-anonymity metric, used for analysis of reidentification risk.
+ message KAnonymityConfig {
+ // Set of fields to compute k-anonymity over. When multiple fields are
+ // specified, they are considered a single composite key. Structs and
+ // repeated data types are not supported; however, nested fields are
+ // supported so long as they are not structs themselves or nested within
+ // a repeated field.
+ repeated FieldId quasi_ids = 1;
+
+ // Message indicating that multiple rows might be associated to a
+ // single individual. If the same entity_id is associated to multiple
+ // quasi-identifier tuples over distinct rows, we consider the entire
+ // collection of tuples as the composite quasi-identifier. This collection
+ // is a multiset: the order in which the different tuples appear in the
+ // dataset is ignored, but their frequency is taken into account.
+ //
+ // Important note: a maximum of 1000 rows can be associated to a single
+ // entity ID. If more rows are associated with the same entity ID, some
+ // might be ignored.
+ EntityId entity_id = 2;
+ }
+
+ // l-diversity metric, used for analysis of reidentification risk.
+ message LDiversityConfig {
+ // Set of quasi-identifiers indicating how equivalence classes are
+ // defined for the l-diversity computation. When multiple fields are
+ // specified, they are considered a single composite key.
+ repeated FieldId quasi_ids = 1;
+
+ // Sensitive field for computing the l-value.
+ FieldId sensitive_attribute = 2;
+ }
+
+ // Reidentifiability metric. This corresponds to a risk model similar to what
+ // is called "journalist risk" in the literature, except the attack dataset is
+ // statistically modeled instead of being perfectly known. This can be done
+ // using publicly available data (like the US Census), or using a custom
+ // statistical model (indicated as one or several BigQuery tables), or by
+ // extrapolating from the distribution of values in the input dataset.
+ message KMapEstimationConfig {
+ // A column with a semantic tag attached.
+ message TaggedField {
+ // Required. Identifies the column.
+ FieldId field = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Semantic tag that identifies what a column contains, to determine which
+ // statistical model to use to estimate the reidentifiability of each
+ // value. [required]
+ oneof tag {
+ // A column can be tagged with an InfoType to use the relevant public
+ // dataset as a statistical model of population, if available. We
+ // currently support US ZIP codes, region codes, ages and genders.
+ // To programmatically obtain the list of supported InfoTypes, use
+ // ListInfoTypes with the supported_by=RISK_ANALYSIS filter.
+ InfoType info_type = 2;
+
+ // A column can be tagged with a custom tag. In this case, the user must
+ // indicate an auxiliary table that contains statistical information on
+ // the possible values of this column (below).
+ string custom_tag = 3;
+
+ // If no semantic tag is indicated, we infer the statistical model from
+ // the distribution of values in the input data.
+ google.protobuf.Empty inferred = 4;
+ }
+ }
+
+ // An auxiliary table contains statistical information on the relative
+ // frequency of different quasi-identifiers values. It has one or several
+ // quasi-identifiers columns, and one column that indicates the relative
+ // frequency of each quasi-identifier tuple.
+ // If a tuple is present in the data but not in the auxiliary table, the
+ // corresponding relative frequency is assumed to be zero (and thus, the
+ // tuple is highly reidentifiable).
+ message AuxiliaryTable {
+ // A quasi-identifier column has a custom_tag, used to know which column
+ // in the data corresponds to which column in the statistical model.
+ message QuasiIdField {
+ // Identifies the column.
+ FieldId field = 1;
+
+ // An auxiliary field.
+ string custom_tag = 2;
+ }
+
+ // Required. Auxiliary table location.
+ BigQueryTable table = 3 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Quasi-identifier columns.
+ repeated QuasiIdField quasi_ids = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The relative frequency column must contain a floating-point number
+ // between 0 and 1 (inclusive). Null values are assumed to be zero.
+ FieldId relative_frequency = 2 [(google.api.field_behavior) = REQUIRED];
+ }
+
+ // Required. Fields considered to be quasi-identifiers. No two columns can have the
+ // same tag.
+ repeated TaggedField quasi_ids = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // ISO 3166-1 alpha-2 region code to use in the statistical modeling.
+ // Set if no column is tagged with a region-specific InfoType (like
+ // US_ZIP_5) or a region code.
+ string region_code = 2;
+
+ // Several auxiliary tables can be used in the analysis. Each custom_tag
+ // used to tag a quasi-identifiers column must appear in exactly one column
+ // of one auxiliary table.
+ repeated AuxiliaryTable auxiliary_tables = 3;
+ }
+
+ // δ-presence metric, used to estimate how likely it is for an attacker to
+ // figure out that one given individual appears in a de-identified dataset.
+ // Similarly to the k-map metric, we cannot compute δ-presence exactly without
+ // knowing the attack dataset, so we use a statistical model instead.
+ message DeltaPresenceEstimationConfig {
+ // Required. Fields considered to be quasi-identifiers. No two fields can have the
+ // same tag.
+ repeated QuasiId quasi_ids = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // ISO 3166-1 alpha-2 region code to use in the statistical modeling.
+ // Set if no column is tagged with a region-specific InfoType (like
+ // US_ZIP_5) or a region code.
+ string region_code = 2;
+
+ // Several auxiliary tables can be used in the analysis. Each custom_tag
+ // used to tag a quasi-identifiers field must appear in exactly one
+ // field of one auxiliary table.
+ repeated StatisticalTable auxiliary_tables = 3;
+ }
+
+ // Types of analysis.
+ oneof type {
+ // Numerical stats
+ NumericalStatsConfig numerical_stats_config = 1;
+
+ // Categorical stats
+ CategoricalStatsConfig categorical_stats_config = 2;
+
+ // K-anonymity
+ KAnonymityConfig k_anonymity_config = 3;
+
+ // l-diversity
+ LDiversityConfig l_diversity_config = 4;
+
+ // k-map
+ KMapEstimationConfig k_map_estimation_config = 5;
+
+ // delta-presence
+ DeltaPresenceEstimationConfig delta_presence_estimation_config = 6;
+ }
+}
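+
+// A minimal sketch of a PrivacyMetric in proto text format, requesting a
+// k-anonymity computation over two quasi-identifier columns (the column names
+// are hypothetical; FieldId and its name field come from storage.proto):
+//
+//   k_anonymity_config {
+//     quasi_ids { name: "zip_code" }
+//     quasi_ids { name: "age" }
+//   }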
+
+// Result of a risk analysis operation request.
+message AnalyzeDataSourceRiskDetails {
+ // Result of the numerical stats computation.
+ message NumericalStatsResult {
+ // Minimum value appearing in the column.
+ Value min_value = 1;
+
+ // Maximum value appearing in the column.
+ Value max_value = 2;
+
+ // List of 99 values that partition the set of field values into 100 equal
+ // sized buckets.
+ repeated Value quantile_values = 4;
+ }
+
+ // Result of the categorical stats computation.
+ message CategoricalStatsResult {
+ // Histogram of value frequencies in the column.
+ message CategoricalStatsHistogramBucket {
+ // Lower bound on the value frequency of the values in this bucket.
+ int64 value_frequency_lower_bound = 1;
+
+ // Upper bound on the value frequency of the values in this bucket.
+ int64 value_frequency_upper_bound = 2;
+
+ // Total number of values in this bucket.
+ int64 bucket_size = 3;
+
+ // Sample of value frequencies in this bucket. The total number of
+ // values returned per bucket is capped at 20.
+ repeated ValueFrequency bucket_values = 4;
+
+ // Total number of distinct values in this bucket.
+ int64 bucket_value_count = 5;
+ }
+
+ // Histogram of value frequencies in the column.
+ repeated CategoricalStatsHistogramBucket value_frequency_histogram_buckets = 5;
+ }
+
+ // Result of the k-anonymity computation.
+ message KAnonymityResult {
+ // The set of columns' values that share the same k-anonymity value.
+ message KAnonymityEquivalenceClass {
+ // Set of values defining the equivalence class. One value per
+ // quasi-identifier column in the original KAnonymity metric message.
+ // The order is always the same as the original request.
+ repeated Value quasi_ids_values = 1;
+
+ // Size of the equivalence class, for example number of rows with the
+ // above set of values.
+ int64 equivalence_class_size = 2;
+ }
+
+ // Histogram of k-anonymity equivalence classes.
+ message KAnonymityHistogramBucket {
+ // Lower bound on the size of the equivalence classes in this bucket.
+ int64 equivalence_class_size_lower_bound = 1;
+
+ // Upper bound on the size of the equivalence classes in this bucket.
+ int64 equivalence_class_size_upper_bound = 2;
+
+ // Total number of equivalence classes in this bucket.
+ int64 bucket_size = 3;
+
+ // Sample of equivalence classes in this bucket. The total number of
+ // classes returned per bucket is capped at 20.
+ repeated KAnonymityEquivalenceClass bucket_values = 4;
+
+ // Total number of distinct equivalence classes in this bucket.
+ int64 bucket_value_count = 5;
+ }
+
+ // Histogram of k-anonymity equivalence classes.
+ repeated KAnonymityHistogramBucket equivalence_class_histogram_buckets = 5;
+ }
+
+ // Result of the l-diversity computation.
+ message LDiversityResult {
+ // The set of columns' values that share the same l-diversity value.
+ message LDiversityEquivalenceClass {
+ // Quasi-identifier values defining the k-anonymity equivalence
+ // class. The order is always the same as the original request.
+ repeated Value quasi_ids_values = 1;
+
+ // Size of the k-anonymity equivalence class.
+ int64 equivalence_class_size = 2;
+
+ // Number of distinct sensitive values in this equivalence class.
+ int64 num_distinct_sensitive_values = 3;
+
+ // Estimated frequencies of top sensitive values.
+ repeated ValueFrequency top_sensitive_values = 4;
+ }
+
+ // Histogram of l-diversity equivalence class sensitive value frequencies.
+ message LDiversityHistogramBucket {
+ // Lower bound on the sensitive value frequencies of the equivalence
+ // classes in this bucket.
+ int64 sensitive_value_frequency_lower_bound = 1;
+
+ // Upper bound on the sensitive value frequencies of the equivalence
+ // classes in this bucket.
+ int64 sensitive_value_frequency_upper_bound = 2;
+
+ // Total number of equivalence classes in this bucket.
+ int64 bucket_size = 3;
+
+ // Sample of equivalence classes in this bucket. The total number of
+ // classes returned per bucket is capped at 20.
+ repeated LDiversityEquivalenceClass bucket_values = 4;
+
+ // Total number of distinct equivalence classes in this bucket.
+ int64 bucket_value_count = 5;
+ }
+
+ // Histogram of l-diversity equivalence class sensitive value frequencies.
+ repeated LDiversityHistogramBucket sensitive_value_frequency_histogram_buckets = 5;
+ }
+
+ // Result of the reidentifiability analysis. Note that these results are an
+ // estimation, not exact values.
+ message KMapEstimationResult {
+ // A tuple of values for the quasi-identifier columns.
+ message KMapEstimationQuasiIdValues {
+ // The quasi-identifier values.
+ repeated Value quasi_ids_values = 1;
+
+ // The estimated anonymity for these quasi-identifier values.
+ int64 estimated_anonymity = 2;
+ }
+
+ // A KMapEstimationHistogramBucket message with the following values:
+ // min_anonymity: 3
+ // max_anonymity: 5
+ // frequency: 42
+ // means that there are 42 records whose quasi-identifier values correspond
+ // to 3, 4 or 5 people in the overlying population. An important particular
+ // case is when min_anonymity = max_anonymity = 1: the frequency field then
+ // corresponds to the number of uniquely identifiable records.
+ message KMapEstimationHistogramBucket {
+ // Always positive.
+ int64 min_anonymity = 1;
+
+ // Always greater than or equal to min_anonymity.
+ int64 max_anonymity = 2;
+
+ // Number of records within these anonymity bounds.
+ int64 bucket_size = 5;
+
+ // Sample of quasi-identifier tuple values in this bucket. The total
+ // number of classes returned per bucket is capped at 20.
+ repeated KMapEstimationQuasiIdValues bucket_values = 6;
+
+ // Total number of distinct quasi-identifier tuple values in this bucket.
+ int64 bucket_value_count = 7;
+ }
+
+ // The intervals [min_anonymity, max_anonymity] do not overlap. If a value
+ // doesn't correspond to any such interval, the associated frequency is
+ // zero. For example, the following records:
+ // {min_anonymity: 1, max_anonymity: 1, frequency: 17}
+ // {min_anonymity: 2, max_anonymity: 3, frequency: 42}
+ // {min_anonymity: 5, max_anonymity: 10, frequency: 99}
+ // mean that there are no records with an estimated anonymity of 4, or larger
+ // than 10.
+ repeated KMapEstimationHistogramBucket k_map_estimation_histogram = 1;
+ }
+
+ // Result of the δ-presence computation. Note that these results are an
+ // estimation, not exact values.
+ message DeltaPresenceEstimationResult {
+ // A tuple of values for the quasi-identifier columns.
+ message DeltaPresenceEstimationQuasiIdValues {
+ // The quasi-identifier values.
+ repeated Value quasi_ids_values = 1;
+
+ // The estimated probability that a given individual sharing these
+ // quasi-identifier values is in the dataset. This value, typically called
+ // δ, is the ratio between the number of records in the dataset with these
+ // quasi-identifier values, and the total number of individuals (inside
+ // *and* outside the dataset) with these quasi-identifier values.
+ // For example, if there are 15 individuals in the dataset who share the
+ // same quasi-identifier values, and an estimated 100 people in the entire
+ // population with these values, then δ is 0.15.
+ double estimated_probability = 2;
+ }
+
+ // A DeltaPresenceEstimationHistogramBucket message with the following
+ // values:
+ // min_probability: 0.1
+ // max_probability: 0.2
+ // frequency: 42
+ // means that there are 42 records for which δ is in [0.1, 0.2). An
+ // important particular case is when min_probability = max_probability = 1:
+ // then, every individual who shares this quasi-identifier combination is in
+ // the dataset.
+ message DeltaPresenceEstimationHistogramBucket {
+ // Between 0 and 1.
+ double min_probability = 1;
+
+ // Always greater than or equal to min_probability.
+ double max_probability = 2;
+
+ // Number of records within these probability bounds.
+ int64 bucket_size = 5;
+
+ // Sample of quasi-identifier tuple values in this bucket. The total
+ // number of classes returned per bucket is capped at 20.
+ repeated DeltaPresenceEstimationQuasiIdValues bucket_values = 6;
+
+ // Total number of distinct quasi-identifier tuple values in this bucket.
+ int64 bucket_value_count = 7;
+ }
+
+ // The intervals [min_probability, max_probability) do not overlap. If a
+ // value doesn't correspond to any such interval, the associated frequency
+ // is zero. For example, the following records:
+ // {min_probability: 0, max_probability: 0.1, frequency: 17}
+ // {min_probability: 0.2, max_probability: 0.3, frequency: 42}
+ // {min_probability: 0.3, max_probability: 0.4, frequency: 99}
+ // mean that there are no records with an estimated probability in [0.1, 0.2)
+ // nor larger than or equal to 0.4.
+ repeated DeltaPresenceEstimationHistogramBucket delta_presence_estimation_histogram = 1;
+ }
+
+ // Privacy metric to compute.
+ PrivacyMetric requested_privacy_metric = 1;
+
+ // Input dataset to compute metrics over.
+ BigQueryTable requested_source_table = 2;
+
+ // Values associated with this metric.
+ oneof result {
+ // Numerical stats result
+ NumericalStatsResult numerical_stats_result = 3;
+
+ // Categorical stats result
+ CategoricalStatsResult categorical_stats_result = 4;
+
+ // K-anonymity result
+ KAnonymityResult k_anonymity_result = 5;
+
+ // L-diversity result
+ LDiversityResult l_diversity_result = 6;
+
+ // K-map result
+ KMapEstimationResult k_map_estimation_result = 7;
+
+ // Delta-presence result
+ DeltaPresenceEstimationResult delta_presence_estimation_result = 9;
+ }
+}
+
+// A value of a field, including its frequency.
+message ValueFrequency {
+ // A value contained in the field in question.
+ Value value = 1;
+
+ // How many times the value is contained in the field.
+ int64 count = 2;
+}
+
+// Set of primitive values supported by the system.
+// Note that for the purposes of inspection or transformation, the number
+// of bytes considered to comprise a 'Value' is based on its representation
+// as a UTF-8 encoded string. For example, if 'integer_value' is set to
+// 123456789, the number of bytes would be counted as 9, even though an
+// int64 only holds up to 8 bytes of data.
+message Value {
+ // Value types
+ oneof type {
+ // integer
+ int64 integer_value = 1;
+
+ // float
+ double float_value = 2;
+
+ // string
+ string string_value = 3;
+
+ // boolean
+ bool boolean_value = 4;
+
+ // timestamp
+ google.protobuf.Timestamp timestamp_value = 5;
+
+ // time of day
+ google.type.TimeOfDay time_value = 6;
+
+ // date
+ google.type.Date date_value = 7;
+
+ // day of week
+ google.type.DayOfWeek day_of_week_value = 8;
+ }
+}
+
+// Message for infoType-dependent details parsed from quote.
+message QuoteInfo {
+ // Object representation of the quote.
+ oneof parsed_quote {
+ // The date time indicated by the quote.
+ DateTime date_time = 2;
+ }
+}
+
+// Message for a date time object.
+// e.g. 2018-01-01, 5th August.
+message DateTime {
+ // Time zone of the date time object.
+ message TimeZone {
+ // Set only if the offset can be determined. Positive for time ahead of UTC.
+ // E.g. For "UTC-9", this value is -540.
+ int32 offset_minutes = 1;
+ }
+
+ // One or more of the following must be set.
+ // Must be a valid date or time value.
+ google.type.Date date = 1;
+
+ // Day of week
+ google.type.DayOfWeek day_of_week = 2;
+
+ // Time of day
+ google.type.TimeOfDay time = 3;
+
+ // Time zone
+ TimeZone time_zone = 4;
+}
+
+// The configuration that controls how the data will change.
+message DeidentifyConfig {
+ oneof transformation {
+ // Treat the dataset as free-form text and apply the same free text
+ // transformation everywhere.
+ InfoTypeTransformations info_type_transformations = 1;
+
+ // Treat the dataset as structured. Transformations can be applied to
+ // specific locations within structured datasets, such as transforming
+ // a column within a table.
+ RecordTransformations record_transformations = 2;
+ }
+}
+
+// A rule for transforming a value.
+message PrimitiveTransformation {
+ oneof transformation {
+ // Replace
+ ReplaceValueConfig replace_config = 1;
+
+ // Redact
+ RedactConfig redact_config = 2;
+
+ // Mask
+ CharacterMaskConfig character_mask_config = 3;
+
+ // Ffx-Fpe
+ CryptoReplaceFfxFpeConfig crypto_replace_ffx_fpe_config = 4;
+
+ // Fixed size bucketing
+ FixedSizeBucketingConfig fixed_size_bucketing_config = 5;
+
+ // Bucketing
+ BucketingConfig bucketing_config = 6;
+
+ // Replace with infotype
+ ReplaceWithInfoTypeConfig replace_with_info_type_config = 7;
+
+ // Time extraction
+ TimePartConfig time_part_config = 8;
+
+ // Crypto
+ CryptoHashConfig crypto_hash_config = 9;
+
+ // Date Shift
+ DateShiftConfig date_shift_config = 11;
+
+ // Deterministic Crypto
+ CryptoDeterministicConfig crypto_deterministic_config = 12;
+ }
+}
+
+// For use with `Date`, `Timestamp`, and `TimeOfDay`, extract or preserve a
+// portion of the value.
+message TimePartConfig {
+ // Components that make up time.
+ enum TimePart {
+ // Unused
+ TIME_PART_UNSPECIFIED = 0;
+
+ // [0-9999]
+ YEAR = 1;
+
+ // [1-12]
+ MONTH = 2;
+
+ // [1-31]
+ DAY_OF_MONTH = 3;
+
+ // [1-7]
+ DAY_OF_WEEK = 4;
+
+ // [1-53]
+ WEEK_OF_YEAR = 5;
+
+ // [0-23]
+ HOUR_OF_DAY = 6;
+ }
+
+ // The part of the time to keep.
+ TimePart part_to_extract = 1;
+}
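+
+// A minimal sketch of a TimePartConfig in proto text format that keeps only
+// the year of a date value:
+//
+//   part_to_extract: YEAR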
+
+// Pseudonymization method that generates surrogates via cryptographic hashing.
+// Uses SHA-256.
+// The key size must be either 32 or 64 bytes.
+// Outputs a base64 encoded representation of the hashed output
+// (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=).
+// Currently, only string and integer values can be hashed.
+// See https://cloud.google.com/dlp/docs/pseudonymization to learn more.
+message CryptoHashConfig {
+ // The key used by the hash function.
+ CryptoKey crypto_key = 1;
+}
+
+// Pseudonymization method that generates deterministic encryption for the given
+// input. Outputs a base64 encoded representation of the encrypted output.
+// Uses AES-SIV based on the RFC https://tools.ietf.org/html/rfc5297.
+message CryptoDeterministicConfig {
+ // The key used by the encryption function.
+ CryptoKey crypto_key = 1;
+
+ // The custom info type to annotate the surrogate with.
+ // This annotation will be applied to the surrogate by prefixing it with
+ // the name of the custom info type followed by the number of
+ // characters comprising the surrogate. The following scheme defines the
+ // format: {info type name}({surrogate character count}):{surrogate}
+ //
+ // For example, if the name of custom info type is 'MY_TOKEN_INFO_TYPE' and
+ // the surrogate is 'abc', the full replacement value
+ // will be: 'MY_TOKEN_INFO_TYPE(3):abc'
+ //
+ // This annotation identifies the surrogate when inspecting content using the
+ // custom info type 'Surrogate'. This facilitates reversal of the
+ // surrogate when it occurs in free text.
+ //
+ // Note: For record transformations where the entire cell in a table is being
+ // transformed, surrogates are not mandatory. Surrogates are used to denote
+ // the location of the token and are necessary for re-identification in free
+ // form text.
+ //
+ // In order for inspection to work properly, the name of this info type must
+ // not occur naturally anywhere in your data; otherwise, inspection may either
+ //
+ // - reverse a surrogate that does not correspond to an actual identifier
+ // - be unable to parse the surrogate and result in an error
+ //
+ // Therefore, choose your custom info type name carefully after considering
+ // what your data looks like. One way to select a name that has a high chance
+ // of yielding reliable detection is to include one or more unicode characters
+ // that are highly improbable to exist in your data.
+ // For example, assuming your data is entered from a regular ASCII keyboard,
+ // the symbol with the hex code point 29DD might be used like so:
+ // ⧝MY_TOKEN_TYPE.
+ InfoType surrogate_info_type = 2;
+
+ // A context may be used for higher security and maintaining
+ // referential integrity such that the same identifier in two different
+ // contexts will be given a distinct surrogate. The context is appended to
+  // the plaintext value being encrypted. On decryption the provided context is
+  // validated against the value used during encryption. If a context was
+  // provided during encryption, the same context must be provided during
+  // decryption as well.
+ //
+ // If the context is not set, plaintext would be used as is for encryption.
+ // If the context is set but:
+ //
+ // 1. there is no record present when transforming a given value or
+ // 2. the field is not present when transforming a given value,
+ //
+ // plaintext would be used as is for encryption.
+ //
+ // Note that case (1) is expected when an `InfoTypeTransformation` is
+ // applied to both structured and non-structured `ContentItem`s.
+ FieldId context = 3;
+}
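+
+// A minimal sketch of a CryptoDeterministicConfig in proto text format, using
+// a transient key and the surrogate annotation described above (the key name
+// is illustrative):
+//
+//   crypto_key { transient { name: "my-transient-key" } }
+//   surrogate_info_type { name: "MY_TOKEN_INFO_TYPE" }
+//
+// With this configuration, each surrogate is annotated as described above, for
+// example 'MY_TOKEN_INFO_TYPE(3):abc' when the surrogate itself is 'abc'.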
+
+// Replace each input value with a given `Value`.
+message ReplaceValueConfig {
+ // Value to replace it with.
+ Value new_value = 1;
+}
+
+// Replace each matching finding with the name of the info_type.
+message ReplaceWithInfoTypeConfig {
+
+}
+
+// Redact a given value. For example, if used with an `InfoTypeTransformation`
+// transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the
+// output would be 'My phone number is '.
+message RedactConfig {
+
+}
+
+// Characters to skip when doing deidentification of a value. These will be left
+// alone and skipped.
+message CharsToIgnore {
+  // Convenience enum for indicating common characters to not transform.
+ enum CommonCharsToIgnore {
+ // Unused.
+ COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0;
+
+ // 0-9
+ NUMERIC = 1;
+
+ // A-Z
+ ALPHA_UPPER_CASE = 2;
+
+ // a-z
+ ALPHA_LOWER_CASE = 3;
+
+ // US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[\]^_`{|}~
+ PUNCTUATION = 4;
+
+ // Whitespace character, one of [ \t\n\x0B\f\r]
+ WHITESPACE = 5;
+ }
+
+ oneof characters {
+ // Characters to not transform when masking.
+ string characters_to_skip = 1;
+
+ // Common characters to not transform when masking. Useful to avoid removing
+ // punctuation.
+ CommonCharsToIgnore common_characters_to_ignore = 2;
+ }
+}
+
+// Partially mask a string by replacing a given number of characters with a
+// fixed character. Masking can start from the beginning or end of the string.
+// This can be used on data of any type (numbers, longs, and so on), and when
+// de-identifying structured data we'll attempt to preserve the original data's
+// type. (This allows you to take a long like 123 and modify it to a string like
+// **3.)
+message CharacterMaskConfig {
+ // Character to use to mask the sensitive values—for example, `*` for an
+ // alphabetic string such as a name, or `0` for a numeric string such as ZIP
+ // code or credit card number. This string must have a length of 1. If not
+ // supplied, this value defaults to `*` for strings, and `0` for digits.
+ string masking_character = 1;
+
+ // Number of characters to mask. If not set, all matching chars will be
+ // masked. Skipped characters do not count towards this tally.
+ int32 number_to_mask = 2;
+
+ // Mask characters in reverse order. For example, if `masking_character` is
+ // `0`, `number_to_mask` is `14`, and `reverse_order` is `false`, then the
+ // input string `1234-5678-9012-3456` is masked as `00000000000000-3456`.
+ // If `masking_character` is `*`, `number_to_mask` is `3`, and `reverse_order`
+ // is `true`, then the string `12345` is masked as `12***`.
+ bool reverse_order = 3;
+
+ // When masking a string, items in this list will be skipped when replacing
+ // characters. For example, if the input string is `555-555-5555` and you
+ // instruct Cloud DLP to skip `-` and mask 5 characters with `*`, Cloud DLP
+ // returns `***-**5-5555`.
+ repeated CharsToIgnore characters_to_ignore = 4;
+}
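+
+// A minimal sketch of a CharacterMaskConfig in proto text format matching the
+// example above (skip '-' and mask 5 characters with '*', so '555-555-5555'
+// becomes '***-**5-5555'):
+//
+//   masking_character: "*"
+//   number_to_mask: 5
+//   characters_to_ignore { characters_to_skip: "-" }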
+
+// Buckets values based on fixed size ranges. The
+// Bucketing transformation can provide all of this functionality,
+// but requires more configuration. This message is provided as a convenience to
+// the user for simple bucketing strategies.
+//
+// The transformed value will be a hyphenated string of
+// {lower_bound}-{upper_bound}, i.e. if lower_bound = 10 and upper_bound = 20
+// all values that are within this bucket will be replaced with "10-20".
+//
+// This can be used on data of type: double, long.
+//
+// If the bound Value type differs from the type of data
+// being transformed, we will first attempt converting the type of the data to
+// be transformed to match the type of the bound before comparing.
+//
+// See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more.
+message FixedSizeBucketingConfig {
+ // Required. Lower bound value of buckets. All values less than `lower_bound` are
+ // grouped together into a single bucket; for example if `lower_bound` = 10,
+  // then all values less than 10 are replaced with the value "-10".
+ Value lower_bound = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Upper bound value of buckets. All values greater than upper_bound are
+ // grouped together into a single bucket; for example if `upper_bound` = 89,
+  // then all values greater than 89 are replaced with the value "89+".
+ Value upper_bound = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Size of each bucket (except for minimum and maximum buckets). So if
+ // `lower_bound` = 10, `upper_bound` = 89, and `bucket_size` = 10, then the
+ // following buckets would be used: -10, 10-20, 20-30, 30-40, 40-50, 50-60,
+ // 60-70, 70-80, 80-89, 89+. Precision up to 2 decimals works.
+ double bucket_size = 3 [(google.api.field_behavior) = REQUIRED];
+}
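+
+// A minimal sketch of a FixedSizeBucketingConfig in proto text format matching
+// the bounds used in the comments above:
+//
+//   lower_bound { integer_value: 10 }
+//   upper_bound { integer_value: 89 }
+//   bucket_size: 10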
+
+// Generalization function that buckets values based on ranges. The ranges and
+// replacement values are dynamically provided by the user for custom behavior,
+// such as 1-30 -> LOW, 31-65 -> MEDIUM, 66-100 -> HIGH.
+// This can be used on data of type: number, long, string, timestamp.
+// If the bound `Value` type differs from the type of data being transformed, we
+// will first attempt converting the type of the data to be transformed to match
+// the type of the bound before comparing.
+// See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more.
+message BucketingConfig {
+ // Bucket is represented as a range, along with replacement values.
+ message Bucket {
+ // Lower bound of the range, inclusive. Type should be the same as max if
+ // used.
+ Value min = 1;
+
+ // Upper bound of the range, exclusive; type must match min.
+ Value max = 2;
+
+ // Replacement value for this bucket. If not provided
+ // the default behavior will be to hyphenate the min-max range.
+ Value replacement_value = 3;
+ }
+
+ // Set of buckets. Ranges must be non-overlapping.
+ repeated Bucket buckets = 1;
+}
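+
+// A minimal sketch of a BucketingConfig in proto text format for the
+// 1-30 -> LOW, 31-65 -> MEDIUM mapping mentioned above (max is exclusive, so
+// the first bucket's max is 31):
+//
+//   buckets {
+//     min { integer_value: 1 }
+//     max { integer_value: 31 }
+//     replacement_value { string_value: "LOW" }
+//   }
+//   buckets {
+//     min { integer_value: 31 }
+//     max { integer_value: 66 }
+//     replacement_value { string_value: "MEDIUM" }
+//   }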
+
+// Replaces an identifier with a surrogate using Format Preserving Encryption
+// (FPE) with the FFX mode of operation; however when used in the
+// `ReidentifyContent` API method, it serves the opposite function by reversing
+// the surrogate back into the original identifier. The identifier must be
+// encoded as ASCII. For a given crypto key and context, the same identifier
+// will be replaced with the same surrogate. Identifiers must be at least two
+// characters long. In the case that the identifier is the empty string, it will
+// be skipped. See https://cloud.google.com/dlp/docs/pseudonymization to learn
+// more.
+//
+// Note: We recommend using CryptoDeterministicConfig for all use cases that
+// do not require preserving the input alphabet space and size but do warrant
+// referential integrity.
+message CryptoReplaceFfxFpeConfig {
+ // These are commonly used subsets of the alphabet that the FFX mode
+  // natively supports. In the algorithm, the alphabet is selected using
+  // the "radix". Therefore each corresponds to a particular radix.
+ enum FfxCommonNativeAlphabet {
+ // Unused.
+ FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0;
+
+ // [0-9] (radix of 10)
+ NUMERIC = 1;
+
+ // [0-9A-F] (radix of 16)
+ HEXADECIMAL = 2;
+
+ // [0-9A-Z] (radix of 36)
+ UPPER_CASE_ALPHA_NUMERIC = 3;
+
+ // [0-9A-Za-z] (radix of 62)
+ ALPHA_NUMERIC = 4;
+ }
+
+ // Required. The key used by the encryption algorithm.
+ CryptoKey crypto_key = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // The 'tweak', a context, may be used for higher security since the same
+ // identifier in two different contexts won't be given the same surrogate. If
+ // the context is not set, a default tweak will be used.
+ //
+ // If the context is set but:
+ //
+ // 1. there is no record present when transforming a given value or
+  // 2. the field is not present when transforming a given value,
+ //
+ // a default tweak will be used.
+ //
+ // Note that case (1) is expected when an `InfoTypeTransformation` is
+ // applied to both structured and non-structured `ContentItem`s.
+ // Currently, the referenced field may be of value type integer or string.
+ //
+ // The tweak is constructed as a sequence of bytes in big endian byte order
+ // such that:
+ //
+ // - a 64 bit integer is encoded followed by a single byte of value 1
+ // - a string is encoded in UTF-8 format followed by a single byte of value 2
+ FieldId context = 2;
+
+ // Choose an alphabet which the data being transformed will be made up of.
+ oneof alphabet {
+ // Common alphabets.
+ FfxCommonNativeAlphabet common_alphabet = 4;
+
+ // This is supported by mapping these to the alphanumeric characters
+ // that the FFX mode natively supports. This happens before/after
+ // encryption/decryption.
+ // Each character listed must appear only once.
+ // Number of characters must be in the range [2, 95].
+ // This must be encoded as ASCII.
+ // The order of characters does not matter.
+ string custom_alphabet = 5;
+
+ // The native way to select the alphabet. Must be in the range [2, 95].
+ int32 radix = 6;
+ }
+
+ // The custom infoType to annotate the surrogate with.
+ // This annotation will be applied to the surrogate by prefixing it with
+ // the name of the custom infoType followed by the number of
+ // characters comprising the surrogate. The following scheme defines the
+ // format: info_type_name(surrogate_character_count):surrogate
+ //
+ // For example, if the name of custom infoType is 'MY_TOKEN_INFO_TYPE' and
+ // the surrogate is 'abc', the full replacement value
+ // will be: 'MY_TOKEN_INFO_TYPE(3):abc'
+ //
+ // This annotation identifies the surrogate when inspecting content using the
+ // custom infoType
+ // [`SurrogateType`](/dlp/docs/reference/rest/v2/InspectConfig#surrogatetype).
+ // This facilitates reversal of the surrogate when it occurs in free text.
+ //
+ // In order for inspection to work properly, the name of this infoType must
+ // not occur naturally anywhere in your data; otherwise, inspection may
+ // find a surrogate that does not correspond to an actual identifier.
+ // Therefore, choose your custom infoType name carefully after considering
+ // what your data looks like. One way to select a name that has a high chance
+ // of yielding reliable detection is to include one or more unicode characters
+ // that are highly improbable to exist in your data.
+ // For example, assuming your data is entered from a regular ASCII keyboard,
+ // the symbol with the hex code point 29DD might be used like so:
+ // ⧝MY_TOKEN_TYPE
+ InfoType surrogate_info_type = 8;
+}
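+
+// A minimal sketch of a CryptoReplaceFfxFpeConfig in proto text format, using
+// a transient key and a common alphabet (the key name is illustrative):
+//
+//   crypto_key { transient { name: "my-transient-key" } }
+//   common_alphabet: ALPHA_NUMERIC
+//   surrogate_info_type { name: "MY_TOKEN_INFO_TYPE" }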
+
+// This is a data encryption key (DEK) (as opposed to
+// a key encryption key (KEK) stored by KMS).
+// When using KMS to wrap/unwrap DEKs, be sure to set an appropriate
+// IAM policy on the KMS CryptoKey (KEK) to ensure an attacker cannot
+// unwrap the data crypto key.
+message CryptoKey {
+ // Sources of crypto keys.
+ oneof source {
+ // Transient crypto key
+ TransientCryptoKey transient = 1;
+
+ // Unwrapped crypto key
+ UnwrappedCryptoKey unwrapped = 2;
+
+ // Kms wrapped key
+ KmsWrappedCryptoKey kms_wrapped = 3;
+ }
+}
+
+// Use this to have a random data crypto key generated.
+// It will be discarded after the request finishes.
+message TransientCryptoKey {
+ // Required. Name of the key.
+ // This is an arbitrary string used to differentiate different keys.
+ // A unique key is generated per name: two separate `TransientCryptoKey`
+ // protos share the same generated key if their names are the same.
+ // When the data crypto key is generated, this name is not used in any way
+  // (repeating the API call will result in a different key being generated).
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Using raw keys is prone to security risks due to accidentally
+// leaking the key. Choose another type of key if possible.
+message UnwrappedCryptoKey {
+ // Required. A 128/192/256 bit key.
+ bytes key = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Include to use an existing data crypto key wrapped by KMS.
+// The wrapped key must be a 128/192/256 bit key.
+// Authorization requires the following IAM permissions when sending a request
+// to perform a crypto transformation using a KMS-wrapped crypto key:
+// dlp.kms.encrypt
+message KmsWrappedCryptoKey {
+ // Required. The wrapped data crypto key.
+ bytes wrapped_key = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The resource name of the KMS CryptoKey to use for unwrapping.
+ string crypto_key_name = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Shifts dates by random number of days, with option to be consistent for the
+// same context. See https://cloud.google.com/dlp/docs/concepts-date-shifting
+// to learn more.
+message DateShiftConfig {
+ // Required. Range of shift in days. Actual shift will be selected at random within this
+ // range (inclusive ends). Negative means shift to earlier in time. Must not
+ // be more than 365250 days (1000 years) each direction.
+ //
+ // For example, 3 means shift date to at most 3 days into the future.
+ int32 upper_bound_days = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. For example, -5 means shift date to at most 5 days back in the past.
+ int32 lower_bound_days = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // Points to the field that contains the context, for example, an entity id.
+ // If set, must also set cryptoKey. If set, shift will be consistent for the
+ // given context.
+ FieldId context = 3;
+
+ // Method for calculating shift that takes context into consideration. If
+ // set, must also set context. Can only be applied to table items.
+ oneof method {
+ // Causes the shift to be computed based on this key and the context. This
+ // results in the same shift for the same context and crypto_key. If
+ // set, must also set context. Can only be applied to table items.
+ CryptoKey crypto_key = 4;
+ }
+}
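+
+// A minimal sketch of a DateShiftConfig in proto text format using the bounds
+// from the examples above (shift each date by at most 3 days into the future
+// or 5 days into the past):
+//
+//   upper_bound_days: 3
+//   lower_bound_days: -5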
+
+// A type of transformation that will scan unstructured text and
+// apply various `PrimitiveTransformation`s to each finding, where the
+// transformation is applied to only values that were identified as a specific
+// info_type.
+message InfoTypeTransformations {
+ // A transformation to apply to text that is identified as a specific
+ // info_type.
+ message InfoTypeTransformation {
+ // InfoTypes to apply the transformation to. An empty list will cause
+ // this transformation to apply to all findings that correspond to
+ // infoTypes that were requested in `InspectConfig`.
+ repeated InfoType info_types = 1;
+
+ // Required. Primitive transformation to apply to the infoType.
+ PrimitiveTransformation primitive_transformation = 2 [(google.api.field_behavior) = REQUIRED];
+ }
+
+ // Required. Transformation for each infoType. Cannot specify more than one
+ // for a given infoType.
+ repeated InfoTypeTransformation transformations = 1 [(google.api.field_behavior) = REQUIRED];
+}
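+
+// A minimal sketch of an InfoTypeTransformations in proto text format that
+// replaces every EMAIL_ADDRESS finding with its infoType name (the infoType
+// chosen here is illustrative):
+//
+//   transformations {
+//     info_types { name: "EMAIL_ADDRESS" }
+//     primitive_transformation { replace_with_info_type_config {} }
+//   }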
+
+// The transformation to apply to the field.
+message FieldTransformation {
+ // Required. Input field(s) to apply the transformation to.
+ repeated FieldId fields = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Only apply the transformation if the condition evaluates to true for the
+ // given `RecordCondition`. The conditions are allowed to reference fields
+ // that are not used in the actual transformation.
+ //
+ // Example Use Cases:
+ //
+ // - Apply a different bucket transformation to an age column if the zip code
+ // column for the same record is within a specific range.
+ // - Redact a field if the date of birth field is greater than 85.
+ RecordCondition condition = 3;
+
+ // Transformation to apply. [required]
+ oneof transformation {
+ // Apply the transformation to the entire field.
+ PrimitiveTransformation primitive_transformation = 4;
+
+ // Treat the contents of the field as free text, and selectively
+ // transform content that matches an `InfoType`.
+ InfoTypeTransformations info_type_transformations = 5;
+ }
+}
+
+// A type of transformation that is applied over structured data such as a
+// table.
+message RecordTransformations {
+ // Transform the record by applying various field transformations.
+ repeated FieldTransformation field_transformations = 1;
+
+ // Configuration defining which records get suppressed entirely. Records that
+ // match any suppression rule are omitted from the output.
+ repeated RecordSuppression record_suppressions = 2;
+}
+
+// Configuration to suppress records whose suppression conditions evaluate to
+// true.
+message RecordSuppression {
+  // A condition that, when it evaluates to true, will result in the record
+  // being suppressed from the transformed content.
+ RecordCondition condition = 1;
+}
+
+// A condition for determining whether a transformation should be applied to
+// a field.
+message RecordCondition {
+ // The field type of `value` and `field` do not need to match to be
+ // considered equal, but not all comparisons are possible.
+ // EQUAL_TO and NOT_EQUAL_TO attempt to compare even with incompatible types,
+ // but all other comparisons are invalid with incompatible types.
+ // A `value` of type:
+ //
+ // - `string` can be compared against all other types
+ // - `boolean` can only be compared against other booleans
+ // - `integer` can be compared against doubles or a string if the string value
+ // can be parsed as an integer.
+ // - `double` can be compared against integers or a string if the string can
+ // be parsed as a double.
+ // - `Timestamp` can be compared against strings in RFC 3339 date string
+ // format.
+ // - `TimeOfDay` can be compared against timestamps and strings in the format
+ // of 'HH:mm:ss'.
+ //
+  // If we fail to compare due to type mismatch, a warning will be given and
+ // the condition will evaluate to false.
+ message Condition {
+ // Required. Field within the record this condition is evaluated against.
+ FieldId field = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. Operator used to compare the field or infoType to the value.
+ RelationalOperator operator = 3 [(google.api.field_behavior) = REQUIRED];
+
+ // Value to compare against. [Mandatory, except for `EXISTS` tests.]
+ Value value = 4;
+ }
+
+ // A collection of conditions.
+ message Conditions {
+ // A collection of conditions.
+ repeated Condition conditions = 1;
+ }
+
+  // An expression, consisting of an operator and conditions.
+ message Expressions {
+ // Logical operators for conditional checks.
+ enum LogicalOperator {
+ // Unused
+ LOGICAL_OPERATOR_UNSPECIFIED = 0;
+
+ // Conditional AND
+ AND = 1;
+ }
+
+ // The operator to apply to the result of conditions. The default and currently
+ // only supported value is `AND`.
+ LogicalOperator logical_operator = 1;
+
+ // Expression types.
+ oneof type {
+ // Conditions to apply to the expression.
+ Conditions conditions = 3;
+ }
+ }
+
+ // An expression.
+ Expressions expressions = 3;
+}
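+
+// A minimal sketch of a RecordCondition in proto text format that evaluates to
+// true when a (hypothetical) "age" field is greater than 85, assuming the
+// RelationalOperator enum defines GREATER_THAN:
+//
+//   expressions {
+//     logical_operator: AND
+//     conditions {
+//       conditions {
+//         field { name: "age" }
+//         operator: GREATER_THAN
+//         value { integer_value: 85 }
+//       }
+//     }
+//   }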
+
+// Overview of the modifications that occurred.
+message TransformationOverview {
+ // Total size in bytes that were transformed in some way.
+ int64 transformed_bytes = 2;
+
+ // Transformations applied to the dataset.
+ repeated TransformationSummary transformation_summaries = 3;
+}
+
+// Summary of a single transformation.
+// Only one of 'transformation', 'field_transformation', or 'record_suppress'
+// will be set.
+message TransformationSummary {
+ // A collection that informs the user the number of times a particular
+ // `TransformationResultCode` and error details occurred.
+ message SummaryResult {
+ // Number of transformations counted by this result.
+ int64 count = 1;
+
+ // Outcome of the transformation.
+ TransformationResultCode code = 2;
+
+ // A place for warnings or errors to show up if a transformation didn't
+ // work as expected.
+ string details = 3;
+ }
+
+ // Possible outcomes of transformations.
+ enum TransformationResultCode {
+ // Unused
+ TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0;
+
+ // Transformation completed without an error.
+ SUCCESS = 1;
+
+ // Transformation had an error.
+ ERROR = 2;
+ }
+
+ // Set if the transformation was limited to a specific InfoType.
+ InfoType info_type = 1;
+
+ // Set if the transformation was limited to a specific FieldId.
+ FieldId field = 2;
+
+ // The specific transformation these stats apply to.
+ PrimitiveTransformation transformation = 3;
+
+ // The field transformation that was applied.
+ // If multiple field transformations are requested for a single field,
+ // this list will contain all of them; otherwise, only one is supplied.
+ repeated FieldTransformation field_transformations = 5;
+
+ // The specific suppression option these stats apply to.
+ RecordSuppression record_suppress = 6;
+
+ // Collection of all transformations that took place or had an error.
+ repeated SummaryResult results = 4;
+
+ // Total size in bytes that were transformed in some way.
+ int64 transformed_bytes = 7;
+}
+
+// Schedule for triggeredJobs.
+message Schedule {
+ oneof option {
+ // With this option a job is started on a regular periodic basis. For
+ // example: every day (86400 seconds).
+ //
+ // A scheduled start time will be skipped if the previous
+ // execution has not ended when its scheduled time occurs.
+ //
+ // This value must be set to a time duration greater than or equal
+ // to 1 day and can be no longer than 60 days.
+ google.protobuf.Duration recurrence_period_duration = 1;
+ }
+}
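+
+// A minimal sketch of a Schedule in proto text format that starts a job once
+// per day (86400 seconds, the example used above):
+//
+//   recurrence_period_duration { seconds: 86400 }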
+
+// The inspectTemplate contains a configuration (set of types of sensitive data
+// to be detected) to be used anywhere you otherwise would normally specify
+// InspectConfig. See https://cloud.google.com/dlp/docs/concepts-templates
+// to learn more.
+message InspectTemplate {
+ option (google.api.resource) = {
+ type: "dlp.googleapis.com/InspectTemplate"
+ pattern: "organizations/{organization}/inspectTemplates/{inspect_template}"
+ pattern: "projects/{project}/inspectTemplates/{inspect_template}"
+ };
+
+ // Output only. The template name.
+ //
+ // The template will have one of the following formats:
+ // `projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID` OR
+ // `organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID`;
+ string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Display name (max 256 chars).
+ string display_name = 2;
+
+ // Short description (max 256 chars).
+ string description = 3;
+
+ // Output only. The creation timestamp of an inspectTemplate.
+ google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The last update timestamp of an inspectTemplate.
+ google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // The core content of the template. Configuration of the scanning process.
+ InspectConfig inspect_config = 6;
+}
+
+// A DeidentifyTemplate contains instructions on how to de-identify content.
+// See https://cloud.google.com/dlp/docs/concepts-templates to learn more.
+message DeidentifyTemplate {
+ option (google.api.resource) = {
+ type: "dlp.googleapis.com/DeidentifyTemplate"
+ pattern: "organizations/{organization}/deidentifyTemplates/{deidentify_template}"
+ pattern: "projects/{project}/deidentifyTemplates/{deidentify_template}"
+ };
+
+ // Output only. The template name.
+ //
+ // The template will have one of the following formats:
+ // `projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID` OR
+ // `organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`
+ string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Display name (max 256 chars).
+ string display_name = 2;
+
+ // Short description (max 256 chars).
+ string description = 3;
+
+  // Output only. The creation timestamp of a deidentifyTemplate.
+ google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // Output only. The last update timestamp of a deidentifyTemplate.
+ google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // The core content of the template.
+ DeidentifyConfig deidentify_config = 6;
+}
+
+// Detailed information about an error encountered during job execution or
+// the results of an unsuccessful activation of the JobTrigger.
+message Error {
+ // Detailed error codes and messages.
+ google.rpc.Status details = 1;
+
+ // The times the error occurred.
+ repeated google.protobuf.Timestamp timestamps = 2;
+}
+
+// Contains a configuration to make DLP API calls on a repeating basis.
+// See https://cloud.google.com/dlp/docs/concepts-job-triggers to learn more.
+message JobTrigger {
+ option (google.api.resource) = {
+ type: "dlp.googleapis.com/JobTrigger"
+ pattern: "projects/{project}/jobTriggers/{job_trigger}"
+ };
+
+ // What event needs to occur for a new job to be started.
+ message Trigger {
+ oneof trigger {
+ // Create a job on a repeating basis based on the elapse of time.
+ Schedule schedule = 1;
+ }
+ }
+
+ // Whether the trigger is currently active. If PAUSED or CANCELLED, no jobs
+ // will be created with this configuration. The service may automatically
+ // pause triggers experiencing frequent errors. To restart a job, set the
+ // status to HEALTHY after correcting user errors.
+ enum Status {
+ // Unused.
+ STATUS_UNSPECIFIED = 0;
+
+ // Trigger is healthy.
+ HEALTHY = 1;
+
+ // Trigger is temporarily paused.
+ PAUSED = 2;
+
+ // Trigger is cancelled and can not be resumed.
+ CANCELLED = 3;
+ }
+
+ // Unique resource name for the triggeredJob, assigned by the service when the
+ // triggeredJob is created, for example
+ // `projects/dlp-test-project/jobTriggers/53234423`.
+ string name = 1;
+
+ // Display name (max 100 chars)
+ string display_name = 2;
+
+ // User provided description (max 256 chars)
+ string description = 3;
+
+ // The configuration details for the specific type of job to run.
+ oneof job {
+ // For inspect jobs, a snapshot of the configuration.
+ InspectJobConfig inspect_job = 4;
+ }
+
+ // A list of triggers which will be OR'ed together. Only one in the list
+ // needs to trigger for a job to be started. The list may contain only
+ // a single Schedule trigger and must have at least one object.
+ repeated Trigger triggers = 5;
+
+ // Output only. A stream of errors encountered when the trigger was activated. Repeated
+ // errors may result in the JobTrigger automatically being paused.
+ // Will return the last 100 errors. Whenever the JobTrigger is modified
+ // this list will be cleared.
+ repeated Error errors = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The creation timestamp of a triggeredJob.
+ google.protobuf.Timestamp create_time = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The last update timestamp of a triggeredJob.
+ google.protobuf.Timestamp update_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The timestamp of the last time this trigger executed.
+ google.protobuf.Timestamp last_run_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Required. A status for this trigger.
+ Status status = 10 [(google.api.field_behavior) = REQUIRED];
+}
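+
+// Illustrative sketch only (not part of the API surface): a minimal
+// `JobTrigger` in proto text format might look like the following; the
+// display name and schedule are hypothetical example values.
+//
+//   display_name: "Daily scan"
+//   triggers { schedule { recurrence_period_duration { seconds: 86400 } } }
+//   status: HEALTHY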
+
+// A task to execute on the completion of a job.
+// See https://cloud.google.com/dlp/docs/concepts-actions to learn more.
+message Action {
+ // If set, the detailed findings will be persisted to the specified
+ // OutputStorageConfig. Only a single instance of this action can be
+ // specified.
+ // Compatible with: Inspect, Risk
+ message SaveFindings {
+ // Location to store findings outside of DLP.
+ OutputStorageConfig output_config = 1;
+ }
+
+  // Publish a message into the given Pub/Sub topic when the DlpJob has
+  // completed. The message contains a single field, `DlpJobName`, which is
+  // equal to the finished job's
+ // [`DlpJob.name`](/dlp/docs/reference/rest/v2/projects.dlpJobs#DlpJob).
+ // Compatible with: Inspect, Risk
+ message PublishToPubSub {
+    // Cloud Pub/Sub topic to send notifications to. The topic must have
+    // granted publishing access rights to the DLP API service account that
+    // executes the long-running DlpJob sending the notifications.
+ // Format is projects/{project}/topics/{topic}.
+ string topic = 1;
+ }
+
+ // Publish the result summary of a DlpJob to the Cloud Security
+ // Command Center (CSCC Alpha).
+  // This action is only available for projects which are part of
+ // an organization and whitelisted for the alpha Cloud Security Command
+ // Center.
+ // The action will publish count of finding instances and their info types.
+ // The summary of findings will be persisted in CSCC and are governed by CSCC
+ // service-specific policy, see https://cloud.google.com/terms/service-terms
+ // Only a single instance of this action can be specified.
+ // Compatible with: Inspect
+ message PublishSummaryToCscc {
+
+ }
+
+ // Publish findings of a DlpJob to Cloud Data Catalog. Labels summarizing the
+ // results of the DlpJob will be applied to the entry for the resource scanned
+ // in Cloud Data Catalog. Any labels previously written by another DlpJob will
+ // be deleted. InfoType naming patterns are strictly enforced when using this
+ // feature. Note that the findings will be persisted in Cloud Data Catalog
+ // storage and are governed by Data Catalog service-specific policy, see
+ // https://cloud.google.com/terms/service-terms
+ // Only a single instance of this action can be specified and only allowed if
+ // all resources being scanned are BigQuery tables.
+ // Compatible with: Inspect
+ message PublishFindingsToCloudDataCatalog {
+
+ }
+
+  // Enable email notification to project owners and editors on job's
+ // completion/failure.
+ message JobNotificationEmails {
+
+ }
+
+ // Enable Stackdriver metric dlp.googleapis.com/finding_count. This
+  // will publish a metric to Stackdriver for each infoType requested and
+ // how many findings were found for it. CustomDetectors will be bucketed
+ // as 'Custom' under the Stackdriver label 'info_type'.
+ message PublishToStackdriver {
+
+ }
+
+ oneof action {
+ // Save resulting findings in a provided location.
+ SaveFindings save_findings = 1;
+
+ // Publish a notification to a pubsub topic.
+ PublishToPubSub pub_sub = 2;
+
+ // Publish summary to Cloud Security Command Center (Alpha).
+ PublishSummaryToCscc publish_summary_to_cscc = 3;
+
+    // Publish findings to Cloud Data Catalog.
+ PublishFindingsToCloudDataCatalog publish_findings_to_cloud_data_catalog = 5;
+
+ // Enable email notification for project owners and editors on job's
+ // completion/failure.
+ JobNotificationEmails job_notification_emails = 8;
+
+ // Enable Stackdriver metric dlp.googleapis.com/finding_count.
+ PublishToStackdriver publish_to_stackdriver = 9;
+ }
+}
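+
+// Illustrative sketch only (not part of the API surface): an `Action` that
+// publishes a Pub/Sub notification might look like the following in proto
+// text format; the project and topic names are placeholders.
+//
+//   pub_sub { topic: "projects/my-project/topics/dlp-notifications" }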
+
+// Request message for CreateInspectTemplate.
+message CreateInspectTemplateRequest {
+ // Required. The parent resource name, for example projects/my-project-id or
+ // organizations/my-org-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ child_type: "dlp.googleapis.com/InspectTemplate"
+ }
+ ];
+
+ // Required. The InspectTemplate to create.
+ InspectTemplate inspect_template = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // The template id can contain uppercase and lowercase letters,
+  // numbers, hyphens, and underscores; that is, it must match the regular
+ // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100
+ // characters. Can be empty to allow the system to generate one.
+ string template_id = 3;
+
+ // The geographic location to store the inspection template. Reserved for
+ // future extensions.
+ string location_id = 4;
+}
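+
+// Illustrative sketch only (not part of the API surface): a
+// `CreateInspectTemplateRequest` in proto text format might look like the
+// following; the parent, template id, and display name are placeholders.
+//
+//   parent: "projects/my-project-id"
+//   template_id: "my-inspect-template"
+//   inspect_template { display_name: "My inspect template" }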
+
+// Request message for UpdateInspectTemplate.
+message UpdateInspectTemplateRequest {
+ // Required. Resource name of organization and inspectTemplate to be updated, for
+ // example `organizations/433245324/inspectTemplates/432452342` or
+ // projects/project-id/inspectTemplates/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/InspectTemplate"
+ }
+ ];
+
+ // New InspectTemplate value.
+ InspectTemplate inspect_template = 2;
+
+ // Mask to control which fields get updated.
+ google.protobuf.FieldMask update_mask = 3;
+}
+
+// Request message for GetInspectTemplate.
+message GetInspectTemplateRequest {
+ // Required. Resource name of the organization and inspectTemplate to be read, for
+ // example `organizations/433245324/inspectTemplates/432452342` or
+ // projects/project-id/inspectTemplates/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/InspectTemplate"
+ }
+ ];
+}
+
+// Request message for ListInspectTemplates.
+message ListInspectTemplatesRequest {
+ // Required. The parent resource name, for example projects/my-project-id or
+ // organizations/my-org-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ child_type: "dlp.googleapis.com/InspectTemplate"
+ }
+ ];
+
+ // Page token to continue retrieval. Comes from previous call
+ // to `ListInspectTemplates`.
+ string page_token = 2;
+
+ // Size of the page, can be limited by server. If zero server returns
+ // a page of max size 100.
+ int32 page_size = 3;
+
+ // Comma separated list of fields to order by,
+ // followed by `asc` or `desc` postfix. This list is case-insensitive,
+ // default sorting order is ascending, redundant space characters are
+ // insignificant.
+ //
+ // Example: `name asc,update_time, create_time desc`
+ //
+ // Supported fields are:
+ //
+ // - `create_time`: corresponds to time the template was created.
+ // - `update_time`: corresponds to time the template was last updated.
+ // - `name`: corresponds to template's name.
+ // - `display_name`: corresponds to template's display name.
+ string order_by = 4;
+
+ // The geographic location where inspection templates will be retrieved from.
+ // Use `-` for all locations. Reserved for future extensions.
+ string location_id = 5;
+}
+
+// Response message for ListInspectTemplates.
+message ListInspectTemplatesResponse {
+ // List of inspectTemplates, up to page_size in ListInspectTemplatesRequest.
+ repeated InspectTemplate inspect_templates = 1;
+
+ // If the next page is available then the next page token to be used
+ // in following ListInspectTemplates request.
+ string next_page_token = 2;
+}
+
+// Request message for DeleteInspectTemplate.
+message DeleteInspectTemplateRequest {
+ // Required. Resource name of the organization and inspectTemplate to be deleted, for
+ // example `organizations/433245324/inspectTemplates/432452342` or
+ // projects/project-id/inspectTemplates/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/InspectTemplate"
+ }
+ ];
+}
+
+// Request message for CreateJobTrigger.
+message CreateJobTriggerRequest {
+ // Required. The parent resource name, for example projects/my-project-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }
+ ];
+
+ // Required. The JobTrigger to create.
+ JobTrigger job_trigger = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // The trigger id can contain uppercase and lowercase letters,
+  // numbers, hyphens, and underscores; that is, it must match the regular
+ // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100
+ // characters. Can be empty to allow the system to generate one.
+ string trigger_id = 3;
+
+ // The geographic location to store the job trigger. Reserved for
+ // future extensions.
+ string location_id = 4;
+}
+
+// Request message for ActivateJobTrigger.
+message ActivateJobTriggerRequest {
+ // Required. Resource name of the trigger to activate, for example
+ // `projects/dlp-test-project/jobTriggers/53234423`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/JobTrigger"
+ }
+ ];
+}
+
+// Request message for UpdateJobTrigger.
+message UpdateJobTriggerRequest {
+ // Required. Resource name of the project and the triggeredJob, for example
+ // `projects/dlp-test-project/jobTriggers/53234423`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/JobTrigger"
+ }
+ ];
+
+ // New JobTrigger value.
+ JobTrigger job_trigger = 2;
+
+ // Mask to control which fields get updated.
+ google.protobuf.FieldMask update_mask = 3;
+}
+
+// Request message for GetJobTrigger.
+message GetJobTriggerRequest {
+ // Required. Resource name of the project and the triggeredJob, for example
+ // `projects/dlp-test-project/jobTriggers/53234423`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/JobTrigger"
+ }
+ ];
+}
+
+// Request message for CreateDlpJobRequest. Used to initiate long running
+// jobs such as calculating risk metrics or inspecting Google Cloud
+// Storage.
+message CreateDlpJobRequest {
+ // Required. The parent resource name, for example projects/my-project-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }
+ ];
+
+ // The configuration details for the specific type of job to run.
+ oneof job {
+ // Set to control what and how to inspect.
+ InspectJobConfig inspect_job = 2;
+
+ // Set to choose what metric to calculate.
+ RiskAnalysisJobConfig risk_job = 3;
+ }
+
+ // The job id can contain uppercase and lowercase letters,
+  // numbers, hyphens, and underscores; that is, it must match the regular
+ // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100
+ // characters. Can be empty to allow the system to generate one.
+ string job_id = 4;
+
+ // The geographic location to store and process the job. Reserved for
+ // future extensions.
+ string location_id = 5;
+}
+
+// Request message for ListJobTriggers.
+message ListJobTriggersRequest {
+ // Required. The parent resource name, for example `projects/my-project-id`.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }
+ ];
+
+ // Page token to continue retrieval. Comes from previous call
+ // to ListJobTriggers. `order_by` field must not
+ // change for subsequent calls.
+ string page_token = 2;
+
+ // Size of the page, can be limited by a server.
+ int32 page_size = 3;
+
+ // Comma separated list of triggeredJob fields to order by,
+ // followed by `asc` or `desc` postfix. This list is case-insensitive,
+ // default sorting order is ascending, redundant space characters are
+ // insignificant.
+ //
+ // Example: `name asc,update_time, create_time desc`
+ //
+ // Supported fields are:
+ //
+ // - `create_time`: corresponds to time the JobTrigger was created.
+ // - `update_time`: corresponds to time the JobTrigger was last updated.
+ // - `last_run_time`: corresponds to the last time the JobTrigger ran.
+ // - `name`: corresponds to JobTrigger's name.
+ // - `display_name`: corresponds to JobTrigger's display name.
+ // - `status`: corresponds to JobTrigger's status.
+ string order_by = 4;
+
+ // Allows filtering.
+ //
+ // Supported syntax:
+ //
+ // * Filter expressions are made up of one or more restrictions.
+ // * Restrictions can be combined by `AND` or `OR` logical operators. A
+ // sequence of restrictions implicitly uses `AND`.
+ // * A restriction has the form of `{field} {operator} {value}`.
+ // * Supported fields/values for inspect jobs:
+ // - `status` - HEALTHY|PAUSED|CANCELLED
+ // - `inspected_storage` - DATASTORE|CLOUD_STORAGE|BIGQUERY
+  //     - `last_run_time` - RFC 3339 formatted timestamp, surrounded by
+ // quotation marks. Nanoseconds are ignored.
+  //     - `error_count` - Number of errors that have occurred while running.
+ // * The operator must be `=` or `!=` for status and inspected_storage.
+ //
+ // Examples:
+ //
+ // * inspected_storage = cloud_storage AND status = HEALTHY
+ // * inspected_storage = cloud_storage OR inspected_storage = bigquery
+  // * inspected_storage = cloud_storage AND (status = PAUSED OR status = HEALTHY)
+ // * last_run_time > \"2017-12-12T00:00:00+00:00\"
+ //
+ // The length of this field should be no more than 500 characters.
+ string filter = 5;
+
+ // The geographic location where job triggers will be retrieved from.
+ // Use `-` for all locations. Reserved for future extensions.
+ string location_id = 7;
+}
+
+// Response message for ListJobTriggers.
+message ListJobTriggersResponse {
+ // List of triggeredJobs, up to page_size in ListJobTriggersRequest.
+ repeated JobTrigger job_triggers = 1;
+
+ // If the next page is available then the next page token to be used
+ // in following ListJobTriggers request.
+ string next_page_token = 2;
+}
+
+// Request message for DeleteJobTrigger.
+message DeleteJobTriggerRequest {
+ // Required. Resource name of the project and the triggeredJob, for example
+ // `projects/dlp-test-project/jobTriggers/53234423`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/JobTrigger"
+ }
+ ];
+}
+
+// Controls what and how to inspect for findings.
+message InspectJobConfig {
+ // The data to scan.
+ StorageConfig storage_config = 1;
+
+ // How and what to scan for.
+ InspectConfig inspect_config = 2;
+
+ // If provided, will be used as the default for all values in InspectConfig.
+ // `inspect_config` will be merged into the values persisted as part of the
+ // template.
+ string inspect_template_name = 3;
+
+ // Actions to execute at the completion of the job.
+ repeated Action actions = 4;
+}
+
+// Combines all of the information about a DLP job.
+message DlpJob {
+ option (google.api.resource) = {
+ type: "dlp.googleapis.com/DlpJob"
+ pattern: "projects/{project}/dlpJobs/{dlp_job}"
+ };
+
+ // Possible states of a job.
+ enum JobState {
+ // Unused.
+ JOB_STATE_UNSPECIFIED = 0;
+
+ // The job has not yet started.
+ PENDING = 1;
+
+ // The job is currently running.
+ RUNNING = 2;
+
+ // The job is no longer running.
+ DONE = 3;
+
+ // The job was canceled before it could complete.
+ CANCELED = 4;
+
+ // The job had an error and did not complete.
+ FAILED = 5;
+ }
+
+ // The server-assigned name.
+ string name = 1;
+
+ // The type of job.
+ DlpJobType type = 2;
+
+ // State of a job.
+ JobState state = 3;
+
+ oneof details {
+ // Results from analyzing risk of a data source.
+ AnalyzeDataSourceRiskDetails risk_details = 4;
+
+ // Results from inspecting a data source.
+ InspectDataSourceDetails inspect_details = 5;
+ }
+
+ // Time when the job was created.
+ google.protobuf.Timestamp create_time = 6;
+
+ // Time when the job started.
+ google.protobuf.Timestamp start_time = 7;
+
+ // Time when the job finished.
+ google.protobuf.Timestamp end_time = 8;
+
+ // If created by a job trigger, the resource name of the trigger that
+ // instantiated the job.
+ string job_trigger_name = 10;
+
+ // A stream of errors encountered running the job.
+ repeated Error errors = 11;
+}
+
+// The request message for [DlpJobs.GetDlpJob][].
+message GetDlpJobRequest {
+ // Required. The name of the DlpJob resource.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/DlpJob"
+ }
+ ];
+}
+
+// The request message for listing DLP jobs.
+message ListDlpJobsRequest {
+ // Required. The parent resource name, for example projects/my-project-id.
+ string parent = 4 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }
+ ];
+
+ // Allows filtering.
+ //
+ // Supported syntax:
+ //
+ // * Filter expressions are made up of one or more restrictions.
+ // * Restrictions can be combined by `AND` or `OR` logical operators. A
+ // sequence of restrictions implicitly uses `AND`.
+ // * A restriction has the form of `{field} {operator} {value}`.
+ // * Supported fields/values for inspect jobs:
+ // - `state` - PENDING|RUNNING|CANCELED|FINISHED|FAILED
+ // - `inspected_storage` - DATASTORE|CLOUD_STORAGE|BIGQUERY
+ // - `trigger_name` - The resource name of the trigger that created job.
+  //     - `end_time` - Corresponds to time the job finished.
+  //     - `start_time` - Corresponds to time the job started.
+ // * Supported fields for risk analysis jobs:
+ // - `state` - RUNNING|CANCELED|FINISHED|FAILED
+  //     - `end_time` - Corresponds to time the job finished.
+  //     - `start_time` - Corresponds to time the job started.
+ // * The operator must be `=` or `!=`.
+ //
+ // Examples:
+ //
+ // * inspected_storage = cloud_storage AND state = done
+ // * inspected_storage = cloud_storage OR inspected_storage = bigquery
+ // * inspected_storage = cloud_storage AND (state = done OR state = canceled)
+ // * end_time > \"2017-12-12T00:00:00+00:00\"
+ //
+ // The length of this field should be no more than 500 characters.
+ string filter = 1;
+
+ // The standard list page size.
+ int32 page_size = 2;
+
+ // The standard list page token.
+ string page_token = 3;
+
+  // The type of job. Defaults to `DlpJobType.INSPECT`.
+ DlpJobType type = 5;
+
+ // Comma separated list of fields to order by,
+ // followed by `asc` or `desc` postfix. This list is case-insensitive,
+ // default sorting order is ascending, redundant space characters are
+ // insignificant.
+ //
+ // Example: `name asc, end_time asc, create_time desc`
+ //
+ // Supported fields are:
+ //
+ // - `create_time`: corresponds to time the job was created.
+ // - `end_time`: corresponds to time the job ended.
+ // - `name`: corresponds to job's name.
+ // - `state`: corresponds to `state`
+ string order_by = 6;
+
+ // The geographic location where jobs will be retrieved from.
+ // Use `-` for all locations. Reserved for future extensions.
+ string location_id = 7;
+}
+
+// The response message for listing DLP jobs.
+message ListDlpJobsResponse {
+ // A list of DlpJobs that matches the specified filter in the request.
+ repeated DlpJob jobs = 1;
+
+ // The standard List next-page token.
+ string next_page_token = 2;
+}
+
+// The request message for canceling a DLP job.
+message CancelDlpJobRequest {
+ // Required. The name of the DlpJob resource to be cancelled.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/DlpJob"
+ }
+ ];
+}
+
+// The request message for deleting a DLP job.
+message DeleteDlpJobRequest {
+ // Required. The name of the DlpJob resource to be deleted.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/DlpJob"
+ }
+ ];
+}
+
+// Request message for CreateDeidentifyTemplate.
+message CreateDeidentifyTemplateRequest {
+ // Required. The parent resource name, for example projects/my-project-id or
+ // organizations/my-org-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ child_type: "dlp.googleapis.com/DeidentifyTemplate"
+ }
+ ];
+
+ // Required. The DeidentifyTemplate to create.
+ DeidentifyTemplate deidentify_template = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // The template id can contain uppercase and lowercase letters,
+  // numbers, hyphens, and underscores; that is, it must match the regular
+ // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100
+ // characters. Can be empty to allow the system to generate one.
+ string template_id = 3;
+
+ // The geographic location to store the deidentification template. Reserved
+ // for future extensions.
+ string location_id = 4;
+}
+
+// Request message for UpdateDeidentifyTemplate.
+message UpdateDeidentifyTemplateRequest {
+ // Required. Resource name of organization and deidentify template to be updated, for
+ // example `organizations/433245324/deidentifyTemplates/432452342` or
+ // projects/project-id/deidentifyTemplates/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/DeidentifyTemplate"
+ }
+ ];
+
+ // New DeidentifyTemplate value.
+ DeidentifyTemplate deidentify_template = 2;
+
+ // Mask to control which fields get updated.
+ google.protobuf.FieldMask update_mask = 3;
+}
+
+// Request message for GetDeidentifyTemplate.
+message GetDeidentifyTemplateRequest {
+ // Required. Resource name of the organization and deidentify template to be read, for
+ // example `organizations/433245324/deidentifyTemplates/432452342` or
+ // projects/project-id/deidentifyTemplates/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/DeidentifyTemplate"
+ }
+ ];
+}
+
+// Request message for ListDeidentifyTemplates.
+message ListDeidentifyTemplatesRequest {
+ // Required. The parent resource name, for example projects/my-project-id or
+ // organizations/my-org-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ child_type: "dlp.googleapis.com/DeidentifyTemplate"
+ }
+ ];
+
+ // Page token to continue retrieval. Comes from previous call
+ // to `ListDeidentifyTemplates`.
+ string page_token = 2;
+
+ // Size of the page, can be limited by server. If zero server returns
+ // a page of max size 100.
+ int32 page_size = 3;
+
+ // Comma separated list of fields to order by,
+ // followed by `asc` or `desc` postfix. This list is case-insensitive,
+ // default sorting order is ascending, redundant space characters are
+ // insignificant.
+ //
+ // Example: `name asc,update_time, create_time desc`
+ //
+ // Supported fields are:
+ //
+ // - `create_time`: corresponds to time the template was created.
+ // - `update_time`: corresponds to time the template was last updated.
+ // - `name`: corresponds to template's name.
+ // - `display_name`: corresponds to template's display name.
+ string order_by = 4;
+
+  // The geographic location where deidentification templates will be retrieved
+ // from. Use `-` for all locations. Reserved for future extensions.
+ string location_id = 5;
+}
+
+// Response message for ListDeidentifyTemplates.
+message ListDeidentifyTemplatesResponse {
+ // List of deidentify templates, up to page_size in
+ // ListDeidentifyTemplatesRequest.
+ repeated DeidentifyTemplate deidentify_templates = 1;
+
+ // If the next page is available then the next page token to be used
+ // in following ListDeidentifyTemplates request.
+ string next_page_token = 2;
+}
+
+// Request message for DeleteDeidentifyTemplate.
+message DeleteDeidentifyTemplateRequest {
+ // Required. Resource name of the organization and deidentify template to be deleted,
+ // for example `organizations/433245324/deidentifyTemplates/432452342` or
+ // projects/project-id/deidentifyTemplates/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/DeidentifyTemplate"
+ }
+ ];
+}
+
+// Configuration for a custom dictionary created from a data source of any size
+// up to the maximum size defined in the
+// [limits](https://cloud.google.com/dlp/limits) page. The artifacts of
+// dictionary creation are stored in the specified Google Cloud Storage
+// location. Consider using `CustomInfoType.Dictionary` for smaller dictionaries
+// that satisfy the size requirements.
+message LargeCustomDictionaryConfig {
+ // Location to store dictionary artifacts in Google Cloud Storage. These files
+ // will only be accessible by project owners and the DLP API. If any of these
+ // artifacts are modified, the dictionary is considered invalid and can no
+ // longer be used.
+ CloudStoragePath output_path = 1;
+
+ oneof source {
+ // Set of files containing newline-delimited lists of dictionary phrases.
+ CloudStorageFileSet cloud_storage_file_set = 2;
+
+ // Field in a BigQuery table where each cell represents a dictionary phrase.
+ BigQueryField big_query_field = 3;
+ }
+}
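+
+// Illustrative sketch only (not part of the API surface): a
+// `LargeCustomDictionaryConfig` built from a newline-delimited term list in
+// Cloud Storage might look like the following in proto text format; the
+// bucket and object names are placeholders.
+//
+//   output_path { path: "gs://mybucket/dictionary-artifacts" }
+//   cloud_storage_file_set { url: "gs://mybucket/dictionaries/terms.txt" }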
+
+// Summary statistics of a custom dictionary.
+message LargeCustomDictionaryStats {
+ // Approximate number of distinct phrases in the dictionary.
+ int64 approx_num_phrases = 1;
+}
+
+// Configuration for stored infoTypes. All fields and subfields are provided
+// by the user. For more information, see
+// https://cloud.google.com/dlp/docs/creating-custom-infotypes.
+message StoredInfoTypeConfig {
+ // Display name of the StoredInfoType (max 256 characters).
+ string display_name = 1;
+
+ // Description of the StoredInfoType (max 256 characters).
+ string description = 2;
+
+ // Stored infotype types.
+ oneof type {
+ // StoredInfoType where findings are defined by a dictionary of phrases.
+ LargeCustomDictionaryConfig large_custom_dictionary = 3;
+ }
+}
+
+// Statistics for a StoredInfoType.
+message StoredInfoTypeStats {
+ // Stat types
+ oneof type {
+ // StoredInfoType where findings are defined by a dictionary of phrases.
+ LargeCustomDictionaryStats large_custom_dictionary = 1;
+ }
+}
+
+// Version of a StoredInfoType, including the configuration used to build it,
+// create timestamp, and current state.
+message StoredInfoTypeVersion {
+ // StoredInfoType configuration.
+ StoredInfoTypeConfig config = 1;
+
+ // Create timestamp of the version. Read-only, determined by the system
+ // when the version is created.
+ google.protobuf.Timestamp create_time = 2;
+
+ // Stored info type version state. Read-only, updated by the system
+ // during dictionary creation.
+ StoredInfoTypeState state = 3;
+
+ // Errors that occurred when creating this storedInfoType version, or
+ // anomalies detected in the storedInfoType data that render it unusable. Only
+ // the five most recent errors will be displayed, with the most recent error
+ // appearing first.
+ //
+ // For example, some of the data for stored custom dictionaries is put in
+ // the user's Google Cloud Storage bucket, and if this data is modified or
+ // deleted by the user or another system, the dictionary becomes invalid.
+ //
+ // If any errors occur, fix the problem indicated by the error message and
+ // use the UpdateStoredInfoType API method to create another version of the
+ // storedInfoType to continue using it, reusing the same `config` if it was
+ // not the source of the error.
+ repeated Error errors = 4;
+
+ // Statistics about this storedInfoType version.
+ StoredInfoTypeStats stats = 5;
+}
+
+// StoredInfoType resource message that contains information about the current
+// version and any pending updates.
+message StoredInfoType {
+ option (google.api.resource) = {
+ type: "dlp.googleapis.com/StoredInfoType"
+ pattern: "organizations/{organization}/storedInfoTypes/{stored_info_type}"
+ pattern: "projects/{project}/storedInfoTypes/{stored_info_type}"
+ };
+
+ // Resource name.
+ string name = 1;
+
+ // Current version of the stored info type.
+ StoredInfoTypeVersion current_version = 2;
+
+ // Pending versions of the stored info type. Empty if no versions are
+ // pending.
+ repeated StoredInfoTypeVersion pending_versions = 3;
+}
+
+// Request message for CreateStoredInfoType.
+message CreateStoredInfoTypeRequest {
+ // Required. The parent resource name, for example projects/my-project-id or
+ // organizations/my-org-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ child_type: "dlp.googleapis.com/StoredInfoType"
+ }
+ ];
+
+ // Required. Configuration of the storedInfoType to create.
+ StoredInfoTypeConfig config = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // The storedInfoType ID can contain uppercase and lowercase letters,
+  // numbers, hyphens, and underscores; that is, it must match the regular
+ // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100
+ // characters. Can be empty to allow the system to generate one.
+ string stored_info_type_id = 3;
+
+ // The geographic location to store the stored infoType. Reserved for
+ // future extensions.
+ string location_id = 4;
+}
+
+// Request message for UpdateStoredInfoType.
+message UpdateStoredInfoTypeRequest {
+ // Required. Resource name of organization and storedInfoType to be updated, for
+ // example `organizations/433245324/storedInfoTypes/432452342` or
+ // projects/project-id/storedInfoTypes/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/StoredInfoType"
+ }
+ ];
+
+ // Updated configuration for the storedInfoType. If not provided, a new
+ // version of the storedInfoType will be created with the existing
+ // configuration.
+ StoredInfoTypeConfig config = 2;
+
+ // Mask to control which fields get updated.
+ google.protobuf.FieldMask update_mask = 3;
+}
+
+// Request message for GetStoredInfoType.
+message GetStoredInfoTypeRequest {
+ // Required. Resource name of the organization and storedInfoType to be read, for
+ // example `organizations/433245324/storedInfoTypes/432452342` or
+ // projects/project-id/storedInfoTypes/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/StoredInfoType"
+ }
+ ];
+}
+
+// Request message for ListStoredInfoTypes.
+message ListStoredInfoTypesRequest {
+ // Required. The parent resource name, for example projects/my-project-id or
+ // organizations/my-org-id.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ child_type: "dlp.googleapis.com/StoredInfoType"
+ }
+ ];
+
+ // Page token to continue retrieval. Comes from previous call
+ // to `ListStoredInfoTypes`.
+ string page_token = 2;
+
+ // Size of the page, can be limited by server. If zero server returns
+ // a page of max size 100.
+ int32 page_size = 3;
+
+ // Comma separated list of fields to order by,
+ // followed by `asc` or `desc` postfix. This list is case-insensitive,
+ // default sorting order is ascending, redundant space characters are
+ // insignificant.
+ //
+ // Example: `name asc, display_name, create_time desc`
+ //
+ // Supported fields are:
+ //
+ // - `create_time`: corresponds to time the most recent version of the
+ // resource was created.
+ // - `state`: corresponds to the state of the resource.
+ // - `name`: corresponds to resource name.
+ // - `display_name`: corresponds to info type's display name.
+ string order_by = 4;
+
+ // The geographic location where stored infoTypes will be retrieved from.
+ // Use `-` for all locations. Reserved for future extensions.
+ string location_id = 5;
+}
+
+// Response message for ListStoredInfoTypes.
+message ListStoredInfoTypesResponse {
+ // List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
+ repeated StoredInfoType stored_info_types = 1;
+
+ // If the next page is available then the next page token to be used
+ // in following ListStoredInfoTypes request.
+ string next_page_token = 2;
+}
+
+// Request message for DeleteStoredInfoType.
+message DeleteStoredInfoTypeRequest {
+ // Required. Resource name of the organization and storedInfoType to be deleted, for
+ // example `organizations/433245324/storedInfoTypes/432452342` or
+ // projects/project-id/storedInfoTypes/432452342.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "dlp.googleapis.com/StoredInfoType"
+ }
+ ];
+}
+
+// Operators available for comparing the value of fields.
+enum RelationalOperator {
+ // Unused
+ RELATIONAL_OPERATOR_UNSPECIFIED = 0;
+
+ // Equal. Attempts to match even with incompatible types.
+ EQUAL_TO = 1;
+
+ // Not equal to. Attempts to match even with incompatible types.
+ NOT_EQUAL_TO = 2;
+
+ // Greater than.
+ GREATER_THAN = 3;
+
+ // Less than.
+ LESS_THAN = 4;
+
+ // Greater than or equals.
+ GREATER_THAN_OR_EQUALS = 5;
+
+ // Less than or equals.
+ LESS_THAN_OR_EQUALS = 6;
+
+ // Exists
+ EXISTS = 7;
+}
+
+// Type of match that can be applied to different ways of matching, like
+// Dictionary, regular expression, and intersecting with findings of another
+// info type.
+enum MatchingType {
+ // Invalid.
+ MATCHING_TYPE_UNSPECIFIED = 0;
+
+ // Full match.
+ //
+  // - Dictionary: joined dictionary results match the complete finding quote
+  // - Regex: all regex matches fill the finding quote from start to end
+  // - Exclude info type: completely inside the affecting info types' findings
+ MATCHING_TYPE_FULL_MATCH = 1;
+
+ // Partial match.
+ //
+ // - Dictionary: at least one of the tokens in the finding matches
+ // - Regex: substring of the finding matches
+ // - Exclude info type: intersects with affecting info types findings
+ MATCHING_TYPE_PARTIAL_MATCH = 2;
+
+ // Inverse match.
+ //
+ // - Dictionary: no tokens in the finding match the dictionary
+ // - Regex: finding doesn't match the regex
+ // - Exclude info type: no intersection with affecting info types findings
+ MATCHING_TYPE_INVERSE_MATCH = 3;
+}
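+
+// For illustration, reading the definitions above: if a finding's quote is
+// "Sam Johnson" and the dictionary contains only the phrase "Johnson", then
+// MATCHING_TYPE_PARTIAL_MATCH applies (one token of the finding matches)
+// while MATCHING_TYPE_FULL_MATCH does not (the match does not cover the
+// complete finding quote).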
+
+// Options describing which parts of the provided content should be scanned.
+enum ContentOption {
+ // Includes entire content of a file or a data stream.
+ CONTENT_UNSPECIFIED = 0;
+
+ // Text content within the data, excluding any metadata.
+ CONTENT_TEXT = 1;
+
+ // Images found in the data.
+ CONTENT_IMAGE = 2;
+}
+
+// Parts of the APIs which use certain infoTypes.
+enum InfoTypeSupportedBy {
+ // Unused.
+ ENUM_TYPE_UNSPECIFIED = 0;
+
+ // Supported by the inspect operations.
+ INSPECT = 1;
+
+ // Supported by the risk analysis operations.
+ RISK_ANALYSIS = 2;
+}
+
+// An enum to represent the various types of DLP jobs.
+enum DlpJobType {
+ // Unused
+ DLP_JOB_TYPE_UNSPECIFIED = 0;
+
+ // The job inspected Google Cloud for sensitive data.
+ INSPECT_JOB = 1;
+
+ // The job executed a Risk Analysis computation.
+ RISK_ANALYSIS_JOB = 2;
+}
+
+// State of a StoredInfoType version.
+enum StoredInfoTypeState {
+ // Unused
+ STORED_INFO_TYPE_STATE_UNSPECIFIED = 0;
+
+ // StoredInfoType version is being created.
+ PENDING = 1;
+
+ // StoredInfoType version is ready for use.
+ READY = 2;
+
+ // StoredInfoType creation failed. All relevant error messages are returned in
+ // the `StoredInfoTypeVersion` message.
+ FAILED = 3;
+
+ // StoredInfoType is no longer valid because artifacts stored in
+ // user-controlled storage were modified. To fix an invalid StoredInfoType,
+ // use the `UpdateStoredInfoType` method to create a new version.
+ INVALID = 4;
+}
diff --git a/test-fixtures/protos/google/privacy/dlp/v2/storage.proto b/test-fixtures/protos/google/privacy/dlp/v2/storage.proto
new file mode 100644
index 0000000000..aebc873ad8
--- /dev/null
+++ b/test-fixtures/protos/google/privacy/dlp/v2/storage.proto
@@ -0,0 +1,648 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.privacy.dlp.v2;
+
+import "google/api/resource.proto";
+import "google/protobuf/timestamp.proto";
+
+option csharp_namespace = "Google.Cloud.Dlp.V2";
+option go_package = "google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp";
+option java_multiple_files = true;
+option java_outer_classname = "DlpStorage";
+option java_package = "com.google.privacy.dlp.v2";
+option php_namespace = "Google\\Cloud\\Dlp\\V2";
+
+// Type of information detected by the API.
+message InfoType {
+ // Name of the information type. Either a name of your choosing when
+ // creating a CustomInfoType, or one of the names listed
+ // at https://cloud.google.com/dlp/docs/infotypes-reference when specifying
+ // a built-in type. InfoType names should conform to the pattern
+ // [a-zA-Z0-9_]{1,64}.
+ string name = 1;
+}
+
+// A reference to a StoredInfoType to use with scanning.
+message StoredType {
+ // Resource name of the requested `StoredInfoType`, for example
+ // `organizations/433245324/storedInfoTypes/432452342` or
+ // `projects/project-id/storedInfoTypes/432452342`.
+ string name = 1;
+
+ // Timestamp indicating when the version of the `StoredInfoType` used for
+ // inspection was created. Output-only field, populated by the system.
+ google.protobuf.Timestamp create_time = 2;
+}
+
+// Categorization of results based on how likely they are to represent a match,
+// based on the number of elements they contain which imply a match.
+enum Likelihood {
+ // Default value; same as POSSIBLE.
+ LIKELIHOOD_UNSPECIFIED = 0;
+
+ // Few matching elements.
+ VERY_UNLIKELY = 1;
+
+  // Unlikely to be a match.
+  UNLIKELY = 2;
+
+ // Some matching elements.
+ POSSIBLE = 3;
+
+  // Likely to be a match.
+  LIKELY = 4;
+
+ // Many matching elements.
+ VERY_LIKELY = 5;
+}
+
+// Custom information type provided by the user. Used to find domain-specific
+// sensitive information configurable to the data in question.
+message CustomInfoType {
+ // Custom information type based on a dictionary of words or phrases. This can
+ // be used to match sensitive information specific to the data, such as a list
+ // of employee IDs or job titles.
+ //
+ // Dictionary words are case-insensitive and all characters other than letters
+ // and digits in the unicode [Basic Multilingual
+ // Plane](https://en.wikipedia.org/wiki/Plane_%28Unicode%29#Basic_Multilingual_Plane)
+ // will be replaced with whitespace when scanning for matches, so the
+ // dictionary phrase "Sam Johnson" will match all three phrases "sam johnson",
+ // "Sam, Johnson", and "Sam (Johnson)". Additionally, the characters
+ // surrounding any match must be of a different type than the adjacent
+ // characters within the word, so letters must be next to non-letters and
+ // digits next to non-digits. For example, the dictionary word "jen" will
+ // match the first three letters of the text "jen123" but will return no
+ // matches for "jennifer".
+ //
+ // Dictionary words containing a large number of characters that are not
+ // letters or digits may result in unexpected findings because such characters
+ // are treated as whitespace. The
+ // [limits](https://cloud.google.com/dlp/limits) page contains details about
+ // the size limits of dictionaries. For dictionaries that do not fit within
+ // these constraints, consider using `LargeCustomDictionaryConfig` in the
+ // `StoredInfoType` API.
+ message Dictionary {
+ // Message defining a list of words or phrases to search for in the data.
+ message WordList {
+ // Words or phrases defining the dictionary. The dictionary must contain
+ // at least one phrase and every phrase must contain at least 2 characters
+ // that are letters or digits. [required]
+ repeated string words = 1;
+ }
+
+ oneof source {
+ // List of words or phrases to search for.
+ WordList word_list = 1;
+
+ // Newline-delimited file of words in Cloud Storage. Only a single file
+ // is accepted.
+ CloudStoragePath cloud_storage_path = 3;
+ }
+ }
+
+ // Message defining a custom regular expression.
+ message Regex {
+ // Pattern defining the regular expression. Its syntax
+ // (https://github.com/google/re2/wiki/Syntax) can be found under the
+ // google/re2 repository on GitHub.
+ string pattern = 1;
+
+ // The index of the submatch to extract as findings. When not
+ // specified, the entire match is returned. No more than 3 may be included.
+ repeated int32 group_indexes = 2;
+ }
+
+ // Message for detecting output from deidentification transformations
+ // such as
+ // [`CryptoReplaceFfxFpeConfig`](/dlp/docs/reference/rest/v2/organizations.deidentifyTemplates#cryptoreplaceffxfpeconfig).
+ // These types of transformations are
+ // those that perform pseudonymization, thereby producing a "surrogate" as
+ // output. This should be used in conjunction with a field on the
+ // transformation such as `surrogate_info_type`. This CustomInfoType does
+ // not support the use of `detection_rules`.
+ message SurrogateType {
+
+ }
+
+ // Deprecated; use `InspectionRuleSet` instead. Rule for modifying a
+ // `CustomInfoType` to alter behavior under certain circumstances, depending
+ // on the specific details of the rule. Not supported for the `surrogate_type`
+ // custom infoType.
+ message DetectionRule {
+ // Message for specifying a window around a finding to apply a detection
+ // rule.
+ message Proximity {
+ // Number of characters before the finding to consider.
+ int32 window_before = 1;
+
+ // Number of characters after the finding to consider.
+ int32 window_after = 2;
+ }
+
+ // Message for specifying an adjustment to the likelihood of a finding as
+ // part of a detection rule.
+ message LikelihoodAdjustment {
+ oneof adjustment {
+ // Set the likelihood of a finding to a fixed value.
+ Likelihood fixed_likelihood = 1;
+
+ // Increase or decrease the likelihood by the specified number of
+ // levels. For example, if a finding would be `POSSIBLE` without the
+ // detection rule and `relative_likelihood` is 1, then it is upgraded to
+ // `LIKELY`, while a value of -1 would downgrade it to `UNLIKELY`.
+ // Likelihood may never drop below `VERY_UNLIKELY` or exceed
+ // `VERY_LIKELY`, so applying an adjustment of 1 followed by an
+ // adjustment of -1 when base likelihood is `VERY_LIKELY` will result in
+ // a final likelihood of `LIKELY`.
+ int32 relative_likelihood = 2;
+ }
+ }
+
+ // The rule that adjusts the likelihood of findings within a certain
+ // proximity of hotwords.
+ message HotwordRule {
+ // Regular expression pattern defining what qualifies as a hotword.
+ Regex hotword_regex = 1;
+
+ // Proximity of the finding within which the entire hotword must reside.
+ // The total length of the window cannot exceed 1000 characters. Note that
+ // the finding itself will be included in the window, so that hotwords may
+ // be used to match substrings of the finding itself. For example, the
+ // certainty of a phone number regex "\(\d{3}\) \d{3}-\d{4}" could be
+ // adjusted upwards if the area code is known to be the local area code of
+ // a company office using the hotword regex "\(xxx\)", where "xxx"
+ // is the area code in question.
+ Proximity proximity = 2;
+
+ // Likelihood adjustment to apply to all matching findings.
+ LikelihoodAdjustment likelihood_adjustment = 3;
+ }
+
+ oneof type {
+ // Hotword-based detection rule.
+ HotwordRule hotword_rule = 1;
+ }
+ }
+
+ enum ExclusionType {
+ // A finding of this custom info type will not be excluded from results.
+ EXCLUSION_TYPE_UNSPECIFIED = 0;
+
+ // A finding of this custom info type will be excluded from final results,
+ // but can still affect rule execution.
+ EXCLUSION_TYPE_EXCLUDE = 1;
+ }
+
+  // CustomInfoType can either be a new infoType, or an extension of a
+  // built-in infoType, when the name matches one of the existing infoTypes
+  // and that infoType is specified in the `InspectContent.info_types` field.
+  // Specifying the latter adds findings to the ones detected by the system.
+  // If the built-in info type is not specified in the
+  // `InspectContent.info_types` list then the name is treated as a custom
+  // info type.
+ InfoType info_type = 1;
+
+ // Likelihood to return for this CustomInfoType. This base value can be
+ // altered by a detection rule if the finding meets the criteria specified by
+ // the rule. Defaults to `VERY_LIKELY` if not specified.
+ Likelihood likelihood = 6;
+
+ oneof type {
+ // A list of phrases to detect as a CustomInfoType.
+ Dictionary dictionary = 2;
+
+ // Regular expression based CustomInfoType.
+ Regex regex = 3;
+
+ // Message for detecting output from deidentification transformations that
+ // support reversing.
+ SurrogateType surrogate_type = 4;
+
+ // Load an existing `StoredInfoType` resource for use in
+ // `InspectDataSource`. Not currently supported in `InspectContent`.
+ StoredType stored_type = 5;
+ }
+
+ // Set of detection rules to apply to all findings of this CustomInfoType.
+ // Rules are applied in order that they are specified. Not supported for the
+ // `surrogate_type` CustomInfoType.
+ repeated DetectionRule detection_rules = 7;
+
+ // If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding
+ // to be returned. It still can be used for rules matching.
+ ExclusionType exclusion_type = 8;
+}
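+
+// Illustrative sketch only (not part of the API surface): a regex-based
+// `CustomInfoType` in proto text format might look like the following; the
+// info type name and pattern are hypothetical.
+//
+//   info_type { name: "EMPLOYEE_ID" }
+//   likelihood: POSSIBLE
+//   regex { pattern: "E-\\d{6}" }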
+
+// General identifier of a data field in a storage service.
+message FieldId {
+ // Name describing the field.
+ string name = 1;
+}
+
+// Datastore partition ID.
+// A partition ID identifies a grouping of entities. The grouping is always
+// by project and namespace, however the namespace ID may be empty.
+//
+// A partition ID contains several dimensions:
+// project ID and namespace ID.
+message PartitionId {
+ // The ID of the project to which the entities belong.
+ string project_id = 2;
+
+ // If not empty, the ID of the namespace to which the entities belong.
+ string namespace_id = 4;
+}
+
+// A representation of a Datastore kind.
+message KindExpression {
+ // The name of the kind.
+ string name = 1;
+}
+
+// Options defining a data set within Google Cloud Datastore.
+message DatastoreOptions {
+ // A partition ID identifies a grouping of entities. The grouping is always
+ // by project and namespace, however the namespace ID may be empty.
+ PartitionId partition_id = 1;
+
+ // The kind to process.
+ KindExpression kind = 2;
+}
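+
+// Illustrative sketch only (not part of the API surface): `DatastoreOptions`
+// targeting the "Person" kind in a project's default namespace might look
+// like the following in proto text format; the project id is a placeholder.
+//
+//   partition_id { project_id: "my-project-id" }
+//   kind { name: "Person" }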
+
+// Message representing a set of files in a Cloud Storage bucket. Regular
+// expressions are used to allow fine-grained control over which files in the
+// bucket to include.
+//
+// Included files are those that match at least one item in `include_regex` and
+// do not match any items in `exclude_regex`. Note that a file that matches
+// items from both lists will _not_ be included. For a match to occur, the
+// entire file path (i.e., everything in the url after the bucket name) must
+// match the regular expression.
+//
+// For example, given the input `{bucket_name: "mybucket", include_regex:
+// ["directory1/.*"], exclude_regex:
+// ["directory1/excluded.*"]}`:
+//
+// * `gs://mybucket/directory1/myfile` will be included
+// * `gs://mybucket/directory1/directory2/myfile` will be included (`.*` matches
+// across `/`)
+// * `gs://mybucket/directory0/directory1/myfile` will _not_ be included (the
+// full path doesn't match any items in `include_regex`)
+// * `gs://mybucket/directory1/excludedfile` will _not_ be included (the path
+// matches an item in `exclude_regex`)
+//
+// If `include_regex` is left empty, it will match all files by default
+// (this is equivalent to setting `include_regex: [".*"]`).
+//
+// Some other common use cases:
+//
+// * `{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}` will include all
+// files in `mybucket` except for .pdf files
+// * `{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}` will
+// include all files directly under `gs://mybucket/directory/`, without matching
+// across `/`
+message CloudStorageRegexFileSet {
+ // The name of a Cloud Storage bucket. Required.
+ string bucket_name = 1;
+
+ // A list of regular expressions matching file paths to include. All files in
+ // the bucket that match at least one of these regular expressions will be
+ // included in the set of files, except for those that also match an item in
+ // `exclude_regex`. Leaving this field empty will match all files by default
+ // (this is equivalent to including `.*` in the list).
+ //
+ // Regular expressions use RE2
+ // [syntax](https://github.com/google/re2/wiki/Syntax); a guide can be found
+ // under the google/re2 repository on GitHub.
+ repeated string include_regex = 2;
+
+ // A list of regular expressions matching file paths to exclude. All files in
+ // the bucket that match at least one of these regular expressions will be
+ // excluded from the scan.
+ //
+ // Regular expressions use RE2
+ // [syntax](https://github.com/google/re2/wiki/Syntax); a guide can be found
+ // under the google/re2 repository on GitHub.
+ repeated string exclude_regex = 3;
+}
+
+// Options defining a file or a set of files within a Google Cloud Storage
+// bucket.
+message CloudStorageOptions {
+ // Set of files to scan.
+ message FileSet {
+    // The Cloud Storage url of the file(s) to scan, in the format
+    // `gs://<bucket>/<path>`. Trailing wildcard in the path is allowed.
+ //
+ // If the url ends in a trailing slash, the bucket or directory represented
+ // by the url will be scanned non-recursively (content in sub-directories
+ // will not be scanned). This means that `gs://mybucket/` is equivalent to
+ // `gs://mybucket/*`, and `gs://mybucket/directory/` is equivalent to
+ // `gs://mybucket/directory/*`.
+ //
+ // Exactly one of `url` or `regex_file_set` must be set.
+ string url = 1;
+
+ // The regex-filtered set of files to scan. Exactly one of `url` or
+ // `regex_file_set` must be set.
+ CloudStorageRegexFileSet regex_file_set = 2;
+ }
+
+ // How to sample bytes if not all bytes are scanned. Meaningful only when used
+ // in conjunction with bytes_limit_per_file. If not specified, scanning would
+ // start from the top.
+ enum SampleMethod {
+ SAMPLE_METHOD_UNSPECIFIED = 0;
+
+ // Scan from the top (default).
+ TOP = 1;
+
+ // For each file larger than bytes_limit_per_file, randomly pick the offset
+ // to start scanning. The scanned bytes are contiguous.
+ RANDOM_START = 2;
+ }
+
+ // The set of one or more files to scan.
+ FileSet file_set = 1;
+
+ // Max number of bytes to scan from a file. If a scanned file's size is bigger
+ // than this value then the rest of the bytes are omitted. Only one
+ // of bytes_limit_per_file and bytes_limit_per_file_percent can be specified.
+ int64 bytes_limit_per_file = 4;
+
+ // Max percentage of bytes to scan from a file. The rest are omitted. The
+ // number of bytes scanned is rounded down. Must be between 0 and 100,
+  // inclusive. Both 0 and 100 mean no limit. Defaults to 0. Only one
+ // of bytes_limit_per_file and bytes_limit_per_file_percent can be specified.
+ int32 bytes_limit_per_file_percent = 8;
+
+ // List of file type groups to include in the scan.
+ // If empty, all files are scanned and available data format processors
+ // are applied. In addition, the binary content of the selected files
+ // is always scanned as well.
+ repeated FileType file_types = 5;
+
+  // How to sample bytes if not all bytes are scanned.
+  SampleMethod sample_method = 6;
+
+ // Limits the number of files to scan to this percentage of the input FileSet.
+ // Number of files scanned is rounded down. Must be between 0 and 100,
+  // inclusive. Both 0 and 100 mean no limit. Defaults to 0.
+ int32 files_limit_percent = 7;
+}
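+
+// Illustrative sketch only (not part of the API surface): `CloudStorageOptions`
+// scanning at most 1 MiB per file under a directory might look like the
+// following in proto text format; the bucket and path are placeholders.
+//
+//   file_set { url: "gs://mybucket/directory/*" }
+//   bytes_limit_per_file: 1048576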
+
+// Message representing a set of files in Cloud Storage.
+message CloudStorageFileSet {
+ // The url, in the format `gs://<bucket>/<path>`. Trailing wildcard in the
+ // path is allowed.
+ string url = 1;
+}
+
+// Message representing a single file or path in Cloud Storage.
+message CloudStoragePath {
+ // A url representing a file or path (no wildcards) in Cloud Storage.
+ // Example: gs://[BUCKET_NAME]/dictionary.txt
+ string path = 1;
+}
+
+// Options defining BigQuery table and row identifiers.
+message BigQueryOptions {
+ // How to sample rows if not all rows are scanned. Meaningful only when used
+ // in conjunction with either rows_limit or rows_limit_percent. If not
+ // specified, scanning would start from the top.
+ enum SampleMethod {
+ SAMPLE_METHOD_UNSPECIFIED = 0;
+
+ // Scan from the top (default).
+ TOP = 1;
+
+ // Randomly pick the row to start scanning. The scanned rows are contiguous.
+ RANDOM_START = 2;
+ }
+
+ // Complete BigQuery table reference.
+ BigQueryTable table_reference = 1;
+
+ // References to fields uniquely identifying rows within the table.
+ // Nested fields, in the format `person.birthdate.year`, are allowed.
+ repeated FieldId identifying_fields = 2;
+
+ // Max number of rows to scan. If the table has more rows than this value, the
+ // rest of the rows are omitted. If not set, or if set to 0, all rows will be
+ // scanned. Only one of rows_limit and rows_limit_percent can be specified.
+ // Cannot be used in conjunction with TimespanConfig.
+ int64 rows_limit = 3;
+
+ // Max percentage of rows to scan. The rest are omitted. The number of rows
+ // scanned is rounded down. Must be between 0 and 100, inclusive. Both 0 and
+ // 100 mean no limit. Defaults to 0. Only one of rows_limit and
+ // rows_limit_percent can be specified. Cannot be used in conjunction with
+ // TimespanConfig.
+ int32 rows_limit_percent = 6;
+
+ SampleMethod sample_method = 4;
+
+ // References to fields excluded from scanning. This allows you to skip
+ // inspection of entire columns which you know have no findings.
+ repeated FieldId excluded_fields = 5;
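+
+ // Example (hypothetical values) as textproto: scan 10% of the rows, starting
+ // at a randomly chosen row.
+ //   table_reference { project_id: "my-project" dataset_id: "my_dataset" table_id: "my_table" }
+ //   rows_limit_percent: 10
+ //   sample_method: RANDOM_START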
+}
+
+// Shared message indicating Cloud storage type.
+message StorageConfig {
+ // Configuration of the timespan of the items to include in scanning.
+ // Currently only supported when inspecting Google Cloud Storage and BigQuery.
+ message TimespanConfig {
+ // Exclude files or rows older than this value.
+ google.protobuf.Timestamp start_time = 1;
+
+ // Exclude files or rows newer than this value.
+ // If set to zero, no upper time limit is applied.
+ google.protobuf.Timestamp end_time = 2;
+
+ // Specification of the field containing the timestamp of scanned items.
+ // Used for data sources like Datastore and BigQuery.
+ //
+ // For BigQuery:
+ // Required to filter out rows based on the given start and
+ // end times. If not specified and the table was modified between the given
+ // start and end times, the entire table will be scanned.
+ // The valid data types of the timestamp field are `INTEGER`, `DATE`,
+ // `TIMESTAMP`, and `DATETIME` BigQuery columns.
+ //
+ // For Datastore:
+ // The only valid data type of the timestamp field is `TIMESTAMP`.
+ // A Datastore entity will be scanned if the timestamp property does not
+ // exist or its value is empty or invalid.
+ FieldId timestamp_field = 3;
+
+ // When the job is started by a JobTrigger we will automatically figure out
+ // a valid start_time to avoid scanning files that have not been modified
+ // since the last time the JobTrigger executed. This will be based on the
+ // time of the execution of the last run of the JobTrigger.
+ bool enable_auto_population_of_timespan_config = 4;
+ }
+
+ oneof type {
+ // Google Cloud Datastore options specification.
+ DatastoreOptions datastore_options = 2;
+
+ // Google Cloud Storage options specification.
+ CloudStorageOptions cloud_storage_options = 3;
+
+ // BigQuery options specification.
+ BigQueryOptions big_query_options = 4;
+ }
+
+ TimespanConfig timespan_config = 6;
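+
+ // Example (hypothetical values) as textproto: inspect a bucket, but only
+ // files newer than the given start time.
+ //   cloud_storage_options {
+ //     file_set { url: "gs://mybucket/*" }
+ //   }
+ //   timespan_config {
+ //     start_time { seconds: 1572566400 }
+ //   }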
+}
+
+// Definitions of file type groups to scan.
+enum FileType {
+ // Includes all files.
+ FILE_TYPE_UNSPECIFIED = 0;
+
+ // Includes all file extensions not covered by text file types.
+ BINARY_FILE = 1;
+
+ // Included file extensions:
+ // asc, brf, c, cc, cpp, csv, cxx, c++, cs, css, dart, eml, go, h, hh, hpp,
+ // hxx, h++, hs, html, htm, shtml, shtm, xhtml, lhs, ini, java, js, json,
+ // ocaml, md, mkd, markdown, m, ml, mli, pl, pm, php, phtml, pht, py, pyw,
+ // rb, rbw, rs, rc, scala, sh, sql, tex, txt, text, tsv, vcard, vcs, wml,
+ // xml, xsl, xsd, yml, yaml.
+ TEXT_FILE = 2;
+
+ // Included file extensions:
+ // bmp, gif, jpg, jpeg, jpe, png.
+ // bytes_limit_per_file has no effect on image files.
+ IMAGE = 3;
+
+ // Included file extensions:
+ // avro
+ AVRO = 7;
+}
+
+// Row key for identifying a record in BigQuery table.
+message BigQueryKey {
+ // Complete BigQuery table reference.
+ BigQueryTable table_reference = 1;
+
+ // Absolute number of the row from the beginning of the table at the time
+ // of scanning.
+ int64 row_number = 2;
+}
+
+// Record key for a finding in Cloud Datastore.
+message DatastoreKey {
+ // Datastore entity key.
+ Key entity_key = 1;
+}
+
+// A unique identifier for a Datastore entity.
+// If a key's partition ID or any of its path kinds or names are
+// reserved/read-only, the key is reserved/read-only.
+// A reserved/read-only key is forbidden in certain documented contexts.
+message Key {
+ // A (kind, ID/name) pair used to construct a key path.
+ //
+ // If either name or ID is set, the element is complete.
+ // If neither is set, the element is incomplete.
+ message PathElement {
+ // The kind of the entity.
+ // A kind matching regex `__.*__` is reserved/read-only.
+ // A kind must not contain more than 1500 bytes when UTF-8 encoded.
+ // Cannot be `""`.
+ string kind = 1;
+
+ // The type of ID.
+ oneof id_type {
+ // The auto-allocated ID of the entity.
+ // Never equal to zero. Values less than zero are discouraged and may not
+ // be supported in the future.
+ int64 id = 2;
+
+ // The name of the entity.
+ // A name matching regex `__.*__` is reserved/read-only.
+ // A name must not be more than 1500 bytes when UTF-8 encoded.
+ // Cannot be `""`.
+ string name = 3;
+ }
+ }
+
+ // Entities are partitioned into subsets, currently identified by a project
+ // ID and namespace ID.
+ // Queries are scoped to a single partition.
+ PartitionId partition_id = 1;
+
+ // The entity path.
+ // An entity path consists of one or more elements composed of a kind and a
+ // string or numerical identifier, which identify entities. The first
+ // element identifies a _root entity_, the second element identifies
+ // a _child_ of the root entity, the third element identifies a child of the
+ // second entity, and so forth. The entities identified by all prefixes of
+ // the path are called the element's _ancestors_.
+ //
+ // A path can never be empty, and a path can have at most 100 elements.
+ repeated PathElement path = 2;
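+
+ // Example (hypothetical values): the key of a `Task` entity named
+ // "sampleTask" whose parent is a `TaskList` entity with numeric id 1234:
+ //   partition_id { project_id: "my-project" }
+ //   path { kind: "TaskList" id: 1234 }
+ //   path { kind: "Task" name: "sampleTask" }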
+}
+
+// Message for a unique key indicating a record that contains a finding.
+message RecordKey {
+ oneof type {
+ DatastoreKey datastore_key = 2;
+
+ BigQueryKey big_query_key = 3;
+ }
+
+ // Values of identifying columns in the given row. Order of values matches
+ // the order of field identifiers specified in the scanning request.
+ repeated string id_values = 5;
+}
+
+// Message defining the location of a BigQuery table. A table is uniquely
+// identified by its project_id, dataset_id, and table_name. Within a query
+// a table is often referenced with a string in the format of:
+ // `<project_id>:<dataset_id>.<table_id>` or
+ // `<project_id>.<dataset_id>.<table_id>`.
+message BigQueryTable {
+ // The Google Cloud Platform project ID of the project containing the table.
+ // If omitted, project ID is inferred from the API call.
+ string project_id = 1;
+
+ // Dataset ID of the table.
+ string dataset_id = 2;
+
+ // Name of the table.
+ string table_id = 3;
+}
+
+// Message defining a field of a BigQuery table.
+message BigQueryField {
+ // Source table of the field.
+ BigQueryTable table = 1;
+
+ // Designated field in the BigQuery table.
+ FieldId field = 2;
+}
+
+// An entity in a dataset is a field or set of fields that correspond to a
+// single person. For example, in medical records the `EntityId` might be a
+// patient identifier, or for financial records it might be an account
+// identifier. This message is used when generalizations or analysis must take
+// into account that multiple rows correspond to the same entity.
+message EntityId {
+ // Composite key indicating which field contains the entity identifier.
+ FieldId field = 1;
+}
diff --git a/typescript/test/protos/google/showcase/v1beta1/echo.proto b/test-fixtures/protos/google/showcase/v1beta1/echo.proto
similarity index 100%
rename from typescript/test/protos/google/showcase/v1beta1/echo.proto
rename to test-fixtures/protos/google/showcase/v1beta1/echo.proto
diff --git a/typescript/test/test_application_js/.gitignore b/test-fixtures/test-application-js/.gitignore
similarity index 100%
rename from typescript/test/test_application_js/.gitignore
rename to test-fixtures/test-application-js/.gitignore
diff --git a/typescript/test/test_application_js/browser-test.js b/test-fixtures/test-application-js/browser-test.js
similarity index 100%
rename from typescript/test/test_application_js/browser-test.js
rename to test-fixtures/test-application-js/browser-test.js
diff --git a/typescript/test/test_application_js/index.js b/test-fixtures/test-application-js/index.js
similarity index 100%
rename from typescript/test/test_application_js/index.js
rename to test-fixtures/test-application-js/index.js
diff --git a/typescript/test/test_application_js/integration-test.js b/test-fixtures/test-application-js/integration-test.js
similarity index 100%
rename from typescript/test/test_application_js/integration-test.js
rename to test-fixtures/test-application-js/integration-test.js
diff --git a/typescript/test/test_application_js/karma.conf.js b/test-fixtures/test-application-js/karma.conf.js
similarity index 100%
rename from typescript/test/test_application_js/karma.conf.js
rename to test-fixtures/test-application-js/karma.conf.js
diff --git a/typescript/test/test_application_js/package.json b/test-fixtures/test-application-js/package.json
similarity index 100%
rename from typescript/test/test_application_js/package.json
rename to test-fixtures/test-application-js/package.json
diff --git a/typescript/test/test_application_js/prettier.config.js b/test-fixtures/test-application-js/prettier.config.js
similarity index 100%
rename from typescript/test/test_application_js/prettier.config.js
rename to test-fixtures/test-application-js/prettier.config.js
diff --git a/typescript/test/test_application_js/server.js b/test-fixtures/test-application-js/server.js
similarity index 100%
rename from typescript/test/test_application_js/server.js
rename to test-fixtures/test-application-js/server.js
diff --git a/typescript/test/test_application_js/webpack.config.js b/test-fixtures/test-application-js/webpack.config.js
similarity index 100%
rename from typescript/test/test_application_js/webpack.config.js
rename to test-fixtures/test-application-js/webpack.config.js
diff --git a/typescript/test/test_application_ts/karma.conf.js b/test-fixtures/test-application-ts/karma.conf.js
similarity index 100%
rename from typescript/test/test_application_ts/karma.conf.js
rename to test-fixtures/test-application-ts/karma.conf.js
diff --git a/typescript/test/test_application_ts/package.json b/test-fixtures/test-application-ts/package.json
similarity index 100%
rename from typescript/test/test_application_ts/package.json
rename to test-fixtures/test-application-ts/package.json
diff --git a/typescript/test/test_application_ts/prettier.config.js b/test-fixtures/test-application-ts/prettier.config.js
similarity index 100%
rename from typescript/test/test_application_ts/prettier.config.js
rename to test-fixtures/test-application-ts/prettier.config.js
diff --git a/typescript/test/test_application_ts/src/browser-test.ts b/test-fixtures/test-application-ts/src/browser-test.ts
similarity index 100%
rename from typescript/test/test_application_ts/src/browser-test.ts
rename to test-fixtures/test-application-ts/src/browser-test.ts
diff --git a/typescript/test/test_application_ts/src/index.ts b/test-fixtures/test-application-ts/src/index.ts
similarity index 100%
rename from typescript/test/test_application_ts/src/index.ts
rename to test-fixtures/test-application-ts/src/index.ts
diff --git a/typescript/test/test_application_ts/src/integration-test.ts b/test-fixtures/test-application-ts/src/integration-test.ts
similarity index 100%
rename from typescript/test/test_application_ts/src/integration-test.ts
rename to test-fixtures/test-application-ts/src/integration-test.ts
diff --git a/typescript/test/test_application_ts/src/server.ts b/test-fixtures/test-application-ts/src/server.ts
similarity index 100%
rename from typescript/test/test_application_ts/src/server.ts
rename to test-fixtures/test-application-ts/src/server.ts
diff --git a/typescript/test/test_application_ts/tsconfig.json b/test-fixtures/test-application-ts/tsconfig.json
similarity index 100%
rename from typescript/test/test_application_ts/tsconfig.json
rename to test-fixtures/test-application-ts/tsconfig.json
diff --git a/typescript/test/test_application_ts/tslint.json b/test-fixtures/test-application-ts/tslint.json
similarity index 100%
rename from typescript/test/test_application_ts/tslint.json
rename to test-fixtures/test-application-ts/tslint.json
diff --git a/typescript/test/test_application_ts/webpack.config.js b/test-fixtures/test-application-ts/webpack.config.js
similarity index 100%
rename from typescript/test/test_application_ts/webpack.config.js
rename to test-fixtures/test-application-ts/webpack.config.js
diff --git a/tsconfig.json b/tsconfig.json
index 49933ab87a..c0ccb61bba 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -7,8 +7,5 @@
},
"include": [
"typescript/**/*.ts"
- ],
- "exclude":[
- "typescript/test/test_application_ts/src/*.ts"
]
}
diff --git a/typescript/src/schema/api.ts b/typescript/src/schema/api.ts
index 106e9917aa..97f970bdcd 100644
--- a/typescript/src/schema/api.ts
+++ b/typescript/src/schema/api.ts
@@ -13,15 +13,10 @@
// limitations under the License.
import * as plugin from '../../../pbjs-genfiles/plugin';
-import * as fs from 'fs';
-import * as path from 'path';
import { Naming, Options as namingOptions } from './naming';
-import { Proto, MessagesMap } from './proto';
-import { ResourceDatabase, ResourceDescriptor } from './resourceDatabase';
-
-const googleGaxLocation = path.dirname(require.resolve('google-gax'));
-const gaxProtosLocation = path.join(googleGaxLocation, '..', '..', 'protos');
+import { Proto } from './proto';
+import { ResourceDatabase, ResourceDescriptor } from './resource-database';
export interface ProtosMap {
[filename: string]: Proto;
diff --git a/typescript/src/schema/proto.ts b/typescript/src/schema/proto.ts
index 90f9b237b5..5bf3ed0ed2 100644
--- a/typescript/src/schema/proto.ts
+++ b/typescript/src/schema/proto.ts
@@ -14,29 +14,14 @@
import * as plugin from '../../../pbjs-genfiles/plugin';
import { CommentsMap, Comment } from './comments';
-import * as objectHash from 'object-hash';
import { milliseconds } from '../util';
-import { ResourceDescriptor, ResourceDatabase } from './resourceDatabase';
-
-const defaultNonIdempotentRetryCodesName = 'non_idempotent';
-const defaultNonIdempotentCodes: plugin.google.rpc.Code[] = [];
-const defaultIdempotentRetryCodesName = 'idempotent';
-const defaultIdempotentCodes = [
- plugin.google.rpc.Code.DEADLINE_EXCEEDED,
- plugin.google.rpc.Code.UNAVAILABLE,
-];
-const defaultParametersName = 'default';
-const defaultParameters = {
- initial_retry_delay_millis: 100,
- retry_delay_multiplier: 1.3,
- max_retry_delay_millis: 60000,
- // note: the following four parameters are unused but currently required by google-gax.
- // setting them to some big safe default values.
- initial_rpc_timeout_millis: 60000,
- rpc_timeout_multiplier: 1.0,
- max_rpc_timeout_millis: 60000,
- total_timeout_millis: 600000,
-};
+import { ResourceDescriptor, ResourceDatabase } from './resource-database';
+import {
+ RetryableCodeMap,
+ defaultParametersName,
+ defaultNonIdempotentRetryCodesName,
+ defaultParameters,
+} from './retryable-code-map';
interface MethodDescriptorProto
extends plugin.google.protobuf.IMethodDescriptorProto {
@@ -65,108 +50,6 @@ interface MethodDescriptorProto
headerRequestParams: string[][];
}
-export class RetryableCodeMap {
- codeEnumMapping: { [index: string]: string };
- uniqueCodesNamesMap: { [uniqueName: string]: string };
- prettyCodesNamesMap: { [prettyName: string]: string[] };
- uniqueParamsNamesMap: { [uniqueName: string]: string };
- prettyParamNamesMap: { [prettyName: string]: {} };
-
- constructor() {
- this.uniqueCodesNamesMap = {};
- this.prettyCodesNamesMap = {};
- this.uniqueParamsNamesMap = {};
- this.prettyParamNamesMap = {};
-
- // build reverse mapping for enum: 0 => OK, 1 => CANCELLED, etc.
- this.codeEnumMapping = {};
- const allCodes = Object.keys(plugin.google.rpc.Code);
- for (const code of allCodes) {
- this.codeEnumMapping[
- ((plugin.google.rpc.Code as unknown) as {
- [key: string]: plugin.google.rpc.Code;
- })[code].toString()
- ] = code;
- }
-
- // generate some pre-defined code sets for compatibility with existing configs
- this.getRetryableCodesName(
- defaultNonIdempotentCodes,
- defaultNonIdempotentRetryCodesName
- );
- this.getRetryableCodesName(
- defaultIdempotentCodes,
- defaultIdempotentRetryCodesName
- );
- this.getParamsName(defaultParameters, 'default');
- }
-
- private buildUniqueCodesName(
- retryableStatusCodes: plugin.google.rpc.Code[]
- ): string {
- // generate an unique readable name for the given retryable set of codes
- const sortedCodes = retryableStatusCodes.sort(
- (a, b) => Number(a) - Number(b)
- );
- const uniqueName = sortedCodes
- .map(code => this.codeEnumMapping[code])
- .join('_')
- // toSnakeCase() splits on uppercase and we only want to split on
- // underscores since all enum codes are uppercase.
- .toLowerCase()
- .toSnakeCase();
- return uniqueName;
- }
-
- private buildUniqueParamsName(params: {}): string {
- // generate an unique not so readable name for the given set of parameters
- return objectHash(params);
- }
-
- getRetryableCodesName(
- retryableStatusCodes: plugin.google.rpc.Code[],
- suggestedName?: string
- ): string {
- const uniqueName = this.buildUniqueCodesName(retryableStatusCodes);
- const prettyName =
- this.uniqueCodesNamesMap[uniqueName] || suggestedName || uniqueName;
- if (!this.uniqueCodesNamesMap[uniqueName]) {
- this.uniqueCodesNamesMap[uniqueName] = prettyName;
- this.prettyCodesNamesMap[prettyName] = retryableStatusCodes.map(
- code => this.codeEnumMapping[code]
- );
- }
- return prettyName;
- }
-
- getParamsName(params: {}, suggestedName?: string): string {
- const uniqueName = this.buildUniqueParamsName(params);
- const prettyName =
- this.uniqueParamsNamesMap[uniqueName] || suggestedName || uniqueName;
- if (!this.uniqueParamsNamesMap[uniqueName]) {
- this.uniqueParamsNamesMap[uniqueName] = prettyName;
- this.prettyParamNamesMap[prettyName] = params;
- }
- return prettyName;
- }
-
- getPrettyCodesNames(): string[] {
- return Object.keys(this.prettyCodesNamesMap);
- }
-
- getCodesJSON(prettyName: string): string {
- return JSON.stringify(this.prettyCodesNamesMap[prettyName]);
- }
-
- getPrettyParamsNames(): string[] {
- return Object.keys(this.prettyParamNamesMap);
- }
-
- getParamsJSON(prettyName: string): string {
- return JSON.stringify(this.prettyParamNamesMap[prettyName]);
- }
-}
-
interface ServiceDescriptorProto
extends plugin.google.protobuf.IServiceDescriptorProto {
packageName: string;
diff --git a/typescript/src/schema/resourceDatabase.ts b/typescript/src/schema/resource-database.ts
similarity index 100%
rename from typescript/src/schema/resourceDatabase.ts
rename to typescript/src/schema/resource-database.ts
diff --git a/typescript/src/schema/retryable-code-map.ts b/typescript/src/schema/retryable-code-map.ts
new file mode 100644
index 0000000000..d0de19c505
--- /dev/null
+++ b/typescript/src/schema/retryable-code-map.ts
@@ -0,0 +1,137 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import * as objectHash from 'object-hash';
+import * as plugin from '../../../pbjs-genfiles/plugin';
+
+export const defaultNonIdempotentRetryCodesName = 'non_idempotent';
+export const defaultNonIdempotentCodes: plugin.google.rpc.Code[] = [];
+export const defaultIdempotentRetryCodesName = 'idempotent';
+export const defaultIdempotentCodes = [
+ plugin.google.rpc.Code.DEADLINE_EXCEEDED,
+ plugin.google.rpc.Code.UNAVAILABLE,
+];
+export const defaultParametersName = 'default';
+export const defaultParameters = {
+ initial_retry_delay_millis: 100,
+ retry_delay_multiplier: 1.3,
+ max_retry_delay_millis: 60000,
+ // note: the following four parameters are unused but currently required by google-gax.
+ // setting them to some big safe default values.
+ initial_rpc_timeout_millis: 60000,
+ rpc_timeout_multiplier: 1.0,
+ max_rpc_timeout_millis: 60000,
+ total_timeout_millis: 600000,
+};
+
+export class RetryableCodeMap {
+ codeEnumMapping: {
+ [index: string]: string;
+ };
+ uniqueCodesNamesMap: {
+ [uniqueName: string]: string;
+ };
+ prettyCodesNamesMap: {
+ [prettyName: string]: string[];
+ };
+ uniqueParamsNamesMap: {
+ [uniqueName: string]: string;
+ };
+ prettyParamNamesMap: {
+ [prettyName: string]: {};
+ };
+ constructor() {
+ this.uniqueCodesNamesMap = {};
+ this.prettyCodesNamesMap = {};
+ this.uniqueParamsNamesMap = {};
+ this.prettyParamNamesMap = {};
+ // build reverse mapping for enum: 0 => OK, 1 => CANCELLED, etc.
+ this.codeEnumMapping = {};
+ const allCodes = Object.keys(plugin.google.rpc.Code);
+ for (const code of allCodes) {
+ this.codeEnumMapping[
+ ((plugin.google.rpc.Code as unknown) as {
+ [key: string]: plugin.google.rpc.Code;
+ })[code].toString()
+ ] = code;
+ }
+ // generate some pre-defined code sets for compatibility with existing configs
+ this.getRetryableCodesName(
+ defaultNonIdempotentCodes,
+ defaultNonIdempotentRetryCodesName
+ );
+ this.getRetryableCodesName(
+ defaultIdempotentCodes,
+ defaultIdempotentRetryCodesName
+ );
+ this.getParamsName(defaultParameters, 'default');
+ }
+ private buildUniqueCodesName(
+ retryableStatusCodes: plugin.google.rpc.Code[]
+ ): string {
+ // generate a unique readable name for the given retryable set of codes
+ const sortedCodes = retryableStatusCodes.sort(
+ (a, b) => Number(a) - Number(b)
+ );
+ const uniqueName = sortedCodes
+ .map(code => this.codeEnumMapping[code])
+ .join('_')
+ // toSnakeCase() splits on uppercase and we only want to split on
+ // underscores since all enum codes are uppercase.
+ .toLowerCase()
+ .toSnakeCase();
+ return uniqueName;
+ }
+ private buildUniqueParamsName(params: {}): string {
+ // generate a unique, not-so-readable name for the given set of parameters
+ return objectHash(params);
+ }
+ getRetryableCodesName(
+ retryableStatusCodes: plugin.google.rpc.Code[],
+ suggestedName?: string
+ ): string {
+ const uniqueName = this.buildUniqueCodesName(retryableStatusCodes);
+ const prettyName =
+ this.uniqueCodesNamesMap[uniqueName] || suggestedName || uniqueName;
+ if (!this.uniqueCodesNamesMap[uniqueName]) {
+ this.uniqueCodesNamesMap[uniqueName] = prettyName;
+ this.prettyCodesNamesMap[prettyName] = retryableStatusCodes.map(
+ code => this.codeEnumMapping[code]
+ );
+ }
+ return prettyName;
+ }
+ getParamsName(params: {}, suggestedName?: string): string {
+ const uniqueName = this.buildUniqueParamsName(params);
+ const prettyName =
+ this.uniqueParamsNamesMap[uniqueName] || suggestedName || uniqueName;
+ if (!this.uniqueParamsNamesMap[uniqueName]) {
+ this.uniqueParamsNamesMap[uniqueName] = prettyName;
+ this.prettyParamNamesMap[prettyName] = params;
+ }
+ return prettyName;
+ }
+ getPrettyCodesNames(): string[] {
+ return Object.keys(this.prettyCodesNamesMap);
+ }
+ getCodesJSON(prettyName: string): string {
+ return JSON.stringify(this.prettyCodesNamesMap[prettyName]);
+ }
+ getPrettyParamsNames(): string[] {
+ return Object.keys(this.prettyParamNamesMap);
+ }
+ getParamsJSON(prettyName: string): string {
+ return JSON.stringify(this.prettyParamNamesMap[prettyName]);
+ }
+}
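+
+// Usage sketch (illustrative):
+//   const map = new RetryableCodeMap();
+//   const name = map.getRetryableCodesName([plugin.google.rpc.Code.UNAVAILABLE]);
+//   map.getCodesJSON(name);                   // => '["UNAVAILABLE"]'
+//   map.getParamsJSON(defaultParametersName); // => JSON string of defaultParameters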
diff --git a/typescript/src/start_script.ts b/typescript/src/start-script.ts
similarity index 100%
rename from typescript/src/start_script.ts
rename to typescript/src/start-script.ts
diff --git a/typescript/test/test_application/test_js.ts b/typescript/test/test-application/test-js.ts
similarity index 51%
rename from typescript/test/test_application/test_js.ts
rename to typescript/test/test-application/test-js.ts
index 65e1d3d19b..57530cccf7 100644
--- a/typescript/test/test_application/test_js.ts
+++ b/typescript/test/test-application/test-js.ts
@@ -32,8 +32,7 @@ const PROTOS = path.join(
'..',
'..',
'..',
- 'typescript',
- 'test',
+ 'test-fixtures',
'protos'
);
const LOCAL_JS_APPLICATION = path.join(
@@ -48,37 +47,34 @@ const JS_TEST_APPLICATION = path.join(
'..',
'..',
'..',
- 'typescript',
- 'test',
- 'test_application_js'
+ 'test-fixtures',
+ 'test-application-js'
);
-describe('TestApplication', () => {
- describe('Test application for js users', () => {
- it('npm install showcase', async function() {
- this.timeout(60000);
- // copy protos to generated client library and copy test application to local.
- fs.copySync(PROTOS, path.join(SHOWCASE_LIB, 'protos'));
- fs.copySync(JS_TEST_APPLICATION, LOCAL_JS_APPLICATION);
- process.chdir(SHOWCASE_LIB);
- await exec(`npm install`);
- });
- it('npm pack showcase library and copy it to test application', async function() {
- this.timeout(60000);
- await exec(`npm pack`);
- process.chdir(LOCAL_JS_APPLICATION);
- fs.copySync(PACKED_LIB_PATH, path.join(LOCAL_JS_APPLICATION, PACKED_LIB));
- });
- it('npm install showcase library in test application', async function() {
- this.timeout(60000);
- await exec(`npm install`);
- });
- it('run integration in test application', async function() {
- this.timeout(60000);
- await exec(`npm test`);
- });
- it('run browser test in application', async function() {
- this.timeout(120000);
- await exec(`npm run browser-test`);
- });
+describe('Test application for JavaScript users', () => {
+ it('npm install showcase', async function() {
+ this.timeout(60000);
+ // copy protos to generated client library and copy test application to local.
+ fs.copySync(PROTOS, path.join(SHOWCASE_LIB, 'protos'));
+ fs.copySync(JS_TEST_APPLICATION, LOCAL_JS_APPLICATION);
+ process.chdir(SHOWCASE_LIB);
+ await exec(`npm install`);
+ });
+ it('npm pack showcase library and copy it to test application', async function() {
+ this.timeout(60000);
+ await exec(`npm pack`);
+ process.chdir(LOCAL_JS_APPLICATION);
+ fs.copySync(PACKED_LIB_PATH, path.join(LOCAL_JS_APPLICATION, PACKED_LIB));
+ });
+ it('npm install showcase library in test application', async function() {
+ this.timeout(60000);
+ await exec(`npm install`);
+ });
+ it('run integration in test application', async function() {
+ this.timeout(60000);
+ await exec(`npm test`);
+ });
+ it('run browser test in application', async function() {
+ this.timeout(120000);
+ await exec(`npm run browser-test`);
});
});
diff --git a/typescript/test/test-application/test-ts.ts b/typescript/test/test-application/test-ts.ts
new file mode 100644
index 0000000000..e7921f3d86
--- /dev/null
+++ b/typescript/test/test-application/test-ts.ts
@@ -0,0 +1,84 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import * as util from 'util';
+import * as child_process from 'child_process';
+import * as fs from 'fs-extra';
+import * as path from 'path';
+import { describe, it } from 'mocha';
+const exec = util.promisify(child_process.exec);
+const SHOWCASE_LIB = path.join(
+ __dirname,
+ '..',
+ '..',
+ '..',
+ '.test-out-showcase'
+);
+const PACKED_LIB = 'showcase-0.1.0.tgz';
+const PACKED_LIB_PATH = path.join(SHOWCASE_LIB, PACKED_LIB);
+const PROTOS = path.join(
+ __dirname,
+ '..',
+ '..',
+ '..',
+ 'test-fixtures',
+ 'protos'
+);
+const LOCAL_TS_APPLICATION = path.join(
+ __dirname,
+ '..',
+ '..',
+ '..',
+ '.test-application-ts'
+);
+const TS_TEST_APPLICATION = path.join(
+ __dirname,
+ '..',
+ '..',
+ '..',
+ 'test-fixtures',
+ 'test-application-ts'
+);
+describe('Test application for TypeScript users', () => {
+ it('npm install showcase', async function() {
+ this.timeout(60000);
+ // copy protos to generated client library and copy test application to local.
+ if (!fs.existsSync(path.join(SHOWCASE_LIB, 'protos'))) {
+ fs.copySync(PROTOS, path.join(SHOWCASE_LIB, 'protos'));
+ }
+ if (!fs.existsSync(LOCAL_TS_APPLICATION)) {
+ fs.copySync(TS_TEST_APPLICATION, LOCAL_TS_APPLICATION);
+ }
+ process.chdir(SHOWCASE_LIB);
+ await exec(`npm install`);
+ });
+ it('npm pack showcase library and copy it to test application', async function() {
+ this.timeout(60000);
+ await exec(`npm pack`);
+ process.chdir(LOCAL_TS_APPLICATION);
+ fs.copySync(PACKED_LIB_PATH, path.join(LOCAL_TS_APPLICATION, PACKED_LIB));
+ });
+ it('npm install showcase library in test application', async function() {
+ this.timeout(60000);
+ await exec(`npm install`);
+ });
+ it('run integration in test application', async function() {
+ this.timeout(120000);
+ await exec(`npm test`);
+ });
+ it('run browser test in application', async function() {
+ this.timeout(120000);
+ await exec(`npm run browser-test`);
+ });
+});
diff --git a/typescript/test/test_application/test_ts.ts b/typescript/test/test_application/test_ts.ts
deleted file mode 100644
index 1e5e8fc89a..0000000000
--- a/typescript/test/test_application/test_ts.ts
+++ /dev/null
@@ -1,88 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import * as util from 'util';
-import * as child_process from 'child_process';
-import * as fs from 'fs-extra';
-import * as path from 'path';
-import { describe, it } from 'mocha';
-const exec = util.promisify(child_process.exec);
-const SHOWCASE_LIB = path.join(
- __dirname,
- '..',
- '..',
- '..',
- '.test-out-showcase'
-);
-const PACKED_LIB = 'showcase-0.1.0.tgz';
-const PACKED_LIB_PATH = path.join(SHOWCASE_LIB, PACKED_LIB);
-const PROTOS = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'protos'
-);
-const LOCAL_TS_APPLICATION = path.join(
- __dirname,
- '..',
- '..',
- '..',
- '.test-application-ts'
-);
-const TS_TEST_APPLICATION = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'test_application_ts'
-);
-describe('TestApplication', () => {
- describe('Test application for ts users', () => {
- it('npm install showcase', async function() {
- this.timeout(60000);
- // copy protos to generated client library and copy test application to local.
- if (!fs.existsSync(path.join(SHOWCASE_LIB, 'protos'))) {
- fs.copySync(PROTOS, path.join(SHOWCASE_LIB, 'protos'));
- }
- if (!fs.existsSync(LOCAL_TS_APPLICATION)) {
- fs.copySync(TS_TEST_APPLICATION, LOCAL_TS_APPLICATION);
- }
- process.chdir(SHOWCASE_LIB);
- await exec(`npm install`);
- });
- it('npm pack showcase library and copy it to test application', async function() {
- this.timeout(60000);
- await exec(`npm pack`);
- process.chdir(LOCAL_TS_APPLICATION);
- fs.copySync(PACKED_LIB_PATH, path.join(LOCAL_TS_APPLICATION, PACKED_LIB));
- });
- it('npm install showcase library in test application', async function() {
- this.timeout(60000);
- await exec(`npm install`);
- });
- it('run integration in test application', async function() {
- this.timeout(120000);
- await exec(`npm test`);
- });
- it('run browser test in application', async function() {
- this.timeout(120000);
- await exec(`npm run browser-test`);
- });
- });
-});
diff --git a/typescript/test/testdata/keymanager/src/v1/key_management_service_proto_list.json.baseline b/typescript/test/testdata/keymanager/src/v1/key_management_service_proto_list.json.baseline
deleted file mode 100644
index 32ef7312bc..0000000000
--- a/typescript/test/testdata/keymanager/src/v1/key_management_service_proto_list.json.baseline
+++ /dev/null
@@ -1,4 +0,0 @@
-[
- "../../protos/google/kms/v1/resources.proto",
- "../../protos/google/kms/v1/service.proto"
-]
diff --git a/typescript/test/unit/api.ts b/typescript/test/unit/api.ts
index 76acb50deb..5e7e0b4b80 100644
--- a/typescript/test/unit/api.ts
+++ b/typescript/test/unit/api.ts
@@ -17,7 +17,7 @@ import * as plugin from '../../../pbjs-genfiles/plugin';
import * as assert from 'assert';
import { describe, it } from 'mocha';
-describe('schema/api.ts', () => {
+describe('src/schema/api.ts', () => {
it('should construct an API object and return list of protos', () => {
const fd = new plugin.google.protobuf.FileDescriptorProto();
fd.name = 'google/cloud/test/v1/test.proto';
diff --git a/typescript/test/unit/baselines.ts b/typescript/test/unit/baselines.ts
new file mode 100644
index 0000000000..00b4a0f95f
--- /dev/null
+++ b/typescript/test/unit/baselines.ts
@@ -0,0 +1,74 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import { describe } from 'mocha';
+import { runBaselineTest, initBaselineTest } from '../util';
+
+describe('Baseline tests', () => {
+ initBaselineTest();
+
+ runBaselineTest({
+ baselineName: 'dlp',
+ outputDir: '.test-out-dlp',
+ protoPath: 'google/privacy/dlp/v2/*.proto',
+ useCommonProto: true,
+ });
+
+ runBaselineTest({
+ baselineName: 'texttospeech',
+ outputDir: '.test-out-texttospeech',
+ protoPath: 'google/cloud/texttospeech/v1/*.proto',
+ useCommonProto: false,
+ grpcServiceConfig:
+ 'google/cloud/texttospeech/v1/texttospeech_grpc_service_config.json',
+ packageName: '@google-cloud/text-to-speech',
+ });
+
+ runBaselineTest({
+ baselineName: 'kms',
+ outputDir: '.test-out-kms',
+ protoPath: 'google/cloud/kms/v1/*.proto',
+ useCommonProto: false,
+ });
+
+ runBaselineTest({
+ baselineName: 'monitoring',
+ outputDir: '.test-out-monitoring',
+ protoPath: 'google/monitoring/v3/*.proto',
+ useCommonProto: false,
+ mainServiceName: 'monitoring',
+ });
+
+ runBaselineTest({
+ baselineName: 'redis',
+ outputDir: '.test-out-redis',
+ protoPath: 'google/cloud/redis/v1beta1/*.proto',
+ useCommonProto: true,
+ });
+
+ runBaselineTest({
+ baselineName: 'showcase',
+ outputDir: '.test-out-showcase',
+ protoPath: 'google/showcase/v1beta1/echo.proto',
+ useCommonProto: false,
+ mainServiceName: 'ShowcaseService',
+ });
+
+ runBaselineTest({
+ baselineName: 'translate',
+ outputDir: '.test-out-translate',
+ protoPath: 'google/cloud/translate/v3beta1/*.proto',
+ useCommonProto: true,
+ });
+});
diff --git a/typescript/test/unit/extra-option.ts b/typescript/test/unit/extra-option.ts
deleted file mode 100644
index 86272670df..0000000000
--- a/typescript/test/unit/extra-option.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import * as assert from 'assert';
-import { execSync } from 'child_process';
-import * as fs from 'fs';
-import * as path from 'path';
-import * as rimraf from 'rimraf';
-import { describe, it } from 'mocha';
-import { equalToBaseline } from '../util';
-
-const cwd = process.cwd();
-
-const OUTPUT_DIR = path.join(cwd, '.test-out-texttospeech');
-const GOOGLE_GAX_PROTOS_DIR = path.join(
- cwd,
- 'node_modules',
- 'google-gax',
- 'protos'
-);
-const PROTOS_DIR = path.join(cwd, 'build', 'test', 'protos');
-const TTS_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'cloud',
- 'texttospeech',
- 'v1',
- 'cloud_tts.proto'
-);
-
-const GRPC_SERVICE_CONFIG = path.join(
- PROTOS_DIR,
- 'google',
- 'cloud',
- 'texttospeech',
- 'v1',
- 'texttospeech_grpc_service_config.json'
-);
-
-const BASELINE_DIR = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'testdata',
- 'texttospeech'
-);
-
-const PACKAGE_NAME = '@google-cloud/text-to-speech';
-const SRCDIR = path.join(cwd, 'build', 'src');
-const CLI = path.join(SRCDIR, 'cli.js');
-
-describe('Package Name & grpc Config', () => {
- describe('Generate Text-to-Speech library', () => {
- it('Generated library name & grpc Config should be same with baseline.', function() {
- this.timeout(10000);
- if (fs.existsSync(OUTPUT_DIR)) {
- rimraf.sync(OUTPUT_DIR);
- }
- fs.mkdirSync(OUTPUT_DIR);
-
- try {
- execSync(`chmod +x ${CLI}`);
- } catch (err) {
- console.warn(`Failed to chmod +x ${CLI}: ${err}. Ignoring...`);
- }
-
- execSync(
- `node build/src/start_script.js ` +
- `--output-dir=${OUTPUT_DIR} ` +
- `-I ${GOOGLE_GAX_PROTOS_DIR} ` +
- `-I ${PROTOS_DIR} ` +
- `--grpc-service-config=${GRPC_SERVICE_CONFIG} ` +
- `--package-name=${PACKAGE_NAME} ` +
- TTS_PROTO_FILE
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR));
- });
-
- it('Use alias name should also work.', function() {
- this.timeout(10000);
- execSync(
- `node build/src/start_script.js ` +
- `--output-dir=${OUTPUT_DIR} ` +
- `-I ${GOOGLE_GAX_PROTOS_DIR} ` +
- `-I ${PROTOS_DIR} ` +
- `--grpc_service_config=${GRPC_SERVICE_CONFIG} ` +
- `--package_name=${PACKAGE_NAME} ` +
- TTS_PROTO_FILE
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR));
- });
- });
-});
diff --git a/typescript/test/unit/monitor.ts b/typescript/test/unit/monitor.ts
deleted file mode 100644
index 2bca28c8e3..0000000000
--- a/typescript/test/unit/monitor.ts
+++ /dev/null
@@ -1,77 +0,0 @@
-import * as assert from 'assert';
-import { execSync } from 'child_process';
-import * as fs from 'fs';
-import { describe, it } from 'mocha';
-import * as path from 'path';
-import * as rimraf from 'rimraf';
-
-import { equalToBaseline } from '../util';
-
-const cwd = process.cwd();
-
-const START_SCRIPT = path.join(
- process.cwd(),
- 'build',
- 'src',
- 'start_script.js'
-);
-
-const OUTPUT_DIR = path.join(cwd, '.test-out-monitoring');
-
-const PROTOS_DIR = path.join(cwd, 'build', 'test', 'protos');
-const MONITOR_PROTO_FILES = path.join(
- PROTOS_DIR,
- 'google',
- 'monitoring',
- 'v3',
- '*.proto'
-);
-
-const BASELINE_DIR = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'testdata'
-);
-
-const BASELINE_DIR_MONITOR = path.join(BASELINE_DIR, 'monitoring');
-const SRCDIR = path.join(cwd, 'build', 'src');
-const CLI = path.join(SRCDIR, 'cli.js');
-const PLUGIN = path.join(SRCDIR, 'protoc-gen-typescript_gapic');
-
-describe('MonitoringGenerateTest', () => {
- describe('Generate Client library', () => {
- it('Generated monitoring library have same output with baseline.', function() {
- this.timeout(10000);
- if (fs.existsSync(OUTPUT_DIR)) {
- rimraf.sync(OUTPUT_DIR);
- }
- fs.mkdirSync(OUTPUT_DIR);
-
- if (fs.existsSync(PLUGIN)) {
- rimraf.sync(PLUGIN);
- }
- fs.copyFileSync(CLI, PLUGIN);
- process.env['PATH'] = SRCDIR + path.delimiter + process.env['PATH'];
-
- try {
- execSync(`chmod +x ${PLUGIN}`);
- } catch (err) {
- console.warn(`Failed to chmod +x ${PLUGIN}: ${err}. Ignoring...`);
- }
-
- execSync(
- 'node ' +
- START_SCRIPT +
- ` -I${PROTOS_DIR}` +
- ` ${MONITOR_PROTO_FILES}` +
- ` --output_dir=${OUTPUT_DIR}` +
- ` --main_service=monitoring`
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR_MONITOR));
- });
- });
-});
diff --git a/typescript/test/unit/multi_pattern_resource.ts b/typescript/test/unit/multi_pattern_resource.ts
deleted file mode 100644
index 7c793ee83f..0000000000
--- a/typescript/test/unit/multi_pattern_resource.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import * as assert from 'assert';
-import { execSync } from 'child_process';
-import * as fs from 'fs';
-import * as path from 'path';
-import * as rimraf from 'rimraf';
-import { describe, it } from 'mocha';
-import { equalToBaseline } from '../util';
-
-const cwd = process.cwd();
-
-const OUTPUT_DIR = path.join(cwd, '.test-out-dlp');
-const GOOGLE_GAX_PROTOS_DIR = path.join(
- cwd,
- 'node_modules',
- 'google-gax',
- 'protos'
-);
-const PROTOS_DIR = path.join(cwd, 'build', 'test', 'protos');
-const DLP_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'privacy',
- 'dlp',
- 'v2',
- '*.proto '
-);
-
-const COMMON_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'cloud',
- 'common_resources.proto'
-);
-
-const BASELINE_DIR = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'testdata'
-);
-
-const BASELINE_DIR_KM = path.join(BASELINE_DIR, 'dlp');
-const SRCDIR = path.join(cwd, 'build', 'src');
-const CLI = path.join(SRCDIR, 'cli.js');
-const PLUGIN = path.join(SRCDIR, 'protoc-gen-typescript_gapic');
-
-describe('LibraryWithMultiplePatternResourceGenerateTest', () => {
- describe('Generate Client library', () => {
- it('Generated client library with multiple pattern resource should have same output with baseline.', function() {
- this.timeout(60000);
- if (fs.existsSync(OUTPUT_DIR)) {
- rimraf.sync(OUTPUT_DIR);
- }
- fs.mkdirSync(OUTPUT_DIR);
-
- if (fs.existsSync(PLUGIN)) {
- rimraf.sync(PLUGIN);
- }
- fs.copyFileSync(CLI, PLUGIN);
- process.env['PATH'] = SRCDIR + path.delimiter + process.env['PATH'];
-
- try {
- execSync(`chmod +x ${PLUGIN}`);
- } catch (err) {
- console.warn(`Failed to chmod +x ${PLUGIN}: ${err}. Ignoring...`);
- }
-
- execSync(
- `protoc --typescript_gapic_out=${OUTPUT_DIR} ` +
- `-I${GOOGLE_GAX_PROTOS_DIR} ` +
- `-I${PROTOS_DIR} ` +
- DLP_PROTO_FILE +
- COMMON_PROTO_FILE
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR_KM));
- });
- });
-});
diff --git a/typescript/test/unit/multi_proto.ts b/typescript/test/unit/multi_proto.ts
deleted file mode 100644
index 9c0aa70a27..0000000000
--- a/typescript/test/unit/multi_proto.ts
+++ /dev/null
@@ -1,86 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import * as assert from 'assert';
-import { execSync } from 'child_process';
-import * as fs from 'fs';
-import * as path from 'path';
-import * as rimraf from 'rimraf';
-import { describe, it } from 'mocha';
-import { equalToBaseline } from '../util';
-
-const cwd = process.cwd();
-
-const OUTPUT_DIR = path.join(cwd, '.test-out-keymanager');
-const GOOGLE_GAX_PROTOS_DIR = path.join(
- cwd,
- 'node_modules',
- 'google-gax',
- 'protos'
-);
-const PROTOS_DIR = path.join(cwd, 'build', 'test', 'protos');
-const KMS_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'kms',
- 'v1',
- 'service.proto'
-);
-
-const BASELINE_DIR = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'testdata'
-);
-
-const BASELINE_DIR_KM = path.join(BASELINE_DIR, 'keymanager');
-const SRCDIR = path.join(cwd, 'build', 'src');
-const CLI = path.join(SRCDIR, 'cli.js');
-const PLUGIN = path.join(SRCDIR, 'protoc-gen-typescript_gapic');
-
-describe('MultiProtoListGenerateTest', () => {
- describe('Generate Client library', () => {
- it('Generated proto list should have same output with baseline.', function() {
- this.timeout(10000);
- if (fs.existsSync(OUTPUT_DIR)) {
- rimraf.sync(OUTPUT_DIR);
- }
- fs.mkdirSync(OUTPUT_DIR);
-
- if (fs.existsSync(PLUGIN)) {
- rimraf.sync(PLUGIN);
- }
- fs.copyFileSync(CLI, PLUGIN);
- process.env['PATH'] = SRCDIR + path.delimiter + process.env['PATH'];
-
- try {
- execSync(`chmod +x ${PLUGIN}`);
- } catch (err) {
- console.warn(`Failed to chmod +x ${PLUGIN}: ${err}. Ignoring...`);
- }
-
- execSync(
- `protoc --typescript_gapic_out=${OUTPUT_DIR} ` +
- `-I${GOOGLE_GAX_PROTOS_DIR} ` +
- `-I${PROTOS_DIR} ` +
- KMS_PROTO_FILE
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR_KM));
- });
- });
-});
diff --git a/typescript/test/unit/naming.ts b/typescript/test/unit/naming.ts
index e38f6a6c63..da3a2d7180 100644
--- a/typescript/test/unit/naming.ts
+++ b/typescript/test/unit/naming.ts
@@ -17,7 +17,7 @@ import { describe, it } from 'mocha';
import * as plugin from '../../../pbjs-genfiles/plugin';
import { Naming, Options } from '../../src/schema/naming';
-describe('schema/naming.ts', () => {
+describe('src/schema/naming.ts', () => {
it('parses name correctly', () => {
const descriptor1 = new plugin.google.protobuf.FileDescriptorProto();
const descriptor2 = new plugin.google.protobuf.FileDescriptorProto();
diff --git a/typescript/test/unit/proto.ts b/typescript/test/unit/proto.ts
index 02ada6ec19..a32d5342ee 100644
--- a/typescript/test/unit/proto.ts
+++ b/typescript/test/unit/proto.ts
@@ -17,7 +17,7 @@ import { describe, it } from 'mocha';
import * as plugin from '../../../pbjs-genfiles/plugin';
import { getHeaderRequestParams } from '../../src/schema/proto';
-describe('schema/proto.ts', () => {
+describe('src/schema/proto.ts', () => {
describe('should get header parameters from http rule', () => {
it('works with no parameter', () => {
const httpRule: plugin.google.api.IHttpRule = {
diff --git a/typescript/test/unit/redis.ts b/typescript/test/unit/redis.ts
deleted file mode 100644
index c177bbcd8e..0000000000
--- a/typescript/test/unit/redis.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import * as assert from 'assert';
-import { execSync } from 'child_process';
-import * as fs from 'fs';
-import * as path from 'path';
-import * as rimraf from 'rimraf';
-import { describe, it } from 'mocha';
-import { equalToBaseline } from '../util';
-
-const cwd = process.cwd();
-
-const OUTPUT_DIR = path.join(cwd, '.test-out-redis');
-const GOOGLE_GAX_PROTOS_DIR = path.join(
- cwd,
- 'node_modules',
- 'google-gax',
- 'protos'
-);
-const PROTOS_DIR = path.join(cwd, 'build', 'test', 'protos');
-const TRANSLATE_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'cloud',
- 'redis',
- 'v1beta1',
- 'cloud_redis.proto '
-);
-
-const COMMON_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'cloud',
- 'common_resources.proto'
-);
-
-const BASELINE_DIR = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'testdata'
-);
-
-const BASELINE_DIR_KM = path.join(BASELINE_DIR, 'redis');
-const SRCDIR = path.join(cwd, 'build', 'src');
-const CLI = path.join(SRCDIR, 'cli.js');
-const PLUGIN = path.join(SRCDIR, 'protoc-gen-typescript_gapic');
-
-describe('LongRunning Maetadata & Response Test', () => {
- describe('Generate Client library', () => {
- it('Generated client library with common resource should have same output with baseline.', function() {
- this.timeout(60000);
- if (fs.existsSync(OUTPUT_DIR)) {
- rimraf.sync(OUTPUT_DIR);
- }
- fs.mkdirSync(OUTPUT_DIR);
-
- if (fs.existsSync(PLUGIN)) {
- rimraf.sync(PLUGIN);
- }
- fs.copyFileSync(CLI, PLUGIN);
- process.env['PATH'] = SRCDIR + path.delimiter + process.env['PATH'];
-
- try {
- execSync(`chmod +x ${PLUGIN}`);
- } catch (err) {
- console.warn(`Failed to chmod +x ${PLUGIN}: ${err}. Ignoring...`);
- }
-
- execSync(
- `protoc --typescript_gapic_out=${OUTPUT_DIR} ` +
- `-I${GOOGLE_GAX_PROTOS_DIR} ` +
- `-I${PROTOS_DIR} ` +
- TRANSLATE_PROTO_FILE +
- COMMON_PROTO_FILE
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR_KM));
- });
- });
-});
diff --git a/typescript/test/unit/resourceDatabase.ts b/typescript/test/unit/resource-database.ts
similarity index 98%
rename from typescript/test/unit/resourceDatabase.ts
rename to typescript/test/unit/resource-database.ts
index 6df4a7feda..0f398c036b 100644
--- a/typescript/test/unit/resourceDatabase.ts
+++ b/typescript/test/unit/resource-database.ts
@@ -13,11 +13,11 @@
// limitations under the License.
import * as plugin from '../../../pbjs-genfiles/plugin';
-import { ResourceDatabase } from '../../src/schema/resourceDatabase';
+import { ResourceDatabase } from '../../src/schema/resource-database';
import { describe, it, beforeEach, afterEach } from 'mocha';
import * as assert from 'assert';
-describe('ResourceDatabase', () => {
+describe('src/schema/resource-database.ts', () => {
let warnings: string[] = [];
const savedWarn = console.warn;
const errorLocation = 'ERROR LOCATION';
diff --git a/typescript/test/unit/resource.ts b/typescript/test/unit/resource.ts
deleted file mode 100644
index 72be295421..0000000000
--- a/typescript/test/unit/resource.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import * as assert from 'assert';
-import { execSync } from 'child_process';
-import * as fs from 'fs';
-import * as path from 'path';
-import * as rimraf from 'rimraf';
-import { describe, it } from 'mocha';
-import { equalToBaseline } from '../util';
-
-const cwd = process.cwd();
-
-const OUTPUT_DIR = path.join(cwd, '.test-out-translate');
-const GOOGLE_GAX_PROTOS_DIR = path.join(
- cwd,
- 'node_modules',
- 'google-gax',
- 'protos'
-);
-const PROTOS_DIR = path.join(cwd, 'build', 'test', 'protos');
-const TRANSLATE_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'cloud',
- 'translate',
- 'v3beta1',
- 'translation_service.proto '
-);
-
-const COMMON_PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'cloud',
- 'common_resources.proto'
-);
-
-const BASELINE_DIR = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'testdata'
-);
-
-const BASELINE_DIR_KM = path.join(BASELINE_DIR, 'translate');
-const SRCDIR = path.join(cwd, 'build', 'src');
-const CLI = path.join(SRCDIR, 'cli.js');
-const PLUGIN = path.join(SRCDIR, 'protoc-gen-typescript_gapic');
-
-describe('LibraryWithCommonResourceGenerateTest', () => {
- describe('Generate Client library', () => {
- it('Generated client library with common resource should have same output with baseline.', function() {
- this.timeout(60000);
- if (fs.existsSync(OUTPUT_DIR)) {
- rimraf.sync(OUTPUT_DIR);
- }
- fs.mkdirSync(OUTPUT_DIR);
-
- if (fs.existsSync(PLUGIN)) {
- rimraf.sync(PLUGIN);
- }
- fs.copyFileSync(CLI, PLUGIN);
- process.env['PATH'] = SRCDIR + path.delimiter + process.env['PATH'];
-
- try {
- execSync(`chmod +x ${PLUGIN}`);
- } catch (err) {
- console.warn(`Failed to chmod +x ${PLUGIN}: ${err}. Ignoring...`);
- }
-
- execSync(
- `protoc --typescript_gapic_out=${OUTPUT_DIR} ` +
- `-I${GOOGLE_GAX_PROTOS_DIR} ` +
- `-I${PROTOS_DIR} ` +
- TRANSLATE_PROTO_FILE +
- COMMON_PROTO_FILE
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR_KM));
- });
- });
-});
diff --git a/typescript/test/unit/codemap.ts b/typescript/test/unit/retryable-code-map.ts
similarity index 98%
rename from typescript/test/unit/codemap.ts
rename to typescript/test/unit/retryable-code-map.ts
index 66897c9dc4..3d933acf5d 100644
--- a/typescript/test/unit/codemap.ts
+++ b/typescript/test/unit/retryable-code-map.ts
@@ -12,14 +12,14 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-import { RetryableCodeMap } from '../../src/schema/proto';
+import { RetryableCodeMap } from '../../src/schema/retryable-code-map';
import * as plugin from '../../../pbjs-genfiles/plugin';
import * as assert from 'assert';
import { describe, it } from 'mocha';
const Code = plugin.google.rpc.Code;
-describe('RetryableCodeMap', () => {
+describe('src/schema/retryable-code-map.ts', () => {
describe('Retry codes', () => {
it('has readable names for common code lists', () => {
const map = new RetryableCodeMap();
diff --git a/typescript/test/unit/starter_script_test.ts b/typescript/test/unit/starter_script_test.ts
deleted file mode 100644
index 555b3c641a..0000000000
--- a/typescript/test/unit/starter_script_test.ts
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import * as assert from 'assert';
-import { execSync } from 'child_process';
-import * as fs from 'fs';
-import * as path from 'path';
-import * as rimraf from 'rimraf';
-import * as util from 'util';
-import { describe, it } from 'mocha';
-import { equalToBaseline } from '../util';
-
-const rmrf = util.promisify(rimraf);
-
-const START_SCRIPT = path.join(
- process.cwd(),
- 'build',
- 'src',
- 'start_script.js'
-);
-const OUTPUT_DIR = path.join(__dirname, '..', '..', '..', '.test-out-showcase');
-const PROTOS_DIR = path.join(process.cwd(), 'build', 'test', 'protos');
-const PROTO_FILE = path.join(
- PROTOS_DIR,
- 'google',
- 'showcase',
- 'v1beta1',
- 'echo.proto'
-);
-const BASELINE_DIR_SHOWCASE = path.join(
- __dirname,
- '..',
- '..',
- '..',
- 'typescript',
- 'test',
- 'testdata',
- 'showcase'
-);
-
-describe('StarterScriptTest', () => {
- describe('use start script for generating showcase library ', () => {
- it('use custom folder for generated client library.', async function() {
- this.timeout(10000);
- if (fs.existsSync(OUTPUT_DIR)) {
- await rmrf(OUTPUT_DIR);
- }
- fs.mkdirSync(OUTPUT_DIR);
- execSync(
- 'node ' +
- START_SCRIPT +
- ` -I${PROTOS_DIR}` +
- ` ${PROTO_FILE}` +
- ` --output_dir=${OUTPUT_DIR}` +
- ` --main_service=ShowcaseService`
- );
- assert(equalToBaseline(OUTPUT_DIR, BASELINE_DIR_SHOWCASE));
- });
- });
-});
diff --git a/typescript/test/unit/util.ts b/typescript/test/unit/util.ts
index a4c020452d..346cfb5e3b 100644
--- a/typescript/test/unit/util.ts
+++ b/typescript/test/unit/util.ts
@@ -17,7 +17,7 @@ import { describe, it } from 'mocha';
import { commonPrefix, duration, seconds, milliseconds } from '../../src/util';
import * as plugin from '../../../pbjs-genfiles/plugin';
-describe('util.ts', () => {
+describe('src/util.ts', () => {
describe('CommonPrefix', () => {
it('should return correct result', () => {
assert.strictEqual(commonPrefix(['abc', 'abcd', 'ab']), 'ab');
diff --git a/typescript/test/util.ts b/typescript/test/util.ts
index b7b85d204f..d385d1b07f 100644
--- a/typescript/test/util.ts
+++ b/typescript/test/util.ts
@@ -14,6 +14,10 @@
import * as fs from 'fs-extra';
import * as path from 'path';
+import { before, it } from 'mocha';
+import * as rimraf from 'rimraf';
+import { execSync } from 'child_process';
+import * as assert from 'assert';
const NO_OUTPUT_FILE = 0;
const IDENTICAL_FILE = 1;
@@ -21,10 +25,95 @@ const FILE_WITH_DIFF_CONTENT = 2;
const BASELINE_EXTENSION = '.baseline';
-export function equalToBaseline(
- outpurDir: string,
- baselineDir: string
-): boolean {
+export interface BaselineOptions {
+ outputDir: string;
+ protoPath: string;
+ useCommonProto: boolean;
+ baselineName: string;
+ mainServiceName?: string;
+ grpcServiceConfig?: string;
+ packageName?: string;
+}
+
+const cwd = process.cwd();
+const googleGaxProtosDir = path.join(
+ cwd,
+ 'node_modules',
+ 'google-gax',
+ 'protos'
+);
+const protosDirRoot = path.join(cwd, 'test-fixtures', 'protos');
+const commonProtoFilePath = path.join(
+ protosDirRoot,
+ 'google',
+ 'cloud',
+ 'common_resources.proto'
+);
+const baselineRootDir = path.join(cwd, 'baselines');
+const srcDir = path.join(cwd, 'build', 'src');
+const cliPath = path.join(srcDir, 'cli.js');
+const pluginPath = path.join(srcDir, 'protoc-gen-typescript_gapic');
+const startScriptPath = path.join(cwd, 'build', 'src', 'start-script.js');
+
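+// Registers a Mocha before() hook that copies the compiled CLI to the
+// protoc plugin filename and prepends the build directory to PATH so that
+// protoc can locate protoc-gen-typescript_gapic during the baseline tests.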
+export function initBaselineTest() {
+ before(() => {
+ if (fs.existsSync(pluginPath)) {
+ rimraf.sync(pluginPath);
+ }
+ fs.copyFileSync(cliPath, pluginPath);
+ process.env['PATH'] = srcDir + path.delimiter + process.env['PATH'];
+
+ try {
+ execSync(`chmod +x ${pluginPath} ${cliPath}`);
+ } catch (err) {
+      console.warn(`Failed to chmod +x ${pluginPath} ${cliPath}: ${err}. Ignoring...`);
+ }
+ });
+}
+
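+// Registers a Mocha test case named after the baseline: it regenerates the
+// client library from the given proto via the start script and asserts that
+// the generated output matches the checked-in baseline directory.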
+export function runBaselineTest(options: BaselineOptions) {
+ const outputDir = path.join(cwd, options.outputDir);
+ const protoPath = path.join(
+ protosDirRoot,
+ options.protoPath.split('/').join(path.sep)
+ );
+ const baselineDir = path.join(baselineRootDir, options.baselineName);
+ const grpcServiceConfig = options.grpcServiceConfig
+ ? path.join(
+ protosDirRoot,
+ options.grpcServiceConfig.split('/').join(path.sep)
+ )
+ : undefined;
+
+ it(options.baselineName, function() {
+ this.timeout(60000);
+ if (fs.existsSync(outputDir)) {
+ rimraf.sync(outputDir);
+ }
+ fs.mkdirSync(outputDir);
+
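+    // Assemble the start-script command line; the common resources proto and
+    // the optional flags are appended only when the corresponding option is set.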
+ let commandLine =
+ `node ${startScriptPath} --output_dir=${outputDir} ` +
+ `-I${protosDirRoot} -I${googleGaxProtosDir} ${protoPath}`;
+ if (options.useCommonProto) {
+ commandLine += ` ${commonProtoFilePath}`;
+ }
+ if (options.mainServiceName) {
+ commandLine += ` --main-service=${options.mainServiceName}`;
+ }
+ if (grpcServiceConfig) {
+ commandLine += ` --grpc-service-config=${grpcServiceConfig}`;
+ }
+ if (options.packageName) {
+ commandLine += ` --package-name=${options.packageName}`;
+ }
+
+ execSync(commandLine);
+ assert(equalToBaseline(outputDir, baselineDir));
+ });
+}
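+
+// Example of how a baseline test might call these helpers (illustrative
+// values only; actual tests supply their own output directory, proto path,
+// and baseline name):
+//
+//   describe('TranslateBaseline', () => {
+//     initBaselineTest();
+//     runBaselineTest({
+//       baselineName: 'translate',
+//       outputDir: '.test-out-translate',
+//       protoPath: 'google/cloud/translate/v3beta1/translation_service.proto',
+//       useCommonProto: true,
+//     });
+//   });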
+
+function equalToBaseline(outpurDir: string, baselineDir: string): boolean {
let result = true;
// put all baseline files into fileStack
let fileStack: string[] = [];