diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index 4a30a08e..1350faef 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -9,9 +9,9 @@ branchProtectionRules:
     - "ci/kokoro: System test"
     - docs
     - lint
-    - test (12)
     - test (14)
     - test (16)
+    - test (18)
     - cla/google
     - windows
     - OwlBot Post Processor
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index bd813c79..711957ba 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        node: [12, 14, 16, 18]
+        node: [14, 16, 18, 20]
     steps:
       - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
diff --git a/.kokoro/continuous/node12/common.cfg b/.kokoro/continuous/node12/common.cfg
deleted file mode 100644
index 5e26eaed..00000000
--- a/.kokoro/continuous/node12/common.cfg
+++ /dev/null
@@ -1,24 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-bigquery/.kokoro/test.sh"
-}
diff --git a/.kokoro/continuous/node12/lint.cfg b/.kokoro/continuous/node12/lint.cfg
deleted file mode 100644
index 80b86f14..00000000
--- a/.kokoro/continuous/node12/lint.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-bigquery/.kokoro/lint.sh"
-}
diff --git a/.kokoro/continuous/node12/samples-test.cfg b/.kokoro/continuous/node12/samples-test.cfg
deleted file mode 100644
index 54748bab..00000000
--- a/.kokoro/continuous/node12/samples-test.cfg
+++ /dev/null
@@ -1,12 +0,0 @@
-# Download resources for system tests (service account key, etc.)
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
-
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-bigquery/.kokoro/samples-test.sh"
-}
-
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "long-door-651-kokoro-system-test-service-account"
-}
\ No newline at end of file
diff --git a/.kokoro/continuous/node12/system-test.cfg b/.kokoro/continuous/node12/system-test.cfg
deleted file mode 100644
index c89e9937..00000000
--- a/.kokoro/continuous/node12/system-test.cfg
+++ /dev/null
@@ -1,12 +0,0 @@
-# Download resources for system tests (service account key, etc.)
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
-
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-bigquery/.kokoro/system-test.sh"
-}
-
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "long-door-651-kokoro-system-test-service-account"
-}
\ No newline at end of file
diff --git a/.kokoro/continuous/node12/test.cfg b/.kokoro/continuous/node12/test.cfg
deleted file mode 100644
index e69de29b..00000000
diff --git a/.kokoro/presubmit/node12/common.cfg b/.kokoro/presubmit/node12/common.cfg
deleted file mode 100644
index 5e26eaed..00000000
--- a/.kokoro/presubmit/node12/common.cfg
+++ /dev/null
@@ -1,24 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-bigquery/.kokoro/test.sh"
-}
diff --git a/.kokoro/presubmit/node12/samples-test.cfg b/.kokoro/presubmit/node12/samples-test.cfg
deleted file mode 100644
index 54748bab..00000000
--- a/.kokoro/presubmit/node12/samples-test.cfg
+++ /dev/null
@@ -1,12 +0,0 @@
-# Download resources for system tests (service account key, etc.)
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
-
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-bigquery/.kokoro/samples-test.sh"
-}
-
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "long-door-651-kokoro-system-test-service-account"
-}
\ No newline at end of file
diff --git a/.kokoro/presubmit/node12/system-test.cfg b/.kokoro/presubmit/node12/system-test.cfg
deleted file mode 100644
index c89e9937..00000000
--- a/.kokoro/presubmit/node12/system-test.cfg
+++ /dev/null
@@ -1,12 +0,0 @@
-# Download resources for system tests (service account key, etc.)
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs"
-
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-bigquery/.kokoro/system-test.sh"
-}
-
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "long-door-651-kokoro-system-test-service-account"
-}
\ No newline at end of file
diff --git a/.kokoro/presubmit/node12/test.cfg b/.kokoro/presubmit/node12/test.cfg
deleted file mode 100644
index e69de29b..00000000
diff --git a/benchmark/bench.ts b/benchmark/bench.ts
index 65673e4f..155cc422 100644
--- a/benchmark/bench.ts
+++ b/benchmark/bench.ts
@@ -63,8 +63,9 @@ async function doQuery(queryTxt: string) {
     .on('end', () => {
       const timeTotalMilli = new Date().getTime() - startMilli;
       console.log(
-        `"${queryTxt}",${numRows},${numCols},${timeFirstByteMilli /
-          1000},${timeTotalMilli / 1000}`
+        `"${queryTxt}",${numRows},${numCols},${timeFirstByteMilli / 1000},${
+          timeTotalMilli / 1000
+        }`
       );
       resolve();
     });
diff --git a/package.json b/package.json
index ad244ca8..a2cfcc0f 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,7 @@
   "license": "Apache-2.0",
   "author": "Google LLC",
   "engines": {
-    "node": ">=12.0.0"
+    "node": ">=14.0.0"
   },
   "repository": "googleapis/nodejs-bigquery",
   "main": "./build/src/index.js",
@@ -77,8 +77,9 @@
     "c8": "^8.0.0",
     "codecov": "^3.5.0",
     "discovery-tsd": "^0.3.0",
+    "eslint-plugin-prettier": "^5.0.0",
     "execa": "^5.0.0",
-    "gts": "^3.1.0",
+    "gts": "^5.0.0",
     "jsdoc": "^4.0.0",
     "jsdoc-fresh": "^2.0.0",
     "jsdoc-region-tag": "^2.0.0",
@@ -86,9 +87,10 @@
     "mocha": "^9.2.2",
     "mv": "^2.1.1",
     "ncp": "^2.0.0",
+    "prettier": "^3.0.0",
     "proxyquire": "^2.1.0",
     "sinon": "^15.0.0",
     "tmp": "0.2.1",
-    "typescript": "^4.6.4"
+    "typescript": "^5.1.6"
   }
 }
diff --git a/samples/addColumnLoadAppend.js b/samples/addColumnLoadAppend.js
index fedcc8e5..ed6db0c6 100644
--- a/samples/addColumnLoadAppend.js
+++ b/samples/addColumnLoadAppend.js
@@ -42,10 +42,7 @@ function main(
     const schema = 'Name:STRING, Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';

     // Retrieve destination table reference
-    const [table] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .get();
+    const [table] = await bigquery.dataset(datasetId).table(tableId).get();
     const destinationTableRef = table.metadata.tableReference;

     // Set load job options
diff --git a/samples/addColumnQueryAppend.js b/samples/addColumnQueryAppend.js
index e99607b3..a00c1df8 100644
--- a/samples/addColumnQueryAppend.js
+++ b/samples/addColumnQueryAppend.js
@@ -32,10 +32,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     // const tableId = 'my_table';

     // Retrieve destination table reference
-    const [table] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .get();
+    const [table] = await bigquery.dataset(datasetId).table(tableId).get();
     const destinationTableRef = table.metadata.tableReference;

     // In this example, the existing table contains only the 'name' column.
diff --git a/samples/deleteTable.js b/samples/deleteTable.js
index 474564bd..fd310a74 100644
--- a/samples/deleteTable.js
+++ b/samples/deleteTable.js
@@ -30,10 +30,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     // const tableId = "my_table";

     // Delete the table
-    await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .delete();
+    await bigquery.dataset(datasetId).table(tableId).delete();

     console.log(`Table ${tableId} deleted.`);
   }
diff --git a/samples/insertRowsAsStream.js b/samples/insertRowsAsStream.js
index 389db3d8..2d30e0fc 100644
--- a/samples/insertRowsAsStream.js
+++ b/samples/insertRowsAsStream.js
@@ -34,10 +34,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     ];

     // Insert data into a table
-    await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .insert(rows);
+    await bigquery.dataset(datasetId).table(tableId).insert(rows);
     console.log(`Inserted ${rows.length} rows`);
   }
   // [END bigquery_table_insert_rows]
diff --git a/samples/insertingDataTypes.js b/samples/insertingDataTypes.js
index e2b005dc..41d2b684 100644
--- a/samples/insertingDataTypes.js
+++ b/samples/insertingDataTypes.js
@@ -132,10 +132,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     ];

     // Insert data into table
-    await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .insert(rows);
+    await bigquery.dataset(datasetId).table(tableId).insert(rows);
     console.log(`Inserted ${rows.length} rows`);
   }

diff --git a/samples/package.json b/samples/package.json
index 85cffd6b..96573d0e 100644
--- a/samples/package.json
+++ b/samples/package.json
@@ -10,7 +10,7 @@
   "author": "Google LLC",
   "repository": "googleapis/nodejs-bigquery",
   "engines": {
-    "node": ">=12.0.0"
+    "node": ">=14.0.0"
   },
   "scripts": {
     "test": "mocha --timeout 200000"
diff --git a/samples/relaxColumnLoadAppend.js b/samples/relaxColumnLoadAppend.js
index e278e0df..967d70bc 100644
--- a/samples/relaxColumnLoadAppend.js
+++ b/samples/relaxColumnLoadAppend.js
@@ -41,10 +41,7 @@ function main(
     const schema = 'Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';

     // Retrieve destination table reference
-    const [table] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .get();
+    const [table] = await bigquery.dataset(datasetId).table(tableId).get();
     const destinationTableRef = table.metadata.tableReference;

     // Set load job options
diff --git a/samples/test/authViewTutorial.test.js b/samples/test/authViewTutorial.test.js
index defd973c..15c432ab 100644
--- a/samples/test/authViewTutorial.test.js
+++ b/samples/test/authViewTutorial.test.js
@@ -38,10 +38,7 @@ const bigquery = new BigQuery();

 describe('Authorized View Tutorial', () => {
   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
     await bigquery
       .dataset(sourceDatasetId)
       .delete({force: true})
diff --git a/samples/test/datasets.test.js b/samples/test/datasets.test.js
index 4b10758b..fd4aecd1 100644
--- a/samples/test/datasets.test.js
+++ b/samples/test/datasets.test.js
@@ -36,10 +36,7 @@ describe('Datasets', () => {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should create a dataset', async () => {
diff --git a/samples/test/models.test.js b/samples/test/models.test.js
index df36bd28..90904621 100644
--- a/samples/test/models.test.js
+++ b/samples/test/models.test.js
@@ -26,7 +26,7 @@ const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_models';

 const bigquery = new BigQuery();

-describe('Models', function() {
+describe('Models', function () {
   // Increase timeout to accommodate model creation.
   this.timeout(300000);
   const datasetId = `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_');
@@ -65,10 +65,7 @@ describe('Models', function() {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should retrieve a model if it exists', async () => {
@@ -112,29 +109,20 @@ describe('Create/Delete Model', () => {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should create a model', async () => {
     const output = execSync(`node createModel.js ${datasetId} ${modelId}`);
     assert.include(output, `Model ${modelId} created.`);
-    const [exists] = await bigquery
-      .dataset(datasetId)
-      .model(modelId)
-      .exists();
+    const [exists] = await bigquery.dataset(datasetId).model(modelId).exists();
     assert.strictEqual(exists, true);
   });

   it('should delete a model', async () => {
     const output = execSync(`node deleteModel.js ${datasetId} ${modelId}`);
     assert.include(output, `Model ${modelId} deleted.`);
-    const [exists] = await bigquery
-      .dataset(datasetId)
-      .model(modelId)
-      .exists();
+    const [exists] = await bigquery.dataset(datasetId).model(modelId).exists();
     assert.strictEqual(exists, false);
   });
 });
diff --git a/samples/test/queries.test.js b/samples/test/queries.test.js
index a3405f22..beaedeed 100644
--- a/samples/test/queries.test.js
+++ b/samples/test/queries.test.js
@@ -51,10 +51,7 @@ describe('Queries', () => {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should query stackoverflow', async () => {
diff --git a/samples/test/routines.test.js b/samples/test/routines.test.js
index ec40b01b..266da7ac 100644
--- a/samples/test/routines.test.js
+++ b/samples/test/routines.test.js
@@ -36,10 +36,7 @@ const bigquery = new BigQuery();

 describe('Routines', () => {
   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   before(async () => {
diff --git a/samples/test/tables.test.js b/samples/test/tables.test.js
index 13dbdb8e..e7f94c5f 100644
--- a/samples/test/tables.test.js
+++ b/samples/test/tables.test.js
@@ -21,10 +21,8 @@ const uuid = require('uuid');
 const cp = require('child_process');
 const {Storage} = require('@google-cloud/storage');
 const {BigQuery} = require('@google-cloud/bigquery');
-const {
-  DataCatalogClient,
-  PolicyTagManagerClient,
-} = require('@google-cloud/datacatalog').v1;
+const {DataCatalogClient, PolicyTagManagerClient} =
+  require('@google-cloud/datacatalog').v1;

 const dataCatalog = new DataCatalogClient();
 const policyTagManager = new PolicyTagManagerClient();
@@ -109,10 +107,7 @@ describe('Tables', () => {
       .dataset(destDatasetId)
       .delete({force: true})
       .catch(console.warn);
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
     await storage
       .bucket(bucketName)
       .deleteFiles({force: true})
@@ -125,19 +120,13 @@ describe('Tables', () => {
       .dataset(srcDatasetId)
       .delete({force: true})
       .catch(console.warn);
-    await storage
-      .bucket(bucketName)
-      .delete()
-      .catch(console.warn);
+    await storage.bucket(bucketName).delete().catch(console.warn);
   });

   it('should create a table', async () => {
     const output = execSync(`node createTable.js ${datasetId} ${tableId}`);
     assert.include(output, `Table ${tableId} created.`);
-    const [exists] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .exists();
+    const [exists] = await bigquery.dataset(datasetId).table(tableId).exists();
     assert.ok(exists);
   });

@@ -348,10 +337,7 @@ describe('Tables', () => {
       `node loadLocalFile.js ${datasetId} ${tableId} ${localFilePath}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.strictEqual(rows.length, 1);
   });

@@ -407,10 +393,7 @@ describe('Tables', () => {
     const tableId = generateUuid();
     const output = execSync(`node loadTableGCSORC.js ${datasetId} ${tableId}`);
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -420,10 +403,7 @@ describe('Tables', () => {
     const tableId = generateUuid();
     const output = execSync(
       `node loadTableGCSParquet.js ${datasetId} ${tableId}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -431,10 +411,7 @@ describe('Tables', () => {
     const tableId = generateUuid();
     const output = execSync(`node loadTableGCSAvro.js ${datasetId} ${tableId}`);
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -444,10 +421,7 @@ describe('Tables', () => {
     const tableId = generateUuid();
     const output = execSync(
       `node loadTableURIFirestore.js ${datasetId} ${tableId}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -455,10 +429,7 @@ describe('Tables', () => {
     const tableId = generateUuid();
     const output = execSync(`node loadCSVFromGCS.js ${datasetId} ${tableId}`);
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -466,10 +437,7 @@ describe('Tables', () => {
     const tableId = generateUuid();
     const output = execSync(`node loadJSONFromGCS.js ${datasetId} ${tableId}`);
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -479,10 +447,7 @@ describe('Tables', () => {
     const tableId = generateUuid();
     const output = execSync(
       `node loadTablePartitioned.js ${datasetId} ${tableId}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -492,10 +457,7 @@ describe('Tables', () => {
       `node loadTableClustered.js ${datasetId} ${tableId}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -508,10 +470,7 @@ describe('Tables', () => {
       `node addColumnLoadAppend.js ${datasetId} ${destTableId} ${localFilePath}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -522,10 +481,7 @@ describe('Tables', () => {
       `node relaxColumnLoadAppend.js ${datasetId} ${destTableId} ${partialDataFilePath}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -535,10 +491,7 @@ describe('Tables', () => {
       `node loadCSVFromGCSAutodetect.js ${datasetId} ${tableId}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -548,10 +501,7 @@ describe('Tables', () => {
       `node loadJSONFromGCSAutodetect.js ${datasetId} ${tableId}`
     );
     assert.match(output, /completed\./);
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -562,10 +512,7 @@ describe('Tables', () => {
     );
     assert.match(output, /completed\./);
     assert.include(output, 'Write disposition used: WRITE_TRUNCATE.');
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -576,10 +523,7 @@ describe('Tables', () => {
     );
     assert.match(output, /completed\./);
     assert.include(output, 'Write disposition used: WRITE_TRUNCATE.');
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -590,10 +534,7 @@ describe('Tables', () => {
     );
     assert.match(output, /completed\./);
     assert.include(output, 'Write disposition used: WRITE_TRUNCATE.');
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -604,10 +545,7 @@ describe('Tables', () => {
     );
     assert.match(output, /completed\./);
     assert.include(output, 'Write disposition used: WRITE_TRUNCATE.');
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -618,10 +556,7 @@ describe('Tables', () => {
     );
     assert.match(output, /completed\./);
     assert.include(output, 'Write disposition used: WRITE_TRUNCATE.');
-    const [rows] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .getRows();
+    const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows();
     assert.ok(rows.length > 0);
   });

@@ -687,10 +622,7 @@ describe('Tables', () => {
   it('should create a view', async () => {
     const output = execSync(`node createView.js ${datasetId} ${viewId}`);
     assert.include(output, `View ${viewId} created.`);
-    const [exists] = await bigquery
-      .dataset(datasetId)
-      .table(viewId)
-      .exists();
+    const [exists] = await bigquery.dataset(datasetId).table(viewId).exists();
     assert.ok(exists);
   });

diff --git a/samples/undeleteTable.js b/samples/undeleteTable.js
index b695e867..9b872bea 100644
--- a/samples/undeleteTable.js
+++ b/samples/undeleteTable.js
@@ -42,10 +42,7 @@ function main(
     const snapshotEpoch = Date.now();

     // Delete the table
-    await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .delete();
+    await bigquery.dataset(datasetId).table(tableId).delete();

     console.log(`Table ${tableId} deleted.`);

diff --git a/samples/updateTableColumnACL.js b/samples/updateTableColumnACL.js
index 94e18301..9bb52b07 100644
--- a/samples/updateTableColumnACL.js
+++ b/samples/updateTableColumnACL.js
@@ -41,10 +41,7 @@ function main(
     ];

     // Get the existing table.
-    const [table] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .get();
+    const [table] = await bigquery.dataset(datasetId).table(tableId).get();

     // Retreive the table metadata.
     const [metadata] = await table.getMetadata();
diff --git a/src/bigquery.ts b/src/bigquery.ts
index eeeade64..8fd9e59c 100644
--- a/src/bigquery.ts
+++ b/src/bigquery.ts
@@ -510,9 +510,8 @@ export class BigQuery extends Service {
         .map(c => c.join('.'));
     }

-    return arrify(rows)
-      .map(mergeSchema)
-      .map(flattenRows);
+    return arrify(rows).map(mergeSchema).map(flattenRows);
+
     function mergeSchema(row: TableRow) {
       return row.f!.map((field: TableRowField, index: number) => {
         const schemaField = schema.fields![index];
@@ -2296,7 +2295,9 @@ export class BigQueryInt extends Number {
     try {
       return this.typeCastFunction!(this.value);
     } catch (error) {
-      (error as Error).message = `integerTypeCastFunction threw an error:\n\n - ${
+      (
+        error as Error
+      ).message = `integerTypeCastFunction threw an error:\n\n - ${
         (error as Error).message
       }`;
       throw error;
diff --git a/src/model.ts b/src/model.ts
index 7eb0bfd5..3bab7ee6 100644
--- a/src/model.ts
+++ b/src/model.ts
@@ -49,11 +49,10 @@ export type JobMetadataResponse = [JobMetadata];
 export type JobResponse = [Job, bigquery.IJob];
 export type JobCallback = ResourceCallback;

-export type CreateExtractJobOptions = JobRequest<
-  bigquery.IJobConfigurationExtract
-> & {
-  format?: 'ML_TF_SAVED_MODEL' | 'ML_XGBOOST_BOOSTER';
-};
+export type CreateExtractJobOptions =
+  JobRequest<bigquery.IJobConfigurationExtract> & {
+    format?: 'ML_TF_SAVED_MODEL' | 'ML_XGBOOST_BOOSTER';
+  };

 /**
  * The model export formats accepted by BigQuery.
diff --git a/src/table.ts b/src/table.ts
index 145e7670..d4a6a03f 100644
--- a/src/table.ts
+++ b/src/table.ts
@@ -86,7 +86,7 @@ export type InsertRowsOptions = bigquery.ITableDataInsertAllRequest & {
 };

 export type InsertRowsResponse = [
-  bigquery.ITableDataInsertAllResponse | bigquery.ITable
+  bigquery.ITableDataInsertAllResponse | bigquery.ITable,
 ];
 export type InsertRowsCallback = RequestCallback<
   bigquery.ITableDataInsertAllResponse | bigquery.ITable
@@ -120,12 +120,11 @@ export type JobLoadMetadata = JobRequest & {
   format?: string;
 };

-export type CreateExtractJobOptions = JobRequest<
-  bigquery.IJobConfigurationExtract
-> & {
-  format?: 'CSV' | 'JSON' | 'AVRO' | 'PARQUET' | 'ORC';
-  gzip?: boolean;
-};
+export type CreateExtractJobOptions =
+  JobRequest<bigquery.IJobConfigurationExtract> & {
+    format?: 'CSV' | 'JSON' | 'AVRO' | 'PARQUET' | 'ORC';
+    gzip?: boolean;
+  };

 export type JobResponse = [Job, bigquery.IJob];
 export type JobCallback = ResourceCallback;
@@ -604,7 +603,7 @@ class Table extends ServiceObject {
    * @private
    */
   static formatMetadata_(options: TableMetadata): FormattedMetadata {
-    const body = (extend(true, {}, options) as {}) as FormattedMetadata;
+    const body = extend(true, {}, options) as {} as FormattedMetadata;

     if (options.name) {
       body.friendlyName = options.name;
@@ -1179,10 +1178,7 @@ class Table extends ServiceObject {
     // If no explicit format was provided, attempt to find a match from the
     // file's extension. If no match, don't set, and default upstream to
     // CSV.
-    const format = path
-      .extname(dest.name)
-      .substr(1)
-      .toLowerCase();
+    const format = path.extname(dest.name).substr(1).toLowerCase();
     if (!options.destinationFormat && !options.format && FORMATS[format]) {
       options.destinationFormat = FORMATS[format];
     }
@@ -1375,12 +1371,7 @@ class Table extends ServiceObject {
       // A path to a file was given. If a sourceFormat wasn't specified, try to
       // find a match from the file's extension.
       const detectedFormat =
-        FORMATS[
-          path
-            .extname(source)
-            .substr(1)
-            .toLowerCase()
-        ];
+        FORMATS[path.extname(source).substr(1).toLowerCase()];
       if (!metadata.sourceFormat && detectedFormat) {
         metadata.sourceFormat = detectedFormat;
       }
@@ -1430,13 +1421,7 @@ class Table extends ServiceObject {
       // If no explicit format was provided, attempt to find a match from
       // the file's extension. If no match, don't set, and default upstream
       // to CSV.
-      const format =
-        FORMATS[
-          path
-            .extname(src.name)
-            .substr(1)
-            .toLowerCase()
-        ];
+      const format = FORMATS[path.extname(src.name).substr(1).toLowerCase()];
       if (!metadata.sourceFormat && format) {
         body.configuration.load.sourceFormat = format;
       }
diff --git a/src/types.d.ts b/src/types.d.ts
index c3972333..010185f5 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -121,9 +121,7 @@ declare namespace bigquery {
     /**
      * Repeated as there can be many metric sets (one for each model) in auto-arima and the large-scale case.
      */
-    arimaSingleModelForecastingMetrics?: Array<
-      IArimaSingleModelForecastingMetrics
-    >;
+    arimaSingleModelForecastingMetrics?: Array<IArimaSingleModelForecastingMetrics>;
     /**
      * Whether Arima model fitted with drift or not. It is always false when d is not 1.
      */
diff --git a/system-test/bigquery.ts b/system-test/bigquery.ts
index a66ae94f..7fae0475 100644
--- a/system-test/bigquery.ts
+++ b/system-test/bigquery.ts
@@ -570,7 +570,7 @@ describe('BigQuery', () => {
           query: QUERY,
         },
         (e, job) => {
-          const err = (e as {}) as GoogleErrorBody;
+          const err = e as {} as GoogleErrorBody;
           assert.strictEqual(err.errors![0].reason, 'notFound');
           assert.strictEqual(job!.location, 'US');
           done();
@@ -896,7 +896,7 @@ describe('BigQuery', () => {
       data?: {tableId?: number};
       table: Table;
     }
-    const TABLES = ([{data: {tableId: 1}}, {}] as {}) as TableItem[];
+    const TABLES = [{data: {tableId: 1}}, {}] as {} as TableItem[];

     const SCHEMA = 'tableId:integer';

@@ -1027,7 +1027,7 @@ describe('BigQuery', () => {
       };

       table.insert([data, improperData], e => {
-        const err = (e as {}) as GoogleErrorBody;
+        const err = e as {} as GoogleErrorBody;

         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         assert.strictEqual((err as any).name, 'PartialFailureError');
diff --git a/system-test/install.ts b/system-test/install.ts
index d16339ae..bd7c0c0a 100644
--- a/system-test/install.ts
+++ b/system-test/install.ts
@@ -21,7 +21,7 @@ import {describe, it, before, after} from 'mocha';

 const RUNNING_IN_VPCSC = !!process.env['GOOGLE_CLOUD_TESTS_IN_VPCSC'];

-const mvp = (promisify(mv) as {}) as (...args: string[]) => Promise;
+const mvp = promisify(mv) as {} as (...args: string[]) => Promise;
 const ncpp = promisify(ncp);
 const stagingDir = tmp.dirSync({unsafeCleanup: true});
 const stagingPath = stagingDir.name;
@@ -29,7 +29,7 @@ const stagingPath = stagingDir.name;
 const pkg = require('../../package.json');

 describe('Installation test', () => {
-  before(function() {
+  before(function () {
     if (RUNNING_IN_VPCSC) this.skip();
   });

diff --git a/test/bigquery.ts b/test/bigquery.ts
index a0dadaa3..791a83af 100644
--- a/test/bigquery.ts
+++ b/test/bigquery.ts
@@ -884,10 +884,7 @@ describe('BigQuery', () => {
     it('should call through to the static method', () => {
       const fakeInt = new BigQueryInt(INPUT_STRING);

-      sandbox
-        .stub(BigQuery, 'int')
-        .withArgs(INPUT_STRING)
-        .returns(fakeInt);
+      sandbox.stub(BigQuery, 'int').withArgs(INPUT_STRING).returns(fakeInt);

       const int = bq.int(INPUT_STRING);
       assert.strictEqual(int, fakeInt);
@@ -973,14 +970,20 @@ describe('BigQuery', () => {
       const smallIntegerValue = Number.MIN_SAFE_INTEGER - 1;

       // should throw when Number is passed
-      assert.throws(() => {
-        new BigQueryInt(largeIntegerValue).valueOf();
-      }, expectedError({integerValue: largeIntegerValue}));
+      assert.throws(
+        () => {
+          new BigQueryInt(largeIntegerValue).valueOf();
+        },
+        expectedError({integerValue: largeIntegerValue})
+      );

       // should throw when string is passed
-      assert.throws(() => {
-        new BigQueryInt(smallIntegerValue.toString()).valueOf();
-      }, expectedError({integerValue: smallIntegerValue}));
+      assert.throws(
+        () => {
+          new BigQueryInt(smallIntegerValue.toString()).valueOf();
+        },
+        expectedError({integerValue: smallIntegerValue})
+      );
     });

@@ -990,9 +993,12 @@ describe('BigQuery', () => {
         integerValue: largeIntegerValue,
       };

-      assert.doesNotThrow(() => {
-        new BigQueryInt(valueObject);
-      }, new RegExp(`Integer value ${largeIntegerValue} is out of bounds.`));
+      assert.doesNotThrow(
+        () => {
+          new BigQueryInt(valueObject);
+        },
+        new RegExp(`Integer value ${largeIntegerValue} is out of bounds.`)
+      );
     });

     describe('integerTypeCastFunction is provided', () => {
@@ -2124,8 +2130,8 @@ describe('BigQuery', () => {
       const fakeQueryParameter = {fake: 'query parameter'};

       bq.createJob = (reqOpts: JobOptions) => {
-        const queryParameters = reqOpts.configuration!.query!
-          .queryParameters;
+        const queryParameters =
+          reqOpts.configuration!.query!.queryParameters;
         assert.deepStrictEqual(queryParameters, [fakeQueryParameter]);
         done();
       };
diff --git a/test/dataset.ts b/test/dataset.ts
index d92678ac..4fea70fc 100644
--- a/test/dataset.ts
+++ b/test/dataset.ts
@@ -81,10 +81,10 @@ class FakeServiceObject extends ServiceObject {
 }

 describe('BigQuery/Dataset', () => {
-  const BIGQUERY = ({
+  const BIGQUERY = {
     projectId: 'my-project',
     createDataset: util.noop,
-  } as {}) as _root.BigQuery;
+  } as {} as _root.BigQuery;

   const DATASET_ID = 'kittens';
   const LOCATION = 'asia-northeast1';
@@ -456,7 +456,7 @@ describe('BigQuery/Dataset', () => {

       Table.formatMetadata_ = options => {
         assert.strictEqual(options, fakeOptions);
-        return (formatted as {}) as FormattedMetadata;
+        return formatted as {} as FormattedMetadata;
       };

       ds.request = (reqOpts: DecorateRequestOptions) => {
diff --git a/test/routine.ts b/test/routine.ts
index 7864ae3f..094e21fe 100644
--- a/test/routine.ts
+++ b/test/routine.ts
@@ -51,11 +51,11 @@ class FakeServiceObject extends ServiceObject {
 }

 describe('BigQuery/Routine', () => {
-  const DATASET = ({
+  const DATASET = {
     id: 'kittens',
     parent: {},
     createRoutine: util.noop,
-  } as {}) as _root.Dataset;
+  } as {} as _root.Dataset;

   const ROUTINE_ID = 'my_routine';
   // tslint:disable-next-line variable-name
@@ -107,7 +107,7 @@ describe('BigQuery/Routine', () => {
       const config = {a: 'b'};

       const dataset = extend(true, {}, DATASET, {
-        createRoutine: function(config_: {}, callback: Function) {
+        createRoutine: function (config_: {}, callback: Function) {
           assert.strictEqual(this, dataset);
           assert.deepStrictEqual(config_, config);
           callback(); // done()
@@ -132,7 +132,7 @@ describe('BigQuery/Routine', () => {
      };

       // eslint-disable-next-line @typescript-eslint/no-explicit-any
-      (FakeServiceObject.prototype as any).setMetadata = function(
+      (FakeServiceObject.prototype as any).setMetadata = function (
         metadata: {},
         callback: Function
       ) {
diff --git a/test/rowQueue.ts b/test/rowQueue.ts
index f8f943a3..cccf734a 100644
--- a/test/rowQueue.ts
+++ b/test/rowQueue.ts
@@ -50,7 +50,7 @@ class FakeRowBatch {
   }
 }

-const DATASET = ({
+const DATASET = {
   id: 'dataset-id',
   createTable: util.noop,
   bigQuery: {
@@ -61,7 +61,7 @@ const DATASET = ({
     apiEndpoint: 'bigquery.googleapis.com',
     request: util.noop,
   },
-} as {}) as _root.Dataset;
+} as {} as _root.Dataset;

 describe('Queues', () => {
   const sandbox = sinon.createSandbox();
@@ -218,7 +218,7 @@ describe('Queues', () => {
     });

     it('should cancel any pending insert calls', () => {
-      const fakeHandle = (1234 as unknown) as NodeJS.Timer;
+      const fakeHandle = 1234 as unknown as NodeJS.Timer;
       const stub = sandbox.stub(global, 'clearTimeout').withArgs(fakeHandle);

       queue.pending = fakeHandle;
@@ -269,7 +269,7 @@ describe('Queues', () => {
         };
       }),
     };
-    const error = ({
+    const error = {
       errors: [
         {
           row: dataApiFormat.rows[0].json,
@@ -280,7 +280,7 @@ describe('Queues', () => {
           errors: [row1Error],
         },
       ],
-    } as unknown) as Error;
+    } as unknown as Error;

     it('should make the correct request', () => {
       const stub = sandbox.stub(fakeTable, 'request');
diff --git a/test/table.ts b/test/table.ts
index 3edc92e1..3aa1176d 100644
--- a/test/table.ts
+++ b/test/table.ts
@@ -2843,7 +2843,7 @@ describe('BigQuery/Table', () => {
         return formattedMetadata;
       };

       // eslint-disable-next-line @typescript-eslint/no-explicit-any
-      (FakeServiceObject.prototype as any).setMetadata = function(
+      (FakeServiceObject.prototype as any).setMetadata = function (
         metadata: {},
         callback: Function
       ) {