chore!: migrate to node 14 (#1236)
Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
alvarowolfx and gcf-owl-bot[bot] authored Jul 28, 2023
1 parent e62f75a commit 73bf31f
Showing 39 changed files with 111 additions and 335 deletions.
2 changes: 1 addition & 1 deletion .github/sync-repo-settings.yaml
@@ -9,9 +9,9 @@ branchProtectionRules:
       - "ci/kokoro: System test"
       - docs
       - lint
-      - test (12)
       - test (14)
       - test (16)
+      - test (18)
       - cla/google
       - windows
       - OwlBot Post Processor
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        node: [12, 14, 16, 18]
+        node: [14, 16, 18, 20]
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-node@v3
24 changes: 0 additions & 24 deletions .kokoro/continuous/node12/common.cfg

This file was deleted.

4 changes: 0 additions & 4 deletions .kokoro/continuous/node12/lint.cfg

This file was deleted.

12 changes: 0 additions & 12 deletions .kokoro/continuous/node12/samples-test.cfg

This file was deleted.

12 changes: 0 additions & 12 deletions .kokoro/continuous/node12/system-test.cfg

This file was deleted.

Empty file removed .kokoro/continuous/node12/test.cfg
24 changes: 0 additions & 24 deletions .kokoro/presubmit/node12/common.cfg

This file was deleted.

12 changes: 0 additions & 12 deletions .kokoro/presubmit/node12/samples-test.cfg

This file was deleted.

12 changes: 0 additions & 12 deletions .kokoro/presubmit/node12/system-test.cfg

This file was deleted.

Empty file removed .kokoro/presubmit/node12/test.cfg
5 changes: 3 additions & 2 deletions benchmark/bench.ts
@@ -63,8 +63,9 @@ async function doQuery(queryTxt: string) {
       .on('end', () => {
         const timeTotalMilli = new Date().getTime() - startMilli;
         console.log(
-          `"${queryTxt}",${numRows},${numCols},${timeFirstByteMilli /
-            1000},${timeTotalMilli / 1000}`
+          `"${queryTxt}",${numRows},${numCols},${timeFirstByteMilli / 1000},${
+            timeTotalMilli / 1000
+          }`
         );
         resolve();
       });
8 changes: 5 additions & 3 deletions package.json
@@ -5,7 +5,7 @@
   "license": "Apache-2.0",
   "author": "Google LLC",
   "engines": {
-    "node": ">=12.0.0"
+    "node": ">=14.0.0"
   },
   "repository": "googleapis/nodejs-bigquery",
   "main": "./build/src/index.js",
@@ -77,18 +77,20 @@
     "c8": "^8.0.0",
     "codecov": "^3.5.0",
     "discovery-tsd": "^0.3.0",
+    "eslint-plugin-prettier": "^5.0.0",
     "execa": "^5.0.0",
-    "gts": "^3.1.0",
+    "gts": "^5.0.0",
     "jsdoc": "^4.0.0",
     "jsdoc-fresh": "^2.0.0",
     "jsdoc-region-tag": "^2.0.0",
     "linkinator": "^5.0.0",
     "mocha": "^9.2.2",
     "mv": "^2.1.1",
     "ncp": "^2.0.0",
+    "prettier": "^3.0.0",
     "proxyquire": "^2.1.0",
     "sinon": "^15.0.0",
     "tmp": "0.2.1",
-    "typescript": "^4.6.4"
+    "typescript": "^5.1.6"
   }
 }
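Note that npm enforces the "engines" field only as an install-time warning unless engine-strict is enabled, so a runtime check is sometimes added alongside a floor bump like this one. A minimal sketch of such a guard, mirroring the new ">=14.0.0" minimum (hypothetical file, not part of this commit):

  // check-node-version.js: hypothetical guard mirroring the new "engines" floor of ">=14.0.0".
  // Not part of this commit; shown only to illustrate the raised runtime requirement.
  const [major] = process.versions.node.split('.').map(Number);

  if (major < 14) {
    console.error(
      `@google-cloud/bigquery now requires Node.js 14 or later; detected ${process.versions.node}.`
    );
    process.exit(1);
  }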
5 changes: 1 addition & 4 deletions samples/addColumnLoadAppend.js
@@ -42,10 +42,7 @@ function main(
     const schema = 'Name:STRING, Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';

     // Retrieve destination table reference
-    const [table] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .get();
+    const [table] = await bigquery.dataset(datasetId).table(tableId).get();
     const destinationTableRef = table.metadata.tableReference;

     // Set load job options
5 changes: 1 addition & 4 deletions samples/addColumnQueryAppend.js
@@ -32,10 +32,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     // const tableId = 'my_table';

     // Retrieve destination table reference
-    const [table] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .get();
+    const [table] = await bigquery.dataset(datasetId).table(tableId).get();
     const destinationTableRef = table.metadata.tableReference;

     // In this example, the existing table contains only the 'name' column.
5 changes: 1 addition & 4 deletions samples/deleteTable.js
@@ -30,10 +30,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     // const tableId = "my_table";

     // Delete the table
-    await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .delete();
+    await bigquery.dataset(datasetId).table(tableId).delete();

     console.log(`Table ${tableId} deleted.`);
   }
5 changes: 1 addition & 4 deletions samples/insertRowsAsStream.js
@@ -34,10 +34,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     ];

     // Insert data into a table
-    await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .insert(rows);
+    await bigquery.dataset(datasetId).table(tableId).insert(rows);
     console.log(`Inserted ${rows.length} rows`);
   }
   // [END bigquery_table_insert_rows]
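For context, the collapsed one-line call chain produced after the formatter upgrade in this commit (gts ^5.0.0 with prettier ^3.0.0) reads like the following self-contained sketch; the dataset and table names are placeholders, not values taken from this commit:

  // Hypothetical standalone usage of the single-line dataset().table().insert() chain
  // shown in the updated sample; client setup added so the snippet runs on its own.
  const {BigQuery} = require('@google-cloud/bigquery');

  async function insertRows() {
    const bigquery = new BigQuery();
    const rows = [
      {name: 'Tom', age: 30},
      {name: 'Jane', age: 32},
    ];
    // Same call chain as the updated sample, kept on one line by the formatter.
    await bigquery.dataset('my_dataset').table('my_table').insert(rows);
    console.log(`Inserted ${rows.length} rows`);
  }

  insertRows().catch(console.error);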
5 changes: 1 addition & 4 deletions samples/insertingDataTypes.js
@@ -132,10 +132,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') {
     ];

     // Insert data into table
-    await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .insert(rows);
+    await bigquery.dataset(datasetId).table(tableId).insert(rows);

     console.log(`Inserted ${rows.length} rows`);
   }
2 changes: 1 addition & 1 deletion samples/package.json
@@ -10,7 +10,7 @@
   "author": "Google LLC",
   "repository": "googleapis/nodejs-bigquery",
   "engines": {
-    "node": ">=12.0.0"
+    "node": ">=14.0.0"
   },
   "scripts": {
     "test": "mocha --timeout 200000"
5 changes: 1 addition & 4 deletions samples/relaxColumnLoadAppend.js
@@ -41,10 +41,7 @@ function main(
     const schema = 'Age:INTEGER, Weight:FLOAT, IsMagic:BOOLEAN';

     // Retrieve destination table reference
-    const [table] = await bigquery
-      .dataset(datasetId)
-      .table(tableId)
-      .get();
+    const [table] = await bigquery.dataset(datasetId).table(tableId).get();
     const destinationTableRef = table.metadata.tableReference;

     // Set load job options
5 changes: 1 addition & 4 deletions samples/test/authViewTutorial.test.js
@@ -38,10 +38,7 @@ const bigquery = new BigQuery();

 describe('Authorized View Tutorial', () => {
   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
     await bigquery
       .dataset(sourceDatasetId)
       .delete({force: true})
5 changes: 1 addition & 4 deletions samples/test/datasets.test.js
@@ -36,10 +36,7 @@ describe('Datasets', () => {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should create a dataset', async () => {
22 changes: 5 additions & 17 deletions samples/test/models.test.js
@@ -26,7 +26,7 @@ const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_models';

 const bigquery = new BigQuery();

-describe('Models', function() {
+describe('Models', function () {
   // Increase timeout to accommodate model creation.
   this.timeout(300000);
   const datasetId = `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_');
@@ -65,10 +65,7 @@ describe('Models', function() {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should retrieve a model if it exists', async () => {
@@ -112,29 +109,20 @@ describe('Create/Delete Model', () => {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should create a model', async () => {
     const output = execSync(`node createModel.js ${datasetId} ${modelId}`);
     assert.include(output, `Model ${modelId} created.`);
-    const [exists] = await bigquery
-      .dataset(datasetId)
-      .model(modelId)
-      .exists();
+    const [exists] = await bigquery.dataset(datasetId).model(modelId).exists();
     assert.strictEqual(exists, true);
   });

   it('should delete a model', async () => {
     const output = execSync(`node deleteModel.js ${datasetId} ${modelId}`);
     assert.include(output, `Model ${modelId} deleted.`);
-    const [exists] = await bigquery
-      .dataset(datasetId)
-      .model(modelId)
-      .exists();
+    const [exists] = await bigquery.dataset(datasetId).model(modelId).exists();
     assert.strictEqual(exists, false);
   });
 });
5 changes: 1 addition & 4 deletions samples/test/queries.test.js
@@ -51,10 +51,7 @@ describe('Queries', () => {
   });

   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   it('should query stackoverflow', async () => {
5 changes: 1 addition & 4 deletions samples/test/routines.test.js
@@ -36,10 +36,7 @@ const bigquery = new BigQuery();

 describe('Routines', () => {
   after(async () => {
-    await bigquery
-      .dataset(datasetId)
-      .delete({force: true})
-      .catch(console.warn);
+    await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn);
   });

   before(async () => {