From b5f33bfb9a771b70dd9a9a232c23409c27864c34 Mon Sep 17 00:00:00 2001 From: Jason Dobry Date: Wed, 7 Sep 2016 15:34:18 -0700 Subject: [PATCH] Simplify BigQuery samples according to our standard. (#207) * Simplify BigQuery samples according to our standard. * Address comments * Add region tag. * Re-enable cache. Remove .travis.yml --- .travis.yml | 95 ------ bigquery/README.md | 100 +++---- bigquery/datasets.js | 94 ++---- bigquery/getting_started.js | 74 ----- bigquery/queries.js | 186 ++++++------ bigquery/system-test/datasets.test.js | 33 +- bigquery/system-test/getting_started.test.js | 29 -- bigquery/system-test/queries.test.js | 53 ++-- bigquery/system-test/tables.test.js | 150 +++++----- bigquery/tables.js | 298 ++++++++----------- bigquery/test/datasets.test.js | 8 +- bigquery/test/getting_started.test.js | 18 -- bigquery/test/queries.test.js | 155 +++++----- bigquery/test/tables.test.js | 293 +++++++----------- circle.yml | 118 ++++---- 15 files changed, 671 insertions(+), 1033 deletions(-) delete mode 100644 .travis.yml delete mode 100644 bigquery/getting_started.js delete mode 100644 bigquery/system-test/getting_started.test.js delete mode 100644 bigquery/test/getting_started.test.js diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index d59119159f..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright 2015-2016, Google, Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-
-sudo: false
-language: node_js
-node_js:
-- "6"
-- "4"
-- "0.12"
-
-cache:
-  directories:
-  - appengine/analytics/node_modules/
-  - appengine/bower/node_modules/
-  - appengine/cloudsql/node_modules/
-  - appengine/datastore/node_modules/
-  - appengine/disk/node_modules/
-  - appengine/express/node_modules/
-  - appengine/express-memcached-session/node_modules/
-  - appengine/extending-runtime/node_modules/
-  - appengine/geddy/node_modules/
-  - appengine/grunt/node_modules/
-  - appengine/hapi/node_modules/
-  - appengine/hello-world/node_modules/
-  - appengine/koa/node_modules/
-  - appengine/kraken/node_modules/
-  - appengine/logging/node_modules/
-  - appengine/loopback/node_modules/
-  - appengine/mailgun/node_modules/
-  - appengine/mailjet/node_modules/
-  - appengine/memcached/node_modules/
-  - appengine/mongodb/node_modules/
-  - appengine/parse-server/node_modules/
-  - appengine/pubsub/node_modules/
-  - appengine/redis/node_modules/
-  - appengine/restify/node_modules/
-  - appengine/sails/node_modules/
-  - appengine/sendgrid/node_modules/
-  - appengine/static-files/node_modules/
-  - appengine/storage/node_modules/
-  - appengine/twilio/node_modules/
-  - appengine/webpack/node_modules/
-  - appengine/websockets/node_modules/
-  - bigquery/node_modules/
-  - computeengine/node_modules/
-  - datastore/node_modules/
-  - debugger/node_modules/
-  - functions/background/node_modules/
-  - functions/datastore/node_modules/
-  - functions/errorreporting/node_modules/
-  - functions/gcs/node_modules/
-  - functions/helloworld/node_modules/
-  - functions/http/node_modules/
-  - functions/log/node_modules/
-  - functions/ocr/node_modules/
-  - functions/pubsub/node_modules/
-  - functions/sendgrid/node_modules/
-  - functions/slack/node_modules/
-  - functions/uuid/node_modules/
-  - language/node_modules/
-  - logging/node_modules/
-  - monitoring/node_modules/
-  - prediction/node_modules/
-  - pubsub/node_modules/
-  - speech/node_modules/
-  - storage/node_modules/
-  - trace/node_modules/
-  - translate/node_modules/
-  - vision/node_modules/
-
-env:
-  global:
-  - GOOGLE_APPLICATION_CREDENTIALS=$TRAVIS_BUILD_DIR/key.json
-  - TEST_BUCKET_NAME=nodejs-docs-samples
-  - GCLOUD_PROJECT=nodejs-docs-samples
-
-before_install:
-  - openssl aes-256-cbc -K $encrypted_fda0b707c7d5_key -iv $encrypted_fda0b707c7d5_iv -in key.json.enc -out key.json -d
-  - npm install -g npm
-  - npm set progress=false
-
-before_script:
-  - node scripts/install
-
-script: npm test
diff --git a/bigquery/README.md b/bigquery/README.md
index 0192714938..a67297bf28 100644
--- a/bigquery/README.md
+++ b/bigquery/README.md
@@ -11,7 +11,6 @@ analytics data warehouse.
 * [Setup](#setup)
 * [Samples](#samples)
-  * [Create A Simple Application With the API](#create-a-simple-application-with-the-api)
   * [Datasets](#datasets)
   * [Queries](#queries)
   * [Tables](#tables)
@@ -28,17 +27,6 @@ analytics data warehouse.
 
 ## Samples
 
-### Create A Simple Application With the API
-
-View the [documentation][basics_docs] or the [source code][basics_code].
-
-__Run the sample:__
-
-    node getting_started
-
-[basics_docs]: https://cloud.google.com/bigquery/create-simple-app-api
-[basics_code]: getting_started.js
-
 ### Datasets
 
 View the [documentation][datasets_docs] or the [source code][datasets_code].
@@ -47,25 +35,22 @@ __Usage:__ `node datasets --help`
 
 ```
 Commands:
-  create <name>       Create a new dataset.
-  delete <datasetId>  Delete the specified dataset.
-  list                List datasets in the authenticated project.
+  create <datasetId>  Create a new dataset with the specified ID.
+  delete <datasetId>  Delete the dataset with the specified ID.
+  list                List datasets in the specified project.
   size <datasetId>    Calculate the size of the specified dataset.
 
 Options:
-  --projectId, -p  Optionally specify the project ID to use.
-                                                             [string]
-  --help           Show help                                 [boolean]
+  --projectId, -p  Optionally specify the project ID to use.  [string] [default: "nodejs-docs-samples"]
+  --help           Show help                                  [boolean]
 
 Examples:
-  node datasets create my_dataset             Create a new dataset named "my_dataset".
-  node datasets delete my_dataset             Delete "my_dataset".
-  node datasets list                          List datasets.
-  node datasets list -p bigquery-public-data  List datasets in a project other than the
-                                              authenticated project.
-  node datasets size my_dataset               Calculate the size of "my_dataset".
-  node datasets size hacker_news -p           Calculate the size of
-  bigquery-public-data                        "bigquery-public-data:hacker_news".
+  node datasets create my_dataset                          Create a new dataset with the ID "my_dataset".
+  node datasets delete my_dataset                          Delete a dataset identified as "my_dataset".
+  node datasets list                                       List datasets.
+  node datasets list -p bigquery-public-data               List datasets in the "bigquery-public-data" project.
+  node datasets size my_dataset                            Calculate the size of "my_dataset".
+  node datasets size hacker_news -p bigquery-public-data   Calculate the size of "bigquery-public-data:hacker_news".
 
 For more information, see https://cloud.google.com/bigquery/docs
 ```
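The refactored `datasets.js` keeps each operation as an exported function, so the commands above can also be driven from code. A minimal sketch, assuming this patch's `datasets.js`, application-default credentials, and an illustrative dataset ID:

```
// Sketch only: programmatic use of the refactored datasets sample.
var datasets = require('./datasets');

// Create a dataset, then confirm it shows up in the project's listing.
datasets.createDataset('my_new_dataset', function (err, dataset) {
  if (err) {
    return console.error('createDataset failed:', err);
  }
  datasets.listDatasets(process.env.GCLOUD_PROJECT, function (err, all) {
    if (err) {
      return console.error('listDatasets failed:', err);
    }
    console.log('Project now contains %d dataset(s)', all.length);
  });
});
```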
@@ -81,17 +66,19 @@ __Usage:__ `node queries --help`
 
 ```
 Commands:
-  sync <query>   Run a synchronous query.
-  async <query>  Start an asynchronous query.
-  poll <jobId>   Get the status of a job.
+  sync <sqlQuery>   Run the specified synchronous query.
+  async <sqlQuery>  Start the specified asynchronous query.
+  wait <jobId>      Wait for the specified job to complete and retrieve its results.
 
 Options:
-  --help  Show help                                                      [boolean]
+  --help  Show help  [boolean]
 
 Examples:
-  node queries sync "SELECT * FROM publicdata:samples.natality LIMIT 5;"
-  node queries async "SELECT * FROM publicdata:samples.natality LIMIT 5;"
-  node queries poll 12345
+  node queries sync "SELECT * FROM `publicdata.samples.natality` LIMIT 5;"
+  node queries async "SELECT * FROM `publicdata.samples.natality` LIMIT 5;"
+  node queries wait job_VwckYXnR8yz54GBDMykIGnrc2
 
 For more information, see https://cloud.google.com/bigquery/docs
 ```
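Both query styles funnel through the same options object: `sync` uses the blocking `query()` call, while `async` starts a job. A condensed sketch of the difference, using the client exactly as `queries.js` does in this patch (the SQL string is illustrative):

```
// Sketch only: sync vs. async queries with @google-cloud/bigquery.
var bigquery = require('@google-cloud/bigquery')();

var options = {
  query: 'SELECT word FROM `publicdata.samples.shakespeare` LIMIT 10;',
  useLegacySql: false // standard SQL, as in this patch
};

// "sync": waits (up to an optional timeoutMs) and yields rows directly.
bigquery.query(options, function (err, rows) {
  if (err) {
    return console.error(err);
  }
  console.log('query() returned %d row(s)', rows.length);
});

// "async": returns a job handle whose completion can be awaited.
bigquery.startQuery(options, function (err, job) {
  if (err) {
    return console.error(err);
  }
  job.on('error', console.error).on('complete', function () {
    job.getQueryResults(function (err, rows) {
      if (err) {
        return console.error(err);
      }
      console.log('Job %s returned %d row(s)', job.id, rows.length);
    });
  });
});
```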
@@ -107,27 +94,40 @@ __Usage:__ `node tables --help`
 
 ```
 Commands:
-  create <dataset> <table>                   Create a new table in the specified dataset.
-  list <dataset>                             List tables in the specified dataset.
-  delete <dataset> <table>                   Delete a table in the specified dataset.
-  import <dataset> <table> <file>            Import data from a local file or a Google Cloud Storage
-                                             file into BigQuery.
-  export <dataset> <table> <bucket> <file>   Export a table from BigQuery to Google Cloud Storage.
+  create <datasetId> <tableId>                              Create a new table with the specified ID in the
+                                                            specified dataset.
+  list <datasetId>                                          List tables in the specified dataset.
+  delete <datasetId> <tableId>                              Delete the specified table from the specified dataset.
+  copy <srcDatasetId> <srcTableId> <destDatasetId>          Make a copy of an existing table.
+  <destTableId>
+  browse <datasetId> <tableId>                              List the rows from the specified table.
+  import <datasetId> <tableId> <fileName>                   Import data from a local file or a Google Cloud Storage
+                                                            file into the specified table.
+  export <datasetId> <tableId> <bucketName> <fileName>      Export a table from BigQuery to Google Cloud Storage.
+  insert <datasetId> <tableId> <json_or_file>               Insert a JSON array (as a string or newline-delimited
+                                                            file) into a BigQuery table.
 
 Options:
-  --help Show help [boolean]
+  --help  Show help  [boolean]
 
 Examples:
-  node tables create my_dataset my_table             Create table "my_table" in "my_dataset".
-  node tables list my_dataset                        List tables in "my_dataset".
-  node tables delete my_dataset my_table             Delete "my_table" from "my_dataset".
-  node tables import my_dataset my_table ./data.csv  Import a local file into a table.
-  node tables import my_dataset my_table data.csv    Import a GCS file into a table.
-  --bucket my-bucket
-  node tables export my_dataset my_table my-bucket   Export my_dataset:my_table to
-  my-file                                            gcs://my-bucket/my-file as raw CSV
-  node tables export my_dataset my_table my-bucket   Export my_dataset:my_table to
-  my-file -f JSON --gzip                             gcs://my-bucket/my-file as gzipped JSON
+  node tables create my_dataset my_table                       Create table "my_table" in "my_dataset".
+  node tables list my_dataset                                  List tables in "my_dataset".
+  node tables browse my_dataset my_table                       Display rows from "my_table" in "my_dataset".
+  node tables delete my_dataset my_table                       Delete "my_table" from "my_dataset".
+  node tables import my_dataset my_table ./data.csv            Import a local file into a table.
+  node tables import my_dataset my_table data.csv --bucket     Import a GCS file into a table.
+  my-bucket
+  node tables export my_dataset my_table my-bucket my-file     Export my_dataset:my_table to gcs://my-bucket/my-file as
+                                                               raw CSV.
+  node tables export my_dataset my_table my-bucket my-file -f  Export my_dataset:my_table to gcs://my-bucket/my-file as
+  JSON --gzip                                                  gzipped JSON.
+  node tables insert my_dataset my_table json_string           Insert the JSON array represented by json_string into
+                                                               my_dataset:my_table.
+  node tables insert my_dataset my_table json_file             Insert the JSON objects contained in json_file (one per
+                                                               line) into my_dataset:my_table.
+  node tables copy src_dataset src_table dest_dataset          Copy src_dataset:src_table to dest_dataset:dest_table.
+  dest_table
 
 For more information, see https://cloud.google.com/bigquery/docs
 ```
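The `insert` command accepts either a JSON string or a newline-delimited file; both paths end in `insertRowsAsStream`. A small sketch of calling it directly, assuming this patch's `tables.js` and the sample rows used by its system tests:

```
// Sketch only: streaming rows into an existing table.
var tables = require('./tables');

var rows = [
  { Name: 'foo', Age: 27, Weight: 80.3, IsMagic: true },
  { Name: 'bar', Age: 13, Weight: 54.6, IsMagic: false }
];

tables.insertRowsAsStream('my_dataset', 'my_table', rows, function (err, insertErrors) {
  if (err) {
    return console.error('Request failed:', err); // transport-level failure
  }
  if (insertErrors.length > 0) {
    return console.error('Some rows were rejected:', insertErrors); // per-row failures
  }
  console.log('All %d row(s) inserted.', rows.length);
});
```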
diff --git a/bigquery/datasets.js b/bigquery/datasets.js
index 575061703b..4d1b1f0eda 100644
--- a/bigquery/datasets.js
+++ b/bigquery/datasets.js
@@ -13,79 +13,50 @@
 
 'use strict';
 
-// [START all]
 // [START setup]
 // By default, the client will authenticate using the service account file
 // specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use
 // the project specified by the GCLOUD_PROJECT environment variable. See
-// https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/guides/authentication
+// https://googlecloudplatform.github.io/google-cloud-node/#/docs/google-cloud/latest/guides/authentication
 var BigQuery = require('@google-cloud/bigquery');
-
-// Instantiate the bigquery client
-var bigquery = BigQuery();
 // [END setup]
 
-// Control-flow helper library
-var async = require('async');
+function createDataset (datasetId, callback) {
+  var bigquery = BigQuery();
+  var dataset = bigquery.dataset(datasetId);
 
-// [START create_dataset]
-/**
- * List datasets in the authenticated project.
- *
- * @param {string} name The name for the new dataset.
- * @param {function} callback The callback function.
- */
-function createDataset (name, callback) {
-  var dataset = bigquery.dataset(name);
-
-  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery
-  dataset.create(function (err, dataset) {
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=create
+  dataset.create(function (err, dataset, apiResponse) {
     if (err) {
       return callback(err);
     }
 
-    console.log('Created dataset: %s', name);
-    return callback(null, dataset);
+    console.log('Created dataset: %s', datasetId);
+    return callback(null, dataset, apiResponse);
   });
 }
-// [END create_dataset]
-
-// [START delete_dataset]
-/**
- * List datasets in the authenticated project.
- *
- * @param {string} name The name for the new dataset.
- * @param {function} callback The callback function.
- */
-function deleteDataset (name, callback) {
-  var dataset = bigquery.dataset(name);
-
-  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery
+
+function deleteDataset (datasetId, callback) {
+  var bigquery = BigQuery();
+  var dataset = bigquery.dataset(datasetId);
+
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=delete
   dataset.delete(function (err) {
     if (err) {
       return callback(err);
     }
 
-    console.log('Deleted dataset: %s', name);
+    console.log('Deleted dataset: %s', datasetId);
     return callback(null);
   });
 }
-// [END delete_dataset]
-
-// [START list_datasets]
-/**
- * List datasets in the authenticated project.
- *
- * @param {string} projectId The project ID to use.
- * @param {function} callback The callback function.
- */
+
 function listDatasets (projectId, callback) {
-  // Instantiate a bigquery client
   var bigquery = BigQuery({
     projectId: projectId
   });
 
-  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=getDatasets
   bigquery.getDatasets(function (err, datasets) {
     if (err) {
       return callback(err);
@@ -95,16 +66,11 @@ function listDatasets (projectId, callback) {
     return callback(null, datasets);
   });
 }
-// [END list_datasets]
 
 // [START get_dataset_size]
-/**
- * Calculate the size of the specified dataset.
- *
- * @param {string} datasetId The ID of the dataset.
- * @param {string} projectId The project ID.
- * @param {function} callback The callback function.
- */
+// Control-flow helper library
+var async = require('async');
+
 function getDatasetSize (datasetId, projectId, callback) {
   // Instantiate a bigquery client
   var bigquery = BigQuery({
@@ -112,7 +78,7 @@ function getDatasetSize (datasetId, projectId, callback) {
   });
   var dataset = bigquery.dataset(datasetId);
 
-  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/dataset
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=getTables
   dataset.getTables(function (err, tables) {
     if (err) {
       return callback(err);
@@ -120,6 +86,7 @@ function getDatasetSize (datasetId, projectId, callback) {
 
     return async.map(tables, function (table, cb) {
       // Fetch more detailed info for each table
+      // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=get
       table.get(function (err, tableInfo) {
        if (err) {
           return cb(err);
@@ -142,7 +109,6 @@ function getDatasetSize (datasetId, projectId, callback) {
   });
 }
 // [END get_dataset_size]
-// [END all]
 
 // The command-line program
 var cli = require('yargs');
@@ -161,13 +127,13 @@ var program = module.exports = {
 
 cli
   .demand(1)
-  .command('create <name>', 'Create a new dataset.', {}, function (options) {
-    program.createDataset(options.name, makeHandler());
+  .command('create <datasetId>', 'Create a new dataset with the specified ID.', {}, function (options) {
+    program.createDataset(options.datasetId, makeHandler());
  })
-  .command('delete <datasetId>', 'Delete the specified dataset.', {}, function (options) {
+  .command('delete <datasetId>', 'Delete the dataset with the specified ID.', {}, function (options) {
     program.deleteDataset(options.datasetId, makeHandler());
   })
-  .command('list', 'List datasets in the authenticated project.', {}, function (options) {
+  .command('list', 'List datasets in the specified project.', {}, function (options) {
     program.listDatasets(options.projectId, makeHandler(true, 'id'));
   })
   .command('size <datasetId>', 'Calculate the size of the specified dataset.', {}, function (options) {
@@ -181,13 +147,13 @@ cli
     description: 'Optionally specify the project ID to use.',
     global: true
   })
-  .example('node $0 create my_dataset', 'Create a new dataset named "my_dataset".')
-  .example('node $0 delete my_dataset', 'Delete "my_dataset".')
+  .example('node $0 create my_dataset', 'Create a new dataset with the ID "my_dataset".')
+  .example('node $0 delete my_dataset', 'Delete a dataset identified as "my_dataset".')
   .example('node $0 list', 'List datasets.')
-  .example('node $0 list -p bigquery-public-data', 'List datasets in a project other than the authenticated project.')
+  .example('node $0 list -p bigquery-public-data', 'List datasets in the "bigquery-public-data" project.')
   .example('node $0 size my_dataset', 'Calculate the size of "my_dataset".')
   .example('node $0 size hacker_news -p bigquery-public-data', 'Calculate the size of "bigquery-public-data:hacker_news".')
-  .wrap(100)
+  .wrap(120)
   .recommendCommands()
   .epilogue('For more information, see https://cloud.google.com/bigquery/docs');
 
diff --git a/bigquery/getting_started.js b/bigquery/getting_started.js
deleted file mode 100644
index 906249a594..0000000000
--- a/bigquery/getting_started.js
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// [START complete] -'use strict'; - -// [START auth] -// By default, the client will authenticate using the service account file -// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use -// the project specified by the GCLOUD_PROJECT environment variable. See -// https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/guides/authentication -var BigQuery = require('@google-cloud/bigquery'); - -// Instantiate a bigquery client -var bigquery = BigQuery(); -// [END auth] - -// [START print] -function printExample (rows) { - console.log('Query Results:'); - rows.forEach(function (row) { - var str = ''; - for (var key in row) { - if (str) { - str += '\t'; - } - str += key + ': ' + row[key]; - } - console.log(str); - }); -} -// [END print] - -// [START query] -/** - * Run an example query. - * - * @param {Function} callback Callback function. - */ -function queryExample (callback) { - var query = 'SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words\n' + - 'FROM [publicdata:samples.shakespeare];'; - - bigquery.query(query, function (err, rows) { - if (err) { - return callback(err); - } - - printExample(rows); - callback(null, rows); - }); -} -// [END query] - -// [END complete] - -// Run the examples -exports.main = function (cb) { - queryExample(cb); -}; - -if (module === require.main) { - exports.main(console.log); -} diff --git a/bigquery/queries.js b/bigquery/queries.js index c2750bc03f..f5e92c12b5 100644 --- a/bigquery/queries.js +++ b/bigquery/queries.js @@ -11,133 +11,149 @@ // See the License for the specific language governing permissions and // limitations under the License. -// [START all] -/** - * Command-line application to perform an synchronous query in BigQuery. - * - * This sample is used on this page: - * - * https://cloud.google.com/bigquery/querying-data - * - * For more information, see the README.md under /bigquery. - */ - 'use strict'; -// [START auth] +// [START complete] +// [START setup] // By default, gcloud will authenticate using the service account file specified // by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use the // project specified by the GCLOUD_PROJECT environment variable. See // https://googlecloudplatform.github.io/gcloud-node/#/docs/guides/authentication var BigQuery = require('@google-cloud/bigquery'); +// [END setup] + +function printExample (rows) { + console.log('Query Results:'); + rows.forEach(function (row) { + var str = ''; + for (var key in row) { + if (str) { + str += '\n'; + } + str += key + ': ' + row[key]; + } + console.log(str); + }); +} -// Instantiate the bigquery client -var bigquery = BigQuery(); -// [END auth] - -// [START sync_query] -/** - * Run a synchronous query. - * @param {string} query The BigQuery query to run, as a string. - * @param {function} callback Callback function to receive query results. 
- */ -function syncQuery (query, callback) { - if (!query) { - return callback(new Error('"query" is required!')); - } +function queryShakespeare (callback) { + var bigquery = BigQuery(); + + var sqlQuery = 'SELECT\n' + + ' TOP(corpus, 10) as title,\n' + + ' COUNT(*) as unique_words\n' + + 'FROM `publicdata.samples.shakespeare`;'; // Construct query object. // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query - var queryObj = { - query: query, - timeoutMs: 10000 // Time out after 10 seconds. + var options = { + query: sqlQuery, + + // Use standard SQL syntax for queries. + // See: https://cloud.google.com/bigquery/sql-reference/ + useLegacySql: false }; - // Run query - bigquery.query(queryObj, function (err, rows) { + // Run the query + // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=query + bigquery.query(options, function (err, rows) { if (err) { return callback(err); } - console.log('SyncQuery: found %d rows!', rows.length); + // Print the result + printExample(rows); + return callback(null, rows); }); } -// [END sync_query] - -// [START async_query] -/** - * Run an asynchronous query. - * @param {string} query The BigQuery query to run, as a string. - * @param {function} callback Callback function to receive job data. - */ -function asyncQuery (query, callback) { - if (!query) { - return callback(new Error('"query" is required!')); - } +// [END complete] - // Construct query object +function syncQuery (sqlQuery, callback) { + var bigquery = BigQuery(); + + // Construct query object. // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query - var queryObj = { - query: query + var options = { + query: sqlQuery, + + // Time out after 10 seconds. + timeoutMs: 10000, + + // Use standard SQL syntax for queries. + // See: https://cloud.google.com/bigquery/sql-reference/ + useLegacySql: false }; - // Submit query asynchronously - bigquery.startQuery(queryObj, function (err, job) { + // Run the query + // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=query + bigquery.query(options, function (err, rows) { if (err) { return callback(err); } - console.log('AsyncQuery: submitted job %s!', job.id); - return callback(null, job); + console.log('Received %d row(s)!', rows.length); + return callback(null, rows); }); } -/** - * Poll an asynchronous query job for results. - * @param {object} jobId The ID of the BigQuery job to poll. - * @param {function} callback Callback function to receive query results. - */ -function asyncPoll (jobId, callback) { - if (!jobId) { - return callback(new Error('"jobId" is required!')); - } +function waitForJob (jobId, callback) { + var bigquery = BigQuery(); - // Check for job status + // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/job var job = bigquery.job(jobId); - job.getMetadata(function (err, metadata) { - if (err) { - return callback(err); - } - console.log('Job status: %s', metadata.status.state); - // If job is done, get query results; if not, return an error. 
-    if (metadata.status.state === 'DONE') {
+  job
+    .on('error', callback)
+    .on('complete', function (metadata) {
+      // The job is done, get query results
+      // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/job?method=getQueryResults
       job.getQueryResults(function (err, rows) {
         if (err) {
           return callback(err);
         }
 
-        console.log('AsyncQuery: polled job %s; got %d rows!', jobId, rows.length);
+        console.log('Job complete, received %d row(s)!', rows.length);
         return callback(null, rows);
       });
-    } else {
-      return callback(new Error('Job %s is not done', jobId));
+    });
+}
+
+function asyncQuery (sqlQuery, callback) {
+  var bigquery = BigQuery();
+
+  // Construct query object
+  // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query
+  var options = {
+    query: sqlQuery,
+
+    // Use standard SQL syntax for queries.
+    // See: https://cloud.google.com/bigquery/sql-reference/
+    useLegacySql: false
+  };
+
+  // Run the query asynchronously
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=startQuery
+  bigquery.startQuery(options, function (err, job) {
+    if (err) {
+      return callback(err);
     }
+
+    console.log('Started job: %s', job.id);
+    return waitForJob(job.id, callback);
   });
 }
-// [END async_query]
-// [END all]
 
 // The command-line program
 var cli = require('yargs');
 var makeHandler = require('../utils').makeHandler;
 
 var program = module.exports = {
+  printExample: printExample,
+  queryShakespeare: queryShakespeare,
   asyncQuery: asyncQuery,
-  asyncPoll: asyncPoll,
+  waitForJob: waitForJob,
   syncQuery: syncQuery,
-  bigquery: bigquery,
   main: function (args) {
     // Run the command-line program
     cli.help().strict().parse(args).argv;
@@ -146,19 +162,19 @@ var program = module.exports = {
 
 cli
   .demand(1)
-  .command('sync <query>', 'Run a synchronous query.', {}, function (options) {
-    program.syncQuery(options.query, makeHandler());
+  .command('sync <sqlQuery>', 'Run the specified synchronous query.', {}, function (options) {
+    program.syncQuery(options.sqlQuery, makeHandler());
   })
-  .command('async <query>', 'Start an asynchronous query.', {}, function (options) {
-    program.asyncQuery(options.query, makeHandler());
+  .command('async <sqlQuery>', 'Start the specified asynchronous query.', {}, function (options) {
+    program.asyncQuery(options.sqlQuery, makeHandler());
   })
-  .command('poll <jobId>', 'Get the status of a job.', {}, function (options) {
-    program.asyncPoll(options.jobId, makeHandler());
+  .command('wait <jobId>', 'Wait for the specified job to complete and retrieve its results.', {}, function (options) {
+    program.waitForJob(options.jobId, makeHandler());
   })
-  .example('node $0 sync "SELECT * FROM publicdata:samples.natality LIMIT 5;"')
-  .example('node $0 async "SELECT * FROM publicdata:samples.natality LIMIT 5;"')
-  .example('node $0 poll 12345')
-  .wrap(80)
+  .example('node $0 sync "SELECT * FROM `publicdata.samples.natality` LIMIT 5;"')
+  .example('node $0 async "SELECT * FROM `publicdata.samples.natality` LIMIT 5;"')
+  .example('node $0 wait job_VwckYXnR8yz54GBDMykIGnrc2')
+  .wrap(120)
   .recommendCommands()
   .epilogue('For more information, see https://cloud.google.com/bigquery/docs');
 
diff --git a/bigquery/system-test/datasets.test.js b/bigquery/system-test/datasets.test.js
index 0045ae35d0..aadb99badd 100644
--- a/bigquery/system-test/datasets.test.js
+++ b/bigquery/system-test/datasets.test.js
@@ -36,11 +36,14 @@ describe('bigquery:datasets', function () {
 
   describe('createDataset', function () {
     it('should create a new dataset', function (done) {
-      program.createDataset(datasetId, function (err, dataset) {
-        assert.ifError(err);
-        assert(dataset, 'new dataset was created');
+      program.createDataset(datasetId, function (err, dataset, apiResponse) {
+        assert.equal(err, null);
+        assert.notEqual(dataset, undefined);
         assert.equal(dataset.id, datasetId);
-        assert(console.log.calledWith('Created dataset: %s', datasetId));
+        assert.equal(console.log.calledOnce, true);
+        assert.deepEqual(console.log.firstCall.args, ['Created dataset: %s', datasetId]);
+        assert.notEqual(apiResponse, undefined);
+
         done();
       });
     });
@@ -49,15 +52,16 @@ describe('bigquery:datasets', function () {
   describe('listDatasets', function () {
     it('should list datasets', function (done) {
       program.listDatasets(projectId, function (err, datasets) {
-        assert.ifError(err);
-        assert(Array.isArray(datasets));
-        assert(datasets.length > 0);
-        assert(datasets[0].id);
+        assert.equal(err, null);
+        assert.equal(Array.isArray(datasets), true);
+        assert.equal(datasets.length > 0, true);
         var matchingDatasets = datasets.filter(function (dataset) {
           return dataset.id === datasetId;
         });
         assert.equal(matchingDatasets.length, 1, 'newly created dataset is in list');
-        assert(console.log.calledWith('Found %d dataset(s)!', datasets.length));
+        assert.equal(console.log.calledOnce, true);
+        assert.deepEqual(console.log.firstCall.args, ['Found %d dataset(s)!', datasets.length]);
+
         done();
       });
     });
@@ -66,8 +70,11 @@ describe('bigquery:datasets', function () {
   describe('getDatasetSize', function () {
     it('should return the size of a dataset', function (done) {
       program.getDatasetSize(datasetId, projectId, function (err, size) {
-        assert.ifError(err);
+        assert.equal(err, null);
         assert.equal(typeof size, 'number', 'should have received a number');
+        assert.equal(console.log.calledOnce, true);
+        assert.deepEqual(console.log.firstCall.args, ['Size of %s: %d MB', datasetId, size]);
+
         done();
       });
     });
@@ -76,8 +83,10 @@ describe('bigquery:datasets', function () {
   describe('deleteDataset', function () {
     it('should delete the dataset', function (done) {
       program.deleteDataset(datasetId, function (err) {
-        assert.ifError(err);
-        assert(console.log.calledWith('Deleted dataset: %s', datasetId));
+        assert.equal(err, null);
+        assert.equal(console.log.calledOnce, true);
+        assert.deepEqual(console.log.firstCall.args, ['Deleted dataset: %s', datasetId]);
+
         done();
       });
     });
diff --git a/bigquery/system-test/getting_started.test.js b/bigquery/system-test/getting_started.test.js
deleted file mode 100644
index 142a0ae02e..0000000000
--- a/bigquery/system-test/getting_started.test.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-'use strict';
-
-var gettingStartedExample = require('../getting_started');
-
-describe('bigquery:getting_started', function () {
-  it('should run a query', function (done) {
-    gettingStartedExample.main(
-      function (err, rows) {
-        assert.ifError(err);
-        assert(Array.isArray(rows));
-        assert.equal(rows.length, 10);
-        done();
-      }
-    );
-  });
-});
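The system tests that follow all share one shape: call the exported sample function against the live API, then assert on the callback arguments and on the stubbed `console.log`. A trimmed sketch of that pattern (assuming the repo's mocha harness, which stubs `console.log` globally in a test helper):

```
// Sketch only: the shape of the system tests below.
var assert = require('assert');
var program = require('../queries');

describe('bigquery:queries (pattern)', function () {
  it('fetches rows for a valid query', function (done) {
    program.syncQuery('SELECT 1 AS x;', function (err, rows) {
      assert.equal(err, null);
      assert.equal(Array.isArray(rows), true);
      done();
    });
  });
});
```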
diff --git a/bigquery/system-test/queries.test.js b/bigquery/system-test/queries.test.js
index dff2faf770..cccb677357 100644
--- a/bigquery/system-test/queries.test.js
+++ b/bigquery/system-test/queries.test.js
@@ -13,47 +13,32 @@
 
 'use strict';
 
-var example = require('../queries');
+var program = require('../queries');
+
+var sqlQuery = 'SELECT * FROM `publicdata.samples.natality` LIMIT 5;';
 
-describe('bigquery:query', function () {
-  describe('sync_query', function () {
+describe('bigquery:queries', function () {
+  describe('syncQuery', function () {
     it('should fetch data given a query', function (done) {
-      example.syncQuery('SELECT * FROM publicdata:samples.natality LIMIT 5;',
-        function (err, data) {
-          assert.ifError(err);
-          assert.notEqual(data, null);
-          assert(Array.isArray(data));
-          assert(data.length === 5);
-          done();
-        }
-      );
+      program.syncQuery(sqlQuery, function (err, data) {
+        assert.equal(err, null);
+        assert.equal(Array.isArray(data), true);
+        assert.equal(data.length, 5);
+
+        done();
+      });
     });
   });
 
-  describe('async_query', function () {
+  describe('asyncQuery', function () {
     it('should submit a job and fetch its results', function (done) {
-      example.asyncQuery('SELECT * FROM publicdata:samples.natality LIMIT 5;',
-        function (err, job) {
-          assert.ifError(err);
-          assert.notEqual(job.id, null);
-
-          var poller = function (tries) {
-            example.asyncPoll(job.id, function (err, data) {
-              if (!err || tries === 0) {
-                assert.ifError(err);
-                assert.notEqual(data, null);
-                assert(Array.isArray(data));
-                assert(data.length === 5);
-                done();
-              } else {
-                setTimeout(function () { poller(tries - 1); }, 1000);
-              }
-            });
-          };
+      program.asyncQuery(sqlQuery, function (err, data) {
+        assert.equal(err, null);
+        assert.equal(Array.isArray(data), true);
+        assert.equal(data.length, 5);
 
-          poller(5);
-        }
-      );
+        done();
+      });
     });
   });
 });
diff --git a/bigquery/system-test/tables.test.js b/bigquery/system-test/tables.test.js
index b42f2267f7..607938f68e 100644
--- a/bigquery/system-test/tables.test.js
+++ b/bigquery/system-test/tables.test.js
@@ -22,6 +22,7 @@ var path = require('path');
 function generateUuid () {
   return 'nodejs_docs_samples_' + uuid.v4().replace(/-/gi, '_');
 }
+
 var rows = [
   { Name: 'foo', Age: 27, Weight: 80.3, IsMagic: true },
   { Name: 'bar', Age: 13, Weight: 54.6, IsMagic: false }
 ];
 var options = {
   projectId: process.env.GCLOUD_PROJECT,
   localFilePath: path.join(__dirname, '../resources/data.csv'),
-  bucket: generateUuid(),
-  file: 'data.json',
-  dataset: generateUuid(),
-  table: generateUuid(),
+  bucketName: generateUuid(),
+  fileName: 'data.json',
+  datasetId: generateUuid(),
+  tableId: generateUuid(),
   schema: 'Name:string, Age:integer, Weight:float, IsMagic:boolean',
   rows: rows
 };
-var srcDataset = options.dataset;
-var srcTable = options.table;
-var destDataset = generateUuid();
-var destTable = generateUuid();
+var srcDatasetId = options.datasetId;
+var srcTableId = options.tableId;
+var destDatasetId = generateUuid();
+var destTableId = generateUuid();
 
 describe('bigquery:tables', function () {
   before(function (done) {
     // Create bucket
-    storage.createBucket(options.bucket, function (err, bucket) {
+    storage.createBucket(options.bucketName, function (err, bucket) {
       assert.ifError(err, 'bucket creation succeeded');
       // Upload data.csv
       bucket.upload(options.localFilePath, function (err) {
         assert.ifError(err, 'file upload succeeded');
         // Create srcDataset
-        bigquery.createDataset(srcDataset, function (err) {
+        bigquery.createDataset(srcDatasetId, function (err) {
           assert.ifError(err, 'srcDataset creation succeeded');
           // Create destDataset
-          bigquery.createDataset(destDataset, function (err) {
+          bigquery.createDataset(destDatasetId, function (err) {
             assert.ifError(err, 'destDataset creation succeeded');
             done();
           });
         });
       });
     });
   });
 
   after(function (done) {
     // Delete srcDataset
-    bigquery.dataset(srcDataset).delete({ force: true }, function () {
+    bigquery.dataset(srcDatasetId).delete({ force: true }, function () {
       // Delete destDataset
-      bigquery.dataset(destDataset).delete({ force: true }, function () {
+      bigquery.dataset(destDatasetId).delete({ force: true }, function () {
         // Delete files
-        storage.bucket(options.bucket).deleteFiles({ force: true }, function (err) {
+        storage.bucket(options.bucketName).deleteFiles({ force: true }, function (err) {
           if (err) {
             return done(err);
           }
           // Delete bucket
           setTimeout(function () {
-            storage.bucket(options.bucket).delete(done);
+            storage.bucket(options.bucketName).delete(done);
           }, 2000);
         });
       });
     });
   });
 
   describe('createTable', function () {
     it('should create a new table', function (done) {
-      program.createTable(options, function (err, table) {
-        assert.ifError(err);
-        assert(table, 'new table was created');
-        assert.equal(table.id, options.table);
-        assert(console.log.calledWith('Created table: %s', options.table));
-        done();
+      program.createTable(options.datasetId, options.tableId, options.schema, function (err, table) {
+        assert.equal(err, null);
+        assert.notEqual(table, undefined);
+        assert.equal(table.id, options.tableId);
+        assert.equal(console.log.calledOnce, true);
+        assert.deepEqual(console.log.firstCall.args, ['Created table %s in %s', options.tableId, options.datasetId]);
+
+        // Listing is eventually consistent, give the index time to update
+        setTimeout(done, 5000);
       });
     });
   });
 
   describe('listTables', function () {
     it('should list tables', function (done) {
-      program.listTables(options, function (err, tables) {
-        assert.ifError(err);
-        assert(Array.isArray(tables));
-        assert(tables.length > 0);
-        assert(tables[0].id);
+      program.listTables(options.datasetId, function (err, tables) {
+        assert.equal(err, null);
+        assert.equal(Array.isArray(tables), true);
+        assert.equal(tables.length > 0, true);
         var matchingTables = tables.filter(function (table) {
-          return table.id === options.table;
+          return table.id === options.tableId;
         });
         assert.equal(matchingTables.length, 1, 'newly created table is in list');
-        assert(console.log.calledWith('Found %d table(s)!', tables.length));
+        assert.equal(console.log.calledOnce, true);
+        assert.deepEqual(console.log.firstCall.args, ['Found %d table(s)!', tables.length]);
+
         done();
       });
     });
   });
 
-  describe('import', function () {
+  describe('importLocalFile', function () {
     it('should import local file', function (done) {
-      program.importFile({
-        file: options.localFilePath,
-        projectId: options.projectId,
-        dataset: options.dataset,
-        table: options.table
-      }, function (err, metadata) {
-        assert.ifError(err);
-        assert(metadata, 'got metadata');
+      program.importLocalFile(options.datasetId, options.tableId, options.localFilePath, function (err, metadata, apiResponse) {
+        assert.equal(err, null);
+        assert.notEqual(metadata, undefined);
         assert.deepEqual(metadata.status, {
           state: 'DONE'
         }, 'job completed');
+        assert.notEqual(apiResponse, undefined);
+
         done();
       });
     });
   });
 
   describe('exportTableToGCS', function () {
     it('should export data to GCS', function (done) {
-      program.exportTableToGCS(options, function (err, metadata) {
-        assert.ifError(err, 'no error occurred');
-        assert(metadata, 'job metadata was received');
-        assert(metadata.status, 'job metadata has status');
-        assert.equal(metadata.status.state, 'DONE', 'job was finished');
+      program.exportTableToGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, function (err, metadata, apiResponse) {
+        assert.equal(err, null);
+        assert.notEqual(metadata, undefined);
+        assert.deepEqual(metadata.status, { state: 'DONE' });
+        assert.notEqual(apiResponse, undefined);
 
-        storage.bucket(options.bucket).file(options.file).exists(function (err, exists) {
-          assert.ifError(err, 'file existence check succeeded');
-          assert(exists, 'export destination exists');
+        storage.bucket(options.bucketName).file(options.fileName).exists(function (err, exists) {
+          assert.equal(err, null);
+          assert.equal(exists, true);
+
           done();
         });
       });
     });
   });
 
   describe('insertRowsAsStream', function () {
     it('should insert rows into a table', function (done) {
-      var table = bigquery.dataset(options.dataset).table(options.table);
-      table.getRows({}, function (err, startRows) {
+      var table = bigquery.dataset(options.datasetId).table(options.tableId);
+
+      table.getRows(function (err, startRows) {
         assert.equal(err, null);
 
-        program.insertRowsAsStream(options, function (err, insertErrors) {
+        program.insertRowsAsStream(options.datasetId, options.tableId, options.rows, function (err, insertErrors, apiResponse) {
           assert.equal(err, null);
-          assert.deepEqual(insertErrors, [], 'no per-row insert errors occurred');
+          assert.deepEqual(insertErrors, []);
+          assert.notEqual(apiResponse, undefined);
 
           setTimeout(function () {
-            table.getRows({}, function (err, endRows) {
+            table.getRows(function (err, endRows) {
               assert.equal(err, null);
-              assert.equal(startRows.length + 2, endRows.length, 'insertRows() added 2 rows');
+              assert.equal(startRows.length + 2, endRows.length);
+
               done();
             });
           }, 2000);
         });
       });
     });
   });
 
   describe('copyTable', function () {
     it('should copy a table between datasets', function (done) {
-      program.copyTable(srcDataset, srcTable, destDataset, destTable, function (err, metadata) {
+      program.copyTable(srcDatasetId, srcTableId, destDatasetId, destTableId, function (err, metadata, apiResponse) {
         assert.equal(err, null);
+        assert.notEqual(metadata, undefined);
         assert.deepEqual(metadata.status, { state: 'DONE' });
+        assert.notEqual(apiResponse, undefined);
 
-        bigquery.dataset(srcDataset).table(srcTable).exists(
-          function (err, exists) {
+        bigquery.dataset(srcDatasetId).table(srcTableId).exists(function (err, exists) {
+          assert.equal(err, null);
+          assert.equal(exists, true);
+
+          bigquery.dataset(destDatasetId).table(destTableId).exists(function (err, exists) {
             assert.equal(err, null);
-            assert.equal(exists, true, 'srcTable exists');
-
-            bigquery.dataset(destDataset).table(destTable).exists(
-              function (err, exists) {
-                assert.equal(err, null);
-                assert.equal(exists, true, 'destTable exists');
-                done();
-              }
-            );
-          }
-        );
+            assert.equal(exists, true);
+
+            done();
+          });
+        });
       });
     });
   });
 
   describe('browseRows', function () {
     it('should display rows in a table', function (done) {
-      program.browseRows(options.dataset, options.table, function (err, rows) {
+      program.browseRows(options.datasetId, options.tableId, function (err, rows) {
         assert.equal(err, null);
         assert.equal(Array.isArray(rows), true);
         assert.equal(rows.length > 0, true);
         assert.equal(console.log.calledOnce, true);
         assert.deepEqual(console.log.firstCall.args, ['Found %d row(s)!', rows.length]);
+
         done();
       });
     });
   });
 
   describe('deleteTable', function () {
     it('should delete table', function (done) {
-      program.deleteTable(options, function (err) {
-        assert.ifError(err);
-        assert(console.log.calledWith('Deleted table: %s', options.table));
+      program.deleteTable(options.datasetId, options.tableId, function (err) {
+        assert.equal(err, null);
+        assert.equal(console.log.calledOnce, true);
+        assert.deepEqual(console.log.firstCall.args, ['Deleted table %s from %s', options.tableId, options.datasetId]);
+
         done();
       });
     });
   });
});
diff --git a/bigquery/tables.js b/bigquery/tables.js
index 694c0fc640..2858c87375 100644
--- a/bigquery/tables.js
+++ b/bigquery/tables.js
@@ -13,62 +13,41 @@
 
 'use strict';
 
-// [START all]
 // [START setup]
 // By default, gcloud will authenticate using the service account file specified
 // by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use the
 // project specified by the GCLOUD_PROJECT environment variable. See
-// https://googlecloudplatform.github.io/gcloud-node/#/docs/guides/authentication
+// https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/authentication
 var BigQuery = require('@google-cloud/bigquery');
-var Storage = require('@google-cloud/storage');
-
-// Instantiate the BigQuery and Storage clients
-var bigquery = BigQuery();
-var storage = Storage();
 // [END setup]
 
-// [START create_table]
-/**
- * Creates a new table with the given name in the specified dataset.
- *
- * @param {object} options Configuration options.
- * @param {string} options.dataset The dataset of the new table.
- * @param {string} options.table The name for the new table.
- * @param {string|object} [options.schema] The schema for the new table.
- * @param {function} cb The callback function.
- */ -function createTable (options, callback) { - // var table = bigquery.dataset(options.dataset).table(options.table); - var dataset = bigquery.dataset(options.dataset); - var config = {}; - if (options.schema) { - config.schema = options.schema; - } +function createTable (datasetId, tableId, schema, callback) { + var bigquery = BigQuery(); + var dataset = bigquery.dataset(datasetId); + + // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource + var options = { + schema: schema + }; - // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table - dataset.createTable(options.table, config, function (err, table) { + // Create a new table in the given dataset + // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=createTable + dataset.createTable(tableId, options, function (err, table, apiResponse) { if (err) { return callback(err); } - console.log('Created table: %s', options.table); - return callback(null, table); + console.log('Created table %s in %s', tableId, datasetId); + return callback(null, table, apiResponse); }); } -// [END create_table] - -// [START list_tables] -/** - * List tables in the specified dataset. - * - * @param {object} options Configuration options. - * @param {string} options.dataset The dataset of the new table. - * @param {Function} callback Callback function. - */ -function listTables (options, callback) { - var dataset = bigquery.dataset(options.dataset); - - // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/dataset + +function listTables (datasetId, callback) { + var bigquery = BigQuery(); + var dataset = bigquery.dataset(datasetId); + + // List the tables in the specified dataset + // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=getTables dataset.getTables(function (err, tables) { if (err) { return callback(err); @@ -78,14 +57,14 @@ function listTables (options, callback) { return callback(null, tables); }); } -// [END list_tables] -function browseRows (dataset, table, callback) { +function browseRows (datasetId, tableId, callback) { var bigquery = BigQuery(); - var tableObj = bigquery.dataset(dataset).table(table); + var table = bigquery.dataset(datasetId).table(tableId); + // Retrieve rows from the specified table // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=getRows - tableObj.getRows(function (err, rows) { + table.getRows(function (err, rows) { if (err) { return callback(err); } @@ -95,38 +74,30 @@ function browseRows (dataset, table, callback) { }); } -// [START delete_table] -/** - * Deletes a table with the specified name from the specified dataset. - * - * @param {object} options Configuration options. - * @param {string} options.dataset The dataset of the new table. - * @param {string} options.table The name for the new table. - * @param {function} cb The callback function. 
- */
-function deleteTable (options, callback) {
-  var table = bigquery.dataset(options.dataset).table(options.table);
-
-  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table
+function deleteTable (datasetId, tableId, callback) {
+  var bigquery = BigQuery();
+  var table = bigquery.dataset(datasetId).table(tableId);
+
+  // Delete the specified table
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=delete
   table.delete(function (err) {
     if (err) {
       return callback(err);
     }
 
-    console.log('Deleted table: %s', options.table);
+    console.log('Deleted table %s from %s', tableId, datasetId);
     return callback(null);
   });
 }
-// [END delete_table]
 
-function copyTable (srcDataset, srcTable, destDataset, destTable, callback) {
+function copyTable (srcDatasetId, srcTableId, destDatasetId, destTableId, callback) {
   var bigquery = BigQuery();
-  var srcTableObj = bigquery.dataset(srcDataset).table(srcTable);
-  var destTableObj = bigquery.dataset(destDataset).table(destTable);
+  var srcTable = bigquery.dataset(srcDatasetId).table(srcTableId);
+  var destTable = bigquery.dataset(destDatasetId).table(destTableId);
 
   // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=copy
-  srcTableObj.copy(destTableObj, function (err, job) {
+  srcTable.copy(destTable, function (err, job, apiResponse) {
     if (err) {
       return callback(err);
     }
@@ -136,37 +107,17 @@ function copyTable (srcDatasetId, srcTableId, destDatasetId, destTableId, callback) {
       .on('error', callback)
       .on('complete', function (metadata) {
         console.log('Completed job: %s', job.id);
-        return callback(null, metadata);
+        return callback(null, metadata, apiResponse);
       });
   });
 }
 
-// [START import_file]
-/**
- * Load a csv file into a BigQuery table.
- *
- * @param {string} file Path to file to load.
- * @param {string} dataset The dataset.
- * @param {string} table The table.
- * @param {string} [format] The format of the file to be imported.
- * @param {function} callback The callback function.
- */
-function importFile (options, callback) {
-  var file;
-  if (options.bucket) {
-    // File is in Google Cloud Storage, e.g. gs://my-bucket/file.csv
-    file = storage.bucket(options.bucket).file(options.file);
-  } else {
-    // File is local, e.g. ./data/file.csv
-    file = options.file;
-  }
-  var table = bigquery.dataset(options.dataset).table(options.table);
-  var config = {
-    format: options.format
-  };
+function importLocalFile (datasetId, tableId, fileName, callback) {
+  var bigquery = BigQuery();
+  var table = bigquery.dataset(datasetId).table(tableId);
 
-  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table?method=import
-  table.import(file, config, function (err, job) {
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=import
+  table.import(fileName, function (err, job, apiResponse) {
     if (err) {
       console.log(err.stack);
       return callback(err);
     }
@@ -177,36 +128,53 @@ function importLocalFile (datasetId, tableId, fileName, callback) {
     console.log('Started job: %s', job.id);
     job
       .on('error', callback)
       .on('complete', function (metadata) {
         console.log('Completed job: %s', job.id);
-        return callback(null, metadata);
+        return callback(null, metadata, apiResponse);
       });
   });
 }
+
+// [START import_file_from_gcs]
+var Storage = require('@google-cloud/storage');
+
+function importFileFromGCS (datasetId, tableId, bucketName, fileName, callback) {
+  var bigquery = BigQuery();
+  var storage = Storage();
+
+  var table = bigquery.dataset(datasetId).table(tableId);
+  var file = storage.bucket(bucketName).file(fileName);
+
+  // Import the file from Google Cloud Storage
+  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=import
+  table.import(file, function (err, job, apiResponse) {
+    if (err) {
+      return callback(err);
+    }
+
+    console.log('Started job: %s', job.id);
+    job
+      .on('error', callback)
+      .on('complete', function (metadata) {
+        console.log('Completed job: %s', job.id);
+        return callback(null, metadata, apiResponse);
+      });
+  });
+}
+// [END import_file_from_gcs]
 
+/* eslint-disable no-redeclare */
 // [START export_table_to_gcs]
-/**
- * Export a table from BigQuery to Google Cloud Storage.
- *
- * @param {object} options Configuration options.
- * @param {string} options.bucket A Google Cloud Storage bucket to use for storage.
- * @param {string} options.file The file to save results to within Google Cloud Storage.
- * @param {string} options.dataset The ID of the dataset to use.
- * @param {string} options.table The ID of the project to use.
- * @param {string} options.format Format to export as - either 'CSV', 'JSON', or 'AVRO'.
- * @param {boolean} [options.gzip] Optional. Whether or not data should be compressed using GZIP.
- * @param {function} callback Callback function to receive query results.
- */ -function exportTableToGCS (options, callback) { - var gcsFileObj = storage.bucket(options.bucket).file(options.file); - var table = bigquery.dataset(options.dataset).table(options.table); - var config = { - format: options.format, - gzip: options.gzip - }; +var Storage = require('@google-cloud/storage'); + +function exportTableToGCS (datasetId, tableId, bucketName, fileName, callback) { + var bigquery = BigQuery(); + var storage = Storage(); - // Export table - // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table?method=export - table.export(gcsFileObj, config, function (err, job) { + var table = bigquery.dataset(datasetId).table(tableId); + var file = storage.bucket(bucketName).file(fileName); + + // Export a table to Google Cloud Storage + // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=export + table.export(file, function (err, job, apiResponse) { if (err) { return callback(err); } @@ -216,33 +184,27 @@ function exportTableToGCS (options, callback) { .on('error', callback) .on('complete', function (metadata) { console.log('Completed job: %s', job.id); - return callback(null, metadata); + return callback(null, metadata, apiResponse); }); }); } // [END export_table_to_gcs] +/* eslint-enable no-redeclare */ + +function insertRowsAsStream (datasetId, tableId, rows, callback) { + var bigquery = BigQuery(); + var table = bigquery.dataset(datasetId).table(tableId); -// [START insert_rows_as_stream] -/** - * Insert rows (as a stream) into a BigQuery table. - * @param {object} options Configuration options. - * @param {array} options.rows An array of rows to insert into a BigQuery table. - * @param {string} options.dataset The ID of the dataset containing the target table. - * @param {string} options.table The ID of the table to insert rows into. - * @param {function} callback Callback function to receive query status. - */ -function insertRowsAsStream (options, callback) { - var table = bigquery.dataset(options.dataset).table(options.table); - table.insert(options.rows, function (err, insertErrors) { + // Insert rows into a table + // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=insert + table.insert(rows, function (err, insertErrors, apiResponse) { if (err) { return callback(err); } - console.log('Inserted %d rows!', options.rows.length); - return callback(null, insertErrors); + console.log('Inserted %d row(s)!', rows.length); + return callback(null, insertErrors, apiResponse); }); } -// [END insert_rows_as_stream] -// [END all] // The command-line program var cli = require('yargs'); @@ -254,7 +216,8 @@ var program = module.exports = { listTables: listTables, browseRows: browseRows, deleteTable: deleteTable, - importFile: importFile, + importLocalFile: importLocalFile, + importFileFromGCS: importFileFromGCS, exportTableToGCS: exportTableToGCS, insertRowsAsStream: insertRowsAsStream, copyTable: copyTable, @@ -266,61 +229,39 @@ var program = module.exports = { cli .demand(1) - .command('create
', 'Create a new table in the specified dataset.', {}, function (options) { - program.createTable(utils.pick(options, ['dataset', 'table']), utils.makeHandler()); + .command('create ', 'Create a new table with the specified ID in the specified dataset.', {}, function (options) { + program.createTable(options.datasetId, options.tableId, utils.makeHandler(false)); }) - .command('list ', 'List tables in the specified dataset.', {}, function (options) { - program.listTables(utils.pick(options, ['dataset']), utils.makeHandler(true, 'id')); + .command('list ', 'List tables in the specified dataset.', {}, function (options) { + program.listTables(options.datasetId, utils.makeHandler(true, 'id')); }) - .command('delete
', 'Delete a table in the specified dataset.', {}, function (options) { - program.deleteTable(utils.pick(options, ['dataset', 'table']), utils.makeHandler()); + .command('delete ', 'Delete the specified table from the specified dataset.', {}, function (options) { + program.deleteTable(options.datasetId, options.tableId, utils.makeHandler(false)); }) - .command('copy ', - 'Make a copy of an existing table.', {}, - function (options) { - program.copyTable( - options.srcDataset, - options.srcTable, - options.destDataset, - options.destTable, - utils.makeHandler() - ); - } - ) - .command('browse
', 'List the rows in a BigQuery table.', {}, function (options) { - program.browseRows(options.dataset, options.table, utils.makeHandler()); + .command('copy ', 'Make a copy of an existing table.', {}, function (options) { + program.copyTable(options.srcDatasetId, options.srcTableId, options.destDatasetId, options.destTableId, utils.makeHandler(false)); }) - .command('import
', 'Import data from a local file or a Google Cloud Storage file into BigQuery.', { - bucket: { + .command('browse ', 'List the rows from the specified table.', {}, function (options) { + program.browseRows(options.datasetId, options.tableId, utils.makeHandler()); + }) + .command('import ', 'Import data from a local file or a Google Cloud Storage file into the specified table.', { + bucketName: { alias: 'b', requiresArg: true, - description: 'Specify Cloud Storage bucket.', + description: 'Specify a Cloud Storage bucket.', type: 'string' - }, - format: { - alias: 'f', - requiresArg: true, - type: 'string', - choices: ['JSON', 'CSV', 'AVRO'] } }, function (options) { - program.importFile(utils.pick(options, ['dataset', 'table', 'file', 'format', 'bucket']), utils.makeHandler()); - }) - .command('export
-    format: {
-      alias: 'f',
-      requiresArg: true,
-      type: 'string',
-      choices: ['JSON', 'CSV', 'AVRO']
-    },
-    gzip: {
-      type: 'boolean',
-      description: 'Whether to compress the exported table using gzip. Defaults to false.'
+    if (options.bucketName) {
+      program.importFileFromGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, utils.makeHandler(false));
+    } else {
+      program.importLocalFile(options.datasetId, options.tableId, options.fileName, utils.makeHandler(false));
     }
-  }, function (options) {
-    program.exportTableToGCS(utils.pick(options, ['dataset', 'table', 'bucket', 'file', 'format', 'gzip']), utils.makeHandler());
   })
-  .command('insert <dataset> <table> <json_or_file>',
+  .command('export <datasetId> <tableId> <bucketName> <fileName>', 'Export a table from BigQuery to Google Cloud Storage.', {}, function (options) {
+    program.exportTableToGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, utils.makeHandler(false));
+  })
+  .command('insert <datasetId> <tableId> <json_or_file>
', + .command('export ', 'Export a table from BigQuery to Google Cloud Storage.', {}, function (options) { + program.exportTableToGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, utils.makeHandler(false)); + }) + .command('insert ', 'Insert a JSON array (as a string or newline-delimited file) into a BigQuery table.', {}, function (options) { var content; @@ -339,8 +280,7 @@ cli throw new Error('"json_or_file" (or the file it points to) is not a valid JSON array.'); } - options.rows = rows; - program.insertRowsAsStream(utils.pick(options, ['rows', 'dataset', 'table']), utils.makeHandler()); + program.insertRowsAsStream(options.datasetId, options.tableId, rows, utils.makeHandler(false)); } ) .example( diff --git a/bigquery/test/datasets.test.js b/bigquery/test/datasets.test.js index aa2601a3f6..5d91dda111 100644 --- a/bigquery/test/datasets.test.js +++ b/bigquery/test/datasets.test.js @@ -18,6 +18,7 @@ var datasetId = 'foo'; var projectId = process.env.GCLOUD_PROJECT; function getSample () { + var apiResponseMock = {}; var tableMock = { get: sinon.stub(), metadata: { @@ -29,7 +30,7 @@ function getSample () { var datasetsMock = [{ id: datasetId }]; var datasetMock = { getTables: sinon.stub().yields(null, tablesMock), - create: sinon.stub().yields(null, datasetsMock[0]), + create: sinon.stub().yields(null, datasetsMock[0], apiResponseMock), delete: sinon.stub().yields(null) }; var bigqueryMock = { @@ -48,7 +49,8 @@ function getSample () { datasets: datasetsMock, dataset: datasetMock, tables: tablesMock, - table: tableMock + table: tableMock, + apiResponse: apiResponseMock } }; } @@ -64,7 +66,7 @@ describe('bigquery:datasets', function () { assert.equal(sample.mocks.dataset.create.calledOnce, true); assert.deepEqual(sample.mocks.dataset.create.firstCall.args.slice(0, -1), []); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.datasets[0]]); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.datasets[0], sample.mocks.apiResponse]); assert.equal(console.log.calledOnce, true); assert.deepEqual(console.log.firstCall.args, ['Created dataset: %s', datasetId]); }); diff --git a/bigquery/test/getting_started.test.js b/bigquery/test/getting_started.test.js deleted file mode 100644 index 6b7177e6b7..0000000000 --- a/bigquery/test/getting_started.test.js +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -'use strict'; - -describe('bigquery:getting_started', function () { - it('should be tested'); -}); diff --git a/bigquery/test/queries.test.js b/bigquery/test/queries.test.js index 1622696bfc..c5d95c6e27 100644 --- a/bigquery/test/queries.test.js +++ b/bigquery/test/queries.test.js @@ -15,6 +15,11 @@ var proxyquire = require('proxyquire').noCallThru(); +var shakespeareQuery = 'SELECT\n' + + ' TOP(corpus, 10) as title,\n' + + ' COUNT(*) as unique_words\n' + + 'FROM `publicdata.samples.shakespeare`;'; + function getSample () { var natalityMock = [ { year: '2001' }, @@ -31,8 +36,10 @@ function getSample () { var jobMock = { id: jobId, getQueryResults: sinon.stub().yields(null, natalityMock), - getMetadata: sinon.stub().yields(null, metadataMock) + getMetadata: sinon.stub().yields(null, metadataMock), + on: sinon.stub().returnsThis() }; + jobMock.on.withArgs('complete').yields(null, metadataMock); var bigqueryMock = { job: sinon.stub().returns(jobMock), @@ -58,36 +65,76 @@ function getSample () { } describe('bigquery:query', function () { - describe('syncQuery', function () { - var query = 'foo'; - + describe('printExample', function () { it('should return results', function () { var example = getSample(); + + example.program.printExample([ + { + foo: 'bar', + beep: 'boop' + } + ]); + + assert.equal(console.log.calledTwice, true); + assert.deepEqual(console.log.firstCall.args, ['Query Results:']); + assert.deepEqual(console.log.secondCall.args, ['foo: bar\nbeep: boop']); + }); + }); + + describe('queryShakespeare', function () { + it('should query shakespeare', function () { + var example = getSample(); var callback = sinon.stub(); + var mockResult = []; + example.mocks.bigquery.query.yields(null, mockResult); - example.program.syncQuery(query, callback); + example.program.queryShakespeare(callback); assert.equal(example.mocks.bigquery.query.calledOnce, true); assert.deepEqual(example.mocks.bigquery.query.firstCall.args.slice(0, -1), [{ - query: query, - timeoutMs: 10000 + query: shakespeareQuery, + useLegacySql: false }]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, example.mocks.natality]); + assert.deepEqual(callback.firstCall.args, [null, mockResult]); assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['SyncQuery: found %d rows!', example.mocks.natality.length]); + assert.deepEqual(console.log.firstCall.args, ['Query Results:']); }); - it('should require a query', function () { - var error = new Error('"query" is required!'); + it('should handle error', function () { + var error = new Error('error'); var example = getSample(); var callback = sinon.stub(); + example.mocks.bigquery.query.yields(error); - example.program.syncQuery(null, callback); + example.program.queryShakespeare(callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); }); + }); + + describe('syncQuery', function () { + var query = 'foo'; + + it('should return results', function () { + var example = getSample(); + var callback = sinon.stub(); + + example.program.syncQuery(query, callback); + + assert.equal(example.mocks.bigquery.query.calledOnce, true); + assert.deepEqual(example.mocks.bigquery.query.firstCall.args.slice(0, -1), [{ + query: query, + timeoutMs: 10000, + useLegacySql: false + }]); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, example.mocks.natality]); + assert.equal(console.log.calledOnce, true); + 
assert.deepEqual(console.log.firstCall.args, ['Received %d row(s)!', example.mocks.natality.length]); + }); it('should handle error', function () { var error = new Error('error'); @@ -113,23 +160,14 @@ describe('bigquery:query', function () { assert.equal(example.mocks.bigquery.startQuery.calledOnce, true); assert.deepEqual(example.mocks.bigquery.startQuery.firstCall.args.slice(0, -1), [{ - query: query + query: query, + useLegacySql: false }]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, example.mocks.job]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['AsyncQuery: submitted job %s!', example.jobId]); - }); - - it('should require a query', function () { - var error = new Error('"query" is required!'); - var example = getSample(); - var callback = sinon.stub(); - - example.program.asyncQuery(null, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); + assert.deepEqual(callback.firstCall.args, [null, example.mocks.natality]); + assert.equal(console.log.calledTwice, true); + assert.deepEqual(console.log.firstCall.args, ['Started job: %s', example.jobId]); + assert.deepEqual(console.log.secondCall.args, ['Job complete, received %d row(s)!', example.mocks.natality.length]); }); it('should handle error', function () { @@ -145,62 +183,23 @@ describe('bigquery:query', function () { }); }); - describe('asyncPoll', function () { + describe('waitForJob', function () { it('should get the results of a job given its ID', function () { var example = getSample(); var callback = sinon.stub(); example.mocks.bigquery.job.returns(example.mocks.job); - example.program.asyncPoll(example.jobId, callback); + example.program.waitForJob(example.jobId, callback); - assert.equal(example.mocks.job.getMetadata.calledOnce, true); - assert.deepEqual(example.mocks.job.getMetadata.firstCall.args.slice(0, -1), []); + assert.equal(example.mocks.job.on.calledTwice, true); + assert.deepEqual(example.mocks.job.on.firstCall.args.slice(0, -1), ['error']); + assert.deepEqual(example.mocks.job.on.secondCall.args.slice(0, -1), ['complete']); assert.equal(example.mocks.job.getQueryResults.calledOnce, true); assert.deepEqual(example.mocks.job.getQueryResults.firstCall.args.slice(0, -1), []); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [null, example.mocks.natality]); - assert.equal(console.log.calledTwice, true); - assert.deepEqual(console.log.firstCall.args, ['Job status: %s', example.mocks.metadata.status.state]); - assert.deepEqual(console.log.secondCall.args, ['AsyncQuery: polled job %s; got %d rows!', example.jobId, example.mocks.natality.length]); - }); - - it('should error when job is not finished', function () { - var example = getSample(); - var callback = sinon.stub(); - - var pendingState = { status: { state: 'PENDING' } }; - example.mocks.job.getMetadata.yields(null, pendingState); - - example.program.asyncPoll(example.jobId, callback); - - assert.equal(example.mocks.job.getMetadata.calledOnce, true); - assert.deepEqual(example.mocks.job.getMetadata.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [new Error('Job %s is not done', example.jobId)]); assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Job status: %s', pendingState.status.state]); - }); - - it('should require a job ID', function () { - var example = 
getSample(); - var callback = sinon.stub(); - - example.program.asyncPoll(null, callback); - - assert.deepEqual(callback.firstCall.args[0], Error('"jobId" is required!')); - assert.equal(callback.firstCall.args[1], undefined); - }); - - it('should handle getMetadata error', function () { - var error = new Error('error'); - var example = getSample(); - var callback = sinon.stub(); - example.mocks.job.getMetadata.yields(error); - - example.program.asyncPoll(example.jobId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); + assert.deepEqual(console.log.firstCall.args, ['Job complete, received %d row(s)!', example.mocks.natality.length]); }); it('should handle error', function () { @@ -209,7 +208,7 @@ describe('bigquery:query', function () { var callback = sinon.stub(); example.mocks.job.getQueryResults.yields(error); - example.program.asyncPoll(example.jobId, callback); + example.program.waitForJob(example.jobId, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); @@ -238,13 +237,13 @@ describe('bigquery:query', function () { assert.deepEqual(program.asyncQuery.firstCall.args.slice(0, -1), [query]); }); - it('should call asyncPoll', function () { + it('should call waitForJob', function () { var program = getSample().program; - sinon.stub(program, 'asyncPoll'); - program.main(['poll', jobId]); - assert.equal(program.asyncPoll.calledOnce, true); - assert.deepEqual(program.asyncPoll.firstCall.args.slice(0, -1), [jobId]); + sinon.stub(program, 'waitForJob'); + program.main(['wait', jobId]); + assert.equal(program.waitForJob.calledOnce, true); + assert.deepEqual(program.waitForJob.firstCall.args.slice(0, -1), [jobId]); }); }); }); diff --git a/bigquery/test/tables.test.js b/bigquery/test/tables.test.js index b77b172c3f..16592ffc5c 100644 --- a/bigquery/test/tables.test.js +++ b/bigquery/test/tables.test.js @@ -14,16 +14,15 @@ 'use strict'; var proxyquire = require('proxyquire').noCallThru(); -var bucket = 'bucket'; -var file = 'file'; +var bucketName = 'bucket'; +var fileName = 'file'; var jobId = 'job'; -var dataset = 'dataset'; -var table = 'table'; -var srcDataset = dataset; -var srcTable = table; -var destDataset = dataset + '_dest'; -var destTable = table + '_dest'; -var format = 'JSON'; +var datasetId = 'dataset'; +var tableId = 'table'; +var srcDatasetId = datasetId; +var srcTableId = tableId; +var destDatasetId = datasetId + '_dest'; +var destTableId = tableId + '_dest'; var schema = 'schema'; var jsonArray = [ { name: 'foo', age: 27 }, @@ -36,9 +35,10 @@ var invalidJsonString = 'INVALID'; var errorList = ['error 1', 'error 2']; function getSample () { + var apiResponseMock = {}; var tableMocks = [ { - id: table + id: tableId } ]; var bucketMock = { @@ -56,16 +56,16 @@ function getSample () { }; jobMock.on.withArgs('complete').yields(metadataMock); var tableMock = { - export: sinon.stub().yields(null, jobMock), - copy: sinon.stub().yields(null, jobMock), - import: sinon.stub().yields(null, jobMock), - insert: sinon.stub().yields(null, errorList), + export: sinon.stub().yields(null, jobMock, apiResponseMock), + copy: sinon.stub().yields(null, jobMock, apiResponseMock), + import: sinon.stub().yields(null, jobMock, apiResponseMock), + insert: sinon.stub().yields(null, errorList, apiResponseMock), getRows: sinon.stub().yields(null, jsonArray), delete: sinon.stub().yields(null) }; var datasetMock = { table: sinon.stub().returns(tableMock), - createTable: sinon.stub().yields(null, 
tableMocks[0]), + createTable: sinon.stub().yields(null, tableMocks[0], apiResponseMock), getTables: sinon.stub().yields(null, tableMocks) }; var bigqueryMock = { @@ -98,7 +98,8 @@ function getSample () { bucket: bucketMock, dataset: datasetMock, fs: fsMock, - tables: tableMocks + tables: tableMocks, + apiResponse: apiResponseMock } }; } @@ -108,38 +109,29 @@ describe('bigquery:tables', function () { it('should create a table', function () { var sample = getSample(); var callback = sinon.stub(); - var options = { - dataset: dataset, - table: table - }; - sample.program.createTable(options, callback); + sample.program.createTable(datasetId, tableId, undefined, callback); assert.equal(sample.mocks.dataset.createTable.calledOnce, true); - assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [options.table, {}]); + assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [tableId, { schema: undefined }]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0]]); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0], sample.mocks.apiResponse]); assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Created table: %s', options.table]); + assert.deepEqual(console.log.firstCall.args, ['Created table %s in %s', tableId, datasetId]); }); it('should create a table with a schema', function () { var sample = getSample(); var callback = sinon.stub(); - var options = { - dataset: dataset, - table: table, - schema: schema - }; - sample.program.createTable(options, callback); + sample.program.createTable(datasetId, tableId, schema, callback); assert.equal(sample.mocks.dataset.createTable.calledOnce, true); - assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [options.table, { schema: schema }]); + assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [tableId, { schema: schema }]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0]]); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0], sample.mocks.apiResponse]); assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Created table: %s', options.table]); + assert.deepEqual(console.log.firstCall.args, ['Created table %s in %s', tableId, datasetId]); }); it('should handle error', function () { @@ -148,7 +140,7 @@ describe('bigquery:tables', function () { var callback = sinon.stub(); sample.mocks.dataset.createTable.yields(error); - sample.program.createTable({}, callback); + sample.program.createTable(datasetId, tableId, undefined, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); @@ -159,11 +151,8 @@ describe('bigquery:tables', function () { it('should list tables', function () { var sample = getSample(); var callback = sinon.stub(); - var options = { - dataset: dataset - }; - sample.program.listTables(options, callback); + sample.program.listTables(datasetId, callback); assert.equal(sample.mocks.dataset.getTables.calledOnce, true); assert.deepEqual(sample.mocks.dataset.getTables.firstCall.args.slice(0, -1), []); @@ -191,7 +180,7 @@ describe('bigquery:tables', function () { var sample = getSample(); var callback = sinon.stub(); - sample.program.browseRows(dataset, table, callback); + sample.program.browseRows(datasetId, tableId, callback); 
assert.equal(sample.mocks.table.getRows.calledOnce, true); assert.deepEqual(sample.mocks.table.getRows.firstCall.args.slice(0, -1), []); @@ -207,7 +196,7 @@ describe('bigquery:tables', function () { var callback = sinon.stub(); sample.mocks.table.getRows.yields(error); - sample.program.browseRows(dataset, table, callback); + sample.program.browseRows(datasetId, tableId, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); @@ -218,19 +207,15 @@ describe('bigquery:tables', function () { it('should delete a table', function () { var sample = getSample(); var callback = sinon.stub(); - var options = { - dataset: dataset, - table: table - }; - sample.program.deleteTable(options, callback); + sample.program.deleteTable(datasetId, tableId, callback); assert.equal(sample.mocks.table.delete.calledOnce, true); assert.deepEqual(sample.mocks.table.delete.firstCall.args.slice(0, -1), []); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [null]); assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Deleted table: %s', options.table]); + assert.deepEqual(console.log.firstCall.args, ['Deleted table %s from %s', tableId, datasetId]); }); it('should handle error', function () { @@ -239,51 +224,53 @@ describe('bigquery:tables', function () { var callback = sinon.stub(); sample.mocks.table.delete.yields(error); - sample.program.deleteTable({}, callback); + sample.program.deleteTable(datasetId, tableId, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); }); }); - describe('importFile', function () { + describe('importLocalFile', function () { it('should import a local file', function () { var sample = getSample(); var callback = sinon.stub(); - var options = { - dataset: dataset, - table: table, - file: file - }; - sample.program.importFile(options, callback); + sample.program.importLocalFile(datasetId, tableId, fileName, callback); assert.equal(sample.mocks.table.import.calledOnce, true); - assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [options.file, { format: undefined }]); + assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [fileName]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, sample.mocks.apiResponse]); assert.equal(console.log.calledTwice, true); assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); }); + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.table.import.yields(error); + + sample.program.importLocalFile(datasetId, tableId, fileName, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('importFileFromGCS', function () { it('should import a GCS file', function () { var sample = getSample(); var callback = sinon.stub(); - var options = { - dataset: dataset, - table: table, - file: file, - bucket: bucket, - format: format - }; - sample.program.importFile(options, callback); + sample.program.importFileFromGCS(datasetId, tableId, bucketName, fileName, callback); assert.equal(sample.mocks.table.import.calledOnce, true); - 
assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [sample.mocks.file, { format: format }]); + assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [sample.mocks.file]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, sample.mocks.apiResponse]); assert.equal(console.log.calledTwice, true); assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); @@ -295,7 +282,7 @@ describe('bigquery:tables', function () { var callback = sinon.stub(); sample.mocks.table.import.yields(error); - sample.program.importFile({}, callback); + sample.program.importFileFromGCS(datasetId, tableId, bucketName, fileName, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); @@ -307,7 +294,7 @@ describe('bigquery:tables', function () { var sample = getSample(); var callback = sinon.stub(); - sample.program.copyTable(srcDataset, srcTable, destDataset, destTable, callback); + sample.program.copyTable(srcDatasetId, srcTableId, destDatasetId, destTableId, callback); assert.equal(sample.mocks.table.copy.calledOnce, true); assert.deepEqual( @@ -315,7 +302,7 @@ describe('bigquery:tables', function () { [sample.mocks.table] ); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, sample.mocks.apiResponse]); assert.equal(console.log.calledTwice, true); assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); @@ -327,7 +314,7 @@ describe('bigquery:tables', function () { var callback = sinon.stub(); sample.mocks.table.copy.yields(error); - sample.program.copyTable(srcDataset, srcTable, destDataset, destTable, callback); + sample.program.copyTable(srcDatasetId, srcTableId, destDatasetId, destTableId, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); @@ -337,22 +324,14 @@ describe('bigquery:tables', function () { describe('exportTableToGCS', function () { it('should export to a table', function () { var sample = getSample(); - var options = { - bucket: bucket, - file: file, - dataset: dataset, - table: table, - format: format, - gzip: true - }; var callback = sinon.stub(); - sample.program.exportTableToGCS(options, callback); + sample.program.exportTableToGCS(datasetId, tableId, bucketName, fileName, callback); assert.equal(sample.mocks.table.export.calledOnce, true); - assert.deepEqual(sample.mocks.table.export.firstCall.args.slice(0, -1), [sample.mocks.file, { format: format, gzip: true }]); + assert.deepEqual(sample.mocks.table.export.firstCall.args.slice(0, -1), [sample.mocks.file]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, sample.mocks.apiResponse]); assert.equal(console.log.calledTwice, true); assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); @@ -364,7 +343,7 @@ describe('bigquery:tables', 
function () { var callback = sinon.stub(); example.mocks.table.export.yields(error); - example.program.exportTableToGCS({ format: format }, callback); + example.program.exportTableToGCS(datasetId, tableId, bucketName, fileName, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); @@ -372,44 +351,41 @@ describe('bigquery:tables', function () { }); describe('insertRowsAsStream', function () { - var options = { - file: file, - dataset: dataset, - table: table, - rows: jsonArray - }; - it('should stream-insert rows into a table', function () { - var program = getSample().program; + var sample = getSample(); var callback = sinon.stub(); - program.insertRowsAsStream(options, callback); + sample.program.insertRowsAsStream(datasetId, tableId, jsonArray, callback); + assert.equal(sample.mocks.table.insert.calledOnce, true); + assert.deepEqual(sample.mocks.table.insert.firstCall.args.slice(0, -1), [jsonArray]); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, errorList]); + assert.deepEqual(callback.firstCall.args, [null, errorList, sample.mocks.apiResponse]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Inserted %d row(s)!', jsonArray.length]); }); it('should handle API errors', function () { - var example = getSample(); + var sample = getSample(); var callback = sinon.stub(); var error = new Error('error'); - example.mocks.table.insert.yields(error); + sample.mocks.table.insert.yields(error); - example.program.insertRowsAsStream(options, callback); + sample.program.insertRowsAsStream(datasetId, tableId, jsonArray, callback); assert.equal(callback.calledOnce, true); assert.deepEqual(callback.firstCall.args, [error]); }); it('should handle (per-row) insert errors', function () { - var example = getSample(); + var sample = getSample(); var callback = sinon.stub(); - example.mocks.table.insert.yields(null, errorList); + sample.mocks.table.insert.yields(null, errorList, sample.mocks.apiResponse); - example.program.insertRowsAsStream(options, callback); + sample.program.insertRowsAsStream(datasetId, tableId, jsonArray, callback); assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, errorList]); + assert.deepEqual(callback.firstCall.args, [null, errorList, sample.mocks.apiResponse]); }); }); @@ -418,140 +394,93 @@ describe('bigquery:tables', function () { var program = getSample().program; program.createTable = sinon.stub(); - program.main(['create', dataset, table]); + program.main(['create', datasetId, tableId]); assert.equal(program.createTable.calledOnce, true); - assert.deepEqual(program.createTable.firstCall.args.slice(0, -1), [{ dataset: dataset, table: table }]); + assert.deepEqual(program.createTable.firstCall.args.slice(0, -1), [datasetId, tableId]); }); it('should call listTables', function () { var program = getSample().program; program.listTables = sinon.stub(); - program.main(['list', dataset]); + program.main(['list', datasetId]); assert.equal(program.listTables.calledOnce, true); - assert.deepEqual(program.listTables.firstCall.args.slice(0, -1), [{ dataset: dataset }]); + assert.deepEqual(program.listTables.firstCall.args.slice(0, -1), [datasetId]); }); it('should call browseRows', function () { var program = getSample().program; program.browseRows = sinon.stub(); - program.main(['browse', dataset, table]); + program.main(['browse', datasetId, tableId]); assert.equal(program.browseRows.calledOnce, true); - 
assert.deepEqual(program.browseRows.firstCall.args.slice(0, -1), [dataset, table]); + assert.deepEqual(program.browseRows.firstCall.args.slice(0, -1), [datasetId, tableId]); }); it('should call deleteTable', function () { var program = getSample().program; program.deleteTable = sinon.stub(); - program.main(['delete', dataset, table]); + program.main(['delete', datasetId, tableId]); assert.equal(program.deleteTable.calledOnce, true); - assert.deepEqual(program.deleteTable.firstCall.args.slice(0, -1), [{ dataset: dataset, table: table }]); + assert.deepEqual(program.deleteTable.firstCall.args.slice(0, -1), [datasetId, tableId]); }); - it('should call importFile', function () { + it('should call importLocalFile', function () { var program = getSample().program; - program.importFile = sinon.stub(); - - program.main(['import', dataset, table, file]); - assert.equal(program.importFile.calledOnce, true); - assert.deepEqual(program.importFile.firstCall.args.slice(0, -1), [{ - dataset: dataset, - table: table, - file: file, - bucket: undefined, - format: undefined - }]); + program.importLocalFile = sinon.stub(); + + program.main(['import', datasetId, tableId, fileName]); + assert.equal(program.importLocalFile.calledOnce, true); + assert.deepEqual(program.importLocalFile.firstCall.args.slice(0, -1), [datasetId, tableId, fileName]); + }); + + it('should call importFileFromGCS', function () { + var program = getSample().program; + program.importFileFromGCS = sinon.stub(); + + program.main(['import', datasetId, tableId, fileName, '-b', bucketName]); + assert.equal(program.importFileFromGCS.calledOnce, true); + assert.deepEqual(program.importFileFromGCS.firstCall.args.slice(0, -1), [datasetId, tableId, bucketName, fileName]); }); it('should call copyTable', function () { var program = getSample().program; program.copyTable = sinon.stub(); - program.main(['copy', srcDataset, srcTable, destDataset, destTable]); + program.main(['copy', srcDatasetId, srcTableId, destDatasetId, destTableId]); assert.equal(program.copyTable.calledOnce, true); - assert.deepEqual(program.copyTable.firstCall.args.slice(0, -1), - [srcDataset, srcTable, destDataset, destTable] - ); + assert.deepEqual(program.copyTable.firstCall.args.slice(0, -1), [srcDatasetId, srcTableId, destDatasetId, destTableId]); }); it('should call exportTableToGCS', function () { var program = getSample().program; program.exportTableToGCS = sinon.stub(); - program.main(['export', dataset, table, bucket, file]); + program.main(['export', datasetId, tableId, bucketName, fileName]); assert.equal(program.exportTableToGCS.calledOnce, true); - assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [{ - dataset: dataset, - table: table, - file: file, - bucket: bucket, - format: undefined, - gzip: false - }]); + assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [datasetId, tableId, bucketName, fileName]); }); it('should call insertRowsAsStream', function () { var program = getSample().program; program.insertRowsAsStream = sinon.stub(); - program.main(['insert', dataset, table, validJsonFile]); + program.main(['insert', datasetId, tableId, validJsonFile]); assert.equal(program.insertRowsAsStream.calledOnce, true); - assert.deepEqual( - program.insertRowsAsStream.firstCall.args.slice(0, -1), - [{ rows: jsonArray, dataset: dataset, table: table }] - ); - }); - - it('should recognize --gzip flag', function () { - var program = getSample().program; - program.exportTableToGCS = sinon.stub(); - - program.main(['export', dataset, 
table, bucket, file, '--gzip']); - assert.equal(program.exportTableToGCS.calledOnce, true); - assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [{ - dataset: dataset, - table: table, - file: file, - bucket: bucket, - format: undefined, - gzip: true - }]); - }); - - it('should recognize --format flag', function () { - var program = getSample().program; - program.exportTableToGCS = sinon.stub(); - - program.main(['export', dataset, table, bucket, file, '--format', 'CSV']); - assert.equal(program.exportTableToGCS.calledOnce, true); - assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [{ - dataset: dataset, - table: table, - file: file, - bucket: bucket, - format: 'CSV', - gzip: false - }]); + assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [datasetId, tableId, jsonArray]); }); describe('insert', function () { - var options = { - dataset: dataset, - table: table, - rows: jsonArray - }; - it('should accept valid JSON files', function () { var program = getSample().program; program.insertRowsAsStream = sinon.stub(); - program.main(['insert', dataset, table, validJsonFile]); + program.main(['insert', datasetId, tableId, validJsonFile]); assert.equal(program.insertRowsAsStream.calledOnce, true); - assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [options]); + assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [datasetId, tableId, jsonArray]); }); it('should reject files with invalid JSON', function () { @@ -559,7 +488,7 @@ describe('bigquery:tables', function () { program.insertRowsAsStream = sinon.stub(); assert.throws( - function () { program.main(['insert', dataset, table, invalidJsonFile]); }, + function () { program.main(['insert', datasetId, tableId, invalidJsonFile]); }, /"json_or_file" \(or the file it points to\) is not a valid JSON array\./ ); assert.equal(program.insertRowsAsStream.called, false); @@ -570,7 +499,7 @@ describe('bigquery:tables', function () { program.insertRowsAsStream = sinon.stub(); assert.throws( - function () { program.main(['insert', dataset, table, '']); }, + function () { program.main(['insert', datasetId, tableId, '']); }, /"json_or_file" \(or the file it points to\) is not a valid JSON array\./ ); assert.equal(program.insertRowsAsStream.called, false); @@ -580,9 +509,9 @@ describe('bigquery:tables', function () { var program = getSample().program; program.insertRowsAsStream = sinon.stub(); - program.main(['insert', dataset, table, validJsonString]); + program.main(['insert', datasetId, tableId, validJsonString]); assert.equal(program.insertRowsAsStream.calledOnce, true); - assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [options]); + assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [datasetId, tableId, jsonArray]); }); it('should reject invalid JSON strings', function () { @@ -590,7 +519,7 @@ describe('bigquery:tables', function () { program.insertRowsAsStream = sinon.stub(); assert.throws( - function () { program.main(['insert', dataset, table, invalidJsonString]); }, + function () { program.main(['insert', datasetId, tableId, invalidJsonString]); }, /"json_or_file" \(or the file it points to\) is not a valid JSON array\./ ); assert.equal(program.insertRowsAsStream.called, false); diff --git a/circle.yml b/circle.yml index e54dcbbd73..193eee5c13 100644 --- a/circle.yml +++ b/circle.yml @@ -5,62 +5,62 @@ machine: - redis - memcached -# dependencies: -# cache_directories: -# - 
appengine/analytics/node_modules/ -# - appengine/bower/node_modules/ -# - appengine/cloudsql/node_modules/ -# - appengine/datastore/node_modules/ -# - appengine/disk/node_modules/ -# - appengine/express/node_modules/ -# - appengine/express-memcached-session/node_modules/ -# - appengine/extending-runtime/node_modules/ -# - appengine/geddy/node_modules/ -# - appengine/grunt/node_modules/ -# - appengine/hapi/node_modules/ -# - appengine/hello-world/node_modules/ -# - appengine/koa/node_modules/ -# - appengine/kraken/node_modules/ -# - appengine/logging/node_modules/ -# - appengine/loopback/node_modules/ -# - appengine/mailgun/node_modules/ -# - appengine/mailjet/node_modules/ -# - appengine/memcached/node_modules/ -# - appengine/mongodb/node_modules/ -# - appengine/parse-server/node_modules/ -# - appengine/pubsub/node_modules/ -# - appengine/redis/node_modules/ -# - appengine/restify/node_modules/ -# - appengine/sails/node_modules/ -# - appengine/sendgrid/node_modules/ -# - appengine/static-files/node_modules/ -# - appengine/storage/node_modules/ -# - appengine/twilio/node_modules/ -# - appengine/webpack/node_modules/ -# - appengine/websockets/node_modules/ -# - bigquery/node_modules/ -# - computeengine/node_modules/ -# - datastore/node_modules/ -# - debugger/node_modules/ -# - functions/background/node_modules/ -# - functions/datastore/node_modules/ -# - functions/errorreporting/node_modules/ -# - functions/gcs/node_modules/ -# - functions/helloworld/node_modules/ -# - functions/http/node_modules/ -# - functions/log/node_modules/ -# - functions/ocr/app/node_modules/ -# - functions/pubsub/node_modules/ -# - functions/sendgrid/node_modules/ -# - functions/slack/node_modules/ -# - functions/uuid/node_modules/ -# - language/node_modules/ -# - logging/node_modules/ -# - monitoring/node_modules/ -# - prediction/node_modules/ -# - pubsub/node_modules/ -# - speech/node_modules/ -# - storage/node_modules/ -# - trace/node_modules/ -# - translate/node_modules/ -# - vision/node_modules/ +dependencies: + cache_directories: + - appengine/analytics/node_modules/ + - appengine/bower/node_modules/ + - appengine/cloudsql/node_modules/ + - appengine/datastore/node_modules/ + - appengine/disk/node_modules/ + - appengine/express/node_modules/ + - appengine/express-memcached-session/node_modules/ + - appengine/extending-runtime/node_modules/ + - appengine/geddy/node_modules/ + - appengine/grunt/node_modules/ + - appengine/hapi/node_modules/ + - appengine/hello-world/node_modules/ + - appengine/koa/node_modules/ + - appengine/kraken/node_modules/ + - appengine/logging/node_modules/ + - appengine/loopback/node_modules/ + - appengine/mailgun/node_modules/ + - appengine/mailjet/node_modules/ + - appengine/memcached/node_modules/ + - appengine/mongodb/node_modules/ + - appengine/parse-server/node_modules/ + - appengine/pubsub/node_modules/ + - appengine/redis/node_modules/ + - appengine/restify/node_modules/ + - appengine/sails/node_modules/ + - appengine/sendgrid/node_modules/ + - appengine/static-files/node_modules/ + - appengine/storage/node_modules/ + - appengine/twilio/node_modules/ + - appengine/webpack/node_modules/ + - appengine/websockets/node_modules/ + - bigquery/node_modules/ + - computeengine/node_modules/ + - datastore/node_modules/ + - debugger/node_modules/ + - functions/background/node_modules/ + - functions/datastore/node_modules/ + - functions/errorreporting/node_modules/ + - functions/gcs/node_modules/ + - functions/helloworld/node_modules/ + - functions/http/node_modules/ + - 
functions/log/node_modules/ + - functions/ocr/app/node_modules/ + - functions/pubsub/node_modules/ + - functions/sendgrid/node_modules/ + - functions/slack/node_modules/ + - functions/uuid/node_modules/ + - language/node_modules/ + - logging/node_modules/ + - monitoring/node_modules/ + - prediction/node_modules/ + - pubsub/node_modules/ + - speech/node_modules/ + - storage/node_modules/ + - trace/node_modules/ + - translate/node_modules/ + - vision/node_modules/
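
A note for reviewers trying the refactored surface locally: after this patch every sample exposes plain functions that take positional IDs and a node-style callback instead of a single options object. The sketch below exercises the new signature of exportTableToGCS from bigquery/tables.js; it is illustrative only. The names "my_dataset", "my_table", "my_bucket", and "data.json" are hypothetical placeholders (they appear nowhere in this PR), and the snippet assumes GCLOUD_PROJECT and application default credentials are configured as in the sample README.

    // Sketch: drive the refactored module directly, assuming this patch
    // is applied. Placeholder IDs throughout; nothing here is from the PR.
    var program = require('./bigquery/tables');

    program.exportTableToGCS('my_dataset', 'my_table', 'my_bucket', 'data.json',
      function (err, metadata, apiResponse) {
        if (err) {
          console.error('Export failed:', err);
          return;
        }
        // The refactored samples now pass the job metadata and the raw API
        // response through to the callback, as the updated tests assert.
        console.log('Job state: %s', metadata.status.state);
      });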
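
Likewise for the streaming path: insertRowsAsStream(datasetId, tableId, rows, callback) reports a request-level failure through err and row-level rejections through insertErrors, a distinction the new per-row test covers. A hedged usage sketch follows; the row values are made up, though their shape mirrors the test fixture.

    // Sketch: streaming insert with the new positional signature.
    var program = require('./bigquery/tables');

    var rows = [
      { name: 'foo', age: 27 },
      { name: 'bar', age: 13 }
    ];

    program.insertRowsAsStream('my_dataset', 'my_table', rows,
      function (err, insertErrors, apiResponse) {
        if (err) {
          // The whole request failed (e.g. auth error or missing table).
          return console.error(err);
        }
        if (insertErrors && insertErrors.length > 0) {
          // The request succeeded, but individual rows were rejected.
          console.error('%d row(s) failed to insert', insertErrors.length);
        }
      });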