From 9817a15da4c80425fb77273ed7c9acbe020f0f48 Mon Sep 17 00:00:00 2001
From: Michael Kret <88898367+michael-radency@users.noreply.github.com>
Date: Wed, 19 Apr 2023 15:55:01 +0300
Subject: [PATCH] feat(Google BigQuery Node): Node improvements (#4877)

* :zap: setup
* :zap: finished v2 setup
* :zap: fix return all, fix simplify with nested schema
* :zap: fix for external tables, updated scopes
* :zap: query operation
* :zap: linter fixes
* :zap: fixed not processed errors when inserting, move main loop to execute function to allow bulk request
* :zap: customizable batch size when inserting, improved errors
* :zap: options for mapping input
* :zap: fix for inserting RECORD type
* :zap: updated simplify logic
* :zap: fix for return with selected fields
* :zap: option to return table schema
* :zap: linter fixes
* :zap: fix imports
* :zap: query resource and fixes, rlc for projects
* :zap: removed simplify, added raw output option
* :zap: rlc for tables and datasets, no urls option
* :zap: updated hints and description of query parameter, fix getMany VIEW, multioptions for fields
* :zap: added case when rows are empty
* :zap: linter fixes
* :zap: UI update, one resource
* :zap: fix for output with field named json
* :zap: using jobs instead of queries
* :zap: added error message
* :zap: search for RLCs, fixes
* :zap: json processing
* :zap: removed getAll operation
* :zap: executeQuery update
* :zap: unit test
* :zap: tests setup, fixes
* :zap: tests
* Remove script for checking unused loadOptions

---------

Co-authored-by: agobrech
---
 .../GoogleBigQueryOAuth2Api.credentials.ts | 6 +-
 .../Google/BigQuery/GoogleBigQuery.node.ts | 313 ++----------------
 .../test/v2/node/executeQuery.test.ts | 79 +++++
 .../test/v2/node/executeQuery.workflow.json | 62 ++++
 .../test/v2/node/insert.autoMapMode.test.ts | 85 +++++
 .../v2/node/insert.autoMapMode.workflow.json | 133 ++++++++
 .../test/v2/node/insert.manualMode.test.ts | 82 +++++
 .../v2/node/insert.manualMode.workflow.json | 99 ++++++
 .../BigQuery/test/v2/utils/utils.test.ts | 265 +++++++++++++++
 .../BigQuery/{ => v1}/GenericFunctions.ts | 0
 .../BigQuery/v1/GoogleBigQueryV1.node.ts | 310 +++++++++++++++++
 .../BigQuery/{ => v1}/RecordDescription.ts | 0
 .../BigQuery/v2/GoogleBigQueryV2.node.ts | 30 ++
 .../commonDescriptions/RLC.description.ts | 121 +++++++
 .../v2/actions/database/Database.resource.ts | 64 ++++
 .../database/executeQuery.operation.ts | 299 +++++++++++++++++
 .../v2/actions/database/insert.operation.ts | 285 ++++++++++++++++
 .../Google/BigQuery/v2/actions/node.type.ts | 9 +
 .../Google/BigQuery/v2/actions/router.ts | 28 ++
 .../BigQuery/v2/actions/versionDescription.ts | 72 ++++
 .../Google/BigQuery/v2/helpers/interfaces.ts | 31 ++
 .../nodes/Google/BigQuery/v2/helpers/utils.ts | 135 ++++++++
 .../nodes/Google/BigQuery/v2/methods/index.ts | 2 +
 .../Google/BigQuery/v2/methods/listSearch.ts | 115 +++++++
 .../Google/BigQuery/v2/methods/loadOptions.ts | 53 +++
 .../Google/BigQuery/v2/transport/index.ts | 136 ++++++++
 packages/nodes-base/package.json | 2 +-
 27 files changed, 2525 insertions(+), 291 deletions(-)
 create mode 100644 packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.test.ts
 create mode 100644 packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.workflow.json
 create mode 100644 packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.test.ts
 create mode 100644 packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.workflow.json
 create mode 100644
packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.test.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.workflow.json create mode 100644 packages/nodes-base/nodes/Google/BigQuery/test/v2/utils/utils.test.ts rename packages/nodes-base/nodes/Google/BigQuery/{ => v1}/GenericFunctions.ts (100%) create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v1/GoogleBigQueryV1.node.ts rename packages/nodes-base/nodes/Google/BigQuery/{ => v1}/RecordDescription.ts (100%) create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/GoogleBigQueryV2.node.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/actions/commonDescriptions/RLC.description.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/Database.resource.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/executeQuery.operation.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/insert.operation.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/actions/node.type.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/actions/router.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/actions/versionDescription.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/helpers/interfaces.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/helpers/utils.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/methods/index.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/methods/listSearch.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/methods/loadOptions.ts create mode 100644 packages/nodes-base/nodes/Google/BigQuery/v2/transport/index.ts diff --git a/packages/nodes-base/credentials/GoogleBigQueryOAuth2Api.credentials.ts b/packages/nodes-base/credentials/GoogleBigQueryOAuth2Api.credentials.ts index 1ad98e7df4403..0564048612f6c 100644 --- a/packages/nodes-base/credentials/GoogleBigQueryOAuth2Api.credentials.ts +++ b/packages/nodes-base/credentials/GoogleBigQueryOAuth2Api.credentials.ts @@ -1,6 +1,10 @@ import type { ICredentialType, INodeProperties } from 'n8n-workflow'; -const scopes = ['https://www.googleapis.com/auth/bigquery']; +const scopes = [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/drive', +]; export class GoogleBigQueryOAuth2Api implements ICredentialType { name = 'googleBigQueryOAuth2Api'; diff --git a/packages/nodes-base/nodes/Google/BigQuery/GoogleBigQuery.node.ts b/packages/nodes-base/nodes/Google/BigQuery/GoogleBigQuery.node.ts index f4bd44385fc2f..3059d5699e84a 100644 --- a/packages/nodes-base/nodes/Google/BigQuery/GoogleBigQuery.node.ts +++ b/packages/nodes-base/nodes/Google/BigQuery/GoogleBigQuery.node.ts @@ -1,291 +1,26 @@ -import type { - IExecuteFunctions, - IDataObject, - ILoadOptionsFunctions, - INodeExecutionData, - INodePropertyOptions, - INodeType, - INodeTypeDescription, - JsonObject, -} from 'n8n-workflow'; -import { NodeApiError } from 'n8n-workflow'; - -import { googleApiRequest, googleApiRequestAllItems, simplify } from './GenericFunctions'; - -import { recordFields, recordOperations } from './RecordDescription'; - -import { v4 as uuid } from 'uuid'; - -export class GoogleBigQuery implements INodeType { - description: INodeTypeDescription = { - displayName: 'Google BigQuery', - name: 'googleBigQuery', - icon: 
'file:googleBigQuery.svg', - group: ['input'], - version: 1, - subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}', - description: 'Consume Google BigQuery API', - defaults: { - name: 'Google BigQuery', - }, - inputs: ['main'], - outputs: ['main'], - credentials: [ - { - name: 'googleApi', - required: true, - displayOptions: { - show: { - authentication: ['serviceAccount'], - }, - }, - }, - { - name: 'googleBigQueryOAuth2Api', - required: true, - displayOptions: { - show: { - authentication: ['oAuth2'], - }, - }, - }, - ], - properties: [ - { - displayName: 'Authentication', - name: 'authentication', - type: 'options', - noDataExpression: true, - options: [ - { - // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased - name: 'OAuth2 (recommended)', - value: 'oAuth2', - }, - { - name: 'Service Account', - value: 'serviceAccount', - }, - ], - default: 'oAuth2', - }, - { - displayName: 'Resource', - name: 'resource', - type: 'options', - noDataExpression: true, - options: [ - { - name: 'Record', - value: 'record', - }, - ], - default: 'record', - }, - ...recordOperations, - ...recordFields, - ], - }; - - methods = { - loadOptions: { - async getProjects(this: ILoadOptionsFunctions): Promise { - const returnData: INodePropertyOptions[] = []; - const { projects } = await googleApiRequest.call(this, 'GET', '/v2/projects'); - for (const project of projects) { - returnData.push({ - name: project.friendlyName as string, - value: project.id, - }); - } - return returnData; - }, - async getDatasets(this: ILoadOptionsFunctions): Promise { - const projectId = this.getCurrentNodeParameter('projectId'); - const returnData: INodePropertyOptions[] = []; - const { datasets } = await googleApiRequest.call( - this, - 'GET', - `/v2/projects/${projectId}/datasets`, - ); - for (const dataset of datasets) { - returnData.push({ - name: dataset.datasetReference.datasetId as string, - value: dataset.datasetReference.datasetId, - }); - } - return returnData; - }, - async getTables(this: ILoadOptionsFunctions): Promise { - const projectId = this.getCurrentNodeParameter('projectId'); - const datasetId = this.getCurrentNodeParameter('datasetId'); - const returnData: INodePropertyOptions[] = []; - const { tables } = await googleApiRequest.call( - this, - 'GET', - `/v2/projects/${projectId}/datasets/${datasetId}/tables`, - ); - for (const table of tables) { - returnData.push({ - name: table.tableReference.tableId as string, - value: table.tableReference.tableId, - }); - } - return returnData; - }, - }, - }; - - async execute(this: IExecuteFunctions): Promise { - const items = this.getInputData(); - const returnData: INodeExecutionData[] = []; - const length = items.length; - const qs: IDataObject = {}; - let responseData; - const resource = this.getNodeParameter('resource', 0); - const operation = this.getNodeParameter('operation', 0); - - if (resource === 'record') { - // ********************************************************************* - // record - // ********************************************************************* - - if (operation === 'create') { - // ---------------------------------- - // record: create - // ---------------------------------- - - // https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll - - const projectId = this.getNodeParameter('projectId', 0) as string; - const datasetId = this.getNodeParameter('datasetId', 0) as string; - const tableId = this.getNodeParameter('tableId', 0) as string; - const rows: IDataObject[] = []; - const 
body: IDataObject = {}; - - for (let i = 0; i < length; i++) { - const options = this.getNodeParameter('options', i); - Object.assign(body, options); - if (body.traceId === undefined) { - body.traceId = uuid(); - } - const columns = this.getNodeParameter('columns', i) as string; - const columnList = columns.split(',').map((column) => column.trim()); - const record: IDataObject = {}; - - for (const key of Object.keys(items[i].json)) { - if (columnList.includes(key)) { - record[`${key}`] = items[i].json[key]; - } - } - rows.push({ json: record }); - } - - body.rows = rows; - - try { - responseData = await googleApiRequest.call( - this, - 'POST', - `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/insertAll`, - body, - ); - - const executionData = this.helpers.constructExecutionMetaData( - this.helpers.returnJsonArray(responseData as IDataObject), - { itemData: { item: 0 } }, - ); - returnData.push(...executionData); - } catch (error) { - if (this.continueOnFail()) { - const executionErrorData = this.helpers.constructExecutionMetaData( - this.helpers.returnJsonArray({ error: error.message }), - { itemData: { item: 0 } }, - ); - returnData.push(...executionErrorData); - } - throw new NodeApiError(this.getNode(), error as JsonObject, { itemIndex: 0 }); - } - } else if (operation === 'getAll') { - // ---------------------------------- - // record: getAll - // ---------------------------------- - - // https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/get - - const returnAll = this.getNodeParameter('returnAll', 0); - const projectId = this.getNodeParameter('projectId', 0) as string; - const datasetId = this.getNodeParameter('datasetId', 0) as string; - const tableId = this.getNodeParameter('tableId', 0) as string; - const simple = this.getNodeParameter('simple', 0) as boolean; - let fields; - - if (simple) { - const { schema } = await googleApiRequest.call( - this, - 'GET', - `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}`, - {}, - ); - fields = schema.fields.map((field: IDataObject) => field.name); - } - - for (let i = 0; i < length; i++) { - try { - const options = this.getNodeParameter('options', i); - Object.assign(qs, options); - - if (qs.selectedFields) { - fields = (qs.selectedFields as string).split(','); - } - - if (returnAll) { - responseData = await googleApiRequestAllItems.call( - this, - 'rows', - 'GET', - `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/data`, - {}, - qs, - ); - } else { - qs.maxResults = this.getNodeParameter('limit', i); - responseData = await googleApiRequest.call( - this, - 'GET', - `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/data`, - {}, - qs, - ); - } - - if (!returnAll) { - responseData = responseData.rows; - } - responseData = simple - ? 
simplify(responseData as IDataObject[], fields as string[]) - : responseData; - - const executionData = this.helpers.constructExecutionMetaData( - this.helpers.returnJsonArray(responseData as IDataObject[]), - { itemData: { item: i } }, - ); - returnData.push(...executionData); - } catch (error) { - if (this.continueOnFail()) { - const executionErrorData = this.helpers.constructExecutionMetaData( - this.helpers.returnJsonArray({ error: error.message }), - { itemData: { item: i } }, - ); - returnData.push(...executionErrorData); - continue; - } - throw new NodeApiError(this.getNode(), error as JsonObject, { itemIndex: i }); - } - } - } - } - - return this.prepareOutputData(returnData); +import type { INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow'; +import { VersionedNodeType } from 'n8n-workflow'; + +import { GoogleBigQueryV1 } from './v1/GoogleBigQueryV1.node'; +import { GoogleBigQueryV2 } from './v2/GoogleBigQueryV2.node'; + +export class GoogleBigQuery extends VersionedNodeType { + constructor() { + const baseDescription: INodeTypeBaseDescription = { + displayName: 'Google BigQuery', + name: 'googleBigQuery', + icon: 'file:googleBigQuery.svg', + group: ['input'], + subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}', + description: 'Consume Google BigQuery API', + defaultVersion: 2, + }; + + const nodeVersions: IVersionedNodeType['nodeVersions'] = { + 1: new GoogleBigQueryV1(baseDescription), + 2: new GoogleBigQueryV2(baseDescription), + }; + + super(nodeVersions, baseDescription); } } diff --git a/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.test.ts b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.test.ts new file mode 100644 index 0000000000000..7991f36da60e5 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.test.ts @@ -0,0 +1,79 @@ +import type { INodeTypes } from 'n8n-workflow'; + +import { setup, workflowToTests } from '../../../../../../test/nodes/Helpers'; +import type { WorkflowTestData } from '../../../../../../test/nodes/types'; +import { executeWorkflow } from '../../../../../../test/nodes/ExecuteWorkflow'; + +import * as transport from '../../../v2/transport'; + +import nock from 'nock'; + +jest.mock('../../../v2/transport', () => { + const originalModule = jest.requireActual('../../../v2/transport'); + return { + ...originalModule, + googleApiRequest: jest.fn(async (method: string, resource: string) => { + if (resource === '/v2/projects/test-project/jobs' && method === 'POST') { + return Promise.resolve({ + jobReference: { + jobId: 'job_123', + }, + status: { + state: 'DONE', + }, + }); + } + if (resource === '/v2/projects/test-project/queries/job_123' && method === 'GET') { + return Promise.resolve({}); + } + return Promise.resolve(); + }), + // googleApiRequestAllItems: jest.fn(async () => Promise.resolve()), + }; +}); + +describe('Test Google BigQuery V2, executeQuery', () => { + const workflows = ['nodes/Google/BigQuery/test/v2/node/executeQuery.workflow.json']; + const tests = workflowToTests(workflows); + + beforeAll(() => { + nock.disableNetConnect(); + }); + + afterAll(() => { + nock.restore(); + jest.unmock('../../../v2/transport'); + }); + + const nodeTypes = setup(tests); + + const testNode = async (testData: WorkflowTestData, types: INodeTypes) => { + const { result } = await executeWorkflow(testData, types); + + expect(transport.googleApiRequest).toHaveBeenCalledTimes(2); + expect(transport.googleApiRequest).toHaveBeenCalledWith( + 'POST', + 
'/v2/projects/test-project/jobs', + { + configuration: { + query: { + query: 'SELECT * FROM bigquery_node_dev_test_dataset.test_json;', + useLegacySql: false, + }, + }, + }, + ); + expect(transport.googleApiRequest).toHaveBeenCalledWith( + 'GET', + '/v2/projects/test-project/queries/job_123', + undefined, + {}, + ); + + expect(result.finished).toEqual(true); + }; + + for (const testData of tests) { + test(testData.description, async () => testNode(testData, nodeTypes)); + } +}); diff --git a/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.workflow.json b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.workflow.json new file mode 100644 index 0000000000000..d65238fbc89fc --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/executeQuery.workflow.json @@ -0,0 +1,62 @@ +{ + "name": "My workflow 12", + "nodes": [ + { + "parameters": {}, + "id": "7db7d51a-83c2-4aa0-a736-9c3d1c031b60", + "name": "When clicking \"Execute Workflow\"", + "type": "n8n-nodes-base.manualTrigger", + "typeVersion": 1, + "position": [360, 340] + }, + { + "parameters": { + "authentication": "serviceAccount", + "projectId": { + "__rl": true, + "value": "test-project", + "mode": "list", + "cachedResultName": "test-project", + "cachedResultUrl": "https://console.cloud.google.com/bigquery?project=test-project" + }, + "sqlQuery": "SELECT * FROM bigquery_node_dev_test_dataset.test_json;", + "options": {} + }, + "id": "83d00275-0f98-4d5e-a3d6-bbca940ff8ac", + "name": "Google BigQuery", + "type": "n8n-nodes-base.googleBigQuery", + "typeVersion": 2, + "position": [620, 340], + "credentials": { + "googleApi": { + "id": "66", + "name": "Google account 5" + } + } + } + ], + "pinData": { + "Google BigQuery": [] + }, + "connections": { + "When clicking \"Execute Workflow\"": { + "main": [ + [ + { + "node": "Google BigQuery", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": {}, + "versionId": "be2fc126-5d71-4e86-9a4e-eb62ad266860", + "id": "156", + "meta": { + "instanceId": "36203ea1ce3cef713fa25999bd9874ae26b9e4c2c3a90a365f2882a154d031d0" + }, + "tags": [] +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.test.ts b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.test.ts new file mode 100644 index 0000000000000..14d5771f1b792 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.test.ts @@ -0,0 +1,85 @@ +import type { INodeTypes } from 'n8n-workflow'; + +import { setup, workflowToTests } from '../../../../../../test/nodes/Helpers'; +import type { WorkflowTestData } from '../../../../../../test/nodes/types'; +import { executeWorkflow } from '../../../../../../test/nodes/ExecuteWorkflow'; +import nock from 'nock'; + +import * as transport from '../../../v2/transport'; + +jest.mock('../../../v2/transport', () => { + const originalModule = jest.requireActual('../../../v2/transport'); + return { + ...originalModule, + googleApiRequest: jest.fn(async (method: string, resource: string) => { + if ( + resource === + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/num_text' && + method === 'GET' + ) { + return Promise.resolve({ + schema: { + fields: [ + { name: 'id', type: 'INT' }, + { name: 'test', type: 'STRING' }, + ], + }, + }); + } + if ( + resource === + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/num_text/insertAll' && + method === 'POST' + ) { + return Promise.resolve({ kind: 
'bigquery#tableDataInsertAllResponse' }); + } + return Promise.resolve(); + }), + googleApiRequestAllItems: jest.fn(async () => Promise.resolve()), + }; +}); + +describe('Test Google BigQuery V2, insert auto map', () => { + const workflows = ['nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.workflow.json']; + const tests = workflowToTests(workflows); + + beforeAll(() => { + nock.disableNetConnect(); + }); + + afterAll(() => { + nock.restore(); + jest.unmock('../../../v2/transport'); + }); + + const nodeTypes = setup(tests); + + const testNode = async (testData: WorkflowTestData, types: INodeTypes) => { + const { result } = await executeWorkflow(testData, types); + + expect(transport.googleApiRequest).toHaveBeenCalledTimes(2); + expect(transport.googleApiRequest).toHaveBeenCalledWith( + 'GET', + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/num_text', + {}, + ); + expect(transport.googleApiRequest).toHaveBeenCalledWith( + 'POST', + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/num_text/insertAll', + { + rows: [ + { json: { id: 1, test: '111' } }, + { json: { id: 2, test: '222' } }, + { json: { id: 3, test: '333' } }, + ], + traceId: 'trace_id', + }, + ); + + expect(result.finished).toEqual(true); + }; + + for (const testData of tests) { + test(testData.description, async () => testNode(testData, nodeTypes)); + } +}); diff --git a/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.workflow.json b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.workflow.json new file mode 100644 index 0000000000000..e3e6f10f7632f --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.autoMapMode.workflow.json @@ -0,0 +1,133 @@ +{ + "name": "My workflow 12", + "nodes": [ + { + "parameters": {}, + "id": "7db7d51a-83c2-4aa0-a736-9c3d1c031b60", + "name": "When clicking \"Execute Workflow\"", + "type": "n8n-nodes-base.manualTrigger", + "typeVersion": 1, + "position": [20, 340] + }, + { + "parameters": { + "authentication": "serviceAccount", + "operation": "insert", + "projectId": { + "__rl": true, + "value": "test-project", + "mode": "list", + "cachedResultName": "test-project", + "cachedResultUrl": "https://console.cloud.google.com/bigquery?project=test-project" + }, + "datasetId": { + "__rl": true, + "value": "bigquery_node_dev_test_dataset", + "mode": "list", + "cachedResultName": "bigquery_node_dev_test_dataset" + }, + "tableId": { + "__rl": true, + "value": "num_text", + "mode": "list", + "cachedResultName": "num_text" + }, + "options": { + "traceId": "trace_id" + } + }, + "id": "83d00275-0f98-4d5e-a3d6-bbca940ff8ac", + "name": "Google BigQuery", + "type": "n8n-nodes-base.googleBigQuery", + "typeVersion": 2, + "position": [500, 340], + "credentials": { + "googleApi": { + "id": "66", + "name": "Google account 5" + } + } + }, + { + "parameters": { + "jsCode": "return [\n{\n\"id\":\n1,\n\"test\":\n\"111\"\n},\n{\n\"id\":\n2,\n\"test\":\n\"222\"\n},\n{\n\"id\":\n3,\n\"test\":\n\"333\"\n},\n];" + }, + "id": "11d06660-cbd3-4bd2-9619-68e82438a0e3", + "name": "Code", + "type": "n8n-nodes-base.code", + "typeVersion": 1, + "position": [240, 340] + } + ], + "pinData": { + "Code": [ + { + "json": { + "id": 1, + "test": "111" + } + }, + { + "json": { + "id": 2, + "test": "222" + } + }, + { + "json": { + "id": 3, + "test": "333" + } + } + ], + "Google BigQuery": [ + { + "json": { + "kind": "bigquery#tableDataInsertAllResponse" + } + }, + { + "json": { + "kind": 
"bigquery#tableDataInsertAllResponse" + } + }, + { + "json": { + "kind": "bigquery#tableDataInsertAllResponse" + } + } + ] + }, + "connections": { + "When clicking \"Execute Workflow\"": { + "main": [ + [ + { + "node": "Code", + "type": "main", + "index": 0 + } + ] + ] + }, + "Code": { + "main": [ + [ + { + "node": "Google BigQuery", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": {}, + "versionId": "30d3e38a-b5a4-4999-816d-7c05a68f31c8", + "id": "156", + "meta": { + "instanceId": "36203ea1ce3cef713fa25999bd9874ae26b9e4c2c3a90a365f2882a154d031d0" + }, + "tags": [] +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.test.ts b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.test.ts new file mode 100644 index 0000000000000..c4181e9495452 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.test.ts @@ -0,0 +1,82 @@ +import type { INodeTypes } from 'n8n-workflow'; + +import { setup, workflowToTests } from '../../../../../../test/nodes/Helpers'; +import type { WorkflowTestData } from '../../../../../../test/nodes/types'; +import { executeWorkflow } from '../../../../../../test/nodes/ExecuteWorkflow'; +import nock from 'nock'; + +import * as transport from '../../../v2/transport'; + +jest.mock('../../../v2/transport', () => { + const originalModule = jest.requireActual('../../../v2/transport'); + return { + ...originalModule, + googleApiRequest: jest.fn(async (method: string, resource: string) => { + if ( + resource === + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/test_json' && + method === 'GET' + ) { + return Promise.resolve({ + schema: { + fields: [ + { name: 'json', type: 'JSON' }, + { name: 'name with space', type: 'STRING' }, + { name: 'active', type: 'BOOLEAN' }, + ], + }, + }); + } + if ( + resource === + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/test_json/insertAll' && + method === 'POST' + ) { + return Promise.resolve({ kind: 'bigquery#tableDataInsertAllResponse' }); + } + return Promise.resolve(); + }), + googleApiRequestAllItems: jest.fn(async () => Promise.resolve()), + }; +}); + +describe('Test Google BigQuery V2, insert define manualy', () => { + const workflows = ['nodes/Google/BigQuery/test/v2/node/insert.manualMode.workflow.json']; + const tests = workflowToTests(workflows); + + beforeAll(() => { + nock.disableNetConnect(); + }); + + afterAll(() => { + nock.restore(); + jest.unmock('../../../v2/transport'); + }); + + const nodeTypes = setup(tests); + + const testNode = async (testData: WorkflowTestData, types: INodeTypes) => { + const { result } = await executeWorkflow(testData, types); + + expect(transport.googleApiRequest).toHaveBeenCalledTimes(2); + expect(transport.googleApiRequest).toHaveBeenCalledWith( + 'GET', + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/test_json', + {}, + ); + expect(transport.googleApiRequest).toHaveBeenCalledWith( + 'POST', + '/v2/projects/test-project/datasets/bigquery_node_dev_test_dataset/tables/test_json/insertAll', + { + rows: [{ json: { active: 'true', json: '{"test": 1}', 'name with space': 'some name' } }], + traceId: 'trace_id', + }, + ); + + expect(result.finished).toEqual(true); + }; + + for (const testData of tests) { + test(testData.description, async () => testNode(testData, nodeTypes)); + } +}); diff --git a/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.workflow.json 
b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.workflow.json new file mode 100644 index 0000000000000..88b66e1e2a1cb --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/test/v2/node/insert.manualMode.workflow.json @@ -0,0 +1,99 @@ +{ + "name": "My workflow 12", + "nodes": [ + { + "parameters": {}, + "id": "7db7d51a-83c2-4aa0-a736-9c3d1c031b60", + "name": "When clicking \"Execute Workflow\"", + "type": "n8n-nodes-base.manualTrigger", + "typeVersion": 1, + "position": [360, 340] + }, + { + "parameters": { + "authentication": "serviceAccount", + "operation": "insert", + "projectId": { + "__rl": true, + "value": "test-project", + "mode": "list", + "cachedResultName": "test-project", + "cachedResultUrl": "https://console.cloud.google.com/bigquery?project=test-project" + }, + "datasetId": { + "__rl": true, + "value": "bigquery_node_dev_test_dataset", + "mode": "list", + "cachedResultName": "bigquery_node_dev_test_dataset" + }, + "tableId": { + "__rl": true, + "value": "test_json", + "mode": "list", + "cachedResultName": "test_json" + }, + "dataMode": "define", + "fieldsUi": { + "values": [ + { + "fieldId": "active", + "fieldValue": "true" + }, + { + "fieldId": "name with space", + "fieldValue": "some name" + }, + { + "fieldId": "json", + "fieldValue": "{\"test\": 1}" + } + ] + }, + "options": { + "traceId": "trace_id" + } + }, + "id": "83d00275-0f98-4d5e-a3d6-bbca940ff8ac", + "name": "Google BigQuery", + "type": "n8n-nodes-base.googleBigQuery", + "typeVersion": 2, + "position": [620, 340], + "credentials": { + "googleApi": { + "id": "66", + "name": "Google account 5" + } + } + } + ], + "pinData": { + "Google BigQuery": [ + { + "json": { + "kind": "bigquery#tableDataInsertAllResponse" + } + } + ] + }, + "connections": { + "When clicking \"Execute Workflow\"": { + "main": [ + [ + { + "node": "Google BigQuery", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": {}, + "versionId": "abd49f26-184d-4f9b-95f0-389ea20df809", + "id": "156", + "meta": { + "instanceId": "36203ea1ce3cef713fa25999bd9874ae26b9e4c2c3a90a365f2882a154d031d0" + }, + "tags": [] +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/test/v2/utils/utils.test.ts b/packages/nodes-base/nodes/Google/BigQuery/test/v2/utils/utils.test.ts new file mode 100644 index 0000000000000..0af16e8eb2dfb --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/test/v2/utils/utils.test.ts @@ -0,0 +1,265 @@ +import type { IDataObject } from 'n8n-workflow'; +import { prepareOutput } from '../../../v2/helpers/utils'; + +describe('Google BigQuery v2 Utils', () => { + it('should prepareOutput', () => { + const response: IDataObject = { + kind: 'bigquery#getQueryResultsResponse', + etag: 'e_tag', + schema: { + fields: [ + { + name: 'nodes', + type: 'RECORD', + mode: 'REPEATED', + fields: [ + { + name: 'webhookId', + type: 'STRING', + mode: 'NULLABLE', + }, + { + name: 'position', + type: 'INTEGER', + mode: 'REPEATED', + }, + { + name: 'name', + type: 'STRING', + mode: 'NULLABLE', + }, + { + name: 'typeVersion', + type: 'INTEGER', + mode: 'NULLABLE', + }, + { + name: 'credentials', + type: 'RECORD', + mode: 'NULLABLE', + fields: [ + { + name: 'zendeskApi', + type: 'RECORD', + mode: 'NULLABLE', + fields: [ + { + name: 'name', + type: 'STRING', + mode: 'NULLABLE', + }, + { + name: 'id', + type: 'INTEGER', + mode: 'NULLABLE', + }, + ], + }, + ], + }, + { + name: 'type', + type: 'STRING', + mode: 'NULLABLE', + }, + { + name: 'parameters', + type: 'RECORD', + mode: 'NULLABLE', + 
fields: [ + { + name: 'conditions', + type: 'RECORD', + mode: 'NULLABLE', + fields: [ + { + name: 'all', + type: 'RECORD', + mode: 'REPEATED', + fields: [ + { + name: 'value', + type: 'STRING', + mode: 'NULLABLE', + }, + ], + }, + ], + }, + { + name: 'options', + type: 'RECORD', + mode: 'NULLABLE', + fields: [ + { + name: 'fields', + type: 'STRING', + mode: 'REPEATED', + }, + ], + }, + ], + }, + ], + }, + ], + }, + jobReference: { + projectId: 'project_id', + jobId: 'job_ref', + location: 'US', + }, + totalRows: '1', + rows: [ + { + f: [ + { + v: [ + { + v: { + f: [ + { + v: 'web_hook_id', + }, + { + v: [ + { + v: '100', + }, + { + v: '100', + }, + ], + }, + { + v: 'Zendesk Trigger', + }, + { + v: '1', + }, + { + v: { + f: [ + { + v: { + f: [ + { + v: 'Zendesk account', + }, + { + v: '8', + }, + ], + }, + }, + ], + }, + }, + { + v: 'n8n-nodes-base.zendeskTrigger', + }, + { + v: { + f: [ + { + v: { + f: [ + { + v: [ + { + v: { + f: [ + { + v: 'closed', + }, + ], + }, + }, + ], + }, + ], + }, + }, + { + v: { + f: [ + { + v: [ + { + v: 'ticket.title', + }, + { + v: 'ticket.description', + }, + ], + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + ], + }, + ], + totalBytesProcessed: '0', + jobComplete: true, + cacheHit: true, + }; + const returnData = prepareOutput(response, 0, false, false); + + expect(returnData).toBeDefined(); + // expect(returnData).toHaveProperty('nodes'); + expect(returnData).toEqual([ + { + json: { + nodes: [ + { + webhookId: 'web_hook_id', + position: ['100', '100'], + name: 'Zendesk Trigger', + typeVersion: '1', + credentials: [ + { + zendeskApi: [ + { + name: 'Zendesk account', + id: '8', + }, + ], + }, + ], + type: 'n8n-nodes-base.zendeskTrigger', + parameters: [ + { + conditions: [ + { + all: [ + { + value: 'closed', + }, + ], + }, + ], + options: [ + { + fields: ['ticket.title', 'ticket.description'], + }, + ], + }, + ], + }, + ], + }, + pairedItem: { + item: 0, + }, + }, + ]); + }); +}); diff --git a/packages/nodes-base/nodes/Google/BigQuery/GenericFunctions.ts b/packages/nodes-base/nodes/Google/BigQuery/v1/GenericFunctions.ts similarity index 100% rename from packages/nodes-base/nodes/Google/BigQuery/GenericFunctions.ts rename to packages/nodes-base/nodes/Google/BigQuery/v1/GenericFunctions.ts diff --git a/packages/nodes-base/nodes/Google/BigQuery/v1/GoogleBigQueryV1.node.ts b/packages/nodes-base/nodes/Google/BigQuery/v1/GoogleBigQueryV1.node.ts new file mode 100644 index 0000000000000..49ed5b9599028 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v1/GoogleBigQueryV1.node.ts @@ -0,0 +1,310 @@ +/* eslint-disable n8n-nodes-base/node-filename-against-convention */ +import type { IExecuteFunctions } from 'n8n-core'; + +import type { + IDataObject, + ILoadOptionsFunctions, + INodeExecutionData, + INodePropertyOptions, + INodeType, + INodeTypeBaseDescription, + INodeTypeDescription, + JsonObject, +} from 'n8n-workflow'; + +import { NodeApiError } from 'n8n-workflow'; + +import { googleApiRequest, googleApiRequestAllItems, simplify } from './GenericFunctions'; + +import { recordFields, recordOperations } from './RecordDescription'; + +import { v4 as uuid } from 'uuid'; + +const versionDescription: INodeTypeDescription = { + displayName: 'Google BigQuery', + name: 'googleBigQuery', + icon: 'file:googleBigQuery.svg', + group: ['input'], + version: 1, + subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}', + description: 'Consume Google BigQuery API', + defaults: { + name: 'Google BigQuery', + }, + inputs: ['main'], + outputs: 
['main'], + credentials: [ + { + name: 'googleApi', + required: true, + displayOptions: { + show: { + authentication: ['serviceAccount'], + }, + }, + }, + { + name: 'googleBigQueryOAuth2Api', + required: true, + displayOptions: { + show: { + authentication: ['oAuth2'], + }, + }, + }, + ], + properties: [ + { + displayName: 'Version 1', + name: 'version1', + type: 'notice', + default: '', + }, + { + displayName: 'Authentication', + name: 'authentication', + type: 'options', + noDataExpression: true, + options: [ + { + // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased + name: 'OAuth2 (recommended)', + value: 'oAuth2', + }, + { + name: 'Service Account', + value: 'serviceAccount', + }, + ], + default: 'oAuth2', + }, + { + displayName: 'Resource', + name: 'resource', + type: 'options', + noDataExpression: true, + options: [ + { + name: 'Record', + value: 'record', + }, + ], + default: 'record', + }, + ...recordOperations, + ...recordFields, + ], +}; + +export class GoogleBigQueryV1 implements INodeType { + description: INodeTypeDescription; + + constructor(baseDescription: INodeTypeBaseDescription) { + this.description = { + ...baseDescription, + ...versionDescription, + }; + } + + methods = { + loadOptions: { + async getProjects(this: ILoadOptionsFunctions): Promise { + const returnData: INodePropertyOptions[] = []; + const { projects } = await googleApiRequest.call(this, 'GET', '/v2/projects'); + for (const project of projects) { + returnData.push({ + name: project.friendlyName as string, + value: project.id, + }); + } + return returnData; + }, + async getDatasets(this: ILoadOptionsFunctions): Promise { + const projectId = this.getCurrentNodeParameter('projectId'); + const returnData: INodePropertyOptions[] = []; + const { datasets } = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets`, + ); + for (const dataset of datasets) { + returnData.push({ + name: dataset.datasetReference.datasetId as string, + value: dataset.datasetReference.datasetId, + }); + } + return returnData; + }, + async getTables(this: ILoadOptionsFunctions): Promise { + const projectId = this.getCurrentNodeParameter('projectId'); + const datasetId = this.getCurrentNodeParameter('datasetId'); + const returnData: INodePropertyOptions[] = []; + const { tables } = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets/${datasetId}/tables`, + ); + for (const table of tables) { + returnData.push({ + name: table.tableReference.tableId as string, + value: table.tableReference.tableId, + }); + } + return returnData; + }, + }, + }; + + async execute(this: IExecuteFunctions): Promise { + const items = this.getInputData(); + const returnData: INodeExecutionData[] = []; + const length = items.length; + const qs: IDataObject = {}; + let responseData; + const resource = this.getNodeParameter('resource', 0); + const operation = this.getNodeParameter('operation', 0); + + if (resource === 'record') { + // ********************************************************************* + // record + // ********************************************************************* + + if (operation === 'create') { + // ---------------------------------- + // record: create + // ---------------------------------- + + // https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll + + const projectId = this.getNodeParameter('projectId', 0) as string; + const datasetId = this.getNodeParameter('datasetId', 0) as string; + const tableId = 
this.getNodeParameter('tableId', 0) as string; + const rows: IDataObject[] = []; + const body: IDataObject = {}; + + for (let i = 0; i < length; i++) { + const options = this.getNodeParameter('options', i); + Object.assign(body, options); + if (body.traceId === undefined) { + body.traceId = uuid(); + } + const columns = this.getNodeParameter('columns', i) as string; + const columnList = columns.split(',').map((column) => column.trim()); + const record: IDataObject = {}; + + for (const key of Object.keys(items[i].json)) { + if (columnList.includes(key)) { + record[`${key}`] = items[i].json[key]; + } + } + rows.push({ json: record }); + } + + body.rows = rows; + + try { + responseData = await googleApiRequest.call( + this, + 'POST', + `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/insertAll`, + body, + ); + + const executionData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray(responseData as IDataObject[]), + { itemData: { item: 0 } }, + ); + returnData.push(...executionData); + } catch (error) { + if (this.continueOnFail()) { + const executionErrorData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray({ error: error.message }), + { itemData: { item: 0 } }, + ); + returnData.push(...executionErrorData); + } + throw new NodeApiError(this.getNode(), error as JsonObject, { itemIndex: 0 }); + } + } else if (operation === 'getAll') { + // ---------------------------------- + // record: getAll + // ---------------------------------- + + // https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/get + + const returnAll = this.getNodeParameter('returnAll', 0); + const projectId = this.getNodeParameter('projectId', 0) as string; + const datasetId = this.getNodeParameter('datasetId', 0) as string; + const tableId = this.getNodeParameter('tableId', 0) as string; + const simple = this.getNodeParameter('simple', 0) as boolean; + let fields; + + if (simple) { + const { schema } = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}`, + {}, + ); + fields = (schema.fields || []).map((field: IDataObject) => field.name); + } + + for (let i = 0; i < length; i++) { + try { + const options = this.getNodeParameter('options', i); + Object.assign(qs, options); + + if (qs.selectedFields) { + fields = (qs.selectedFields as string).split(','); + } + + if (returnAll) { + responseData = await googleApiRequestAllItems.call( + this, + 'rows', + 'GET', + `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/data`, + {}, + qs, + ); + } else { + qs.maxResults = this.getNodeParameter('limit', i); + responseData = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/data`, + {}, + qs, + ); + } + + if (!returnAll) { + responseData = responseData.rows; + } + responseData = simple + ? 
simplify(responseData as IDataObject[], fields as string[]) + : responseData; + + const executionData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray(responseData as IDataObject[]), + { itemData: { item: i } }, + ); + returnData.push(...executionData); + } catch (error) { + if (this.continueOnFail()) { + const executionErrorData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray({ error: error.message }), + { itemData: { item: i } }, + ); + returnData.push(...executionErrorData); + continue; + } + throw new NodeApiError(this.getNode(), error as JsonObject, { itemIndex: i }); + } + } + } + } + + return this.prepareOutputData(returnData); + } +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/RecordDescription.ts b/packages/nodes-base/nodes/Google/BigQuery/v1/RecordDescription.ts similarity index 100% rename from packages/nodes-base/nodes/Google/BigQuery/RecordDescription.ts rename to packages/nodes-base/nodes/Google/BigQuery/v1/RecordDescription.ts diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/GoogleBigQueryV2.node.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/GoogleBigQueryV2.node.ts new file mode 100644 index 0000000000000..771b93e95f97e --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/GoogleBigQueryV2.node.ts @@ -0,0 +1,30 @@ +/* eslint-disable n8n-nodes-base/node-filename-against-convention */ +import type { IExecuteFunctions } from 'n8n-core'; + +import type { + INodeExecutionData, + INodeType, + INodeTypeBaseDescription, + INodeTypeDescription, +} from 'n8n-workflow'; + +import { loadOptions, listSearch } from './methods'; +import { versionDescription } from './actions/versionDescription'; +import { router } from './actions/router'; + +export class GoogleBigQueryV2 implements INodeType { + description: INodeTypeDescription; + + constructor(baseDescription: INodeTypeBaseDescription) { + this.description = { + ...baseDescription, + ...versionDescription, + }; + } + + methods = { loadOptions, listSearch }; + + async execute(this: IExecuteFunctions): Promise { + return router.call(this); + } +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/actions/commonDescriptions/RLC.description.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/commonDescriptions/RLC.description.ts new file mode 100644 index 0000000000000..0c1f94700cb8b --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/commonDescriptions/RLC.description.ts @@ -0,0 +1,121 @@ +import type { INodeProperties } from 'n8n-workflow'; + +export const projectRLC: INodeProperties = { + displayName: 'Project', + name: 'projectId', + type: 'resourceLocator', + default: { mode: 'list', value: '' }, + required: true, + modes: [ + { + displayName: 'From List', + name: 'list', + type: 'list', + typeOptions: { + searchListMethod: 'searchProjects', + searchable: true, + }, + }, + { + displayName: 'By URL', + name: 'url', + type: 'string', + extractValue: { + type: 'regex', + regex: 'https:\\/\\/console.cloud.google.com\\/bigquery\\?project=([0-9a-zA-Z\\-_]+).{0,}', + }, + validation: [ + { + type: 'regex', + properties: { + regex: + 'https:\\/\\/console.cloud.google.com\\/bigquery\\?project=([0-9a-zA-Z\\-_]+).{0,}', + errorMessage: 'Not a valid BigQuery Project URL', + }, + }, + ], + }, + { + displayName: 'By ID', + name: 'id', + type: 'string', + validation: [ + { + type: 'regex', + properties: { + regex: '[a-zA-Z0-9\\-_]{2,}', + errorMessage: 'Not a valid BigQuery Project ID', + }, + }, + ], + url: 
'=https://console.cloud.google.com/bigquery?project={{$value}}', + }, + ], + description: 'Projects to which you have been granted any project role', +}; + +export const datasetRLC: INodeProperties = { + displayName: 'Dataset', + name: 'datasetId', + type: 'resourceLocator', + default: { mode: 'list', value: '' }, + required: true, + modes: [ + { + displayName: 'From List', + name: 'list', + type: 'list', + typeOptions: { + searchListMethod: 'searchDatasets', + searchable: true, + }, + }, + { + displayName: 'By ID', + name: 'id', + type: 'string', + validation: [ + { + type: 'regex', + properties: { + regex: '[a-zA-Z0-9\\-_]{2,}', + errorMessage: 'Not a valid Dataset ID', + }, + }, + ], + }, + ], +}; + +export const tableRLC: INodeProperties = { + displayName: 'Table', + name: 'tableId', + type: 'resourceLocator', + default: { mode: 'list', value: '' }, + required: true, + modes: [ + { + displayName: 'From List', + name: 'list', + type: 'list', + typeOptions: { + searchListMethod: 'searchTables', + searchable: true, + }, + }, + { + displayName: 'By ID', + name: 'id', + type: 'string', + validation: [ + { + type: 'regex', + properties: { + regex: '[a-zA-Z0-9\\-_]{2,}', + errorMessage: 'Not a valid Table ID', + }, + }, + ], + }, + ], +}; diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/Database.resource.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/Database.resource.ts new file mode 100644 index 0000000000000..76aa6d612f2db --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/Database.resource.ts @@ -0,0 +1,64 @@ +import type { INodeProperties } from 'n8n-workflow'; +import { datasetRLC, projectRLC, tableRLC } from '../commonDescriptions/RLC.description'; +import * as insert from './insert.operation'; +import * as executeQuery from './executeQuery.operation'; + +export { executeQuery, insert }; + +export const description: INodeProperties[] = [ + { + displayName: 'Operation', + name: 'operation', + type: 'options', + noDataExpression: true, + displayOptions: { + show: { + resource: ['database'], + }, + }, + options: [ + { + name: 'Execute Query', + value: 'executeQuery', + description: 'Execute a SQL query', + action: 'Execute a SQL query', + }, + { + name: 'Insert', + value: 'insert', + description: 'Insert rows in a table', + action: 'Insert rows in a table', + }, + ], + default: 'executeQuery', + }, + { + ...projectRLC, + displayOptions: { + show: { + resource: ['database'], + operation: ['executeQuery', 'insert'], + }, + }, + }, + { + ...datasetRLC, + displayOptions: { + show: { + resource: ['database'], + operation: ['insert'], + }, + }, + }, + { + ...tableRLC, + displayOptions: { + show: { + resource: ['database'], + operation: ['insert'], + }, + }, + }, + ...executeQuery.description, + ...insert.description, +]; diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/executeQuery.operation.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/executeQuery.operation.ts new file mode 100644 index 0000000000000..a806e5afacc33 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/executeQuery.operation.ts @@ -0,0 +1,299 @@ +import type { IExecuteFunctions } from 'n8n-core'; + +import type { IDataObject, INodeExecutionData, INodeProperties } from 'n8n-workflow'; + +import { NodeOperationError, sleep } from 'n8n-workflow'; +import { updateDisplayOptions } from '../../../../../../utils/utilities'; +import type { JobInsertResponse } from 
'../../helpers/interfaces'; + +import { prepareOutput } from '../../helpers/utils'; +import { googleApiRequest } from '../../transport'; + +const properties: INodeProperties[] = [ + { + displayName: 'SQL Query', + name: 'sqlQuery', + type: 'string', + displayOptions: { + hide: { + '/options.useLegacySql': [true], + }, + }, + default: '', + placeholder: 'SELECT * FROM dataset.table LIMIT 100', + description: + 'SQL query to execute, you can find more information here. Standard SQL syntax used by default, but you can also use Legacy SQL syntax by using optinon \'Use Legacy SQL\'.', + }, + { + displayName: 'SQL Query', + name: 'sqlQuery', + type: 'string', + displayOptions: { + show: { + '/options.useLegacySql': [true], + }, + }, + default: '', + placeholder: 'SELECT * FROM [project:dataset.table] LIMIT 100;', + hint: 'Legacy SQL syntax', + description: + 'SQL query to execute, you can find more information about Legacy SQL syntax here', + }, + { + displayName: 'Options', + name: 'options', + type: 'collection', + placeholder: 'Add Options', + default: {}, + options: [ + { + displayName: 'Default Dataset Name or ID', + name: 'defaultDataset', + type: 'options', + typeOptions: { + loadOptionsMethod: 'getDatasets', + loadOptionsDependsOn: ['projectId.value'], + }, + default: '', + description: + 'If not set, all table names in the query string must be qualified in the format \'datasetId.tableId\'. Choose from the list, or specify an ID using an expression.', + }, + { + displayName: 'Dry Run', + name: 'dryRun', + type: 'boolean', + default: false, + description: + "Whether set to true BigQuery doesn't run the job. Instead, if the query is valid, BigQuery returns statistics about the job such as how many bytes would be processed. If the query is invalid, an error returns.", + }, + { + displayName: 'Include Schema in Output', + name: 'includeSchema', + type: 'boolean', + default: false, + description: + "Whether to include the schema in the output. If set to true, the output will contain key '_schema' with the schema of the table.", + displayOptions: { + hide: { + rawOutput: [true], + }, + }, + }, + { + displayName: 'Location', + name: 'location', + type: 'string', + default: '', + placeholder: 'e.g. europe-west3', + description: + 'Location or the region where data would be stored and processed. Pricing for storage and analysis is also defined by location of data and reservations, more information here.', + }, + { + displayName: 'Maximum Bytes Billed', + name: 'maximumBytesBilled', + type: 'string', + default: '', + description: + 'Limits the bytes billed for this query. Queries with bytes billed above this limit will fail (without incurring a charge). String in Int64Value format', + }, + { + displayName: 'Max Results', + name: 'maxResults', + type: 'number', + default: 1000, + description: 'The maximum number of rows of data to return', + }, + { + displayName: 'Timeout', + name: 'timeoutMs', + type: 'number', + default: 10000, + description: 'How long to wait for the query to complete, in milliseconds', + }, + { + displayName: 'Raw Output', + name: 'rawOutput', + type: 'boolean', + default: false, + displayOptions: { + hide: { + dryRun: [true], + }, + }, + }, + { + displayName: 'Use Legacy SQL', + name: 'useLegacySql', + type: 'boolean', + default: false, + description: + "Whether to use BigQuery's legacy SQL dialect for this query. 
If set to false, the query will use BigQuery's standard SQL.", + }, + ], + }, +]; + +const displayOptions = { + show: { + resource: ['database'], + operation: ['executeQuery'], + }, +}; + +export const description = updateDisplayOptions(displayOptions, properties); + +export async function execute(this: IExecuteFunctions): Promise { + // https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query + + const items = this.getInputData(); + const length = items.length; + + const returnData: INodeExecutionData[] = []; + + let jobs = []; + + for (let i = 0; i < length; i++) { + try { + const sqlQuery = this.getNodeParameter('sqlQuery', i) as string; + const options = this.getNodeParameter('options', i); + const projectId = this.getNodeParameter('projectId', i, undefined, { + extractValue: true, + }); + + let rawOutput = false; + let includeSchema = false; + + if (options.rawOutput !== undefined) { + rawOutput = options.rawOutput as boolean; + delete options.rawOutput; + } + + if (options.includeSchema !== undefined) { + includeSchema = options.includeSchema as boolean; + delete options.includeSchema; + } + + const body: IDataObject = { ...options }; + + body.query = sqlQuery; + + if (body.defaultDataset) { + body.defaultDataset = { + datasetId: options.defaultDataset, + projectId, + }; + } + + if (body.useLegacySql === undefined) { + body.useLegacySql = false; + } + + const response: JobInsertResponse = await googleApiRequest.call( + this, + 'POST', + `/v2/projects/${projectId}/jobs`, + { + configuration: { + query: body, + }, + }, + ); + + if (!response?.jobReference?.jobId) { + throw new NodeOperationError(this.getNode(), `No job ID returned, item ${i}`, { + description: `sql: ${sqlQuery}`, + itemIndex: i, + }); + } + + const jobId = response?.jobReference?.jobId; + const raw = rawOutput || (options.dryRun as boolean) || false; + + if (response.status?.state === 'DONE') { + const qs = options.location ? { location: options.location } : {}; + + const queryResponse: IDataObject = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/queries/${jobId}`, + undefined, + qs, + ); + + returnData.push(...prepareOutput(queryResponse, i, raw, includeSchema)); + } else { + jobs.push({ jobId, projectId, i, raw, includeSchema, location: options.location }); + } + } catch (error) { + if (this.continueOnFail()) { + const executionErrorData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray({ error: error.message }), + { itemData: { item: i } }, + ); + returnData.push(...executionErrorData); + continue; + } + throw new NodeOperationError(this.getNode(), error.message as string, { + itemIndex: i, + description: error?.description, + }); + } + } + + let waitTime = 1000; + while (jobs.length > 0) { + const completedJobs: string[] = []; + + for (const job of jobs) { + try { + const qs = job.location ? 
{ location: job.location } : {}; + + const response: IDataObject = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${job.projectId}/queries/${job.jobId}`, + undefined, + qs, + ); + + if (response.jobComplete) { + completedJobs.push(job.jobId); + + returnData.push(...prepareOutput(response, job.i, job.raw, job.includeSchema)); + } + if ((response?.errors as IDataObject[])?.length) { + const errorMessages = (response.errors as IDataObject[]).map((error) => error.message); + throw new Error( + `Error(s) ocurring while executing query from item ${job.i.toString()}: ${errorMessages.join( + ', ', + )}`, + ); + } + } catch (error) { + if (this.continueOnFail()) { + const executionErrorData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray({ error: error.message }), + { itemData: { item: job.i } }, + ); + returnData.push(...executionErrorData); + continue; + } + throw new NodeOperationError(this.getNode(), error.message as string, { + itemIndex: job.i, + description: error?.description, + }); + } + } + + jobs = jobs.filter((job) => !completedJobs.includes(job.jobId)); + + if (jobs.length > 0) { + await sleep(waitTime); + if (waitTime < 30000) { + waitTime = waitTime * 2; + } + } + } + + return returnData; +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/insert.operation.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/insert.operation.ts new file mode 100644 index 0000000000000..f85cd9d9886f2 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/database/insert.operation.ts @@ -0,0 +1,285 @@ +import type { IExecuteFunctions } from 'n8n-core'; +import type { IDataObject, INodeExecutionData, INodeProperties } from 'n8n-workflow'; +import { NodeOperationError } from 'n8n-workflow'; +import { v4 as uuid } from 'uuid'; +import { updateDisplayOptions } from '../../../../../../utils/utilities'; +import type { TableSchema } from '../../helpers/interfaces'; +import { checkSchema, wrapData } from '../../helpers/utils'; +import { googleApiRequest } from '../../transport'; + +const properties: INodeProperties[] = [ + { + displayName: 'Data Mode', + name: 'dataMode', + type: 'options', + options: [ + { + name: 'Auto-Map Input Data', + value: 'autoMap', + description: 'Use when node input properties match destination field names', + }, + { + name: 'Map Each Field Below', + value: 'define', + description: 'Set the value for each destination field', + }, + ], + default: 'autoMap', + description: 'Whether to insert the input data this node receives in the new row', + }, + { + displayName: + "In this mode, make sure the incoming data fields are named the same as the columns in BigQuery. 
(Use a 'set' node before this node to change them if required.)", + name: 'info', + type: 'notice', + default: '', + displayOptions: { + show: { + dataMode: ['autoMap'], + }, + }, + }, + { + displayName: 'Fields to Send', + name: 'fieldsUi', + placeholder: 'Add Field', + type: 'fixedCollection', + typeOptions: { + multipleValueButtonText: 'Add Field', + multipleValues: true, + }, + default: {}, + options: [ + { + displayName: 'Field', + name: 'values', + values: [ + { + displayName: 'Field Name or ID', + name: 'fieldId', + type: 'options', + description: + 'Choose from the list, or specify an ID using an expression', + typeOptions: { + loadOptionsDependsOn: ['projectId.value', 'datasetId.value', 'tableId.value'], + loadOptionsMethod: 'getSchema', + }, + default: '', + }, + { + displayName: 'Field Value', + name: 'fieldValue', + type: 'string', + default: '', + }, + ], + }, + ], + displayOptions: { + show: { + dataMode: ['define'], + }, + }, + }, + { + displayName: 'Options', + name: 'options', + type: 'collection', + placeholder: 'Add Options', + default: {}, + options: [ + { + displayName: 'Batch Size', + name: 'batchSize', + type: 'number', + default: 100, + typeOptions: { + minValue: 1, + }, + }, + { + displayName: 'Ignore Unknown Values', + name: 'ignoreUnknownValues', + type: 'boolean', + default: false, + description: 'Whether to ignore row values that do not match the schema', + }, + { + displayName: 'Skip Invalid Rows', + name: 'skipInvalidRows', + type: 'boolean', + default: false, + description: 'Whether to skip rows with values that do not match the schema', + }, + { + displayName: 'Template Suffix', + name: 'templateSuffix', + type: 'string', + default: '', + description: + 'Create a new table based on the destination table and insert rows into the new table. The new table will be named {destinationTable}{templateSuffix}', + }, + { + displayName: 'Trace ID', + name: 'traceId', + type: 'string', + default: '', + description: + 'Unique ID for the request, for debugging only. It is case-sensitive, limited to up to 36 ASCII characters.
A UUID is recommended.', + }, + ], + }, +]; + +const displayOptions = { + show: { + resource: ['database'], + operation: ['insert'], + }, +}; + +export const description = updateDisplayOptions(displayOptions, properties); + +export async function execute(this: IExecuteFunctions): Promise { + // https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll + const projectId = this.getNodeParameter('projectId', 0, undefined, { + extractValue: true, + }); + const datasetId = this.getNodeParameter('datasetId', 0, undefined, { + extractValue: true, + }); + const tableId = this.getNodeParameter('tableId', 0, undefined, { + extractValue: true, + }); + + const options = this.getNodeParameter('options', 0); + const dataMode = this.getNodeParameter('dataMode', 0) as string; + + let batchSize = 100; + if (options.batchSize) { + batchSize = options.batchSize as number; + delete options.batchSize; + } + + const items = this.getInputData(); + const length = items.length; + + const returnData: INodeExecutionData[] = []; + const rows: IDataObject[] = []; + const body: IDataObject = {}; + + Object.assign(body, options); + if (body.traceId === undefined) { + body.traceId = uuid(); + } + + const schema = ( + await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}`, + {}, + ) + ).schema as TableSchema; + + if (schema === undefined) { + throw new NodeOperationError(this.getNode(), 'The destination table has no defined schema'); + } + + for (let i = 0; i < length; i++) { + try { + const record: IDataObject = {}; + + if (dataMode === 'autoMap') { + schema.fields.forEach(({ name }) => { + record[name] = items[i].json[name]; + }); + } + + if (dataMode === 'define') { + const fields = this.getNodeParameter('fieldsUi.values', i, []) as IDataObject[]; + + fields.forEach(({ fieldId, fieldValue }) => { + record[`${fieldId}`] = fieldValue; + }); + } + + rows.push({ json: checkSchema.call(this, schema, record, i) }); + } catch (error) { + if (this.continueOnFail()) { + const executionErrorData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray({ error: error.message }), + { itemData: { item: i } }, + ); + returnData.push(...executionErrorData); + continue; + } + throw new NodeOperationError(this.getNode(), error.message as string, { + itemIndex: i, + description: error?.description, + }); + } + } + + for (let i = 0; i < rows.length; i += batchSize) { + const batch = rows.slice(i, i + batchSize); + body.rows = batch; + + const responseData = await googleApiRequest.call( + this, + 'POST', + `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/insertAll`, + body, + ); + + if (responseData?.insertErrors && !options.skipInvalidRows) { + const errors: string[] = []; + const failedRows: number[] = []; + const stopedRows: number[] = []; + + (responseData.insertErrors as IDataObject[]).forEach((entry) => { + const invalidRows = (entry.errors as IDataObject[]).filter( + (error) => error.reason !== 'stopped', + ); + if (invalidRows.length) { + const entryIndex = (entry.index as number) + i; + errors.push( + `Row ${entryIndex} failed with error: ${invalidRows + .map((error) => error.message) + .join(', ')}`, + ); + failedRows.push(entryIndex); + } else { + const entryIndex = (entry.index as number) + i; + stopedRows.push(entryIndex); + } + }); + + if (this.continueOnFail()) { + const executionErrorData = this.helpers.constructExecutionMetaData( + this.helpers.returnJsonArray({ error: errors.join('\n, ') }), + { itemData: { 
item: i } }, + ); + returnData.push(...executionErrorData); + continue; + } + + const failedMessage = `Problem inserting item(s) [${failedRows.join(', ')}]`; + const stoppedMessage = stopedRows.length + ? `, nothing was inserted item(s) [${stopedRows.join(', ')}]` + : ''; + throw new NodeOperationError(this.getNode(), `${failedMessage}${stoppedMessage}`, { + description: errors.join('\n, '), + }); + } + + const executionData = this.helpers.constructExecutionMetaData( + wrapData(responseData as IDataObject[]), + { itemData: { item: 0 } }, + ); + + returnData.push(...executionData); + } + + return returnData; +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/actions/node.type.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/node.type.ts new file mode 100644 index 0000000000000..85a96a4f9e780 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/node.type.ts @@ -0,0 +1,9 @@ +import type { AllEntities, Entity } from 'n8n-workflow'; + +type GoogleBigQueryMap = { + database: 'executeQuery' | 'insert'; +}; + +export type GoogleBigQuery = AllEntities; + +export type GoogleBigQueryDatabase = Entity; diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/actions/router.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/router.ts new file mode 100644 index 0000000000000..e2888a85ee85e --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/router.ts @@ -0,0 +1,28 @@ +import type { IExecuteFunctions } from 'n8n-core'; +import type { INodeExecutionData } from 'n8n-workflow'; +import { NodeOperationError } from 'n8n-workflow'; +import type { GoogleBigQuery } from './node.type'; + +import * as record from './database/Database.resource'; + +export async function router(this: IExecuteFunctions): Promise { + const resource = this.getNodeParameter('resource', 0); + const operation = this.getNodeParameter('operation', 0); + + let returnData: INodeExecutionData[] = []; + + const googleBigQuery = { + resource, + operation, + } as GoogleBigQuery; + + switch (googleBigQuery.resource) { + case 'database': + returnData = await record[googleBigQuery.operation].execute.call(this); + break; + default: + throw new NodeOperationError(this.getNode(), `The resource "${resource}" is not known`); + } + + return this.prepareOutputData(returnData); +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/actions/versionDescription.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/versionDescription.ts new file mode 100644 index 0000000000000..c1df1cf7a20be --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/actions/versionDescription.ts @@ -0,0 +1,72 @@ +/* eslint-disable n8n-nodes-base/node-filename-against-convention */ +import type { INodeTypeDescription } from 'n8n-workflow'; +import * as database from './database/Database.resource'; + +export const versionDescription: INodeTypeDescription = { + displayName: 'Google BigQuery', + name: 'googleBigQuery', + icon: 'file:googleBigQuery.svg', + group: ['input'], + version: 2, + subtitle: '={{$parameter["operation"]}}', + description: 'Consume Google BigQuery API', + defaults: { + name: 'Google BigQuery', + }, + inputs: ['main'], + outputs: ['main'], + credentials: [ + { + name: 'googleApi', + required: true, + displayOptions: { + show: { + authentication: ['serviceAccount'], + }, + }, + }, + { + name: 'googleBigQueryOAuth2Api', + required: true, + displayOptions: { + show: { + authentication: ['oAuth2'], + }, + }, + }, + ], + properties: [ + { + displayName: 'Authentication', + name: 
'authentication', + type: 'options', + noDataExpression: true, + options: [ + { + // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased + name: 'OAuth2 (recommended)', + value: 'oAuth2', + }, + { + name: 'Service Account', + value: 'serviceAccount', + }, + ], + default: 'oAuth2', + }, + { + displayName: 'Resource', + name: 'resource', + type: 'hidden', + noDataExpression: true, + options: [ + { + name: 'Database', + value: 'database', + }, + ], + default: 'database', + }, + ...database.description, + ], +}; diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/helpers/interfaces.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/helpers/interfaces.ts new file mode 100644 index 0000000000000..ffd8e9d2275e2 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/helpers/interfaces.ts @@ -0,0 +1,31 @@ +import type { IDataObject } from 'n8n-workflow'; + +export type SchemaField = { + name: string; + type: string; + mode: string; + fields?: SchemaField[]; +}; + +export type TableSchema = { + fields: SchemaField[]; +}; + +export type TableRawData = { + f: Array<{ v: IDataObject | TableRawData }>; +}; + +export type JobReference = { + projectId: string; + jobId: string; + location: string; +}; + +export type JobInsertResponse = { + kind: string; + id: string; + jobReference: JobReference; + status: { + state: 'PENDING' | 'RUNNING' | 'DONE'; + }; +}; diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/helpers/utils.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/helpers/utils.ts new file mode 100644 index 0000000000000..7325446592a82 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/helpers/utils.ts @@ -0,0 +1,135 @@ +import type { IExecuteFunctions } from 'n8n-core'; +import { constructExecutionMetaData } from 'n8n-core'; +import type { IDataObject, INodeExecutionData } from 'n8n-workflow'; +import { jsonParse, NodeOperationError } from 'n8n-workflow'; +import type { SchemaField, TableRawData, TableSchema } from './interfaces'; + +function getFieldValue(schemaField: SchemaField, field: IDataObject) { + if (schemaField.type === 'RECORD') { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + return simplify([field.v as TableRawData], schemaField.fields as unknown as SchemaField[]); + } else { + let value = field.v; + if (schemaField.type === 'JSON') { + try { + value = jsonParse(value as string); + } catch (error) {} + } + return value; + } +} + +export function wrapData(data: IDataObject | IDataObject[]): INodeExecutionData[] { + if (!Array.isArray(data)) { + return [{ json: data }]; + } + return data.map((item) => ({ + json: item, + })); +} + +export function simplify(data: TableRawData[], schema: SchemaField[], includeSchema = false) { + const returnData: IDataObject[] = []; + for (const entry of data) { + const record: IDataObject = {}; + + for (const [index, field] of entry.f.entries()) { + if (schema[index].mode !== 'REPEATED') { + record[schema[index].name] = getFieldValue(schema[index], field); + } else { + record[schema[index].name] = (field.v as unknown as IDataObject[]).flatMap( + (repeatedField) => { + return getFieldValue(schema[index], repeatedField as unknown as IDataObject); + }, + ); + } + } + + if (includeSchema) { + record._schema = schema; + } + + returnData.push(record); + } + + return returnData; +} + +export function prepareOutput( + response: IDataObject, + itemIndex: number, + rawOutput: boolean, + includeSchema = false, +) { + let responseData; + + if (response === undefined) return []; + + if 
(rawOutput) { + responseData = response; + } else { + const { rows, schema } = response; + + if (rows !== undefined && schema !== undefined) { + const fields = (schema as TableSchema).fields; + responseData = rows; + + responseData = simplify(responseData as TableRawData[], fields, includeSchema); + } else if (schema && includeSchema) { + responseData = { success: true, _schema: schema }; + } else { + responseData = { success: true }; + } + } + + const executionData = constructExecutionMetaData(wrapData(responseData as IDataObject[]), { + itemData: { item: itemIndex }, + }); + + return executionData; +} + +export function checkSchema( + this: IExecuteFunctions, + schema: TableSchema, + record: IDataObject, + i: number, +) { + const returnData = { ...record }; + + schema.fields.forEach(({ name, mode, type, fields }) => { + if (mode === 'REQUIRED' && returnData[name] === undefined) { + throw new NodeOperationError( + this.getNode(), + `The property '${name}' is required, please define it in the 'Fields to Send'`, + { itemIndex: i }, + ); + } + if (type !== 'STRING' && returnData[name] === '') { + returnData[name] = null; + } + if (type === 'JSON') { + let value = returnData[name]; + if (typeof value === 'object') { + value = JSON.stringify(value); + } + returnData[name] = value; + } + if (type === 'RECORD' && typeof returnData[name] !== 'object') { + let parsedField; + try { + parsedField = jsonParse(returnData[name] as string); + } catch (error) { + const recordField = fields ? `Field Schema:\n ${JSON.stringify(fields)}` : ''; + throw new NodeOperationError( + this.getNode(), + `The property '${name}' is a RECORD type, but the value is neither an object nor a valid JSON string`, + { itemIndex: i, description: recordField }, + ); + } + returnData[name] = parsedField as IDataObject; + } + }); + + return returnData; +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/methods/index.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/methods/index.ts new file mode 100644 index 0000000000000..a5508a3e0fa86 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/methods/index.ts @@ -0,0 +1,2 @@ +export * as loadOptions from './loadOptions'; +export * as listSearch from './listSearch'; diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/methods/listSearch.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/methods/listSearch.ts new file mode 100644 index 0000000000000..40f44b6151ef7 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/methods/listSearch.ts @@ -0,0 +1,115 @@ +import type { IDataObject, ILoadOptionsFunctions, INodeListSearchResult } from 'n8n-workflow'; +import { googleApiRequest } from '../transport'; + +export async function searchProjects( + this: ILoadOptionsFunctions, + filter?: string, + paginationToken?: string, +): Promise<INodeListSearchResult> { + const qs = { + pageToken: (paginationToken as string) || undefined, + }; + + const response = await googleApiRequest.call(this, 'GET', '/v2/projects', undefined, qs); + + let { projects } = response; + + if (filter) { + projects = projects.filter( + (project: IDataObject) => + (project.friendlyName as string).includes(filter) || + (project.id as string).includes(filter), + ); + } + + return { + results: projects.map((project: IDataObject) => ({ + name: project.friendlyName as string, + value: project.id, + url: `https://console.cloud.google.com/bigquery?project=${project.id as string}`, + })), + paginationToken: response.nextPageToken, + }; +} + +export async function searchDatasets( + this: ILoadOptionsFunctions, + filter?:
string, + paginationToken?: string, +): Promise { + const projectId = this.getNodeParameter('projectId', undefined, { + extractValue: true, + }); + + const qs = { + pageToken: (paginationToken as string) || undefined, + }; + + const response = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets`, + undefined, + qs, + ); + + let { datasets } = response; + + if (filter) { + datasets = datasets.filter((dataset: { datasetReference: IDataObject }) => + (dataset.datasetReference.datasetId as string).includes(filter), + ); + } + + return { + results: datasets.map((dataset: { datasetReference: IDataObject }) => ({ + name: dataset.datasetReference.datasetId as string, + value: dataset.datasetReference.datasetId, + })), + paginationToken: response.nextPageToken, + }; +} + +export async function searchTables( + this: ILoadOptionsFunctions, + filter?: string, + paginationToken?: string, +): Promise { + const projectId = this.getNodeParameter('projectId', undefined, { + extractValue: true, + }); + + const datasetId = this.getNodeParameter('datasetId', undefined, { + extractValue: true, + }); + + const qs = { + pageToken: (paginationToken as string) || undefined, + }; + + const response = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets/${datasetId}/tables`, + undefined, + qs, + ); + + let { tables } = response; + + if (filter) { + tables = tables.filter((table: { tableReference: IDataObject }) => + (table.tableReference.tableId as string).includes(filter), + ); + } + + const returnData = { + results: tables.map((table: { tableReference: IDataObject }) => ({ + name: table.tableReference.tableId as string, + value: table.tableReference.tableId, + })), + paginationToken: response.nextPageToken, + }; + + return returnData; +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/methods/loadOptions.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/methods/loadOptions.ts new file mode 100644 index 0000000000000..41966678c68a9 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/methods/loadOptions.ts @@ -0,0 +1,53 @@ +import type { IDataObject, ILoadOptionsFunctions, INodePropertyOptions } from 'n8n-workflow'; +import { googleApiRequest } from '../transport'; + +export async function getDatasets(this: ILoadOptionsFunctions): Promise { + const projectId = this.getNodeParameter('projectId', undefined, { + extractValue: true, + }); + const returnData: INodePropertyOptions[] = []; + const { datasets } = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets`, + ); + for (const dataset of datasets) { + returnData.push({ + name: dataset.datasetReference.datasetId as string, + value: dataset.datasetReference.datasetId, + }); + } + return returnData; +} + +export async function getSchema(this: ILoadOptionsFunctions): Promise { + const projectId = this.getNodeParameter('projectId', undefined, { + extractValue: true, + }); + const datasetId = this.getNodeParameter('datasetId', undefined, { + extractValue: true, + }); + const tableId = this.getNodeParameter('tableId', undefined, { + extractValue: true, + }); + + const returnData: INodePropertyOptions[] = []; + + const { schema } = await googleApiRequest.call( + this, + 'GET', + `/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}`, + {}, + ); + + for (const field of schema.fields as IDataObject[]) { + returnData.push({ + name: field.name as string, + value: field.name as string, + // eslint-disable-next-line 
n8n-nodes-base/node-param-description-lowercase-first-char + description: + `type: ${field.type as string}` + (field.mode ? ` mode: ${field.mode as string}` : ''), + }); + } + return returnData; +} diff --git a/packages/nodes-base/nodes/Google/BigQuery/v2/transport/index.ts b/packages/nodes-base/nodes/Google/BigQuery/v2/transport/index.ts new file mode 100644 index 0000000000000..8b61e324fdae4 --- /dev/null +++ b/packages/nodes-base/nodes/Google/BigQuery/v2/transport/index.ts @@ -0,0 +1,136 @@ +import type { OptionsWithUri } from 'request'; +import moment from 'moment-timezone'; +import * as jwt from 'jsonwebtoken'; + +import type { IExecuteFunctions, IExecuteSingleFunctions, ILoadOptionsFunctions } from 'n8n-core'; +import type { IDataObject, JsonObject } from 'n8n-workflow'; +import { NodeApiError, NodeOperationError } from 'n8n-workflow'; + +async function getAccessToken( + this: IExecuteFunctions | IExecuteSingleFunctions | ILoadOptionsFunctions, + credentials: IDataObject, +): Promise { + //https://developers.google.com/identity/protocols/oauth2/service-account#httprest + + const privateKey = (credentials.privateKey as string).replace(/\\n/g, '\n').trim(); + + const scopes = ['https://www.googleapis.com/auth/bigquery']; + + const now = moment().unix(); + + const signature = jwt.sign( + { + iss: credentials.email as string, + sub: credentials.delegatedEmail || (credentials.email as string), + scope: scopes.join(' '), + aud: 'https://oauth2.googleapis.com/token', + iat: now, + exp: now + 3600, + }, + privateKey, + { + algorithm: 'RS256', + header: { + kid: privateKey, + typ: 'JWT', + alg: 'RS256', + }, + }, + ); + + const options: OptionsWithUri = { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + method: 'POST', + form: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signature, + }, + uri: 'https://oauth2.googleapis.com/token', + json: true, + }; + + return this.helpers.request(options); +} + +export async function googleApiRequest( + this: IExecuteFunctions | IExecuteSingleFunctions | ILoadOptionsFunctions, + method: string, + resource: string, + body: IDataObject = {}, + qs: IDataObject = {}, + uri?: string, + headers: IDataObject = {}, +) { + const authenticationMethod = this.getNodeParameter( + 'authentication', + 0, + 'serviceAccount', + ) as string; + + const options: OptionsWithUri = { + headers: { + 'Content-Type': 'application/json', + }, + method, + body, + qs, + uri: uri || `https://bigquery.googleapis.com/bigquery${resource}`, + json: true, + }; + try { + if (Object.keys(headers).length !== 0) { + options.headers = Object.assign({}, options.headers, headers); + } + if (Object.keys(body).length === 0) { + delete options.body; + } + + if (authenticationMethod === 'serviceAccount') { + const credentials = await this.getCredentials('googleApi'); + + if (credentials === undefined) { + throw new NodeOperationError(this.getNode(), 'No credentials got returned!'); + } + + const { access_token } = await getAccessToken.call(this, credentials as IDataObject); + + options.headers!.Authorization = `Bearer ${access_token}`; + return await this.helpers.request(options); + } else { + return await this.helpers.requestOAuth2.call(this, 'googleBigQueryOAuth2Api', options); + } + } catch (error) { + if (error.code === 'ERR_OSSL_PEM_NO_START_LINE') { + error.statusCode = '401'; + } + + throw new NodeApiError(this.getNode(), error as JsonObject, { + message: error?.error?.error?.message || error.message, + }); + } +} + +export async function 
googleApiRequestAllItems( + this: IExecuteFunctions | ILoadOptionsFunctions, + propertyName: string, + method: string, + endpoint: string, + body: IDataObject = {}, + query: IDataObject = {}, +) { + const returnData: IDataObject[] = []; + + let responseData; + query.maxResults = 10000; + + do { + responseData = await googleApiRequest.call(this, method, endpoint, body, query); + + query.pageToken = responseData.pageToken; + returnData.push.apply(returnData, responseData[propertyName] as IDataObject[]); + } while (responseData.pageToken !== undefined && responseData.pageToken !== ''); + + return returnData; +} diff --git a/packages/nodes-base/package.json b/packages/nodes-base/package.json index c3386b986dfd2..2693c7bb41d7a 100644 --- a/packages/nodes-base/package.json +++ b/packages/nodes-base/package.json @@ -21,7 +21,7 @@ "build:translations": "gulp build:translations", "build:metadata": "pnpm n8n-generate-known && pnpm n8n-generate-ui-types", "format": "prettier --write . --ignore-path ../../.prettierignore", - "lint": "eslint --quiet nodes credentials; node ./scripts/validate-load-options-methods.js", + "lint": "eslint --quiet nodes credentials", "lintfix": "eslint nodes credentials --fix", "watch": "tsc-watch -p tsconfig.build.json --onSuccess \"pnpm n8n-generate-ui-types\"", "test": "jest"