diff --git a/packages/@n8n/nodes-langchain/jest.config.js b/packages/@n8n/nodes-langchain/jest.config.js
new file mode 100644
index 0000000000000..d6c48554a79a4
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/jest.config.js
@@ -0,0 +1,2 @@
+/** @type {import('jest').Config} */
+module.exports = require('../../../jest.config');
diff --git a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts
index 67ea020d14ae2..4a74d062ba0d2 100644
--- a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts
@@ -16,6 +16,7 @@ export class OpenAiAssistant implements INodeType {
description: INodeTypeDescription = {
displayName: 'OpenAI Assistant',
name: 'openAiAssistant',
+ hidden: true,
icon: 'fa:robot',
group: ['transform'],
version: 1,
diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts
index 12b57b52cf71d..cd983171d7ba5 100644
--- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts
@@ -46,6 +46,13 @@ export class ToolCode implements INodeType {
outputNames: ['Tool'],
properties: [
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
+ {
+ displayName:
+ 'See an example of a conversational agent with custom tool written in JavaScript here.',
+ name: 'noticeTemplateExample',
+ type: 'notice',
+ default: '',
+ },
{
displayName: 'Name',
name: 'name',
@@ -95,11 +102,12 @@ export class ToolCode implements INodeType {
editor: 'codeNodeEditor',
editorLanguage: 'javaScript',
},
- default: '',
+ default:
+ '// Example: convert the incoming query to uppercase and return it\nreturn query.toUpperCase()',
// TODO: Add proper text here later
hint: 'You can access the input the tool receives via the input property "query". The returned value should be a single string.',
- description:
- 'JavaScript code to execute.<br><br>Tip: You can use luxon vars like $today for dates and $jmespath for querying JSON structures. <a href="https://docs.n8n.io/code-examples/methods-variables-reference/" target="_blank">Learn more</a>.',
+ // eslint-disable-next-line n8n-nodes-base/node-param-description-missing-final-period
+ description: 'E.g. Converts any text to uppercase',
noDataExpression: true,
},
{
@@ -115,11 +123,12 @@ export class ToolCode implements INodeType {
editor: 'codeNodeEditor',
editorLanguage: 'python',
},
- default: '',
+ default:
+ '# Example: convert the incoming query to uppercase and return it\nreturn query.upper()',
// TODO: Add proper text here later
hint: 'You can access the input the tool receives via the input property "query". The returned value should be a single string.',
- description:
- 'Python code to execute.<br><br>Tip: You can use built-in methods and variables like _today for dates and _jmespath for querying JSON structures. <a href="https://docs.n8n.io/code-examples/methods-variables-reference/" target="_blank">Learn more</a>.',
+ // eslint-disable-next-line n8n-nodes-base/node-param-description-missing-final-period
+ description: 'E.g. Converts any text to uppercase',
noDataExpression: true,
},
],
diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts
index 88c7578e16008..de6d9350ed2f4 100644
--- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts
@@ -49,6 +49,13 @@ export class ToolWorkflow implements INodeType {
outputNames: ['Tool'],
properties: [
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
+ {
+ displayName:
+ 'See an example of a workflow to suggest meeting slots using AI here.',
+ name: 'noticeTemplateExample',
+ type: 'notice',
+ default: '',
+ },
{
displayName: 'Name',
name: 'name',
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts
new file mode 100644
index 0000000000000..1743c6961859f
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts
@@ -0,0 +1,17 @@
+import type { IExecuteFunctions, INodeType } from 'n8n-workflow';
+import { router } from './actions/router';
+import { versionDescription } from './actions/versionDescription';
+import { listSearch, loadOptions } from './methods';
+
+export class OpenAi implements INodeType {
+ description = versionDescription;
+
+ methods = {
+ listSearch,
+ loadOptions,
+ };
+
+ async execute(this: IExecuteFunctions) {
+ return await router.call(this);
+ }
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/create.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/create.operation.ts
new file mode 100644
index 0000000000000..3e34aed9e26f7
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/create.operation.ts
@@ -0,0 +1,252 @@
+import type {
+ INodeProperties,
+ IExecuteFunctions,
+ INodeExecutionData,
+ IDataObject,
+} from 'n8n-workflow';
+import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+import { modelRLC } from '../descriptions';
+
+const properties: INodeProperties[] = [
+ modelRLC,
+ {
+ displayName: 'Name',
+ name: 'name',
+ type: 'string',
+ default: '',
+ description: 'The name of the assistant. The maximum length is 256 characters.',
+ placeholder: 'e.g. My Assistant',
+ required: true,
+ },
+ {
+ displayName: 'Description',
+ name: 'description',
+ type: 'string',
+ default: '',
+ description: 'The description of the assistant. The maximum length is 512 characters.',
+ placeholder: 'e.g. My personal assistant',
+ },
+ {
+ displayName: 'Instructions',
+ name: 'instructions',
+ type: 'string',
+ description:
+ 'The system instructions that the assistant uses. The maximum length is 32768 characters.',
+ default: '',
+ typeOptions: {
+ rows: 2,
+ },
+ },
+ {
+ displayName: 'Code Interpreter',
+ name: 'codeInterpreter',
+ type: 'boolean',
+ default: false,
+ description:
+ 'Whether to enable the code interpreter that allows the assistants to write and run Python code in a sandboxed execution environment, find more here',
+ },
+ {
+ displayName: 'Knowledge Retrieval',
+ name: 'knowledgeRetrieval',
+ type: 'boolean',
+ default: false,
+ description:
+ 'Whether to augment the assistant with knowledge from outside its model, such as proprietary product information or documents, find more here',
+ },
+ //we want to display Files selector only when codeInterpreter true or knowledgeRetrieval true or both
+ {
+ // eslint-disable-next-line n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options
+ displayName: 'Files',
+ name: 'file_ids',
+ type: 'multiOptions',
+ // eslint-disable-next-line n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options
+ description:
+ 'The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant',
+ typeOptions: {
+ loadOptionsMethod: 'getFiles',
+ },
+ default: [],
+ hint: "Add more files by using the 'Upload a File' operation",
+ displayOptions: {
+ show: {
+ codeInterpreter: [true],
+ },
+ hide: {
+ knowledgeRetrieval: [true],
+ },
+ },
+ },
+ {
+ // eslint-disable-next-line n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options
+ displayName: 'Files',
+ name: 'file_ids',
+ type: 'multiOptions',
+ // eslint-disable-next-line n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options
+ description:
+ 'The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant',
+ typeOptions: {
+ loadOptionsMethod: 'getFiles',
+ },
+ default: [],
+ hint: "Add more files by using the 'Upload a File' operation",
+ displayOptions: {
+ show: {
+ knowledgeRetrieval: [true],
+ },
+ hide: {
+ codeInterpreter: [true],
+ },
+ },
+ },
+ {
+ // eslint-disable-next-line n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options
+ displayName: 'Files',
+ name: 'file_ids',
+ type: 'multiOptions',
+ // eslint-disable-next-line n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options
+ description:
+ 'The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant',
+ typeOptions: {
+ loadOptionsMethod: 'getFiles',
+ },
+ default: [],
+ hint: "Add more files by using the 'Upload a File' operation",
+ displayOptions: {
+ show: {
+ knowledgeRetrieval: [true],
+ codeInterpreter: [true],
+ },
+ },
+ },
+ {
+ displayName: "Add custom n8n tools when using the 'Message Assistant' operation",
+ name: 'noticeTools',
+ type: 'notice',
+ default: '',
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Fail if Assistant Already Exists',
+ name: 'failIfExists',
+ type: 'boolean',
+ default: false,
+ description:
+ 'Whether to fail an operation if the assistant with the same name already exists',
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['create'],
+ resource: ['assistant'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const model = this.getNodeParameter('modelId', i, '', { extractValue: true }) as string;
+ const name = this.getNodeParameter('name', i) as string;
+ const assistantDescription = this.getNodeParameter('description', i) as string;
+ const instructions = this.getNodeParameter('instructions', i) as string;
+ const codeInterpreter = this.getNodeParameter('codeInterpreter', i) as boolean;
+ const knowledgeRetrieval = this.getNodeParameter('knowledgeRetrieval', i) as boolean;
+ const file_ids = this.getNodeParameter('file_ids', i, []) as string[];
+ const options = this.getNodeParameter('options', i, {});
+
+ if (options.failIfExists) {
+ const assistants: string[] = [];
+
+ let has_more = true;
+ let after: string | undefined;
+
+ do {
+ const response = await apiRequest.call(this, 'GET', '/assistants', {
+ headers: {
+ 'OpenAI-Beta': 'assistants=v1',
+ },
+ qs: {
+ limit: 100,
+ after,
+ },
+ });
+
+ for (const assistant of response.data || []) {
+ assistants.push(assistant.name);
+ }
+
+ has_more = response.has_more;
+
+ if (has_more) {
+ after = response.last_id as string;
+ } else {
+ break;
+ }
+ } while (has_more);
+
+ if (assistants.includes(name)) {
+ throw new NodeOperationError(
+ this.getNode(),
+ `An assistant with the same name '${name}' already exists`,
+ { itemIndex: i },
+ );
+ }
+ }
+
+ if (file_ids.length > 20) {
+ throw new NodeOperationError(
+ this.getNode(),
+ 'The maximum number of files that can be attached to the assistant is 20',
+ { itemIndex: i },
+ );
+ }
+
+ const body: IDataObject = {
+ model,
+ name,
+ description: assistantDescription,
+ instructions,
+ file_ids,
+ };
+
+ const tools = [];
+
+ if (codeInterpreter) {
+ tools.push({
+ type: 'code_interpreter',
+ });
+ }
+
+ if (knowledgeRetrieval) {
+ tools.push({
+ type: 'retrieval',
+ });
+ }
+
+ if (tools.length) {
+ body.tools = tools;
+ }
+
+ const response = await apiRequest.call(this, 'POST', '/assistants', {
+ body,
+ headers: {
+ 'OpenAI-Beta': 'assistants=v1',
+ },
+ });
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts
new file mode 100644
index 0000000000000..83f958e866228
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts
@@ -0,0 +1,32 @@
+import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+import { assistantRLC } from '../descriptions';
+
+const properties: INodeProperties[] = [assistantRLC];
+
+const displayOptions = {
+ show: {
+ operation: ['deleteAssistant'],
+ resource: ['assistant'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const assistantId = this.getNodeParameter('assistantId', i, '', { extractValue: true }) as string;
+
+ const response = await apiRequest.call(this, 'DELETE', `/assistants/${assistantId}`, {
+ headers: {
+ 'OpenAI-Beta': 'assistants=v1',
+ },
+ });
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/index.ts
new file mode 100644
index 0000000000000..3f869ffcc877c
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/index.ts
@@ -0,0 +1,62 @@
+import type { INodeProperties } from 'n8n-workflow';
+
+import * as create from './create.operation';
+import * as deleteAssistant from './deleteAssistant.operation';
+import * as message from './message.operation';
+import * as list from './list.operation';
+import * as update from './update.operation';
+
+export { create, deleteAssistant, message, list, update };
+
+export const description: INodeProperties[] = [
+ {
+ displayName: 'Operation',
+ name: 'operation',
+ type: 'options',
+ noDataExpression: true,
+ options: [
+ {
+ name: 'Create an Assistant',
+ value: 'create',
+ action: 'Create an assistant',
+ description: 'Create a new assistant',
+ },
+ {
+ name: 'Delete an Assistant',
+ value: 'deleteAssistant',
+ action: 'Delete an assistant',
+ description: 'Delete an assistant from the account',
+ },
+ {
+ name: 'List Assistants',
+ value: 'list',
+ action: 'List assistants',
+ description: 'List assistants in the organization',
+ },
+ {
+ name: 'Message an Assistant',
+ value: 'message',
+ action: 'Message an assistant',
+ description: 'Send messages to an assistant',
+ },
+ {
+ name: 'Update an Assistant',
+ value: 'update',
+ action: 'Update an assistant',
+ description: 'Update an existing assistant',
+ },
+ ],
+ default: 'message',
+ displayOptions: {
+ show: {
+ resource: ['assistant'],
+ },
+ },
+ },
+
+ ...create.description,
+ ...deleteAssistant.description,
+ ...message.description,
+ ...list.description,
+ ...update.description,
+];
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/list.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/list.operation.ts
new file mode 100644
index 0000000000000..ac9adfa169bc2
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/list.operation.ts
@@ -0,0 +1,75 @@
+import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Simplify Output',
+ name: 'simplify',
+ type: 'boolean',
+ default: true,
+ description: 'Whether to return a simplified version of the response instead of the raw data',
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['list'],
+ resource: ['assistant'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const returnData: INodeExecutionData[] = [];
+
+ let has_more = true;
+ let after: string | undefined;
+
+ do {
+ const response = await apiRequest.call(this, 'GET', '/assistants', {
+ headers: {
+ 'OpenAI-Beta': 'assistants=v1',
+ },
+ qs: {
+ limit: 100,
+ after,
+ },
+ });
+
+ for (const assistant of response.data || []) {
+ try {
+ assistant.created_at = new Date(assistant.created_at * 1000).toISOString();
+ } catch (error) {}
+
+ returnData.push({ json: assistant, pairedItem: { item: i } });
+ }
+
+ has_more = response.has_more;
+
+ if (has_more) {
+ after = response.last_id as string;
+ } else {
+ break;
+ }
+ } while (has_more);
+
+ const simplify = this.getNodeParameter('simplify', i) as boolean;
+
+ if (simplify) {
+ return returnData.map((item) => {
+ const { id, name, model } = item.json;
+ return {
+ json: {
+ id,
+ name,
+ model,
+ },
+ pairedItem: { item: i },
+ };
+ });
+ }
+
+ return returnData;
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts
new file mode 100644
index 0000000000000..e58ecd74c2375
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts
@@ -0,0 +1,174 @@
+import { AgentExecutor } from 'langchain/agents';
+import type { Tool } from 'langchain/tools';
+import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
+import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
+import { OpenAI as OpenAIClient } from 'openai';
+
+import { NodeConnectionType, NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
+import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
+
+import { formatToOpenAIAssistantTool } from '../../helpers/utils';
+import { assistantRLC } from '../descriptions';
+
+const properties: INodeProperties[] = [
+ assistantRLC,
+ {
+ displayName: 'Prompt',
+ name: 'prompt',
+ type: 'options',
+ options: [
+ {
+ // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
+ name: 'Take from previous node automatically',
+ value: 'auto',
+ description: 'Looks for an input field called chatInput',
+ },
+ {
+ // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
+ name: 'Define below',
+ value: 'define',
+ description: 'Use an expression to reference data in previous nodes or enter static text',
+ },
+ ],
+ default: 'auto',
+ },
+ {
+ displayName: 'Text',
+ name: 'text',
+ type: 'string',
+ required: true,
+ default: '',
+ placeholder: 'e.g. Hello, how can you help me?',
+ typeOptions: {
+ rows: 2,
+ },
+ displayOptions: {
+ show: {
+ prompt: ['define'],
+ },
+ },
+ },
+ {
+ displayName: 'Connect your own custom n8n tools to this node on the canvas',
+ name: 'noticeTools',
+ type: 'notice',
+ default: '',
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ description: 'Additional options to add',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Base URL',
+ name: 'baseURL',
+ default: 'https://api.openai.com/v1',
+ description: 'Override the default base URL for the API',
+ type: 'string',
+ },
+ {
+ displayName: 'Max Retries',
+ name: 'maxRetries',
+ default: 2,
+ description: 'Maximum number of retries to attempt',
+ type: 'number',
+ },
+ {
+ displayName: 'Timeout',
+ name: 'timeout',
+ default: 10000,
+ description: 'Maximum amount of time a request is allowed to take in milliseconds',
+ type: 'number',
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['message'],
+ resource: ['assistant'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const credentials = await this.getCredentials('openAiApi');
+
+ const prompt = this.getNodeParameter('prompt', i) as string;
+
+ let input;
+ if (prompt === 'auto') {
+ input = this.evaluateExpression('{{ $json["chatInput"] }}', i) as string;
+ } else {
+ input = this.getNodeParameter('text', i) as string;
+ }
+
+ if (input === undefined) {
+ throw new NodeOperationError(this.getNode(), 'No prompt specified', {
+ description:
+ "Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). To use something else, change the 'Prompt' parameter",
+ });
+ }
+
+ const assistantId = this.getNodeParameter('assistantId', i, '', { extractValue: true }) as string;
+
+ const options = this.getNodeParameter('options', i, {}) as {
+ baseURL?: string;
+ maxRetries: number;
+ timeout: number;
+ };
+
+ const client = new OpenAIClient({
+ apiKey: credentials.apiKey as string,
+ maxRetries: options.maxRetries ?? 2,
+ timeout: options.timeout ?? 10000,
+ baseURL: options.baseURL,
+ });
+
+ const agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });
+
+ const tools = ((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
+
+ if (tools.length) {
+ const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? [];
+ const nativeToolsParsed: OpenAIToolType = [];
+
+ const assistant = await client.beta.assistants.retrieve(assistantId);
+
+ const useCodeInterpreter = assistant.tools.some((tool) => tool.type === 'code_interpreter');
+ if (useCodeInterpreter) {
+ nativeToolsParsed.push({
+ type: 'code_interpreter',
+ });
+ }
+
+ const useRetrieval = assistant.tools.some((tool) => tool.type === 'retrieval');
+ if (useRetrieval) {
+ nativeToolsParsed.push({
+ type: 'retrieval',
+ });
+ }
+
+ await client.beta.assistants.update(assistantId, {
+ tools: [...nativeToolsParsed, ...transformedConnectedTools],
+ });
+ }
+
+ const agentExecutor = AgentExecutor.fromAgentAndTools({
+ agent,
+ tools: tools ?? [],
+ });
+
+ const response = await agentExecutor.call({
+ content: input,
+ signal: this.getExecutionCancelSignal(),
+ timeout: options.timeout ?? 10000,
+ });
+
+ return [{ json: response, pairedItem: { item: i } }];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/update.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/update.operation.ts
new file mode 100644
index 0000000000000..1fe365b505808
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/update.operation.ts
@@ -0,0 +1,194 @@
+import type {
+ INodeProperties,
+ IExecuteFunctions,
+ INodeExecutionData,
+ IDataObject,
+} from 'n8n-workflow';
+import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+import { assistantRLC, modelRLC } from '../descriptions';
+
+const properties: INodeProperties[] = [
+ assistantRLC,
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Code Interpreter',
+ name: 'codeInterpreter',
+ type: 'boolean',
+ default: false,
+ description:
+ 'Whether to enable the code interpreter that allows the assistants to write and run Python code in a sandboxed execution environment, find more here',
+ },
+ {
+ displayName: 'Description',
+ name: 'description',
+ type: 'string',
+ default: '',
+ description: 'The description of the assistant. The maximum length is 512 characters.',
+ placeholder: 'e.g. My personal assistant',
+ },
+
+ {
+ // eslint-disable-next-line n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options
+ displayName: 'Files',
+ name: 'file_ids',
+ type: 'multiOptions',
+ // eslint-disable-next-line n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options
+ description:
+ 'The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant',
+ typeOptions: {
+ loadOptionsMethod: 'getFiles',
+ },
+ default: [],
+ hint: "Add more files by using the 'Upload a File' operation, any existing files not selected here will be removed.",
+ },
+ {
+ displayName: 'Instructions',
+ name: 'instructions',
+ type: 'string',
+ description:
+ 'The system instructions that the assistant uses. The maximum length is 32768 characters.',
+ default: '',
+ typeOptions: {
+ rows: 2,
+ },
+ },
+ {
+ displayName: 'Knowledge Retrieval',
+ name: 'knowledgeRetrieval',
+ type: 'boolean',
+ default: false,
+ description:
+ 'Whether to augment the assistant with knowledge from outside its model, such as proprietary product information or documents, find more here',
+ },
+ { ...modelRLC, required: false },
+ {
+ displayName: 'Name',
+ name: 'name',
+ type: 'string',
+ default: '',
+ description: 'The name of the assistant. The maximum length is 256 characters.',
+ placeholder: 'e.g. My Assistant',
+ },
+
+ {
+ displayName: 'Remove All Custom Tools (Functions)',
+ name: 'removeCustomTools',
+ type: 'boolean',
+ default: false,
+ description: 'Whether to remove all custom tools (functions) from the assistant',
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['update'],
+ resource: ['assistant'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const assistantId = this.getNodeParameter('assistantId', i, '', { extractValue: true }) as string;
+ const options = this.getNodeParameter('options', i, {});
+
+ const {
+ modelId,
+ name,
+ instructions,
+ codeInterpreter,
+ knowledgeRetrieval,
+ file_ids,
+ removeCustomTools,
+ } = options;
+
+ const assistantDescription = options.description as string;
+
+ const body: IDataObject = {};
+
+ if (file_ids) {
+ if ((file_ids as IDataObject[]).length > 20) {
+ throw new NodeOperationError(
+ this.getNode(),
+ 'The maximum number of files that can be attached to the assistant is 20',
+ { itemIndex: i },
+ );
+ }
+
+ body.file_ids = file_ids;
+ }
+
+ if (modelId) {
+ body.model = this.getNodeParameter('options.modelId', i, '', { extractValue: true }) as string;
+ }
+
+ if (name) {
+ body.name = name;
+ }
+
+ if (assistantDescription) {
+ body.description = assistantDescription;
+ }
+
+ if (instructions) {
+ body.instructions = instructions;
+ }
+
+ let tools =
+ ((
+ await apiRequest.call(this, 'GET', `/assistants/${assistantId}`, {
+ headers: {
+ 'OpenAI-Beta': 'assistants=v1',
+ },
+ })
+ ).tools as IDataObject[]) || [];
+
+ if (codeInterpreter && !tools.find((tool) => tool.type === 'code_interpreter')) {
+ tools.push({
+ type: 'code_interpreter',
+ });
+ }
+
+ if (codeInterpreter === false && tools.find((tool) => tool.type === 'code_interpreter')) {
+ tools = tools.filter((tool) => tool.type !== 'code_interpreter');
+ }
+
+ if (knowledgeRetrieval && !tools.find((tool) => tool.type === 'retrieval')) {
+ tools.push({
+ type: 'retrieval',
+ });
+ }
+
+ if (knowledgeRetrieval === false && tools.find((tool) => tool.type === 'retrieval')) {
+ tools = tools.filter((tool) => tool.type !== 'retrieval');
+ }
+
+ if (removeCustomTools) {
+ tools = tools.filter((tool) => tool.type !== 'function');
+ }
+
+ body.tools = tools;
+
+ const response = await apiRequest.call(this, 'POST', `/assistants/${assistantId}`, {
+ body,
+ headers: {
+ 'OpenAI-Beta': 'assistants=v1',
+ },
+ });
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/generate.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/generate.operation.ts
new file mode 100644
index 0000000000000..2a1c59090d1db
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/generate.operation.ts
@@ -0,0 +1,187 @@
+import type {
+ INodeProperties,
+ IExecuteFunctions,
+ IDataObject,
+ INodeExecutionData,
+} from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Model',
+ name: 'model',
+ type: 'options',
+ default: 'tts-1',
+ options: [
+ {
+ name: 'TTS-1',
+ value: 'tts-1',
+ },
+ {
+ name: 'TTS-1-HD',
+ value: 'tts-1-hd',
+ },
+ ],
+ },
+ {
+ displayName: 'Text Input',
+ name: 'input',
+ type: 'string',
+ placeholder: 'e.g. The quick brown fox jumped over the lazy dog',
+ description: 'The text to generate audio for. The maximum length is 4096 characters.',
+ default: '',
+ typeOptions: {
+ rows: 2,
+ },
+ },
+ {
+ displayName: 'Voice',
+ name: 'voice',
+ type: 'options',
+ default: 'alloy',
+ description: 'The voice to use when generating the audio',
+ options: [
+ {
+ name: 'Alloy',
+ value: 'alloy',
+ },
+ {
+ name: 'Echo',
+ value: 'echo',
+ },
+ {
+ name: 'Fable',
+ value: 'fable',
+ },
+ {
+ name: 'Nova',
+ value: 'nova',
+ },
+ {
+ name: 'Onyx',
+ value: 'onyx',
+ },
+ {
+ name: 'Shimmer',
+ value: 'shimmer',
+ },
+ ],
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Response Format',
+ name: 'response_format',
+ type: 'options',
+ default: 'mp3',
+ options: [
+ {
+ name: 'MP3',
+ value: 'mp3',
+ },
+ {
+ name: 'OPUS',
+ value: 'opus',
+ },
+ {
+ name: 'AAC',
+ value: 'aac',
+ },
+ {
+ name: 'FLAC',
+ value: 'flac',
+ },
+ ],
+ },
+ {
+ displayName: 'Audio Speed',
+ name: 'speed',
+ type: 'number',
+ default: 1,
+ typeOptions: {
+ minValue: 0.25,
+ maxValue: 4,
+ numberPrecision: 1,
+ },
+ },
+ {
+ displayName: 'Put Output in Field',
+ name: 'binaryPropertyOutput',
+ type: 'string',
+ default: 'data',
+ hint: 'The name of the output field to put the binary file data in',
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['generate'],
+ resource: ['audio'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const model = this.getNodeParameter('model', i) as string;
+ const input = this.getNodeParameter('input', i) as string;
+ const voice = this.getNodeParameter('voice', i) as string;
+ let response_format = 'mp3';
+ let speed = 1;
+
+ const options = this.getNodeParameter('options', i, {});
+
+ if (options.response_format) {
+ response_format = options.response_format as string;
+ }
+
+ if (options.speed) {
+ speed = options.speed as number;
+ }
+
+ const body: IDataObject = {
+ model,
+ input,
+ voice,
+ response_format,
+ speed,
+ };
+
+ const option = {
+ useStream: true,
+ returnFullResponse: true,
+ encoding: 'arraybuffer',
+ json: false,
+ };
+
+ const response = await apiRequest.call(this, 'POST', '/audio/speech', { body, option });
+
+ const binaryData = await this.helpers.prepareBinaryData(
+ response,
+ `audio.${response_format}`,
+ `audio/${response_format}`,
+ );
+
+ const binaryPropertyOutput = (options.binaryPropertyOutput as string) || 'data';
+
+ const newItem: INodeExecutionData = {
+ json: {
+ ...binaryData,
+ data: undefined,
+ },
+ pairedItem: { item: i },
+ binary: {
+ [binaryPropertyOutput]: binaryData,
+ },
+ };
+
+ return [newItem];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/index.ts
new file mode 100644
index 0000000000000..c6e4fb5f613ea
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/index.ts
@@ -0,0 +1,57 @@
+import type { INodeProperties } from 'n8n-workflow';
+
+import * as generate from './generate.operation';
+import * as transcribe from './transcribe.operation';
+import * as translate from './translate.operation';
+
+export { generate, transcribe, translate };
+
+export const description: INodeProperties[] = [
+ {
+ displayName: 'Operation',
+ name: 'operation',
+ type: 'options',
+ noDataExpression: true,
+ options: [
+ {
+ name: 'Generate Audio',
+ value: 'generate',
+ action: 'Generate audio',
+ description: 'Creates audio from a text prompt',
+ },
+ {
+ name: 'Transcribe a Recording',
+ value: 'transcribe',
+ action: 'Transcribe a recording',
+ description: 'Transcribes audio into the text',
+ },
+ {
+ name: 'Translate a Recording',
+ value: 'translate',
+ action: 'Translate a recording',
+ description: 'Translate audio into text in the English language',
+ },
+ ],
+ default: 'generate',
+ displayOptions: {
+ show: {
+ resource: ['audio'],
+ },
+ },
+ },
+ {
+ displayName: 'OpenAI API limits the size of the audio file to 25 MB',
+ name: 'fileSizeLimitNotice',
+ type: 'notice',
+ default: ' ',
+ displayOptions: {
+ show: {
+ resource: ['audio'],
+ operation: ['translate', 'transcribe'],
+ },
+ },
+ },
+ ...generate.description,
+ ...transcribe.description,
+ ...translate.description,
+];
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/transcribe.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/transcribe.operation.ts
new file mode 100644
index 0000000000000..1f419066ad3e6
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/transcribe.operation.ts
@@ -0,0 +1,95 @@
+import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+
+import FormData from 'form-data';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Input Data Field Name',
+ name: 'binaryPropertyName',
+ type: 'string',
+ default: 'data',
+ placeholder: 'e.g. data',
+ hint: 'The name of the input field containing the binary file data to be processed',
+ description:
+ 'Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm',
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Language of the Audio File',
+ name: 'language',
+ type: 'string',
+ description:
+ 'The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.',
+ default: '',
+ },
+ {
+ displayName: 'Output Randomness (Temperature)',
+ name: 'temperature',
+ type: 'number',
+ default: 0,
+ typeOptions: {
+ minValue: 0,
+ maxValue: 1,
+ numberPrecision: 1,
+ },
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['transcribe'],
+ resource: ['audio'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const model = 'whisper-1';
+ const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
+ const options = this.getNodeParameter('options', i, {});
+
+ const formData = new FormData();
+
+ formData.append('model', model);
+
+ if (options.language) {
+ formData.append('language', options.language);
+ }
+
+ if (options.temperature) {
+ formData.append('temperature', options.temperature.toString());
+ }
+
+ const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);
+ const dataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
+
+ formData.append('file', dataBuffer, {
+ filename: binaryData.fileName,
+ contentType: binaryData.mimeType,
+ });
+
+ const response = await apiRequest.call(this, 'POST', '/audio/transcriptions', {
+ option: { formData },
+ headers: {
+ 'Content-Type': 'multipart/form-data',
+ },
+ });
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/translate.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/translate.operation.ts
new file mode 100644
index 0000000000000..3ba7e53c64991
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/audio/translate.operation.ts
@@ -0,0 +1,82 @@
+import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+import FormData from 'form-data';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Input Data Field Name',
+ name: 'binaryPropertyName',
+ type: 'string',
+ default: 'data',
+ hint: 'The name of the input field containing the binary file data to be processed',
+ placeholder: 'e.g. data',
+ description:
+ 'Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm',
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Output Randomness (Temperature)',
+ name: 'temperature',
+ type: 'number',
+ default: 0,
+ typeOptions: {
+ minValue: 0,
+ maxValue: 1,
+ numberPrecision: 1,
+ },
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['translate'],
+ resource: ['audio'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const model = 'whisper-1';
+ const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
+ const options = this.getNodeParameter('options', i, {});
+
+ const formData = new FormData();
+
+ formData.append('model', model);
+
+ if (options.temperature) {
+ formData.append('temperature', options.temperature.toString());
+ }
+
+ const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);
+ const dataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
+
+ formData.append('file', dataBuffer, {
+ filename: binaryData.fileName,
+ contentType: binaryData.mimeType,
+ });
+
+ const response = await apiRequest.call(this, 'POST', '/audio/translations', {
+ option: { formData },
+ headers: {
+ 'Content-Type': 'multipart/form-data',
+ },
+ });
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/descriptions.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/descriptions.ts
new file mode 100644
index 0000000000000..8416e794d2bbc
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/descriptions.ts
@@ -0,0 +1,53 @@
+import type { INodeProperties } from 'n8n-workflow';
+
+export const modelRLC: INodeProperties = {
+ displayName: 'Model',
+ name: 'modelId',
+ type: 'resourceLocator',
+ default: { mode: 'list', value: '' },
+ required: true,
+ modes: [
+ {
+ displayName: 'From List',
+ name: 'list',
+ type: 'list',
+ typeOptions: {
+ searchListMethod: 'modelSearch',
+ searchable: true,
+ },
+ },
+ {
+ displayName: 'ID',
+ name: 'id',
+ type: 'string',
+ placeholder: 'e.g. gpt-4',
+ },
+ ],
+};
+
+export const assistantRLC: INodeProperties = {
+ displayName: 'Assistant',
+ name: 'assistantId',
+ type: 'resourceLocator',
+ description:
+ 'Assistant to respond to the message. You can add, modify or remove assistants in the playground.',
+ default: { mode: 'list', value: '' },
+ required: true,
+ modes: [
+ {
+ displayName: 'From List',
+ name: 'list',
+ type: 'list',
+ typeOptions: {
+ searchListMethod: 'assistantSearch',
+ searchable: true,
+ },
+ },
+ {
+ displayName: 'ID',
+ name: 'id',
+ type: 'string',
+ placeholder: 'e.g. asst_abc123',
+ },
+ ],
+};
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/deleteFile.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/deleteFile.operation.ts
new file mode 100644
index 0000000000000..8ffc438565675
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/deleteFile.operation.ts
@@ -0,0 +1,62 @@
+import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'File',
+ name: 'fileId',
+ type: 'resourceLocator',
+ default: { mode: 'list', value: '' },
+ required: true,
+ modes: [
+ {
+ displayName: 'From List',
+ name: 'list',
+ type: 'list',
+ typeOptions: {
+ searchListMethod: 'fileSearch',
+ searchable: true,
+ },
+ },
+ {
+ displayName: 'ID',
+ name: 'id',
+ type: 'string',
+ validation: [
+ {
+ type: 'regex',
+ properties: {
+ regex: 'file-[a-zA-Z0-9]',
+ errorMessage: 'Not a valid File ID',
+ },
+ },
+ ],
+ placeholder: 'e.g. file-1234567890',
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['deleteFile'],
+ resource: ['file'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const fileId = this.getNodeParameter('fileId', i, '', { extractValue: true });
+
+ const response = await apiRequest.call(this, 'DELETE', `/files/${fileId}`);
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/index.ts
new file mode 100644
index 0000000000000..809c1ec94751f
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/index.ts
@@ -0,0 +1,46 @@
+import type { INodeProperties } from 'n8n-workflow';
+
+import * as upload from './upload.operation';
+import * as deleteFile from './deleteFile.operation';
+import * as list from './list.operation';
+
+export { upload, deleteFile, list };
+
+export const description: INodeProperties[] = [
+ {
+ displayName: 'Operation',
+ name: 'operation',
+ type: 'options',
+ noDataExpression: true,
+ options: [
+ {
+ name: 'Delete a File',
+ value: 'deleteFile',
+ action: 'Delete a file',
+ description: 'Delete a file from the server',
+ },
+ {
+ name: 'List Files',
+ value: 'list',
+ action: 'List files',
+ description: "Returns a list of files that belong to the user's organization",
+ },
+ {
+ name: 'Upload a File',
+ value: 'upload',
+ action: 'Upload a file',
+ description: 'Upload a file that can be used across various endpoints',
+ },
+ ],
+ default: 'upload',
+ displayOptions: {
+ show: {
+ resource: ['file'],
+ },
+ },
+ },
+
+ ...upload.description,
+ ...deleteFile.description,
+ ...list.description,
+];
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/list.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/list.operation.ts
new file mode 100644
index 0000000000000..305267613e461
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/list.operation.ts
@@ -0,0 +1,67 @@
+import type {
+ IDataObject,
+ INodeProperties,
+ IExecuteFunctions,
+ INodeExecutionData,
+} from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Purpose',
+ name: 'purpose',
+ type: 'options',
+ default: 'any',
+ description: 'Only return files with the given purpose',
+ options: [
+ {
+ name: 'Any [Default]',
+ value: 'any',
+ },
+ {
+ name: 'Assistants',
+ value: 'assistants',
+ },
+ {
+ name: 'Fine-Tune',
+ value: 'fine-tune',
+ },
+ ],
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['list'],
+ resource: ['file'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const options = this.getNodeParameter('options', i, {});
+ const qs: IDataObject = {};
+
+ if (options.purpose && options.purpose !== 'any') {
+ qs.purpose = options.purpose as string;
+ }
+
+ const { data } = await apiRequest.call(this, 'GET', '/files', { qs });
+
+ return (data || []).map((file: IDataObject) => ({
+ json: file,
+ pairedItem: { item: i },
+ }));
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/upload.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/upload.operation.ts
new file mode 100644
index 0000000000000..ecd1e4c6a2b4c
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/file/upload.operation.ts
@@ -0,0 +1,98 @@
+import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
+import { updateDisplayOptions, NodeOperationError } from 'n8n-workflow';
+import FormData from 'form-data';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Input Data Field Name',
+ name: 'binaryPropertyName',
+ type: 'string',
+ default: 'data',
+ hint: 'The name of the input field containing the binary file data to be processed',
+ placeholder: 'e.g. data',
+ description:
+ 'Name of the binary property which contains the file. The size of individual files can be a maximum of 512 MB or 2 million tokens for Assistants.',
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Purpose',
+ name: 'purpose',
+ type: 'options',
+ default: 'assistants',
+ description:
+ "The intended purpose of the uploaded file, the 'Fine-tuning' only supports .jsonl files",
+ options: [
+ {
+ name: 'Assistants',
+ value: 'assistants',
+ },
+ {
+ name: 'Fine-Tune',
+ value: 'fine-tune',
+ },
+ ],
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['upload'],
+ resource: ['file'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
+ const options = this.getNodeParameter('options', i, {});
+
+ const formData = new FormData();
+
+ formData.append('purpose', options.purpose || 'assistants');
+
+ const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);
+ const dataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
+
+ formData.append('file', dataBuffer, {
+ filename: binaryData.fileName,
+ contentType: binaryData.mimeType,
+ });
+
+ try {
+ const response = await apiRequest.call(this, 'POST', '/files', {
+ option: { formData },
+ headers: {
+ 'Content-Type': 'multipart/form-data',
+ },
+ });
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+ } catch (error) {
+ if (
+ error.message.includes('Bad request') &&
+ error.description &&
+ error.description.includes('Expected file to have JSONL format')
+ ) {
+ throw new NodeOperationError(this.getNode(), 'The file content is not in JSONL format', {
+ description:
+ 'Fine-tuning accepts only files in JSONL format, where every line is a valid JSON dictionary',
+ });
+ }
+ throw error;
+ }
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/analyze.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/analyze.operation.ts
new file mode 100644
index 0000000000000..b29019602afb2
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/analyze.operation.ts
@@ -0,0 +1,211 @@
+import type {
+ INodeProperties,
+ IExecuteFunctions,
+ INodeExecutionData,
+ IDataObject,
+} from 'n8n-workflow';
+import { updateDisplayOptions, NodeOperationError } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Text Input',
+ name: 'text',
+ type: 'string',
+ placeholder: "e.g. What's in this image?",
+ default: "What's in this image?",
+ typeOptions: {
+ rows: 2,
+ },
+ },
+ {
+ displayName: 'Input Type',
+ name: 'inputType',
+ type: 'options',
+ default: 'url',
+ options: [
+ {
+ name: 'Image URL(s)',
+ value: 'url',
+ },
+ {
+ name: 'Binary File(s)',
+ value: 'base64',
+ },
+ ],
+ },
+ {
+ displayName: 'URL(s)',
+ name: 'imageUrls',
+ type: 'string',
+ placeholder: 'e.g. https://example.com/image.jpeg',
+ description: 'URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma',
+ default: '',
+ displayOptions: {
+ show: {
+ inputType: ['url'],
+ },
+ },
+ },
+ {
+ displayName: 'Input Data Field Name',
+ name: 'binaryPropertyName',
+ type: 'string',
+ default: 'data',
+ placeholder: 'e.g. data',
+ hint: 'The name of the input field containing the binary file data to be processed',
+ description: 'Name of the binary property which contains the image(s)',
+ displayOptions: {
+ show: {
+ inputType: ['base64'],
+ },
+ },
+ },
+ {
+ displayName: 'Simplify Output',
+ name: 'simplify',
+ type: 'boolean',
+ default: true,
+ description: 'Whether to simplify the response or not',
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Detail',
+ name: 'detail',
+ type: 'options',
+ default: 'auto',
+ options: [
+ {
+ name: 'Auto',
+ value: 'auto',
+ description:
+ 'Model will look at the image input size and decide if it should use the low or high setting',
+ },
+ {
+ name: 'Low',
+ value: 'low',
+ description: 'Return faster responses and consume fewer tokens',
+ },
+ {
+ name: 'High',
+ value: 'high',
+ description: 'Return more detailed responses, consumes more tokens',
+ },
+ ],
+ },
+ {
+ displayName: 'Length of Description (Max Tokens)',
+ description: 'Fewer tokens will result in shorter, less detailed image description',
+ name: 'maxTokens',
+ type: 'number',
+ default: 300,
+ typeOptions: {
+ minValue: 1,
+ },
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['analyze'],
+ resource: ['image'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const model = 'gpt-4-vision-preview';
+ const text = this.getNodeParameter('text', i, '') as string;
+ const inputType = this.getNodeParameter('inputType', i) as string;
+ const options = this.getNodeParameter('options', i, {});
+
+ const content: IDataObject[] = [
+ {
+ type: 'text',
+ text,
+ },
+ ];
+
+ const detail = (options.detail as string) || 'auto';
+
+ if (inputType === 'url') {
+ const imageUrls = (this.getNodeParameter('imageUrls', i) as string)
+ .split(',')
+ .map((url) => url.trim());
+
+ for (const url of imageUrls) {
+ content.push({
+ type: 'image_url',
+ image_url: {
+ url,
+ detail,
+ },
+ });
+ }
+ } else {
+    // Accept a comma-separated list of binary property names (cast matches the 'text' param handling above)
+    const binaryPropertyName = (this.getNodeParameter('binaryPropertyName', i) as string)
+      .split(',')
+      .map((propertyName) => propertyName.trim());
+
+    for (const propertyName of binaryPropertyName) {
+      const binaryData = this.helpers.assertBinaryData(i, propertyName);
+      if (!binaryData) {
+        throw new NodeOperationError(this.getNode(), 'No binary data exists on item!');
+      }
+
+      let fileBase64;
+      if (binaryData.id) {
+        // Externally stored binary: stream it in and re-encode as base64
+        const chunkSize = 256 * 1024;
+        const stream = await this.helpers.getBinaryStream(binaryData.id, chunkSize);
+        const buffer = await this.helpers.binaryToBuffer(stream);
+        fileBase64 = buffer.toString('base64');
+      } else {
+        fileBase64 = binaryData.data;
+      }
+
+ content.push({
+ type: 'image_url',
+ image_url: {
+ url: `data:${binaryData.mimeType};base64,${fileBase64}`,
+ detail,
+ },
+ });
+ }
+ }
+
+ const body = {
+ model,
+ messages: [
+ {
+ role: 'user',
+ content,
+ },
+ ],
+ max_tokens: (options.maxTokens as number) || 300,
+ };
+
+ let response = await apiRequest.call(this, 'POST', '/chat/completions', { body });
+
+ const simplify = this.getNodeParameter('simplify', i) as boolean;
+
+ if (simplify && response.choices) {
+ response = { content: response.choices[0].message.content };
+ }
+
+ return [
+ {
+ json: response,
+ pairedItem: { item: i },
+ },
+ ];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/generate.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/generate.operation.ts
new file mode 100644
index 0000000000000..48a03158b5e85
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/generate.operation.ts
@@ -0,0 +1,244 @@
+import type {
+ INodeProperties,
+ IExecuteFunctions,
+ INodeExecutionData,
+ IDataObject,
+} from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Model',
+ name: 'model',
+ type: 'options',
+ default: 'dall-e-3',
+ description: 'The model to use for image generation',
+ options: [
+ {
+ name: 'DALL-E-2',
+ value: 'dall-e-2',
+ },
+ {
+ name: 'DALL-E-3',
+ value: 'dall-e-3',
+ },
+ ],
+ },
+ {
+ displayName: 'Prompt',
+ name: 'prompt',
+ type: 'string',
+ placeholder: 'e.g. A cute cat eating a dinosaur',
+ description:
+ 'A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.',
+ default: '',
+ typeOptions: {
+ rows: 2,
+ },
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Number of Images',
+ name: 'n',
+ default: 1,
+ description: 'Number of images to generate',
+ type: 'number',
+ typeOptions: {
+ minValue: 1,
+ maxValue: 10,
+ },
+ displayOptions: {
+ show: {
+ '/model': ['dall-e-2'],
+ },
+ },
+ },
+ {
+ displayName: 'Quality',
+ name: 'quality',
+ type: 'options',
+ description:
+ 'The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image',
+ options: [
+ {
+ name: 'HD',
+ value: 'hd',
+ },
+ {
+ name: 'Standard',
+ value: 'standard',
+ },
+ ],
+ displayOptions: {
+ show: {
+ '/model': ['dall-e-3'],
+ },
+ },
+ default: 'standard',
+ },
+ {
+ displayName: 'Resolution',
+ name: 'size',
+ type: 'options',
+ options: [
+ {
+ name: '256x256',
+ value: '256x256',
+ },
+ {
+ name: '512x512',
+ value: '512x512',
+ },
+ {
+ name: '1024x1024',
+ value: '1024x1024',
+ },
+ ],
+ displayOptions: {
+ show: {
+ '/model': ['dall-e-2'],
+ },
+ },
+ default: '1024x1024',
+ },
+ {
+ displayName: 'Resolution',
+ name: 'size',
+ type: 'options',
+ options: [
+ {
+ name: '1024x1024',
+ value: '1024x1024',
+ },
+ {
+ name: '1792x1024',
+ value: '1792x1024',
+ },
+ {
+ name: '1024x1792',
+ value: '1024x1792',
+ },
+ ],
+ displayOptions: {
+ show: {
+ '/model': ['dall-e-3'],
+ },
+ },
+ default: '1024x1024',
+ },
+ {
+ displayName: 'Style',
+ name: 'style',
+ type: 'options',
+ options: [
+ {
+ name: 'Natural',
+ value: 'natural',
+ description: 'Produce more natural looking images',
+ },
+ {
+ name: 'Vivid',
+ value: 'vivid',
+ description: 'Lean towards generating hyper-real and dramatic images',
+ },
+ ],
+ displayOptions: {
+ show: {
+ '/model': ['dall-e-3'],
+ },
+ },
+ default: 'vivid',
+ },
+ {
+ displayName: 'Respond with Image URL(s)',
+ name: 'returnImageUrls',
+ type: 'boolean',
+ default: false,
+ description: 'Whether to return image URL(s) instead of binary file(s)',
+ },
+ {
+ displayName: 'Put Output in Field',
+ name: 'binaryPropertyOutput',
+ type: 'string',
+ default: 'data',
+ hint: 'The name of the output field to put the binary file data in',
+ displayOptions: {
+ show: {
+ returnImageUrls: [false],
+ },
+ },
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['generate'],
+ resource: ['image'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const model = this.getNodeParameter('model', i) as string;
+ const prompt = this.getNodeParameter('prompt', i) as string;
+ const options = this.getNodeParameter('options', i, {});
+ let response_format = 'b64_json';
+ let binaryPropertyOutput = 'data';
+
+ if (options.returnImageUrls) {
+ response_format = 'url';
+ }
+
+ if (options.binaryPropertyOutput) {
+ binaryPropertyOutput = options.binaryPropertyOutput as string;
+ delete options.binaryPropertyOutput;
+ }
+
+ delete options.returnImageUrls;
+
+ const body: IDataObject = {
+ prompt,
+ model,
+ response_format,
+ ...options,
+ };
+
+ const { data } = await apiRequest.call(this, 'POST', '/images/generations', { body });
+
+ if (response_format === 'url') {
+ return ((data as IDataObject[]) || []).map((entry) => ({
+ json: entry,
+ pairedItem: { item: i },
+ }));
+ } else {
+ const returnData: INodeExecutionData[] = [];
+
+ for (const entry of data) {
+ const binaryData = await this.helpers.prepareBinaryData(
+ Buffer.from(entry.b64_json as string, 'base64'),
+ 'data',
+ );
+ returnData.push({
+ json: Object.assign({}, binaryData, {
+ data: undefined,
+ }),
+ binary: {
+ [binaryPropertyOutput]: binaryData,
+ },
+ pairedItem: { item: i },
+ });
+ }
+
+ return returnData;
+ }
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/index.ts
new file mode 100644
index 0000000000000..0516ae3ce4bee
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/image/index.ts
@@ -0,0 +1,37 @@
+import type { INodeProperties } from 'n8n-workflow';
+
+import * as generate from './generate.operation';
+import * as analyze from './analyze.operation';
+
+export { generate, analyze };
+
+export const description: INodeProperties[] = [
+ {
+ displayName: 'Operation',
+ name: 'operation',
+ type: 'options',
+ noDataExpression: true,
+ options: [
+ {
+ name: 'Analyze Image',
+ value: 'analyze',
+ action: 'Analyze image',
+ description: 'Take in images and answer questions about them',
+ },
+ {
+ name: 'Generate an Image',
+ value: 'generate',
+ action: 'Generate an image',
+ description: 'Creates an image from a text prompt',
+ },
+ ],
+ default: 'generate',
+ displayOptions: {
+ show: {
+ resource: ['image'],
+ },
+ },
+ },
+ ...generate.description,
+ ...analyze.description,
+];
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/node.type.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/node.type.ts
new file mode 100644
index 0000000000000..e998aa2c2613e
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/node.type.ts
@@ -0,0 +1,11 @@
+import type { AllEntities } from 'n8n-workflow';
+
+type NodeMap = {
+ assistant: 'message' | 'create' | 'deleteAssistant' | 'list' | 'update';
+ audio: 'generate' | 'transcribe' | 'translate';
+ file: 'upload' | 'deleteFile' | 'list';
+ image: 'generate' | 'analyze';
+ text: 'message' | 'classify';
+};
+
+export type OpenAiType = AllEntities<NodeMap>;
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/router.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/router.ts
new file mode 100644
index 0000000000000..ebd9fba2a4e71
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/router.ts
@@ -0,0 +1,65 @@
+import { NodeOperationError, type IExecuteFunctions, type INodeExecutionData } from 'n8n-workflow';
+
+import * as assistant from './assistant';
+import * as audio from './audio';
+import * as file from './file';
+import * as image from './image';
+import * as text from './text';
+
+import type { OpenAiType } from './node.type';
+
+export async function router(this: IExecuteFunctions) {
+ const returnData: INodeExecutionData[] = [];
+
+ const items = this.getInputData();
+ const resource = this.getNodeParameter('resource', 0);
+ const operation = this.getNodeParameter('operation', 0);
+
+ const openAiTypeData = {
+ resource,
+ operation,
+ } as OpenAiType;
+
+ let execute;
+ switch (openAiTypeData.resource) {
+ case 'assistant':
+ execute = assistant[openAiTypeData.operation].execute;
+ break;
+ case 'audio':
+ execute = audio[openAiTypeData.operation].execute;
+ break;
+ case 'file':
+ execute = file[openAiTypeData.operation].execute;
+ break;
+ case 'image':
+ execute = image[openAiTypeData.operation].execute;
+ break;
+ case 'text':
+ execute = text[openAiTypeData.operation].execute;
+ break;
+ default:
+ throw new NodeOperationError(
+ this.getNode(),
+ `The operation "${operation}" is not supported!`,
+ );
+ }
+
+ for (let i = 0; i < items.length; i++) {
+ try {
+ const responseData = await execute.call(this, i);
+
+ returnData.push(...responseData);
+ } catch (error) {
+ if (this.continueOnFail()) {
+ returnData.push({ json: { error: error.message }, pairedItem: { item: i } });
+ continue;
+ }
+ throw new NodeOperationError(this.getNode(), error, {
+ itemIndex: i,
+ description: error.description,
+ });
+ }
+ }
+
+ return [returnData];
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/classify.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/classify.operation.ts
new file mode 100644
index 0000000000000..338d4ade1e32c
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/classify.operation.ts
@@ -0,0 +1,83 @@
+import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
+import { updateDisplayOptions } from 'n8n-workflow';
+import { apiRequest } from '../../transport';
+
+const properties: INodeProperties[] = [
+ {
+ displayName: 'Text Input',
+ name: 'input',
+ type: 'string',
+ placeholder: 'e.g. Sample text goes here',
+    description: 'The input text to classify if it violates the moderation policy',
+ default: '',
+ typeOptions: {
+ rows: 2,
+ },
+ },
+ {
+ displayName: 'Simplify Output',
+ name: 'simplify',
+ type: 'boolean',
+ default: false,
+ description: 'Whether to return a simplified version of the response instead of the raw data',
+ },
+ {
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Use Stable Model',
+ name: 'useStableModel',
+ type: 'boolean',
+ default: false,
+ description:
+ 'Whether to use the stable version of the model instead of the latest version, accuracy may be slightly lower',
+ },
+ ],
+ },
+];
+
+const displayOptions = {
+ show: {
+ operation: ['classify'],
+ resource: ['text'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+ const input = this.getNodeParameter('input', i) as string;
+ const options = this.getNodeParameter('options', i);
+ const model = options.useStableModel ? 'text-moderation-stable' : 'text-moderation-latest';
+
+ const body = {
+ input,
+ model,
+ };
+
+ const { results } = await apiRequest.call(this, 'POST', '/moderations', { body });
+
+ if (!results) return [];
+
+ const simplify = this.getNodeParameter('simplify', i) as boolean;
+
+ if (simplify && results) {
+ return [
+ {
+ json: { flagged: results[0].flagged },
+ pairedItem: { item: i },
+ },
+ ];
+ } else {
+ return [
+ {
+ json: results[0],
+ pairedItem: { item: i },
+ },
+ ];
+ }
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/index.ts
new file mode 100644
index 0000000000000..0b9d344d7167e
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/index.ts
@@ -0,0 +1,39 @@
+import type { INodeProperties } from 'n8n-workflow';
+
+import * as classify from './classify.operation';
+import * as message from './message.operation';
+
+export { classify, message };
+
+export const description: INodeProperties[] = [
+ {
+ displayName: 'Operation',
+ name: 'operation',
+ type: 'options',
+ noDataExpression: true,
+ options: [
+ {
+ name: 'Message a Model',
+ value: 'message',
+ action: 'Message a model',
+ // eslint-disable-next-line n8n-nodes-base/node-param-description-excess-final-period
+ description: 'Create a completion with GPT 3, 4, etc.',
+ },
+ {
+ name: 'Classify Text for Violations',
+ value: 'classify',
+ action: 'Classify text for violations',
+ description: 'Check whether content complies with usage policies',
+ },
+ ],
+ default: 'message',
+ displayOptions: {
+ show: {
+ resource: ['text'],
+ },
+ },
+ },
+
+ ...classify.description,
+ ...message.description,
+];
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts
new file mode 100644
index 0000000000000..90c20f800480d
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts
@@ -0,0 +1,272 @@
+import type {
+ INodeProperties,
+ IExecuteFunctions,
+ INodeExecutionData,
+ IDataObject,
+} from 'n8n-workflow';
+import { NodeConnectionType, updateDisplayOptions } from 'n8n-workflow';
+
+import { apiRequest } from '../../transport';
+import type { ChatCompletion } from '../../helpers/interfaces';
+import type { Tool } from 'langchain/tools';
+import { formatToOpenAIAssistantTool } from '../../helpers/utils';
+import { modelRLC } from '../descriptions';
+
+// UI properties for the text → message ("Message a Model") operation.
+const properties: INodeProperties[] = [
+ modelRLC,
+ {
+ displayName: 'Messages',
+ name: 'messages',
+ type: 'fixedCollection',
+ typeOptions: {
+ sortable: true,
+ multipleValues: true,
+ },
+ placeholder: 'Add Message',
+ default: { values: [{ content: '' }] },
+ options: [
+ {
+ displayName: 'Values',
+ name: 'values',
+ values: [
+ {
+ displayName: 'Text',
+ name: 'content',
+ type: 'string',
+ description: 'The content of the message to be send',
+ default: '',
+ typeOptions: {
+ rows: 2,
+ },
+ },
+ {
+ displayName: 'Role',
+ name: 'role',
+ type: 'options',
+ description:
+ "Role in shaping the model's response, it tells the model how it should behave and interact with the user",
+ options: [
+ {
+ name: 'User',
+ value: 'user',
+ description: 'Send a message as a user and get a response from the model',
+ },
+ {
+ name: 'Assistant',
+ value: 'assistant',
+ description: 'Tell the model to adopt a specific tone or personality',
+ },
+ {
+ name: 'System',
+ value: 'system',
+ description:
+ "Usually used to set the model's behavior or context for the next user message",
+ },
+ ],
+ default: 'user',
+ },
+ ],
+ },
+ ],
+ },
+ {
+ displayName: 'Simplify Output',
+ name: 'simplify',
+ type: 'boolean',
+ default: true,
+ description: 'Whether to return a simplified version of the response instead of the raw data',
+ },
+ {
+ // Only shown for the two models that support OpenAI's JSON response mode
+ displayName: 'Output Content as JSON',
+ name: 'jsonOutput',
+ type: 'boolean',
+ description:
+ 'Whether to attempt to return the response in JSON format, supported by gpt-3.5-turbo-1106 and gpt-4-1106-preview',
+ default: false,
+ displayOptions: {
+ show: {
+ modelId: ['gpt-3.5-turbo-1106', 'gpt-4-1106-preview'],
+ },
+ },
+ },
+ {
+ displayName: 'Connect your own custom n8n tools to this node on the canvas',
+ name: 'noticeTools',
+ type: 'notice',
+ default: '',
+ },
+ {
+ // Optional OpenAI request parameters, spread into the request body by execute()
+ displayName: 'Options',
+ name: 'options',
+ placeholder: 'Add Option',
+ type: 'collection',
+ default: {},
+ options: [
+ {
+ displayName: 'Frequency Penalty',
+ name: 'frequency_penalty',
+ default: 0,
+ typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
+ description:
+ "Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim",
+ type: 'number',
+ },
+ {
+ displayName: 'Maximum Number of Tokens',
+ name: 'maxTokens',
+ default: 16,
+ description:
+ 'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
+ type: 'number',
+ typeOptions: {
+ maxValue: 32768,
+ },
+ },
+ {
+ displayName: 'Number of Completions',
+ name: 'n',
+ default: 1,
+ description:
+ 'How many completions to generate for each prompt. Note: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for max_tokens and stop.',
+ type: 'number',
+ },
+ {
+ displayName: 'Presence Penalty',
+ name: 'presence_penalty',
+ default: 0,
+ typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
+ description:
+ "Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics",
+ type: 'number',
+ },
+ {
+ displayName: 'Output Randomness (Temperature)',
+ name: 'temperature',
+ default: 1,
+ typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
+ description:
+ 'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or temperature but not both.',
+ type: 'number',
+ },
+ {
+ displayName: 'Output Randomness (Top P)',
+ name: 'topP',
+ default: 1,
+ typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
+ description:
+ 'An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
+ type: 'number',
+ },
+ ],
+ },
+];
+
+// Scope every property above to resource=text / operation=message
+const displayOptions = {
+ show: {
+ operation: ['message'],
+ resource: ['text'],
+ },
+};
+
+export const description = updateDisplayOptions(displayOptions, properties);
+
+/**
+ * Runs the text → message operation for input item `i`:
+ * sends the configured messages to /chat/completions, resolves any
+ * tool calls against the connected n8n tools, and returns the result.
+ *
+ * Note: the return-type generic was lost in transit; restored as
+ * Promise<INodeExecutionData[]> to match the imported type and the
+ * sibling operation modules.
+ */
+export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
+	const model = this.getNodeParameter('modelId', i, '', { extractValue: true });
+	let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[];
+	const options = this.getNodeParameter('options', i, {});
+	const jsonOutput = this.getNodeParameter('jsonOutput', i, false) as boolean;
+
+	let response_format;
+	if (jsonOutput) {
+		// OpenAI's JSON mode requires a system message instructing the model to emit JSON
+		response_format = { type: 'json_object' };
+		messages = [
+			{
+				role: 'system',
+				content: 'You are a helpful assistant designed to output JSON.',
+			},
+			...messages,
+		];
+	}
+
+	const externalTools =
+		((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
+
+	// Expose the connected n8n tools to the model in OpenAI's function-tool format
+	const tools = externalTools.length
+		? externalTools.map(formatToOpenAIAssistantTool)
+		: undefined;
+
+	const body: IDataObject = {
+		model,
+		messages,
+		tools,
+		response_format,
+		...options,
+	};
+
+	let response = (await apiRequest.call(this, 'POST', '/chat/completions', {
+		body,
+	})) as ChatCompletion;
+
+	if (!response) return [];
+
+	let toolCalls = response?.choices[0]?.message?.tool_calls;
+
+	// Tool-call loop: run each requested tool, append the results, and re-ask the
+	// model until it stops requesting tools. `body.messages` aliases `messages`,
+	// so the pushes below are included in every re-request.
+	while (toolCalls && toolCalls.length) {
+		messages.push(response.choices[0].message);
+
+		for (const toolCall of toolCalls) {
+			const functionName = toolCall.function.name;
+			const functionArgs = toolCall.function.arguments;
+
+			// If no connected tool matches the requested name, functionResponse
+			// stays undefined and an empty tool result is sent back
+			let functionResponse;
+			for (const tool of externalTools) {
+				if (tool.name === functionName) {
+					functionResponse = await tool.invoke(functionArgs);
+				}
+			}
+
+			if (typeof functionResponse === 'object') {
+				functionResponse = JSON.stringify(functionResponse);
+			}
+
+			messages.push({
+				tool_call_id: toolCall.id,
+				role: 'tool',
+				content: functionResponse,
+			});
+		}
+
+		response = (await apiRequest.call(this, 'POST', '/chat/completions', {
+			body,
+		})) as ChatCompletion;
+
+		toolCalls = response.choices[0].message.tool_calls;
+	}
+
+	if (response_format) {
+		// Best effort: replace each content string with its parsed JSON,
+		// keeping the raw string when parsing fails
+		response.choices = response.choices.map((choice) => {
+			try {
+				choice.message.content = JSON.parse(choice.message.content);
+			} catch (error) {}
+			return choice;
+		});
+	}
+
+	const simplify = this.getNodeParameter('simplify', i) as boolean;
+
+	const returnData: INodeExecutionData[] = [];
+
+	if (simplify) {
+		// One output item per choice, instead of the whole API envelope
+		for (const entry of response.choices) {
+			returnData.push({
+				json: entry,
+				pairedItem: { item: i },
+			});
+		}
+	} else {
+		returnData.push({ json: response, pairedItem: { item: i } });
+	}
+
+	return returnData;
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/versionDescription.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/versionDescription.ts
new file mode 100644
index 0000000000000..168452a0c11f9
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/versionDescription.ts
@@ -0,0 +1,127 @@
+/* eslint-disable n8n-nodes-base/node-filename-against-convention */
+import type { INodeTypeDescription } from 'n8n-workflow';
+import { NodeConnectionType } from 'n8n-workflow';
+
+import * as assistant from './assistant';
+import * as audio from './audio';
+import * as file from './file';
+import * as image from './image';
+import * as text from './text';
+
+// Builds the node subtitle for a resource/operation pair. Serialized into an
+// n8n expression (see `subtitle` below), so it must stay fully self-contained.
+const prettifyOperation = (resource: string, operation: string) => {
+	// Operations whose auto-generated label would read poorly get a fixed label
+	if (operation === 'deleteAssistant') return 'Delete Assistant';
+	if (operation === 'deleteFile') return 'Delete File';
+	if (operation === 'classify') return 'Classify Text';
+	if (operation === 'message' && resource === 'text') return 'Message Model';
+
+	const capitalize = (value: string) => value.charAt(0).toUpperCase() + value.slice(1);
+
+	// Audio transcribe/translate act on a recording, not on "audio"
+	let subject = resource;
+	if (operation === 'transcribe' || operation === 'translate') {
+		subject = 'recording';
+	}
+	// List operations are pluralized, e.g. "List Files"
+	if (operation === 'list') {
+		subject = `${subject}s`;
+	}
+
+	return `${capitalize(operation)} ${capitalize(subject)}`;
+};
+
+// Computes the node's input ports. Serialized into the `inputs` expression, so
+// it must remain self-contained apart from NodeConnectionType.
+const configureNodeInputs = (resource: string, operation: string) => {
+	// Chat-style "message" operations additionally accept connected n8n tools
+	const acceptsTools =
+		(resource === 'assistant' || resource === 'text') && operation === 'message';
+	if (acceptsTools) {
+		return [
+			{ type: NodeConnectionType.Main },
+			{ type: NodeConnectionType.AiTool, displayName: 'Tools' },
+		];
+	}
+	return [NodeConnectionType.Main];
+};
+
+// eslint-disable-next-line n8n-nodes-base/node-class-description-missing-subtitle
+// Node description for version 1 of the OpenAI vendor node.
+// `subtitle` and `inputs` embed the helper functions above as stringified
+// n8n expressions, evaluated against the current resource/operation.
+export const versionDescription: INodeTypeDescription = {
+ displayName: 'OpenAI',
+ name: 'openAi',
+ icon: 'file:openAi.svg',
+ group: ['transform'],
+ version: 1,
+ subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`,
+ description: 'Message an assistant or GPT, analyze images, generate audio, etc.',
+ defaults: {
+ name: 'OpenAI',
+ },
+ codex: {
+ alias: ['LangChain', 'ChatGPT', 'DallE'],
+ categories: ['AI'],
+ subcategories: {
+ AI: ['Agents', 'Miscellaneous'],
+ },
+ resources: {
+ primaryDocumentation: [
+ {
+ url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.openai/',
+ },
+ ],
+ },
+ },
+ inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation)}}`,
+ outputs: ['main'],
+ credentials: [
+ {
+ name: 'openAiApi',
+ required: true,
+ },
+ ],
+ properties: [
+ {
+ displayName: 'Resource',
+ name: 'resource',
+ type: 'options',
+ noDataExpression: true,
+ // eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
+ options: [
+ {
+ name: 'Assistant',
+ value: 'assistant',
+ },
+ {
+ name: 'Text',
+ value: 'text',
+ },
+ {
+ name: 'Image',
+ value: 'image',
+ },
+ {
+ name: 'Audio',
+ value: 'audio',
+ },
+ {
+ name: 'File',
+ value: 'file',
+ },
+ ],
+ default: 'text',
+ },
+ // Per-resource operation selectors and parameters
+ ...assistant.description,
+ ...audio.description,
+ ...file.description,
+ ...image.description,
+ ...text.description,
+ ],
+};
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/helpers/interfaces.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/helpers/interfaces.ts
new file mode 100644
index 0000000000000..1db7db6f742cb
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/helpers/interfaces.ts
@@ -0,0 +1,58 @@
+import type { IDataObject } from 'n8n-workflow';
+
+// Subset of the OpenAI POST /chat/completions response consumed by this node.
+export type ChatCompletion = {
+ id: string;
+ object: string;
+ created: number;
+ model: string;
+ choices: Array<{
+ index: number;
+ message: {
+ role: string;
+ content: string;
+ // Present when the model requests execution of one or more function tools
+ tool_calls?: Array<{
+ id: string;
+ type: 'function';
+ function: {
+ name: string;
+ // JSON-encoded argument string, as returned by the API
+ arguments: string;
+ };
+ }>;
+ };
+ finish_reason?: 'tool_calls';
+ }>;
+ usage: {
+ prompt_tokens: number;
+ completion_tokens: number;
+ total_tokens: number;
+ };
+ system_fingerprint: string;
+};
+
+// Message object from the Assistants (beta) threads API.
+export type ThreadMessage = {
+ id: string;
+ object: string;
+ created_at: number;
+ thread_id: string;
+ role: string;
+ content: Array<{
+ type: string;
+ text: {
+ value: string;
+ annotations: string[];
+ };
+ }>;
+ file_ids: string[];
+ assistant_id: string;
+ run_id: string;
+ metadata: IDataObject;
+};
+
+// Configuration for forwarding a tool call to an external HTTP API.
+// NOTE(review): no consumer is visible in this chunk — confirm where it is used.
+export type ExternalApiCallOptions = {
+ callExternalApi: boolean;
+ url: string;
+ path: string;
+ method: string;
+ requestOptions: IDataObject;
+ sendParametersIn: string;
+};
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/helpers/utils.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/helpers/utils.ts
new file mode 100644
index 0000000000000..575444193c2de
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/helpers/utils.ts
@@ -0,0 +1,47 @@
+import { zodToJsonSchema } from 'zod-to-json-schema';
+import type { OpenAI as OpenAIClient } from 'openai';
+import type { StructuredTool } from 'langchain/tools';
+
+// Copied from langchain(`langchain/src/tools/convert_to_openai.ts`)
+// since these functions are not exported
+
+/**
+ * Converts a LangChain `StructuredTool` into the shape OpenAI expects for
+ * ChatCompletion function definitions, deriving the `parameters` JSON schema
+ * from the tool's zod schema via `zodToJsonSchema`.
+ * (Ported from `langchain/src/tools/convert_to_openai.ts`, which does not
+ * export these helpers.)
+ */
+export function formatToOpenAIFunction(
+	tool: StructuredTool,
+): OpenAIClient.Chat.ChatCompletionCreateParams.Function {
+	const { name, description, schema } = tool;
+	return { name, description, parameters: zodToJsonSchema(schema) };
+}
+
+/**
+ * Wraps a LangChain `StructuredTool` as an OpenAI ChatCompletion tool
+ * (a function definition tagged with `type: 'function'`).
+ */
+export function formatToOpenAITool(tool: StructuredTool): OpenAIClient.Chat.ChatCompletionTool {
+	return {
+		type: 'function',
+		function: {
+			name: tool.name,
+			description: tool.description,
+			parameters: zodToJsonSchema(tool.schema),
+		},
+	};
+}
+
+/**
+ * Wraps a LangChain `StructuredTool` as an OpenAI Assistants-API function
+ * tool, with the parameter schema generated from the tool's zod schema.
+ */
+export function formatToOpenAIAssistantTool(
+	tool: StructuredTool,
+): OpenAIClient.Beta.AssistantCreateParams.AssistantToolsFunction {
+	const fn = {
+		name: tool.name,
+		description: tool.description,
+		parameters: zodToJsonSchema(tool.schema),
+	};
+	return { type: 'function', function: fn };
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/index.ts
new file mode 100644
index 0000000000000..073c80a2eacca
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/index.ts
@@ -0,0 +1,2 @@
+export * as listSearch from './listSearch';
+export * as loadOptions from './loadOptions';
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/listSearch.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/listSearch.ts
new file mode 100644
index 0000000000000..ae46fde099947
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/listSearch.ts
@@ -0,0 +1,119 @@
+import type {
+ IDataObject,
+ ILoadOptionsFunctions,
+ INodeListSearchItems,
+ INodeListSearchResult,
+} from 'n8n-workflow';
+
+import { apiRequest } from '../transport';
+
+/**
+ * List-search handler for the file resource locator: lists uploaded files,
+ * optionally filtered by a case-insensitive substring match on the filename.
+ * (Return-type generic was lost in transit; restored as
+ * Promise<INodeListSearchResult> per the imported type.)
+ */
+export async function fileSearch(
+	this: ILoadOptionsFunctions,
+	filter?: string,
+): Promise<INodeListSearchResult> {
+	const { data } = await apiRequest.call(this, 'GET', '/files');
+
+	let files = (data as IDataObject[]) || [];
+	if (filter) {
+		// Files without a filename never match, same as before
+		files = files.filter((file) =>
+			(file.filename as string)?.toLowerCase().includes(filter.toLowerCase()),
+		);
+	}
+
+	return {
+		results: files.map((file) => ({
+			name: file.filename as string,
+			value: file.id as string,
+		})),
+	};
+}
+
+/**
+ * List-search handler for the model resource locator: lists GPT chat models,
+ * optionally filtered by a case-insensitive substring of the model ID, sorted
+ * by display name. (Return-type generic restored: Promise<INodeListSearchResult>.)
+ */
+export async function modelSearch(
+	this: ILoadOptionsFunctions,
+	filter?: string,
+): Promise<INodeListSearchResult> {
+	let { data } = await apiRequest.call(this, 'GET', '/models');
+
+	// Only chat-capable GPT models are offered
+	data = data?.filter((model: IDataObject) => (model.id as string).startsWith('gpt-'));
+
+	let models = (data as IDataObject[]) || [];
+	if (filter) {
+		models = models.filter((model) =>
+			(model.id as string)?.toLowerCase().includes(filter.toLowerCase()),
+		);
+	}
+
+	const results: INodeListSearchItems[] = models
+		.map((model) => ({
+			name: (model.id as string).toUpperCase(),
+			value: model.id as string,
+		}))
+		.sort((a, b) => a.name.localeCompare(b.name));
+
+	return { results };
+}
+
+/**
+ * List-search handler for the assistant resource locator. Pages through
+ * /assistants 100 at a time using the API's `after`/`last_id` cursor, and
+ * optionally filters by a case-insensitive substring of the assistant name.
+ *
+ * Fixes: (1) return-type generic restored as Promise<INodeListSearchResult>;
+ * (2) `paginationToken` was previously only returned on the unfiltered path,
+ * so filtered searches could never page past the first 100 assistants.
+ */
+export async function assistantSearch(
+	this: ILoadOptionsFunctions,
+	filter?: string,
+	paginationToken?: string,
+): Promise<INodeListSearchResult> {
+	const { data, has_more, last_id } = await apiRequest.call(this, 'GET', '/assistants', {
+		headers: {
+			'OpenAI-Beta': 'assistants=v1',
+		},
+		qs: {
+			limit: 100,
+			after: paginationToken,
+		},
+	});
+
+	// Only hand the cursor back while the API reports more pages
+	if (has_more === true) {
+		paginationToken = last_id;
+	} else {
+		paginationToken = undefined;
+	}
+
+	let assistants = (data as IDataObject[]) || [];
+	if (filter) {
+		assistants = assistants.filter((assistant) =>
+			(assistant.name as string)?.toLowerCase().includes(filter.toLowerCase()),
+		);
+	}
+
+	return {
+		results: assistants.map((assistant) => ({
+			name: assistant.name as string,
+			value: assistant.id as string,
+		})),
+		paginationToken,
+	};
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/loadOptions.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/loadOptions.ts
new file mode 100644
index 0000000000000..a25f24c6988cd
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/methods/loadOptions.ts
@@ -0,0 +1,17 @@
+import type { ILoadOptionsFunctions, INodePropertyOptions } from 'n8n-workflow';
+import { apiRequest } from '../transport';
+
+/**
+ * loadOptions handler: lists files uploaded with purpose 'assistants' as
+ * dropdown options (label = filename, value = file ID).
+ * (Return-type generic was lost in transit; restored as
+ * Promise<INodePropertyOptions[]> per the imported type.)
+ */
+export async function getFiles(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
+	const { data } = await apiRequest.call(this, 'GET', '/files', { qs: { purpose: 'assistants' } });
+
+	const returnData: INodePropertyOptions[] = [];
+
+	for (const file of data || []) {
+		returnData.push({
+			name: file.filename as string,
+			value: file.id as string,
+		});
+	}
+
+	return returnData;
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/openAi.svg b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/openAi.svg
new file mode 100644
index 0000000000000..4f8812da72e68
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/openAi.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/test/OpenAi.node.test.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/test/OpenAi.node.test.ts
new file mode 100644
index 0000000000000..a105cdf887ff4
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/test/OpenAi.node.test.ts
@@ -0,0 +1,522 @@
+import * as assistant from '../actions/assistant';
+import * as audio from '../actions/audio';
+import * as file from '../actions/file';
+import * as image from '../actions/image';
+import * as text from '../actions/text';
+
+import type { IDataObject, IExecuteFunctions } from 'n8n-workflow';
+import * as transport from '../transport';
+
+import get from 'lodash/get';
+
+// Builds a minimal IExecuteFunctions stub for the operation tests:
+// node parameters are resolved from the supplied object via lodash get
+// (supports dotted paths like 'messages.values'), no tool connections are
+// provided, and binary helpers return canned values.
+const createExecuteFunctionsMock = (parameters: IDataObject) => {
+ const nodeParameters = parameters;
+ return {
+ getNodeParameter(parameter: string) {
+ return get(nodeParameters, parameter);
+ },
+ getNode() {
+ return {};
+ },
+ getInputConnectionData() {
+ return undefined;
+ },
+ helpers: {
+ prepareBinaryData() {
+ return {};
+ },
+ assertBinaryData() {
+ return {
+ filename: 'filenale.flac',
+ contentType: 'audio/flac',
+ };
+ },
+ getBinaryDataBuffer() {
+ return 'data buffer data';
+ },
+ },
+ } as unknown as IExecuteFunctions;
+};
+
+// Assistant resource: create / delete / list / update operations, with
+// transport.apiRequest stubbed so no HTTP requests are made.
+describe('OpenAi, Assistant resource', () => {
+ beforeEach(() => {
+ // Fresh stub per test. NOTE(review): reassigning a module export works only
+ // when the module is transpiled to CJS — confirm against the jest/ts config.
+ (transport as any).apiRequest = jest.fn();
+ });
+
+ it('create => should throw an error if an assistant with the same name already exists', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({
+ data: [{ name: 'name' }],
+ has_more: false,
+ });
+
+ try {
+ await assistant.create.execute.call(
+ createExecuteFunctionsMock({
+ name: 'name',
+ options: {
+ failIfExists: true,
+ },
+ }),
+ 0,
+ );
+ // Fails the test if no error was thrown
+ expect(true).toBe(false);
+ } catch (error) {
+ expect(error.message).toBe("An assistant with the same name 'name' already exists");
+ }
+ });
+
+ it('create => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ await assistant.create.execute.call(
+ createExecuteFunctionsMock({
+ modelId: 'gpt-model',
+ name: 'name',
+ description: 'description',
+ instructions: 'some instructions',
+ codeInterpreter: true,
+ knowledgeRetrieval: true,
+ file_ids: [],
+ options: {},
+ }),
+ 0,
+ );
+
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants', {
+ body: {
+ description: 'description',
+ file_ids: [],
+ instructions: 'some instructions',
+ model: 'gpt-model',
+ name: 'name',
+ tools: [{ type: 'code_interpreter' }, { type: 'retrieval' }],
+ },
+ headers: { 'OpenAI-Beta': 'assistants=v1' },
+ });
+ });
+
+ it('create => should throw error if more then 20 files selected', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ try {
+ await assistant.create.execute.call(
+ createExecuteFunctionsMock({
+ file_ids: Array.from({ length: 25 }),
+ options: {},
+ }),
+ 0,
+ );
+ expect(true).toBe(false);
+ } catch (error) {
+ expect(error.message).toBe(
+ 'The maximum number of files that can be attached to the assistant is 20',
+ );
+ }
+ });
+
+ it('delete => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ await assistant.deleteAssistant.execute.call(
+ createExecuteFunctionsMock({
+ assistantId: 'assistant-id',
+ }),
+ 0,
+ );
+
+ expect(transport.apiRequest).toHaveBeenCalledWith('DELETE', '/assistants/assistant-id', {
+ headers: { 'OpenAI-Beta': 'assistants=v1' },
+ });
+ });
+
+ it('list => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({
+ data: [
+ { name: 'name1', id: 'id-1', model: 'gpt-model', other: 'other' },
+ { name: 'name2', id: 'id-2', model: 'gpt-model', other: 'other' },
+ { name: 'name3', id: 'id-3', model: 'gpt-model', other: 'other' },
+ ],
+ has_more: false,
+ });
+
+ const response = await assistant.list.execute.call(
+ createExecuteFunctionsMock({
+ simplify: true,
+ }),
+ 0,
+ );
+
+ // Simplified output keeps only name/id/model per assistant
+ expect(response).toEqual([
+ {
+ json: { name: 'name1', id: 'id-1', model: 'gpt-model' },
+ pairedItem: { item: 0 },
+ },
+ {
+ json: { name: 'name2', id: 'id-2', model: 'gpt-model' },
+ pairedItem: { item: 0 },
+ },
+ {
+ json: { name: 'name3', id: 'id-3', model: 'gpt-model' },
+ pairedItem: { item: 0 },
+ },
+ ]);
+ });
+
+ it('update => should call apiRequest with correct parameters', async () => {
+ // First call: GET returns existing tools; second call: POST applies the update
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({
+ tools: [{ type: 'existing_tool' }],
+ });
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ await assistant.update.execute.call(
+ createExecuteFunctionsMock({
+ assistantId: 'assistant-id',
+ options: {
+ modelId: 'gpt-model',
+ name: 'name',
+ instructions: 'some instructions',
+ codeInterpreter: true,
+ knowledgeRetrieval: true,
+ file_ids: [],
+ removeCustomTools: false,
+ },
+ }),
+ 0,
+ );
+
+ expect(transport.apiRequest).toHaveBeenCalledTimes(2);
+ expect(transport.apiRequest).toHaveBeenCalledWith('GET', '/assistants/assistant-id', {
+ headers: { 'OpenAI-Beta': 'assistants=v1' },
+ });
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants/assistant-id', {
+ body: {
+ file_ids: [],
+ instructions: 'some instructions',
+ model: 'gpt-model',
+ name: 'name',
+ tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'retrieval' }],
+ },
+ headers: { 'OpenAI-Beta': 'assistants=v1' },
+ });
+ });
+});
+
+// Audio resource: generate (TTS), transcribe and translate, with
+// transport.apiRequest stubbed so no HTTP requests are made.
+describe('OpenAi, Audio resource', () => {
+ beforeEach(() => {
+ // Fresh transport stub per test
+ (transport as any).apiRequest = jest.fn();
+ });
+
+ it('generate => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ const returnData = await audio.generate.execute.call(
+ createExecuteFunctionsMock({
+ model: 'tts-model',
+ input: 'input',
+ voice: 'fable',
+ options: {
+ response_format: 'flac',
+ speed: 1.25,
+ binaryPropertyOutput: 'myData',
+ },
+ }),
+ 0,
+ );
+
+ // TTS output is attached as binary data under the configured property name
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].binary?.myData).toBeDefined();
+ expect(returnData[0].pairedItem).toBeDefined();
+
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/audio/speech', {
+ body: {
+ input: 'input',
+ model: 'tts-model',
+ response_format: 'flac',
+ speed: 1.25,
+ voice: 'fable',
+ },
+ option: { encoding: 'arraybuffer', json: false, returnFullResponse: true, useStream: true },
+ });
+ });
+
+ it('transcribe => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({ text: 'transcribtion' });
+
+ const returnData = await audio.transcribe.execute.call(
+ createExecuteFunctionsMock({
+ binaryPropertyName: 'myData',
+ options: {
+ language: 'en',
+ temperature: 1.1,
+ },
+ }),
+ 0,
+ );
+
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].pairedItem).toBeDefined();
+ expect(returnData[0].json).toEqual({ text: 'transcribtion' });
+
+ // Uploads are sent as multipart/form-data
+ expect(transport.apiRequest).toHaveBeenCalledWith(
+ 'POST',
+ '/audio/transcriptions',
+ expect.objectContaining({
+ headers: { 'Content-Type': 'multipart/form-data' },
+ }),
+ );
+ });
+
+ it('translate => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({ text: 'translations' });
+
+ const returnData = await audio.translate.execute.call(
+ createExecuteFunctionsMock({
+ binaryPropertyName: 'myData',
+ options: {},
+ }),
+ 0,
+ );
+
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].pairedItem).toBeDefined();
+ expect(returnData[0].json).toEqual({ text: 'translations' });
+
+ expect(transport.apiRequest).toHaveBeenCalledWith(
+ 'POST',
+ '/audio/translations',
+ expect.objectContaining({
+ headers: { 'Content-Type': 'multipart/form-data' },
+ }),
+ );
+ });
+});
+
+// File resource: deleteFile / list / upload, with transport.apiRequest stubbed.
+describe('OpenAi, File resource', () => {
+ beforeEach(() => {
+ // Fresh transport stub per test
+ (transport as any).apiRequest = jest.fn();
+ });
+
+ it('deleteFile => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ await file.deleteFile.execute.call(
+ createExecuteFunctionsMock({
+ fileId: 'file-id',
+ }),
+ 0,
+ );
+
+ expect(transport.apiRequest).toHaveBeenCalledWith('DELETE', '/files/file-id');
+ });
+
+ it('list => should return list of files', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({
+ data: [{ file: 'file1' }, { file: 'file2' }, { file: 'file3' }],
+ });
+
+ // Item index 2 is passed through to pairedItem below
+ const returnData = await file.list.execute.call(createExecuteFunctionsMock({ options: {} }), 2);
+
+ expect(returnData.length).toEqual(3);
+ expect(returnData).toEqual([
+ {
+ json: { file: 'file1' },
+ pairedItem: { item: 2 },
+ },
+ {
+ json: { file: 'file2' },
+ pairedItem: { item: 2 },
+ },
+ {
+ json: { file: 'file3' },
+ pairedItem: { item: 2 },
+ },
+ ]);
+ });
+
+ it('upload => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({ success: true });
+
+ const returnData = await file.upload.execute.call(
+ createExecuteFunctionsMock({
+ binaryPropertyName: 'myData',
+ options: {},
+ }),
+ 0,
+ );
+
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].pairedItem).toBeDefined();
+ expect(returnData[0].json).toEqual({ success: true });
+
+ // Uploads are sent as multipart/form-data
+ expect(transport.apiRequest).toHaveBeenCalledWith(
+ 'POST',
+ '/files',
+ expect.objectContaining({
+ headers: { 'Content-Type': 'multipart/form-data' },
+ }),
+ );
+ });
+});
+
+// Image resource: generate (binary and URL output modes) and analyze,
+// with transport.apiRequest stubbed.
+describe('OpenAi, Image resource', () => {
+ beforeEach(() => {
+ // Fresh transport stub per test
+ (transport as any).apiRequest = jest.fn();
+ });
+
+ it('generate => should call apiRequest with correct parameters, return binary', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({ data: [{ b64_json: 'image1' }] });
+
+ const returnData = await image.generate.execute.call(
+ createExecuteFunctionsMock({
+ model: 'dall-e-3',
+ prompt: 'cat with a hat',
+ options: {
+ size: '1024x1024',
+ style: 'vivid',
+ quality: 'hd',
+ binaryPropertyOutput: 'myData',
+ },
+ }),
+ 0,
+ );
+
+ // Default mode requests base64 and attaches the image as binary data
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].binary?.myData).toBeDefined();
+ expect(returnData[0].pairedItem).toBeDefined();
+
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/images/generations', {
+ body: {
+ model: 'dall-e-3',
+ prompt: 'cat with a hat',
+ quality: 'hd',
+ response_format: 'b64_json',
+ size: '1024x1024',
+ style: 'vivid',
+ },
+ });
+ });
+
+ it('generate => should call apiRequest with correct parameters, return urls', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({ data: [{ url: 'image-url' }] });
+
+ const returnData = await image.generate.execute.call(
+ createExecuteFunctionsMock({
+ model: 'dall-e-3',
+ prompt: 'cat with a hat',
+ options: {
+ size: '1024x1024',
+ style: 'vivid',
+ quality: 'hd',
+ binaryPropertyOutput: 'myData',
+ returnImageUrls: true,
+ },
+ }),
+ 0,
+ );
+
+ // returnImageUrls switches the response format to plain URLs in json
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].pairedItem).toBeDefined();
+ expect(returnData).toEqual([{ json: { url: 'image-url' }, pairedItem: { item: 0 } }]);
+
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/images/generations', {
+ body: {
+ model: 'dall-e-3',
+ prompt: 'cat with a hat',
+ quality: 'hd',
+ response_format: 'url',
+ size: '1024x1024',
+ style: 'vivid',
+ },
+ });
+ });
+
+ it('analyze => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({ success: true });
+
+ const returnData = await image.analyze.execute.call(
+ createExecuteFunctionsMock({
+ text: 'image text',
+ inputType: 'url',
+ imageUrls: 'image-url1, image-url2',
+ options: {
+ detail: 'low',
+ },
+ }),
+ 0,
+ );
+
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].pairedItem).toBeDefined();
+ expect(returnData[0].json).toEqual({ success: true });
+
+ // Comma-separated URLs are split into individual image_url content parts
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/chat/completions', {
+ body: {
+ max_tokens: 300,
+ messages: [
+ {
+ content: [
+ { text: 'image text', type: 'text' },
+ { image_url: { detail: 'low', url: 'image-url1' }, type: 'image_url' },
+ { image_url: { detail: 'low', url: 'image-url2' }, type: 'image_url' },
+ ],
+ role: 'user',
+ },
+ ],
+ model: 'gpt-4-vision-preview',
+ },
+ });
+ });
+});
+
+// Text resource: classify (moderation) and message (chat completion),
+// with transport.apiRequest stubbed.
+describe('OpenAi, Text resource', () => {
+ beforeEach(() => {
+ // Fresh transport stub per test
+ (transport as any).apiRequest = jest.fn();
+ });
+
+ it('classify => should call apiRequest with correct parameters', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({ results: [{ flagged: true }] });
+
+ const returnData = await text.classify.execute.call(
+ createExecuteFunctionsMock({
+ input: 'input',
+ options: { useStableModel: true },
+ }),
+ 0,
+ );
+
+ expect(returnData.length).toEqual(1);
+ expect(returnData[0].pairedItem).toBeDefined();
+ expect(returnData[0].json).toEqual({ flagged: true });
+
+ // useStableModel pins the moderation model instead of 'latest'
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/moderations', {
+ body: { input: 'input', model: 'text-moderation-stable' },
+ });
+ });
+
+ it('message => should call apiRequest with correct parameters, no tool call', async () => {
+ // No tool_calls in the response, so the tool loop must not trigger extra requests
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({
+ choices: [{ message: { tool_calls: undefined } }],
+ });
+
+ await text.message.execute.call(
+ createExecuteFunctionsMock({
+ modelId: 'gpt-model',
+ messages: {
+ values: [{ role: 'user', content: 'message' }],
+ },
+
+ options: {},
+ }),
+ 0,
+ );
+
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/chat/completions', {
+ body: {
+ messages: [{ content: 'message', role: 'user' }],
+ model: 'gpt-model',
+ response_format: undefined,
+ tools: undefined,
+ },
+ });
+ });
+});
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/transport/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/transport/index.ts
new file mode 100644
index 0000000000000..36a5773245e4b
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/transport/index.ts
@@ -0,0 +1,37 @@
+import type {
+ IDataObject,
+ IExecuteFunctions,
+ IHttpRequestMethods,
+ ILoadOptionsFunctions,
+} from 'n8n-workflow';
+type RequestParameters = {
+ headers?: IDataObject;
+ body?: IDataObject | string;
+ qs?: IDataObject;
+ uri?: string;
+ option?: IDataObject;
+};
+
+export async function apiRequest(
+ this: IExecuteFunctions | ILoadOptionsFunctions,
+ method: IHttpRequestMethods,
+ endpoint: string,
+ parameters?: RequestParameters,
+) {
+ const { body, qs, uri, option, headers } = parameters ?? {};
+
+ const options = {
+ headers,
+ method,
+ body,
+ qs,
+ uri: uri ?? `https://api.openai.com/v1${endpoint}`,
+ json: true,
+ };
+
+ if (option && Object.keys(option).length !== 0) {
+ Object.assign(options, option);
+ }
+
+ return await this.helpers.requestWithAuthentication.call(this, 'openAiApi', options);
+}
diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json
index ccef21bfd7e83..c79a06a17cd8c 100644
--- a/packages/@n8n/nodes-langchain/package.json
+++ b/packages/@n8n/nodes-langchain/package.json
@@ -18,7 +18,9 @@
"format": "prettier nodes credentials --write",
"lint": "eslint nodes credentials",
"lintfix": "eslint nodes credentials --fix",
- "watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-ui-types\""
+ "watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-ui-types\"",
+ "test": "jest",
+ "test:dev": "jest --watch"
},
"files": [
"dist"
@@ -42,6 +44,7 @@
"dist/credentials/ZepApi.credentials.js"
],
"nodes": [
+ "dist/nodes/vendors/OpenAi/OpenAi.node.js",
"dist/nodes/agents/Agent/Agent.node.js",
"dist/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.js",
"dist/nodes/chains/ChainSummarization/ChainSummarization.node.js",
@@ -140,6 +143,7 @@
"cohere-ai": "6.2.2",
"d3-dsv": "2.0.0",
"epub2": "3.0.1",
+ "form-data": "4.0.0",
"html-to-text": "9.0.5",
"json-schema-to-zod": "1.2.0",
"langchain": "0.0.198",
diff --git a/packages/@n8n/nodes-langchain/utils/sharedFields.ts b/packages/@n8n/nodes-langchain/utils/sharedFields.ts
index 4a22829a0c8df..f51d4727b5b8c 100644
--- a/packages/@n8n/nodes-langchain/utils/sharedFields.ts
+++ b/packages/@n8n/nodes-langchain/utils/sharedFields.ts
@@ -105,9 +105,12 @@ export function getConnectionHintNoticeField(
if (groupedConnections.size === 1) {
const [[connection, locales]] = Array.from(groupedConnections);
- displayName = `This node must be connected to ${determineArticle(
- locales[0],
- )} ${locales[0].toLowerCase()}. Insert one`;
+ displayName = `This node must be connected to ${determineArticle(locales[0])} ${locales[0]
+ .toLowerCase()
+ .replace(
+ /^ai /,
+ 'AI ',
+ )}. Insert one`;
} else {
const ahrefs = Array.from(groupedConnections, ([connection, locales]) => {
// If there are multiple locales, join them with ' or '
diff --git a/packages/core/src/DirectoryLoader.ts b/packages/core/src/DirectoryLoader.ts
index 94fefddbc5f0d..cd4de72cba54a 100644
--- a/packages/core/src/DirectoryLoader.ts
+++ b/packages/core/src/DirectoryLoader.ts
@@ -247,7 +247,15 @@ export abstract class DirectoryLoader {
isCustom: boolean;
}) {
try {
- const codex = this.getCodex(filePath);
+ let codex;
+
+ if (!isCustom) {
+ codex = node.description.codex;
+ }
+
+ if (codex === undefined) {
+ codex = this.getCodex(filePath);
+ }
if (isCustom) {
codex.categories = codex.categories
diff --git a/packages/editor-ui/public/static/open-ai.svg b/packages/editor-ui/public/static/open-ai.svg
new file mode 100644
index 0000000000000..4f8812da72e68
--- /dev/null
+++ b/packages/editor-ui/public/static/open-ai.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/packages/editor-ui/src/components/Node/NodeCreator/Modes/ActionsMode.vue b/packages/editor-ui/src/components/Node/NodeCreator/Modes/ActionsMode.vue
index e6e738136916f..cc0801494d546 100644
--- a/packages/editor-ui/src/components/Node/NodeCreator/Modes/ActionsMode.vue
+++ b/packages/editor-ui/src/components/Node/NodeCreator/Modes/ActionsMode.vue
@@ -13,6 +13,8 @@ import {
REGULAR_NODE_CREATOR_VIEW,
TRIGGER_NODE_CREATOR_VIEW,
CUSTOM_API_CALL_KEY,
+ OPEN_AI_NODE_MESSAGE_ASSISTANT_TYPE,
+ OPEN_AI_NODE_TYPE,
} from '@/constants';
import { useUsersStore } from '@/stores/users.store';
@@ -24,6 +26,7 @@ import { useViewStacks } from '../composables/useViewStacks';
import ItemsRenderer from '../Renderers/ItemsRenderer.vue';
import CategorizedItemsRenderer from '../Renderers/CategorizedItemsRenderer.vue';
+import type { IDataObject } from 'n8n-workflow';
const emit = defineEmits({
nodeTypeSelected: (nodeTypes: string[]) => true,
@@ -145,6 +148,12 @@ function onSelected(actionCreateElement: INodeCreateElement) {
const actionNode = actions.value[0].key;
emit('nodeTypeSelected', [actionData.key as string, actionNode]);
+ } else if (
+ actionData.key === OPEN_AI_NODE_TYPE &&
+ (actionData?.value as IDataObject)?.resource === 'assistant' &&
+ (actionData?.value as IDataObject)?.operation === 'message'
+ ) {
+ emit('nodeTypeSelected', [OPEN_AI_NODE_MESSAGE_ASSISTANT_TYPE]);
} else {
emit('nodeTypeSelected', [actionData.key as string]);
}
diff --git a/packages/editor-ui/src/components/Node/NodeCreator/composables/useActions.ts b/packages/editor-ui/src/components/Node/NodeCreator/composables/useActions.ts
index a36719a8afe5e..0e1b15462795b 100644
--- a/packages/editor-ui/src/components/Node/NodeCreator/composables/useActions.ts
+++ b/packages/editor-ui/src/components/Node/NodeCreator/composables/useActions.ts
@@ -18,6 +18,8 @@ import {
NODE_CREATOR_OPEN_SOURCES,
NO_OP_NODE_TYPE,
OPEN_AI_ASSISTANT_NODE_TYPE,
+ OPEN_AI_NODE_MESSAGE_ASSISTANT_TYPE,
+ OPEN_AI_NODE_TYPE,
QA_CHAIN_NODE_TYPE,
SCHEDULE_TRIGGER_NODE_TYPE,
SPLIT_IN_BATCHES_NODE_TYPE,
@@ -188,6 +190,7 @@ export const useActions = () => {
AGENT_NODE_TYPE,
BASIC_CHAIN_NODE_TYPE,
OPEN_AI_ASSISTANT_NODE_TYPE,
+ OPEN_AI_NODE_MESSAGE_ASSISTANT_TYPE,
];
const isChatTriggerMissing =
@@ -228,6 +231,10 @@ export const useActions = () => {
}
addedNodes.forEach((node, index) => {
+ if (node.type === OPEN_AI_NODE_MESSAGE_ASSISTANT_TYPE) {
+ node.type = OPEN_AI_NODE_TYPE;
+ }
+
nodes.push(node);
switch (node.type) {
diff --git a/packages/editor-ui/src/components/Node/NodeCreator/viewsData.ts b/packages/editor-ui/src/components/Node/NodeCreator/viewsData.ts
index c85f36aa11ae5..7fe3917602918 100644
--- a/packages/editor-ui/src/components/Node/NodeCreator/viewsData.ts
+++ b/packages/editor-ui/src/components/Node/NodeCreator/viewsData.ts
@@ -97,7 +97,9 @@ interface NodeView {
function getAiNodesBySubcategory(nodes: INodeTypeDescription[], subcategory: string) {
return nodes
- .filter((node) => node.codex?.subcategories?.[AI_SUBCATEGORY]?.includes(subcategory))
+ .filter(
+ (node) => !node.hidden && node.codex?.subcategories?.[AI_SUBCATEGORY]?.includes(subcategory),
+ )
.map((node) => ({
key: node.name,
type: 'node',
@@ -109,6 +111,13 @@ function getAiNodesBySubcategory(nodes: INodeTypeDescription[], subcategory: str
description: node.description,
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
icon: node.icon!,
+ iconData: node.name.toLowerCase().includes('openai')
+ ? {
+ type: 'file',
+ icon: 'openai',
+ fileBuffer: '/static/open-ai.svg',
+ }
+ : undefined,
},
}))
.sort((a, b) => a.properties.displayName.localeCompare(b.properties.displayName));
diff --git a/packages/editor-ui/src/constants.ts b/packages/editor-ui/src/constants.ts
index 8ea66af18059f..4696fb720790e 100644
--- a/packages/editor-ui/src/constants.ts
+++ b/packages/editor-ui/src/constants.ts
@@ -130,6 +130,9 @@ export const MANUAL_TRIGGER_NODE_TYPE = 'n8n-nodes-base.manualTrigger';
export const MANUAL_CHAT_TRIGGER_NODE_TYPE = '@n8n/n8n-nodes-langchain.manualChatTrigger';
export const CHAT_TRIGGER_NODE_TYPE = '@n8n/n8n-nodes-langchain.chatTrigger';
export const AGENT_NODE_TYPE = '@n8n/n8n-nodes-langchain.agent';
+export const OPEN_AI_NODE_TYPE = '@n8n/n8n-nodes-langchain.openAi';
+export const OPEN_AI_NODE_MESSAGE_ASSISTANT_TYPE =
+ '@n8n/n8n-nodes-langchain.openAi.assistant.message';
export const OPEN_AI_ASSISTANT_NODE_TYPE = '@n8n/n8n-nodes-langchain.openAiAssistant';
export const BASIC_CHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.chainLlm';
export const QA_CHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.chainRetrievalQa';
diff --git a/packages/nodes-base/nodes/OpenAi/OpenAi.node.ts b/packages/nodes-base/nodes/OpenAi/OpenAi.node.ts
index a0aacffe65e47..7264d566b0598 100644
--- a/packages/nodes-base/nodes/OpenAi/OpenAi.node.ts
+++ b/packages/nodes-base/nodes/OpenAi/OpenAi.node.ts
@@ -2,11 +2,13 @@ import type { INodeType, INodeTypeDescription } from 'n8n-workflow';
import { imageFields, imageOperations } from './ImageDescription';
import { textFields, textOperations } from './TextDescription';
import { chatFields, chatOperations } from './ChatDescription';
+import { oldVersionNotice } from '../../utils/descriptions';
export class OpenAi implements INodeType {
description: INodeTypeDescription = {
displayName: 'OpenAI',
name: 'openAi',
+ hidden: true,
icon: 'file:openAi.svg',
group: ['transform'],
version: [1, 1.1],
@@ -28,13 +30,7 @@ export class OpenAi implements INodeType {
baseURL: 'https://api.openai.com',
},
properties: [
- {
- displayName:
- 'For more advanced uses, consider using an advanced AI node',
- name: 'noticeAdvanceAi',
- type: 'notice',
- default: '',
- },
+ oldVersionNotice,
{
displayName: 'Resource',
name: 'resource',
diff --git a/packages/workflow/src/index.ts b/packages/workflow/src/index.ts
index e3fc71cb0c86a..7ac82621ce3f4 100644
--- a/packages/workflow/src/index.ts
+++ b/packages/workflow/src/index.ts
@@ -33,6 +33,7 @@ export {
fileTypeFromMimeType,
assert,
removeCircularRefs,
+ updateDisplayOptions,
} from './utils';
export {
isINodeProperties,
diff --git a/packages/workflow/src/utils.ts b/packages/workflow/src/utils.ts
index 8cff1329e1933..e7e5182443fd7 100644
--- a/packages/workflow/src/utils.ts
+++ b/packages/workflow/src/utils.ts
@@ -1,7 +1,9 @@
import FormData from 'form-data';
-import type { BinaryFileType, JsonObject } from './Interfaces';
+import type { BinaryFileType, IDisplayOptions, INodeProperties, JsonObject } from './Interfaces';
import { ApplicationError } from './errors/application.error';
+import { merge } from 'lodash';
+
const readStreamClasses = new Set(['ReadStream', 'Readable', 'ReadableStream']);
// NOTE: BigInt.prototype.toJSON is not available, which causes JSON.stringify to throw an error
@@ -165,3 +167,15 @@ export const removeCircularRefs = (obj: JsonObject, seen = new Set()) => {
}
});
};
+
+export function updateDisplayOptions(
+ displayOptions: IDisplayOptions,
+ properties: INodeProperties[],
+) {
+ return properties.map((nodeProperty) => {
+ return {
+ ...nodeProperty,
+ displayOptions: merge({}, nodeProperty.displayOptions, displayOptions),
+ };
+ });
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index d6a76f8fac25b..2b20638054058 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -226,6 +226,9 @@ importers:
epub2:
specifier: 3.0.1
version: 3.0.1(ts-toolbelt@9.6.0)
+ form-data:
+ specifier: 4.0.0
+ version: 4.0.0
html-to-text:
specifier: 9.0.5
version: 9.0.5