From f736a268f55a26d0510106f4b604a419ad6cdc51 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 11:18:24 +0300 Subject: [PATCH 01/15] Update env_example add MISTRAL key template --- IntelliNode/env_example | 1 + 1 file changed, 1 insertion(+) diff --git a/IntelliNode/env_example b/IntelliNode/env_example index bc032c9..5247196 100644 --- a/IntelliNode/env_example +++ b/IntelliNode/env_example @@ -7,4 +7,5 @@ AZURE_OPENAI_API_KEY= AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= REPLICATE_API_KEY= +MISTRAL_API_KEY= AWS_API_URL= From 1b5336ea09fa3788dbafb3f200413c3c5d9b22ef Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 11:52:02 +0300 Subject: [PATCH 02/15] Add Mistral wrapper and test case --- IntelliNode/config.json | 5 ++ .../test/integration/MistralAIWrapper.test.js | 53 +++++++++++++++++++ IntelliNode/wrappers/MistralAIWrapper | 40 ++++++++++++++ 3 files changed, 98 insertions(+) create mode 100644 IntelliNode/test/integration/MistralAIWrapper.test.js create mode 100644 IntelliNode/wrappers/MistralAIWrapper diff --git a/IntelliNode/config.json b/IntelliNode/config.json index 53e0bfa..4117111 100644 --- a/IntelliNode/config.json +++ b/IntelliNode/config.json @@ -50,6 +50,11 @@ "replicate": { "base": "https://api.replicate.com", "predictions": "/v1/predictions" + }, + "mistral": { + "base": "https://api.mistral.ai", + "completions": "/v1/chat/completions", + "embed": "/v1/embeddings" } }, "models": { diff --git a/IntelliNode/test/integration/MistralAIWrapper.test.js b/IntelliNode/test/integration/MistralAIWrapper.test.js new file mode 100644 index 0000000..91385d9 --- /dev/null +++ b/IntelliNode/test/integration/MistralAIWrapper.test.js @@ -0,0 +1,53 @@ +require('dotenv').config(); +const assert = require('assert'); +const MistralAIWrapper = require('../../wrappers/MistralAIWrapper'); +const mistral = new MistralAIWrapper(process.env.MISTRAL_API_KEY); + +async function testMistralGenerateModel() { + try { + const params = { + model: 'mistral-tiny', + messages: [{"role": "user", "content": "Who is the most renowned French painter?"}] + }; + + const result = await mistral.generateText(params); + + console.log('Mistral Language Model Message:', result.choices[0]['message']['content']); + + } catch (error) { + console.error('Mistral Language Model Error:', error); + } +} + +async function testMistralEmbeddings() { + try { + const params = { + model: 'mistral-embed', + input: ["Embed this sentence.", "As well as this one."] + }; + + const result = await mistral.getEmbeddings(params); + + console.log('result: ', result); + + const embeddings = result.data; + + console.log( + 'Mistral Embeddings Result Sample:', + embeddings[0]['embedding'] + ); + + assert( + embeddings.length > 0, + 'testMistralEmbeddings response length should be greater than 0' + ); + + } catch (error) { + console.error('Mistral Embeddings Error:', error); + } +} + +(async () => { + await testMistralGenerateModel(); + await testMistralEmbeddings(); +})(); \ No newline at end of file diff --git a/IntelliNode/wrappers/MistralAIWrapper b/IntelliNode/wrappers/MistralAIWrapper new file mode 100644 index 0000000..5075c25 --- /dev/null +++ b/IntelliNode/wrappers/MistralAIWrapper @@ -0,0 +1,40 @@ +const axios = require('axios'); +const config = require('../config.json'); +const connHelper = require('../utils/ConnHelper'); + +class MistralAIWrapper { + constructor(apiKey) { + this.API_BASE_URL = config.url.mistral.base; + + this.httpClient = axios.create({ + baseURL: this.API_BASE_URL, + headers: { + 
'Content-Type': 'application/json', + 'Accept': 'application/json', + Authorization: `Bearer ${apiKey}` + }, + }); + } + + async generateText(params) { + const url = config.url.mistral.completions; + try { + const response = await this.httpClient.post(url, params); + return response.data; + } catch (error) { + throw new Error(connHelper.getErrorMessage(error)); + } + } + + async getEmbeddings(params) { + const url = config.url.mistral.embed; + try { + const response = await this.httpClient.post(url, params); + return response.data; + } catch (error) { + throw new Error(connHelper.getErrorMessage(error)); + } + } +} + +module.exports = MistralAIWrapper; \ No newline at end of file From 820b5b08e04f2ef8a6ea035e4c8954578265d998 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 11:58:27 +0300 Subject: [PATCH 03/15] update the docs --- IntelliNode/README.md | 1 + IntelliNode/package.json | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/IntelliNode/README.md b/IntelliNode/README.md index 4bb2c1c..2d48200 100644 --- a/IntelliNode/README.md +++ b/IntelliNode/README.md @@ -19,6 +19,7 @@ IntelliNode is the ultimate tool to integrate with the latest language models and deep learning frameworks using **javascript**. The library provides intuitive functions for sending input to models like ChatGPT, WaveNet and Stable diffusion, and receiving generated text, speech, or images. With just a few lines of code, you can easily access the power of cutting-edge AI models to enhance your projects. # Latest Updates +- Add support for Mistral AI mixer models (one of the largest open-source models). - Update the chatbot to augment answers with your documents, allowing for a multi-model agent approach. - Update Openai with DALL·E 3 vision, speech, and ChatGPT functions (automation). - Improve Llama v2 chat speed and support llama code models. 
🦙 diff --git a/IntelliNode/package.json b/IntelliNode/package.json index 9570afd..e75219c 100644 --- a/IntelliNode/package.json +++ b/IntelliNode/package.json @@ -15,7 +15,8 @@ "image generation", "speech synthesis", "prompt", - "automation" + "automation", + "Mistral" ], "author": "IntelliNode", "license": "Apache", From 27d8d1d6943b263f5a0b7423bf43d81ee18e6984 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 11:58:48 +0300 Subject: [PATCH 04/15] update the Mistral file name --- IntelliNode/wrappers/MistralAIWrapper | 40 --------------------------- 1 file changed, 40 deletions(-) delete mode 100644 IntelliNode/wrappers/MistralAIWrapper diff --git a/IntelliNode/wrappers/MistralAIWrapper b/IntelliNode/wrappers/MistralAIWrapper deleted file mode 100644 index 5075c25..0000000 --- a/IntelliNode/wrappers/MistralAIWrapper +++ /dev/null @@ -1,40 +0,0 @@ -const axios = require('axios'); -const config = require('../config.json'); -const connHelper = require('../utils/ConnHelper'); - -class MistralAIWrapper { - constructor(apiKey) { - this.API_BASE_URL = config.url.mistral.base; - - this.httpClient = axios.create({ - baseURL: this.API_BASE_URL, - headers: { - 'Content-Type': 'application/json', - 'Accept': 'application/json', - Authorization: `Bearer ${apiKey}` - }, - }); - } - - async generateText(params) { - const url = config.url.mistral.completions; - try { - const response = await this.httpClient.post(url, params); - return response.data; - } catch (error) { - throw new Error(connHelper.getErrorMessage(error)); - } - } - - async getEmbeddings(params) { - const url = config.url.mistral.embed; - try { - const response = await this.httpClient.post(url, params); - return response.data; - } catch (error) { - throw new Error(connHelper.getErrorMessage(error)); - } - } -} - -module.exports = MistralAIWrapper; \ No newline at end of file From 45fbf625a019ddd6b4b6edcaf07821353b8535d6 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 11:59:07 +0300 Subject: [PATCH 05/15] update read me --- IntelliNode/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IntelliNode/README.md b/IntelliNode/README.md index 2d48200..cf638d9 100644 --- a/IntelliNode/README.md +++ b/IntelliNode/README.md @@ -19,7 +19,7 @@ IntelliNode is the ultimate tool to integrate with the latest language models and deep learning frameworks using **javascript**. The library provides intuitive functions for sending input to models like ChatGPT, WaveNet and Stable diffusion, and receiving generated text, speech, or images. With just a few lines of code, you can easily access the power of cutting-edge AI models to enhance your projects. # Latest Updates -- Add support for Mistral AI mixer models (one of the largest open-source models). +- Add support for Mistral AI mixer models (a rubost open-source models). - Update the chatbot to augment answers with your documents, allowing for a multi-model agent approach. - Update Openai with DALL·E 3 vision, speech, and ChatGPT functions (automation). - Improve Llama v2 chat speed and support llama code models. 
🦙 From b7aed046796f634ff0553cf199aa5f014392fbd6 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 11:59:11 +0300 Subject: [PATCH 06/15] Create MistralAIWrapper.js --- IntelliNode/wrappers/MistralAIWrapper.js | 40 ++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 IntelliNode/wrappers/MistralAIWrapper.js diff --git a/IntelliNode/wrappers/MistralAIWrapper.js b/IntelliNode/wrappers/MistralAIWrapper.js new file mode 100644 index 0000000..5075c25 --- /dev/null +++ b/IntelliNode/wrappers/MistralAIWrapper.js @@ -0,0 +1,40 @@ +const axios = require('axios'); +const config = require('../config.json'); +const connHelper = require('../utils/ConnHelper'); + +class MistralAIWrapper { + constructor(apiKey) { + this.API_BASE_URL = config.url.mistral.base; + + this.httpClient = axios.create({ + baseURL: this.API_BASE_URL, + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json', + Authorization: `Bearer ${apiKey}` + }, + }); + } + + async generateText(params) { + const url = config.url.mistral.completions; + try { + const response = await this.httpClient.post(url, params); + return response.data; + } catch (error) { + throw new Error(connHelper.getErrorMessage(error)); + } + } + + async getEmbeddings(params) { + const url = config.url.mistral.embed; + try { + const response = await this.httpClient.post(url, params); + return response.data; + } catch (error) { + throw new Error(connHelper.getErrorMessage(error)); + } + } +} + +module.exports = MistralAIWrapper; \ No newline at end of file From 43c268fa4fa818008ad9171b6e8a63d0c34384ef Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 12:03:24 +0300 Subject: [PATCH 07/15] Update MistralAIWrapper.js add the license --- IntelliNode/wrappers/MistralAIWrapper.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/IntelliNode/wrappers/MistralAIWrapper.js b/IntelliNode/wrappers/MistralAIWrapper.js index 5075c25..54c8f63 100644 --- a/IntelliNode/wrappers/MistralAIWrapper.js +++ b/IntelliNode/wrappers/MistralAIWrapper.js @@ -1,3 +1,10 @@ +/* +Apache License + +Copyright 2023 Github.com/Barqawiz/IntelliNode + + Licensed under the Apache License, Version 2.0 (the "License"); +*/ const axios = require('axios'); const config = require('../config.json'); const connHelper = require('../utils/ConnHelper'); From c86b92904ad3e497b038bb6a4e12c2d8ae870063 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 12:23:27 +0300 Subject: [PATCH 08/15] Update the chatbot to support Mistral --- IntelliNode/function/Chatbot.js | 27 ++++++++++++++++-- IntelliNode/model/input/ChatModelInput.js | 26 ++++++++++++++++++ IntelliNode/test/integration/Chatbot.test.js | 29 +++++++++++++++++++- 3 files changed, 79 insertions(+), 3 deletions(-) diff --git a/IntelliNode/function/Chatbot.js b/IntelliNode/function/Chatbot.js index 5548471..118b49d 100644 --- a/IntelliNode/function/Chatbot.js +++ b/IntelliNode/function/Chatbot.js @@ -12,6 +12,7 @@ const { GPTStreamParser } = require('../utils/StreamParser'); const { CohereStreamParser } = require('../utils/StreamParser'); const CohereAIWrapper = require('../wrappers/CohereAIWrapper'); const IntellicloudWrapper = require("../wrappers/IntellicloudWrapper"); +const MistralAIWrapper = require('../wrappers/MistralAIWrapper'); const SystemHelper = require("../utils/SystemHelper"); const { @@ -21,14 +22,16 @@ const { ChatLLamaInput, LLamaReplicateInput, CohereInput, - LLamaSageInput + LLamaSageInput, + MistralInput } = require("../model/input/ChatModelInput"); 
const SupportedChatModels = { OPENAI: "openai", REPLICATE: "replicate", SAGEMAKER: "sagemaker", - COHERE: "cohere" + COHERE: "cohere", + MISTRAL: "mistral" }; class Chatbot { @@ -58,6 +61,8 @@ class Chatbot { this.sagemakerWrapper = new AWSEndpointWrapper(customProxyHelper.url, keyValue); } else if (provider === SupportedChatModels.COHERE) { this.cohereWrapper = new CohereAIWrapper(keyValue); + } else if (provider === SupportedChatModels.MISTRAL) { + this.mistralWrapper = new MistralAIWrapper(keyValue); } else { throw new Error("Invalid provider name"); } @@ -97,6 +102,8 @@ class Chatbot { return this._chatSageMaker(modelInput); } else if (this.provider === SupportedChatModels.COHERE) { return this._chatCohere(modelInput); + } else if (this.provider === SupportedChatModels.MISTRAL) { + return this._chatMistral(modelInput); } else { throw new Error("The provider is not supported"); } @@ -372,6 +379,22 @@ class Chatbot { } } + async _chatMistral(modelInput) { + let params; + + if (modelInput instanceof MistralInput) { + params = modelInput.getChatInput(); + } else if (typeof modelInput === "object") { + params = modelInput; + } else { + throw new Error("Invalid input: Must be an instance of MistralInput or an object"); + } + + const results = await this.mistralWrapper.generateText(params); + + return results.choices.map(choice => choice.message.content); + } + } /*chatbot class*/ module.exports = { diff --git a/IntelliNode/model/input/ChatModelInput.js b/IntelliNode/model/input/ChatModelInput.js index 084277e..59ad36a 100644 --- a/IntelliNode/model/input/ChatModelInput.js +++ b/IntelliNode/model/input/ChatModelInput.js @@ -151,6 +151,31 @@ class CohereInput extends ChatGPTInput { } +class MistralInput extends ChatGPTInput { + constructor(systemMessage, options = {}) { + super(systemMessage, options); + + this.model = options.model || 'mistral-tiny'; + + } + + getChatInput() { + // Prepare the messages in the expected format + const messages = this.messages.map((message) => ({ + role: message.role, + content: message.content, + })); + + // Construct Mistral input parameters + const params = { + model: this.model, + messages: messages, + }; + + return params; + } +} + class ChatLLamaInput extends ChatModelInput { constructor(systemMessage, options = {}) { super(options); @@ -344,4 +369,5 @@ module.exports = { LLamaSageInput, LLamaReplicateInput, CohereInput, + MistralInput }; diff --git a/IntelliNode/test/integration/Chatbot.test.js b/IntelliNode/test/integration/Chatbot.test.js index 0bb8c26..1d76974 100644 --- a/IntelliNode/test/integration/Chatbot.test.js +++ b/IntelliNode/test/integration/Chatbot.test.js @@ -7,10 +7,13 @@ const { ChatGPTInput, ChatLLamaInput, CohereInput, LLamaReplicateInput, - LLamaSageInput } = require("../../model/input/ChatModelInput"); + LLamaSageInput, + MistralInput } = require("../../model/input/ChatModelInput"); +// env key const apiKey = process.env.OPENAI_API_KEY; const replicateApiKey = process.env.REPLICATE_API_KEY; +const mistralApiKey = process.env.MISTRAL_API_KEY; // openai bot const bot = new Chatbot(apiKey, SupportedChatModels.OPENAI); // llama - replicate bot @@ -18,6 +21,8 @@ const replicateBot = new Chatbot(replicateApiKey, SupportedChatModels.REPLICATE) // llama - sagemaker bot (open access) const sageBot = new Chatbot(null, SupportedChatModels.SAGEMAKER, {url: process.env.AWS_API_URL}); +// Mistral bot (Add the Mistral bot testing setup) +const mistralBot = new Chatbot(mistralApiKey, SupportedChatModels.MISTRAL); async function 
testOpenaiChatGPTCase1() { try { @@ -208,6 +213,24 @@ async function testCohereChatStream() { assert(fullText.length > 0, "Cohere chat stream response length should be greater than 0"); } +async function testMistralChatCase() { + try { + + console.log('\nchat test case 1 for Mistral: \n'); + + const input = new MistralInput("You are a helpful art assistant."); + input.addUserMessage("who is the most renowned italian painter ?"); + + const responses = await mistralBot.chat(input); + responses.forEach(response => console.log("- " + response)); + + assert(responses.length > 0, "Mistral chat response length should be greater than 0"); + + } catch (error) { + console.error("Test case failed with exception:", error.message); + } +} + (async () => { console.log('### Openai model ###') @@ -225,6 +248,10 @@ async function testCohereChatStream() { await testCohereChatCase(); await testCohereChatStream(); + console.log('### Mistral model ###') + testMistralChatCase(); + + console.log('### SageMaker llama model ###') //await testSageMakerLLamaCase(); From 606dea90a4b18f262127e2afc12444b357fe8861 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 12:30:50 +0300 Subject: [PATCH 09/15] Update Chatbot.js update Mistral chatbot to accept more inputs --- IntelliNode/function/Chatbot.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/IntelliNode/function/Chatbot.js b/IntelliNode/function/Chatbot.js index 118b49d..ac09062 100644 --- a/IntelliNode/function/Chatbot.js +++ b/IntelliNode/function/Chatbot.js @@ -384,14 +384,16 @@ class Chatbot { if (modelInput instanceof MistralInput) { params = modelInput.getChatInput(); + } if (modelInput instanceof ChatGPTInput) { + params = modelInput.getChatInput(); } else if (typeof modelInput === "object") { params = modelInput; } else { throw new Error("Invalid input: Must be an instance of MistralInput or an object"); } - - const results = await this.mistralWrapper.generateText(params); + const results = await this.mistralWrapper.generateText(params); + return results.choices.map(choice => choice.message.content); } From 8c7a045a70346dd708853c07bfacbf7b4b54ea4e Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 12:30:58 +0300 Subject: [PATCH 10/15] Update index.js export Mistral --- IntelliNode/index.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/IntelliNode/index.js b/IntelliNode/index.js index 1934a97..4af4f1e 100644 --- a/IntelliNode/index.js +++ b/IntelliNode/index.js @@ -37,7 +37,8 @@ const { LLamaReplicateInput, ChatGPTMessage, LLamaSageInput, - CohereInput + CohereInput, + MistralInput } = require('./model/input/ChatModelInput'); const FunctionModelInput = require('./model/input/FunctionModelInput'); const EmbedInput = require('./model/input/EmbedInput'); @@ -50,6 +51,7 @@ const HuggingWrapper = require('./wrappers/HuggingWrapper'); const ReplicateWrapper = require('./wrappers/ReplicateWrapper'); const AWSEndpointWrapper = require('./wrappers/AWSEndpointWrapper'); const IntellicloudWrapper = require('./wrappers/IntellicloudWrapper'); +const MistralAIWrapper = require('./wrappers/MistralAIWrapper'); // utils const { LLMEvaluation } = require('./utils/LLMEvaluation'); const AudioHelper = require('./utils/AudioHelper'); @@ -104,5 +106,7 @@ module.exports = { CohereStreamParser, ChatContext, CohereInput, - IntellicloudWrapper + IntellicloudWrapper, + MistralAIWrapper, + MistralInput }; From 3ba8e91a4e7af820527c54c64f4feb590981956d Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 
2024 12:38:46 +0300 Subject: [PATCH 11/15] Update README.md update npm documentation --- IntelliNode/README.md | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/IntelliNode/README.md b/IntelliNode/README.md index cf638d9..2bc719c 100644 --- a/IntelliNode/README.md +++ b/IntelliNode/README.md @@ -19,7 +19,7 @@ IntelliNode is the ultimate tool to integrate with the latest language models and deep learning frameworks using **javascript**. The library provides intuitive functions for sending input to models like ChatGPT, WaveNet and Stable diffusion, and receiving generated text, speech, or images. With just a few lines of code, you can easily access the power of cutting-edge AI models to enhance your projects. # Latest Updates -- Add support for Mistral AI mixer models (a rubost open-source models). +- Add Mistral AI models as chatbot providers (a rubost mixer open-source models). - Update the chatbot to augment answers with your documents, allowing for a multi-model agent approach. - Update Openai with DALL·E 3 vision, speech, and ChatGPT functions (automation). - Improve Llama v2 chat speed and support llama code models. 🦙 @@ -36,7 +36,7 @@ Join the [discord server](https://discord.gg/VYgCh2p3Ww) for the latest updates ### Chatbot 1. imports: ```js -const { Chatbot, ChatGPTInput, ChatGPTMessage } = require('intellinode'); +const { Chatbot, ChatGPTInput } = require('intellinode'); ``` 2. call: ```js @@ -48,7 +48,24 @@ input.addUserMessage('What is the distance between the Earth and the Moon?'); const bot = new Chatbot(apiKey); const responses = await bot.chat(input); ``` + +### Mistral Chatbot +IntelliNode enable effortless swapping between AI models. +1. imports: +```js +const { Chatbot, MistralInput } = require('intellinode'); +``` +2. call: +```js +const input = new MistralInput('You are an art expert.'); +input.addUserMessage('Who painted the Mona Lisa?'); + +const mistralBot = new Chatbot('your-mistral-ai-api-key', 'mistral'); +const responses = await mistralBot.chat(input); +``` + The documentation on how to switch the chatbot between ChatGPT and LLama can be found in the [IntelliNode Wiki](https://github.com/Barqawiz/IntelliNode/wiki/ChatBot). + ### Semantic Search 1. imports: ```js From a9a0b873bc8bc814a4d6c701ab3fd354fa5ba7e1 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 12:41:10 +0300 Subject: [PATCH 12/15] increase the version --- IntelliNode/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IntelliNode/package.json b/IntelliNode/package.json index e75219c..24c364f 100644 --- a/IntelliNode/package.json +++ b/IntelliNode/package.json @@ -1,6 +1,6 @@ { "name": "intellinode", - "version": "1.6.0", + "version": "1.7.0", "description": "Integrate and evaluate various AI models, such as ChatGPT, Llama, Diffusion, Cohere, and Hugging Face.", "main": "index.js", "keywords": [ From 54a6aa8ab69435b81ed910d540d5259b5ac62ab5 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 12:49:43 +0300 Subject: [PATCH 13/15] Update README.md adjust the release notes --- IntelliNode/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IntelliNode/README.md b/IntelliNode/README.md index 2bc719c..bcdb7af 100644 --- a/IntelliNode/README.md +++ b/IntelliNode/README.md @@ -19,7 +19,7 @@ IntelliNode is the ultimate tool to integrate with the latest language models and deep learning frameworks using **javascript**. 
The library provides intuitive functions for sending input to models like ChatGPT, WaveNet and Stable diffusion, and receiving generated text, speech, or images. With just a few lines of code, you can easily access the power of cutting-edge AI models to enhance your projects. # Latest Updates -- Add Mistral AI models as chatbot providers (a rubost mixer open-source models). +- Add Mistral SMoE model as a chatbot provider (open source mixture of experts). - Update the chatbot to augment answers with your documents, allowing for a multi-model agent approach. - Update Openai with DALL·E 3 vision, speech, and ChatGPT functions (automation). - Improve Llama v2 chat speed and support llama code models. 🦙 From bc86349d26f08ebe7292689dd7cbb4b5e33b1943 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 13:01:40 +0300 Subject: [PATCH 14/15] update the documentation and version --- IntelliNode/README.md | 5 +++-- IntelliNode/package.json | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/IntelliNode/README.md b/IntelliNode/README.md index bcdb7af..130de33 100644 --- a/IntelliNode/README.md +++ b/IntelliNode/README.md @@ -53,14 +53,15 @@ const responses = await bot.chat(input); IntelliNode enable effortless swapping between AI models. 1. imports: ```js -const { Chatbot, MistralInput } = require('intellinode'); +const { Chatbot, MistralInput, SupportedChatModels } = require('intellinode'); ``` 2. call: ```js const input = new MistralInput('You are an art expert.'); input.addUserMessage('Who painted the Mona Lisa?'); -const mistralBot = new Chatbot('your-mistral-ai-api-key', 'mistral'); +// get the api key from mistral.ai +const mistralBot = new Chatbot(apiKey, SupportedChatModels.MISTRAL); const responses = await mistralBot.chat(input); ``` diff --git a/IntelliNode/package.json b/IntelliNode/package.json index 24c364f..01ada39 100644 --- a/IntelliNode/package.json +++ b/IntelliNode/package.json @@ -1,7 +1,7 @@ { "name": "intellinode", - "version": "1.7.0", - "description": "Integrate and evaluate various AI models, such as ChatGPT, Llama, Diffusion, Cohere, and Hugging Face.", + "version": "1.7.1", + "description": "Integrate and evaluate various AI models, such as ChatGPT, Llama, Diffusion, Cohere, Mistral and Hugging Face.", "main": "index.js", "keywords": [ "ai", From 4d76f7a1ca03a8a94b6366aa220134a9d399b842 Mon Sep 17 00:00:00 2001 From: cybercoder Date: Wed, 3 Jan 2024 13:05:00 +0300 Subject: [PATCH 15/15] Update README.md update git readme --- README.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/README.md b/README.md index 9fe7456..3c48ff4 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,23 @@ input.addUserMessage('What is the distance between the Earth and the Moon?'); const chatbot = new Chatbot(OPENAI_API_KEY, 'openai'); const responses = await chatbot.chat(input); ``` +### Mistral Chatbot +IntelliNode enable effortless swapping between AI models. +1. imports: +```js +const { Chatbot, MistralInput, SupportedChatModels } = require('intellinode'); +``` +2. call: +```js +const input = new MistralInput('You are an art expert.'); +input.addUserMessage('Who painted the Mona Lisa?'); + +const mistralBot = new Chatbot(apiKey, SupportedChatModels.MISTRAL); +const responses = await mistralBot.chat(input); +``` + +The documentation to switch the chatbot provider between ChatGPT, LLama, Cohere and more can be found in the [IntelliNode Wiki](https://github.com/Barqawiz/IntelliNode/wiki/ChatBot). 
+
 The documentation on how to switch the chatbot between **ChatGPT** and **LLama** can be found in this [wiki page](https://github.com/Barqawiz/IntelliNode/wiki/ChatBot).
 
 ### Semantic search
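
Putting the series together: the sketch below is not part of the patches; it only illustrates how the Mistral pieces added above would be driven from application code once the package is installed. It assumes the `intellinode` exports introduced in PATCH 10 (`Chatbot`, `SupportedChatModels`, `MistralInput`, `MistralAIWrapper`), a `MISTRAL_API_KEY` environment variable as in `env_example`, and the `mistral-tiny` / `mistral-embed` model names used in the integration tests.

```js
// Illustrative only -- mirrors what the integration tests in this series exercise,
// but driven through the public package entry point instead of relative requires.
const {
  Chatbot,
  SupportedChatModels,
  MistralInput,
  MistralAIWrapper
} = require('intellinode');

const apiKey = process.env.MISTRAL_API_KEY; // assumed to be set, as in env_example

(async () => {
  // Chat through the generic Chatbot facade (PATCH 08):
  // provider 'mistral', MistralInput defaults to the 'mistral-tiny' model.
  const input = new MistralInput('You are a helpful art assistant.');
  input.addUserMessage('Who is the most renowned French painter?');

  const bot = new Chatbot(apiKey, SupportedChatModels.MISTRAL);
  const responses = await bot.chat(input);
  responses.forEach(text => console.log('-', text));

  // Embeddings through the low-level wrapper (PATCH 06):
  // posts to the /v1/embeddings endpoint configured in config.json.
  const mistral = new MistralAIWrapper(apiKey);
  const result = await mistral.getEmbeddings({
    model: 'mistral-embed',
    input: ['Embed this sentence.', 'As well as this one.']
  });
  console.log('first vector length:', result.data[0].embedding.length);
})();
```

This is the same flow covered by `MistralAIWrapper.test.js` and the Mistral case in `Chatbot.test.js`, so the shapes of the responses (`choices[].message.content` for chat, `data[].embedding` for embeddings) follow directly from those tests.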