Skip to content

Commit

Permalink
mistral[minor]: Dynamically import mistral (#3670)
Browse files Browse the repository at this point in the history
* mistral[minor]: Dynamically import mistral

* dynamic import for embeddings

* cr

* chore: lint files
  • Loading branch information
bracesproul authored Dec 15, 2023
1 parent b3142f8 commit ce417d3
Show file tree
Hide file tree
Showing 2 changed files with 31 additions and 12 deletions.
21 changes: 16 additions & 5 deletions libs/langchain-mistralai/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import {
AIMessageChunk,
} from "@langchain/core/messages";
import { type BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";
import MistralClient, {
import {
type ChatCompletionResult as MistralAIChatCompletionResult,
type ChatCompletionOptions as MistralAIChatCompletionOptions,
type Message as MistralAIInputMessage,
Expand Down Expand Up @@ -167,7 +167,9 @@ export class ChatMistralAI<

modelName = "mistral-small";

client = new MistralClient();
apiKey: string;

endpoint?: string;

temperature = 0.7;

Expand All @@ -191,7 +193,9 @@ export class ChatMistralAI<
"API key MISTRAL_API_KEY is missing for MistralAI, but it is required."
);
}
this.client = new MistralClient(apiKey, fields?.endpoint);
this.apiKey = apiKey;

// this.client = new MistralClient(apiKey, fields?.endpoint);
}

_llmType() {
Expand Down Expand Up @@ -235,14 +239,16 @@ export class ChatMistralAI<
| MistralAIChatCompletionResult
| AsyncGenerator<MistralAIChatCompletionResult>
> {
const { MistralClient } = await this.imports();
const client = new MistralClient(this.apiKey, this.endpoint);
return this.caller.call(async () => {
let res:
| MistralAIChatCompletionResult
| AsyncGenerator<MistralAIChatCompletionResult>;
if (streaming) {
res = this.client.chatStream(input);
res = client.chatStream(input);
} else {
res = await this.client.chat(input);
res = await client.chat(input);
}
return res;
});
Expand Down Expand Up @@ -380,4 +386,9 @@ export class ChatMistralAI<
_combineLLMOutput() {
return [];
}

async imports() {
const { default: MistralClient } = await import("@mistralai/mistralai");
return { MistralClient };
}
}
22 changes: 15 additions & 7 deletions libs/langchain-mistralai/src/embeddings.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import { Embeddings, type EmbeddingsParams } from "@langchain/core/embeddings";
import MistralClient, {
type EmbeddingsResult as MistralAIEmbeddingsResult,
} from "@mistralai/mistralai";
import { type EmbeddingsResult as MistralAIEmbeddingsResult } from "@mistralai/mistralai";

/**
* Interface for MistralAIEmbeddings parameters. Extends EmbeddingsParams and
Expand Down Expand Up @@ -57,8 +55,6 @@ export class MistralAIEmbeddings
extends Embeddings
implements MistralAIEmbeddingsParams
{
client = new MistralClient();

modelName = "mistral-embed";

encodingFormat = "float";
Expand All @@ -67,13 +63,18 @@ export class MistralAIEmbeddings

stripNewLines = true;

apiKey: string;

endpoint?: string;

constructor(fields?: Partial<MistralAIEmbeddingsParams>) {
super(fields ?? {});
const apiKey = fields?.apiKey ?? getEnvironmentVariable("MISTRAL_API_KEY");
if (!apiKey) {
throw new Error("API key missing for MistralAI, but it is required.");
}
this.client = new MistralClient(apiKey, fields?.endpoint);
this.apiKey = apiKey;
this.endpoint = fields?.endpoint;
this.modelName = fields?.modelName ?? this.modelName;
this.encodingFormat = fields?.encodingFormat ?? this.encodingFormat;
this.batchSize = fields?.batchSize ?? this.batchSize;
Expand Down Expand Up @@ -133,11 +134,18 @@ export class MistralAIEmbeddings
input: string | Array<string>
): Promise<MistralAIEmbeddingsResult> {
return this.caller.call(async () => {
const res = await this.client.embeddings({
const { MistralClient } = await this.imports();
const client = new MistralClient(this.apiKey, this.endpoint);
const res = await client.embeddings({
model: this.modelName,
input,
});
return res;
});
}

  /**
   * Dynamically imports the "@mistralai/mistralai" package and returns its
   * default export under the `MistralClient` key — presumably to defer the
   * dependency to call time (the commit title says "Dynamically import
   * mistral"); callers destructure `MistralClient` and construct it with
   * the stored apiKey/endpoint.
   */
  async imports() {
    const { default: MistralClient } = await import("@mistralai/mistralai");
    return { MistralClient };
  }
}

2 comments on commit ce417d3

@vercel
Copy link

@vercel vercel bot commented on ce417d3 Dec 15, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@vercel
Copy link

@vercel vercel bot commented on ce417d3 Dec 15, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

langchainjs-docs – ./docs/core_docs/

langchainjs-docs-ruddy.vercel.app
langchainjs-docs-langchain.vercel.app
langchainjs-docs-git-main-langchain.vercel.app
js.langchain.com

Please sign in to comment.